-
Notifications
You must be signed in to change notification settings - Fork 804
Description
chatglm3_mode False
[['cuda', 'fp16']]
Exception in thread Thread-1 (load_model):
Traceback (most recent call last):
File "/home/server/common/sunyankang/anaconda3/envs/wenda311/lib/python3.11/threading.py", line 1038, in _bootstrap_inner
self.run()
File "/home/server/common/sunyankang/anaconda3/envs/wenda311/lib/python3.11/threading.py", line 975, in run
self._target(*self._args, **self._kwargs)
File "/home/server/workspace/project/oilfield/wenda/wenda.py", line 53, in load_model
LLM.load_model()
File "/home/server/workspace/project/oilfield/wenda/llms/llm_glm6b.py", line 102, in load_model
tokenizer = AutoTokenizer.from_pretrained(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/server/common/sunyankang/anaconda3/envs/wenda311/lib/python3.11/site-packages/transformers/models/auto/tokenization_auto.py", line 801, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/server/common/sunyankang/anaconda3/envs/wenda311/lib/python3.11/site-packages/transformers/tokenization_utils_base.py", line 2029, in from_pretrained
return cls._from_pretrained(
^^^^^^^^^^^^^^^^^^^^^
File "/home/server/common/sunyankang/anaconda3/envs/wenda311/lib/python3.11/site-packages/transformers/tokenization_utils_base.py", line 2261, in _from_pretrained
tokenizer = cls(*init_inputs, **init_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/server/.cache/huggingface/modules/transformers_modules/chatglm2-6b/tokenization_chatglm.py", line 69, in __init__
super().__init__(padding_side=padding_side, **kwargs)
File "/home/server/common/sunyankang/anaconda3/envs/wenda311/lib/python3.11/site-packages/transformers/tokenization_utils.py", line 367, in __init__
self._add_tokens(
File "/home/server/common/sunyankang/anaconda3/envs/wenda311/lib/python3.11/site-packages/transformers/tokenization_utils.py", line 467, in _add_tokens
current_vocab = self.get_vocab().copy()
^^^^^^^^^^^^^^^^
File "/home/server/.cache/huggingface/modules/transformers_modules/chatglm2-6b/tokenization_chatglm.py", line 99, in get_vocab
vocab = {self._convert_id_to_token(i): i for i in range(self.vocab_size)}
^^^^^^^^^^^^^^^
File "/home/server/.cache/huggingface/modules/transformers_modules/chatglm2-6b/tokenization_chatglm.py", line 95, in vocab_size
return self.tokenizer.n_words
^^^^^^^^^^^^^^
AttributeError: 'ChatGLMTokenizer' object has no attribute 'tokenizer'. Did you mean: 'tokenize'?