Fix https://github.com/huggingface/peft/issues/1974#issue-2437471248
#74 opened by Finger-ebic
modeling_chatglm.py +4 -1

modeling_chatglm.py CHANGED
@@ -872,7 +872,10 @@ class ChatGLMModel(ChatGLMPreTrainedModel):
         use_cache = use_cache if use_cache is not None else self.config.use_cache
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
 
-        batch_size, seq_length = input_ids.shape
+        if input_ids is not None:
+            batch_size, seq_length = input_ids.shape
+        else:
+            batch_size, seq_length, _ = inputs_embeds.shape
 
         if inputs_embeds is None:
             inputs_embeds = self.embedding(input_ids)
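Context for the change: PEFT prompt-learning methods (as in the linked issue) call the model with inputs_embeds only and leave input_ids as None, so the old unconditional input_ids.shape raised an AttributeError. Below is a minimal sketch of the new shape derivation as a standalone function; the helper name infer_batch_and_seq and the tensor sizes are illustrative assumptions, not part of modeling_chatglm.py.

# Minimal sketch of the shape logic added by this patch, shown outside the model.
# Only the if/else mirrors the change to ChatGLMModel.forward above; the helper
# name is hypothetical.
import torch

def infer_batch_and_seq(input_ids=None, inputs_embeds=None):
    # input_ids has shape (batch_size, seq_length);
    # inputs_embeds has shape (batch_size, seq_length, hidden_size).
    if input_ids is not None:
        batch_size, seq_length = input_ids.shape
    else:
        batch_size, seq_length, _ = inputs_embeds.shape
    return batch_size, seq_length

# A call with only inputs_embeds (input_ids=None), as PEFT prompt tuning does,
# now works instead of failing on `input_ids.shape`.
embeds = torch.randn(2, 16, 4096)  # assumed (batch, seq, hidden) sizes for illustration
print(infer_batch_and_seq(inputs_embeds=embeds))  # -> (2, 16)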