fix compatibility issue for transformers 4.46+
Files changed:
- config.json +1 -0
- configuration_internvl_chat.py +1 -1
config.json CHANGED

@@ -21,6 +21,7 @@
     "attention_dropout": 0.0,
     "auto_map": {
       "AutoConfig": "configuration_phi3.Phi3Config",
+      "AutoModel": "modeling_phi3.Phi3ForCausalLM",
       "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
     },
     "bad_words_ids": null,
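The config.json change adds an "AutoModel" entry next to the existing "AutoModelForCausalLM" one. Each Auto* class in transformers resolves trust-remote-code checkpoints through its own key in auto_map, so a load that goes through AutoModel cannot find the remote Phi3 class when only the AutoModelForCausalLM key is present; per the commit title, transformers 4.46+ exercises that path. A minimal sketch of the load this unblocks, with "path/to/checkpoint" standing in for a hypothetical local directory that ships the patched config.json together with the configuration_phi3.py / modeling_phi3.py remote-code files:

    from transformers import AutoConfig, AutoModel

    # Hypothetical local checkpoint directory; any repo carrying the
    # auto_map above behaves the same way.
    checkpoint = "path/to/checkpoint"

    # AutoConfig resolves through auto_map["AutoConfig"]; AutoModel
    # resolves through auto_map["AutoModel"], the key this commit adds.
    config = AutoConfig.from_pretrained(checkpoint, trust_remote_code=True)
    model = AutoModel.from_pretrained(checkpoint, trust_remote_code=True)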
configuration_internvl_chat.py CHANGED

@@ -49,7 +49,7 @@ class InternVLChatConfig(PretrainedConfig):
         self.vision_config = InternVisionConfig(**vision_config)
         if llm_config.get('architectures')[0] == 'LlamaForCausalLM':
             self.llm_config = LlamaConfig(**llm_config)
-        elif llm_config.get
+        elif llm_config.get('architectures')[0] == 'Phi3ForCausalLM':
             self.llm_config = Phi3Config(**llm_config)
         else:
             raise ValueError('Unsupported architecture: {}'.format(llm_config.get(['architectures'])[0]))
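For context, the patched elif sits in InternVLChatConfig.__init__ and routes the nested llm_config dict to a sub-config class based on the first entry of its "architectures" list. A minimal sketch of that dispatch, assuming the public LlamaConfig and Phi3Config classes shipped with recent transformers releases (including 4.46) in place of the repo's remote-code versions, and a made-up hidden_size purely for illustration:

    from transformers import LlamaConfig, Phi3Config

    def build_llm_config(llm_config: dict):
        # Same dispatch as InternVLChatConfig.__init__: the nested dict's
        # "architectures" list names the language-model class.
        arch = llm_config.get('architectures')[0]
        if arch == 'LlamaForCausalLM':
            return LlamaConfig(**llm_config)
        elif arch == 'Phi3ForCausalLM':
            return Phi3Config(**llm_config)
        else:
            raise ValueError('Unsupported architecture: {}'.format(arch))

    # The Phi3 branch, matching this repo's config.json.
    cfg = build_llm_config({'architectures': ['Phi3ForCausalLM'], 'hidden_size': 3072})
    print(type(cfg).__name__)  # Phi3Config

One incidental note on the unchanged error path: llm_config.get(['architectures']) passes a list to dict.get, which raises TypeError (unhashable type: 'list') before the ValueError message is ever formatted; the sketch above formats the already-extracted arch instead.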