
Commit f203a5c

fix Telechat model (modelscope#623)

1 parent: a4c8545

File tree: 1 file changed, +6 -6 lines

swift/llm/utils/model.py

Lines changed: 6 additions & 6 deletions
@@ -1204,12 +1204,6 @@ def cross_entropy_forward(self, inputs: Tensor,
     support_flash_attn=True,
     support_vllm=True,
     support_gradient_checkpointing=False)
-@register_model(
-    ModelType.telechat_12b,
-    'TeleAI/TeleChat-12B',
-    LoRATM.telechat,
-    TemplateType.telechat,
-    support_flash_attn=True)
 def get_model_tokenizer_with_flash_attn(model_dir: str,
                                         torch_dtype: Dtype,
                                         model_kwargs: Dict[str, Any],
@@ -2355,6 +2349,12 @@ def get_model_tokenizer_codellama(model_dir: str,
     support_vllm=True,
     support_gradient_checkpointing=False,
     tags=['coding'])
+@register_model(
+    ModelType.telechat_12b,
+    'TeleAI/TeleChat-12B',
+    LoRATM.telechat,
+    TemplateType.telechat,
+    support_flash_attn=True)
 def get_model_tokenizer_phi(model_dir: str,
                             torch_dtype: Dtype,
                             model_kwargs: Dict[str, Any],
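
For context on how the relocated registration is consumed, here is a minimal sketch of loading TeleChat-12B after this fix. It assumes the get_model_tokenizer helper and ModelType constant are exported from swift.llm (as in ms-swift 1.x) and that model_kwargs accepts a device_map; these details are assumptions and do not appear in the diff itself.

import torch

from swift.llm import ModelType, get_model_tokenizer

# After this commit, ModelType.telechat_12b resolves through the
# @register_model entry shown above, which now decorates
# get_model_tokenizer_phi and points at the 'TeleAI/TeleChat-12B' checkpoint.
model, tokenizer = get_model_tokenizer(
    ModelType.telechat_12b,
    torch_dtype=torch.bfloat16,
    model_kwargs={'device_map': 'auto'})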
