@@ -1726,6 +1726,31 @@ def get_default_conv_template(self, model_path: str) -> Conversation:
         return get_conv_template("llama2-chinese")
 
 
+class Lamma2ChineseAlpacaAdapter(BaseModelAdapter):
+    """The model adapter for ymcui/Chinese-LLaMA-Alpaca sft"""
+
+    def match(self, model_path: str):
+        return "chinese-alpaca" in model_path.lower()
+
+    def load_model(self, model_path: str, from_pretrained_kwargs: dict):
+        revision = from_pretrained_kwargs.get("revision", "main")
+        tokenizer = AutoTokenizer.from_pretrained(
+            model_path,
+            trust_remote_code=True,
+            revision=revision,
+        )
+        model = AutoModelForCausalLM.from_pretrained(
+            model_path,
+            trust_remote_code=True,
+            low_cpu_mem_usage=True,
+            **from_pretrained_kwargs,
+        )
+        return model, tokenizer
+
+    def get_default_conv_template(self, model_path: str) -> Conversation:
+        return get_conv_template("llama2-chinese")
+
+
 class VigogneAdapter(BaseModelAdapter):
     """The model adapter for vigogne (e.g., bofenghuang/vigogne-2-7b-chat)"""
 
@@ -1941,6 +1966,7 @@ def get_default_conv_template(self, model_path: str) -> Conversation:
 register_model_adapter(BGEAdapter)
 register_model_adapter(E5Adapter)
 register_model_adapter(Lamma2ChineseAdapter)
+register_model_adapter(Lamma2ChineseAlpacaAdapter)
 register_model_adapter(VigogneAdapter)
 register_model_adapter(OpenLLaMaOpenInstructAdapter)
 register_model_adapter(ReaLMAdapter)
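
A minimal sketch (not part of the commit) of how the newly registered adapter would be picked up at runtime: FastChat's get_model_adapter walks the registered adapters and returns the first one whose match() succeeds, so a model path containing "chinese-alpaca" should be routed to Lamma2ChineseAlpacaAdapter and its "llama2-chinese" conversation template. The model path below is a hypothetical example.

# Sketch only, assuming the adapter above is registered as in this commit.
from fastchat.model.model_adapter import get_model_adapter

model_path = "hfl/chinese-alpaca-2-7b"  # hypothetical path containing "chinese-alpaca"
adapter = get_model_adapter(model_path)
print(type(adapter).__name__)  # expected: Lamma2ChineseAlpacaAdapter

conv = adapter.get_default_conv_template(model_path)
print(conv.name)  # expected: llama2-chinese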