2 files changed: +3 −3 lines changed
@@ -3352,7 +3352,7 @@ class MiniCPMv26ChatHandler(Llava15ChatHandler):
     )


-class PaligemmaChatHandler(Llava15ChatHandler):
+class PaliGemmaChatHandler(Llava15ChatHandler):
     def __call__(
         self,
         *,

@@ -175,14 +175,14 @@ def load_llama_from_model_settings(settings: ModelSettings) -> llama_cpp.Llama:
         assert settings.clip_model_path is not None, "clip model not found"
         if settings.hf_model_repo_id is not None:
             chat_handler = (
-                llama_cpp.llama_chat_format.PaligemmaChatHandler.from_pretrained(
+                llama_cpp.llama_chat_format.PaliGemmaChatHandler.from_pretrained(
                     repo_id=settings.hf_model_repo_id,
                     filename=settings.clip_model_path,
                     verbose=settings.verbose,
                 )
             )
         else:
-            chat_handler = llama_cpp.llama_chat_format.PaligemmaChatHandler(
+            chat_handler = llama_cpp.llama_chat_format.PaliGemmaChatHandler(
                 clip_model_path=settings.clip_model_path, verbose=settings.verbose
             )
     elif settings.chat_format == "hf-autotokenizer":
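
For context, a minimal sketch of how the renamed PaliGemmaChatHandler could be wired into a llama_cpp.Llama instance after this change. The model and projector file paths below are placeholders (not part of the diff), and the call patterns simply mirror the two branches shown above.

# Minimal usage sketch; file paths are hypothetical placeholders.
import llama_cpp
import llama_cpp.llama_chat_format

# Local-files branch, mirroring the else-branch of the diff above.
chat_handler = llama_cpp.llama_chat_format.PaliGemmaChatHandler(
    clip_model_path="path/to/mmproj.gguf",  # placeholder: multimodal projector file
    verbose=True,
)

llm = llama_cpp.Llama(
    model_path="path/to/paligemma-model.gguf",  # placeholder: main model file
    chat_handler=chat_handler,
    n_ctx=2048,
)

The hf_model_repo_id branch instead uses PaliGemmaChatHandler.from_pretrained(repo_id=..., filename=..., verbose=...) exactly as shown in the diff.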