Skip to content

Commit f12bb75

Browse files
committed
Update the output_type formatting of the llamacpp model
1 parent f9e0a15 commit f12bb75

File tree

1 file changed

+4
-2
lines changed

1 file changed

+4
-2
lines changed

outlines/models/llamacpp.py

Lines changed: 4 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -197,7 +197,7 @@ def format_chat_input(self, model_input: Chat) -> list:
197197

198198
def format_output_type(
199199
self, output_type: Optional[OutlinesLogitsProcessor] = None,
200-
) -> "LogitsProcessorList":
200+
) -> Optional["LogitsProcessorList"]:
201201
"""Generate the logits processor argument to pass to the model.
202202
203203
Parameters
@@ -213,7 +213,9 @@ def format_output_type(
213213
"""
214214
from llama_cpp import LogitsProcessorList
215215

216-
return LogitsProcessorList([output_type])
216+
if output_type is not None:
217+
return LogitsProcessorList([output_type])
218+
return None
217219

218220

219221
class LlamaCpp(Model):

0 commit comments

Comments (0)