Commit b29ed45

fix(model_qwen): compatible with both vLLM and Bailian thinking switch control
1 parent 54b99ff commit b29ed45

File tree

1 file changed: +6 -0 lines changed

components/model/qwen/chatmodel.go

Lines changed: 6 additions & 0 deletions
@@ -226,10 +226,16 @@ func (cm *ChatModel) parseCustomOpetions(opts ...model.Option) []model.Option {
 	// Using extra fields to pass the custom options to the underlying client
 	extraFields := make(map[string]any)
 	if qwenOpts.EnableThinking != nil {
+		// If using BaiLian, use `chat_template_kwargs`.
+		// See https://bailian.console.aliyun.com/?tab=api#/api/?type=model&url=2712576
 		enableThinkingSwitch := map[string]bool{
 			"enable_thinking": *qwenOpts.EnableThinking,
 		}
 		extraFields["chat_template_kwargs"] = enableThinkingSwitch
+
+		// If using vLLM, use `enable_thinking` directly.
+		// See https://qwen.readthedocs.io/zh-cn/latest/deployment/vllm.html
+		extraFields["enable_thinking"] = *qwenOpts.EnableThinking
 	}
 	if len(extraFields) > 0 {
 		opts = append(opts, openai.WithExtraFields(extraFields))
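
For context, a minimal standalone sketch of the behavior this commit introduces: the same EnableThinking value is written both into `chat_template_kwargs` (read by Bailian) and as a top-level `enable_thinking` field (read by vLLM). The helper buildExtraFields below is hypothetical and only mirrors the logic added to parseCustomOpetions; it is not part of the chatmodel.go API.

	// Sketch only: buildExtraFields is a hypothetical stand-in mirroring the
	// extra-fields logic from the diff above; it is not exported by the package.
	package main

	import (
		"encoding/json"
		"fmt"
	)

	func buildExtraFields(enableThinking *bool) map[string]any {
		extraFields := make(map[string]any)
		if enableThinking != nil {
			// Bailian reads the switch from chat_template_kwargs.
			extraFields["chat_template_kwargs"] = map[string]bool{
				"enable_thinking": *enableThinking,
			}
			// vLLM reads it as a top-level request field.
			extraFields["enable_thinking"] = *enableThinking
		}
		return extraFields
	}

	func main() {
		enable := false // disable thinking for this request
		b, _ := json.MarshalIndent(buildExtraFields(&enable), "", "  ")
		fmt.Println(string(b))
		// Prints:
		// {
		//   "chat_template_kwargs": {
		//     "enable_thinking": false
		//   },
		//   "enable_thinking": false
		// }
	}

Sending both keys lets the same client-side option work whichever backend serves the model, which is the intent stated by the two comments in the diff.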
