Skip to content

Commit 0a56dc2

Browse files
committed
🤖 feat: Support OpenAI Web Search models (#6313)
* fix: reorder vision model entries so cheaper models are matched first * fix: add endpoint property to bedrock client initialization * fix: exclude unsupported parameters for OpenAI Web Search models * fix: enhance options handling to exclude unsupported parameters for Web Search models
1 parent 666f9c3 commit 0a56dc2

File tree

5 files changed

+61
-18
lines changed

5 files changed

+61
-18
lines changed

api/app/clients/OpenAIClient.js

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1272,6 +1272,29 @@ ${convo}
12721272
});
12731273
}
12741274

1275+
/** Note: OpenAI Web Search models do not support any known parameters besides `max_tokens` */
1276+
if (modelOptions.model && /gpt-4o.*search/.test(modelOptions.model)) {
1277+
const searchExcludeParams = [
1278+
'frequency_penalty',
1279+
'presence_penalty',
1280+
'temperature',
1281+
'top_p',
1282+
'top_k',
1283+
'stop',
1284+
'logit_bias',
1285+
'seed',
1286+
'response_format',
1287+
'n',
1288+
'logprobs',
1289+
'user',
1290+
];
1291+
1292+
this.options.dropParams = this.options.dropParams || [];
1293+
this.options.dropParams = [
1294+
...new Set([...this.options.dropParams, ...searchExcludeParams]),
1295+
];
1296+
}
1297+
12751298
if (this.options.dropParams && Array.isArray(this.options.dropParams)) {
12761299
this.options.dropParams.forEach((param) => {
12771300
delete modelOptions[param];

api/server/services/Endpoints/bedrock/initialize.js

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,8 +23,9 @@ const initializeClient = async ({ req, res, endpointOption }) => {
2323
const agent = {
2424
id: EModelEndpoint.bedrock,
2525
name: endpointOption.name,
26-
instructions: endpointOption.promptPrefix,
2726
provider: EModelEndpoint.bedrock,
27+
endpoint: EModelEndpoint.bedrock,
28+
instructions: endpointOption.promptPrefix,
2829
model: endpointOption.model_parameters.model,
2930
model_parameters: endpointOption.model_parameters,
3031
};

api/server/services/Endpoints/openAI/initialize.js

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -135,12 +135,9 @@ const initializeClient = async ({
135135
}
136136

137137
if (optionsOnly) {
138-
clientOptions = Object.assign(
139-
{
140-
modelOptions: endpointOption.model_parameters,
141-
},
142-
clientOptions,
143-
);
138+
const modelOptions = endpointOption.model_parameters;
139+
modelOptions.model = modelName;
140+
clientOptions = Object.assign({ modelOptions }, clientOptions);
144141
clientOptions.modelOptions.user = req.user.id;
145142
const options = getLLMConfig(apiKey, clientOptions);
146143
if (!clientOptions.streamRate) {

api/server/services/Endpoints/openAI/llm.js

Lines changed: 24 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ const { isEnabled } = require('~/server/utils');
2828
* @returns {Object} Configuration options for creating an LLM instance.
2929
*/
3030
function getLLMConfig(apiKey, options = {}, endpoint = null) {
31-
const {
31+
let {
3232
modelOptions = {},
3333
reverseProxyUrl,
3434
defaultQuery,
@@ -50,10 +50,32 @@ function getLLMConfig(apiKey, options = {}, endpoint = null) {
5050
if (addParams && typeof addParams === 'object') {
5151
Object.assign(llmConfig, addParams);
5252
}
53+
/** Note: OpenAI Web Search models do not support any known parameters besides `max_tokens` */
54+
if (modelOptions.model && /gpt-4o.*search/.test(modelOptions.model)) {
55+
const searchExcludeParams = [
56+
'frequency_penalty',
57+
'presence_penalty',
58+
'temperature',
59+
'top_p',
60+
'top_k',
61+
'stop',
62+
'logit_bias',
63+
'seed',
64+
'response_format',
65+
'n',
66+
'logprobs',
67+
'user',
68+
];
69+
70+
dropParams = dropParams || [];
71+
dropParams = [...new Set([...dropParams, ...searchExcludeParams])];
72+
}
5373

5474
if (dropParams && Array.isArray(dropParams)) {
5575
dropParams.forEach((param) => {
56-
delete llmConfig[param];
76+
if (llmConfig[param]) {
77+
llmConfig[param] = undefined;
78+
}
5779
});
5880
}
5981

packages/data-provider/src/config.ts

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -827,28 +827,28 @@ export const supportsBalanceCheck = {
827827
};
828828

829829
export const visionModels = [
830-
'grok-3',
831-
'grok-2-vision',
832830
'grok-vision',
833-
'gpt-4.5',
834-
'gpt-4o',
831+
'grok-2-vision',
832+
'grok-3',
835833
'gpt-4o-mini',
836-
'o1',
834+
'gpt-4o',
837835
'gpt-4-turbo',
838836
'gpt-4-vision',
837+
'o1',
838+
'gpt-4.5',
839839
'llava',
840840
'llava-13b',
841841
'gemini-pro-vision',
842842
'claude-3',
843-
'gemini-2.0',
844-
'gemini-1.5',
845843
'gemini-exp',
844+
'gemini-1.5',
845+
'gemini-2.0',
846846
'moondream',
847847
'llama3.2-vision',
848-
'llama-3.2-90b-vision',
849848
'llama-3.2-11b-vision',
850-
'llama-3-2-90b-vision',
851849
'llama-3-2-11b-vision',
850+
'llama-3.2-90b-vision',
851+
'llama-3-2-90b-vision',
852852
];
853853
export enum VisionModes {
854854
generative = 'generative',

0 commit comments

Comments
 (0)