20 changes: 10 additions & 10 deletions api/app/clients/OpenAIClient.js
@@ -65,7 +65,7 @@ class OpenAIClient extends BaseClient {
     /** @type {OpenAIUsageMetadata | undefined} */
     this.usage;
     /** @type {boolean|undefined} */
-    this.isO1Model;
+    this.isOmni;
     /** @type {SplitStreamHandler | undefined} */
     this.streamHandler;
   }
@@ -105,8 +105,8 @@ class OpenAIClient extends BaseClient {
       this.checkVisionRequest(this.options.attachments);
     }

-    const o1Pattern = /\bo1\b/i;
-    this.isO1Model = o1Pattern.test(this.modelOptions.model);
+    const omniPattern = /\b(o1|o3)\b/i;
+    this.isOmni = omniPattern.test(this.modelOptions.model);

     const { OPENROUTER_API_KEY, OPENAI_FORCE_PROMPT } = process.env ?? {};
     if (OPENROUTER_API_KEY && !this.azure) {
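The rename from isO1Model to isOmni widens the check from o1-only to the o1/o3 reasoning family. A quick sketch of what the new pattern does and does not match (model names are illustrative):

// The \b word boundary matches 'o1'/'o3' as a standalone token, including
// suffixed names like 'o3-mini', without catching e.g. 'gpt-4o'.
const omniPattern = /\b(o1|o3)\b/i;

omniPattern.test('o1');          // true
omniPattern.test('o1-preview');  // true ('-' counts as a word boundary)
omniPattern.test('o3-mini');     // true
omniPattern.test('gpt-4o');      // false (no standalone 'o1'/'o3' token)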
@@ -146,7 +146,7 @@ class OpenAIClient extends BaseClient {
     const { model } = this.modelOptions;

     this.isChatCompletion =
-      o1Pattern.test(model) || model.includes('gpt') || this.useOpenRouter || !!reverseProxy;
+      omniPattern.test(model) || model.includes('gpt') || this.useOpenRouter || !!reverseProxy;
     this.isChatGptModel = this.isChatCompletion;
     if (
       model.includes('text-davinci') ||
@@ -475,7 +475,7 @@ class OpenAIClient extends BaseClient {
       promptPrefix = this.augmentedPrompt + promptPrefix;
     }

-    if (promptPrefix && this.isO1Model !== true) {
+    if (promptPrefix && this.isOmni !== true) {
       promptPrefix = `Instructions:\n${promptPrefix.trim()}`;
       instructions = {
         role: 'system',
@@ -503,7 +503,7 @@ class OpenAIClient extends BaseClient {
     };

     /** EXPERIMENTAL */
-    if (promptPrefix && this.isO1Model === true) {
+    if (promptPrefix && this.isOmni === true) {
       const lastUserMessageIndex = payload.findLastIndex((message) => message.role === 'user');
       if (lastUserMessageIndex !== -1) {
         payload[
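The hunk is cut off mid-expression, but the visible shape suggests the EXPERIMENTAL branch folds the prompt prefix into the most recent user message rather than sending a system turn (o1-class models initially rejected the system role; extending that to o3 is an assumption here). A minimal sketch of that transformation, using a simplified message shape and a guessed concatenation format:

// Sketch: prepend instructions to the last user message (simplified payload
// shape; the exact string format in the real client is not shown in this hunk).
type Message = { role: 'system' | 'user' | 'assistant'; content: string };

function foldInstructions(payload: Message[], promptPrefix: string): Message[] {
  const lastUserMessageIndex = payload.findLastIndex((m) => m.role === 'user');
  if (lastUserMessageIndex !== -1) {
    payload[lastUserMessageIndex].content =
      `${promptPrefix}\n${payload[lastUserMessageIndex].content}`;
  }
  return payload;
}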
@@ -1200,7 +1200,7 @@ ${convo}
       opts.defaultHeaders = { ...opts.defaultHeaders, 'api-key': this.apiKey };
     }

-    if (this.isO1Model === true && modelOptions.max_tokens != null) {
+    if (this.isOmni === true && modelOptions.max_tokens != null) {
       modelOptions.max_completion_tokens = modelOptions.max_tokens;
       delete modelOptions.max_tokens;
     }
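Reasoning models accept max_completion_tokens in place of the legacy max_tokens parameter, so the client remaps the field before the request goes out. The remap in isolation:

// Sketch: rename max_tokens to max_completion_tokens for omni-class models.
function remapTokenLimit(modelOptions: Record<string, unknown>, isOmni: boolean) {
  if (isOmni && modelOptions.max_tokens != null) {
    modelOptions.max_completion_tokens = modelOptions.max_tokens;
    delete modelOptions.max_tokens;
  }
  return modelOptions;
}

remapTokenLimit({ model: 'o3-mini', max_tokens: 1024 }, true);
// -> { model: 'o3-mini', max_completion_tokens: 1024 }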
@@ -1280,13 +1280,13 @@ ${convo}
     let streamResolve;

     if (
-      this.isO1Model === true &&
+      this.isOmni === true &&
       (this.azure || /o1(?!-(?:mini|preview)).*$/.test(modelOptions.model)) &&
       modelOptions.stream
     ) {
       delete modelOptions.stream;
       delete modelOptions.stop;
-    } else if (!this.isO1Model && modelOptions.reasoning_effort != null) {
+    } else if (!this.isOmni && modelOptions.reasoning_effort != null) {
       delete modelOptions.reasoning_effort;
     }
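Two gates here: streaming is stripped for Azure deployments and for the full o1 model (o1-mini and o1-preview kept streaming support), and reasoning_effort is dropped for non-omni models, since only the reasoning family accepts that parameter. The negative lookahead in isolation:

// Sketch: the lookahead excludes o1-mini/o1-preview, so only 'full' o1 variants match.
const fullO1 = /o1(?!-(?:mini|preview)).*$/;

fullO1.test('o1');            // true  -> stream/stop removed
fullO1.test('o1-2024-12-17'); // true  -> stream/stop removed
fullO1.test('o1-mini');       // false -> streaming left enabled
fullO1.test('o1-preview');    // false -> streaming left enabled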

@@ -1366,7 +1366,7 @@ ${convo}
     for await (const chunk of stream) {
       // Add finish_reason: null if missing in any choice
       if (chunk.choices) {
-        chunk.choices.forEach(choice => {
+        chunk.choices.forEach((choice) => {
           if (!('finish_reason' in choice)) {
             choice.finish_reason = null;
           }
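The stream loop also normalizes chunks so every choice carries an explicit finish_reason, defaulting it to null while generation is in flight (per the inline comment, some chunks arrive without the field). Extracted as a standalone helper:

// Sketch: ensure each streamed choice has an explicit finish_reason.
type StreamChoice = { delta?: { content?: string }; finish_reason?: string | null };

function normalizeChunk(chunk: { choices?: StreamChoice[] }) {
  chunk.choices?.forEach((choice) => {
    if (!('finish_reason' in choice)) {
      choice.finish_reason = null;
    }
  });
  return chunk;
}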
3 changes: 2 additions & 1 deletion api/models/tx.js
@@ -75,8 +75,9 @@ const tokenValues = Object.assign(
   '4k': { prompt: 1.5, completion: 2 },
   '16k': { prompt: 3, completion: 4 },
   'gpt-3.5-turbo-1106': { prompt: 1, completion: 2 },
+  'o3-mini': { prompt: 1.1, completion: 4.4 },
+  'o1-mini': { prompt: 1.1, completion: 4.4 },
   'o1-preview': { prompt: 15, completion: 60 },
-  'o1-mini': { prompt: 3, completion: 12 },
   o1: { prompt: 15, completion: 60 },
   'gpt-4o-mini': { prompt: 0.15, completion: 0.6 },
   'gpt-4o': { prompt: 2.5, completion: 10 },
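The new o3-mini rate matches OpenAI's published pricing ($1.10 input / $4.40 output per million tokens), and o1-mini drops from its old 3/12 rate to the same level. Assuming these multipliers are USD per 1M tokens (consistent with the gpt-4o entries), a cost estimate would look like:

// Sketch: per-request cost, assuming rates are USD per 1M tokens.
const tokenValues: Record<string, { prompt: number; completion: number }> = {
  'o3-mini': { prompt: 1.1, completion: 4.4 },
  'o1-mini': { prompt: 1.1, completion: 4.4 },
  o1: { prompt: 15, completion: 60 },
};

function estimateCostUSD(model: string, promptTokens: number, completionTokens: number): number {
  const rate = tokenValues[model];
  if (!rate) throw new Error(`no rate for ${model}`);
  return (promptTokens * rate.prompt + completionTokens * rate.completion) / 1e6;
}

estimateCostUSD('o3-mini', 10_000, 2_000); // (11000 + 8800) / 1e6 = 0.0198 USD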
1 change: 1 addition & 0 deletions api/utils/tokens.js
@@ -2,6 +2,7 @@ const z = require('zod');
 const { EModelEndpoint } = require('librechat-data-provider');

 const openAIModels = {
+  'o3-mini': 195000, // -5000 from max
   o1: 195000, // -5000 from max
   'o1-mini': 127500, // -500 from max
   'o1-preview': 127500, // -500 from max
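o3-mini ships with a 200,000-token context window, so 195000 reserves a 5,000-token buffer, as the inline comment notes. The map is presumably consulted with partial matching so dated releases resolve too; a hypothetical lookup under that assumption:

// Sketch (hypothetical helper, not the project's actual API): exact key first,
// then substring fallback, so 'o3-mini-2025-01-31' still resolves to 'o3-mini'.
const openAIModels: Record<string, number> = {
  'o3-mini': 195000,
  o1: 195000,
  'o1-mini': 127500,
};

function maxContextTokens(model: string): number | undefined {
  if (model in openAIModels) return openAIModels[model];
  const key = Object.keys(openAIModels).find((k) => model.includes(k));
  return key !== undefined ? openAIModels[key] : undefined;
}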
2 changes: 1 addition & 1 deletion client/src/components/Endpoints/MessageEndpointIcon.tsx
@@ -25,7 +25,7 @@ type EndpointIcon = {

 function getOpenAIColor(_model: string | null | undefined) {
   const model = _model?.toLowerCase() ?? '';
-  if (model && /\bo1\b/i.test(model)) {
+  if (model && /\b(o1|o3)\b/i.test(model)) {
     return '#000000';
   }
   return model.includes('gpt-4') ? '#AB68FF' : '#19C37D';
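With the widened test, o3-family models get the same black message icon as o1: getOpenAIColor('o3-mini') now returns '#000000', where it previously fell through to the green '#19C37D' default.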
2 changes: 1 addition & 1 deletion packages/data-provider/src/config.ts
@@ -778,9 +778,9 @@ export const supportsBalanceCheck = {
 };

 export const visionModels = [
+  'o1',
   'gpt-4o',
   'gpt-4o-mini',
-  'o1',
   'gpt-4-turbo',
   'gpt-4-vision',
   'llava',
2 changes: 2 additions & 0 deletions packages/data-provider/src/parsers.ts
@@ -240,6 +240,8 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
     return modelLabel;
   } else if (model && /\bo1\b/i.test(model)) {
     return 'o1';
+  } else if (model && /\bo3\b/i.test(model)) {
+    return 'o3';
   } else if (model && model.includes('gpt-3')) {
     return 'GPT-3.5';
   } else if (model && model.includes('gpt-4o')) {
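This mirrors the existing o1 branch: an o3-family model with no explicit modelLabel is now shown as 'o3' in the response sender instead of falling through to the later generic branches.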