Commit bdaeca0

🔃 fix: Draft Clearing, Claude Titles, Remove Default Vision Max Tokens (#6501)
* refactor: remove legacy max_tokens setting for vision models in OpenAIClient (intended for gpt-4-preview)
* refactor: streamline capability checks in loadAgentTools function, still allow actions if tools are disabled
* fix: enhance error handling for token limits in AnthropicClient and update error message in translations
* feat: append timestamp to cloned agent names for better identification
* chore: update @librechat/agents dependency to version 2.3.94
* refactor: remove clearDraft helper from useSubmitMessage and centralize draft clearing logic to SSE handling, helps prevent user message loss if logout occurs
* refactor: increase debounce time for clearDraft function to improve auto-save performance
1 parent b62c7ca commit bdaeca0

File tree: 11 files changed, +924 -875 lines changed


‎api/app/clients/AnthropicClient.js

Lines changed: 12 additions & 6 deletions
@@ -2,6 +2,7 @@ const Anthropic = require('@anthropic-ai/sdk');
 const { HttpsProxyAgent } = require('https-proxy-agent');
 const {
   Constants,
+  ErrorTypes,
   EModelEndpoint,
   anthropicSettings,
   getResponseSender,
@@ -147,12 +148,17 @@ class AnthropicClient extends BaseClient {
     this.maxPromptTokens =
       this.options.maxPromptTokens || this.maxContextTokens - this.maxResponseTokens;

-    if (this.maxPromptTokens + this.maxResponseTokens > this.maxContextTokens) {
-      throw new Error(
-        `maxPromptTokens + maxOutputTokens (${this.maxPromptTokens} + ${this.maxResponseTokens} = ${
-          this.maxPromptTokens + this.maxResponseTokens
-        }) must be less than or equal to maxContextTokens (${this.maxContextTokens})`,
-      );
+    const reservedTokens = this.maxPromptTokens + this.maxResponseTokens;
+    if (reservedTokens > this.maxContextTokens) {
+      const info = `Total Possible Tokens + Max Output Tokens must be less than or equal to Max Context Tokens: ${this.maxPromptTokens} (total possible output) + ${this.maxResponseTokens} (max output) = ${reservedTokens}/${this.maxContextTokens} (max context)`;
+      const errorMessage = `{ "type": "${ErrorTypes.INPUT_LENGTH}", "info": "${info}" }`;
+      logger.warn(info);
+      throw new Error(errorMessage);
+    } else if (this.maxResponseTokens === this.maxContextTokens) {
+      const info = `Max Output Tokens must be less than Max Context Tokens: ${this.maxResponseTokens} (max output) = ${this.maxContextTokens} (max context)`;
+      const errorMessage = `{ "type": "${ErrorTypes.INPUT_LENGTH}", "info": "${info}" }`;
+      logger.warn(info);
+      throw new Error(errorMessage);
     }

     this.sender =
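
For context, the reworked guard throws a structured JSON error (type ErrorTypes.INPUT_LENGTH) that the client maps to the updated com_error_input_length string, instead of a bare message. Below is a minimal sketch of the same check in isolation; the standalone ErrorTypes object, the helper name, and the sample numbers are illustrative, not part of the commit:

// Illustrative sketch only: mirrors the new guard in AnthropicClient
const ErrorTypes = { INPUT_LENGTH: 'INPUT_LENGTH' }; // assumed enum shape

function validateTokenLimits(maxPromptTokens: number, maxResponseTokens: number, maxContextTokens: number): void {
  const reservedTokens = maxPromptTokens + maxResponseTokens;
  if (reservedTokens > maxContextTokens) {
    // prompt budget plus output budget cannot exceed the context window
    throw new Error(`{ "type": "${ErrorTypes.INPUT_LENGTH}", "info": "${reservedTokens}/${maxContextTokens}" }`);
  } else if (maxResponseTokens === maxContextTokens) {
    // reserving the whole context window for output leaves no room for the prompt
    throw new Error(`{ "type": "${ErrorTypes.INPUT_LENGTH}", "info": "${maxResponseTokens} = ${maxContextTokens}" }`);
  }
}

validateTokenLimits(150000, 8192, 200000); // ok: 158192 <= 200000
// validateTokenLimits(195000, 8192, 200000) would throw: 203192 > 200000
// validateTokenLimits(0, 200000, 200000) would throw: max output equals max context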

‎api/app/clients/OpenAIClient.js

Lines changed: 0 additions & 4 deletions
@@ -1185,10 +1185,6 @@ ${convo}
       opts.httpAgent = new HttpsProxyAgent(this.options.proxy);
     }

-    if (this.isVisionModel) {
-      modelOptions.max_tokens = 4000;
-    }
-
     /** @type {TAzureConfig | undefined} */
     const azureConfig = this.options?.req?.app?.locals?.[EModelEndpoint.azureOpenAI];

‎api/package.json

Lines changed: 1 addition & 1 deletion
@@ -50,7 +50,7 @@
     "@langchain/google-genai": "^0.1.11",
     "@langchain/google-vertexai": "^0.2.2",
     "@langchain/textsplitters": "^0.1.0",
-    "@librechat/agents": "^2.3.93",
+    "@librechat/agents": "^2.3.94",
     "@librechat/data-schemas": "*",
     "@waylaidwanderer/fetch-event-source": "^3.0.1",
     "axios": "^1.8.2",

‎api/server/controllers/agents/v1.js

Lines changed: 5 additions & 0 deletions
@@ -216,6 +216,11 @@ const duplicateAgentHandler = async (req, res) => {
     tool_resources: _tool_resources = {},
     ...cloneData
   } = agent;
+  cloneData.name = `${agent.name} (${new Date().toLocaleString('en-US', {
+    dateStyle: 'short',
+    timeStyle: 'short',
+    hour12: false,
+  })})`;

   if (_tool_resources?.[EToolResources.ocr]) {
     cloneData.tool_resources = {
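
For reference, toLocaleString('en-US', { dateStyle: 'short', timeStyle: 'short', hour12: false }) yields a compact 24-hour stamp, so duplicated agents no longer share identical names. A small sketch of the resulting name (the agent name and exact output are illustrative and depend on the runtime's locale data):

// Illustrative sketch of the new clone-naming behavior (values are examples only)
const agent = { name: 'Research Assistant' }; // hypothetical agent
const cloneName = `${agent.name} (${new Date().toLocaleString('en-US', {
  dateStyle: 'short',
  timeStyle: 'short',
  hour12: false,
})})`;
// e.g. "Research Assistant (3/27/25, 14:30)"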

‎api/server/services/ToolService.js

Lines changed: 12 additions & 13 deletions
@@ -427,21 +427,16 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
   }

   const endpointsConfig = await getEndpointsConfig(req);
-  const capabilities = endpointsConfig?.[EModelEndpoint.agents]?.capabilities ?? [];
-  const areToolsEnabled = capabilities.includes(AgentCapabilities.tools);
-  if (!areToolsEnabled) {
-    logger.debug('Tools are not enabled for this agent.');
-    return {};
-  }
-
-  const isFileSearchEnabled = capabilities.includes(AgentCapabilities.file_search);
-  const isCodeEnabled = capabilities.includes(AgentCapabilities.execute_code);
-  const areActionsEnabled = capabilities.includes(AgentCapabilities.actions);
+  const enabledCapabilities = new Set(endpointsConfig?.[EModelEndpoint.agents]?.capabilities ?? []);
+  const checkCapability = (capability) => enabledCapabilities.has(capability);
+  const areToolsEnabled = checkCapability(AgentCapabilities.tools);

   const _agentTools = agent.tools?.filter((tool) => {
-    if (tool === Tools.file_search && !isFileSearchEnabled) {
+    if (tool === Tools.file_search && !checkCapability(AgentCapabilities.file_search)) {
+      return false;
+    } else if (tool === Tools.execute_code && !checkCapability(AgentCapabilities.execute_code)) {
       return false;
-    } else if (tool === Tools.execute_code && !isCodeEnabled) {
+    } else if (!areToolsEnabled && !tool.includes(actionDelimiter)) {
       return false;
     }
     return true;
@@ -479,6 +474,10 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
       continue;
     }

+    if (!areToolsEnabled) {
+      continue;
+    }
+
     if (tool.mcp === true) {
       agentTools.push(tool);
       continue;
@@ -511,7 +510,7 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
     return map;
   }, {});

-  if (!areActionsEnabled) {
+  if (!checkCapability(AgentCapabilities.actions)) {
     return {
       tools: agentTools,
       toolContextMap,
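
The filter above is what keeps actions usable when the tools capability is switched off: action tools carry the action delimiter in their name, so only plain tools are dropped. A rough sketch of that behavior, using an assumed delimiter value and hypothetical tool names:

// Illustrative only: why actions survive the filter when the `tools` capability is disabled
const enabledCapabilities = new Set(['actions', 'file_search']); // hypothetical endpoint config
const checkCapability = (capability: string) => enabledCapabilities.has(capability);
const areToolsEnabled = checkCapability('tools'); // false in this config

const actionDelimiter = '_action_'; // assumed value, for illustration only
const keepTool = (tool: string) => areToolsEnabled || tool.includes(actionDelimiter);

keepTool('dalle');                                   // false: plain tool, filtered out
keepTool(`listEvents${actionDelimiter}example.com`); // true: action tools remain available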

‎client/src/components/Chat/Input/ChatForm.tsx

Lines changed: 2 additions & 2 deletions
@@ -93,15 +93,15 @@ const ChatForm = ({ index = 0 }) => {
   } = useAddedChatContext();
   const showStopAdded = useRecoilValue(store.showStopButtonByIndex(addedIndex));

-  const { clearDraft } = useAutoSave({
+  useAutoSave({
     conversationId: useMemo(() => conversation?.conversationId, [conversation]),
     textAreaRef,
     files,
     setFiles,
   });

   const assistantMap = useAssistantsMapContext();
-  const { submitMessage, submitPrompt } = useSubmitMessage({ clearDraft });
+  const { submitMessage, submitPrompt } = useSubmitMessage();

   const { endpoint: _endpoint, endpointType } = conversation ?? { endpoint: null };
   const endpoint = endpointType ?? _endpoint;

‎client/src/hooks/Input/useAutoSave.ts

Lines changed: 5 additions & 10 deletions
@@ -7,6 +7,10 @@ import { useChatFormContext } from '~/Providers';
 import { useGetFiles } from '~/data-provider';
 import store from '~/store';

+const clearDraft = debounce((id?: string | null) => {
+  localStorage.removeItem(`${LocalStorageKeys.TEXT_DRAFT}${id ?? ''}`);
+}, 2500);
+
 export const useAutoSave = ({
   conversationId,
   textAreaRef,
@@ -103,7 +107,7 @@ export const useAutoSave = ({
       }
       // Save the draft of the current conversation before switching
       if (textAreaRef.current.value === '') {
-        localStorage.removeItem(`${LocalStorageKeys.TEXT_DRAFT}${id}`);
+        clearDraft(id);
       } else {
         localStorage.setItem(
           `${LocalStorageKeys.TEXT_DRAFT}${id}`,
@@ -208,13 +212,4 @@ export const useAutoSave = ({
       );
     }
   }, [files, conversationId, saveDrafts, currentConversationId, fileIds]);
-
-  const clearDraft = useCallback(() => {
-    if (conversationId != null && conversationId) {
-      localStorage.removeItem(`${LocalStorageKeys.TEXT_DRAFT}${conversationId}`);
-      localStorage.removeItem(`${LocalStorageKeys.FILES_DRAFT}${conversationId}`);
-    }
-  }, [conversationId]);
-
-  return { clearDraft };
 };
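
With the trailing debounce, a burst of draft-clear calls inside the 2.5-second window collapses into a single localStorage.removeItem, which is the auto-save performance gain the commit message refers to. A small sketch of that behavior, assuming lodash's debounce (the hook appears to use the same helper) and an illustrative key prefix:

import debounce from 'lodash/debounce'; // assumed import for this sketch

// Illustrative: the key prefix stands in for LocalStorageKeys.TEXT_DRAFT
const clearDraft = debounce((id?: string | null) => {
  localStorage.removeItem(`textDraft_${id ?? ''}`);
}, 2500);

clearDraft('abc'); // schedules removal 2.5 s from now
clearDraft('abc'); // resets the timer; a single removeItem runs once the calls go quiet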

‎client/src/hooks/Messages/useSubmitMessage.ts

Lines changed: 1 addition & 3 deletions
@@ -14,7 +14,7 @@ const appendIndex = (index: number, value?: string) => {
   return `${value}${Constants.COMMON_DIVIDER}${index}`;
 };

-export default function useSubmitMessage(helpers?: { clearDraft?: () => void }) {
+export default function useSubmitMessage() {
   const { user } = useAuthContext();
   const methods = useChatFormContext();
   const { ask, index, getMessages, setMessages, latestMessage } = useChatContext();
@@ -66,12 +66,10 @@ export default function useSubmitMessage(helpers?: { clearDraft?: () => void })
         );
       }
       methods.reset();
-      helpers?.clearDraft && helpers.clearDraft();
     },
     [
       ask,
       methods,
-      helpers,
       addedIndex,
       addedConvo,
       setMessages,

‎client/src/hooks/SSE/useSSE.ts

Lines changed: 13 additions & 0 deletions
@@ -4,9 +4,11 @@ import { SSE } from 'sse.js';
 import { useSetRecoilState } from 'recoil';
 import {
   request,
+  Constants,
   /* @ts-ignore */
   createPayload,
   isAgentsEndpoint,
+  LocalStorageKeys,
   removeNullishValues,
   isAssistantsEndpoint,
 } from 'librechat-data-provider';
@@ -18,6 +20,16 @@ import { useAuthContext } from '~/hooks/AuthContext';
 import useEventHandlers from './useEventHandlers';
 import store from '~/store';

+const clearDraft = (conversationId?: string | null) => {
+  if (conversationId) {
+    localStorage.removeItem(`${LocalStorageKeys.TEXT_DRAFT}${conversationId}`);
+    localStorage.removeItem(`${LocalStorageKeys.FILES_DRAFT}${conversationId}`);
+  } else {
+    localStorage.removeItem(`${LocalStorageKeys.TEXT_DRAFT}${Constants.NEW_CONVO}`);
+    localStorage.removeItem(`${LocalStorageKeys.FILES_DRAFT}${Constants.NEW_CONVO}`);
+  }
+};
+
 type ChatHelpers = Pick<
   EventHandlerParams,
   | 'setMessages'
@@ -112,6 +124,7 @@
     const data = JSON.parse(e.data);

     if (data.final != null) {
+      clearDraft(submission.conversationId);
       const { plugins } = data;
       finalHandler(data, { ...submission, plugins } as EventSubmission);
       (startupConfig?.balance?.enabled ?? false) && balanceQuery.refetch();
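
Centralizing the clearing here means a draft is removed only once the server's final SSE event confirms the message went through; previously the draft was cleared on submit, so an interrupted request or a logout mid-stream could lose the text. A hedged sketch of the resulting lifecycle, with illustrative key prefixes standing in for the LocalStorageKeys values:

// Illustrative lifecycle only; keys and IDs are examples
const clearDraft = (conversationId: string): void => {
  localStorage.removeItem(`textDraft_${conversationId}`);
  localStorage.removeItem(`filesDraft_${conversationId}`);
};

// while the user types, useAutoSave keeps the draft in localStorage:
localStorage.setItem('textDraft_abc123', 'my in-progress message');
localStorage.setItem('filesDraft_abc123', '["file-id-1"]');

// submitting no longer clears the draft, so an aborted request or a logout
// mid-stream leaves it recoverable; only the SSE `final` event clears it:
clearDraft('abc123');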

‎client/src/locales/en/translation.json

Lines changed: 2 additions & 2 deletions
@@ -264,7 +264,7 @@
   "com_error_files_upload": "An error occurred while uploading the file.",
   "com_error_files_upload_canceled": "The file upload request was canceled. Note: the file upload may still be processing and will need to be manually deleted.",
   "com_error_files_validation": "An error occurred while validating the file.",
-  "com_error_input_length": "The latest message token count is too long, exceeding the token limit ({{0}} respectively). Please shorten your message, adjust the max context size from the conversation parameters, or fork the conversation to continue.",
+  "com_error_input_length": "The latest message token count is too long, exceeding the token limit, or your token limit parameters are misconfigured, adversely affecting the context window. More info: {{0}}. Please shorten your message, adjust the max context size from the conversation parameters, or fork the conversation to continue.",
   "com_error_invalid_user_key": "Invalid key provided. Please provide a valid key and try again.",
   "com_error_moderation": "It appears that the content submitted has been flagged by our moderation system for not aligning with our community guidelines. We're unable to proceed with this specific topic. If you have any other questions or topics you'd like to explore, please edit your message, or create a new conversation.",
   "com_error_no_base_url": "No base URL found. Please provide one and try again.",
@@ -848,4 +848,4 @@
   "com_ui_zoom": "Zoom",
   "com_user_message": "You",
   "com_warning_resubmit_unsupported": "Resubmitting the AI message is not supported for this endpoint."
-}
+}
