Skip to content

🔃 fix: Draft Clearing, Claude Titles, Remove Default Vision Max Tokens #6501

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 7 commits into from
Mar 23, 2025
18 changes: 12 additions & 6 deletions api/app/clients/AnthropicClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ const Anthropic = require('@anthropic-ai/sdk');
const { HttpsProxyAgent } = require('https-proxy-agent');
const {
Constants,
ErrorTypes,
EModelEndpoint,
anthropicSettings,
getResponseSender,
Expand Down Expand Up @@ -147,12 +148,17 @@ class AnthropicClient extends BaseClient {
this.maxPromptTokens =
this.options.maxPromptTokens || this.maxContextTokens - this.maxResponseTokens;

if (this.maxPromptTokens + this.maxResponseTokens > this.maxContextTokens) {
throw new Error(
`maxPromptTokens + maxOutputTokens (${this.maxPromptTokens} + ${this.maxResponseTokens} = ${
this.maxPromptTokens + this.maxResponseTokens
}) must be less than or equal to maxContextTokens (${this.maxContextTokens})`,
);
const reservedTokens = this.maxPromptTokens + this.maxResponseTokens;
if (reservedTokens > this.maxContextTokens) {
const info = `Total Possible Tokens + Max Output Tokens must be less than or equal to Max Context Tokens: ${this.maxPromptTokens} (total possible output) + ${this.maxResponseTokens} (max output) = ${reservedTokens}/${this.maxContextTokens} (max context)`;
const errorMessage = `{ "type": "${ErrorTypes.INPUT_LENGTH}", "info": "${info}" }`;
logger.warn(info);
throw new Error(errorMessage);
} else if (this.maxResponseTokens === this.maxContextTokens) {
const info = `Max Output Tokens must be less than Max Context Tokens: ${this.maxResponseTokens} (max output) = ${this.maxContextTokens} (max context)`;
const errorMessage = `{ "type": "${ErrorTypes.INPUT_LENGTH}", "info": "${info}" }`;
logger.warn(info);
throw new Error(errorMessage);
}

this.sender =
Expand Down
4 changes: 0 additions & 4 deletions api/app/clients/OpenAIClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -1185,10 +1185,6 @@ ${convo}
opts.httpAgent = new HttpsProxyAgent(this.options.proxy);
}

if (this.isVisionModel) {
modelOptions.max_tokens = 4000;
}

/** @type {TAzureConfig | undefined} */
const azureConfig = this.options?.req?.app?.locals?.[EModelEndpoint.azureOpenAI];

Expand Down
2 changes: 1 addition & 1 deletion api/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@
"@langchain/google-genai": "^0.1.11",
"@langchain/google-vertexai": "^0.2.2",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.3.93",
"@librechat/agents": "^2.3.94",
"@librechat/data-schemas": "*",
"@waylaidwanderer/fetch-event-source": "^3.0.1",
"axios": "^1.8.2",
Expand Down
5 changes: 5 additions & 0 deletions api/server/controllers/agents/v1.js
Original file line number Diff line number Diff line change
Expand Up @@ -212,6 +212,11 @@ const duplicateAgentHandler = async (req, res) => {
tool_resources: _tool_resources = {},
...cloneData
} = agent;
cloneData.name = `${agent.name} (${new Date().toLocaleString('en-US', {
dateStyle: 'short',
timeStyle: 'short',
hour12: false,
})})`;

if (_tool_resources?.[EToolResources.ocr]) {
cloneData.tool_resources = {
Expand Down
25 changes: 12 additions & 13 deletions api/server/services/ToolService.js
Original file line number Diff line number Diff line change
Expand Up @@ -425,21 +425,16 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
}

const endpointsConfig = await getEndpointsConfig(req);
const capabilities = endpointsConfig?.[EModelEndpoint.agents]?.capabilities ?? [];
const areToolsEnabled = capabilities.includes(AgentCapabilities.tools);
if (!areToolsEnabled) {
logger.debug('Tools are not enabled for this agent.');
return {};
}

const isFileSearchEnabled = capabilities.includes(AgentCapabilities.file_search);
const isCodeEnabled = capabilities.includes(AgentCapabilities.execute_code);
const areActionsEnabled = capabilities.includes(AgentCapabilities.actions);
const enabledCapabilities = new Set(endpointsConfig?.[EModelEndpoint.agents]?.capabilities ?? []);
const checkCapability = (capability) => enabledCapabilities.has(capability);
const areToolsEnabled = checkCapability(AgentCapabilities.tools);

const _agentTools = agent.tools?.filter((tool) => {
if (tool === Tools.file_search && !isFileSearchEnabled) {
if (tool === Tools.file_search && !checkCapability(AgentCapabilities.file_search)) {
return false;
} else if (tool === Tools.execute_code && !checkCapability(AgentCapabilities.execute_code)) {
return false;
} else if (tool === Tools.execute_code && !isCodeEnabled) {
} else if (!areToolsEnabled && !tool.includes(actionDelimiter)) {
return false;
}
return true;
Expand Down Expand Up @@ -473,6 +468,10 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
continue;
}

if (!areToolsEnabled) {
continue;
}

if (tool.mcp === true) {
agentTools.push(tool);
continue;
Expand Down Expand Up @@ -505,7 +504,7 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
return map;
}, {});

if (!areActionsEnabled) {
if (!checkCapability(AgentCapabilities.actions)) {
return {
tools: agentTools,
toolContextMap,
Expand Down
4 changes: 2 additions & 2 deletions client/src/components/Chat/Input/ChatForm.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -93,15 +93,15 @@ const ChatForm = ({ index = 0 }) => {
} = useAddedChatContext();
const showStopAdded = useRecoilValue(store.showStopButtonByIndex(addedIndex));

const { clearDraft } = useAutoSave({
useAutoSave({
conversationId: useMemo(() => conversation?.conversationId, [conversation]),
textAreaRef,
files,
setFiles,
});

const assistantMap = useAssistantsMapContext();
const { submitMessage, submitPrompt } = useSubmitMessage({ clearDraft });
const { submitMessage, submitPrompt } = useSubmitMessage();

const { endpoint: _endpoint, endpointType } = conversation ?? { endpoint: null };
const endpoint = endpointType ?? _endpoint;
Expand Down
15 changes: 5 additions & 10 deletions client/src/hooks/Input/useAutoSave.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,10 @@ import { useChatFormContext } from '~/Providers';
import { useGetFiles } from '~/data-provider';
import store from '~/store';

/** Debounced removal of the saved text draft for the given conversation id. */
const clearDraft = debounce((id?: string | null) => {
  const draftKey = `${LocalStorageKeys.TEXT_DRAFT}${id ?? ''}`;
  localStorage.removeItem(draftKey);
}, 2500);

export const useAutoSave = ({
conversationId,
textAreaRef,
Expand Down Expand Up @@ -103,7 +107,7 @@ export const useAutoSave = ({
}
// Save the draft of the current conversation before switching
if (textAreaRef.current.value === '') {
localStorage.removeItem(`${LocalStorageKeys.TEXT_DRAFT}${id}`);
clearDraft(id);
} else {
localStorage.setItem(
`${LocalStorageKeys.TEXT_DRAFT}${id}`,
Expand Down Expand Up @@ -208,13 +212,4 @@ export const useAutoSave = ({
);
}
}, [files, conversationId, saveDrafts, currentConversationId, fileIds]);

const clearDraft = useCallback(() => {
if (conversationId != null && conversationId) {
localStorage.removeItem(`${LocalStorageKeys.TEXT_DRAFT}${conversationId}`);
localStorage.removeItem(`${LocalStorageKeys.FILES_DRAFT}${conversationId}`);
}
}, [conversationId]);

return { clearDraft };
};
4 changes: 1 addition & 3 deletions client/src/hooks/Messages/useSubmitMessage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ const appendIndex = (index: number, value?: string) => {
return `${value}${Constants.COMMON_DIVIDER}${index}`;
};

export default function useSubmitMessage(helpers?: { clearDraft?: () => void }) {
export default function useSubmitMessage() {
const { user } = useAuthContext();
const methods = useChatFormContext();
const { ask, index, getMessages, setMessages, latestMessage } = useChatContext();
Expand Down Expand Up @@ -66,12 +66,10 @@ export default function useSubmitMessage(helpers?: { clearDraft?: () => void })
);
}
methods.reset();
helpers?.clearDraft && helpers.clearDraft();
},
[
ask,
methods,
helpers,
addedIndex,
addedConvo,
setMessages,
Expand Down
13 changes: 13 additions & 0 deletions client/src/hooks/SSE/useSSE.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,11 @@ import { SSE } from 'sse.js';
import { useSetRecoilState } from 'recoil';
import {
request,
Constants,
/* @ts-ignore */
createPayload,
isAgentsEndpoint,
LocalStorageKeys,
removeNullishValues,
isAssistantsEndpoint,
} from 'librechat-data-provider';
Expand All @@ -18,6 +20,16 @@ import { useAuthContext } from '~/hooks/AuthContext';
import useEventHandlers from './useEventHandlers';
import store from '~/store';

/**
 * Removes the text and file drafts stored in localStorage for a conversation.
 * When no id is provided, the "new conversation" draft keys are cleared instead.
 */
const clearDraft = (conversationId?: string | null) => {
  const id = conversationId ? conversationId : Constants.NEW_CONVO;
  localStorage.removeItem(`${LocalStorageKeys.TEXT_DRAFT}${id}`);
  localStorage.removeItem(`${LocalStorageKeys.FILES_DRAFT}${id}`);
};

type ChatHelpers = Pick<
EventHandlerParams,
| 'setMessages'
Expand Down Expand Up @@ -112,6 +124,7 @@ export default function useSSE(
const data = JSON.parse(e.data);

if (data.final != null) {
clearDraft(submission.conversationId);
const { plugins } = data;
finalHandler(data, { ...submission, plugins } as EventSubmission);
(startupConfig?.balance?.enabled ?? false) && balanceQuery.refetch();
Expand Down
4 changes: 2 additions & 2 deletions client/src/locales/en/translation.json
Original file line number Diff line number Diff line change
Expand Up @@ -264,7 +264,7 @@
"com_error_files_upload": "An error occurred while uploading the file.",
"com_error_files_upload_canceled": "The file upload request was canceled. Note: the file upload may still be processing and will need to be manually deleted.",
"com_error_files_validation": "An error occurred while validating the file.",
"com_error_input_length": "The latest message token count is too long, exceeding the token limit ({{0}} respectively). Please shorten your message, adjust the max context size from the conversation parameters, or fork the conversation to continue.",
"com_error_input_length": "The latest message token count is too long, exceeding the token limit, or your token limit parameters are misconfigured, adversely affecting the context window. More info: {{0}}. Please shorten your message, adjust the max context size from the conversation parameters, or fork the conversation to continue.",
"com_error_invalid_user_key": "Invalid key provided. Please provide a valid key and try again.",
"com_error_moderation": "It appears that the content submitted has been flagged by our moderation system for not aligning with our community guidelines. We're unable to proceed with this specific topic. If you have any other questions or topics you'd like to explore, please edit your message, or create a new conversation.",
"com_error_no_base_url": "No base URL found. Please provide one and try again.",
Expand Down Expand Up @@ -848,4 +848,4 @@
"com_ui_zoom": "Zoom",
"com_user_message": "You",
"com_warning_resubmit_unsupported": "Resubmitting the AI message is not supported for this endpoint."
}
}
Loading