Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 7 additions & 2 deletions libs/langchain-core/src/tools/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -85,8 +85,13 @@ export interface ToolParams extends BaseLangChainParams {

export type ToolRunnableConfig<
// eslint-disable-next-line @typescript-eslint/no-explicit-any
ConfigurableFieldType extends Record<string, any> = Record<string, any>
> = RunnableConfig<ConfigurableFieldType> & { toolCall?: ToolCall };
ConfigurableFieldType extends Record<string, any> = Record<string, any>,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
ContextSchema extends Record<string, any> = Record<string, any>
> = RunnableConfig<ConfigurableFieldType> & {
toolCall?: ToolCall;
context?: ContextSchema;
};

/**
* Schema for defining tools.
Expand Down
19 changes: 18 additions & 1 deletion libs/langchain/src/agents/ReactAgent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -76,10 +76,26 @@ export class ReactAgent<
/**
* Check if the LLM already has bound tools and throw if it does.
*/
if (typeof options.llm !== "function") {
if (options.llm && typeof options.llm !== "function") {
validateLLMHasNoBoundTools(options.llm);
}

/**
* validate that model and llm options are not provided together
*/
if (options.llm && options.model) {
throw new Error("Cannot provide both `model` and `llm` options.");
}

/**
* validate that either model or llm option is provided
*/
if (!options.llm && !options.model) {
throw new Error(
"Either `model` or `llm` option must be provided to create an agent."
);
}

const toolClasses = Array.isArray(options.tools)
? options.tools
: options.tools.tools;
Expand Down Expand Up @@ -114,6 +130,7 @@ export class ReactAgent<
"agent",
new AgentNode({
llm: this.options.llm,
model: this.options.model,
prompt: this.options.prompt,
includeAgentName: this.options.includeAgentName,
name: this.options.name,
Expand Down
50 changes: 40 additions & 10 deletions libs/langchain/src/agents/nodes/AgentNode.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import {
} from "@langchain/core/utils/types";
import type { ToolCall } from "@langchain/core/messages/tool";

import { initChatModel } from "../../chat_models/universal.js";
import { MultipleStructuredOutputsError } from "../errors.js";
import { RunnableCallable } from "../RunnableCallable.js";
import { PreHookAnnotation, AnyAnnotationRoot } from "../annotation.js";
Expand Down Expand Up @@ -55,7 +56,7 @@ export interface AgentNodeOptions<
StructuredResponseFormat,
ContextSchema
>,
"llm" | "prompt" | "includeAgentName" | "name" | "responseFormat"
"llm" | "model" | "prompt" | "includeAgentName" | "name" | "responseFormat"
> {
toolClasses: (ClientTool | ServerTool)[];
shouldReturnDirect: Set<string>;
Expand Down Expand Up @@ -168,6 +169,43 @@ export class AgentNode<
return { messages: [response] };
}

/**
 * Resolve the chat model for this agent invocation.
 *
 * Resolution order (mirrors the `model` / `llm` validation performed at
 * agent construction time, where providing both or neither throws):
 *   1. `model` — must be a string identifier; it is handed to the
 *      universal `initChatModel` helper, which returns a Promise, so
 *      callers must `await` the result of this method.
 *   2. `llm` — a function is invoked with the live state/config to pick
 *      a model per invocation; any other truthy value is used directly.
 *
 * @param state - The current internal agent state (including pre-hook state).
 * @param config - The runnable config for this invocation.
 * @returns The resolved model, or a Promise resolving to one.
 * @throws If `model` is set but is not a string, or if neither `model`
 *   nor `llm` was provided.
 */
#deriveModel(
  state: InternalAgentState<StructuredResponseFormat> &
    PreHookAnnotation["State"],
  config: RunnableConfig
) {
  if (this.#options.model) {
    if (typeof this.#options.model === "string") {
      // Provider lookup/instantiation is delegated to initChatModel;
      // note this branch returns a Promise while the others may not.
      return initChatModel(this.#options.model);
    }

    throw new Error("`model` option must be a string.");
  }

  const model = this.#options.llm;

  /**
   * If the model is a function, call it to get the model.
   * NOTE(review): presumably this enables per-invocation model selection
   * based on state/config — confirm against DynamicLLMFunction callers.
   */
  if (typeof model === "function") {
    return model(state, config);
  }

  if (model) {
    return model;
  }

  // Unreachable when constructed via createReactAgent (it validates the
  // model/llm pair up front), but kept as a defensive guard.
  throw new Error(
    "No model option was provided, either via `model` or via `llm` option."
  );
}

async #invokeModel(
state: InternalAgentState<StructuredResponseFormat> &
PreHookAnnotation["State"],
Expand All @@ -180,15 +218,7 @@ export class AgentNode<
| Command
| { structuredResponse: StructuredResponseFormat; messages?: BaseMessage[] }
> {
let model = this.#options.llm;

/**
* If the model is a function, call it to get the model.
* @deprecated likely to be removed in the next version of the agent
*/
if (typeof model === "function") {
model = await model(state, config);
}
const model = await this.#deriveModel(state, config);

/**
* Check if the LLM already has bound tools and throw if it does.
Expand Down
16 changes: 16 additions & 0 deletions libs/langchain/src/agents/tests/reactAgent.int.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,22 @@ describe("createReactAgent Integration Tests", () => {
expect(fetchMock).toHaveBeenCalledTimes(2);
});

// Integration test: exercises the new string `model` option end-to-end.
// NOTE(review): passing a bare model name relies on initChatModel's
// provider inference; presumably requires live Anthropic credentials in
// the environment — confirm CI setup before relying on this test.
it("should work with model option", async () => {
  const agent = createReactAgent({
    model: "claude-3-5-sonnet-20240620",
    tools: [getWeather],
    responseFormat: answerSchema,
  });

  const result = await agent.invoke({
    messages: [new HumanMessage("What's the weather in Tokyo?")],
  });

  // The structured response should be populated from the tool result.
  expect(result.structuredResponse).toBeDefined();
  expect(result.structuredResponse?.answer).toBe("yes");
  expect(result.structuredResponse?.city).toBe("Tokyo");
});

it("should throw if a user tries to use native response format with Anthropic", async () => {
const agent = createReactAgent({
llm,
Expand Down
40 changes: 40 additions & 0 deletions libs/langchain/src/agents/tests/reactAgent.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1521,4 +1521,44 @@ describe("createReactAgent", () => {
expect(configAbortController.signal.aborted).toBe(true);
});
});

describe("model option", () => {
  // `model` and `llm` are mutually exclusive ways to specify the chat
  // model; supplying both must fail fast at construction time.
  // NOTE(review): createReactAgent throws synchronously here, so the
  // `await` on a sync `.toThrow` expectation is a no-op — consider
  // dropping it or switching to `.rejects` consistently.
  it("should not accept model and llm option", async () => {
    const model = new FakeToolCallingChatModel({
      responses: [new AIMessage("ai response")],
    });

    await expect(() =>
      createReactAgent({
        llm: model,
        model: "gpt-4o",
        tools: [],
      })
    ).toThrow("Cannot provide both `model` and `llm` options.");
  });

  // At least one of `model` / `llm` is required; omitting both must
  // throw at construction time (before any invocation).
  it("should throw if no model or llm option is provided", async () => {
    await expect(() =>
      createReactAgent({
        tools: [],
      })
    ).toThrow(
      "Either `model` or `llm` option must be provided to create an agent."
    );
  });

  // A non-string `model` (e.g. a model instance) is only rejected when
  // the agent is invoked, hence the async `.rejects` expectation here.
  it("throws if model is not a string", async () => {
    const model = new FakeToolCallingChatModel({
      responses: [new AIMessage("ai response")],
    });

    await expect(() =>
      createReactAgent({
        // @ts-expect-error - model is not a string
        model,
        tools: [],
      }).invoke({ messages: [new HumanMessage("Hello Input!")] })
    ).rejects.toThrow("`model` option must be a string.");
  });
});
});
18 changes: 17 additions & 1 deletion libs/langchain/src/agents/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ import type {
ResponseFormatUndefined,
JsonSchemaFormat,
} from "./responses.js";
import type { initChatModel } from "../chat_models/universal.js";

export const META_EXTRAS_DESCRIPTION_PREFIX = "lg:";

Expand Down Expand Up @@ -177,7 +178,22 @@ export type CreateReactAgentParams<
| ResponseFormatUndefined
> = {
/** The chat model that can utilize OpenAI-style tool calling. */
llm: LanguageModelLike | DynamicLLMFunction<StateSchema, ContextSchema>;
llm?: LanguageModelLike | DynamicLLMFunction<StateSchema, ContextSchema>;

/**
* Initializes a ChatModel based on the provided model name and provider.
* It supports various model providers and allows for runtime configuration of model parameters.
*
* @uses {@link initChatModel}
* @example
* ```ts
* const agent = createReactAgent({
* model: "anthropic:claude-3-7-sonnet-latest",
* // ...
* });
* ```
*/
model?: string;

/** A list of tools or a ToolNode. */
tools: ToolNode | (ServerTool | ClientTool)[];
Expand Down
3 changes: 2 additions & 1 deletion libs/langchain/src/hub/base.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import type { BaseLanguageModel } from "@langchain/core/language_models/base";
import type { Runnable } from "@langchain/core/runnables";
import type { Client, ClientConfig } from "langsmith";
import type { PromptCommit } from "langsmith/schemas";

/**
* Push a prompt to the hub.
Expand Down Expand Up @@ -43,7 +44,7 @@ export async function basePush(
export async function basePull(
ownerRepoCommit: string,
options?: { apiKey?: string; apiUrl?: string; includeModel?: boolean }
) {
): Promise<PromptCommit> {
const Client = await loadLangSmith();
const client = new Client(options);

Expand Down
5 changes: 5 additions & 0 deletions libs/langchain/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,11 @@ export {
ToolMessage,
} from "@langchain/core/messages";

/**
* Universal Chat Model
*/
export { initChatModel } from "./chat_models/universal.js";

/**
* LangChain Tools
*/
Expand Down
Loading