Merged
Changes from 10 commits
2 changes: 1 addition & 1 deletion .github/workflows/check-broken-links.yml
@@ -22,4 +22,4 @@ jobs:
- name: Build scripts
run: pnpm build --filter @langchain/scripts
- name: Check broken links
run: pnpm --filter core_docs check:broken-links:ci
run: pnpm check:broken-links:ci
2 changes: 1 addition & 1 deletion .github/workflows/deploy-api-refs-preview.yml
@@ -42,7 +42,7 @@ jobs:
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Build All Projects
run: pnpm build --filter=!examples --filter=!api_refs --filter=!core_docs --filter=!create-langchain-integration
run: pnpm build --filter=!examples --filter=!api_refs --filter=!create-langchain-integration
- name: Build Project Artifacts
run: vercel build --token=${{ secrets.VERCEL_TOKEN }}
- name: Deploy Project Artifacts to Vercel
2 changes: 1 addition & 1 deletion .github/workflows/deploy-api-refs-prod.yml
@@ -39,7 +39,7 @@ jobs:
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Build All Projects
run: pnpm build --filter=!examples --filter=!api_refs --filter=!core_docs --filter=!create-langchain-integration
run: pnpm build --filter=!examples --filter=!api_refs --filter=!create-langchain-integration
- name: Build Project Artifacts
run: |
if [ ${{ github.ref }} = 'refs/heads/main' ]; then
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
@@ -293,7 +293,7 @@ and in generated documentation.
##### Note: you only need to follow these steps if you are building the docs site locally

1. [Quarto](https://quarto.org/) - package that converts Jupyter notebooks (`.ipynb` files) into `.mdx` files for serving in Docusaurus.
2. `pnpm build --filter=core_docs` - It's as simple as that! (or you can simply run `pnpm build` from `docs/core_docs/`)
2. `pnpm build` - It's as simple as that! (or you can simply run `pnpm build` from `docs/core_docs/`)

All notebooks are converted to `.md` files and automatically gitignored. If you would like to create a non notebook doc, it must be a `.mdx` file.

12 changes: 11 additions & 1 deletion dependency_range_tests/docker-compose.yml
@@ -1,4 +1,3 @@

services:
# LangChain
langchain-latest-deps:
@@ -38,6 +37,7 @@ services:
- ./scripts/with_standard_tests/community/node/package.json:/package.json
- ../libs/langchain-standard-tests:/libs/langchain-standard-tests
- ../libs/langchain-community:/libs/langchain-community
- ../internal:/internal
- ./scripts:/scripts
command: bash /scripts/with_standard_tests/community/test-with-latest-deps.sh
community-lowest-deps:
@@ -53,6 +53,7 @@ services:
- ./scripts/with_standard_tests/community/node/package.json:/package.json
- ../libs/langchain-standard-tests:/libs/langchain-standard-tests
- ../libs/langchain-community:/libs/langchain-community
- ../internal:/internal
- ./scripts:/scripts
command: bash /scripts/with_standard_tests/community/test-with-lowest-deps.sh
community-npm-install:
@@ -67,6 +68,7 @@ services:
- ./scripts/with_standard_tests/community/node/package.json:/package.json
- ../libs/langchain-standard-tests:/libs/langchain-standard-tests
- ../libs/langchain-community:/libs/langchain-community
- ../internal:/internal
- ./scripts:/scripts
command: bash /scripts/with_standard_tests/community/npm-install.sh

@@ -84,6 +86,7 @@ services:
- ./scripts/with_standard_tests/openai/node/package.json:/package.json
- ../libs/langchain-standard-tests:/libs/langchain-standard-tests
- ../libs/providers/langchain-openai:/libs/providers/langchain-openai
- ../internal:/internal
- ./scripts:/scripts
command: bash /scripts/with_standard_tests/openai/test-with-latest-deps.sh
openai-lowest-deps:
@@ -99,6 +102,7 @@ services:
- ./scripts/with_standard_tests/openai/node/package.json:/package.json
- ../libs/langchain-standard-tests:/libs/langchain-standard-tests
- ../libs/providers/langchain-openai:/libs/providers/langchain-openai
- ../internal:/internal
- ./scripts:/scripts
command: bash /scripts/with_standard_tests/openai/test-with-lowest-deps.sh

@@ -116,6 +120,7 @@ services:
- ./scripts/with_standard_tests/anthropic/node/package.json:/package.json
- ../libs/langchain-standard-tests:/libs/langchain-standard-tests
- ../libs/providers/langchain-anthropic:/libs/providers/langchain-anthropic
- ../internal:/internal
- ./scripts:/scripts
command: bash /scripts/with_standard_tests/anthropic/test-with-latest-deps.sh
anthropic-lowest-deps:
@@ -131,6 +136,7 @@ services:
- ./scripts/with_standard_tests/anthropic/node/package.json:/package.json
- ../libs/langchain-standard-tests:/libs/langchain-standard-tests
- ../libs/providers/langchain-anthropic:/libs/providers/langchain-anthropic
- ../internal:/internal
- ./scripts:/scripts
command: bash /scripts/with_standard_tests/anthropic/test-with-lowest-deps.sh

@@ -148,6 +154,7 @@ services:
- ./scripts/with_standard_tests/google-vertexai/node/package.json:/package.json
- ../libs/langchain-standard-tests:/libs/langchain-standard-tests
- ../libs/providers/langchain-google-vertexai:/libs/providers/langchain-google-vertexai
- ../internal:/internal
- ./scripts:/scripts
command: bash /scripts/with_standard_tests/google-vertexai/test-with-latest-deps.sh
google-vertexai-lowest-deps:
@@ -163,6 +170,7 @@ services:
- ./scripts/with_standard_tests/google-vertexai/node/package.json:/package.json
- ../libs/langchain-standard-tests:/libs/langchain-standard-tests
- ../libs/providers/langchain-google-vertexai:/libs/providers/langchain-google-vertexai
- ../internal:/internal
- ./scripts:/scripts
command: bash /scripts/with_standard_tests/google-vertexai/test-with-lowest-deps.sh

@@ -180,6 +188,7 @@ services:
- ./scripts/with_standard_tests/cohere/node/package.json:/package.json
- ../libs/langchain-standard-tests:/libs/langchain-standard-tests
- ../libs/providers/langchain-cohere:/libs/providers/langchain-cohere
- ../internal:/internal
- ./scripts:/scripts
command: bash /scripts/with_standard_tests/cohere/test-with-latest-deps.sh
cohere-lowest-deps:
@@ -195,5 +204,6 @@ services:
- ./scripts/with_standard_tests/cohere/node/package.json:/package.json
- ../libs/langchain-standard-tests:/libs/langchain-standard-tests
- ../libs/providers/langchain-cohere:/libs/providers/langchain-cohere
- ../internal:/internal
- ./scripts:/scripts
command: bash /scripts/with_standard_tests/cohere/test-with-lowest-deps.sh
@@ -35,4 +35,9 @@ pnpm install --no-frozen-lockfile
# not try to build the package/its workspace dependencies.
cd "$monorepo_vertexai_dir"

# Install @langchain/google-gauth dependency explicitly
# This is needed because the update script converts the workspace dependency
# to use the minimum version, but in the test environment we need to install
# it as a regular dependency since workspace packages aren't available
pnpm add @langchain/google-gauth
pnpm test
Member:
I think this will lead to a bunch of false positives? We purposefully want to use the lowest version of the package for -gauth published on the registry

Member Author:
yep, will revert this as I improved environment tests in a different PR.

8 changes: 8 additions & 0 deletions environment_tests/docker-compose.yml
@@ -22,6 +22,7 @@ services:
- ../libs/langchain-scripts:/langchain-scripts
- ../libs/langchain-textsplitters:/langchain-textsplitters
- ../internal/build:/langchain-build
- ../internal/eslint:/langchain-eslint
command: bash /scripts/docker-entrypoint.sh
test-exports-esm:
image: node:20
@@ -46,6 +47,7 @@
- ../libs/langchain-scripts:/langchain-scripts
- ../libs/langchain-textsplitters:/langchain-textsplitters
- ../internal/build:/langchain-build
- ../internal/eslint:/langchain-eslint
command: bash /scripts/docker-entrypoint.sh
test-exports-tsc:
image: node:20
@@ -70,6 +72,7 @@
- ../libs/langchain-scripts:/langchain-scripts
- ../libs/langchain-textsplitters:/langchain-textsplitters
- ../internal/build:/langchain-build
- ../internal/eslint:/langchain-eslint
command: bash /scripts/docker-entrypoint.sh
test-exports-cjs:
image: node:20
@@ -94,6 +97,7 @@
- ../libs/langchain-scripts:/langchain-scripts
- ../libs/langchain-textsplitters:/langchain-textsplitters
- ../internal/build:/langchain-build
- ../internal/eslint:/langchain-eslint
command: bash /scripts/docker-entrypoint.sh
test-exports-cf:
image: node:20
@@ -118,6 +122,7 @@
- ../libs/langchain-scripts:/langchain-scripts
- ../libs/langchain-textsplitters:/langchain-textsplitters
- ../internal/build:/langchain-build
- ../internal/eslint:/langchain-eslint
command: bash /scripts/docker-entrypoint.sh
test-exports-vercel:
image: node:20
@@ -142,6 +147,7 @@
- ../libs/langchain-scripts:/langchain-scripts
- ../libs/langchain-textsplitters:/langchain-textsplitters
- ../internal/build:/langchain-build
- ../internal/eslint:/langchain-eslint
command: bash /scripts/docker-entrypoint.sh
test-exports-vite:
image: node:20
@@ -166,6 +172,7 @@
- ../libs/langchain-scripts:/langchain-scripts
- ../libs/langchain-textsplitters:/langchain-textsplitters
- ../internal/build:/langchain-build
- ../internal/eslint:/langchain-eslint
command: bash /scripts/docker-entrypoint.sh
test-exports-bun:
image: oven/bun
@@ -187,6 +194,7 @@
- ../libs/langchain-scripts:/langchain-scripts
- ../libs/langchain-textsplitters:/langchain-textsplitters
- ../internal/build:/langchain-build
- ../internal/eslint:/langchain-eslint
command: bash /scripts/docker-bun-entrypoint.sh
success:
image: alpine:3.14
1 change: 1 addition & 0 deletions environment_tests/scripts/test-runner.ts
@@ -41,6 +41,7 @@ const dockerPackages: WorkspacePackage[] = [
path: "/langchain-textsplitters",
},
{ pkg: { name: "@langchain/build" }, path: "/langchain-build" },
{ pkg: { name: "@langchain/eslint" }, path: "/langchain-eslint" },
];

class EnvironmentTestRunner {
1 change: 0 additions & 1 deletion examples/src/embeddings/bedrock.ts
@@ -1,4 +1,3 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { BedrockEmbeddings } from "@langchain/aws";

const embeddings = new BedrockEmbeddings({
1 change: 0 additions & 1 deletion examples/src/guides/expression_language/message_history.ts
@@ -1,4 +1,3 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import {
ChatPromptTemplate,
MessagesPlaceholder,
27 changes: 15 additions & 12 deletions examples/src/indexes/vector_stores/cloudflare_vectorize/example.ts
@@ -1,10 +1,4 @@
// @ts-nocheck

import type {
VectorizeIndex,
Fetcher,
Request,
} from "@cloudflare/workers-types";
import type { VectorizeIndex, Ai, Request } from "@cloudflare/workers-types";

import {
CloudflareVectorizeStore,
@@ -13,7 +7,7 @@ import {

export interface Env {
VECTORIZE_INDEX: VectorizeIndex;
AI: Fetcher;
AI: Ai;
}

export default {
@@ -28,7 +22,9 @@ export default {
});
if (pathname === "/") {
const results = await store.similaritySearch("hello", 5);
return Response.json(results);
return new Response(JSON.stringify(results), {
headers: { "Content-Type": "application/json" },
});
} else if (pathname === "/load") {
// Upsertion by id is supported
await store.addDocuments(
@@ -49,12 +45,19 @@ export default {
{ ids: ["id1", "id2", "id3"] }
);

return Response.json({ success: true });
return new Response(JSON.stringify({ success: true }), {
headers: { "Content-Type": "application/json" },
});
} else if (pathname === "/clear") {
await store.delete({ ids: ["id1", "id2", "id3"] });
return Response.json({ success: true });
return new Response(JSON.stringify({ success: true }), {
headers: { "Content-Type": "application/json" },
});
}

return Response.json({ error: "Not Found" }, { status: 404 });
return new Response(JSON.stringify({ error: "Not Found" }), {
status: 404,
headers: { "Content-Type": "application/json" },
});
},
};
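Note on the change above: it swaps `Response.json(...)` for `new Response(JSON.stringify(...), ...)` in four branches. A small helper could keep those call sites shorter; the sketch below is only a suggestion and is not part of this diff (the `jsonResponse` name is hypothetical):

```ts
// Hypothetical helper for a Cloudflare Worker handler like the one above.
// Wraps any serializable value in a JSON Response with the right header.
function jsonResponse(body: unknown, status = 200): Response {
  return new Response(JSON.stringify(body), {
    status,
    headers: { "Content-Type": "application/json" },
  });
}

// Example usage inside the fetch handler:
//   return jsonResponse(results);
//   return jsonResponse({ error: "Not Found" }, 404);
```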
1 change: 0 additions & 1 deletion examples/src/indexes/vector_stores/googlevertexai.ts
@@ -1,5 +1,4 @@
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { SyntheticEmbeddings } from "@langchain/core/utils/testing";
import { GoogleCloudStorageDocstore } from "@langchain/community/stores/doc/gcs";
import {
1 change: 0 additions & 1 deletion examples/src/indexes/vector_stores/pinecone/delete_docs.ts
@@ -1,4 +1,3 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { Pinecone } from "@pinecone-database/pinecone";
import { Document } from "@langchain/core/documents";
import { OpenAIEmbeddings } from "@langchain/openai";
1 change: 0 additions & 1 deletion examples/src/indexes/vector_stores/pinecone/index_docs.ts
@@ -1,4 +1,3 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { Pinecone } from "@pinecone-database/pinecone";
import { Document } from "@langchain/core/documents";
import { OpenAIEmbeddings } from "@langchain/openai";
1 change: 0 additions & 1 deletion examples/src/indexes/vector_stores/pinecone/mmr.ts
@@ -1,4 +1,3 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { Pinecone } from "@pinecone-database/pinecone";
import { OpenAIEmbeddings } from "@langchain/openai";
import { PineconeStore } from "@langchain/pinecone";
1 change: 0 additions & 1 deletion examples/src/indexes/vector_stores/pinecone/query_docs.ts
@@ -1,4 +1,3 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { Pinecone } from "@pinecone-database/pinecone";
import { OpenAIEmbeddings } from "@langchain/openai";
import { PineconeStore } from "@langchain/pinecone";
1 change: 0 additions & 1 deletion examples/src/retrievers/metal.ts
@@ -1,4 +1,3 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import Metal from "@getmetal/metal-sdk";
import { MetalRetriever } from "@langchain/community/retrievers/metal";

1 change: 0 additions & 1 deletion examples/src/retrievers/multi_vector_small_chunks.ts
@@ -30,7 +30,6 @@ const subDocs = [];
for (let i = 0; i < docs.length; i += 1) {
const childDocs = await childSplitter.splitDocuments([docs[i]]);
const taggedChildDocs = childDocs.map((childDoc) => {
// eslint-disable-next-line no-param-reassign
childDoc.metadata[idKey] = docIds[i];
return childDoc;
});
1 change: 0 additions & 1 deletion examples/src/retrievers/zep_cloud.ts
@@ -3,7 +3,6 @@ import { randomUUID } from "crypto";
import { ZepClient, type Zep } from "@getzep/zep-cloud";

function sleep(ms: number) {
// eslint-disable-next-line no-promise-executor-return
return new Promise((resolve) => setTimeout(resolve, ms));
}

1 change: 0 additions & 1 deletion examples/src/tools/dalle_image_generation.ts
@@ -1,4 +1,3 @@
/* eslint-disable no-process-env */
import { DallEAPIWrapper } from "@langchain/openai";

const tool = new DallEAPIWrapper({
6 changes: 1 addition & 5 deletions examples/src/use_cases/chatbots/memory_management.ts
@@ -1,7 +1,3 @@
/* eslint-disable import/first */
/* eslint-disable arrow-body-style */
/* eslint-disable import/no-duplicates */

import { ChatOpenAI } from "@langchain/openai";

const chat = new ChatOpenAI({
@@ -147,7 +143,7 @@ import {
RunnableSequence,
} from "@langchain/core/runnables";

const trimMessages = async (_chainInput: Record<string, any>) => {
const trimMessages = async (_chainInput: Record<string, unknown>) => {
const storedMessages = await demoEphemeralChatMessageHistory.getMessages();
if (storedMessages.length <= 2) {
return false;
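On the `Record<string, any>` → `Record<string, unknown>` change above: with `unknown`, fields on the chain input must be narrowed before use. A standalone illustration with hypothetical names, not taken from this diff:

```ts
// With Record<string, unknown>, a field must be checked at runtime before it
// can be treated as a specific type; `any` would have skipped this check.
function messageCount(chainInput: Record<string, unknown>): number {
  const maybeMessages = chainInput.messages;
  return Array.isArray(maybeMessages) ? maybeMessages.length : 0;
}
```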
4 changes: 0 additions & 4 deletions examples/src/use_cases/chatbots/quickstart.ts
@@ -1,7 +1,3 @@
/* eslint-disable import/first */
/* eslint-disable arrow-body-style */
/* eslint-disable import/no-duplicates */

import { ChatOpenAI } from "@langchain/openai";

const chat = new ChatOpenAI({
4 changes: 0 additions & 4 deletions examples/src/use_cases/chatbots/retrieval.ts
@@ -1,7 +1,3 @@
/* eslint-disable import/first */
/* eslint-disable arrow-body-style */
/* eslint-disable import/no-duplicates */

import { ChatOpenAI } from "@langchain/openai";

const chat = new ChatOpenAI({
4 changes: 0 additions & 4 deletions examples/src/use_cases/chatbots/tool_usage.ts
@@ -1,7 +1,3 @@
/* eslint-disable import/first */
/* eslint-disable arrow-body-style */
/* eslint-disable import/no-duplicates */

import { TavilySearchResults } from "@langchain/community/tools/tavily_search";
import { ChatOpenAI } from "@langchain/openai";

@@ -17,7 +17,7 @@ function askQuestion(question: string): Promise<string> {
});
}

async function humanApproval(toolInvocations: any[]): Promise<any[]> {
async function humanApproval(toolInvocations: unknown[]): Promise<unknown[]> {
const toolStrs = toolInvocations
.map((toolCall) => JSON.stringify(toolCall, null, 2))
.join("\n\n");
@@ -12,3 +12,4 @@ const embeddings = new OpenAIEmbeddings({
const vectorStore = await Chroma.fromDocuments(chunkedDocs, embeddings, {
collectionName: "yt-videos",
});
console.log(vectorStore);
@@ -52,9 +52,7 @@ const dates = [
new Date("Nov 2, 2023"),
];
docs.forEach((doc, idx) => {
// eslint-disable-next-line no-param-reassign
doc.metadata.publish_year = getYear(dates[idx]);
// eslint-disable-next-line no-param-reassign
doc.metadata.publish_date = dates[idx];
});

@@ -22,7 +22,7 @@ const db = await SqlDatabase.fromDataSourceParams({
async function queryAsList(query: string): Promise<string[]> {
const res: Array<{ [key: string]: string }> = JSON.parse(await db.run(query))
.flat()
.filter((el: any) => el != null);
.filter((el: unknown) => el != null);
const justValues: Array<string> = res.map((item) =>
Object.values(item)[0]
.replace(/\b\d+\b/g, "")
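On the `.filter((el: unknown) => el != null)` change above: annotating the callback parameter as `unknown` avoids `any`, but an explicit type predicate is another option when the narrowed element type matters downstream. A standalone sketch with hypothetical data, not taken from this diff:

```ts
// Filtering out null entries with an explicit type predicate so the result is
// typed as Row[] without relying on the compiler to infer the narrowing.
type Row = { [key: string]: string };

const rows: Array<Row | null> = [{ title: "a" }, null, { title: "b" }];
const nonNull: Row[] = rows.filter((el): el is Row => el != null);
```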