Skip to content

Commit 1ce17fa

Browse files
committed
Update OpenAI library & types
1 parent 4f87167 commit 1ce17fa

File tree

5 files changed

+25
-136
lines changed

5 files changed

+25
-136
lines changed

package-lock.json

Lines changed: 10 additions & 105 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "openai-chat-tokens",
3-
"version": "0.2.5",
3+
"version": "0.2.6",
44
"description": "Estimate the number of tokens an OpenAI chat completion request will use",
55
"main": "dist/index.js",
66
"types": "dist/index.d.ts",
@@ -25,11 +25,11 @@
2525
"devDependencies": {
2626
"@types/jest": "^29.5.3",
2727
"jest": "^29.6.1",
28-
"openai": "^4.0.0-beta.4",
28+
"openai": "^4.2.0",
2929
"ts-jest": "^29.1.1",
3030
"typescript": "^5.1.6"
3131
},
3232
"dependencies": {
3333
"js-tiktoken": "^1.0.7"
3434
}
35-
}
35+
}

src/functions.ts

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,11 @@
1+
import OpenAI from "openai";
2+
3+
type OpenAIFunction = OpenAI.Chat.CompletionCreateParams.Function;
4+
15
// Types representing the OpenAI function definitions. While the OpenAI client library
26
// does have types for function definitions, the properties are just Record<string, unknown>,
37
// which isn't very useful for type checking this formatting code.
4-
export interface FunctionDef {
8+
export interface FunctionDef extends Omit<OpenAIFunction, "parameters"> {
59
name: string;
610
description?: string;
711
parameters: ObjectProp;
@@ -32,9 +36,9 @@ type Prop = {
3236
| { type: "boolean" }
3337
| { type: "null" }
3438
| {
35-
type: "array";
36-
items?: Prop;
37-
}
39+
type: "array";
40+
items?: Prop;
41+
}
3842
);
3943

4044
// When OpenAI use functions in the prompt, they format them as TypeScript definitions rather than OpenAPI JSON schemas.

src/index.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,8 @@ import OpenAI from "openai";
22
import { Tiktoken, getEncoding } from "js-tiktoken";
33
import { FunctionDef, formatFunctionDefinitions } from "./functions";
44

5-
type Message = OpenAI.Chat.CompletionCreateParams.CreateChatCompletionRequestNonStreaming.Message;
6-
type Function = OpenAI.Chat.CompletionCreateParams.CreateChatCompletionRequestNonStreaming.Function;
5+
type Message = OpenAI.Chat.CreateChatCompletionRequestMessage;
6+
type Function = OpenAI.Chat.CompletionCreateParams.Function;
77

88
let encoder: Tiktoken | undefined;
99

tests/token-counts.test.ts

Lines changed: 2 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,35 +1,15 @@
11
import OpenAI from "openai";
22
import { promptTokensEstimate } from "../src";
33

4-
type Message = OpenAI.Chat.CompletionCreateParams.CreateChatCompletionRequestNonStreaming.Message;
5-
type Function = OpenAI.Chat.CompletionCreateParams.CreateChatCompletionRequestNonStreaming.Function;
4+
type Message = OpenAI.Chat.CreateChatCompletionRequestMessage;
5+
type Function = OpenAI.Chat.CompletionCreateParams.Function;
66
type Example = {
77
messages: Message[];
88
functions?: Function[];
99
tokens: number;
1010
validate?: boolean
1111
};
1212

13-
const r: OpenAI.Chat.CompletionCreateParams.CreateChatCompletionRequestNonStreaming = {
14-
"model": "gpt-3.5-turbo",
15-
"temperature": 0,
16-
"functions": [
17-
{
18-
"name": "do_stuff",
19-
"parameters": {
20-
"type": "object",
21-
"properties": {}
22-
}
23-
}
24-
],
25-
"messages": [
26-
{
27-
"role": "system",
28-
"content": "hello:"
29-
},
30-
]
31-
};
32-
3313
const TEST_CASES: Example[] = [
3414
// these match https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
3515
{

0 commit comments

Comments (0)