Skip to content

Commit cc38cc3

Browse files
committed
fix(ai-bot): correct configuration to call ai.lies.exposed from remote
1 parent 6ed7cc8 commit cc38cc3

File tree

5 files changed

+77
-28
lines changed

5 files changed

+77
-28
lines changed

services/ai-bot/config/ai-bot.config.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
{
22
"api": { "url": "http://api.liexp.dev/v1" },
33
"localAi": {
4-
"url": "http://localai.liexp.dev:8080/v1",
4+
"url": "https://ai.lies.exposed/v1",
55
"apiKey": "no-key-is-a-good-key",
66
"models": {
77
"summarize": "gpt-4o",
8-
"chat": "qwen3-embedding-8b",
8+
"chat": "qwen3-8b",
99
"embeddings": "qwen3-embedding-8b"
1010
}
1111
}
services/ai-bot/src/cli/chat.command.ts

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
import { fp } from "@liexp/core/lib/fp/index.js";
2+
import { throwTE } from "@liexp/shared/lib/utils/task.utils.js";
3+
import { pipe } from "fp-ts/lib/function.js";
4+
import { toAIBotError } from "../common/error/index.js";
5+
import { type CommandFlow } from "./CommandFlow.js";
6+
7+
export const chatCommand: CommandFlow = async (ctx, args) => {
8+
return pipe(
9+
fp.TE.tryCatch(async () => {
10+
const result = await ctx.langchain.chat
11+
.withConfig({
12+
reasoning: {
13+
effort: "minimal",
14+
},
15+
})
16+
.invoke([
17+
{
18+
role: "system",
19+
content:
20+
"You are a helpful assistant that translates answer to the given message.",
21+
},
22+
{
23+
role: "user",
24+
content: args[0],
25+
},
26+
]);
27+
28+
ctx.logger.info.log(`Message: %O`, result.toJSON());
29+
}, toAIBotError),
30+
throwTE,
31+
);
32+
};

services/ai-bot/src/cli/index.ts

Lines changed: 19 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -3,13 +3,16 @@ import { fp } from "@liexp/core/lib/fp/index.js";
33
import { throwTE } from "@liexp/shared/lib/utils/task.utils.js";
44
import D from "debug";
55
import { pipe } from "fp-ts/lib/function.js";
6+
import { type ClientContext } from "../context.js";
67
import { userLogin } from "../flows/userLogin.flow.js";
78
import { loadContext } from "../load-context.js";
89
import { type CommandFlow } from "./CommandFlow.js";
10+
import { chatCommand } from "./chat.command.js";
911
import { processJobCommand } from "./process-job.command.js";
1012

1113
const commands: Record<string, CommandFlow> = {
1214
"process-job": processJobCommand,
15+
chat: chatCommand,
1316
};
1417

1518
const run = async ([command, ...args]: string[]): Promise<void> => {
@@ -24,24 +27,30 @@ const run = async ([command, ...args]: string[]): Promise<void> => {
2427
loadENV(process.cwd(), process.env.DOTENV_CONFIG_PATH ?? "../../.env");
2528

2629
let token: string = "invalid-token";
30+
let ctx: ClientContext | undefined = undefined;
31+
try {
32+
ctx = await pipe(
33+
loadContext(() => token),
34+
throwTE,
35+
);
2736

28-
const ctx = await pipe(
29-
loadContext(() => token),
30-
throwTE,
31-
);
37+
token = await pipe(userLogin()(ctx), throwTE);
3238

33-
token = await pipe(userLogin()(ctx), throwTE);
39+
D.enable(fp.O.getOrElse(() => "-")(ctx.env.DEBUG));
3440

35-
D.enable(fp.O.getOrElse(() => "-")(ctx.env.DEBUG));
41+
ctx.logger.info.log("Running command %s with args: %O", command, args);
3642

37-
ctx.logger.info.log("Running command %s with args: %O", command, args);
38-
try {
3943
await commands[command](ctx, args);
4044
} catch (e) {
41-
ctx.logger.error.log("Error running command %s: %O", command, e);
45+
ctx?.logger.error.log("Error running command %s: %O", command, e);
46+
throw e;
4247
}
4348
};
4449

4550
void run(process.argv.splice(2))
4651
.then(() => process.exit(0))
47-
.catch(() => process.exit(1));
52+
.catch((e) => {
53+
// eslint-disable-next-line no-console
54+
console.error(e);
55+
process.exit(1);
56+
});

services/ai-bot/src/env.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ const ENV = Schema.Struct({
1111
DEBUG: OptionFromNullishToNull(Schema.String),
1212
LIEXP_USERNAME: OptionFromNullishToNull(Schema.String),
1313
LIEXP_PASSWORD: OptionFromNullishToNull(Schema.String),
14-
LOCAL_AI_URL: OptionFromNullishToNull(Schema.String),
14+
LOCALAI_URL: OptionFromNullishToNull(Schema.String),
1515
LOCALAI_API_KEY: Schema.String,
1616
LOCALAI_TIMEOUT: OptionFromNullishToNull(Schema.NumberFromString),
1717
CF_ACCESS_CLIENT_ID: OptionFromNullishToNull(Schema.String),

services/ai-bot/src/load-context.ts

Lines changed: 23 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ export const loadContext = (
4545
configProvider({ fs }),
4646
fp.TE.bind("localAIURL", (config) => {
4747
return pipe(
48-
env.LOCAL_AI_URL,
48+
env.LOCALAI_URL,
4949
fp.O.getOrElse(() => config.config.localAi.url),
5050
fp.TE.right<AIBotError, string>,
5151
);
@@ -61,7 +61,7 @@ export const loadContext = (
6161
const timeout = pipe(
6262
fp.O.fromNullable(_timeout),
6363
fp.O.alt(() => env.LOCALAI_TIMEOUT),
64-
fp.O.getOrElse(() => 60 * 3_600),
64+
fp.O.getOrElse(() => 3_600), // 1 hour
6565
);
6666

6767
return {
@@ -85,15 +85,19 @@ export const loadContext = (
8585
"CF-Access-Client-Id": env.CF_ACCESS_CLIENT_ID,
8686
"CF-Access-Client-Secret": env.CF_ACCESS_CLIENT_SECRET,
8787
}),
88-
fp.O.map((headers) => ({
89-
...headers,
90-
Cookie: `token=${env.LOCALAI_API_KEY}`,
91-
})),
92-
fp.O.toUndefined,
93-
fp.TE.right<AIBotError, Record<string, string> | undefined>,
88+
fp.O.getOrElse(() => ({})),
89+
fp.TE.right<AIBotError, HeadersInit>,
9490
);
9591
}),
96-
fp.TE.bind("langchain", ({ config, localaiHeaders }) =>
92+
fp.TE.bind("headers", ({ localaiHeaders, env }) =>
93+
fp.TE.right({
94+
...localaiHeaders,
95+
Accept: "application/json",
96+
"content-type": "application/json",
97+
Cookie: `token=${env.LOCALAI_API_KEY}`,
98+
}),
99+
),
100+
fp.TE.bind("langchain", ({ config, headers }) =>
97101
fp.TE.right(
98102
GetLangchainProvider({
99103
baseURL: config.config.localAi.url,
@@ -106,32 +110,36 @@ export const loadContext = (
106110
options: {
107111
chat: {
108112
timeout: config.config.localAi.timeout,
113+
// useResponsesApi: true,
109114
configuration: {
110-
defaultHeaders: localaiHeaders,
115+
defaultHeaders: headers,
111116
fetchOptions: {
112-
headers: localaiHeaders as any,
117+
headers: headers as any,
113118
},
114119
},
115120
},
116121
embeddings: {
117122
timeout: config.config.localAi.timeout,
118123
configuration: {
119-
defaultHeaders: localaiHeaders,
124+
defaultHeaders: headers,
125+
fetchOptions: {
126+
headers: headers as any,
127+
},
120128
},
121129
},
122130
},
123131
}),
124132
),
125133
),
126-
fp.TE.bind("openAI", ({ config, localaiHeaders }) =>
134+
fp.TE.bind("openAI", ({ config, headers }) =>
127135
fp.TE.right(
128136
GetOpenAIProvider({
129137
baseURL: config.config.localAi.url,
130138
apiKey: config.config.localAi.apiKey,
131139
timeout: config.config.localAi.timeout,
132-
defaultHeaders: localaiHeaders,
140+
defaultHeaders: headers,
133141
fetchOptions: {
134-
headers: localaiHeaders as any,
142+
headers: headers as any,
135143
},
136144
}),
137145
),

0 commit comments

Comments (0)