Skip to content

Commit 9ada023

Browse files
authored
feat (ai/core): mask data stream error messages. (#2617)
1 parent b75ad80 commit 9ada023

File tree

4 files changed

+82
-10
lines changed

4 files changed

+82
-10
lines changed

.changeset/tricky-trains-yawn.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
'ai': patch
3+
---
4+
5+
feat (ai/core): mask data stream error messages with streamText

content/docs/07-reference/ai-sdk-core/02-stream-text.mdx

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -769,11 +769,6 @@ for await (const textPart of textStream) {
769769
},
770770
],
771771
},
772-
{
773-
name: 'toDataStream',
774-
type: '(callbacks?: AIStreamCallbacksAndOptions) => data stream',
775-
description: 'Converts the result to a data stream.',
776-
},
777772
{
778773
name: 'pipeDataStreamToResponse',
779774
type: '(response: ServerResponse, init?: { headers?: Record<string, string>; status?: number }) => void',
@@ -826,6 +821,13 @@ for await (const textPart of textStream) {
826821
optional: true,
827822
description: 'The stream data object.',
828823
},
824+
{
825+
name: 'getErrorMessage',
826+
type: '(error: unknown) => string',
827+
description:
828+
'A function to get the error message from the error object. By default, all errors are masked as "" for safety reasons.',
829+
optional: true,
830+
},
829831
],
830832
},
831833
],

packages/ai/core/generate-text/stream-text.test.ts

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1054,6 +1054,48 @@ describe('result.toDataStreamResponse', () => {
10541054
'0:"world!"\n',
10551055
]);
10561056
});
1057+
1058+
it('should mask error messages by default', async () => {
1059+
const result = await streamText({
1060+
model: new MockLanguageModelV1({
1061+
doStream: async () => ({
1062+
stream: convertArrayToReadableStream([
1063+
{ type: 'error', error: 'error' },
1064+
]),
1065+
rawCall: { rawPrompt: 'prompt', rawSettings: {} },
1066+
}),
1067+
}),
1068+
prompt: 'test-input',
1069+
});
1070+
1071+
const response = result.toDataStreamResponse();
1072+
1073+
assert.deepStrictEqual(await convertResponseStreamToArray(response), [
1074+
'3:""\n',
1075+
]);
1076+
});
1077+
1078+
it('should support custom error messages', async () => {
1079+
const result = await streamText({
1080+
model: new MockLanguageModelV1({
1081+
doStream: async () => ({
1082+
stream: convertArrayToReadableStream([
1083+
{ type: 'error', error: 'error' },
1084+
]),
1085+
rawCall: { rawPrompt: 'prompt', rawSettings: {} },
1086+
}),
1087+
}),
1088+
prompt: 'test-input',
1089+
});
1090+
1091+
const response = result.toDataStreamResponse({
1092+
getErrorMessage: error => `custom error message: ${error}`,
1093+
});
1094+
1095+
assert.deepStrictEqual(await convertResponseStreamToArray(response), [
1096+
'3:"custom error message: error"\n',
1097+
]);
1098+
});
10571099
});
10581100

10591101
describe('result.toTextStreamResponse', () => {

packages/ai/core/generate-text/stream-text.ts

Lines changed: 28 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -513,6 +513,16 @@ However, the LLM results are expected to be small enough to not cause issues.
513513
}
514514

515515
toAIStream(callbacks: AIStreamCallbacksAndOptions = {}) {
516+
return this.toDataStream({ callbacks });
517+
}
518+
519+
private toDataStream({
520+
callbacks = {},
521+
getErrorMessage = () => '', // mask error messages for safety by default
522+
}: {
523+
callbacks?: AIStreamCallbacksAndOptions;
524+
getErrorMessage?: (error: unknown) => string;
525+
} = {}) {
516526
let aggregatedResponse = '';
517527

518528
const callbackTransformer = new TransformStream<
@@ -588,7 +598,7 @@ However, the LLM results are expected to be small enough to not cause issues.
588598
break;
589599
case 'error':
590600
controller.enqueue(
591-
formatStreamPart('error', JSON.stringify(chunk.error)),
601+
formatStreamPart('error', getErrorMessage(chunk.error)),
592602
);
593603
break;
594604
case 'finish':
@@ -632,7 +642,7 @@ However, the LLM results are expected to be small enough to not cause issues.
632642
...init?.headers,
633643
});
634644

635-
const reader = this.toAIStream().getReader();
645+
const reader = this.toDataStream().getReader();
636646

637647
const read = async () => {
638648
try {
@@ -688,7 +698,13 @@ However, the LLM results are expected to be small enough to not cause issues.
688698
}
689699

690700
toDataStreamResponse(
691-
options?: ResponseInit | { init?: ResponseInit; data?: StreamData },
701+
options?:
702+
| ResponseInit
703+
| {
704+
init?: ResponseInit;
705+
data?: StreamData;
706+
getErrorMessage?: (error: unknown) => string;
707+
},
692708
): Response {
693709
const init: ResponseInit | undefined =
694710
options == null
@@ -709,9 +725,16 @@ However, the LLM results are expected to be small enough to not cause issues.
709725
? options.data
710726
: undefined;
711727

728+
const getErrorMessage: ((error: unknown) => string) | undefined =
729+
options == null
730+
? undefined
731+
: 'getErrorMessage' in options
732+
? options.getErrorMessage
733+
: undefined;
734+
712735
const stream = data
713-
? mergeStreams(data.stream, this.toAIStream())
714-
: this.toAIStream();
736+
? mergeStreams(data.stream, this.toDataStream({ getErrorMessage }))
737+
: this.toDataStream({ getErrorMessage });
715738

716739
return new Response(stream, {
717740
status: init?.status ?? 200,

0 commit comments

Comments
 (0)