Commit df6ea8e

fix: Skip content-length assignment when transfer-encoding is chunked. (#271)
1 parent f5f3957 commit df6ea8e
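
In short, the commit gates the buffered-read / Content-Length path in responseViaResponseObject on the response's transfer-encoding: when a handler explicitly sets Transfer-Encoding: chunked, no content-length is computed from the collected chunks, since an HTTP message must not carry both headers. A minimal sketch of the resulting guard follows; the helper name is hypothetical, and the real check is inlined in src/listener.ts.

// Sketch only: illustrative helper, not the library's actual code.
const setContentLengthUnlessChunked = (
  resHeaderRecord: Record<string, string | number>,
  values: Uint8Array[]
): void => {
  // Chunked framing carries its own lengths; a message must not have both
  // Content-Length and Transfer-Encoding: chunked, so skip the assignment.
  if (resHeaderRecord['transfer-encoding'] === 'chunked') {
    return
  }
  if (!('content-length' in resHeaderRecord)) {
    resHeaderRecord['content-length'] = values.reduce((acc, v) => acc + v.length, 0)
  }
}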

2 files changed: +55 -30 lines

src/listener.ts

Lines changed: 33 additions & 30 deletions
@@ -74,6 +74,7 @@ const responseViaCache = async (
     header = buildOutgoingHttpHeaders(header)
   }
 
+  // in `responseViaCache`, if body is not stream, Transfer-Encoding is considered not chunked
   if (typeof body === 'string') {
     header['Content-Length'] = Buffer.byteLength(body)
   } else if (body instanceof Uint8Array) {
@@ -131,43 +132,45 @@ const responseViaResponseObject = async (
   let done = false
   let currentReadPromise: Promise<ReadableStreamReadResult<Uint8Array>> | undefined = undefined
 
-  // In the case of synchronous responses, usually a maximum of two (or three in special cases) readings is done
-  let maxReadCount = 2
-  for (let i = 0; i < maxReadCount; i++) {
-    currentReadPromise ||= reader.read()
-    const chunk = await readWithoutBlocking(currentReadPromise).catch((e) => {
-      console.error(e)
-      done = true
-    })
-    if (!chunk) {
-      if (i === 1 && resHeaderRecord['transfer-encoding'] !== 'chunked') {
-        // XXX: In Node.js v24, some response bodies are not read all the way through until the next task queue,
-        // so wait a moment and retry. (e.g. new Blob([new Uint8Array(contents)]) )
-        await new Promise((resolve) => setTimeout(resolve))
-        maxReadCount = 3
-        continue
+  if (resHeaderRecord['transfer-encoding'] !== 'chunked') {
+    // In the case of synchronous responses, usually a maximum of two (or three in special cases) readings is done
+    let maxReadCount = 2
+    for (let i = 0; i < maxReadCount; i++) {
+      currentReadPromise ||= reader.read()
+      const chunk = await readWithoutBlocking(currentReadPromise).catch((e) => {
+        console.error(e)
+        done = true
+      })
+      if (!chunk) {
+        if (i === 1) {
+          // XXX: In Node.js v24, some response bodies are not read all the way through until the next task queue,
+          // so wait a moment and retry. (e.g. new Blob([new Uint8Array(contents)]) )
+          await new Promise((resolve) => setTimeout(resolve))
+          maxReadCount = 3
+          continue
+        }
+
+        // Error occurred or currentReadPromise is not yet resolved.
+        // If an error occurs, immediately break the loop.
+        // If currentReadPromise is not yet resolved, pass it to writeFromReadableStreamDefaultReader.
+        break
       }
+      currentReadPromise = undefined
 
-      // Error occurred or currentReadPromise is not yet resolved.
-      // If an error occurs, immediately break the loop.
-      // If currentReadPromise is not yet resolved, pass it to writeFromReadableStreamDefaultReader.
-      break
+      if (chunk.value) {
+        values.push(chunk.value)
+      }
+      if (chunk.done) {
+        done = true
+        break
+      }
     }
-    currentReadPromise = undefined
 
-    if (chunk.value) {
-      values.push(chunk.value)
-    }
-    if (chunk.done) {
-      done = true
-      break
+    if (done && !('content-length' in resHeaderRecord)) {
+      resHeaderRecord['content-length'] = values.reduce((acc, value) => acc + value.length, 0)
     }
   }
 
-  if (done && !('content-length' in resHeaderRecord)) {
-    resHeaderRecord['content-length'] = values.reduce((acc, value) => acc + value.length, 0)
-  }
-
   outgoing.writeHead(res.status, resHeaderRecord)
   values.forEach((value) => {
     ;(outgoing as Writable).write(value)
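
For readers of the hunk above: readWithoutBlocking lets the listener pick up chunks that are already available instead of awaiting a stream that may stay pending. The sketch below is only an assumption about its behaviour, not the library's exact implementation: race the pending read against an already-settled sentinel and report undefined when the read has not settled yet.

// Assumed sketch; not necessarily the readWithoutBlocking in src/listener.ts.
const notYetSettled = Symbol('notYetSettled')

const readWithoutBlockingSketch = <T>(
  readPromise: Promise<ReadableStreamReadResult<T>>
): Promise<ReadableStreamReadResult<T> | undefined> =>
  Promise.race([readPromise, Promise.resolve(notYetSettled)]).then((result) =>
    result === notYetSettled
      ? undefined // the read had not settled when the race was set up
      : (result as ReadableStreamReadResult<T>)
  )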

test/server.test.ts

Lines changed: 22 additions & 0 deletions
@@ -185,6 +185,20 @@ describe('various response body types', () => {
     })
     return new Response(stream)
   })
+  app.get('/readable-stream-with-transfer-encoding', () => {
+    const stream = new ReadableStream({
+      async start(controller) {
+        controller.enqueue('Hello!') // send one chunk synchronously
+        controller.close()
+      },
+    })
+    return new Response(stream, {
+      headers: {
+        'content-type': 'text/plain; charset=UTF-8',
+        'transfer-encoding': 'chunked',
+      },
+    })
+  })
   const eventStreamPromise = new Promise<void>((resolve) => {
     resolveEventStreamPromise = resolve
   })
@@ -295,6 +309,14 @@ describe('various response body types', () => {
     expect(expectedChunks.length).toBe(0) // all chunks are received
   })
 
+  it('Should return 200 response - GET /readable-stream-with-transfer-encoding', async () => {
+    const res = await request(server).get('/readable-stream-with-transfer-encoding')
+    expect(res.status).toBe(200)
+    expect(res.headers['content-type']).toMatch('text/plain; charset=UTF-8')
+    expect(res.headers['transfer-encoding']).toBe('chunked')
+    expect(res.headers['content-length']).toBeUndefined()
+  })
+
   it('Should return 200 response - GET /event-stream', async () => {
     const expectedChunks = ['data: First!\n\n', 'data: Second!\n\n']
     const resPromise = request(server)
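
Outside the test suite, the fixed behaviour can be observed with a minimal server along these lines (route name and port are arbitrary; serve and Hono are the package's usual entry points). The chunked route should come back with Transfer-Encoding: chunked and no Content-Length header.

import { serve } from '@hono/node-server'
import { Hono } from 'hono'

const app = new Hono()

// Mirrors the new test route: one chunk enqueued synchronously,
// with transfer-encoding explicitly set to chunked.
app.get('/chunked', () => {
  const stream = new ReadableStream({
    start(controller) {
      controller.enqueue(new TextEncoder().encode('Hello!'))
      controller.close()
    },
  })
  return new Response(stream, {
    headers: {
      'content-type': 'text/plain; charset=UTF-8',
      'transfer-encoding': 'chunked',
    },
  })
})

serve({ fetch: app.fetch, port: 3000 })
// curl -i http://localhost:3000/chunked
// Expected after this fix: Transfer-Encoding: chunked, and no Content-Length header.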
