59 changes: 31 additions & 28 deletions aiohttp/http_parser.py
@@ -394,6 +394,7 @@
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress,
lax=self.lax,
headers_parser=self._headers_parser,
)
if not payload_parser.done:
self._payload_parser = payload_parser
@@ -412,6 +413,7 @@
compression=msg.compression,
auto_decompress=self._auto_decompress,
lax=self.lax,
headers_parser=self._headers_parser,
)
elif not empty_body and length is None and self.read_until_eof:
payload = StreamReader(
@@ -430,6 +432,7 @@
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress,
lax=self.lax,
headers_parser=self._headers_parser,
)
if not payload_parser.done:
self._payload_parser = payload_parser
@@ -467,6 +470,8 @@

eof = True
data = b""
if isinstance(underlying_exc, BadHttpMessage):
raise

if eof:
start_pos = 0
@@ -758,6 +763,8 @@
response_with_body: bool = True,
auto_decompress: bool = True,
lax: bool = False,
*,
headers_parser: HeadersParser,
) -> None:
self._length = 0
self._type = ParseState.PARSE_UNTIL_EOF
@@ -766,6 +773,9 @@
self._chunk_tail = b""
self._auto_decompress = auto_decompress
self._lax = lax
self._headers_parser = headers_parser
# HeadersParser expects status/request line first, so skips the first line.
self._trailer_lines: list[bytes] = [b""]
self.done = False

# payload decompression wrapper
@@ -854,7 +864,7 @@

chunk = chunk[pos + len(SEP) :]
if size == 0: # eof marker
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
self._chunk = ChunkState.PARSE_TRAILERS
if self._lax and chunk.startswith(b"\r"):
chunk = chunk[1:]
else:
@@ -888,38 +898,31 @@
self._chunk_tail = chunk
return False, b""

# if stream does not contain trailer, after 0\r\n
# we should get another \r\n otherwise
# trailers needs to be skipped until \r\n\r\n
if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
head = chunk[: len(SEP)]
if head == SEP:
# end of stream
self.payload.feed_eof()
return True, chunk[len(SEP) :]
# Both CR and LF, or only LF may not be received yet. It is
# expected that CRLF or LF will be shown at the very first
# byte next time, otherwise trailers should come. The last
# CRLF which marks the end of response might not be
# contained in the same TCP segment which delivered the
# size indicator.
if not head:
return False, b""
if head == SEP[:1]:
self._chunk_tail = head
return False, b""
self._chunk = ChunkState.PARSE_TRAILERS

# read and discard trailer up to the CRLF terminator
if self._chunk == ChunkState.PARSE_TRAILERS:
pos = chunk.find(SEP)
if pos >= 0:
chunk = chunk[pos + len(SEP) :]
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
if pos < 0: # No line found
self._chunk_tail = chunk
return False, b""

line = chunk[:pos]
chunk = chunk[pos + len(SEP) :]
if SEP == b"\n": # For lax response parsing
line = line.rstrip(b"\r")
self._trailer_lines.append(line)

# \r\n\r\n found, end of stream
if self._trailer_lines[-1] == b"":
# Headers and trailers are defined the same way,
# so we reuse the HeadersParser here.
try:
trailers, raw_trailers = self._headers_parser.parse_headers(
self._trailer_lines
)
finally:
self._trailer_lines.clear()
self.payload.feed_eof()
return True, chunk

# Read all bytes until eof
elif self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_data(chunk)
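The hunks above replace the old "skip trailers until `\r\n\r\n`" logic with real trailer parsing: trailer lines after the terminating 0-size chunk are accumulated into a list seeded with one empty entry (because `HeadersParser` skips the first line, normally the request/status line), and once a blank line closes the trailer block the accumulated lines are parsed like ordinary headers. As a minimal, self-contained sketch of that technique only — not aiohttp's implementation; `parse_trailer_block` and `read_trailers` below are hypothetical stand-ins for `HeadersParser.parse_headers` and the `ChunkedParser` state machine:

```python
# Illustrative sketch of the trailer-accumulation approach used in this diff.
from typing import Dict, List

SEP = b"\r\n"


def parse_trailer_block(lines: List[bytes]) -> Dict[bytes, bytes]:
    """Parse trailer lines the same way header lines are parsed.

    The first element is skipped, mirroring a headers parser that expects
    the request/status line in position 0.
    """
    trailers: Dict[bytes, bytes] = {}
    for line in lines[1:]:
        if not line:
            continue  # blank terminator line carries no field
        name, _, value = line.partition(b":")
        trailers[name.strip()] = value.strip()
    return trailers


def read_trailers(buf: bytes) -> Dict[bytes, bytes]:
    """Consume trailer lines after the 0-size chunk until a blank line."""
    # Seed with b"" so the "skip the first line" convention holds.
    trailer_lines: List[bytes] = [b""]
    while True:
        pos = buf.find(SEP)
        if pos < 0:  # incomplete line; a real parser would buffer and wait
            raise ValueError("need more data")
        line, buf = buf[:pos], buf[pos + len(SEP):]
        trailer_lines.append(line)
        if line == b"":  # blank line terminates the trailer block
            return parse_trailer_block(trailer_lines)


# Example: trailers following the final "0\r\n" of a chunked body.
print(read_trailers(b"Expires: 0\r\nX-Checksum: abc123\r\n\r\n"))
# {b'Expires': b'0', b'X-Checksum': b'abc123'}
```

The seeded `b""` entry corresponds to `self._trailer_lines: list[bytes] = [b""]` in the constructor hunk, and the blank-line check corresponds to `if self._trailer_lines[-1] == b"":` in the parsing loop above.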