Commit 5e78c41

apply max_form_memory_size another level up in the parser
1 parent 2fc6d4f commit 5e78c41

4 files changed (+38, -1 lines)

4 files changed

+38
-1
lines changed

CHANGES.rst

Lines changed: 6 additions & 0 deletions
@@ -1,3 +1,9 @@
+0.19.7
+------
+
+* Security Fix how ``max_form_memory_size`` is applied when parsing large
+  non-file fields. https://github.com/advisories/GHSA-q34m-jh98-gwm2
+
 0.19.6 2024-05-19
 -----------------
 

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "Quart"
-version = "0.19.6"
+version = "0.19.7.dev"
 description = "A Python ASGI web microframework with the same API as Flask"
 authors = ["pgjones <[email protected]>"]
 classifiers = [

src/quart/formparser.py

Lines changed: 10 additions & 0 deletions
@@ -15,6 +15,7 @@
 from urllib.parse import parse_qsl
 
 from werkzeug.datastructures import Headers, MultiDict
+from werkzeug.exceptions import RequestEntityTooLarge
 from werkzeug.formparser import default_stream_factory
 from werkzeug.http import parse_options_header
 from werkzeug.sansio.multipart import Data, Epilogue, Field, File, MultipartDecoder, NeedData
@@ -173,19 +174,28 @@ async def parse(
         files = []
 
         current_part: Field | File
+        field_size: int | None = None
         async for data in body:
             parser.receive_data(data)
             event = parser.next_event()
             while not isinstance(event, (Epilogue, NeedData)):
                 if isinstance(event, Field):
                     current_part = event
+                    field_size = 0
                     container = []
                     _write = container.append
                 elif isinstance(event, File):
                     current_part = event
+                    field_size = None
                     container = self.start_file_streaming(event, content_length)
                     _write = container.write
                 elif isinstance(event, Data):
+                    if field_size is not None:
+                        field_size += len(event.data)
+
+                        if field_size > self.max_form_memory_size:
+                            raise RequestEntityTooLarge()
+
                     _write(event.data)
                 if not event.more_data:
                     if isinstance(current_part, Field):
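
Read in isolation, the change above works as follows: each non-file (Field) part now carries a running byte count, field_size, which is incremented on every Data event and compared against max_form_memory_size, while File parts opt out by resetting the counter to None so streamed uploads are unaffected. The sketch below replays that accumulation pattern outside Quart, against werkzeug's sansio MultipartDecoder; parse_fields, MAX_FORM_MEMORY_SIZE, the chunked feeding, and the use of receive_data(None) to mark the end of input are illustrative assumptions, not taken from this commit.

from __future__ import annotations

from werkzeug.exceptions import RequestEntityTooLarge
from werkzeug.sansio.multipart import (
    Data,
    Epilogue,
    Field,
    File,
    MultipartDecoder,
    NeedData,
)

MAX_FORM_MEMORY_SIZE = 10  # deliberately tiny so the limit is easy to trip


def parse_fields(body: bytes, boundary: bytes, chunk_size: int = 5) -> dict[str, bytes]:
    """Collect non-file fields, enforcing MAX_FORM_MEMORY_SIZE per field."""
    parser = MultipartDecoder(boundary)
    fields: dict[str, bytes] = {}
    name = ""
    buffer = b""
    field_size: int | None = None

    # Feed the body in small pieces so a single field may arrive as several
    # Data events; a trailing None tells the decoder the input has ended.
    chunks = [body[i : i + chunk_size] for i in range(0, len(body), chunk_size)]
    for chunk in chunks + [None]:
        parser.receive_data(chunk)
        event = parser.next_event()
        while not isinstance(event, (Epilogue, NeedData)):
            if isinstance(event, Field):
                # New non-file part: start counting its bytes from zero.
                name, buffer, field_size = event.name, b"", 0
            elif isinstance(event, File):
                # File parts are streamed elsewhere, so they are not counted
                # against the in-memory form limit.
                field_size = None
            elif isinstance(event, Data):
                if field_size is not None:
                    field_size += len(event.data)
                    if field_size > MAX_FORM_MEMORY_SIZE:
                        raise RequestEntityTooLarge()
                buffer += event.data
                if not event.more_data and field_size is not None:
                    fields[name] = buffer
            event = parser.next_event()
    return fields


payload = (
    b"--bound\r\nContent-Disposition: form-data; name=a\r\n\r\n"
    + b"a" * 15
    + b"\r\n--bound--"
)
try:
    parse_fields(payload, b"bound")
except RequestEntityTooLarge:
    print("413: field grew past MAX_FORM_MEMORY_SIZE")

The structure mirrors the diff: an outer loop feeding bytes, an inner loop draining events until NeedData, and the size check living on the Data branch so it fires no matter how many events a single field spans.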

tests/test_formparser.py

Lines changed: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+import pytest
+from werkzeug.exceptions import RequestEntityTooLarge
+
+from quart.formparser import MultiPartParser
+from quart.wrappers.request import Body
+
+
+async def test_multipart_max_form_memory_size() -> None:
+    """max_form_memory_size is tracked across multiple data events."""
+    data = b"--bound\r\nContent-Disposition: form-field; name=a\r\n\r\n"
+    data += b"a" * 15 + b"\r\n--bound--"
+    body = Body(None, None)
+    body.set_result(data)
+    # The buffer size is less than the max size, so multiple data events will be
+    # returned. The field size is greater than the max.
+    parser = MultiPartParser(max_form_memory_size=10, buffer_size=5)
+
+    with pytest.raises(RequestEntityTooLarge):
+        await parser.parse(body, b"bound", 0)
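
The test exercises only the failure path; as a companion, the same setup with a field that stays within the limit should parse without raising. A hedged sketch follows, assuming MultiPartParser.parse returns a pair of form and file multidicts and that the field value decodes to a plain string; neither detail is shown by this commit.

import asyncio

from quart.formparser import MultiPartParser
from quart.wrappers.request import Body


async def main() -> None:
    # Same construction as the test above, but the field is only 5 bytes,
    # which is below max_form_memory_size=10, so no exception is expected.
    data = b"--bound\r\nContent-Disposition: form-field; name=a\r\n\r\n"
    data += b"a" * 5 + b"\r\n--bound--"
    body = Body(None, None)
    body.set_result(data)
    parser = MultiPartParser(max_form_memory_size=10, buffer_size=5)
    form, files = await parser.parse(body, b"bound", 0)
    print(dict(form))  # expected: {"a": "aaaaa"}


asyncio.run(main())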
