Commit 03a5c34

ShogunPanda authored and RafaelGSS committed
http: add maximum chunk extension size
PR-URL: nodejs-private/node-private#518
Fixes: https://hackerone.com/reports/2233486
Reviewed-By: Matteo Collina <[email protected]>
Reviewed-By: Marco Ippolito <[email protected]>
Reviewed-By: Rafael Gonzaga <[email protected]>
CVE-ID: CVE-2024-22019
1 parent 834ae37 commit 03a5c34
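
For context: a chunk extension is the optional ";name=value" annotation that may follow the size field of each chunk in a Transfer-Encoding: chunked body. Before this change the parser accepted extensions of unbounded length, so a client could keep the server busy by streaming an arbitrarily long extension, the vector behind CVE-2024-22019. The commit caps the accumulated extension bytes at 16 KiB per chunk and rejects offending requests with 413 Payload Too Large. The snippet below is a minimal reproduction sketch adapted from the regression test added by this commit; it is not part of the commit itself, and the header values are arbitrary.

'use strict';

// Start a local HTTP server and send a chunked request whose single chunk
// carries a ~20 KB extension. On a Node.js build that includes this commit,
// the server answers with 413 and closes the connection instead of buffering
// the extension without limit.
const http = require('http');
const net = require('net');

const server = http.createServer((req, res) => {
  req.resume();
  req.on('end', () => res.end('ok'));
});

server.listen(0, () => {
  const sock = net.connect(server.address().port);
  let response = '';

  sock.on('data', (chunk) => response += chunk);
  sock.on('end', () => {
    console.log(response.split('\r\n')[0]); // HTTP/1.1 413 Payload Too Large
    server.close();
  });

  // One 2-byte chunk ("AA") preceded by a 20000-byte extension name.
  sock.end('GET / HTTP/1.1\r\n' +
           'Host: localhost\r\n' +
           'Transfer-Encoding: chunked\r\n\r\n' +
           '2;' + 'A'.repeat(20000) + '=bar\r\nAA\r\n' +
           '0\r\n\r\n');
});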

File tree

3 files changed: +159 -4 lines changed

lib/_http_server.js

Lines changed: 8 additions & 0 deletions
@@ -866,6 +866,11 @@ const requestHeaderFieldsTooLargeResponse = Buffer.from(
   'Connection: close\r\n\r\n', 'ascii',
 );
 
+const requestChunkExtensionsTooLargeResponse = Buffer.from(
+  `HTTP/1.1 413 ${STATUS_CODES[413]}\r\n` +
+  'Connection: close\r\n\r\n', 'ascii',
+);
+
 function socketOnError(e) {
   // Ignore further errors
   this.removeListener('error', socketOnError);
@@ -886,6 +891,9 @@ function socketOnError(e) {
     case 'HPE_HEADER_OVERFLOW':
       response = requestHeaderFieldsTooLargeResponse;
       break;
+    case 'HPE_CHUNK_EXTENSIONS_OVERFLOW':
+      response = requestChunkExtensionsTooLargeResponse;
+      break;
     case 'ERR_HTTP_REQUEST_TIMEOUT':
       response = requestTimeoutResponse;
       break;
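
On the JavaScript side the change mirrors the existing 431 handling: a 413 response is pre-serialized once at module load and written verbatim when the parser reports the new HPE_CHUNK_EXTENSIONS_OVERFLOW error. Below is a rough standalone model of that mapping, not the actual _http_server.js code; it covers only the two overflow cases, while the real socketOnError() switch also handles other error codes such as request timeouts.

'use strict';

// Pre-serialized responses keyed by parser error code, modeled on the
// requestHeaderFieldsTooLargeResponse / requestChunkExtensionsTooLargeResponse
// buffers in lib/_http_server.js.
const { STATUS_CODES } = require('http');

const cannedResponses = {
  HPE_HEADER_OVERFLOW: Buffer.from(
    `HTTP/1.1 431 ${STATUS_CODES[431]}\r\nConnection: close\r\n\r\n`, 'ascii'),
  HPE_CHUNK_EXTENSIONS_OVERFLOW: Buffer.from(
    `HTTP/1.1 413 ${STATUS_CODES[413]}\r\nConnection: close\r\n\r\n`, 'ascii'),
};

// In the server this buffer would be written to the socket before it is
// destroyed; here we just print it.
console.log(cannedResponses.HPE_CHUNK_EXTENSIONS_OVERFLOW.toString('ascii'));
// HTTP/1.1 413 Payload Too Large
// Connection: close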

src/node_http_parser.cc

Lines changed: 20 additions & 4 deletions
@@ -79,6 +79,8 @@ const uint32_t kOnExecute = 5;
 const uint32_t kOnTimeout = 6;
 // Any more fields than this will be flushed into JS
 const size_t kMaxHeaderFieldsCount = 32;
+// Maximum size of chunk extensions
+const size_t kMaxChunkExtensionsSize = 16384;
 
 const uint32_t kLenientNone = 0;
 const uint32_t kLenientHeaders = 1 << 0;
@@ -271,6 +273,7 @@ class Parser : public AsyncWrap, public StreamListener {
 
     num_fields_ = num_values_ = 0;
     headers_completed_ = false;
+    chunk_extensions_nread_ = 0;
     last_message_start_ = uv_hrtime();
     url_.Reset();
     status_message_.Reset();
@@ -526,9 +529,22 @@ class Parser : public AsyncWrap, public StreamListener {
     return 0;
   }
 
-  // Reset nread for the next chunk
+  int on_chunk_extension(const char* at, size_t length) {
+    chunk_extensions_nread_ += length;
+
+    if (chunk_extensions_nread_ > kMaxChunkExtensionsSize) {
+      llhttp_set_error_reason(&parser_,
+          "HPE_CHUNK_EXTENSIONS_OVERFLOW:Chunk extensions overflow");
+      return HPE_USER;
+    }
+
+    return 0;
+  }
+
+  // Reset nread for the next chunk and also reset the extensions counter
   int on_chunk_header() {
     header_nread_ = 0;
+    chunk_extensions_nread_ = 0;
     return 0;
   }
 
@@ -1017,6 +1033,7 @@ class Parser : public AsyncWrap, public StreamListener {
   bool headers_completed_ = false;
   bool pending_pause_ = false;
   uint64_t header_nread_ = 0;
+  uint64_t chunk_extensions_nread_ = 0;
   uint64_t max_http_header_size_;
   uint64_t last_message_start_;
   ConnectionsList* connectionsList_;
@@ -1195,10 +1212,9 @@ const llhttp_settings_t Parser::settings = {
     Proxy<DataCall, &Parser::on_header_value>::Raw,
 
     // on_chunk_extension_name
-    nullptr,
+    Proxy<DataCall, &Parser::on_chunk_extension>::Raw,
     // on_chunk_extension_value
-    nullptr,
-
+    Proxy<DataCall, &Parser::on_chunk_extension>::Raw,
     Proxy<Call, &Parser::on_headers_complete>::Raw,
     Proxy<DataCall, &Parser::on_body>::Raw,
     Proxy<Call, &Parser::on_message_complete>::Raw,
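
The C++ accounting above can be summarized with a small JavaScript model (illustrative only; the real logic lives in Parser::on_chunk_extension and Parser::on_chunk_header, and the names below are invented for the sketch): bytes from both the extension-name and extension-value callbacks count against a single 16 KiB budget, the budget resets at every chunk header, and exceeding it aborts parsing with HPE_CHUNK_EXTENSIONS_OVERFLOW. This is why the third test in the file added below can send three 10,000-byte extensions on separate chunks and still receive a 200.

'use strict';

// Illustrative model of the counter added to src/node_http_parser.cc.
const kMaxChunkExtensionsSize = 16384;

class ChunkExtensionGuard {
  constructor() {
    this.extensionsRead = 0;
  }

  // Corresponds to on_chunk_extension(): every slice of extension data,
  // whether name or value, is added to the same per-chunk counter.
  onChunkExtension(length) {
    this.extensionsRead += length;
    if (this.extensionsRead > kMaxChunkExtensionsSize) {
      throw new Error('HPE_CHUNK_EXTENSIONS_OVERFLOW: Chunk extensions overflow');
    }
  }

  // Corresponds to on_chunk_header(): the budget is reset for each new chunk.
  onChunkHeader() {
    this.extensionsRead = 0;
  }
}

const guard = new ChunkExtensionGuard();
guard.onChunkHeader();
guard.onChunkExtension(10000);  // fine
guard.onChunkHeader();
guard.onChunkExtension(10000);  // fine again: the counter was reset
guard.onChunkHeader();
try {
  guard.onChunkExtension(20000);  // exceeds the 16 KiB cap
} catch (err) {
  console.log(err.message);
}
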
Lines changed: 131 additions & 0 deletions
@@ -0,0 +1,131 @@
+'use strict';
+
+const common = require('../common');
+const http = require('http');
+const net = require('net');
+const assert = require('assert');
+
+// Verify that chunk extensions are limited in size when sent all together.
+{
+  const server = http.createServer((req, res) => {
+    req.on('end', () => {
+      res.writeHead(200, { 'Content-Type': 'text/plain' });
+      res.end('bye');
+    });
+
+    req.resume();
+  });
+
+  server.listen(0, () => {
+    const sock = net.connect(server.address().port);
+    let data = '';
+
+    sock.on('data', (chunk) => data += chunk.toString('utf-8'));
+
+    sock.on('end', common.mustCall(function() {
+      assert.strictEqual(data, 'HTTP/1.1 413 Payload Too Large\r\nConnection: close\r\n\r\n');
+      server.close();
+    }));
+
+    sock.end('' +
+      'GET / HTTP/1.1\r\n' +
+      'Host: localhost:8080\r\n' +
+      'Transfer-Encoding: chunked\r\n\r\n' +
+      '2;' + 'A'.repeat(20000) + '=bar\r\nAA\r\n' +
+      '0\r\n\r\n'
+    );
+  });
+}
+
+// Verify that chunk extensions are limited in size when sent in intervals.
+{
+  const server = http.createServer((req, res) => {
+    req.on('end', () => {
+      res.writeHead(200, { 'Content-Type': 'text/plain' });
+      res.end('bye');
+    });
+
+    req.resume();
+  });
+
+  server.listen(0, () => {
+    const sock = net.connect(server.address().port);
+    let remaining = 20000;
+    let data = '';
+
+    const interval = setInterval(
+      () => {
+        if (remaining > 0) {
+          sock.write('A'.repeat(1000));
+        } else {
+          sock.write('=bar\r\nAA\r\n0\r\n\r\n');
+          clearInterval(interval);
+        }
+
+        remaining -= 1000;
+      },
+      common.platformTimeout(20),
+    ).unref();
+
+    sock.on('data', (chunk) => data += chunk.toString('utf-8'));
+
+    sock.on('end', common.mustCall(function() {
+      assert.strictEqual(data, 'HTTP/1.1 413 Payload Too Large\r\nConnection: close\r\n\r\n');
+      server.close();
+    }));
+
+    sock.write('' +
+      'GET / HTTP/1.1\r\n' +
+      'Host: localhost:8080\r\n' +
+      'Transfer-Encoding: chunked\r\n\r\n' +
+      '2;'
+    );
+  });
+}
+
+// Verify the chunk extensions is correctly reset after a chunk
+{
+  const server = http.createServer((req, res) => {
+    req.on('end', () => {
+      res.writeHead(200, { 'content-type': 'text/plain', 'connection': 'close', 'date': 'now' });
+      res.end('bye');
+    });
+
+    req.resume();
+  });
+
+  server.listen(0, () => {
+    const sock = net.connect(server.address().port);
+    let data = '';
+
+    sock.on('data', (chunk) => data += chunk.toString('utf-8'));
+
+    sock.on('end', common.mustCall(function() {
+      assert.strictEqual(
+        data,
+        'HTTP/1.1 200 OK\r\n' +
+        'content-type: text/plain\r\n' +
+        'connection: close\r\n' +
+        'date: now\r\n' +
+        'Transfer-Encoding: chunked\r\n' +
+        '\r\n' +
+        '3\r\n' +
+        'bye\r\n' +
+        '0\r\n' +
+        '\r\n',
+      );
+
+      server.close();
+    }));
+
+    sock.end('' +
+      'GET / HTTP/1.1\r\n' +
+      'Host: localhost:8080\r\n' +
+      'Transfer-Encoding: chunked\r\n\r\n' +
+      '2;' + 'A'.repeat(10000) + '=bar\r\nAA\r\n' +
+      '2;' + 'A'.repeat(10000) + '=bar\r\nAA\r\n' +
+      '2;' + 'A'.repeat(10000) + '=bar\r\nAA\r\n' +
+      '0\r\n\r\n'
+    );
+  });
+}
