Patch: make the maximum HTTP header size configurable

Adds a --max-http-header-size command-line option (default: 8 KB, also
accepted via NODE_OPTIONS), wires it through both the legacy and llhttp
parsers, and updates the docs, man page, and sequential tests.
 doc/api/cli.md                                    |   8
 doc/node.1                                        |   3
 src/node_http_parser_impl.h                       |  18
 src/node_options.cc                               |   4
 src/node_options.h                                |   1
 test/sequential/test-http-max-http-headers.js     |  69
 test/sequential/test-set-http-max-http-headers.js | 109
 7 files changed, 184 insertions(+), 28 deletions(-)
diff --git a/doc/api/cli.md b/doc/api/cli.md
index 22e087f317..97cfea239e 100644
--- a/doc/api/cli.md
+++ b/doc/api/cli.md
@@ -197,6 +197,13 @@ added: v9.0.0
Specify the `file` of the custom [experimental ECMAScript Module][] loader.
+### `--max-http-header-size=size`
+<!-- YAML
+added: REPLACEME
+-->
+
+Specify the maximum size, in bytes, of HTTP headers. Defaults to 8KB.
+
### `--napi-modules`
<!-- YAML
added: v7.10.0
@@ -620,6 +627,7 @@ Node.js options that are allowed are:
- `--inspect-brk`
- `--inspect-port`
- `--loader`
+- `--max-http-header-size`
- `--napi-modules`
- `--no-deprecation`
- `--no-force-async-hooks-checks`
diff --git a/doc/node.1 b/doc/node.1
index ca16c78a51..66933066bc 100644
--- a/doc/node.1
+++ b/doc/node.1
@@ -139,6 +139,9 @@ Specify the
as a custom loader, to load
.Fl -experimental-modules .
.
+.It Fl -max-http-header-size Ns = Ns Ar size
+Specify the maximum size of HTTP headers in bytes. Defaults to 8KB.
+.
.It Fl -napi-modules
This option is a no-op.
It is kept for compatibility.
diff --git a/src/node_http_parser_impl.h b/src/node_http_parser_impl.h
index 4233ccbfca..9274f9dae0 100644
--- a/src/node_http_parser_impl.h
+++ b/src/node_http_parser_impl.h
@@ -830,7 +830,7 @@ class Parser : public AsyncWrap, public StreamListener {
int TrackHeader(size_t len) {
#ifdef NODE_EXPERIMENTAL_HTTP
header_nread_ += len;
- if (header_nread_ >= kMaxHeaderSize) {
+ if (header_nread_ >= per_process_opts->max_http_header_size) {
llhttp_set_error_reason(&parser_, "HPE_HEADER_OVERFLOW:Header overflow");
return HPE_USER;
}
@@ -892,9 +892,6 @@ class Parser : public AsyncWrap, public StreamListener {
typedef int (Parser::*DataCall)(const char* at, size_t length);
static const parser_settings_t settings;
-#ifdef NODE_EXPERIMENTAL_HTTP
- static const uint64_t kMaxHeaderSize = 8 * 1024;
-#endif /* NODE_EXPERIMENTAL_HTTP */
};
const parser_settings_t Parser::settings = {
@@ -916,6 +913,14 @@ const parser_settings_t Parser::settings = {
};
+#ifndef NODE_EXPERIMENTAL_HTTP
+void InitMaxHttpHeaderSizeOnce() {
+ const uint32_t max_http_header_size = per_process_opts->max_http_header_size;
+ http_parser_set_max_header_size(max_http_header_size);
+}
+#endif /* NODE_EXPERIMENTAL_HTTP */
+
+
void InitializeHttpParser(Local<Object> target,
Local<Value> unused,
Local<Context> context,
@@ -965,6 +970,11 @@ void InitializeHttpParser(Local<Object> target,
target->Set(env->context(),
FIXED_ONE_BYTE_STRING(env->isolate(), "HTTPParser"),
t->GetFunction(env->context()).ToLocalChecked()).FromJust();
+
+#ifndef NODE_EXPERIMENTAL_HTTP
+ static uv_once_t init_once = UV_ONCE_INIT;
+ uv_once(&init_once, InitMaxHttpHeaderSizeOnce);
+#endif /* NODE_EXPERIMENTAL_HTTP */
}
} // anonymous namespace
diff --git a/src/node_options.cc b/src/node_options.cc
index d4cc19eb02..117bff4a94 100644
--- a/src/node_options.cc
+++ b/src/node_options.cc
@@ -259,6 +259,10 @@ PerProcessOptionsParser::PerProcessOptionsParser() {
kAllowedInEnvironment);
AddAlias("--trace-events-enabled", {
"--trace-event-categories", "v8,node,node.async_hooks" });
+ AddOption("--max-http-header-size",
+ "set the maximum size of HTTP headers (default: 8KB)",
+ &PerProcessOptions::max_http_header_size,
+ kAllowedInEnvironment);
AddOption("--v8-pool-size",
"set V8's thread pool size",
&PerProcessOptions::v8_thread_pool_size,
diff --git a/src/node_options.h b/src/node_options.h
index f5489900b4..ae7d7a4be5 100644
--- a/src/node_options.h
+++ b/src/node_options.h
@@ -151,6 +151,7 @@ class PerProcessOptions : public Options {
std::string title;
std::string trace_event_categories;
std::string trace_event_file_pattern = "node_trace.${rotation}.log";
+ uint64_t max_http_header_size = 8 * 1024;
int64_t v8_thread_pool_size = 4;
bool zero_fill_all_buffers = false;
diff --git a/test/sequential/test-http-max-http-headers.js b/test/sequential/test-http-max-http-headers.js
index f1be923e22..b91135a61a 100644
--- a/test/sequential/test-http-max-http-headers.js
+++ b/test/sequential/test-http-max-http-headers.js
@@ -4,10 +4,14 @@ const common = require('../common');
const assert = require('assert');
const http = require('http');
const net = require('net');
-const MAX = 8 * 1024; // 8KB
+const MAX = +(process.argv[2] || 8 * 1024); // Command line option, or 8KB.
const { getOptionValue } = require('internal/options');
+console.log('pid is', process.pid);
+console.log('max header size is', getOptionValue('--max-http-header-size'));
+console.log('current http parser is', getOptionValue('--http-parser'));
+
// Verify that we cannot receive more than 8KB of headers.
function once(cb) {
@@ -38,19 +42,15 @@ function fillHeaders(headers, currentSize, valid = false) {
// Generate valid headers
if (valid) {
- // TODO(mcollina): understand why -9 is needed instead of -1
- headers = headers.slice(0, -9);
+ // TODO(mcollina): understand why -32 is needed instead of -1
+ headers = headers.slice(0, -32);
}
return headers + '\r\n\r\n';
}
-const timeout = common.platformTimeout(10);
-
function writeHeaders(socket, headers) {
const array = [];
-
- // This is off from 1024 so that \r\n does not get split
- const chunkSize = 1000;
+ const chunkSize = 100;
let last = 0;
for (let i = 0; i < headers.length / chunkSize; i++) {
@@ -65,19 +65,25 @@ function writeHeaders(socket, headers) {
next();
function next() {
- if (socket.write(array.shift())) {
- if (array.length === 0) {
- socket.end();
- } else {
- setTimeout(next, timeout);
- }
+ if (socket.destroyed) {
+ console.log('socket was destroyed early, data left to write:',
+ array.join('').length);
+ return;
+ }
+
+ const chunk = array.shift();
+
+ if (chunk) {
+ console.log('writing chunk of size', chunk.length);
+ socket.write(chunk, next);
} else {
- socket.once('drain', next);
+ socket.end();
}
}
}
function test1() {
+ console.log('test1');
let headers =
'HTTP/1.1 200 OK\r\n' +
'Content-Length: 0\r\n' +
@@ -92,6 +98,9 @@ function test1() {
writeHeaders(sock, headers);
sock.resume();
});
+
+ // The socket might error but that's ok
+ sock.on('error', () => {});
});
server.listen(0, common.mustCall(() => {
@@ -100,17 +109,17 @@ function test1() {
client.on('error', common.mustCall((err) => {
assert.strictEqual(err.code, 'HPE_HEADER_OVERFLOW');
- server.close();
- setImmediate(test2);
+ server.close(test2);
}));
}));
}
const test2 = common.mustCall(() => {
+ console.log('test2');
let headers =
'GET / HTTP/1.1\r\n' +
'Host: localhost\r\n' +
- 'Agent: node\r\n' +
+ 'Agent: nod2\r\n' +
'X-CRASH: ';
// /, Host, localhost, Agent, node, X-CRASH, a...
@@ -119,7 +128,7 @@ const test2 = common.mustCall(() => {
const server = http.createServer(common.mustNotCall());
- server.on('clientError', common.mustCall((err) => {
+ server.once('clientError', common.mustCall((err) => {
assert.strictEqual(err.code, 'HPE_HEADER_OVERFLOW');
}));
@@ -131,34 +140,46 @@ const test2 = common.mustCall(() => {
});
finished(client, common.mustCall((err) => {
- server.close();
- setImmediate(test3);
+ server.close(test3);
}));
}));
});
const test3 = common.mustCall(() => {
+ console.log('test3');
let headers =
'GET / HTTP/1.1\r\n' +
'Host: localhost\r\n' +
- 'Agent: node\r\n' +
+ 'Agent: nod3\r\n' +
'X-CRASH: ';
// /, Host, localhost, Agent, node, X-CRASH, a...
const currentSize = 1 + 4 + 9 + 5 + 4 + 7;
headers = fillHeaders(headers, currentSize, true);
+ console.log('writing', headers.length);
+
const server = http.createServer(common.mustCall((req, res) => {
- res.end('hello world');
- setImmediate(server.close.bind(server));
+ res.end('hello from test3 server');
+ server.close();
}));
+ server.on('clientError', (err) => {
+ console.log(err.code);
+ if (err.code === 'HPE_HEADER_OVERFLOW') {
+ console.log(err.rawPacket.toString('hex'));
+ }
+ });
+ server.on('clientError', common.mustNotCall());
+
server.listen(0, common.mustCall(() => {
const client = net.connect(server.address().port);
client.on('connect', () => {
writeHeaders(client, headers);
client.resume();
});
+
+ client.pipe(process.stdout);
}));
});
diff --git a/test/sequential/test-set-http-max-http-headers.js b/test/sequential/test-set-http-max-http-headers.js
new file mode 100644
index 0000000000..657f9948be
--- /dev/null
+++ b/test/sequential/test-set-http-max-http-headers.js
@@ -0,0 +1,109 @@
+'use strict';
+
+const common = require('../common');
+const assert = require('assert');
+const { spawn } = require('child_process');
+const path = require('path');
+const testName = path.join(__dirname, 'test-http-max-http-headers.js');
+const parsers = ['legacy', 'llhttp'];
+
+const timeout = common.platformTimeout(100);
+
+const tests = [];
+
+function test(fn) {
+ tests.push(fn);
+}
+
+parsers.forEach((parser) => {
+ test(function(cb) {
+ console.log('running subtest expecting failure');
+
+ // Validate that the test fails if the max header size is too small.
+ const args = ['--expose-internals',
+ `--http-parser=${parser}`,
+ '--max-http-header-size=1024',
+ testName];
+ const cp = spawn(process.execPath, args, { stdio: 'inherit' });
+
+ cp.on('close', common.mustCall((code, signal) => {
+ assert.strictEqual(code, 1);
+ assert.strictEqual(signal, null);
+ cb();
+ }));
+ });
+
+ test(function(cb) {
+ console.log('running subtest expecting success');
+
+ const env = Object.assign({}, process.env, {
+ NODE_DEBUG: 'http'
+ });
+
+ // Validate that the test passes when the subtest is told (via argv)
+ // to generate headers that fit within the same 1024-byte limit.
+ const args = ['--expose-internals',
+ `--http-parser=${parser}`,
+ '--max-http-header-size=1024',
+ testName,
+ '1024'];
+ const cp = spawn(process.execPath, args, {
+ env,
+ stdio: 'inherit'
+ });
+
+ cp.on('close', common.mustCall((code, signal) => {
+ assert.strictEqual(code, 0);
+ assert.strictEqual(signal, null);
+ cb();
+ }));
+ });
+
+ // Next, repeat the same checks using NODE_OPTIONS if it is supported. NOTE(review): this guard looks inverted — node_without_node_options is truthy when NODE_OPTIONS is UNSUPPORTED, so these subtests appear to be skipped on normal builds; verify.
+ if (process.config.variables.node_without_node_options) {
+ const env = Object.assign({}, process.env, {
+ NODE_OPTIONS: `--http-parser=${parser} --max-http-header-size=1024`
+ });
+
+ test(function(cb) {
+ console.log('running subtest expecting failure');
+
+ // Validate that the test fails if the max header size is too small.
+ const args = ['--expose-internals', testName];
+ const cp = spawn(process.execPath, args, { env, stdio: 'inherit' });
+
+ cp.on('close', common.mustCall((code, signal) => {
+ assert.strictEqual(code, 1);
+ assert.strictEqual(signal, null);
+ cb();
+ }));
+ });
+
+ test(function(cb) {
+ // Validate that the test now passes if the same limit
+ // becomes large enough.
+ const args = ['--expose-internals', testName, '1024'];
+ const cp = spawn(process.execPath, args, { env, stdio: 'inherit' });
+
+ cp.on('close', common.mustCall((code, signal) => {
+ assert.strictEqual(code, 0);
+ assert.strictEqual(signal, null);
+ cb();
+ }));
+ });
+ }
+});
+
+function runTest() {
+ const fn = tests.shift();
+
+ if (!fn) {
+ return;
+ }
+
+ fn(() => {
+ setTimeout(runTest, timeout);
+ });
+}
+
+runTest();