Files
nodejs4/CVE-2018-12121.patch
Adam Majer 57718cd79b * cli: add --max-http-header-size flag (max_header_size.patch)
+ add maxHeaderSize property (max_header_size.patch)
      (CVE-2018-12121.patch - CVE-2018-12121, bsc#1117626)
    + A timeout of 40 seconds now applies to servers receiving
      HTTP headers. This value can be adjusted with
      server.headersTimeout. Where headers are not completely
      received within this period, the socket is destroyed on
      the next received chunk. In conjunction with
      server.setTimeout(), this aids in protecting against
      excessive resource retention and possible Denial of Service.
      (CVE-2018-12122.patch - CVE-2018-12122, bsc#1117627)
      (CVE-2018-12116.patch - CVE-2018-12116, bsc#1117630)
    (CVE-2018-12123.patch - CVE-2018-12123, bsc#1117629)

OBS-URL: https://build.opensuse.org/package/show/devel:languages:nodejs/nodejs4?expand=0&rev=101
2019-01-09 14:07:18 +00:00

253 lines
7.1 KiB
Diff

ported from:
deps,http: http_parser set max header size to 8KB
CVE-2018-12121
PR-URL: nodejs-private/node-private#143
Ref: nodejs-private/security#139
Ref: nodejs-private/http-parser-private#2
Reviewed-By: Anatoli Papirovski <apapirovski@mac.com>
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Rod Vagg <rod@vagg.org>
Reviewed-By: Anna Henningsen <anna@addaleax.net>
Index: node-v4.9.1/deps/http_parser/http_parser.gyp
===================================================================
--- node-v4.9.1.orig/deps/http_parser/http_parser.gyp
+++ node-v4.9.1/deps/http_parser/http_parser.gyp
@@ -56,7 +56,7 @@
'defines': [ 'HTTP_PARSER_STRICT=0' ],
'include_dirs': [ '.' ],
},
- 'defines': [ 'HTTP_PARSER_STRICT=0' ],
+ 'defines': [ 'HTTP_MAX_HEADER_SIZE=8192', 'HTTP_PARSER_STRICT=0' ],
'sources': [ './http_parser.c', ],
'conditions': [
['OS=="win"', {
@@ -79,7 +79,7 @@
'defines': [ 'HTTP_PARSER_STRICT=1' ],
'include_dirs': [ '.' ],
},
- 'defines': [ 'HTTP_PARSER_STRICT=1' ],
+ 'defines': [ 'HTTP_MAX_HEADER_SIZE=8192', 'HTTP_PARSER_STRICT=1' ],
'sources': [ './http_parser.c', ],
'conditions': [
['OS=="win"', {
Index: node-v4.9.1/src/node_http_parser.cc
===================================================================
--- node-v4.9.1.orig/src/node_http_parser.cc
+++ node-v4.9.1/src/node_http_parser.cc
@@ -611,6 +611,8 @@ class Parser : public AsyncWrap {
size_t nparsed =
http_parser_execute(&parser_, &settings, data, len);
+ enum http_errno err = HTTP_PARSER_ERRNO(&parser_);
+
Save();
// Unassign the 'buffer_' variable
@@ -625,9 +627,7 @@ class Parser : public AsyncWrap {
Local<Integer> nparsed_obj = Integer::New(env()->isolate(), nparsed);
// If there was a parse error in one of the callbacks
// TODO(bnoordhuis) What if there is an error on EOF?
- if (!parser_.upgrade && nparsed != len) {
- enum http_errno err = HTTP_PARSER_ERRNO(&parser_);
-
+ if ((!parser_.upgrade && nparsed != len) || err != HPE_OK) {
Local<Value> e = Exception::Error(env()->parse_error_string());
Local<Object> obj = e->ToObject(env()->isolate());
obj->Set(env()->bytes_parsed_string(), nparsed_obj);
Index: node-v4.9.1/test/parallel/test-http-max-headers-count.js
===================================================================
--- node-v4.9.1.orig/test/parallel/test-http-max-headers-count.js
+++ node-v4.9.1/test/parallel/test-http-max-headers-count.js
@@ -7,14 +7,14 @@ var requests = 0;
var responses = 0;
var headers = {};
-var N = 2000;
+var N = 100;
for (var i = 0; i < N; ++i) {
headers['key' + i] = i;
}
var maxAndExpected = [ // for server
[50, 50],
- [1500, 1500],
+ [1500, 102],
[0, N + 2] // Host and Connection
];
var max = maxAndExpected[requests][0];
@@ -35,7 +35,7 @@ server.maxHeadersCount = max;
server.listen(0, function() {
var maxAndExpected = [ // for client
[20, 20],
- [1200, 1200],
+ [1200, 103],
[0, N + 3] // Connection, Date and Transfer-Encoding
];
doRequest();
Index: node-v4.9.1/test/sequential/test-http-max-http-headers.js
===================================================================
--- /dev/null
+++ node-v4.9.1/test/sequential/test-http-max-http-headers.js
@@ -0,0 +1,155 @@
+'use strict';
+
+const assert = require('assert');
+const common = require('../common');
+const http = require('http');
+const net = require('net');
+const MAX = 8 * 1024; // 8KB
+
+// Verify that we cannot receive more than 8KB of headers.
+
+function once(cb) {
+ let called = false;
+ return () => {
+ if (!called) {
+ called = true;
+ cb();
+ }
+ };
+}
+
+function finished(client, callback) {
+ 'abort error end'.split(' ').forEach((e) => {
+ client.on(e, once(() => setImmediate(callback)));
+ });
+}
+
+function fillHeaders(headers, currentSize, valid) {
+ headers += 'a'.repeat(MAX - headers.length - 3);
+ // Generate valid headers
+ if (valid) {
+ // TODO(mcollina): understand why -9 is needed instead of -1
+ headers = headers.slice(0, -9);
+ }
+ return headers + '\r\n\r\n';
+}
+
+const timeout = common.platformTimeout(10);
+
+function writeHeaders(socket, headers) {
+ const array = [];
+
+ // this is off from 1024 so that \r\n does not get split
+ const chunkSize = 1000;
+ let last = 0;
+
+ for (let i = 0; i < headers.length / chunkSize; i++) {
+ const current = (i + 1) * chunkSize;
+ array.push(headers.slice(last, current));
+ last = current;
+ }
+
+ // safety check we are chunking correctly
+ assert.strictEqual(array.join(''), headers);
+
+ next();
+
+ function next() {
+ if (socket.write(array.shift())) {
+ if (array.length === 0) {
+ socket.end();
+ } else {
+ setTimeout(next, timeout);
+ }
+ } else {
+ socket.once('drain', next);
+ }
+ }
+}
+
+function test1() {
+ let headers =
+ 'HTTP/1.1 200 OK\r\n' +
+ 'Content-Length: 0\r\n' +
+ 'X-CRASH: ';
+
+ // OK, Content-Length, 0, X-CRASH, aaa...
+ const currentSize = 2 + 14 + 1 + 7;
+ headers = fillHeaders(headers, currentSize, false);
+
+ const server = net.createServer((sock) => {
+ sock.once('data', (chunk) => {
+ writeHeaders(sock, headers);
+ sock.resume();
+ });
+ });
+
+ server.listen(0, common.mustCall(() => {
+ const port = server.address().port;
+ const client = http.get({ port: port }, common.mustNotCall(() => {}));
+
+ client.on('error', common.mustCall((err) => {
+ assert.strictEqual(err.code, 'HPE_HEADER_OVERFLOW');
+ server.close();
+ setImmediate(test2);
+ }));
+ }));
+}
+
+const test2 = common.mustCall(() => {
+ let headers =
+ 'GET / HTTP/1.1\r\n' +
+ 'Host: localhost\r\n' +
+ 'Agent: node\r\n' +
+ 'X-CRASH: ';
+
+ // /, Host, localhost, Agent, node, X-CRASH, a...
+ const currentSize = 1 + 4 + 9 + 5 + 4 + 7;
+ headers = fillHeaders(headers, currentSize, false);
+
+ const server = http.createServer(common.mustNotCall());
+
+ server.on('clientError', common.mustCall((err) => {
+ assert.strictEqual(err.code, 'HPE_HEADER_OVERFLOW');
+ }));
+
+ server.listen(0, common.mustCall(() => {
+ const client = net.connect(server.address().port);
+ client.on('connect', () => {
+ writeHeaders(client, headers);
+ client.resume();
+ });
+
+ finished(client, common.mustCall((err) => {
+ server.close();
+ setImmediate(test3);
+ }));
+ }));
+});
+
+const test3 = common.mustCall(() => {
+ let headers =
+ 'GET / HTTP/1.1\r\n' +
+ 'Host: localhost\r\n' +
+ 'Agent: node\r\n' +
+ 'X-CRASH: ';
+
+ // /, Host, localhost, Agent, node, X-CRASH, a...
+ const currentSize = 1 + 4 + 9 + 5 + 4 + 7;
+ headers = fillHeaders(headers, currentSize, true);
+
+ const server = http.createServer(common.mustCall((req, res) => {
+ res.end('hello world');
+ setImmediate(server.close.bind(server));
+ }));
+
+ server.listen(0, common.mustCall(() => {
+ const client = net.connect(server.address().port);
+ client.on('connect', () => {
+ writeHeaders(client, headers);
+ client.resume();
+ });
+ }));
+});
+
+test1();