Compare commits
No commits in common. 'c9' and 'i8c' have entirely different histories.
@@ -1 +1 @@
-SOURCES/http-parser-2.9.4.tar.gz
+SOURCES/http-parser-2.8.0.tar.gz
@@ -1 +1 @@
-8df5277feefe79f3d4472b8f2c5ca9224b2221dd SOURCES/http-parser-2.9.4.tar.gz
+deb0ce507a267a592e0f70dee45c62ce53b6a1a6 SOURCES/http-parser-2.8.0.tar.gz
@@ -1,79 +0,0 @@
From d1bb7a564e0f92ef2081d3af8b4b7f85a307c38f Mon Sep 17 00:00:00 2001
From: Edward Thomson <ethomson@edwardthomson.com>
Date: Fri, 14 Jun 2019 17:37:22 +0100
Subject: [PATCH] url: treat empty port as default

When parsing URLs, treat an empty port (eg `http://hostname:/`) as if it
were unspecified. RFC 3986 says:

> URI producers and normalizers SHOULD omit the port component and its
> ":" delimiter if port is empty or if its value would be the same as
> that of the scheme's default.

(Emphasis on the "SHOULD" is mine.) This indicates that URIs MAY be
produced with an empty port and the `:` delimiter.

Thus, we stop failing if we end host parsing at the port delimiter.
---
http_parser.c | 1 -
test.c | 25 +++++++++++++++++++------
2 files changed, 19 insertions(+), 7 deletions(-)

diff --git a/http_parser.c b/http_parser.c
index 4896385..7e268d8 100644
--- a/http_parser.c
+++ b/http_parser.c
@@ -2326,7 +2326,6 @@ http_parse_host(const char * buf, struct http_parser_url *u, int found_at) {
case s_http_host_v6:
case s_http_host_v6_zone_start:
case s_http_host_v6_zone:
- case s_http_host_port_start:
case s_http_userinfo:
case s_http_userinfo_start:
return 1;
diff --git a/test.c b/test.c
index 0140a18..54eca61 100644
--- a/test.c
+++ b/test.c
@@ -2825,6 +2825,25 @@ const struct url_test url_tests[] =
,.rv=0
}

+, {.name="proxy empty port"
+ ,.url="http://hostname:/"
+ ,.is_connect=0
+ ,.u=
+ {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH)
+ ,.port=0
+ ,.field_data=
+ {{ 0, 4 } /* UF_SCHEMA */
+ ,{ 7, 8 } /* UF_HOST */
+ ,{ 0, 0 } /* UF_PORT */
+ ,{ 16, 1 } /* UF_PATH */
+ ,{ 0, 0 } /* UF_QUERY */
+ ,{ 0, 0 } /* UF_FRAGMENT */
+ ,{ 0, 0 } /* UF_USERINFO */
+ }
+ }
+ ,.rv=0
+ }
+
, {.name="CONNECT request"
,.url="hostname:443"
,.is_connect=1
@@ -3059,12 +3078,6 @@ const struct url_test url_tests[] =
,.rv=1
}

-, {.name="proxy empty port"
- ,.url="http://hostname:/"
- ,.is_connect=0
- ,.rv=1
- }
-
, {.name="CONNECT with basic auth"
,.url="a:b@hostname:443"
,.is_connect=1
--
2.25.1
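A minimal sketch (not part of the patches in this compare) of how the empty-port behaviour that this removed patch introduced can be exercised through http-parser's public URL API; the `hostname` URL is illustrative, and the result depends on whether the build uses the 2.9.4 or the 2.8.0 tarball:

```c
#include <stdio.h>
#include <string.h>
#include "http_parser.h"

/* Parse a URL with an empty port and report which fields were set.
 * With the patch above (2.9.4) the URL parses and UF_PORT stays unset;
 * without it (2.8.0 base) http_parser_parse_url rejects the URL. */
int main(void) {
  const char *url = "http://hostname:/";
  struct http_parser_url u;

  http_parser_url_init(&u);
  if (http_parser_parse_url(url, strlen(url), 0, &u) != 0) {
    printf("parse failed (empty port rejected)\n");
    return 1;
  }
  printf("host set: %d, port set: %d\n",
         !!(u.field_set & (1 << UF_HOST)),
         !!(u.field_set & (1 << UF_PORT)));
  return 0;
}
```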
@@ -0,0 +1,82 @@
From 05da7fb51cda374ae351829f67018924f931f18b Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Tue, 18 Feb 2020 09:10:18 -0300
Subject: [PATCH] CVE-2018-12121

---
http_parser.c | 15 +++++++++++----
http_parser.h | 3 +++
2 files changed, 14 insertions(+), 4 deletions(-)

diff --git a/http_parser.c b/http_parser.c
index f9991c3..aef4437 100644
--- a/http_parser.c
+++ b/http_parser.c
@@ -25,6 +25,8 @@
#include <string.h>
#include <limits.h>

+static uint32_t max_header_size = HTTP_MAX_HEADER_SIZE;
+
#ifndef ULLONG_MAX
# define ULLONG_MAX ((uint64_t) -1) /* 2^64-1 */
#endif
@@ -137,20 +139,20 @@ do { \
} while (0)

/* Don't allow the total size of the HTTP headers (including the status
- * line) to exceed HTTP_MAX_HEADER_SIZE. This check is here to protect
+ * line) to exceed max_header_size. This check is here to protect
* embedders against denial-of-service attacks where the attacker feeds
* us a never-ending header that the embedder keeps buffering.
*
* This check is arguably the responsibility of embedders but we're doing
* it on the embedder's behalf because most won't bother and this way we
- * make the web a little safer. HTTP_MAX_HEADER_SIZE is still far bigger
+ * make the web a little safer. max_header_size is still far bigger
* than any reasonable request or response so this should never affect
* day-to-day operation.
*/
#define COUNT_HEADER_SIZE(V) \
do { \
parser->nread += (V); \
- if (UNLIKELY(parser->nread > (HTTP_MAX_HEADER_SIZE))) { \
+ if (UNLIKELY(parser->nread > (max_header_size))) { \
SET_ERRNO(HPE_HEADER_OVERFLOW); \
goto error; \
} \
@@ -1471,7 +1473,7 @@ reexecute:
const char* p_lf;
size_t limit = data + len - p;

- limit = MIN(limit, HTTP_MAX_HEADER_SIZE);
+ limit = MIN(limit, max_header_size);

p_cr = (const char*) memchr(p, CR, limit);
p_lf = (const char*) memchr(p, LF, limit);
@@ -2438,3 +2440,8 @@ http_parser_version(void) {
HTTP_PARSER_VERSION_MINOR * 0x00100 |
HTTP_PARSER_VERSION_PATCH * 0x00001;
}
+
+void
+http_parser_set_max_header_size(uint32_t size) {
+ max_header_size = size;
+}
diff --git a/http_parser.h b/http_parser.h
index 1fbf30e..ea7bafe 100644
--- a/http_parser.h
+++ b/http_parser.h
@@ -427,6 +427,9 @@ void http_parser_pause(http_parser *parser, int paused);
/* Checks if this is the final chunk of the body. */
int http_body_is_final(const http_parser *parser);

+/* Change the maximum header size provided at compile time. */
+void http_parser_set_max_header_size(uint32_t size);
+
#ifdef __cplusplus
}
#endif
--
2.18.2
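A short usage sketch for the `http_parser_set_max_header_size()` entry point this patch adds; the 1 KiB limit and the filler header are arbitrary values chosen only to trigger the overflow check:

```c
#include <stdio.h>
#include <string.h>
#include "http_parser.h"

/* Lower the header-size limit at runtime instead of recompiling with
 * a different HTTP_MAX_HEADER_SIZE, then feed headers that exceed it. */
int main(void) {
  http_parser parser;
  http_parser_settings settings;
  char buf[2048];
  size_t len, parsed;

  http_parser_set_max_header_size(1024);  /* 1 KiB instead of the default */

  http_parser_settings_init(&settings);
  http_parser_init(&parser, HTTP_REQUEST);

  /* Build a request whose headers exceed the new 1 KiB limit. */
  len = (size_t) snprintf(buf, sizeof(buf),
                          "GET / HTTP/1.1\r\nX-Filler: %01500d\r\n\r\n", 0);
  parsed = http_parser_execute(&parser, &settings, buf, len);

  if (HTTP_PARSER_ERRNO(&parser) == HPE_HEADER_OVERFLOW)
    printf("header overflow reported after %zu bytes\n", parsed);
  return 0;
}
```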
@@ -0,0 +1,121 @@
From 5756942f51426a24add619377da15b18ecae91ef Mon Sep 17 00:00:00 2001
From: Ben Noordhuis <info@bnoordhuis.nl>
Date: Tue, 27 Mar 2018 16:45:33 +0200
Subject: [PATCH] deps: reject interior blanks in Content-Length
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Original commit message follows:

Before this commit `Content-Length: 4 2` was accepted as a valid
header and recorded as `parser->content_length = 42`. Now it is
a parse error that fails with error `HPE_INVALID_CONTENT_LENGTH`.

Downstream users that inspect `parser->content_length` and naively
parse the string value using `strtoul()` might get confused by the
discrepancy between the two values. Resolve that by simply not
letting it happen.

Fixes: https://github.com/nodejs-private/security/issues/178
PR-URL: https://github.com/nodejs-private/http-parser-private/pull/1
Reviewed-By: Сковорода Никита Андреевич <chalkerx@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Evan Lucas <evanlucas@me.com>
Reviewed-By: Fedor Indutny <fedor.indutny@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Rod Vagg <rod@vagg.org>
---
http_parser.c | 19 ++++++++++++++++++-
test.c | 21 +++++++++++++++++++++
2 files changed, 39 insertions(+), 1 deletion(-)

diff --git a/http_parser.c b/http_parser.c
index 5b5657b..f9991c3 100644
--- a/http_parser.c
+++ b/http_parser.c
@@ -370,6 +370,8 @@ enum header_states

, h_connection
, h_content_length
+ , h_content_length_num
+ , h_content_length_ws
, h_transfer_encoding
, h_upgrade

@@ -1406,6 +1408,7 @@ reexecute:

parser->flags |= F_CONTENTLENGTH;
parser->content_length = ch - '0';
+ parser->header_state = h_content_length_num;
break;

case h_connection:
@@ -1493,10 +1496,18 @@ reexecute:
break;

case h_content_length:
+ if (ch == ' ') break;
+ h_state = h_content_length_num;
+ /* FALLTHROUGH */
+
+ case h_content_length_num:
{
uint64_t t;

- if (ch == ' ') break;
+ if (ch == ' ') {
+ h_state = h_content_length_ws;
+ break;
+ }

if (UNLIKELY(!IS_NUM(ch))) {
SET_ERRNO(HPE_INVALID_CONTENT_LENGTH);
@@ -1519,6 +1530,12 @@ reexecute:
break;
}

+ case h_content_length_ws:
+ if (ch == ' ') break;
+ SET_ERRNO(HPE_INVALID_CONTENT_LENGTH);
+ parser->header_state = h_state;
+ goto error;
+
/* Transfer-Encoding: chunked */
case h_matching_transfer_encoding_chunked:
parser->index++;
diff --git a/test.c b/test.c
index bc4e664..cb445ce 100644
--- a/test.c
+++ b/test.c
@@ -4168,6 +4168,27 @@ main (void)
test_invalid_header_field_token_error(HTTP_RESPONSE);
test_invalid_header_field_content_error(HTTP_RESPONSE);

+ test_simple_type(
+ "POST / HTTP/1.1\r\n"
+ "Content-Length: 42 \r\n" // Note the surrounding whitespace.
+ "\r\n",
+ HPE_OK,
+ HTTP_REQUEST);
+
+ test_simple_type(
+ "POST / HTTP/1.1\r\n"
+ "Content-Length: 4 2\r\n"
+ "\r\n",
+ HPE_INVALID_CONTENT_LENGTH,
+ HTTP_REQUEST);
+
+ test_simple_type(
+ "POST / HTTP/1.1\r\n"
+ "Content-Length: 13 37\r\n"
+ "\r\n",
+ HPE_INVALID_CONTENT_LENGTH,
+ HTTP_REQUEST);
+
//// RESPONSES

test_simple_type("HTP/1.1 200 OK\r\n\r\n", HPE_INVALID_VERSION, HTTP_RESPONSE);
--
2.18.2
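A minimal sketch of the same check from an embedder's point of view, assuming a build linked against the patched `http_parser.c`; with the unpatched 2.8.0 base the request below is accepted and `content_length` is silently recorded as 42:

```c
#include <stdio.h>
#include <string.h>
#include "http_parser.h"

/* Feed a request whose Content-Length contains an interior blank and
 * report the parser errno; the patch makes this HPE_INVALID_CONTENT_LENGTH. */
int main(void) {
  const char *req =
      "POST / HTTP/1.1\r\n"
      "Content-Length: 4 2\r\n"
      "\r\n";
  http_parser parser;
  http_parser_settings settings;

  http_parser_settings_init(&settings);
  http_parser_init(&parser, HTTP_REQUEST);
  http_parser_execute(&parser, &settings, req, strlen(req));

  printf("errno: %s\n", http_errno_name(HTTP_PARSER_ERRNO(&parser)));
  return 0;
}
```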
@@ -0,0 +1,450 @@
From b41d69bedcdbb8fe0cd790d0bcccbb457d6170d3 Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Wed, 26 Feb 2020 17:03:26 -0300
Subject: [PATCH] CVE-2019-15605 - HTTP request smuggling

Upstream: https://github.com/nodejs/http-parser/commit/7d5c99d09f6743b055d53fc3f642746d9801479b

Support multi-coding Transfer-Encoding

`Transfer-Encoding` header might have multiple codings in it. Even
though llhttp cares only about `chunked`, it must check that `chunked`
is the last coding (if present).

ABNF from RFC 7230:

```
Transfer-Encoding = *( "," OWS ) transfer-coding *( OWS "," [ OWS
transfer-coding ] )
transfer-coding = "chunked" / "compress" / "deflate" / "gzip" /
transfer-extension
transfer-extension = token *( OWS ";" OWS transfer-parameter )
transfer-parameter = token BWS "=" BWS ( token / quoted-string )
```

However, if `chunked` is not last - llhttp must assume that the encoding
and size of the body is unknown (according to 3.3.3 of RFC 7230) and
read the response until EOF. For request - the error must be raised for
an unknown `Transfer-Encoding`.

Furthermore, 3.3.3 of RFC 7230 explicitly states that presence of both
`Transfer-Encoding` and `Content-Length` indicates the smuggling attack
and "ought to be handled as an error".

For the lenient mode:

* Unknown `Transfer-Encoding` in requests is not an error and request
body is simply read until EOF (end of connection)
* Only `Transfer-Encoding: chunked` together with `Content-Length` would
result an error (just like before the patch)

PR-URL: nodejs-private/http-parser-private#4
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Reviewed-By: Sam Roberts <vieuxtech@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
---
http_parser.c | 110 ++++++++++++++++++++++++++++++++++++++++++++------
http_parser.h | 8 ++--
test.c | 90 +++++++++++++++++++++++++++++++++++++++--
3 files changed, 189 insertions(+), 19 deletions(-)

diff --git a/http_parser.c b/http_parser.c
index aef4437..cd120d8 100644
--- a/http_parser.c
+++ b/http_parser.c
@@ -176,6 +176,22 @@ static const char *method_strings[] =
#undef XX
};

+/* Added for handling CVE-2019-15605. */
+static void reset_flags(http_parser* p)
+{
+ p->flags = 0;
+ p->transfer_encoding = 0;
+}
+
+static void set_transfer_encoding(http_parser* p)
+{
+ p->transfer_encoding = 1;
+}
+
+static int is_transfer_encoding(const http_parser* p)
+{
+ return p->transfer_encoding;
+}

/* Tokens as defined by rfc 2616. Also lowercases them.
* token = 1*<any CHAR except CTLs or separators>
@@ -378,6 +394,7 @@ enum header_states
, h_upgrade

, h_matching_transfer_encoding_chunked
+
, h_matching_connection_token_start
, h_matching_connection_keep_alive
, h_matching_connection_close
@@ -388,6 +405,10 @@ enum header_states
, h_connection_keep_alive
, h_connection_close
, h_connection_upgrade
+
+ /* CVE-2019-15605 */
+ , h_matching_transfer_encoding_token_start
+ , h_matching_transfer_encoding_token
};

enum http_host_state
@@ -722,7 +743,7 @@ reexecute:
{
if (ch == CR || ch == LF)
break;
- parser->flags = 0;
+ reset_flags(parser);
parser->content_length = ULLONG_MAX;

if (ch == 'H') {
@@ -757,7 +778,7 @@ reexecute:

case s_start_res:
{
- parser->flags = 0;
+ reset_flags(parser);
parser->content_length = ULLONG_MAX;

switch (ch) {
@@ -921,7 +942,7 @@ reexecute:
{
if (ch == CR || ch == LF)
break;
- parser->flags = 0;
+ reset_flags(parser);
parser->content_length = ULLONG_MAX;

if (UNLIKELY(!IS_ALPHA(ch))) {
@@ -1313,6 +1334,7 @@ reexecute:
parser->header_state = h_general;
} else if (parser->index == sizeof(TRANSFER_ENCODING)-2) {
parser->header_state = h_transfer_encoding;
+ set_transfer_encoding(parser);
}
break;

@@ -1393,10 +1415,14 @@ reexecute:
if ('c' == c) {
parser->header_state = h_matching_transfer_encoding_chunked;
} else {
- parser->header_state = h_general;
+ parser->header_state = h_matching_transfer_encoding_token;
}
break;

+ /* Multi-value `Transfer-Encoding` header */
+ case h_matching_transfer_encoding_token_start:
+ break;
+
case h_content_length:
if (UNLIKELY(!IS_NUM(ch))) {
SET_ERRNO(HPE_INVALID_CONTENT_LENGTH);
@@ -1539,16 +1565,41 @@ reexecute:
goto error;

/* Transfer-Encoding: chunked */
+ case h_matching_transfer_encoding_token_start:
+ /* looking for 'Transfer-Encoding: chunked' */
+ if ('c' == c) {
+ h_state = h_matching_transfer_encoding_chunked;
+ } else if (STRICT_TOKEN(c)) {
+ /* TODO(indutny): similar code below does this, but why?
+ * At the very least it seems to be inconsistent given that
+ * h_matching_transfer_encoding_token does not check for
+ * `STRICT_TOKEN`
+ */
+ h_state = h_matching_transfer_encoding_token;
+ } else if (c == ' ' || c == '\t') {
+ /* Skip lws */
+ } else {
+ h_state = h_general;
+ }
+ break;
+
case h_matching_transfer_encoding_chunked:
parser->index++;
if (parser->index > sizeof(CHUNKED)-1
|| c != CHUNKED[parser->index]) {
- h_state = h_general;
+ h_state = h_matching_transfer_encoding_token;
} else if (parser->index == sizeof(CHUNKED)-2) {
h_state = h_transfer_encoding_chunked;
}
break;

+ case h_matching_transfer_encoding_token:
+ if (ch == ',') {
+ h_state = h_matching_transfer_encoding_token_start;
+ parser->index = 0;
+ }
+ break;
+
case h_matching_connection_token_start:
/* looking for 'Connection: keep-alive' */
if (c == 'k') {
@@ -1607,7 +1658,7 @@ reexecute:
break;

case h_transfer_encoding_chunked:
- if (ch != ' ') h_state = h_general;
+ if (ch != ' ') h_state = h_matching_transfer_encoding_token;
break;

case h_connection_keep_alive:
@@ -1732,12 +1783,17 @@ reexecute:
REEXECUTE();
}

- /* Cannot use chunked encoding and a content-length header together
- per the HTTP specification. */
- if ((parser->flags & F_CHUNKED) &&
+ /* Cannot use transfer-encoding and a content-length header together
+ per the HTTP specification. (RFC 7230 Section 3.3.3) */
+ if ((is_transfer_encoding(parser)) &&
(parser->flags & F_CONTENTLENGTH)) {
- SET_ERRNO(HPE_UNEXPECTED_CONTENT_LENGTH);
- goto error;
+ /* Allow it for lenient parsing as long as `Transfer-Encoding` is
+ * not `chunked`
+ */
+ if (!lenient || (parser->flags & F_CHUNKED)) {
+ SET_ERRNO(HPE_UNEXPECTED_CONTENT_LENGTH);
+ goto error;
+ }
}

UPDATE_STATE(s_headers_done);
@@ -1811,8 +1867,31 @@ reexecute:
UPDATE_STATE(NEW_MESSAGE());
CALLBACK_NOTIFY(message_complete);
} else if (parser->flags & F_CHUNKED) {
- /* chunked encoding - ignore Content-Length header */
+ /* chunked encoding - ignore Content-Length header,
+ * prepare for a chunk */
UPDATE_STATE(s_chunk_size_start);
+ } else if (is_transfer_encoding(parser)) {
+ if (parser->type == HTTP_REQUEST && !lenient) {
+ /* RFC 7230 3.3.3 */
+
+ /* If a Transfer-Encoding header field
+ * is present in a request and the chunked transfer coding is not
+ * the final encoding, the message body length cannot be determined
+ * reliably; the server MUST respond with the 400 (Bad Request)
+ * status code and then close the connection.
+ */
+ SET_ERRNO(HPE_INVALID_TRANSFER_ENCODING);
+ RETURN(p - data); /* Error */
+ } else {
+ /* RFC 7230 3.3.3 */
+
+ /* If a Transfer-Encoding header field is present in a response and
+ * the chunked transfer coding is not the final encoding, the
+ * message body length is determined by reading the connection until
+ * it is closed by the server.
+ */
+ UPDATE_STATE(s_body_identity_eof);
+ }
} else {
if (parser->content_length == 0) {
/* Content-Length header given but zero: Content-Length: 0\r\n */
@@ -2064,6 +2143,12 @@ http_message_needs_eof (const http_parser *parser)
return 0;
}

+ /* RFC 7230 3.3.3, see `s_headers_almost_done` */
+ if ((is_transfer_encoding(parser)) &&
+ (parser->flags & F_CHUNKED) == 0) {
+ return 1;
+ }
+
if ((parser->flags & F_CHUNKED) || parser->content_length != ULLONG_MAX) {
return 0;
}
@@ -2107,6 +2192,7 @@ http_parser_init (http_parser *parser, enum http_parser_type t)
parser->type = t;
parser->state = (t == HTTP_REQUEST ? s_start_req : (t == HTTP_RESPONSE ? s_start_res : s_start_req_or_res));
parser->http_errno = HPE_OK;
+ reset_flags(parser);
}

void
diff --git a/http_parser.h b/http_parser.h
index ea7bafe..a4841be 100644
--- a/http_parser.h
+++ b/http_parser.h
@@ -275,8 +275,9 @@ enum flags
XX(INVALID_INTERNAL_STATE, "encountered unexpected internal state")\
XX(STRICT, "strict mode assertion failed") \
XX(PAUSED, "parser is paused") \
- XX(UNKNOWN, "an unknown error occurred")
-
+ XX(UNKNOWN, "an unknown error occurred") \
+ XX(INVALID_TRANSFER_ENCODING, \
+ "request has invalid transfer-encoding")

/* Define HPE_* values for each errno value above */
#define HTTP_ERRNO_GEN(n, s) HPE_##n,
@@ -293,7 +294,7 @@ enum http_errno {
struct http_parser {
/** PRIVATE **/
unsigned int type : 2; /* enum http_parser_type */
- unsigned int flags : 8; /* F_* values from 'flags' enum; semi-public */
+ unsigned int flags : 8; /* F_* values from 'flags' enum; semi-public */
unsigned int state : 7; /* enum state from http_parser.c */
unsigned int header_state : 7; /* enum header_state from http_parser.c */
unsigned int index : 7; /* index into current matcher */
@@ -318,6 +319,7 @@ struct http_parser {

/** PUBLIC **/
void *data; /* A pointer to get hook to the "connection" or "socket" object */
+ unsigned int transfer_encoding : 8; /* CVE-2019-15605 */
};


diff --git a/test.c b/test.c
index a1fa0d3..bb83d14 100644
--- a/test.c
+++ b/test.c
@@ -260,7 +260,6 @@ const struct message requests[] =
,.type= HTTP_REQUEST
,.raw= "POST /post_identity_body_world?q=search#hey HTTP/1.1\r\n"
"Accept: */*\r\n"
- "Transfer-Encoding: identity\r\n"
"Content-Length: 5\r\n"
"\r\n"
"World"
@@ -273,10 +272,9 @@ const struct message requests[] =
,.fragment= "hey"
,.request_path= "/post_identity_body_world"
,.request_url= "/post_identity_body_world?q=search#hey"
- ,.num_headers= 3
+ ,.num_headers= 2
,.headers=
{ { "Accept", "*/*" }
- , { "Transfer-Encoding", "identity" }
, { "Content-Length", "5" }
}
,.body= "World"
@@ -1172,6 +1170,61 @@ const struct message requests[] =
,.body= ""
}

+#define POST_MULTI_TE_LAST_CHUNKED 43
+, {.name= "post - multi coding transfer-encoding chunked body"
+ ,.type= HTTP_REQUEST
+ ,.raw= "POST / HTTP/1.1\r\n"
+ "Transfer-Encoding: deflate, chunked\r\n"
+ "\r\n"
+ "1e\r\nall your base are belong to us\r\n"
+ "0\r\n"
+ "\r\n"
+ ,.should_keep_alive= TRUE
+ ,.message_complete_on_eof= FALSE
+ ,.http_major= 1
+ ,.http_minor= 1
+ ,.method= HTTP_POST
+ ,.query_string= ""
+ ,.fragment= ""
+ ,.request_path= "/"
+ ,.request_url= "/"
+ ,.num_headers= 1
+ ,.headers=
+ { { "Transfer-Encoding" , "deflate, chunked" }
+ }
+ ,.body= "all your base are belong to us"
+ ,.num_chunks_complete= 2
+ ,.chunk_lengths= { 0x1e }
+ }
+
+#define POST_MULTI_LINE_TE_LAST_CHUNKED 43
+, {.name= "post - multi coding transfer-encoding chunked body"
+ ,.type= HTTP_REQUEST
+ ,.raw= "POST / HTTP/1.1\r\n"
+ "Transfer-Encoding: deflate,\r\n"
+ " chunked\r\n"
+ "\r\n"
+ "1e\r\nall your base are belong to us\r\n"
+ "0\r\n"
+ "\r\n"
+ ,.should_keep_alive= TRUE
+ ,.message_complete_on_eof= FALSE
+ ,.http_major= 1
+ ,.http_minor= 1
+ ,.method= HTTP_POST
+ ,.query_string= ""
+ ,.fragment= ""
+ ,.request_path= "/"
+ ,.request_url= "/"
+ ,.num_headers= 1
+ ,.headers=
+ { { "Transfer-Encoding" , "deflate, chunked" }
+ }
+ ,.body= "all your base are belong to us"
+ ,.num_chunks_complete= 2
+ ,.chunk_lengths= { 0x1e }
+ }
+
, {.name= NULL } /* sentinel */
};

@@ -1951,6 +2004,29 @@ const struct message responses[] =
,.chunk_lengths= { 2, 2 }
}

+#define HTTP_200_MULTI_TE_NOT_LAST_CHUNKED 28
+, {.name= "HTTP 200 response with `chunked` being *not last* Transfer-Encoding"
+ ,.type= HTTP_RESPONSE
+ ,.raw= "HTTP/1.1 200 OK\r\n"
+ "Transfer-Encoding: chunked, identity\r\n"
+ "\r\n"
+ "2\r\n"
+ "OK\r\n"
+ "0\r\n"
+ "\r\n"
+ ,.should_keep_alive= FALSE
+ ,.message_complete_on_eof= TRUE
+ ,.http_major= 1
+ ,.http_minor= 1
+ ,.status_code= 200
+ ,.response_status= "OK"
+ ,.num_headers= 1
+ ,.headers= { { "Transfer-Encoding", "chunked, identity" }
+ }
+ ,.body= "2\r\nOK\r\n0\r\n\r\n"
+ ,.num_chunks_complete= 0
+ }
+
, {.name= NULL } /* sentinel */
};

@@ -3629,7 +3705,7 @@ test_chunked_content_length_error (int req)
parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf));
assert(parsed == strlen(buf));

- buf = "Transfer-Encoding: chunked\r\nContent-Length: 1\r\n\r\n";
+ buf = "Transfer-Encoding: anything\r\nContent-Length: 1\r\n\r\n";
size_t buflen = strlen(buf);

parsed = http_parser_execute(&parser, &settings_null, buf, buflen);
@@ -4277,6 +4353,12 @@ main (void)
"fooba",
HPE_OK);

+ // Unknown Transfer-Encoding in request
+ test_simple("GET / HTTP/1.1\r\n"
+ "Transfer-Encoding: unknown\r\n"
+ "\r\n",
+ HPE_INVALID_TRANSFER_ENCODING);
+
static const char *all_methods[] = {
"DELETE",
"GET",
--
2.18.2
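A small sketch of the request-side behaviour this patch introduces, assuming a build linked against the patched library (which defines `HPE_INVALID_TRANSFER_ENCODING`); both requests are illustrative only:

```c
#include <stdio.h>
#include <string.h>
#include "http_parser.h"

/* Two requests a smuggling-aware parser refuses in strict (non-lenient)
 * mode: one whose Transfer-Encoding does not end in "chunked", and one
 * carrying both Transfer-Encoding and Content-Length. */
static void parse_and_report(const char *req) {
  http_parser parser;
  http_parser_settings settings;

  http_parser_settings_init(&settings);
  http_parser_init(&parser, HTTP_REQUEST);
  http_parser_execute(&parser, &settings, req, strlen(req));
  printf("%s\n", http_errno_name(HTTP_PARSER_ERRNO(&parser)));
}

int main(void) {
  parse_and_report("GET / HTTP/1.1\r\n"
                   "Transfer-Encoding: unknown\r\n"
                   "\r\n");   /* expected: HPE_INVALID_TRANSFER_ENCODING */
  parse_and_report("POST / HTTP/1.1\r\n"
                   "Transfer-Encoding: chunked\r\n"
                   "Content-Length: 4\r\n"
                   "\r\n");   /* expected: HPE_UNEXPECTED_CONTENT_LENGTH */
  return 0;
}
```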
@@ -0,0 +1,64 @@
From d22c4c1e3ab9ffe5faaa08bacd993c814ca1c92e Mon Sep 17 00:00:00 2001
From: Sergio Correia <scorreia@redhat.com>
Date: Tue, 18 Feb 2020 09:11:05 -0300
Subject: [PATCH] Remove unused functions from test runner.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Upstream: 705e519bf56e5904c761a411b38a66e84bf7fc2d

PR-URL: https://github.com/nodejs-private/http-parser-private/pull/1
Reviewed-By: Сковорода Никита Андреевич <chalkerx@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Evan Lucas <evanlucas@me.com>
Reviewed-By: Fedor Indutny <fedor.indutny@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Rod Vagg <rod@vagg.org>
---
test.c | 14 --------------
1 file changed, 14 deletions(-)

diff --git a/test.c b/test.c
index cb445ce..a1fa0d3 100644
--- a/test.c
+++ b/test.c
@@ -27,9 +27,7 @@
#include <stdarg.h>

#if defined(__APPLE__)
-# undef strlcat
# undef strlncpy
-# undef strlcpy
#endif /* defined(__APPLE__) */

#undef TRUE
@@ -1993,12 +1991,6 @@ strlncat(char *dst, size_t len, const char *src, size_t n)
return slen + dlen;
}

-size_t
-strlcat(char *dst, const char *src, size_t len)
-{
- return strlncat(dst, len, src, (size_t) -1);
-}
-
size_t
strlncpy(char *dst, size_t len, const char *src, size_t n)
{
@@ -2017,12 +2009,6 @@ strlncpy(char *dst, size_t len, const char *src, size_t n)
return slen;
}

-size_t
-strlcpy(char *dst, const char *src, size_t len)
-{
- return strlncpy(dst, len, src, (size_t) -1);
-}
-
int
request_url_cb (http_parser *p, const char *buf, size_t len)
{
--
2.18.2