parent 4e38d762bc
commit 54dc49111d
@@ -1,178 +0,0 @@
From 05f6af2f4c85cc99323cfff6149c3d74af661b6d Mon Sep 17 00:00:00 2001
From: Amos Jeffries <yadij@users.noreply.github.com>
Date: Fri, 13 Oct 2023 08:44:16 +0000
Subject: [PATCH] RFC 9112: Improve HTTP chunked encoding compliance (#1498)

---
 src/http/one/Parser.cc          |  8 +-------
 src/http/one/Parser.h           |  4 +---
 src/http/one/TeChunkedParser.cc | 23 ++++++++++++++++++-----
 src/parser/Tokenizer.cc         | 12 ++++++++++++
 src/parser/Tokenizer.h          |  7 +++++++
 5 files changed, 39 insertions(+), 15 deletions(-)

diff --git a/src/http/one/Parser.cc b/src/http/one/Parser.cc
index c78ddd7f0..291ae39f0 100644
--- a/src/http/one/Parser.cc
+++ b/src/http/one/Parser.cc
@@ -65,16 +65,10 @@ Http::One::Parser::DelimiterCharacters()
 void
 Http::One::Parser::skipLineTerminator(Tokenizer &tok) const
 {
-    if (tok.skip(Http1::CrLf()))
-        return;
-
     if (Config.onoff.relaxed_header_parser && tok.skipOne(CharacterSet::LF))
         return;

-    if (tok.atEnd() || (tok.remaining().length() == 1 && tok.remaining().at(0) == '\r'))
-        throw InsufficientInput();
-
-    throw TexcHere("garbage instead of CRLF line terminator");
+    tok.skipRequired("line-terminating CRLF", Http1::CrLf());
 }

 /// all characters except the LF line terminator
diff --git a/src/http/one/Parser.h b/src/http/one/Parser.h
index f83c01a9a..aab895583 100644
--- a/src/http/one/Parser.h
+++ b/src/http/one/Parser.h
@@ -124,9 +124,7 @@ protected:
      * detect and skip the CRLF or (if tolerant) LF line terminator
      * consume from the tokenizer.
      *
-     * \throws exception on bad or InsuffientInput.
-     * \retval true only if line terminator found.
-     * \retval false incomplete or missing line terminator, need more data.
+     * \throws exception on bad or InsufficientInput
      */
     void skipLineTerminator(Tokenizer &) const;

diff --git a/src/http/one/TeChunkedParser.cc b/src/http/one/TeChunkedParser.cc
index 1434100b6..8bdb65abb 100644
--- a/src/http/one/TeChunkedParser.cc
+++ b/src/http/one/TeChunkedParser.cc
@@ -91,6 +91,11 @@ Http::One::TeChunkedParser::parseChunkSize(Tokenizer &tok)
 {
     Must(theChunkSize <= 0); // Should(), really

+    static const SBuf bannedHexPrefixLower("0x");
+    static const SBuf bannedHexPrefixUpper("0X");
+    if (tok.skip(bannedHexPrefixLower) || tok.skip(bannedHexPrefixUpper))
+        throw TextException("chunk starts with 0x", Here());
+
     int64_t size = -1;
     if (tok.int64(size, 16, false) && !tok.atEnd()) {
         if (size < 0)
@@ -121,7 +126,7 @@ Http::One::TeChunkedParser::parseChunkMetadataSuffix(Tokenizer &tok)
     // bad or insufficient input, like in the code below. TODO: Expand up.
     try {
         parseChunkExtensions(tok); // a possibly empty chunk-ext list
-        skipLineTerminator(tok);
+        tok.skipRequired("CRLF after [chunk-ext]", Http1::CrLf());
         buf_ = tok.remaining();
         parsingStage_ = theChunkSize ? Http1::HTTP_PARSE_CHUNK : Http1::HTTP_PARSE_MIME;
         return true;
@@ -132,12 +137,14 @@ Http::One::TeChunkedParser::parseChunkMetadataSuffix(Tokenizer &tok)
     // other exceptions bubble up to kill message parsing
 }

-/// Parses the chunk-ext list (RFC 7230 section 4.1.1 and its Errata #4667):
+/// Parses the chunk-ext list (RFC 9112 section 7.1.1):
 /// chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val ] )
 void
-Http::One::TeChunkedParser::parseChunkExtensions(Tokenizer &tok)
+Http::One::TeChunkedParser::parseChunkExtensions(Tokenizer &callerTok)
 {
     do {
+        auto tok = callerTok;
+
         ParseBws(tok); // Bug 4492: IBM_HTTP_Server sends SP after chunk-size

         if (!tok.skip(';'))
@@ -145,6 +152,7 @@ Http::One::TeChunkedParser::parseChunkExtensions(Tokenizer &tok)

         parseOneChunkExtension(tok);
         buf_ = tok.remaining(); // got one extension
+        callerTok = tok;
     } while (true);
 }

@@ -158,11 +166,14 @@ Http::One::ChunkExtensionValueParser::Ignore(Tokenizer &tok, const SBuf &extName
 /// Parses a single chunk-ext list element:
 /// chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val ] )
 void
-Http::One::TeChunkedParser::parseOneChunkExtension(Tokenizer &tok)
+Http::One::TeChunkedParser::parseOneChunkExtension(Tokenizer &callerTok)
 {
+    auto tok = callerTok;
+
     ParseBws(tok); // Bug 4492: ICAP servers send SP before chunk-ext-name

     const auto extName = tok.prefix("chunk-ext-name", CharacterSet::TCHAR);
+    callerTok = tok; // in case we determine that this is a valueless chunk-ext

     ParseBws(tok);

@@ -176,6 +187,8 @@ Http::One::TeChunkedParser::parseOneChunkExtension(Tokenizer &tok)
         customExtensionValueParser->parse(tok, extName);
     else
         ChunkExtensionValueParser::Ignore(tok, extName);
+
+    callerTok = tok;
 }

 bool
@@ -209,7 +222,7 @@ Http::One::TeChunkedParser::parseChunkEnd(Tokenizer &tok)
     Must(theLeftBodySize == 0); // Should(), really

     try {
-        skipLineTerminator(tok);
+        tok.skipRequired("chunk CRLF", Http1::CrLf());
         buf_ = tok.remaining(); // parse checkpoint
         theChunkSize = 0; // done with the current chunk
         parsingStage_ = Http1::HTTP_PARSE_CHUNK_SZ;
diff --git a/src/parser/Tokenizer.cc b/src/parser/Tokenizer.cc
index edaffd8d3..15df793b8 100644
--- a/src/parser/Tokenizer.cc
+++ b/src/parser/Tokenizer.cc
@@ -147,6 +147,18 @@ Parser::Tokenizer::skipAll(const CharacterSet &tokenChars)
     return success(prefixLen);
 }

+void
+Parser::Tokenizer::skipRequired(const char *description, const SBuf &tokenToSkip)
+{
+    if (skip(tokenToSkip) || tokenToSkip.isEmpty())
+        return;
+
+    if (tokenToSkip.startsWith(buf_))
+        throw InsufficientInput();
+
+    throw TextException(ToSBuf("cannot skip ", description), Here());
+}
+
 bool
 Parser::Tokenizer::skipOne(const CharacterSet &chars)
 {
diff --git a/src/parser/Tokenizer.h b/src/parser/Tokenizer.h
index 7bae1ccbb..3cfa7dd6c 100644
--- a/src/parser/Tokenizer.h
+++ b/src/parser/Tokenizer.h
@@ -115,6 +115,13 @@ public:
      */
     SBuf::size_type skipAll(const CharacterSet &discardables);

+    /** skips a given character sequence (string);
+     * does nothing if the sequence is empty
+     *
+     * \throws exception on mismatching prefix or InsufficientInput
+     */
+    void skipRequired(const char *description, const SBuf &tokenToSkip);
+
     /** Removes a single trailing character from the set.
      *
      * \return whether a character was removed
--
2.25.1
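
The rule the patch above enforces is that chunk-size must be bare hexadecimal digits terminated by a real CRLF: a permissive parser would read the leading 0 of "0x1f" as a complete zero-size chunk and leave the "x" behind as garbage, which is the sort of parser disagreement that chunked-encoding smuggling relies on. A standalone sketch of that rule in plain C++ (not Squid's SBuf/Tokenizer API; parseChunkSizeLine and its CRLF-only ending, with chunk extensions omitted, are illustrative):

#include <cctype>
#include <cstddef>
#include <cstdint>
#include <optional>
#include <string>

// Accept a chunk-size line only when it is bare hex digits followed by CRLF.
std::optional<uint64_t> parseChunkSizeLine(const std::string &line)
{
    if (line.size() >= 2 && line[0] == '0' && (line[1] == 'x' || line[1] == 'X'))
        return std::nullopt; // banned "0x"/"0X" prefix, mirroring the patch

    uint64_t size = 0;
    std::size_t i = 0;
    while (i < line.size() && std::isxdigit(static_cast<unsigned char>(line[i]))) {
        const char c = line[i];
        const int digit = (c >= '0' && c <= '9') ? c - '0'
                        : (c >= 'a' && c <= 'f') ? c - 'a' + 10
                        : c - 'A' + 10;
        size = size * 16 + static_cast<uint64_t>(digit); // overflow checks omitted in this sketch
        ++i;
    }
    if (i == 0)
        return std::nullopt; // at least one hex digit is required

    if (line.compare(i, std::string::npos, "\r\n") != 0)
        return std::nullopt; // anything other than an immediate CRLF is rejected

    return size;
}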
@@ -1,43 +0,0 @@
From 052cf082b0faaef4eaaa4e94119d7a1437aac4a3 Mon Sep 17 00:00:00 2001
From: squidadm <squidadm@users.noreply.github.com>
Date: Wed, 18 Oct 2023 04:50:56 +1300
Subject: [PATCH] Fix stack buffer overflow when parsing Digest Authorization
 (#1517)

The bug was discovered and detailed by Joshua Rogers at
https://megamansec.github.io/Squid-Security-Audit/digest-overflow.html
where it was filed as "Stack Buffer Overflow in Digest Authentication".

---------

Co-authored-by: Alex Bason <nonsleepr@gmail.com>
Co-authored-by: Amos Jeffries <yadij@users.noreply.github.com>
---
 src/auth/digest/Config.cc | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/src/auth/digest/Config.cc b/src/auth/digest/Config.cc
index d42831a55..be9f3c433 100644
--- a/src/auth/digest/Config.cc
+++ b/src/auth/digest/Config.cc
@@ -844,11 +844,15 @@ Auth::Digest::Config::decode(char const *proxy_auth, const HttpRequest *request,
             break;

         case DIGEST_NC:
-            if (value.size() != 8) {
+            if (value.size() == 8) {
+                // for historical reasons, the nc value MUST be exactly 8 bytes
+                static_assert(sizeof(digest_request->nc) == 8 + 1, "bad nc buffer size");
+                xstrncpy(digest_request->nc, value.rawBuf(), value.size() + 1);
+                debugs(29, 9, "Found noncecount '" << digest_request->nc << "'");
+            } else {
                 debugs(29, 9, "Invalid nc '" << value << "' in '" << temp << "'");
+                digest_request->nc[0] = 0;
             }
-            xstrncpy(digest_request->nc, value.rawBuf(), value.size() + 1);
-            debugs(29, 9, "Found noncecount '" << digest_request->nc << "'");
             break;

         case DIGEST_CNONCE:
--
2.25.1
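
The fix above is a bounded-copy pattern: the nonce-count is copied into the fixed 8+1 byte nc buffer only when the value is exactly 8 bytes long, and any other length now clears the buffer instead of writing past it. A standalone sketch with illustrative names (DigestRequestSketch, setNonceCount), not Squid's actual types:

#include <cstddef>
#include <cstring>

struct DigestRequestSketch {
    char nc[8 + 1] = {}; // 8 hex digits plus terminating NUL, as in the patched code
};

void setNonceCount(DigestRequestSketch &req, const char *value, std::size_t len)
{
    static_assert(sizeof(req.nc) == 8 + 1, "bad nc buffer size");
    if (len == 8) {
        std::memcpy(req.nc, value, 8); // length checked first, so the copy cannot overflow
        req.nc[8] = '\0';
    } else {
        req.nc[0] = '\0'; // reject wrong-sized values instead of copying them
    }
}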
@@ -1,46 +0,0 @@
From c67bf049871a49e9871efe50b230a7f37b7039f6 Mon Sep 17 00:00:00 2001
From: Alex Rousskov <rousskov@measurement-factory.com>
Date: Thu, 25 May 2023 02:10:28 +0000
Subject: [PATCH] Fix userinfo percent-encoding (#1367)

%X expects an unsigned int, and that is what we were giving it. However,
to get to the correct unsigned int value from a (signed) char, one has
to cast to an unsigned char (or equivalent) first.

Broken since inception in commit 7b75100.

Also adjusted similar (commented out) ext_edirectory_userip_acl code.
---
 src/acl/external/eDirectory_userip/ext_edirectory_userip_acl.cc | 2 +-
 src/anyp/Uri.cc                                                 | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/acl/external/eDirectory_userip/ext_edirectory_userip_acl.cc b/src/acl/external/eDirectory_userip/ext_edirectory_userip_acl.cc
index dbc20ae54..9028d1562 100644
--- a/src/acl/external/eDirectory_userip/ext_edirectory_userip_acl.cc
+++ b/src/acl/external/eDirectory_userip/ext_edirectory_userip_acl.cc
@@ -1612,7 +1612,7 @@ MainSafe(int argc, char **argv)
         /* BINARY DEBUGGING *
         local_printfx("while() -> bufa[%" PRIuSIZE "]: %s", k, bufa);
         for (i = 0; i < k; ++i)
-            local_printfx("%02X", bufa[i]);
+            local_printfx("%02X", static_cast<unsigned int>(static_cast<unsigned char>(bufa[i])));
         local_printfx("\n");
         * BINARY DEBUGGING */
         /* Check for CRLF */
diff --git a/src/anyp/Uri.cc b/src/anyp/Uri.cc
index a6a5d5d9e..3d19188e9 100644
--- a/src/anyp/Uri.cc
+++ b/src/anyp/Uri.cc
@@ -70,7 +70,7 @@ AnyP::Uri::Encode(const SBuf &buf, const CharacterSet &ignore)
     while (!tk.atEnd()) {
         // TODO: Add Tokenizer::parseOne(void).
         const auto ch = tk.remaining()[0];
-        output.appendf("%%%02X", static_cast<unsigned int>(ch)); // TODO: Optimize using a table
+        output.appendf("%%%02X", static_cast<unsigned int>(static_cast<unsigned char>(ch))); // TODO: Optimize using a table
         (void)tk.skip(ch);

         if (tk.prefix(goodSection, ignore))
--
2.25.1
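
The commit message's reasoning is easy to demonstrate: where plain char is signed (for example, x86 Linux), promoting a negative byte value sign-extends it, so "%02X" prints eight hex digits instead of two; converting through unsigned char first yields the intended byte. A minimal standalone illustration (not Squid code):

#include <cstdio>

int main()
{
    const char ch = '\xE9'; // a high byte, negative when char is signed
    std::printf("%02X\n", static_cast<unsigned int>(ch));
    // prints FFFFFFE9 on signed-char platforms: the broken behavior
    std::printf("%02X\n", static_cast<unsigned int>(static_cast<unsigned char>(ch)));
    // prints E9: the fixed behavior
    return 0;
}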
@@ -0,0 +1,158 @@
diff --git a/src/client_side.cc b/src/client_side.cc
index 4eb6976..63f1b66 100644
--- a/src/client_side.cc
+++ b/src/client_side.cc
@@ -957,7 +957,7 @@ ConnStateData::kick()
      * We are done with the response, and we are either still receiving request
      * body (early response!) or have already stopped receiving anything.
      *
-     * If we are still receiving, then clientParseRequest() below will fail.
+     * If we are still receiving, then parseRequests() below will fail.
      * (XXX: but then we will call readNextRequest() which may succeed and
      * execute a smuggled request as we are not done with the current request).
      *
@@ -977,28 +977,12 @@ ConnStateData::kick()
      * Attempt to parse a request from the request buffer.
      * If we've been fed a pipelined request it may already
      * be in our read buffer.
-     *
-     \par
-     * This needs to fall through - if we're unlucky and parse the _last_ request
-     * from our read buffer we may never re-register for another client read.
      */

-    if (clientParseRequests()) {
-        debugs(33, 3, clientConnection << ": parsed next request from buffer");
-    }
+    parseRequests();

-    /** \par
-     * Either we need to kick-start another read or, if we have
-     * a half-closed connection, kill it after the last request.
-     * This saves waiting for half-closed connections to finished being
-     * half-closed _AND_ then, sometimes, spending "Timeout" time in
-     * the keepalive "Waiting for next request" state.
-     */
-    if (commIsHalfClosed(clientConnection->fd) && pipeline.empty()) {
-        debugs(33, 3, "half-closed client with no pending requests, closing");
-        clientConnection->close();
+    if (!isOpen())
         return;
-    }

     /** \par
      * At this point we either have a parsed request (which we've
@@ -1935,16 +1919,11 @@ ConnStateData::receivedFirstByte()
     resetReadTimeout(Config.Timeout.request);
 }

-/**
- * Attempt to parse one or more requests from the input buffer.
- * Returns true after completing parsing of at least one request [header]. That
- * includes cases where parsing ended with an error (e.g., a huge request).
- */
-bool
-ConnStateData::clientParseRequests()
+/// Attempt to parse one or more requests from the input buffer.
+/// May close the connection.
+void
+ConnStateData::parseRequests()
 {
-    bool parsed_req = false;
-
     debugs(33, 5, HERE << clientConnection << ": attempting to parse");

     // Loop while we have read bytes that are not needed for producing the body
@@ -1989,8 +1968,6 @@ ConnStateData::clientParseRequests()

             processParsedRequest(context);

-            parsed_req = true; // XXX: do we really need to parse everything right NOW ?
-
             if (context->mayUseConnection()) {
                 debugs(33, 3, HERE << "Not parsing new requests, as this request may need the connection");
                 break;
@@ -2003,8 +1980,19 @@ ConnStateData::clientParseRequests()
         }
     }

-    /* XXX where to 'finish' the parsing pass? */
-    return parsed_req;
+    debugs(33, 7, "buffered leftovers: " << inBuf.length());
+
+    if (isOpen() && commIsHalfClosed(clientConnection->fd)) {
+        if (pipeline.empty()) {
+            // we processed what we could parse, and no more data is coming
+            debugs(33, 5, "closing half-closed without parsed requests: " << clientConnection);
+            clientConnection->close();
+        } else {
+            // we parsed what we could, and no more data is coming
+            debugs(33, 5, "monitoring half-closed while processing parsed requests: " << clientConnection);
+            flags.readMore = false; // may already be false
+        }
+    }
 }

 void
@@ -2021,18 +2009,7 @@ ConnStateData::afterClientRead()
     if (pipeline.empty())
         fd_note(clientConnection->fd, "Reading next request");

-    if (!clientParseRequests()) {
-        if (!isOpen())
-            return;
-        // We may get here if the client half-closed after sending a partial
-        // request. See doClientRead() and shouldCloseOnEof().
-        // XXX: This partially duplicates ConnStateData::kick().
-        if (pipeline.empty() && commIsHalfClosed(clientConnection->fd)) {
-            debugs(33, 5, clientConnection << ": half-closed connection, no completed request parsed, connection closing.");
-            clientConnection->close();
-            return;
-        }
-    }
+    parseRequests();

     if (!isOpen())
         return;
@@ -3789,7 +3766,7 @@ ConnStateData::notePinnedConnectionBecameIdle(PinnedIdleContext pic)
         startPinnedConnectionMonitoring();

     if (pipeline.empty())
-        kick(); // in case clientParseRequests() was blocked by a busy pic.connection
+        kick(); // in case parseRequests() was blocked by a busy pic.connection
 }

 /// Forward future client requests using the given server connection.
diff --git a/src/client_side.h b/src/client_side.h
index 2793673..7c8d86b 100644
--- a/src/client_side.h
+++ b/src/client_side.h
@@ -93,7 +93,6 @@ public:
     virtual void doneWithControlMsg();

     /// Traffic parsing
-    bool clientParseRequests();
     void readNextRequest();

     /// try to make progress on a transaction or read more I/O
@@ -422,6 +421,7 @@ private:

     void checkLogging();

+    void parseRequests();
     void clientAfterReadingRequests();
     bool concurrentRequestQueueFilled() const;

diff --git a/src/tests/stub_client_side.cc b/src/tests/stub_client_side.cc
index acf61c4..b1d82bf 100644
--- a/src/tests/stub_client_side.cc
+++ b/src/tests/stub_client_side.cc
@@ -14,7 +14,7 @@
 #include "tests/STUB.h"

 #include "client_side.h"
-bool ConnStateData::clientParseRequests() STUB_RETVAL(false)
+void ConnStateData::parseRequests() STUB
 void ConnStateData::readNextRequest() STUB
 bool ConnStateData::isOpen() const STUB_RETVAL(false)
 void ConnStateData::kick() STUB