From 8b6cd901b1fccb7479863171e8996f1a61b6b342 Mon Sep 17 00:00:00 2001 From: Jan Macku Date: Sep 13 2023 13:35:12 +0000 Subject: Resolves: CVE-2023-38039 - fix HTTP headers eat all memory --- diff --git a/0032-curl-7.87.0-CVE-2023-38039.patch b/0032-curl-7.87.0-CVE-2023-38039.patch new file mode 100644 index 0000000..66dca86 --- /dev/null +++ b/0032-curl-7.87.0-CVE-2023-38039.patch @@ -0,0 +1,203 @@ +From e0ffb54555f4f5199613f7d2db8dd16ffb2d33b3 Mon Sep 17 00:00:00 2001 +From: Daniel Stenberg +Date: Wed, 2 Aug 2023 23:34:48 +0200 +Subject: [PATCH] http: return error when receiving too large header set + +To avoid abuse. The limit is set to 300 KB for the accumulated size of +all received HTTP headers for a single response. Incomplete research +suggests that Chrome uses a 256-300 KB limit, while Firefox allows up to +1MB. + +Closes #11582 + +(cherry picked from commit 3ee79c1674fd6f99e8efca52cd7510e08b766770) +Signed-off-by: Jan Macku +--- + lib/c-hyper.c | 13 +++++++------ + lib/http.c | 34 ++++++++++++++++++++++++++++++---- + lib/http.h | 9 +++++++++ + lib/pingpong.c | 4 +++- + lib/urldata.h | 17 ++++++++--------- + 5 files changed, 57 insertions(+), 20 deletions(-) + +diff --git a/lib/c-hyper.c b/lib/c-hyper.c +index 58957934f..8fecf997d 100644 +--- a/lib/c-hyper.c ++++ b/lib/c-hyper.c +@@ -169,8 +169,11 @@ static int hyper_each_header(void *userdata, + } + } + +- data->info.header_size += (long)len; +- data->req.headerbytecount += (long)len; ++ result = Curl_bump_headersize(data, len, FALSE); ++ if(result) { ++ data->state.hresult = result; ++ return HYPER_ITER_BREAK; ++ } + return HYPER_ITER_CONTINUE; + } + +@@ -298,10 +301,8 @@ static CURLcode status_line(struct Curl_easy *data, + if(result) + return result; + } +- data->info.header_size += (long)len; +- data->req.headerbytecount += (long)len; +- data->req.httpcode = http_status; +- return CURLE_OK; ++ result = Curl_bump_headersize(data, len, FALSE); ++ return result; + } + + /* +diff --git a/lib/http.c 
b/lib/http.c +index f177c0ef5..76ccc5480 100644 +--- a/lib/http.c ++++ b/lib/http.c +@@ -3824,6 +3824,29 @@ static CURLcode verify_header(struct Curl_easy *data) + return CURLE_OK; + } + ++CURLcode Curl_bump_headersize(struct Curl_easy *data, ++ size_t delta, ++ bool connect_only) ++{ ++ size_t bad = 0; ++ if(delta < MAX_HTTP_RESP_HEADER_SIZE) { ++ if(!connect_only) ++ data->req.headerbytecount += (unsigned int)delta; ++ data->info.header_size += (unsigned int)delta; ++ if(data->info.header_size > MAX_HTTP_RESP_HEADER_SIZE) ++ bad = data->info.header_size; ++ } ++ else ++ bad = data->info.header_size + delta; ++ if(bad) { ++ failf(data, "Too large response headers: %zu > %zu", ++ bad, MAX_HTTP_RESP_HEADER_SIZE); ++ return CURLE_RECV_ERROR; ++ } ++ return CURLE_OK; ++} ++ ++ + /* + * Read any HTTP header lines from the server and pass them to the client app. + */ +@@ -4057,8 +4080,9 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data, + if(result) + return result; + +- data->info.header_size += (long)headerlen; +- data->req.headerbytecount += (long)headerlen; ++ result = Curl_bump_headersize(data, headerlen, FALSE); ++ if(result) ++ return result; + + /* + * When all the headers have been parsed, see if we should give +@@ -4358,8 +4382,10 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data, + if(result) + return result; + +- data->info.header_size += Curl_dyn_len(&data->state.headerb); +- data->req.headerbytecount += Curl_dyn_len(&data->state.headerb); ++ result = Curl_bump_headersize(data, Curl_dyn_len(&data->state.headerb), ++ FALSE); ++ if(result) ++ return result; + + Curl_dyn_reset(&data->state.headerb); + } +diff --git a/lib/http.h b/lib/http.h +index 2ac287eca..bcf5e0609 100644 +--- a/lib/http.h ++++ b/lib/http.h +@@ -50,6 +50,10 @@ extern const struct Curl_handler Curl_handler_http; + extern const struct Curl_handler Curl_handler_https; + #endif + ++CURLcode Curl_bump_headersize(struct Curl_easy *data, ++ size_t delta, ++ bool 
connect_only); ++ + /* Header specific functions */ + bool Curl_compareheader(const char *headerline, /* line to check */ + const char *header, /* header keyword _with_ colon */ +@@ -163,6 +167,11 @@ CURLcode Curl_http_auth_act(struct Curl_easy *data); + #define EXPECT_100_THRESHOLD (1024*1024) + #endif + ++/* MAX_HTTP_RESP_HEADER_SIZE is the maximum size of all response headers ++ combined that libcurl allows for a single HTTP response, any HTTP ++ version. This count includes CONNECT response headers. */ ++#define MAX_HTTP_RESP_HEADER_SIZE (300*1024) ++ + #endif /* CURL_DISABLE_HTTP */ + + #ifdef USE_NGHTTP3 +diff --git a/lib/pingpong.c b/lib/pingpong.c +index 74a678a1a..aa536ec62 100644 +--- a/lib/pingpong.c ++++ b/lib/pingpong.c +@@ -340,7 +340,9 @@ CURLcode Curl_pp_readresp(struct Curl_easy *data, + ssize_t clipamount = 0; + bool restart = FALSE; + +- data->req.headerbytecount += (long)gotbytes; ++ result = Curl_bump_headersize(data, gotbytes, FALSE); ++ if(result) ++ return result; + + pp->nread_resp += gotbytes; + for(i = 0; i < gotbytes; ptr++, i++) { +diff --git a/lib/urldata.h b/lib/urldata.h +index 69c14eb54..512bae24c 100644 +--- a/lib/urldata.h ++++ b/lib/urldata.h +@@ -612,18 +612,17 @@ struct SingleRequest { + curl_off_t bytecount; /* total number of bytes read */ + curl_off_t writebytecount; /* number of bytes written */ + +- curl_off_t headerbytecount; /* only count received headers */ +- curl_off_t deductheadercount; /* this amount of bytes doesn't count when we +- check if anything has been transferred at +- the end of a connection. 
We use this +- counter to make only a 100 reply (without a +- following second response code) result in a +- CURLE_GOT_NOTHING error code */ +- + curl_off_t pendingheader; /* this many bytes left to send is actually + header and not body */ + struct curltime start; /* transfer started at this time */ + struct curltime now; /* current time */ ++ unsigned int headerbytecount; /* only count received headers */ ++ unsigned int deductheadercount; /* this amount of bytes doesn't count when ++ we check if anything has been transferred ++ at the end of a connection. We use this ++ counter to make only a 100 reply (without ++ a following second response code) result ++ in a CURLE_GOT_NOTHING error code */ + enum { + HEADER_NORMAL, /* no bad header at all */ + HEADER_PARTHEADER, /* part of the chunk is a bad header, the rest +@@ -1139,7 +1138,6 @@ struct PureInfo { + int httpversion; /* the http version number X.Y = X*10+Y */ + time_t filetime; /* If requested, this is might get set. Set to -1 if the + time was unretrievable. 
*/ +- curl_off_t header_size; /* size of read header(s) in bytes */ + curl_off_t request_size; /* the amount of bytes sent in the request(s) */ + unsigned long proxyauthavail; /* what proxy auth types were announced */ + unsigned long httpauthavail; /* what host auth types were announced */ +@@ -1147,6 +1145,7 @@ struct PureInfo { + char *contenttype; /* the content type of the object */ + char *wouldredirect; /* URL this would've been redirected to if asked to */ + curl_off_t retry_after; /* info from Retry-After: header */ ++ unsigned int header_size; /* size of read header(s) in bytes */ + + /* PureInfo members 'conn_primary_ip', 'conn_primary_port', 'conn_local_ip' + and, 'conn_local_port' are copied over from the connectdata struct in +-- +2.41.0 + diff --git a/curl.spec b/curl.spec index f324848..5a87baf 100644 --- a/curl.spec +++ b/curl.spec @@ -1,7 +1,7 @@ Summary: A utility for getting files from remote servers (FTP, HTTP, and others) Name: curl Version: 7.85.0 -Release: 10%{?dist} +Release: 11%{?dist} License: MIT Source0: https://curl.se/download/%{name}-%{version}.tar.xz Source1: https://curl.se/download/%{name}-%{version}.tar.xz.asc @@ -67,6 +67,9 @@ Patch30: 0030-curl-7.87.0-CVE-2023-28321.patch # fix fopen race condition (CVE-2023-32001) Patch31: 0031-curl-7.87.0-CVE-2023-32001.patch +# fix HTTP headers eat all memory (CVE-2023-38039) +Patch32: 0032-curl-7.87.0-CVE-2023-38039.patch + # patch making libcurl multilib ready Patch101: 0101-curl-7.32.0-multilib.patch @@ -270,6 +273,7 @@ be installed. 
%patch29 -p1
%patch30 -p1
%patch31 -p1
+%patch32 -p1

# Fedora patches
%patch101 -p1
@@ -305,6 +309,11 @@ printf "702\n703\n716\n" >> tests/data/DISABLED
printf "3000\n3001\n" >> tests/data/DISABLED
%endif

+# temporarily disable test 678 on aarch64 (it suddenly started crashing without any change in curl)
+%ifarch aarch64
+printf "678\n" >> tests/data/DISABLED
+%endif
+
# test3026: avoid pthread_create() failure due to resource exhaustion on i386
%ifarch %{ix86}
sed -e 's|NUM_THREADS 1000$|NUM_THREADS 256|' \
@@ -503,6 +512,10 @@ rm -f ${RPM_BUILD_ROOT}%{_libdir}/libcurl.la
%{_libdir}/libcurl.so.4.[0-9].[0-9].minimal

%changelog
+* Wed Sep 13 2023 Jan Macku - 7.85.0-11
+- fix HTTP headers eat all memory (CVE-2023-38039)
+- temporarily disable test 678 on aarch64 (it suddenly started crashing without any change in curl)
+
* Thu Jul 20 2023 Jan Macku - 7.85.0-10
- fix fopen race condition (CVE-2023-32001)