author     V3n3RiX <venerix@koprulu.sector>    2024-12-24 06:30:58 +0000
committer  V3n3RiX <venerix@koprulu.sector>    2024-12-24 06:30:58 +0000
commit     17e417b73cb3e25edbc6541bd107bc9c593d66bd (patch)
tree       866f98b73f846149d0c9e7e5a1cf0527b7c54e08 /net-misc/curl
parent     338095605b6c9c82690f77f27a32490c9a57bdcb (diff)
gentoo auto-resync : 24:12:2024 - 06:30:57
Diffstat (limited to 'net-misc/curl')
-rw-r--r--  net-misc/curl/Manifest                                      5
-rw-r--r--  net-misc/curl/files/curl-8.7.1-chunked-post.patch          57
-rw-r--r--  net-misc/curl/files/curl-8.7.1-fix-compress-option.patch  153
-rw-r--r--  net-misc/curl/files/curl-8.7.1-http2-git-clone.patch      342
-rw-r--r--  net-misc/curl/files/curl-8.7.1-rustls-fixes.patch          49
-rw-r--r--  net-misc/curl/files/curl-prefix.patch                      21
6 files changed, 0 insertions, 627 deletions
diff --git a/net-misc/curl/Manifest b/net-misc/curl/Manifest
index b32b31402aae..ea776dfc1373 100644
--- a/net-misc/curl/Manifest
+++ b/net-misc/curl/Manifest
@@ -6,17 +6,12 @@ AUX curl-8.11.0-mbedtls-global-init.patch 1868 BLAKE2B 722959cd1f70d336f093ae833
AUX curl-8.11.0-netrc-large-file.patch 719 BLAKE2B b7a838f76a6c6eba341bacb7826caf1c7ca73c57a3b60f8136b47b32e91bfe3b0663d47b3273095b8a93736ceda96833d25f106d82efbc7afe6315020bfca798 SHA512 ef0d4204e6ca7bee17ccebf20ea6873076a571819bcece058eae7e6c3a62bff431d6161aa4da0725ea1e8b278f121d682361f4df1fe4adc58a1375e611a510a8
AUX curl-8.11.0-setopt-http_content_decoding.patch 691 BLAKE2B a812c178c0a3a4f5dc78fc576c9aef247de70eb32ab02b96e2e1e5b9428e6928c538e0a981ac98ae2f1428ae4c12bad767e1efc32a48a21ed99e0afc50180ec6 SHA512 9ef6fca7ae98bce6cee40ca5c25e7a22e937e0118bcee948e95cb8a8c5e6ca3f7e8b2327716cf7d55d976bfd6d30d57617d7da52ff971b82b22ff33878fa1d3e
AUX curl-8.11.1-async-thread-close-eventfd.patch 1080 BLAKE2B fa1ff8ff20870c80a4c7dd0ef2ec116ce78a987b9659a77f513c44b6237f0b0b6bf48e547cefd6e157d85ee6113669bffe02e6f7321b19a8ffec12caa2d655f4 SHA512 a4662fbea884a05e151c0d100e012e9f1c1f8de53f70f20c9df6fc54ea9d293211475dd084bae32bf0508c7438971f921f6840cae4e9424589511dbe44584609
-AUX curl-8.7.1-chunked-post.patch 2389 BLAKE2B d283816be5dca19ab0058db1937d686b4f9056caf3c46a8dcd731bc69ae91df1b01d10f643ce127eeec7aac0e9282f8b5c5109fe1cea82b30d2cf3270e93ae24 SHA512 3b1a67010704f4863f973e79d6a695e6e13b938fbaa695a05bc92b1a577fa5852c310b38f9c88518275f82c7d088474f5e4c585ac5ff790043e6ca969b0dc1d9
-AUX curl-8.7.1-fix-compress-option.patch 6222 BLAKE2B 7ff5c096fa6cc5cc039f47188a85c49364cdfe6cfeae3a9c58b38939eb59896bcf204c682f36e89ace37a0753236593307e1af45bc2d0f099ff685af8e366713 SHA512 1622adf015a016a205d4bf621715ed06fd345cd45f032ad44b6d9324d398c0e2004f04fe734401b12007c3eb145dac84aa3ce90bdaa9b16e09b5908da75f902f
-AUX curl-8.7.1-http2-git-clone.patch 14120 BLAKE2B 5bc2c0765ebd400f2e79d0552621c98640c757f189ae0fa029d8fb6d9fa74ac0e4fd93948fed6bc7f93b329e30948a585be1f20dde18423acf162cd36b4fb166 SHA512 278720daae7d4d3288502e0dd2b591ada66a559040eb341b0cf5673094e9d214eb166772279378487da96f43912313e04d3b174fdcb2b2cbfa766f241df62f34
-AUX curl-8.7.1-rustls-fixes.patch 1526 BLAKE2B c1ce730bc1d78d9655378a174b32706bc964e07b1b4fde5385212542bf0c85b2adfa8110266410b6d8766bb2ca5c46a58295d7098ab4acb71acb0a5dbd6d5d20 SHA512 7ca3004306595ead6057bbbfa6dcbb5d7b8b7782a7627e5607a916fff8626b4c3bfb2cbac48aa0e65a93b3c4fb0f87a7ee09273f1f9b1eadb9126fbcc4a72ce2
AUX curl-8.8.0-install-manpage.patch 659 BLAKE2B 44a303863fb826d0fca139c8a9091ce1e9ed461f3ee1545b4c3fb9a03dae2f707622b61d4f34345bafcccab751f989972eead676de5a784b5af138a1cc0e467c SHA512 0d5acfb2cf32b3855a29e01cacc5b3f06014832ca0adfa554938b3b06658dd63a4bac75932b4476b856f4c9adb096217bbb549b1a81a4a012f8b668cb9420079
AUX curl-8.8.0-mbedtls.patch 1686 BLAKE2B 6bafbfcf8c418e0507dc910aafe8667c3a71c535fa13f329ef67087edb4f6dcbe70113f752baf8a9ac6c72424045a3111b4e21992c84c7d2cd66684cfaa0ad0f SHA512 07ae1a42da92a200bbc72d911ecbf83b93df2a2be6022605842fc08bc870e466b19b6dc78298039027266155a5c9fcc81f05107fdbdfba4cfac4ddaad4d2a586
AUX curl-8.8.0-multi_wait-timeout.patch 2732 BLAKE2B ae2f40f6dcec16761d959fe5eeaff531ff3ac2c2cd26d676261ecac406653c3d560941455f6d46dd1552f20429582173e45f05865cbdd009c28f21de6a98a458 SHA512 1fbb5506ce1b87c90ea2d029adb9eaccb9930203c8aa0c119666cbee6fc1fc190423f02592bb2620ca3fa1b60822fc704bbc6f671152e877b2383173260acb04
AUX curl-8.9.1-sigpipe.patch 773 BLAKE2B 7ef3bbd4ca9558621b4f1ea5cb9847705cd6331b7f3d5d2a950642d090c789b802c0ff4f96ff2805b9e1dfe807b14daaee223dbcb2ff1426458dadb3e71d1e79 SHA512 55e55da046425e9a60064ff79ec3f8ab37a10275210f46cfb10ad29e9a29cb137a28ba6a0effa90bfc46d687aab929565b97bfa7549ac8a202746e332d1fa6ed
AUX curl-prefix-2.patch 1075 BLAKE2B 657ac450c9d50a08f62be72dc9d1bc68c494dcc0c8153f1a89d5cb50d240aa6bcd56b48242ed3ea1352da5cd5d98b8c6daaff6fdcc672a95a0233db1f8d27bdd SHA512 91a531291426477a68108dcc40656e6a56caabfda26f351d4b7cdb39e66f500ffd532f2417a81c5a789ea624b4f6325aa8006155292910612e54ab3d9daa3620
AUX curl-prefix-3.patch 1068 BLAKE2B dbaf21f38874a308557997d6a3a7a1f9fdb817b22d9846b4d6bd2bae413b7177650f20f17b10bfcc4a6747a89470dc2672ce79ba67d08fc92ebbbba353ba6c3a SHA512 c10a0062564be885f36e7c37bb58f3aa59c387369b9aae6342f818d4671e31cec99fd9ddda3577f03d1f8e87b926b8d75b006ca543f41a264e8595af79242b62
-AUX curl-prefix.patch 880 BLAKE2B 5b7552a8339014221864a585d174b02a96ec7dd7fe8762d331d1981834044f8ec4db64d527a4ded3f5f4cccc86f281576668de092439eb19f5477d5fcf8369cf SHA512 c7cd13b9ccbd12ed01ea121ffece9c23b898a5b34698bae59ae1dd23b1cf2445180b84d80c4a640981f16dba5018df944f405dd5c660addab54ca21e0e673b7f
AUX curl-respect-cflags-3.patch 406 BLAKE2B 1b533144858aff5566150c4a2648ad2e48e8ff29849ae285592edfee4b3332d06e750395dea7190ee6a01d2b5ee2c2c42c10400c2e5defa09963a90a1a10417d SHA512 3219e4e67d534e35012909243fc8d69d58989462db44dd507c502e7aaa299f1d9a01392e2c83797cc2bdb53d503470c5d6e7bf94572a6ccc6e5eafcc0466bc54
DIST curl-8.10.1.tar.xz 2726748 BLAKE2B bfdfa24f6d652884044c5e8eea5d70daad651b46255c99c9df502f9595a2dcbf8c4034446becf9e87f8e8a3f397a8fda29ab3e0d6020ac0dae62dd42b8136b78 SHA512 f1c7a12492dcfb8ba08be69b96a83ce9074592cbaa6b95c72b3c16fc58ad35e9f9deec7b72baca7d360d013b0b1c7ea38bd4edae464903ac67aa3c76238d8c6c
DIST curl-8.10.1.tar.xz.asc 488 BLAKE2B 8e8f2b628d4e8964a76c1c43c5557aacbfc2d2dbc51be8a0fa1b157c257f15f29aedba842cba7cb270c4adcf0b4a5d9c8b0b3d49633c48b061fb3e1472303d66 SHA512 21d6d560c027efc9e3e5db182a77501d6376442221ba910df817e2ec980bee44a9fe2afc698205f8d5e8313ae47915a341d60206a46b46e816d73ee357a894ac
diff --git a/net-misc/curl/files/curl-8.7.1-chunked-post.patch b/net-misc/curl/files/curl-8.7.1-chunked-post.patch
deleted file mode 100644
index 9d1fef73d383..000000000000
--- a/net-misc/curl/files/curl-8.7.1-chunked-post.patch
+++ /dev/null
@@ -1,57 +0,0 @@
-https://github.com/curl/curl/commit/721941aadf4adf4f6aeb3f4c0ab489bb89610c36
-From: Stefan Eissing <stefan@eissing.org>
-Date: Mon, 1 Apr 2024 15:41:18 +0200
-Subject: [PATCH] http: with chunked POST forced, disable length check on read
- callback
-
-- when an application forces HTTP/1.1 chunked transfer encoding
- by setting the corresponding header and instructs curl to use
- the CURLOPT_READFUNCTION, disregard any POST length information.
-- this establishes backward compatibility with previous curl versions
-
-Applications are encouraged to not force "chunked", but rather
-set length information for a POST. By setting -1, curl will
-auto-select chunked on HTTP/1.1 and work properly on other HTTP
-versions.
-
-Reported-by: Jeff King
-Fixes #13229
-Closes #13257
---- a/lib/http.c
-+++ b/lib/http.c
-@@ -2046,8 +2046,19 @@ static CURLcode set_reader(struct Curl_easy *data, Curl_HttpReq httpreq)
- else
- result = Curl_creader_set_null(data);
- }
-- else { /* we read the bytes from the callback */
-- result = Curl_creader_set_fread(data, postsize);
-+ else {
-+ /* we read the bytes from the callback. In case "chunked" encoding
-+ * is forced by the application, we disregard `postsize`. This is
-+ * a backward compatibility decision to earlier versions where
-+ * chunking disregarded this. See issue #13229. */
-+ bool chunked = FALSE;
-+ char *ptr = Curl_checkheaders(data, STRCONST("Transfer-Encoding"));
-+ if(ptr) {
-+ /* Some kind of TE is requested, check if 'chunked' is chosen */
-+ chunked = Curl_compareheader(ptr, STRCONST("Transfer-Encoding:"),
-+ STRCONST("chunked"));
-+ }
-+ result = Curl_creader_set_fread(data, chunked? -1 : postsize);
- }
- return result;
-
-@@ -2115,6 +2126,13 @@ CURLcode Curl_http_req_set_reader(struct Curl_easy *data,
- data->req.upload_chunky =
- Curl_compareheader(ptr,
- STRCONST("Transfer-Encoding:"), STRCONST("chunked"));
-+ if(data->req.upload_chunky &&
-+ Curl_use_http_1_1plus(data, data->conn) &&
-+ (data->conn->httpversion >= 20)) {
-+ infof(data, "suppressing chunked transfer encoding on connection "
-+ "using HTTP version 2 or higher");
-+ data->req.upload_chunky = FALSE;
-+ }
- }
- else {
- curl_off_t req_clen = Curl_creader_total_length(data);
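For context on the behaviour the dropped chunked-post patch dealt with: the upstream commit message recommends not forcing "Transfer-Encoding: chunked" by hand, but instead handing libcurl a POST size of -1 alongside the read callback, so that it auto-selects chunked on HTTP/1.1 and behaves correctly on later HTTP versions. A minimal sketch of that usage (placeholder URL and a hypothetical in-memory buffer, not taken from the patch or the ebuild) could look like:

#include <curl/curl.h>
#include <string.h>

/* hypothetical in-memory upload source for this sketch */
struct upload { const char *data; size_t left; };

static size_t read_cb(char *buf, size_t size, size_t nitems, void *userp)
{
  struct upload *u = userp;
  size_t n = size * nitems;
  if(n > u->left)
    n = u->left;
  memcpy(buf, u->data, n);
  u->data += n;
  u->left -= n;
  return n;                 /* returning 0 ends the upload */
}

int main(void)
{
  struct upload u = { "hello=world", 11 };
  CURL *h = curl_easy_init();
  if(!h)
    return 1;
  curl_easy_setopt(h, CURLOPT_URL, "https://example.org/upload"); /* placeholder */
  curl_easy_setopt(h, CURLOPT_POST, 1L);
  curl_easy_setopt(h, CURLOPT_READFUNCTION, read_cb);
  curl_easy_setopt(h, CURLOPT_READDATA, &u);
  /* -1 = size unknown: per the commit message above, curl then auto-selects
     chunked on HTTP/1.1, so no forced "Transfer-Encoding: chunked" header */
  curl_easy_setopt(h, CURLOPT_POSTFIELDSIZE_LARGE, (curl_off_t)-1);
  CURLcode rc = curl_easy_perform(h);
  curl_easy_cleanup(h);
  return rc != CURLE_OK;
}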
diff --git a/net-misc/curl/files/curl-8.7.1-fix-compress-option.patch b/net-misc/curl/files/curl-8.7.1-fix-compress-option.patch
deleted file mode 100644
index a06a53729533..000000000000
--- a/net-misc/curl/files/curl-8.7.1-fix-compress-option.patch
+++ /dev/null
@@ -1,153 +0,0 @@
-https://github.com/curl/curl/commit/b30d694a027eb771c02a3db0dee0ca03ccab7377
-From: Stefan Eissing <stefan@eissing.org>
-Date: Thu, 28 Mar 2024 11:08:15 +0100
-Subject: [PATCH] content_encoding: brotli and others, pass through 0-length
- writes
-
-- curl's transfer handling may write 0-length chunks at the end of the
- download with an EOS flag. (HTTP/2 does this commonly)
-
-- content encoders need to pass-through such a write and not count this
- as error in case they are finished decoding
-
-Fixes #13209
-Fixes #13212
-Closes #13219
---- a/lib/content_encoding.c
-+++ b/lib/content_encoding.c
-@@ -300,7 +300,7 @@ static CURLcode deflate_do_write(struct Curl_easy *data,
- struct zlib_writer *zp = (struct zlib_writer *) writer;
- z_stream *z = &zp->z; /* zlib state structure */
-
-- if(!(type & CLIENTWRITE_BODY))
-+ if(!(type & CLIENTWRITE_BODY) || !nbytes)
- return Curl_cwriter_write(data, writer->next, type, buf, nbytes);
-
- /* Set the compressed input when this function is called */
-@@ -457,7 +457,7 @@ static CURLcode gzip_do_write(struct Curl_easy *data,
- struct zlib_writer *zp = (struct zlib_writer *) writer;
- z_stream *z = &zp->z; /* zlib state structure */
-
-- if(!(type & CLIENTWRITE_BODY))
-+ if(!(type & CLIENTWRITE_BODY) || !nbytes)
- return Curl_cwriter_write(data, writer->next, type, buf, nbytes);
-
- if(zp->zlib_init == ZLIB_INIT_GZIP) {
-@@ -669,7 +669,7 @@ static CURLcode brotli_do_write(struct Curl_easy *data,
- CURLcode result = CURLE_OK;
- BrotliDecoderResult r = BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT;
-
-- if(!(type & CLIENTWRITE_BODY))
-+ if(!(type & CLIENTWRITE_BODY) || !nbytes)
- return Curl_cwriter_write(data, writer->next, type, buf, nbytes);
-
- if(!bp->br)
-@@ -762,7 +762,7 @@ static CURLcode zstd_do_write(struct Curl_easy *data,
- ZSTD_outBuffer out;
- size_t errorCode;
-
-- if(!(type & CLIENTWRITE_BODY))
-+ if(!(type & CLIENTWRITE_BODY) || !nbytes)
- return Curl_cwriter_write(data, writer->next, type, buf, nbytes);
-
- if(!zp->decomp) {
-@@ -916,7 +916,7 @@ static CURLcode error_do_write(struct Curl_easy *data,
- (void) buf;
- (void) nbytes;
-
-- if(!(type & CLIENTWRITE_BODY))
-+ if(!(type & CLIENTWRITE_BODY) || !nbytes)
- return Curl_cwriter_write(data, writer->next, type, buf, nbytes);
-
- failf(data, "Unrecognized content encoding type. "
---- a/tests/http/test_02_download.py
-+++ b/tests/http/test_02_download.py
-@@ -394,6 +394,19 @@ def test_02_27_paused_no_cl(self, env: Env, httpd, nghttpx, repeat):
- r = client.run(args=[url])
- r.check_exit_code(0)
-
-+ @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
-+ def test_02_28_get_compressed(self, env: Env, httpd, nghttpx, repeat, proto):
-+ if proto == 'h3' and not env.have_h3():
-+ pytest.skip("h3 not supported")
-+ count = 1
-+ urln = f'https://{env.authority_for(env.domain1brotli, proto)}/data-100k?[0-{count-1}]'
-+ curl = CurlClient(env=env)
-+ r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
-+ '--compressed'
-+ ])
-+ r.check_exit_code(code=0)
-+ r.check_response(count=count, http_status=200)
-+
- def check_downloads(self, client, srcfile: str, count: int,
- complete: bool = True):
- for i in range(count):
---- a/tests/http/testenv/env.py
-+++ b/tests/http/testenv/env.py
-@@ -129,10 +129,11 @@ def __init__(self):
- self.htdocs_dir = os.path.join(self.gen_dir, 'htdocs')
- self.tld = 'http.curl.se'
- self.domain1 = f"one.{self.tld}"
-+ self.domain1brotli = f"brotli.one.{self.tld}"
- self.domain2 = f"two.{self.tld}"
- self.proxy_domain = f"proxy.{self.tld}"
- self.cert_specs = [
-- CertificateSpec(domains=[self.domain1, 'localhost'], key_type='rsa2048'),
-+ CertificateSpec(domains=[self.domain1, self.domain1brotli, 'localhost'], key_type='rsa2048'),
- CertificateSpec(domains=[self.domain2], key_type='rsa2048'),
- CertificateSpec(domains=[self.proxy_domain, '127.0.0.1'], key_type='rsa2048'),
- CertificateSpec(name="clientsX", sub_specs=[
-@@ -376,6 +377,10 @@ def htdocs_dir(self) -> str:
- def domain1(self) -> str:
- return self.CONFIG.domain1
-
-+ @property
-+ def domain1brotli(self) -> str:
-+ return self.CONFIG.domain1brotli
-+
- @property
- def domain2(self) -> str:
- return self.CONFIG.domain2
---- a/tests/http/testenv/httpd.py
-+++ b/tests/http/testenv/httpd.py
-@@ -50,6 +50,7 @@ class Httpd:
- 'alias', 'env', 'filter', 'headers', 'mime', 'setenvif',
- 'socache_shmcb',
- 'rewrite', 'http2', 'ssl', 'proxy', 'proxy_http', 'proxy_connect',
-+ 'brotli',
- 'mpm_event',
- ]
- COMMON_MODULES_DIRS = [
-@@ -203,6 +204,7 @@ def _mkpath(self, path):
-
- def _write_config(self):
- domain1 = self.env.domain1
-+ domain1brotli = self.env.domain1brotli
- creds1 = self.env.get_credentials(domain1)
- domain2 = self.env.domain2
- creds2 = self.env.get_credentials(domain2)
-@@ -285,6 +287,24 @@ def _write_config(self):
- f'</VirtualHost>',
- f'',
- ])
-+ # Alternate to domain1 with BROTLI compression
-+ conf.extend([ # https host for domain1, h1 + h2
-+ f'<VirtualHost *:{self.env.https_port}>',
-+ f' ServerName {domain1brotli}',
-+ f' Protocols h2 http/1.1',
-+ f' SSLEngine on',
-+ f' SSLCertificateFile {creds1.cert_file}',
-+ f' SSLCertificateKeyFile {creds1.pkey_file}',
-+ f' DocumentRoot "{self._docs_dir}"',
-+ f' SetOutputFilter BROTLI_COMPRESS',
-+ ])
-+ conf.extend(self._curltest_conf(domain1))
-+ if domain1 in self._extra_configs:
-+ conf.extend(self._extra_configs[domain1])
-+ conf.extend([
-+ f'</VirtualHost>',
-+ f'',
-+ ])
- conf.extend([ # https host for domain2, no h2
- f'<VirtualHost *:{self.env.https_port}>',
- f' ServerName {domain2}',
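The compress-option patch above only matters when libcurl is asked to decode content encodings, which the added test exercises with --compressed. The library-side equivalent is CURLOPT_ACCEPT_ENCODING; a hedged sketch with a placeholder URL and a throwaway write callback:

#include <curl/curl.h>
#include <stdio.h>

static size_t sink(char *ptr, size_t size, size_t nmemb, void *userp)
{
  (void)ptr; (void)userp;
  return size * nmemb;          /* discard the already-decoded body */
}

int main(void)
{
  CURL *h = curl_easy_init();
  if(!h)
    return 1;
  curl_easy_setopt(h, CURLOPT_URL, "https://example.org/data-100k"); /* placeholder */
  /* "" requests every encoding this curl build supports (gzip, br, zstd, ...)
     and makes libcurl decompress the response before the write callback */
  curl_easy_setopt(h, CURLOPT_ACCEPT_ENCODING, "");
  curl_easy_setopt(h, CURLOPT_WRITEFUNCTION, sink);
  CURLcode rc = curl_easy_perform(h);
  if(rc != CURLE_OK)
    fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(rc));
  curl_easy_cleanup(h);
  return rc != CURLE_OK;
}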
diff --git a/net-misc/curl/files/curl-8.7.1-http2-git-clone.patch b/net-misc/curl/files/curl-8.7.1-http2-git-clone.patch
deleted file mode 100644
index b07a3b0a8817..000000000000
--- a/net-misc/curl/files/curl-8.7.1-http2-git-clone.patch
+++ /dev/null
@@ -1,342 +0,0 @@
-https://bugs.gentoo.org/930633
-https://github.com/curl/curl/issues/13474
---- a/lib/http2.c
-+++ b/lib/http2.c
-@@ -187,6 +187,7 @@ struct h2_stream_ctx {
-
- int status_code; /* HTTP response status code */
- uint32_t error; /* stream error code */
-+ CURLcode xfer_result; /* Result of writing out response */
- uint32_t local_window_size; /* the local recv window size */
- int32_t id; /* HTTP/2 protocol identifier for stream */
- BIT(resp_hds_complete); /* we have a complete, final response */
-@@ -945,12 +946,39 @@ fail:
- return rv;
- }
-
--static CURLcode recvbuf_write_hds(struct Curl_cfilter *cf,
-+static void h2_xfer_write_resp_hd(struct Curl_cfilter *cf,
- struct Curl_easy *data,
-- const char *buf, size_t blen)
-+ struct h2_stream_ctx *stream,
-+ const char *buf, size_t blen, bool eos)
- {
-- (void)cf;
-- return Curl_xfer_write_resp(data, (char *)buf, blen, FALSE);
-+
-+ /* If we already encountered an error, skip further writes */
-+ if(!stream->xfer_result) {
-+ stream->xfer_result = Curl_xfer_write_resp(data, (char *)buf, blen, eos);
-+ if(stream->xfer_result)
-+ CURL_TRC_CF(data, cf, "[%d] error %d writing %zu bytes of headers",
-+ stream->id, stream->xfer_result, blen);
-+ }
-+}
-+
-+static void h2_xfer_write_resp(struct Curl_cfilter *cf,
-+ struct Curl_easy *data,
-+ struct h2_stream_ctx *stream,
-+ const char *buf, size_t blen, bool eos)
-+{
-+
-+ /* If we already encountered an error, skip further writes */
-+ if(!stream->xfer_result)
-+ stream->xfer_result = Curl_xfer_write_resp(data, (char *)buf, blen, eos);
-+ /* If the transfer write is errored, we do not want any more data */
-+ if(stream->xfer_result) {
-+ struct cf_h2_ctx *ctx = cf->ctx;
-+ CURL_TRC_CF(data, cf, "[%d] error %d writing %zu bytes of data, "
-+ "RST-ing stream",
-+ stream->id, stream->xfer_result, blen);
-+ nghttp2_submit_rst_stream(ctx->h2, 0, stream->id,
-+ NGHTTP2_ERR_CALLBACK_FAILURE);
-+ }
- }
-
- static CURLcode on_stream_frame(struct Curl_cfilter *cf,
-@@ -960,7 +988,6 @@ static CURLcode on_stream_frame(struct Curl_cfilter *cf,
- struct cf_h2_ctx *ctx = cf->ctx;
- struct h2_stream_ctx *stream = H2_STREAM_CTX(data);
- int32_t stream_id = frame->hd.stream_id;
-- CURLcode result;
- int rv;
-
- if(!stream) {
-@@ -1008,9 +1035,7 @@ static CURLcode on_stream_frame(struct Curl_cfilter *cf,
- stream->status_code = -1;
- }
-
-- result = recvbuf_write_hds(cf, data, STRCONST("\r\n"));
-- if(result)
-- return result;
-+ h2_xfer_write_resp_hd(cf, data, stream, STRCONST("\r\n"), stream->closed);
-
- if(stream->status_code / 100 != 1) {
- stream->resp_hds_complete = TRUE;
-@@ -1229,7 +1254,6 @@ static int on_data_chunk_recv(nghttp2_session *session, uint8_t flags,
- struct cf_h2_ctx *ctx = cf->ctx;
- struct h2_stream_ctx *stream;
- struct Curl_easy *data_s;
-- CURLcode result;
- (void)flags;
-
- DEBUGASSERT(stream_id); /* should never be a zero stream ID here */
-@@ -1252,9 +1276,7 @@ static int on_data_chunk_recv(nghttp2_session *session, uint8_t flags,
- if(!stream)
- return NGHTTP2_ERR_CALLBACK_FAILURE;
-
-- result = Curl_xfer_write_resp(data_s, (char *)mem, len, FALSE);
-- if(result && result != CURLE_AGAIN)
-- return NGHTTP2_ERR_CALLBACK_FAILURE;
-+ h2_xfer_write_resp(cf, data_s, stream, (char *)mem, len, FALSE);
-
- nghttp2_session_consume(ctx->h2, stream_id, len);
- stream->nrcvd_data += (curl_off_t)len;
-@@ -1465,16 +1487,12 @@ static int on_header(nghttp2_session *session, const nghttp2_frame *frame,
- result = Curl_headers_push(data_s, buffer, CURLH_PSEUDO);
- if(result)
- return NGHTTP2_ERR_CALLBACK_FAILURE;
-- result = recvbuf_write_hds(cf, data_s, STRCONST("HTTP/2 "));
-- if(result)
-- return NGHTTP2_ERR_CALLBACK_FAILURE;
-- result = recvbuf_write_hds(cf, data_s, (const char *)value, valuelen);
-- if(result)
-- return NGHTTP2_ERR_CALLBACK_FAILURE;
-+ h2_xfer_write_resp_hd(cf, data_s, stream, STRCONST("HTTP/2 "), FALSE);
-+ h2_xfer_write_resp_hd(cf, data_s, stream,
-+ (const char *)value, valuelen, FALSE);
- /* the space character after the status code is mandatory */
-- result = recvbuf_write_hds(cf, data_s, STRCONST(" \r\n"));
-- if(result)
-- return NGHTTP2_ERR_CALLBACK_FAILURE;
-+ h2_xfer_write_resp_hd(cf, data_s, stream, STRCONST(" \r\n"), FALSE);
-+
- /* if we receive data for another handle, wake that up */
- if(CF_DATA_CURRENT(cf) != data_s)
- Curl_expire(data_s, 0, EXPIRE_RUN_NOW);
-@@ -1487,18 +1505,13 @@ static int on_header(nghttp2_session *session, const nghttp2_frame *frame,
- /* nghttp2 guarantees that namelen > 0, and :status was already
- received, and this is not pseudo-header field . */
- /* convert to an HTTP1-style header */
-- result = recvbuf_write_hds(cf, data_s, (const char *)name, namelen);
-- if(result)
-- return NGHTTP2_ERR_CALLBACK_FAILURE;
-- result = recvbuf_write_hds(cf, data_s, STRCONST(": "));
-- if(result)
-- return NGHTTP2_ERR_CALLBACK_FAILURE;
-- result = recvbuf_write_hds(cf, data_s, (const char *)value, valuelen);
-- if(result)
-- return NGHTTP2_ERR_CALLBACK_FAILURE;
-- result = recvbuf_write_hds(cf, data_s, STRCONST("\r\n"));
-- if(result)
-- return NGHTTP2_ERR_CALLBACK_FAILURE;
-+ h2_xfer_write_resp_hd(cf, data_s, stream,
-+ (const char *)name, namelen, FALSE);
-+ h2_xfer_write_resp_hd(cf, data_s, stream, STRCONST(": "), FALSE);
-+ h2_xfer_write_resp_hd(cf, data_s, stream,
-+ (const char *)value, valuelen, FALSE);
-+ h2_xfer_write_resp_hd(cf, data_s, stream, STRCONST("\r\n"), FALSE);
-+
- /* if we receive data for another handle, wake that up */
- if(CF_DATA_CURRENT(cf) != data_s)
- Curl_expire(data_s, 0, EXPIRE_RUN_NOW);
-@@ -1799,7 +1812,12 @@ static ssize_t stream_recv(struct Curl_cfilter *cf, struct Curl_easy *data,
-
- (void)buf;
- *err = CURLE_AGAIN;
-- if(stream->closed) {
-+ if(stream->xfer_result) {
-+ CURL_TRC_CF(data, cf, "[%d] xfer write failed", stream->id);
-+ *err = stream->xfer_result;
-+ nread = -1;
-+ }
-+ else if(stream->closed) {
- CURL_TRC_CF(data, cf, "[%d] returning CLOSE", stream->id);
- nread = http2_handle_stream_close(cf, data, stream, err);
- }
---- a/lib/vquic/curl_ngtcp2.c
-+++ b/lib/vquic/curl_ngtcp2.c
-@@ -152,6 +152,7 @@ struct h3_stream_ctx {
- uint64_t error3; /* HTTP/3 stream error code */
- curl_off_t upload_left; /* number of request bytes left to upload */
- int status_code; /* HTTP status code */
-+ CURLcode xfer_result; /* result from xfer_resp_write(_hd) */
- bool resp_hds_complete; /* we have a complete, final response */
- bool closed; /* TRUE on stream close */
- bool reset; /* TRUE on stream reset */
-@@ -759,10 +760,39 @@ static int cb_h3_stream_close(nghttp3_conn *conn, int64_t stream_id,
- return 0;
- }
-
--static CURLcode write_resp_hds(struct Curl_easy *data,
-- const char *buf, size_t blen)
-+static void h3_xfer_write_resp_hd(struct Curl_cfilter *cf,
-+ struct Curl_easy *data,
-+ struct h3_stream_ctx *stream,
-+ const char *buf, size_t blen, bool eos)
- {
-- return Curl_xfer_write_resp(data, (char *)buf, blen, FALSE);
-+
-+ /* If we already encountered an error, skip further writes */
-+ if(!stream->xfer_result) {
-+ stream->xfer_result = Curl_xfer_write_resp(data, (char *)buf, blen, eos);
-+ if(stream->xfer_result)
-+ CURL_TRC_CF(data, cf, "[%"PRId64"] error %d writing %zu "
-+ "bytes of headers", stream->id, stream->xfer_result, blen);
-+ }
-+}
-+
-+static void h3_xfer_write_resp(struct Curl_cfilter *cf,
-+ struct Curl_easy *data,
-+ struct h3_stream_ctx *stream,
-+ const char *buf, size_t blen, bool eos)
-+{
-+
-+ /* If we already encountered an error, skip further writes */
-+ if(!stream->xfer_result)
-+ stream->xfer_result = Curl_xfer_write_resp(data, (char *)buf, blen, eos);
-+ /* If the transfer write is errored, we do not want any more data */
-+ if(stream->xfer_result) {
-+ struct cf_ngtcp2_ctx *ctx = cf->ctx;
-+ CURL_TRC_CF(data, cf, "[%"PRId64"] error %d writing %zu bytes "
-+ "of data, cancelling stream",
-+ stream->id, stream->xfer_result, blen);
-+ nghttp3_conn_close_stream(ctx->h3conn, stream->id,
-+ NGHTTP3_H3_REQUEST_CANCELLED);
-+ }
- }
-
- static int cb_h3_recv_data(nghttp3_conn *conn, int64_t stream3_id,
-@@ -773,7 +803,6 @@ static int cb_h3_recv_data(nghttp3_conn *conn, int64_t stream3_id,
- struct cf_ngtcp2_ctx *ctx = cf->ctx;
- struct Curl_easy *data = stream_user_data;
- struct h3_stream_ctx *stream = H3_STREAM_CTX(data);
-- CURLcode result;
-
- (void)conn;
- (void)stream3_id;
-@@ -781,12 +810,7 @@ static int cb_h3_recv_data(nghttp3_conn *conn, int64_t stream3_id,
- if(!stream)
- return NGHTTP3_ERR_CALLBACK_FAILURE;
-
-- result = Curl_xfer_write_resp(data, (char *)buf, blen, FALSE);
-- if(result) {
-- CURL_TRC_CF(data, cf, "[%" PRId64 "] DATA len=%zu, ERROR receiving %d",
-- stream->id, blen, result);
-- return NGHTTP3_ERR_CALLBACK_FAILURE;
-- }
-+ h3_xfer_write_resp(cf, data, stream, (char *)buf, blen, FALSE);
- if(blen) {
- CURL_TRC_CF(data, cf, "[%" PRId64 "] ACK %zu bytes of DATA",
- stream->id, blen);
-@@ -819,7 +843,6 @@ static int cb_h3_end_headers(nghttp3_conn *conn, int64_t stream_id,
- struct Curl_cfilter *cf = user_data;
- struct Curl_easy *data = stream_user_data;
- struct h3_stream_ctx *stream = H3_STREAM_CTX(data);
-- CURLcode result = CURLE_OK;
- (void)conn;
- (void)stream_id;
- (void)fin;
-@@ -828,10 +851,7 @@ static int cb_h3_end_headers(nghttp3_conn *conn, int64_t stream_id,
- if(!stream)
- return 0;
- /* add a CRLF only if we've received some headers */
-- result = write_resp_hds(data, "\r\n", 2);
-- if(result) {
-- return -1;
-- }
-+ h3_xfer_write_resp_hd(cf, data, stream, STRCONST("\r\n"), stream->closed);
-
- CURL_TRC_CF(data, cf, "[%" PRId64 "] end_headers, status=%d",
- stream_id, stream->status_code);
-@@ -874,7 +894,7 @@ static int cb_h3_recv_header(nghttp3_conn *conn, int64_t stream_id,
- ncopy = msnprintf(line, sizeof(line), "HTTP/3 %03d \r\n",
- stream->status_code);
- CURL_TRC_CF(data, cf, "[%" PRId64 "] status: %s", stream_id, line);
-- result = write_resp_hds(data, line, ncopy);
-+ h3_xfer_write_resp_hd(cf, data, stream, line, ncopy, FALSE);
- if(result) {
- return -1;
- }
-@@ -884,22 +904,12 @@ static int cb_h3_recv_header(nghttp3_conn *conn, int64_t stream_id,
- CURL_TRC_CF(data, cf, "[%" PRId64 "] header: %.*s: %.*s",
- stream_id, (int)h3name.len, h3name.base,
- (int)h3val.len, h3val.base);
-- result = write_resp_hds(data, (const char *)h3name.base, h3name.len);
-- if(result) {
-- return -1;
-- }
-- result = write_resp_hds(data, ": ", 2);
-- if(result) {
-- return -1;
-- }
-- result = write_resp_hds(data, (const char *)h3val.base, h3val.len);
-- if(result) {
-- return -1;
-- }
-- result = write_resp_hds(data, "\r\n", 2);
-- if(result) {
-- return -1;
-- }
-+ h3_xfer_write_resp_hd(cf, data, stream,
-+ (const char *)h3name.base, h3name.len, FALSE);
-+ h3_xfer_write_resp_hd(cf, data, stream, ": ", 2, FALSE);
-+ h3_xfer_write_resp_hd(cf, data, stream, (
-+ const char *)h3val.base, h3val.len, FALSE);
-+ h3_xfer_write_resp_hd(cf, data, stream, "\r\n", 2, FALSE);
- }
- return 0;
- }
-@@ -1083,7 +1093,13 @@ static ssize_t cf_ngtcp2_recv(struct Curl_cfilter *cf, struct Curl_easy *data,
- goto out;
- }
-
-- if(stream->closed) {
-+ if(stream->xfer_result) {
-+ CURL_TRC_CF(data, cf, "[%" PRId64 "] xfer write failed", stream->id);
-+ *err = stream->xfer_result;
-+ nread = -1;
-+ goto out;
-+ }
-+ else if(stream->closed) {
- nread = recv_closed_stream(cf, data, stream, err);
- goto out;
- }
---- a/tests/http/test_02_download.py
-+++ b/tests/http/test_02_download.py
-@@ -257,6 +257,34 @@ class TestDownload:
- ])
- r.check_response(count=count, http_status=200)
-
-+ @pytest.mark.parametrize("proto", ['h2', 'h3'])
-+ def test_02_14_not_found(self, env: Env, httpd, nghttpx, repeat, proto):
-+ if proto == 'h3' and not env.have_h3():
-+ pytest.skip("h3 not supported")
-+ if proto == 'h3' and env.curl_uses_lib('msh3'):
-+ pytest.skip("msh3 stalls here")
-+ count = 10
-+ urln = f'https://{env.authority_for(env.domain1, proto)}/not-found?[0-{count-1}]'
-+ curl = CurlClient(env=env)
-+ r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
-+ '--parallel'
-+ ])
-+ r.check_stats(count=count, http_status=404, exitcode=0)
-+
-+ @pytest.mark.parametrize("proto", ['h2', 'h3'])
-+ def test_02_15_fail_not_found(self, env: Env, httpd, nghttpx, repeat, proto):
-+ if proto == 'h3' and not env.have_h3():
-+ pytest.skip("h3 not supported")
-+ if proto == 'h3' and env.curl_uses_lib('msh3'):
-+ pytest.skip("msh3 stalls here")
-+ count = 10
-+ urln = f'https://{env.authority_for(env.domain1, proto)}/not-found?[0-{count-1}]'
-+ curl = CurlClient(env=env)
-+ r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
-+ '--fail'
-+ ])
-+ r.check_stats(count=count, http_status=404, exitcode=22)
-+
- @pytest.mark.skipif(condition=Env().slow_network, reason="not suitable for slow network tests")
- @pytest.mark.skipif(condition=Env().ci_run, reason="not suitable for CI runs")
- def test_02_20_h2_small_frames(self, env: Env, httpd, repeat):
-
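What the http2-git-clone patch changes is visible in the hunks above: a failed Curl_xfer_write_resp() is now remembered in the stream's xfer_result, the stream is reset (HTTP/2) or cancelled (HTTP/3), and the stored error is returned from the next recv instead of being dropped. From the application side the trigger is simply a write callback that refuses data; a minimal sketch (placeholder URL, deliberately failing callback) is:

#include <curl/curl.h>
#include <stdio.h>

/* a write callback that rejects the data; libcurl treats any return value
   other than the byte count it passed in as a write error */
static size_t fail_cb(char *ptr, size_t size, size_t nmemb, void *userp)
{
  (void)ptr; (void)size; (void)nmemb; (void)userp;
  return CURL_WRITEFUNC_ERROR;  /* available since curl 7.87.0 */
}

int main(void)
{
  CURL *h = curl_easy_init();
  if(!h)
    return 1;
  curl_easy_setopt(h, CURLOPT_URL, "https://example.org/big-file"); /* placeholder */
  curl_easy_setopt(h, CURLOPT_WRITEFUNCTION, fail_cb);
  CURLcode rc = curl_easy_perform(h);
  /* with the patched code paths, an HTTP/2 or HTTP/3 transfer whose write
     callback fails should report CURLE_WRITE_ERROR here rather than lose it */
  fprintf(stderr, "result: %d (%s)\n", (int)rc, curl_easy_strerror(rc));
  curl_easy_cleanup(h);
  return 0;
}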
diff --git a/net-misc/curl/files/curl-8.7.1-rustls-fixes.patch b/net-misc/curl/files/curl-8.7.1-rustls-fixes.patch
deleted file mode 100644
index 81bcb0734339..000000000000
--- a/net-misc/curl/files/curl-8.7.1-rustls-fixes.patch
+++ /dev/null
@@ -1,49 +0,0 @@
-From a866b062b17ab94b16b817ab9969c561364a4d72 Mon Sep 17 00:00:00 2001
-From: Matt Jolly <Matt.Jolly@footclan.ninja>
-Date: Mon, 1 Apr 2024 08:36:51 +1000
-Subject: [PATCH] m4: fix rustls builds
-
-This patch consolidates the following commits to do with rustls
-detection using pkg-config:
-
-- https://github.com/curl/curl/commit/9c4209837094781d5eef69ae6bcad0e86b64bf99
-- https://github.com/curl/curl/commit/5a50cb5a18a141a463148562dab83fa3be1a3b90
----
- m4/curl-rustls.m4 | 15 +++++++++++++++
- 1 file changed, 15 insertions(+)
-
-diff --git a/m4/curl-rustls.m4 b/m4/curl-rustls.m4
-index 7c55230..8082cf9 100644
---- a/m4/curl-rustls.m4
-+++ b/m4/curl-rustls.m4
-@@ -142,6 +142,11 @@ if test "x$OPT_RUSTLS" != xno; then
- LIBS="$SSL_LIBS $LIBS"
- USE_RUSTLS="yes"
- ssl_msg="rustls"
-+ AC_DEFINE(USE_RUSTLS, 1, [if rustls is enabled])
-+ AC_SUBST(USE_RUSTLS, [1])
-+ USE_RUSTLS="yes"
-+ RUSTLS_ENABLED=1
-+ test rustls != "$DEFAULT_SSL_BACKEND" || VALID_DEFAULT_SSL_BACKEND=yes
- else
- AC_MSG_ERROR([pkg-config: Could not find rustls])
- fi
-@@ -174,5 +179,15 @@ if test "x$OPT_RUSTLS" != xno; then
- fi
-
- test -z "$ssl_msg" || ssl_backends="${ssl_backends:+$ssl_backends, }$ssl_msg"
-+
-+ if test X"$OPT_RUSTLS" != Xno &&
-+ test "$RUSTLS_ENABLED" != "1"; then
-+ AC_MSG_NOTICE([OPT_RUSTLS: $OPT_RUSTLS])
-+ AC_MSG_NOTICE([RUSTLS_ENABLED: $RUSTLS_ENABLED])
-+ AC_MSG_ERROR([--with-rustls was given but Rustls could not be detected])
-+ fi
- fi
- ])
-+
-+
-+RUSTLS_ENABLED
---
-2.44.0
-
diff --git a/net-misc/curl/files/curl-prefix.patch b/net-misc/curl/files/curl-prefix.patch
deleted file mode 100644
index fd495c49b132..000000000000
--- a/net-misc/curl/files/curl-prefix.patch
+++ /dev/null
@@ -1,21 +0,0 @@
-diff -Naur curl-7.30.0.orig/curl-config.in curl-7.30.0/curl-config.in
---- curl-7.30.0.orig/curl-config.in 2013-02-06 09:44:37.000000000 -0500
-+++ curl-7.30.0/curl-config.in 2013-04-17 18:43:56.000000000 -0400
-@@ -134,7 +134,7 @@
- else
- CPPFLAG_CURL_STATICLIB=""
- fi
-- if test "X@includedir@" = "X/usr/include"; then
-+ if test "X@includedir@" = "X@GENTOO_PORTAGE_EPREFIX@/usr/include"; then
- echo "$CPPFLAG_CURL_STATICLIB"
- else
- echo "${CPPFLAG_CURL_STATICLIB}-I@includedir@"
-@@ -142,7 +142,7 @@
- ;;
-
- --libs)
-- if test "X@libdir@" != "X/usr/lib" -a "X@libdir@" != "X/usr/lib64"; then
-+ if test "X@libdir@" != "X@GENTOO_PORTAGE_EPREFIX@/usr/lib" -a "X@libdir@" != "X@GENTOO_PORTAGE_EPREFIX@/usr/lib64"; then
- CURLLIBDIR="-L@libdir@ "
- else
- CURLLIBDIR=""