[arch-commits] Commit in curl/trunk (3 files)
Dave Reisner
dreisner at nymeria.archlinux.org
Wed Jan 29 14:43:13 UTC 2014
Date: Wednesday, January 29, 2014 @ 15:43:13
Author: dreisner
Revision: 204854
upgpkg: curl 7.35.0-1
Modified:
curl/trunk/PKGBUILD
Deleted:
curl/trunk/0001-FILE-don-t-wait-due-to-CURLOPT_MAX_RECV_SPEED_LARGE.patch
curl/trunk/0001-connect-Try-all-addresses-in-first-connection-attemp.patch
-----------------------------------------------------------------+
0001-FILE-don-t-wait-due-to-CURLOPT_MAX_RECV_SPEED_LARGE.patch | 47 ----------
0001-connect-Try-all-addresses-in-first-connection-attemp.patch | 47 ----------
PKGBUILD | 17 ---
3 files changed, 3 insertions(+), 108 deletions(-)
Deleted: 0001-FILE-don-t-wait-due-to-CURLOPT_MAX_RECV_SPEED_LARGE.patch
===================================================================
--- 0001-FILE-don-t-wait-due-to-CURLOPT_MAX_RECV_SPEED_LARGE.patch 2014-01-29 13:51:54 UTC (rev 204853)
+++ 0001-FILE-don-t-wait-due-to-CURLOPT_MAX_RECV_SPEED_LARGE.patch 2014-01-29 14:43:13 UTC (rev 204854)
@@ -1,47 +0,0 @@
-From 2715d7f948c8eb7cd3cba38f3dff6d4148e7cfaf Mon Sep 17 00:00:00 2001
-From: Daniel Stenberg <daniel at haxx.se>
-Date: Sun, 22 Dec 2013 23:36:11 +0100
-Subject: [PATCH] FILE: don't wait due to CURLOPT_MAX_RECV_SPEED_LARGE
-
-The FILE:// code doesn't support this option - and it doesn't make sense
-to support it as long as it works as it does since then it'd only block
-even longer.
-
-But: setting CURLOPT_MAX_RECV_SPEED_LARGE would make the transfer first
-get done and then libcurl would wait until the average speed would get
-low enough. This happened because the transfer happens completely in the
-DO state for FILE:// but then it would still unconditionally continue in
-to the PERFORM state where the speed check is made.
-
-Starting now, the code will skip from DO_DONE to DONE immediately if no
-socket is set to be recv()ed or send()ed to.
-
-Bug: http://curl.haxx.se/bug/view.cgi?id=1312
-Reported-by: Mohammad AlSaleh
----
- lib/multi.c | 9 ++++++++-
- 1 file changed, 8 insertions(+), 1 deletion(-)
-
-diff --git a/lib/multi.c b/lib/multi.c
-index ec45ecb..191c9b8 100644
---- a/lib/multi.c
-+++ b/lib/multi.c
-@@ -1381,7 +1381,14 @@ static CURLMcode multi_runsingle(struct Curl_multi *multi,
- Curl_move_handle_from_send_to_recv_pipe(data, data->easy_conn);
- /* Check if we can move pending requests to send pipe */
- Curl_multi_process_pending_handles(multi);
-- multistate(data, CURLM_STATE_WAITPERFORM);
-+
-+ /* Only perform the transfer if there's a good socket to work with.
-+ Having both BAD is a signal to skip immediately to DONE */
-+ if((data->easy_conn->sockfd != CURL_SOCKET_BAD) ||
-+ (data->easy_conn->writesockfd != CURL_SOCKET_BAD))
-+ multistate(data, CURLM_STATE_WAITPERFORM);
-+ else
-+ multistate(data, CURLM_STATE_DONE);
- result = CURLM_CALL_MULTI_PERFORM;
- break;
-
---
-1.8.5.2
-
Deleted: 0001-connect-Try-all-addresses-in-first-connection-attemp.patch
===================================================================
--- 0001-connect-Try-all-addresses-in-first-connection-attemp.patch 2014-01-29 13:51:54 UTC (rev 204853)
+++ 0001-connect-Try-all-addresses-in-first-connection-attemp.patch 2014-01-29 14:43:13 UTC (rev 204854)
@@ -1,47 +0,0 @@
-From 4e1ece2e44f432c2614f2090155c0aaf2226ea80 Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Bj=C3=B6rn=20Stenberg?= <bjorn at haxx.se>
-Date: Sat, 28 Dec 2013 13:42:57 +0100
-Subject: [PATCH] connect: Try all addresses in first connection attempt
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-Fixes a bug where, when all addresses in the first family fail immediately (due
-to "Network unreachable", for example), curl would hang and never try the
-next address family.
-
-Iterate through all address families when trying to establish the first
-connection attempt.
-
-Bug: http://curl.haxx.se/bug/view.cgi?id=1315
-Reported-by: Michal Górny and Anthony G. Basile
----
- lib/connect.c | 12 ++++++------
- 1 file changed, 6 insertions(+), 6 deletions(-)
-
-diff --git a/lib/connect.c b/lib/connect.c
-index 4b6ee00..588ac28 100644
---- a/lib/connect.c
-+++ b/lib/connect.c
-@@ -1104,12 +1104,12 @@ CURLcode Curl_connecthost(struct connectdata *conn, /* context */
- conn->tempaddr[0]->ai_next == NULL ? timeout_ms : timeout_ms / 2;
-
- /* start connecting to first IP */
-- res = singleipconnect(conn, conn->tempaddr[0], &(conn->tempsock[0]));
-- while(res != CURLE_OK &&
-- conn->tempaddr[0] &&
-- conn->tempaddr[0]->ai_next &&
-- conn->tempsock[0] == CURL_SOCKET_BAD)
-- res = trynextip(conn, FIRSTSOCKET, 0);
-+ while(conn->tempaddr[0]) {
-+ res = singleipconnect(conn, conn->tempaddr[0], &(conn->tempsock[0]));
-+ if(res == CURLE_OK)
-+ break;
-+ conn->tempaddr[0] = conn->tempaddr[0]->ai_next;
-+ }
-
- if(conn->tempsock[0] == CURL_SOCKET_BAD)
- return res;
---
-1.8.5.2
-
Modified: PKGBUILD
===================================================================
--- PKGBUILD 2014-01-29 13:51:54 UTC (rev 204853)
+++ PKGBUILD 2014-01-29 14:43:13 UTC (rev 204854)
@@ -6,8 +6,8 @@
# Contributor: Daniel J Griffiths <ghost1227 at archlinux.us>
pkgname=curl
-pkgver=7.34.0
-pkgrel=3
+pkgver=7.35.0
+pkgrel=1
pkgdesc="An URL retrieval utility and library"
arch=('i686' 'x86_64')
url="http://curl.haxx.se"
@@ -16,22 +16,11 @@
provides=('libcurl.so')
options=('strip' 'debug')
source=("http://curl.haxx.se/download/$pkgname-$pkgver.tar.gz"{,.asc}
- 0001-FILE-don-t-wait-due-to-CURLOPT_MAX_RECV_SPEED_LARGE.patch
- 0001-connect-Try-all-addresses-in-first-connection-attemp.patch
curlbuild.h)
-md5sums=('d5b7edccbd1793e3549842e01331da20'
+md5sums=('f5ae45ed6e86debb721b68392b5ce13c'
'SKIP'
- 'ba766acdb7568aac0b23d479ebecd591'
- 'ee755b0e886207fc1e8154e426c46ca7'
'751bd433ede935c8fae727377625a8ae')
-prepare() {
- cd "$pkgname-$pkgver"
-
- patch -Np1 <"$srcdir"/0001-connect-Try-all-addresses-in-first-connection-attemp.patch
- patch -Np1 <"$srcdir"/0001-FILE-don-t-wait-due-to-CURLOPT_MAX_RECV_SPEED_LARGE.patch
-}
-
build() {
cd "$pkgname-$pkgver"
More information about the arch-commits
mailing list