You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
python-urlgrabber/curl-timeout-head.patch

49 lines
1.9 KiB

commit 81d5f0fe31eb840f5e9b49afbb319d80ab03d107
Author: Seth Vidal <skvidal@fedoraproject.org>
Date: Tue Aug 4 11:57:49 2009 -0400
handle timeouts more correctly (with the exception)
and set timeouts to be connect timeouts since libcurl seems to actually honor timeouts - as opposed
to urllib.
closes rh bug # 515497
diff --git a/urlgrabber/grabber.py b/urlgrabber/grabber.py
index cf51dff..5e3c0d7 100644
--- a/urlgrabber/grabber.py
+++ b/urlgrabber/grabber.py
@@ -1542,13 +1542,12 @@ class PyCurlFileObject():
# maybe to be options later
self.curl_obj.setopt(pycurl.FOLLOWLOCATION, 1)
self.curl_obj.setopt(pycurl.MAXREDIRS, 5)
- self.curl_obj.setopt(pycurl.CONNECTTIMEOUT, 30)
# timeouts
timeout = 300
if opts.timeout:
- timeout = int(opts.timeout)
- self.curl_obj.setopt(pycurl.TIMEOUT, timeout)
+ self.curl_obj.setopt(pycurl.CONNECTTIMEOUT, timeout)
+
# ssl options
if self.scheme == 'https':
if opts.ssl_ca_cert: # this may do ZERO with nss according to curl docs
@@ -1607,12 +1606,17 @@ class PyCurlFileObject():
# to other URLGrabErrors from
# http://curl.haxx.se/libcurl/c/libcurl-errors.html
# this covers e.args[0] == 22 pretty well - which will be common
+ if e.args[0] == 28:
+ err = URLGrabError(12, _('Timeout on %s: %s') % (self.url, e))
+ err.url = self.url
+ raise err
+ code = self.http_code
if str(e.args[1]) == '': # fake it until you make it
msg = 'HTTP Error %s : %s ' % (self.http_code, self.url)
else:
msg = str(e.args[1])
err = URLGrabError(14, msg)
- err.code = self.http_code
+ err.code = code
err.exception = e
raise err