author    Richard Purdie <rpurdie@linux.intel.com>  2010-09-30 21:35:20 +0100
committer Richard Purdie <rpurdie@linux.intel.com>  2010-09-30 22:16:10 +0100
commit    c09cae578e5568c0ac975124db31f9cac05d50e9 (patch)
tree      1183a51498c1d2c7874ea0d3741c4f70dbfc66ef /meta/recipes-devtools/python/python-urlgrabber
parent    a51df11c1596746c85b015562ed67f37382b88b5 (diff)
Move prism-firmware, spectrum-fw, python-urlgrabber, python-iniparse and yum-metadata to meta-extras
Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
Diffstat (limited to 'meta/recipes-devtools/python/python-urlgrabber')
-rw-r--r--  meta/recipes-devtools/python/python-urlgrabber/urlgrabber-3.0.0-cleanup.patch |  28
-rw-r--r--  meta/recipes-devtools/python/python-urlgrabber/urlgrabber-HEAD.patch          | 142
-rw-r--r--  meta/recipes-devtools/python/python-urlgrabber/urlgrabber-reset.patch         |  15
3 files changed, 0 insertions, 185 deletions
diff --git a/meta/recipes-devtools/python/python-urlgrabber/urlgrabber-3.0.0-cleanup.patch b/meta/recipes-devtools/python/python-urlgrabber/urlgrabber-3.0.0-cleanup.patch
deleted file mode 100644
index 7a1ee059d..000000000
--- a/meta/recipes-devtools/python/python-urlgrabber/urlgrabber-3.0.0-cleanup.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-diff -up urlgrabber-3.0.0/urlgrabber/grabber.py.cleanup urlgrabber-3.0.0/urlgrabber/grabber.py
---- urlgrabber-3.0.0/urlgrabber/grabber.py.cleanup 2007-11-29 10:25:13.000000000 +0000
-+++ urlgrabber-3.0.0/urlgrabber/grabber.py 2007-11-29 10:26:15.000000000 +0000
-@@ -1204,16 +1204,18 @@ class URLGrabberFileObject:
- bs = 1024*8
- size = 0
-
-- if amount is not None: bs = min(bs, amount - size)
-- block = self.read(bs)
-- size = size + len(block)
-- while block:
-- new_fo.write(block)
-+ try:
- if amount is not None: bs = min(bs, amount - size)
- block = self.read(bs)
- size = size + len(block)
-+ while block:
-+ new_fo.write(block)
-+ if amount is not None: bs = min(bs, amount - size)
-+ block = self.read(bs)
-+ size = size + len(block)
-+ finally:
-+ new_fo.close()
-
-- new_fo.close()
- try:
- modified_tuple = self.hdr.getdate_tz('last-modified')
- modified_stamp = rfc822.mktime_tz(modified_tuple)
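
The deleted cleanup patch above wraps the copy loop in try/finally so the destination file is closed even when read() or write() raises mid-copy. A minimal standalone sketch of that pattern (illustrative names like copy_to and src, not code from the commit):

def copy_to(src, new_fo, amount=None):
    """Copy up to `amount` bytes from src to new_fo, closing new_fo no
    matter how the loop exits -- the guarantee the patch adds."""
    bs = 1024 * 8
    size = 0
    try:
        if amount is not None:
            bs = min(bs, amount - size)
        block = src.read(bs)
        size += len(block)
        while block:
            new_fo.write(block)
            if amount is not None:
                bs = min(bs, amount - size)
            block = src.read(bs)
            size += len(block)
    finally:
        new_fo.close()  # runs on success, exception, or KeyboardInterrupt
    return size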
diff --git a/meta/recipes-devtools/python/python-urlgrabber/urlgrabber-HEAD.patch b/meta/recipes-devtools/python/python-urlgrabber/urlgrabber-HEAD.patch
deleted file mode 100644
index 90180d29a..000000000
--- a/meta/recipes-devtools/python/python-urlgrabber/urlgrabber-HEAD.patch
+++ /dev/null
@@ -1,142 +0,0 @@
-diff --git a/urlgrabber/grabber.py b/urlgrabber/grabber.py
-index e090e90..a26880c 100644
---- a/urlgrabber/grabber.py
-+++ b/urlgrabber/grabber.py
-@@ -439,6 +439,12 @@ try:
- except:
- __version__ = '???'
-
-+try:
-+ # this part isn't going to do much - need to talk to gettext
-+ from i18n import _
-+except ImportError, msg:
-+ def _(st): return st
-+
- ########################################################################
- # functions for debugging output. These functions are here because they
- # are also part of the module initialization.
-@@ -1052,7 +1058,8 @@ class PyCurlFileObject():
- self._reget_length = 0
- self._prog_running = False
- self._error = (None, None)
-- self.size = None
-+ self.size = 0
-+ self._hdr_ended = False
- self._do_open()
-
-
-@@ -1085,9 +1092,14 @@ class PyCurlFileObject():
- return -1
-
- def _hdr_retrieve(self, buf):
-+ if self._hdr_ended:
-+ self._hdr_dump = ''
-+ self.size = 0
-+ self._hdr_ended = False
-+
- if self._over_max_size(cur=len(self._hdr_dump),
- max_size=self.opts.max_header_size):
-- return -1
-+ return -1
- try:
- self._hdr_dump += buf
- # we have to get the size before we do the progress obj start
-@@ -1104,7 +1116,17 @@ class PyCurlFileObject():
- s = parse150(buf)
- if s:
- self.size = int(s)
--
-+
-+ if buf.lower().find('location') != -1:
-+ location = ':'.join(buf.split(':')[1:])
-+ location = location.strip()
-+ self.scheme = urlparse.urlsplit(location)[0]
-+ self.url = location
-+
-+ if len(self._hdr_dump) != 0 and buf == '\r\n':
-+ self._hdr_ended = True
-+ if DEBUG: DEBUG.info('header ended:')
-+
- return len(buf)
- except KeyboardInterrupt:
- return pycurl.READFUNC_ABORT
-@@ -1136,6 +1158,7 @@ class PyCurlFileObject():
- self.curl_obj.setopt(pycurl.PROGRESSFUNCTION, self._progress_update)
- self.curl_obj.setopt(pycurl.FAILONERROR, True)
- self.curl_obj.setopt(pycurl.OPT_FILETIME, True)
-+ self.curl_obj.setopt(pycurl.FOLLOWLOCATION, True)
-
- if DEBUG:
- self.curl_obj.setopt(pycurl.VERBOSE, True)
-@@ -1291,7 +1314,12 @@ class PyCurlFileObject():
- raise err
-
- elif str(e.args[1]) == '' and self.http_code != 0: # fake it until you make it
-- msg = 'HTTP Error %s : %s ' % (self.http_code, self.url)
-+ if self.scheme in ['http', 'https']:
-+ msg = 'HTTP Error %s : %s ' % (self.http_code, self.url)
-+ elif self.scheme in ['ftp']:
-+ msg = 'FTP Error %s : %s ' % (self.http_code, self.url)
-+ else:
-+ msg = "Unknown Error: URL=%s , scheme=%s" % (self.url, self.scheme)
- else:
- msg = 'PYCURL ERROR %s - "%s"' % (errcode, str(e.args[1]))
- code = errcode
-@@ -1299,6 +1327,12 @@ class PyCurlFileObject():
- err.code = code
- err.exception = e
- raise err
-+ else:
-+ if self._error[1]:
-+ msg = self._error[1]
-+ err = URLGrabError(14, msg)
-+ err.url = self.url
-+ raise err
-
- def _do_open(self):
- self.curl_obj = _curl_cache
-@@ -1532,11 +1566,14 @@ class PyCurlFileObject():
- def _over_max_size(self, cur, max_size=None):
-
- if not max_size:
-- max_size = self.size
-- if self.opts.size: # if we set an opts size use that, no matter what
-- max_size = self.opts.size
-+ if not self.opts.size:
-+ max_size = self.size
-+ else:
-+ max_size = self.opts.size
-+
- if not max_size: return False # if we have None for all of the Max then this is dumb
-- if cur > max_size + max_size*.10:
-+
-+ if cur > int(float(max_size) * 1.10):
-
- msg = _("Downloaded more than max size for %s: %s > %s") \
- % (self.url, cur, max_size)
-@@ -1582,7 +1619,11 @@ class PyCurlFileObject():
- self.opts.progress_obj.end(self._amount_read)
- self.fo.close()
-
--
-+ def geturl(self):
-+ """ Provide the geturl() method, used to be got from
-+ urllib.addinfourl, via. urllib.URLopener.* """
-+ return self.url
-+
- _curl_cache = pycurl.Curl() # make one and reuse it over and over and over
-
-
-diff --git a/urlgrabber/progress.py b/urlgrabber/progress.py
-index dd07c6a..45eb248 100644
---- a/urlgrabber/progress.py
-+++ b/urlgrabber/progress.py
-@@ -658,6 +658,8 @@ def format_time(seconds, use_hours=0):
- if seconds is None or seconds < 0:
- if use_hours: return '--:--:--'
- else: return '--:--'
-+ elif seconds == float('inf'):
-+ return 'Infinite'
- else:
- seconds = int(seconds)
- minutes = seconds / 60
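
Most of the HEAD patch above teaches the pycurl-based grabber to cope with redirects: it enables FOLLOWLOCATION, watches Location headers, and resets the header state at each blank line so the final URL and scheme are known for error reporting. A hedged, self-contained sketch of that header-callback technique (Python 3 pycurl; the class name and example.com URL are illustrative, not the commit's code):

import io
import pycurl
from urllib.parse import urlsplit

class HeaderTracker:
    """Track redirects the way the patch does: watch 'Location:' headers
    and the blank line that ends each header block, so a redirected
    transfer reports its final URL and scheme."""
    def __init__(self, url):
        self.url = url
        self.scheme = urlsplit(url)[0]
        self._hdr_dump = ''
        self._hdr_ended = False

    def header(self, buf):
        line = buf.decode('iso-8859-1')  # pycurl delivers bytes in Python 3
        if self._hdr_ended:
            # a new header block (e.g. after a redirect) resets the state
            self._hdr_dump = ''
            self._hdr_ended = False
        self._hdr_dump += line
        if line.lower().startswith('location'):
            location = ':'.join(line.split(':')[1:]).strip()
            self.scheme = urlsplit(location)[0]
            self.url = location
        if self._hdr_dump and line == '\r\n':
            self._hdr_ended = True

body = io.BytesIO()
tracker = HeaderTracker('http://example.com/')
c = pycurl.Curl()
c.setopt(pycurl.URL, tracker.url)
c.setopt(pycurl.FOLLOWLOCATION, True)   # the option the patch adds
c.setopt(pycurl.HEADERFUNCTION, tracker.header)
c.setopt(pycurl.WRITEDATA, body)
c.perform()
c.close()
print(tracker.url)  # final URL after any redirects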
diff --git a/meta/recipes-devtools/python/python-urlgrabber/urlgrabber-reset.patch b/meta/recipes-devtools/python/python-urlgrabber/urlgrabber-reset.patch
deleted file mode 100644
index b63e7c33a..000000000
--- a/meta/recipes-devtools/python/python-urlgrabber/urlgrabber-reset.patch
+++ /dev/null
@@ -1,15 +0,0 @@
---- a/urlgrabber/grabber.py 2010-02-19 14:50:45.000000000 -0500
-+++ b/urlgrabber/grabber.py 2010-02-19 14:51:28.000000000 -0500
-@@ -1626,6 +1626,12 @@
-
- _curl_cache = pycurl.Curl() # make one and reuse it over and over and over
-
-+def reset_curl_obj():
-+ """To make sure curl has reread the network/dns info we force a reload"""
-+ global _curl_cache
-+ _curl_cache.close()
-+ _curl_cache = pycurl.Curl()
-+
-
- #####################################################################
- # DEPRECATED FUNCTIONS
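
The reset patch above exists because libcurl caches DNS results and connections inside an easy handle, and the module reuses one handle for every request. A short sketch restating that pattern with explanatory comments (the reset trigger at the end is illustrative):

import pycurl

# One shared easy handle, reused for every request, as the module does.
# libcurl caches DNS lookups and connections per handle, so a long-lived
# handle can keep resolving to stale addresses after the network changes.
_curl_cache = pycurl.Curl()

def reset_curl_obj():
    """Close and recreate the cached handle so curl rereads network/DNS info."""
    global _curl_cache
    _curl_cache.close()
    _curl_cache = pycurl.Curl()

# Illustrative trigger: after a VPN comes up or resolv.conf changes, reset
# so the next download resolves hostnames afresh.
reset_curl_obj()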