author    Richard Purdie <richard.purdie@linuxfoundation.org>  2011-02-07 12:08:32 +0000
committer Richard Purdie <richard.purdie@linuxfoundation.org>  2011-02-07 16:52:42 +0000
commit    37624b97450f2ba3d6fad3e1e51818486451447e (patch)
tree      6d5286aeb128da853f829ee28923005fb29405fe /bitbake/lib
parent    1d3fdc85c64b07a228bcf2a370024c250f6e6623 (diff)
bitbake/fetch2: Update forcefetch and mirror handling to clean up, simplify and fix bugs in the code
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake/lib')
 bitbake/lib/bb/fetch2/__init__.py | 119
 bitbake/lib/bb/fetch2/cvs.py      |   4
 bitbake/lib/bb/fetch2/git.py      |  11
 bitbake/lib/bb/fetch2/hg.py       |   8
 bitbake/lib/bb/fetch2/svk.py      |   8
 5 files changed, 67 insertions(+), 83 deletions(-)
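
At its core the change renames the per-fetcher forcefetch() hook to need_update() and inverts its default: instead of each call site checking for an existing download, the method itself answers "does the local copy need (re)fetching?". A minimal standalone sketch of that default, using only the localpath attribute of the FetchData object (the surrounding class here is illustrative, not the real FetchMethod):

    import os

    class FetchMethodSketch:
        def need_update(self, url, ud, d):
            # Base behaviour after this commit: an existing ud.localpath
            # counts as up to date. Fetchers tracking floating revisions
            # (cvs date "now", hg "tip", svk "now") override this to also
            # return True in those cases.
            return not os.path.exists(ud.localpath)
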
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 282713f40..9b378a85e 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -282,15 +282,6 @@ def subprocess_setup():
# SIGPIPE errors are known issues with gzip/bash
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-def download_update(result, target):
- if os.path.exists(target):
- return
- if not result or not os.path.exists(result):
- return
- if target != result:
- os.symlink(result, target)
- return
-
def get_autorev(d):
# only not cache src rev in autorev case
if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
@@ -401,7 +392,7 @@ def check_network_access(d, info = ""):
else:
logger.debug(1, "Fetcher accessed the network with the command %s" % info)
-def try_mirrors(d, uri, mirrors, check = False, force = False):
+def try_mirrors(d, uri, mirrors, check = False):
"""
Try to use a mirrored version of the sources.
This method will be automatically called before the fetchers go.
@@ -410,41 +401,31 @@ def try_mirrors(d, uri, mirrors, check = False, force = False):
uri is the original uri we're trying to download
mirrors is the list of mirrors we're going to try
"""
- fpath = os.path.join(data.getVar("DL_DIR", d, True), os.path.basename(uri))
- if not check and os.access(fpath, os.R_OK) and not force:
- logger.debug(1, "%s already exists, skipping checkout.", fpath)
- return fpath
-
ld = d.createCopy()
for (find, replace) in mirrors:
newuri = uri_replace(uri, find, replace, ld)
- if newuri != uri:
- try:
- ud = FetchData(newuri, ld)
- except bb.fetch2.NoMethodError:
- logger.debug(1, "No method for %s", uri)
- continue
-
+ if newuri == uri:
+ continue
+ try:
+ ud = FetchData(newuri, ld)
ud.setup_localpath(ld)
- try:
- if check:
- found = ud.method.checkstatus(newuri, ud, ld)
- if found:
- return found
- else:
- ud.method.download(newuri, ud, ld)
- if hasattr(ud.method,"build_mirror_data"):
- ud.method.build_mirror_data(newuri, ud, ld)
+ if check:
+ found = ud.method.checkstatus(newuri, ud, ld)
+ if found:
+ return found
+ else:
+ if not ud.method.need_update(newuri, ud, ld):
return ud.localpath
- except (bb.fetch2.MissingParameterError,
- bb.fetch2.FetchError,
- bb.fetch2.MD5SumError):
- import sys
- (type, value, traceback) = sys.exc_info()
- logger.debug(2, "Mirror fetch failure: %s", value)
- bb.utils.remove(ud.localpath)
- continue
+ ud.method.download(newuri, ud, ld)
+ if hasattr(ud.method,"build_mirror_data"):
+ ud.method.build_mirror_data(newuri, ud, ld)
+ return ud.localpath
+
+ except bb.fetch2.BBFetchException:
+ logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, uri))
+ bb.utils.remove(ud.localpath)
+ continue
return None
def srcrev_internal_helper(ud, d, name):
@@ -481,6 +462,7 @@ class FetchData(object):
A class which represents the fetcher state for a given URI.
"""
def __init__(self, url, d):
+ # localpath is the location of a downloaded result. If not set, the file is local.
self.localfile = ""
self.localpath = None
self.lockfile = None
@@ -594,11 +576,13 @@ class FetchMethod(object):
urls = property(getUrls, setUrls, None, "Urls property")
- def forcefetch(self, url, urldata, d):
+ def need_update(self, url, ud, d):
"""
Force a fetch, even if localpath exists?
"""
- return False
+ if os.path.exists(ud.localpath):
+ return False
+ return True
def supports_srcrev(self):
"""
@@ -694,12 +678,7 @@ class FetchMethod(object):
"""
Should premirrors be used?
"""
- if urldata.method.forcefetch(url, urldata, d):
- return True
- elif os.path.exists(urldata.donestamp) and os.path.exists(urldata.localfile):
- return False
- else:
- return True
+ return True
def checkstatus(self, url, urldata, d):
"""
@@ -842,36 +821,32 @@ class Fetch(object):
lf = bb.utils.lockfile(ud.lockfile)
- if m.try_premirror(u, ud, self.d):
- # First try fetching uri, u, from PREMIRRORS
+ if not m.need_update(u, ud, self.d):
+ localpath = ud.localpath
+ elif m.try_premirror(u, ud, self.d):
mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
- localpath = try_mirrors(self.d, u, mirrors, False, m.forcefetch(u, ud, self.d))
- elif os.path.exists(ud.localfile):
- localpath = ud.localfile
-
- download_update(localpath, ud.localpath)
-
- # Need to re-test forcefetch() which will return true if our copy is too old
- if m.forcefetch(u, ud, self.d) or not localpath:
- # Next try fetching from the original uri, u
- try:
- m.download(u, ud, self.d)
- if hasattr(m, "build_mirror_data"):
- m.build_mirror_data(u, ud, self.d)
- localpath = ud.localpath
- download_update(localpath, ud.localpath)
-
- except FetchError:
- # Remove any incomplete file
- bb.utils.remove(ud.localpath)
- # Finally, try fetching uri, u, from MIRRORS
- mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
- localpath = try_mirrors (self.d, u, mirrors)
+ localpath = try_mirrors(self.d, u, mirrors, False)
+
+ if bb.data.getVar("BB_FETCH_PREMIRRORONLY", self.d, True) is None:
+ if not localpath and m.need_update(u, ud, self.d):
+ try:
+ m.download(u, ud, self.d)
+ if hasattr(m, "build_mirror_data"):
+ m.build_mirror_data(u, ud, self.d)
+ localpath = ud.localpath
+
+ except BBFetchException:
+ # Remove any incomplete file
+ bb.utils.remove(ud.localpath)
+ mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
+ localpath = try_mirrors (self.d, u, mirrors)
if not localpath or not os.path.exists(localpath):
raise FetchError("Unable to fetch URL %s from any source." % u, u)
- download_update(localpath, ud.localpath)
+ # The local fetcher can return an alternate path so we symlink
+ if os.path.exists(localpath) and not os.path.exists(ud.localpath):
+ os.symlink(localpath, ud.localpath)
if os.path.exists(ud.donestamp):
# Touch the done stamp file to show active use of the download
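
Taken together, the rewritten per-URL loop in Fetch.download() now reads roughly as follows. This is a condensed, hypothetical restatement, assuming the mirror lists and helpers are handed in as plain values and callables; in bitbake they are module-level functions and datastore lookups, and the real code additionally calls build_mirror_data() and handles locking and donestamps:

    import os

    def fetch_one(m, u, ud, d, try_mirrors, premirrors, mirrors, remove):
        """Sketch of the new control flow for a single URL (not the real API)."""
        localpath = None
        if not m.need_update(u, ud, d):
            localpath = ud.localpath                         # local copy is current
        elif m.try_premirror(u, ud, d):
            localpath = try_mirrors(d, u, premirrors, False)  # 1. PREMIRRORS

        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is None:
            if not localpath and m.need_update(u, ud, d):
                try:
                    m.download(u, ud, d)                      # 2. original upstream URL
                    localpath = ud.localpath
                except Exception:                             # bitbake catches BBFetchException
                    remove(ud.localpath)                      # drop any incomplete file
                    localpath = try_mirrors(d, u, mirrors)    # 3. MIRRORS

        if not localpath or not os.path.exists(localpath):
            raise RuntimeError("Unable to fetch URL %s from any source." % u)
        if not os.path.exists(ud.localpath):
            # The local fetcher can return an alternate path, so symlink it.
            os.symlink(localpath, ud.localpath)
        return localpath
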
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
index b77e742c3..69c31e756 100644
--- a/bitbake/lib/bb/fetch2/cvs.py
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -65,9 +65,11 @@ class Cvs(FetchMethod):
ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
- def forcefetch(self, url, ud, d):
+ def need_update(self, url, ud, d):
if (ud.date == "now"):
return True
+ if not os.path.exists(ud.localpath):
+ return True
return False
def download(self, loc, ud, d):
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index 55c66cf49..6bcc4a483 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -80,7 +80,7 @@ class Git(FetchMethod):
def localpath(self, url, ud, d):
return ud.clonedir
- def forcefetch(self, url, ud, d):
+ def need_update(self, u, ud, d):
if not os.path.exists(ud.clonedir):
return True
os.chdir(ud.clonedir)
@@ -90,13 +90,12 @@ class Git(FetchMethod):
return False
def try_premirror(self, u, ud, d):
- if 'noclone' in ud.parm:
- return False
+ # If we don't do this, updating an existing checkout with only premirrors
+ # is not possible
+ if bb.data.getVar("BB_FETCH_PREMIRRORONLY", d, True) is not None:
+ return True
if os.path.exists(ud.clonedir):
return False
- if os.path.exists(ud.localpath):
- return False
-
return True
def download(self, loc, ud, d):
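
The git change above also adjusts try_premirror(): when BB_FETCH_PREMIRRORONLY is set the premirror is always consulted, otherwise an existing clone short-circuits it. A tiny sketch of that decision with the inputs flattened to plain arguments (the real method takes (self, u, ud, d) and reads the variable from the datastore):

    import os

    def git_try_premirror(clonedir, premirror_only):
        if premirror_only:
            # Without this, an existing checkout could never be refreshed
            # when only premirrors may be used.
            return True
        # A clone already on disk does not need the premirror tarball.
        return not os.path.exists(clonedir)
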
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
index d186b009d..ac5825baa 100644
--- a/bitbake/lib/bb/fetch2/hg.py
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -64,9 +64,13 @@ class Hg(FetchMethod):
ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
- def forcefetch(self, url, ud, d):
+ def need_update(self, url, ud, d):
revTag = ud.parm.get('rev', 'tip')
- return revTag == "tip"
+ if revTag == "tip":
+ return True
+ if not os.path.exists(ud.localpath):
+ return True
+ return False
def _buildhgcommand(self, ud, d, command):
"""
diff --git a/bitbake/lib/bb/fetch2/svk.py b/bitbake/lib/bb/fetch2/svk.py
index 70f72c80a..117a22f43 100644
--- a/bitbake/lib/bb/fetch2/svk.py
+++ b/bitbake/lib/bb/fetch2/svk.py
@@ -53,8 +53,12 @@ class Svk(FetchMethod):
ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
- def forcefetch(self, url, ud, d):
- return ud.date == "now"
+ def need_update(self, url, ud, d):
+ if ud.date == "now":
+ return True
+ if not os.path.exists(ud.localpath):
+ return True
+ return False
def download(self, loc, ud, d):
"""Fetch urls"""