author     Richard Purdie <richard@openedhand.com>  2006-11-16 15:02:15 +0000
committer  Richard Purdie <richard@openedhand.com>  2006-11-16 15:02:15 +0000
commit     306b7c7a9757ead077363074e7bbac2e5c03e7c5 (patch)
tree       6935017a9af749c46816881c86258f514384ba1c /bitbake/lib/bb/fetch
parent     65930a38e415ae4a0182e1cea1be838e0ada50ee (diff)
bitbake: Upgrade from 1.4 -> 1.7.4ish
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@863 311d38ba-8fff-0310-9ca6-ca027cbcb966
Diffstat (limited to 'bitbake/lib/bb/fetch')
-rw-r--r--  bitbake/lib/bb/fetch/__init__.py  167
-rw-r--r--  bitbake/lib/bb/fetch/cvs.py       255
-rw-r--r--  bitbake/lib/bb/fetch/git.py       144
-rw-r--r--  bitbake/lib/bb/fetch/local.py      18
-rw-r--r--  bitbake/lib/bb/fetch/perforce.py  213
-rw-r--r--  bitbake/lib/bb/fetch/ssh.py        94
-rw-r--r--  bitbake/lib/bb/fetch/svk.py       160
-rw-r--r--  bitbake/lib/bb/fetch/svn.py       203
-rw-r--r--  bitbake/lib/bb/fetch/wget.py      152
9 files changed, 728 insertions, 678 deletions
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 7ab059076..24aebc41c 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -38,13 +38,16 @@ class NoMethodError(Exception):
class MissingParameterError(Exception):
"""Exception raised when a fetch method is missing a critical parameter in the url"""
+class ParameterError(Exception):
+ """Exception raised when a url cannot be proccessed due to invalid parameters."""
+
class MD5SumError(Exception):
"""Exception raised when a MD5SUM of a file does not match the expected one"""
def uri_replace(uri, uri_find, uri_replace, d):
-# bb.note("uri_replace: operating on %s" % uri)
+# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
if not uri or not uri_find or not uri_replace:
- bb.debug(1, "uri_replace: passed an undefined value, not replacing")
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
uri_decoded = list(bb.decodeurl(uri))
uri_find_decoded = list(bb.decodeurl(uri_find))
uri_replace_decoded = list(bb.decodeurl(uri_replace))
@@ -62,9 +65,9 @@ def uri_replace(uri, uri_find, uri_replace, d):
localfn = bb.fetch.localpath(uri, d)
if localfn:
result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d))
-# bb.note("uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
+# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
else:
-# bb.note("uri_replace: no match")
+# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match")
return uri
# else:
# for j in i.keys():
@@ -72,62 +75,94 @@ def uri_replace(uri, uri_find, uri_replace, d):
return bb.encodeurl(result_decoded)
methods = []
+urldata = {}
def init(urls = [], d = None):
if d == None:
- bb.debug(2,"BUG init called with None as data object!!!")
+ bb.msg.debug(2, bb.msg.domain.Fetcher, "BUG init called with None as data object!!!")
return
for m in methods:
m.urls = []
for u in urls:
+ ud = initdata(u, d)
+ if ud.method:
+ ud.method.urls.append(u)
+
+def initdata(url, d):
+ if url not in urldata:
+ ud = FetchData()
+ (ud.type, ud.host, ud.path, ud.user, ud.pswd, ud.parm) = bb.decodeurl(data.expand(url, d))
+ ud.date = Fetch.getSRCDate(d)
for m in methods:
- m.data = d
- if m.supports(u, d):
- m.urls.append(u)
+ if m.supports(url, ud, d):
+ ud.localpath = m.localpath(url, ud, d)
+ ud.md5 = ud.localpath + '.md5'
+ # if user sets localpath for file, use it instead.
+ if "localpath" in ud.parm:
+ ud.localpath = ud.parm["localpath"]
+ ud.method = m
+ break
+ urldata[url] = ud
+ return urldata[url]
def go(d):
"""Fetch all urls"""
for m in methods:
- if m.urls:
- m.go(d)
+ for u in m.urls:
+ ud = urldata[u]
+ if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(urldata[u].md5):
+ # File already present along with md5 stamp file
+ # Touch md5 file to show activity
+ os.utime(ud.md5, None)
+ continue
+ # RP - is olddir needed?
+ # olddir = os.path.abspath(os.getcwd())
+ m.go(u, ud , d)
+ # os.chdir(olddir)
+ if ud.localfile and not m.forcefetch(u, ud, d):
+ Fetch.write_md5sum(u, ud, d)
def localpaths(d):
"""Return a list of the local filenames, assuming successful fetch"""
local = []
for m in methods:
for u in m.urls:
- local.append(m.localpath(u, d))
+ local.append(urldata[u].localpath)
return local
def localpath(url, d):
- for m in methods:
- if m.supports(url, d):
- return m.localpath(url, d)
+ ud = initdata(url, d)
+ if ud.method:
+ return ud.localpath
return url
+class FetchData(object):
+ """Class for fetcher variable store"""
+ def __init__(self):
+ self.localfile = ""
+
+
class Fetch(object):
"""Base class for 'fetch'ing data"""
def __init__(self, urls = []):
self.urls = []
- for url in urls:
- if self.supports(bb.decodeurl(url), d) is 1:
- self.urls.append(url)
- def supports(url, d):
- """Check to see if this fetch class supports a given url.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ def supports(self, url, urldata, d):
+ """
+ Check to see if this fetch class supports a given url.
"""
return 0
- supports = staticmethod(supports)
- def localpath(url, d):
- """Return the local filename of a given url assuming a successful fetch.
+ def localpath(self, url, urldata, d):
+ """
+ Return the local filename of a given url assuming a successful fetch.
+ Can also setup variables in urldata for use in go (saving code duplication
+ and duplicate code execution)
"""
return url
- localpath = staticmethod(localpath)
def setUrls(self, urls):
self.__urls = urls
@@ -137,16 +172,17 @@ class Fetch(object):
urls = property(getUrls, setUrls, None, "Urls property")
- def setData(self, data):
- self.__data = data
-
- def getData(self):
- return self.__data
-
- data = property(getData, setData, None, "Data property")
+ def forcefetch(self, url, urldata, d):
+ """
+ Force a fetch, even if localpath exists?
+ """
+ return False
- def go(self, urls = []):
- """Fetch urls"""
+ def go(self, url, urldata, d):
+ """
+ Fetch urls
+ Assumes localpath was called first
+ """
raise NoMethodError("Missing implementation for url")
def getSRCDate(d):
@@ -155,7 +191,12 @@ class Fetch(object):
d the bb.data module
"""
- return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1 )
+ pn = data.getVar("PN", d, 1)
+
+ if pn:
+ return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("DATE", d, 1)
+
+ return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
getSRCDate = staticmethod(getSRCDate)
def try_mirror(d, tarfn):
@@ -168,6 +209,11 @@ class Fetch(object):
d Is a bb.data instance
tarfn is the name of the tarball
"""
+ tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn)
+ if os.access(tarpath, os.R_OK):
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % tarfn)
+ return True
+
pn = data.getVar('PN', d, True)
src_tarball_stash = None
if pn:
@@ -176,36 +222,45 @@ class Fetch(object):
for stash in src_tarball_stash:
fetchcmd = data.getVar("FETCHCOMMAND_mirror", d, True) or data.getVar("FETCHCOMMAND_wget", d, True)
uri = stash + tarfn
- bb.note("fetch " + uri)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
fetchcmd = fetchcmd.replace("${URI}", uri)
ret = os.system(fetchcmd)
if ret == 0:
- bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Fetched %s from tarball stash, skipping checkout" % tarfn)
return True
return False
try_mirror = staticmethod(try_mirror)
- def check_for_tarball(d, tarfn, dldir, date):
+ def verify_md5sum(ud, got_sum):
"""
- Check for a local copy then check the tarball stash.
- Both checks are skipped if date == 'now'.
-
- d Is a bb.data instance
- tarfn is the name of the tarball
- date is the SRCDATE
+ Verify the md5sum we wanted with the one we got
"""
- if "now" != date:
- dl = os.path.join(dldir, tarfn)
- if os.access(dl, os.R_OK):
- bb.debug(1, "%s already exists, skipping checkout." % tarfn)
- return True
-
- # try to use the tarball stash
- if Fetch.try_mirror(d, tarfn):
- return True
- return False
- check_for_tarball = staticmethod(check_for_tarball)
-
+ wanted_sum = None
+ if 'md5sum' in ud.parm:
+ wanted_sum = ud.parm['md5sum']
+ if not wanted_sum:
+ return True
+
+ return wanted_sum == got_sum
+ verify_md5sum = staticmethod(verify_md5sum)
+
+ def write_md5sum(url, ud, d):
+ if bb.which(data.getVar('PATH', d), 'md5sum'):
+ try:
+ md5pipe = os.popen('md5sum ' + ud.localpath)
+ md5data = (md5pipe.readline().split() or [ "" ])[0]
+ md5pipe.close()
+ except OSError:
+ md5data = ""
+
+ # verify the md5sum
+ if not Fetch.verify_md5sum(ud, md5data):
+ raise MD5SumError(url)
+
+ md5out = file(ud.md5, 'w')
+ md5out.write(md5data)
+ md5out.close()
+ write_md5sum = staticmethod(write_md5sum)
import cvs
import git
@@ -214,6 +269,7 @@ import svn
import wget
import svk
import ssh
+import perforce
methods.append(cvs.Cvs())
methods.append(git.Git())
@@ -222,3 +278,4 @@ methods.append(svn.Svn())
methods.append(wget.Wget())
methods.append(svk.Svk())
methods.append(ssh.SSH())
+methods.append(perforce.Perforce())
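
The refactored __init__.py above caches per-URL state in a FetchData object and uses a .md5 stamp file next to each download to decide whether a fetch can be skipped. The following self-contained Python sketch shows that pattern in isolation; the class and function names echo the diff, but the stand-in fetch method and paths are hypothetical and none of this is the real BitBake API.

# Illustrative sketch of the per-URL cache and md5-stamp pattern above.
import os

urldata = {}

class FetchData(object):
    """Per-URL variable store, filled in once and reused."""
    def __init__(self):
        self.localfile = ""
        self.localpath = None
        self.md5 = None
        self.method = None

class TouchFetcher(object):
    """Stand-in fetch method: 'fetches' by creating an empty file."""
    def supports(self, url, ud):
        return url.startswith("touch://")
    def localpath(self, url, ud, dldir):
        ud.localfile = url.split("://", 1)[1]
        return os.path.join(dldir, ud.localfile)
    def forcefetch(self, url, ud):
        return False
    def go(self, url, ud):
        open(ud.localpath, "w").close()
        open(ud.md5, "w").close()        # stamp marks a completed fetch

def initdata(url, dldir, methods):
    if url not in urldata:
        ud = FetchData()
        for m in methods:
            if m.supports(url, ud):
                ud.localpath = m.localpath(url, ud, dldir)
                ud.md5 = ud.localpath + ".md5"
                ud.method = m
                break
        urldata[url] = ud
    return urldata[url]

def go(urls, dldir, methods):
    for u in urls:
        ud = initdata(u, dldir, methods)
        if ud.localfile and not ud.method.forcefetch(u, ud) and os.path.exists(ud.md5):
            os.utime(ud.md5, None)       # already present: just refresh the stamp
            continue
        ud.method.go(u, ud)

go(["touch://example.tar.gz"], "/tmp", [TouchFetcher()])

In the real code, go() additionally writes the stamp via Fetch.write_md5sum() after a successful non-forced fetch, as shown in the hunk above.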
diff --git a/bitbake/lib/bb/fetch/cvs.py b/bitbake/lib/bb/fetch/cvs.py
index 0b2477560..3bdac177e 100644
--- a/bitbake/lib/bb/fetch/cvs.py
+++ b/bitbake/lib/bb/fetch/cvs.py
@@ -33,164 +33,119 @@ from bb.fetch import FetchError
from bb.fetch import MissingParameterError
class Cvs(Fetch):
- """Class to fetch a module or modules from cvs repositories"""
- def supports(url, d):
- """Check to see if a given url can be fetched with cvs.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ """
+ Class to fetch a module or modules from cvs repositories
+ """
+ def supports(self, url, ud, d):
"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- return type in ['cvs', 'pserver']
- supports = staticmethod(supports)
-
- def localpath(url, d):
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- if "localpath" in parm:
-# if user overrides local path, use it.
- return parm["localpath"]
+ Check to see if a given url can be fetched with cvs.
+ """
+ return ud.type in ['cvs', 'pserver']
- if not "module" in parm:
+ def localpath(self, url, ud, d):
+ if not "module" in ud.parm:
raise MissingParameterError("cvs method needs a 'module' parameter")
- else:
- module = parm["module"]
- if 'tag' in parm:
- tag = parm['tag']
- else:
- tag = ""
- if 'date' in parm:
- date = parm['date']
- else:
- if not tag:
- date = Fetch.getSRCDate(d)
- else:
- date = ""
+ ud.module = ud.parm["module"]
+
+ ud.tag = ""
+ if 'tag' in ud.parm:
+ ud.tag = ud.parm['tag']
+
+ # Override the default date in certain cases
+ if 'date' in ud.parm:
+ ud.date = ud.parm['date']
+ elif ud.tag:
+ ud.date = ""
+
+ ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date), d)
+
+ return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
- return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, tag, date), d))
- localpath = staticmethod(localpath)
+ def forcefetch(self, url, ud, d):
+ if (ud.date == "now"):
+ return True
+ return False
- def go(self, d, urls = []):
- """Fetch urls"""
- if not urls:
- urls = self.urls
+ def go(self, loc, ud, d):
+
+ # try to use the tarball stash
+ if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping cvs checkout." % ud.localpath)
+ return
+
+ method = "pserver"
+ if "method" in ud.parm:
+ method = ud.parm["method"]
+
+ localdir = ud.module
+ if "localdir" in ud.parm:
+ localdir = ud.parm["localdir"]
+
+ cvs_rsh = None
+ if method == "ext":
+ if "rsh" in ud.parm:
+ cvs_rsh = ud.parm["rsh"]
+
+ if method == "dir":
+ cvsroot = ud.path
+ else:
+ cvsroot = ":" + method + ":" + ud.user
+ if ud.pswd:
+ cvsroot += ":" + ud.pswd
+ cvsroot += "@" + ud.host + ":" + ud.path
+
+ options = []
+ if ud.date:
+ options.append("-D %s" % ud.date)
+ if ud.tag:
+ options.append("-r %s" % ud.tag)
localdata = data.createCopy(d)
data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
data.update_data(localdata)
- for loc in urls:
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
- if not "module" in parm:
- raise MissingParameterError("cvs method needs a 'module' parameter")
- else:
- module = parm["module"]
-
- dlfile = self.localpath(loc, localdata)
- dldir = data.getVar('DL_DIR', localdata, 1)
-# if local path contains the cvs
-# module, consider the dir above it to be the
-# download directory
-# pos = dlfile.find(module)
-# if pos:
-# dldir = dlfile[:pos]
-# else:
-# dldir = os.path.dirname(dlfile)
-
-# setup cvs options
- options = []
- if 'tag' in parm:
- tag = parm['tag']
- else:
- tag = ""
-
- if 'date' in parm:
- date = parm['date']
- else:
- if not tag:
- date = Fetch.getSRCDate(d)
- else:
- date = ""
-
- if "method" in parm:
- method = parm["method"]
- else:
- method = "pserver"
-
- if "localdir" in parm:
- localdir = parm["localdir"]
- else:
- localdir = module
-
- cvs_rsh = None
- if method == "ext":
- if "rsh" in parm:
- cvs_rsh = parm["rsh"]
-
- tarfn = data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date), localdata)
- data.setVar('TARFILES', dlfile, localdata)
- data.setVar('TARFN', tarfn, localdata)
-
- if Fetch.check_for_tarball(d, tarfn, dldir, date):
- continue
-
- if date:
- options.append("-D %s" % date)
- if tag:
- options.append("-r %s" % tag)
-
- olddir = os.path.abspath(os.getcwd())
- os.chdir(data.expand(dldir, localdata))
-
-# setup cvsroot
- if method == "dir":
- cvsroot = path
- else:
- cvsroot = ":" + method + ":" + user
- if pswd:
- cvsroot += ":" + pswd
- cvsroot += "@" + host + ":" + path
-
- data.setVar('CVSROOT', cvsroot, localdata)
- data.setVar('CVSCOOPTS', " ".join(options), localdata)
- data.setVar('CVSMODULE', module, localdata)
- cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
- cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
-
- if cvs_rsh:
- cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
- cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
-
-# create module directory
- bb.debug(2, "Fetch: checking for module directory")
- pkg=data.expand('${PN}', d)
- pkgdir=os.path.join(data.expand('${CVSDIR}', localdata), pkg)
- moddir=os.path.join(pkgdir,localdir)
- if os.access(os.path.join(moddir,'CVS'), os.R_OK):
- bb.note("Update " + loc)
-# update sources there
- os.chdir(moddir)
- myret = os.system(cvsupdatecmd)
- else:
- bb.note("Fetch " + loc)
-# check out sources there
- bb.mkdirhier(pkgdir)
- os.chdir(pkgdir)
- bb.debug(1, "Running %s" % cvscmd)
- myret = os.system(cvscmd)
-
- if myret != 0 or not os.access(moddir, os.R_OK):
- try:
- os.rmdir(moddir)
- except OSError:
- pass
- raise FetchError(module)
-
+ data.setVar('CVSROOT', cvsroot, localdata)
+ data.setVar('CVSCOOPTS', " ".join(options), localdata)
+ data.setVar('CVSMODULE', ud.module, localdata)
+ cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
+ cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
+
+ if cvs_rsh:
+ cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
+ cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
+
+ # create module directory
+ bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory")
+ pkg = data.expand('${PN}', d)
+ pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
+ moddir = os.path.join(pkgdir,localdir)
+ if os.access(os.path.join(moddir,'CVS'), os.R_OK):
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
+ # update sources there
os.chdir(moddir)
- os.chdir('..')
-# tar them up to a defined filename
- myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(moddir)))
- if myret != 0:
- try:
- os.unlink(tarfn)
- except OSError:
- pass
- os.chdir(olddir)
- del localdata
+ myret = os.system(cvsupdatecmd)
+ else:
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+ # check out sources there
+ bb.mkdirhier(pkgdir)
+ os.chdir(pkgdir)
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd)
+ myret = os.system(cvscmd)
+
+ if myret != 0 or not os.access(moddir, os.R_OK):
+ try:
+ os.rmdir(moddir)
+ except OSError:
+ pass
+ raise FetchError(ud.module)
+
+ os.chdir(moddir)
+ os.chdir('..')
+ # tar them up to a defined filename
+ myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(moddir)))
+ if myret != 0:
+ try:
+ os.unlink(ud.localpath)
+ except OSError:
+ pass
+ raise FetchError(ud.module)
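
For reference, the tarball name that Cvs.localpath() derives from the module, host, tag and date parameters can be reproduced with a few lines of plain Python. The URL values below are made-up examples; only the '%s_%s_%s_%s.tar.gz' naming pattern comes from the code above.

# Sketch of the cvs tarball naming; tag and date are effectively exclusive
# (a tag pins the checkout, otherwise the SRCDATE/CVSDATE date is used).
def cvs_localfile(module, host, tag="", date=""):
    return "%s_%s_%s_%s.tar.gz" % (module.replace("/", "."), host, tag, date)

print(cvs_localfile("apps/hello", "cvs.example.org", date="20061116"))
# -> apps.hello_cvs.example.org__20061116.tar.gz  (empty tag gives the double underscore)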
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
index 49235c141..75a762922 100644
--- a/bitbake/lib/bb/fetch/git.py
+++ b/bitbake/lib/bb/fetch/git.py
@@ -37,7 +37,7 @@ def prunedir(topdir):
def rungitcmd(cmd,d):
- bb.debug(1, "Running %s" % cmd)
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
# Need to export PATH as git is likely to be in metadata paths
# rather than host provided
@@ -48,108 +48,80 @@ def rungitcmd(cmd,d):
if myret != 0:
raise FetchError("Git: %s failed" % pathcmd)
-def gettag(parm):
- if 'tag' in parm:
- tag = parm['tag']
- else:
- tag = ""
- if not tag:
- tag = "master"
-
- return tag
-
-def getprotocol(parm):
- if 'protocol' in parm:
- proto = parm['protocol']
- else:
- proto = ""
- if not proto:
- proto = "rsync"
-
- return proto
-
-def localfile(url, d):
- """Return the filename to cache the checkout in"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-
- #if user sets localpath for file, use it instead.
- if "localpath" in parm:
- return parm["localpath"]
-
- tag = gettag(parm)
-
- return data.expand('git_%s%s_%s.tar.gz' % (host, path.replace('/', '.'), tag), d)
-
class Git(Fetch):
"""Class to fetch a module or modules from git repositories"""
- def supports(url, d):
- """Check to see if a given url can be fetched with cvs.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ def supports(self, url, ud, d):
+ """
+ Check to see if a given url can be fetched with git.
"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- return type in ['git']
- supports = staticmethod(supports)
+ return ud.type in ['git']
- def localpath(url, d):
+ def localpath(self, url, ud, d):
- return os.path.join(data.getVar("DL_DIR", d, 1), localfile(url, d))
+ ud.proto = "rsync"
+ if 'protocol' in ud.parm:
+ ud.proto = ud.parm['protocol']
- localpath = staticmethod(localpath)
+ ud.tag = "master"
+ if 'tag' in ud.parm:
+ ud.tag = ud.parm['tag']
- def go(self, d, urls = []):
- """Fetch urls"""
- if not urls:
- urls = self.urls
+ ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d)
- for loc in urls:
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, d))
+ return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
- tag = gettag(parm)
- proto = getprotocol(parm)
+ def forcefetch(self, url, ud, d):
+ # tag=="master" must always update
+ if (ud.tag == "master"):
+ return True
+ return False
- gitsrcname = '%s%s' % (host, path.replace('/', '.'))
+ def go(self, loc, ud, d):
+ """Fetch url"""
- repofilename = 'git_%s.tar.gz' % (gitsrcname)
- repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
- repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
+ if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
+ return
- coname = '%s' % (tag)
- codir = os.path.join(repodir, coname)
+ gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
- cofile = self.localpath(loc, d)
+ repofilename = 'git_%s.tar.gz' % (gitsrcname)
+ repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
+ repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
- # tag=="master" must always update
- if (tag != "master") and Fetch.try_mirror(d, localfile(loc, d)):
- bb.debug(1, "%s already exists (or was stashed). Skipping git checkout." % cofile)
- continue
+ coname = '%s' % (ud.tag)
+ codir = os.path.join(repodir, coname)
- if not os.path.exists(repodir):
- if Fetch.try_mirror(d, repofilename):
- bb.mkdirhier(repodir)
- os.chdir(repodir)
- rungitcmd("tar -xzf %s" % (repofile),d)
- else:
- rungitcmd("git clone -n %s://%s%s %s" % (proto, host, path, repodir),d)
+ if not os.path.exists(repodir):
+ if Fetch.try_mirror(d, repofilename):
+ bb.mkdirhier(repodir)
+ os.chdir(repodir)
+ rungitcmd("tar -xzf %s" % (repofile),d)
+ else:
+ rungitcmd("git clone -n %s://%s%s %s" % (ud.proto, ud.host, ud.path, repodir),d)
- os.chdir(repodir)
- rungitcmd("git pull %s://%s%s" % (proto, host, path),d)
- rungitcmd("git pull --tags %s://%s%s" % (proto, host, path),d)
- rungitcmd("git prune-packed", d)
- # old method of downloading tags
- #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (host, path, os.path.join(repodir, ".git", "")),d)
+ os.chdir(repodir)
+ rungitcmd("git pull %s://%s%s" % (ud.proto, ud.host, ud.path),d)
+ rungitcmd("git pull --tags %s://%s%s" % (ud.proto, ud.host, ud.path),d)
+ rungitcmd("git prune-packed", d)
+ rungitcmd("git pack-redundant --all | xargs -r rm", d)
+ # Remove all but the .git directory
+ rungitcmd("rm * -Rf", d)
+ # old method of downloading tags
+ #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (ud.host, ud.path, os.path.join(repodir, ".git", "")),d)
- os.chdir(repodir)
- bb.note("Creating tarball of git repository")
- rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d)
+ os.chdir(repodir)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
+ rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d)
- if os.path.exists(codir):
- prunedir(codir)
+ if os.path.exists(codir):
+ prunedir(codir)
- bb.mkdirhier(codir)
- os.chdir(repodir)
- rungitcmd("git read-tree %s" % (tag),d)
- rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d)
+ bb.mkdirhier(codir)
+ os.chdir(repodir)
+ rungitcmd("git read-tree %s" % (ud.tag),d)
+ rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d)
- os.chdir(codir)
- bb.note("Creating tarball of git checkout")
- rungitcmd("tar -czf %s %s" % (cofile, os.path.join(".", "*") ),d)
+ os.chdir(codir)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
+ rungitcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ),d)
diff --git a/bitbake/lib/bb/fetch/local.py b/bitbake/lib/bb/fetch/local.py
index 51938f823..522497670 100644
--- a/bitbake/lib/bb/fetch/local.py
+++ b/bitbake/lib/bb/fetch/local.py
@@ -31,15 +31,13 @@ from bb import data
from bb.fetch import Fetch
class Local(Fetch):
- def supports(url, d):
- """Check to see if a given url can be fetched in the local filesystem.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ def supports(self, url, urldata, d):
"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- return type in ['file','patch']
- supports = staticmethod(supports)
+ Check to see if a given url can be fetched in the local filesystem.
+ """
+ return urldata.type in ['file','patch']
- def localpath(url, d):
+ def localpath(self, url, urldata, d):
"""Return the local filename of a given url assuming a successful fetch.
"""
path = url.split("://")[1]
@@ -52,10 +50,10 @@ class Local(Fetch):
filesdir = data.getVar('FILESDIR', d, 1)
if filesdir:
newpath = os.path.join(filesdir, path)
+ # We don't set localfile as for this fetcher the file is already local!
return newpath
- localpath = staticmethod(localpath)
- def go(self, urls = []):
+ def go(self, url, urldata, d):
"""Fetch urls (no-op for Local method)"""
-# no need to fetch local files, we'll deal with them in place.
+ # no need to fetch local files, we'll deal with them in place.
return 1
diff --git a/bitbake/lib/bb/fetch/perforce.py b/bitbake/lib/bb/fetch/perforce.py
new file mode 100644
index 000000000..88acf6995
--- /dev/null
+++ b/bitbake/lib/bb/fetch/perforce.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+Copyright (C) 2003, 2004 Chris Larson
+
+This program is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation; either version 2 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program; if not, write to the Free Software Foundation, Inc., 59 Temple
+Place, Suite 330, Boston, MA 02111-1307 USA.
+
+Based on functions from the base bb module, Copyright 2003 Holger Schurig
+"""
+
+import os, re
+import bb
+from bb import data
+from bb.fetch import Fetch
+from bb.fetch import FetchError
+from bb.fetch import MissingParameterError
+
+class Perforce(Fetch):
+ def supports(self, url, ud, d):
+ return ud.type in ['p4']
+
+ def doparse(url,d):
+ parm=[]
+ path = url.split("://")[1]
+ delim = path.find("@");
+ if delim != -1:
+ (user,pswd,host,port) = path.split('@')[0].split(":")
+ path = path.split('@')[1]
+ else:
+ (host,port) = data.getVar('P4PORT', d).split(':')
+ user = ""
+ pswd = ""
+
+ if path.find(";") != -1:
+ keys=[]
+ values=[]
+ plist = path.split(';')
+ for item in plist:
+ if item.count('='):
+ (key,value) = item.split('=')
+ keys.append(key)
+ values.append(value)
+
+ parm = dict(zip(keys,values))
+ path = "//" + path.split(';')[0]
+ host += ":%s" % (port)
+ parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
+
+ return host,path,user,pswd,parm
+ doparse = staticmethod(doparse)
+
+ def getcset(d, depot,host,user,pswd,parm):
+ if "cset" in parm:
+ return parm["cset"];
+ if user:
+ data.setVar('P4USER', user, d)
+ if pswd:
+ data.setVar('P4PASSWD', pswd, d)
+ if host:
+ data.setVar('P4PORT', host, d)
+
+ p4date = data.getVar("P4DATE", d, 1)
+ if "revision" in parm:
+ depot += "#%s" % (parm["revision"])
+ elif "label" in parm:
+ depot += "@%s" % (parm["label"])
+ elif p4date:
+ depot += "@%s" % (p4date)
+
+ p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s changes -m 1 %s" % (p4cmd, depot))
+ p4file = os.popen("%s changes -m 1 %s" % (p4cmd,depot))
+ cset = p4file.readline().strip()
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset))
+ if not cset:
+ return -1
+
+ return cset.split(' ')[1]
+ getcset = staticmethod(getcset)
+
+ def localpath(self, url, ud, d):
+
+ (host,path,user,pswd,parm) = Perforce.doparse(url,d)
+
+ # If a label is specified, we use that as our filename
+
+ if "label" in parm:
+ ud.localfile = "%s.tar.gz" % (parm["label"])
+ return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
+
+ base = path
+ which = path.find('/...')
+ if which != -1:
+ base = path[:which]
+
+ if base[0] == "/":
+ base = base[1:]
+
+ cset = Perforce.getcset(d, path, host, user, pswd, parm)
+
+ ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host,base.replace('/', '.'), cset), d)
+
+ return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
+
+ def go(self, loc, ud, d):
+ """
+ Fetch urls
+ """
+
+ # try to use the tarball stash
+ if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath)
+ return
+
+ (host,depot,user,pswd,parm) = Perforce.doparse(loc, d)
+
+ if depot.find('/...') != -1:
+ path = depot[:depot.find('/...')]
+ else:
+ path = depot
+
+ if "module" in parm:
+ module = parm["module"]
+ else:
+ module = os.path.basename(path)
+
+ localdata = data.createCopy(d)
+ data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
+ data.update_data(localdata)
+
+ # Get the p4 command
+ if user:
+ data.setVar('P4USER', user, localdata)
+
+ if pswd:
+ data.setVar('P4PASSWD', pswd, localdata)
+
+ if host:
+ data.setVar('P4PORT', host, localdata)
+
+ p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
+
+ # create temp directory
+ bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
+ bb.mkdirhier(data.expand('${WORKDIR}', localdata))
+ data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
+ tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
+ tmpfile = tmppipe.readline().strip()
+ if not tmpfile:
+ bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
+ raise FetchError(module)
+
+ if "label" in parm:
+ depot = "%s@%s" % (depot,parm["label"])
+ else:
+ cset = Perforce.getcset(d, depot, host, user, pswd, parm)
+ depot = "%s@%s" % (depot,cset)
+
+ os.chdir(tmpfile)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "%s files %s" % (p4cmd, depot))
+ p4file = os.popen("%s files %s" % (p4cmd, depot))
+
+ if not p4file:
+ bb.error("Fetch: unable to get the P4 files from %s" % (depot))
+ raise FetchError(module)
+
+ count = 0
+
+ for file in p4file:
+ list = file.split()
+
+ if list[2] == "delete":
+ continue
+
+ dest = list[0][len(path)+1:]
+ where = dest.find("#")
+
+ os.system("%s print -o %s/%s %s" % (p4cmd, module,dest[:where],list[0]))
+ count = count + 1
+
+ if count == 0:
+ bb.error("Fetch: No files gathered from the P4 fetch")
+ raise FetchError(module)
+
+ myret = os.system("tar -czf %s %s" % (ud.localpath, module))
+ if myret != 0:
+ try:
+ os.unlink(ud.localpath)
+ except OSError:
+ pass
+ raise FetchError(module)
+ # cleanup
+ os.system('rm -rf %s' % tmpfile)
+
+
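
Perforce.doparse() above accepts URLs with an optional user:pswd:host:port@ prefix and ;key=value parameters. The sketch below re-implements just that string handling in isolation; the default port is a hypothetical stand-in for P4PORT, and the 'p4 changes -m 1' changeset lookup is left out.

# Self-contained sketch of the p4 URL shape handled by Perforce.doparse().
def parse_p4(url, default_port="p4.example.org:1666"):
    path = url.split("://", 1)[1]
    user = pswd = ""
    if "@" in path:
        cred, path = path.split("@", 1)
        user, pswd, host, port = cred.split(":")
    else:
        host, port = default_port.split(":")      # stand-in for P4PORT
    parm = {}
    if ";" in path:
        pieces = path.split(";")
        path = pieces[0]
        for item in pieces[1:]:
            if "=" in item:
                key, value = item.split("=", 1)
                parm[key] = value
    return "%s:%s" % (host, port), "//" + path.lstrip("/"), user, pswd, parm

print(parse_p4("p4://joe:secret:p4.example.org:1666@depot/project/...;label=rel-1"))
# -> ('p4.example.org:1666', '//depot/project/...', 'joe', 'secret', {'label': 'rel-1'})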
diff --git a/bitbake/lib/bb/fetch/ssh.py b/bitbake/lib/bb/fetch/ssh.py
index 57874d5ba..e5f69e33e 100644
--- a/bitbake/lib/bb/fetch/ssh.py
+++ b/bitbake/lib/bb/fetch/ssh.py
@@ -64,59 +64,55 @@ __pattern__ = re.compile(r'''
class SSH(Fetch):
'''Class to fetch a module or modules via Secure Shell'''
- def supports(self, url, d):
+ def supports(self, url, urldata, d):
return __pattern__.match(url) != None
- def localpath(self, url, d):
+ def localpath(self, url, urldata, d):
m = __pattern__.match(url)
path = m.group('path')
host = m.group('host')
- lpath = os.path.join(data.getVar('DL_DIR', d, 1), host, os.path.basename(path))
+ lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
return lpath
- def go(self, d, urls = []):
- if not urls:
- urls = self.urls
-
- for url in urls:
- dldir = data.getVar('DL_DIR', d, 1)
-
- m = __pattern__.match(url)
- path = m.group('path')
- host = m.group('host')
- port = m.group('port')
- user = m.group('user')
- password = m.group('pass')
-
- ldir = os.path.join(dldir, host)
- lpath = os.path.join(ldir, os.path.basename(path))
-
- if not os.path.exists(ldir):
- os.makedirs(ldir)
-
- if port:
- port = '-P %s' % port
- else:
- port = ''
-
- if user:
- fr = user
- if password:
- fr += ':%s' % password
- fr += '@%s' % host
- else:
- fr = host
- fr += ':%s' % path
-
-
- import commands
- cmd = 'scp -B -r %s %s %s/' % (
- port,
- commands.mkarg(fr),
- commands.mkarg(ldir)
- )
-
- (exitstatus, output) = commands.getstatusoutput(cmd)
- if exitstatus != 0:
- print output
- raise FetchError('Unable to fetch %s' % url)
+ def go(self, url, urldata, d):
+ dldir = data.getVar('DL_DIR', d, 1)
+
+ m = __pattern__.match(url)
+ path = m.group('path')
+ host = m.group('host')
+ port = m.group('port')
+ user = m.group('user')
+ password = m.group('pass')
+
+ ldir = os.path.join(dldir, host)
+ lpath = os.path.join(ldir, os.path.basename(path))
+
+ if not os.path.exists(ldir):
+ os.makedirs(ldir)
+
+ if port:
+ port = '-P %s' % port
+ else:
+ port = ''
+
+ if user:
+ fr = user
+ if password:
+ fr += ':%s' % password
+ fr += '@%s' % host
+ else:
+ fr = host
+ fr += ':%s' % path
+
+
+ import commands
+ cmd = 'scp -B -r %s %s %s/' % (
+ port,
+ commands.mkarg(fr),
+ commands.mkarg(ldir)
+ )
+
+ (exitstatus, output) = commands.getstatusoutput(cmd)
+ if exitstatus != 0:
+ print output
+ raise FetchError('Unable to fetch %s' % url)
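
The new SSH.go() above still builds its scp command with the Python 2 commands.mkarg() helper. As a rough modern equivalent (not part of this commit), the same command line can be assembled with shlex.quote(); the host, user and paths here are hypothetical.

# Sketch of the scp command assembled in SSH.go(), using shlex.quote()
# in place of commands.mkarg().
import shlex

def scp_command(host, path, ldir, user=None, password=None, port=None):
    fr = host
    if user:
        fr = user + ((":%s" % password) if password else "") + "@" + host
    fr += ":%s" % path
    portopt = ("-P %s" % port) if port else ""
    return "scp -B -r %s %s %s/" % (portopt, shlex.quote(fr), shlex.quote(ldir))

print(scp_command("build.example.org", "/srv/src/pkg.tar.gz",
                  "/downloads/build.example.org", user="fetch", port=2222))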
diff --git a/bitbake/lib/bb/fetch/svk.py b/bitbake/lib/bb/fetch/svk.py
index 19103213c..29270ab3d 100644
--- a/bitbake/lib/bb/fetch/svk.py
+++ b/bitbake/lib/bb/fetch/svk.py
@@ -42,112 +42,76 @@ from bb.fetch import MissingParameterError
class Svk(Fetch):
"""Class to fetch a module or modules from svk repositories"""
- def supports(url, d):
- """Check to see if a given url can be fetched with svk.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ def supports(self, url, ud, d):
"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- return type in ['svk']
- supports = staticmethod(supports)
-
- def localpath(url, d):
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- if "localpath" in parm:
-# if user overrides local path, use it.
- return parm["localpath"]
+ Check to see if a given url can be fetched with svk.
+ """
+ return ud.type in ['svk']
- if not "module" in parm:
+ def localpath(self, url, ud, d):
+ if not "module" in ud.parm:
raise MissingParameterError("svk method needs a 'module' parameter")
else:
- module = parm["module"]
- if 'rev' in parm:
- revision = parm['rev']
- else:
- revision = ""
+ ud.module = ud.parm["module"]
+
+ ud.revision = ""
+ if 'rev' in ud.parm:
+ ud.revision = ud.parm['rev']
+
+ ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
- date = Fetch.getSRCDate(d)
+ return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
- return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, path.replace('/', '.'), revision, date), d))
- localpath = staticmethod(localpath)
+ def forcefetch(self, url, ud, d):
+ if (ud.date == "now"):
+ return True
+ return False
- def go(self, d, urls = []):
+ def go(self, loc, ud, d):
"""Fetch urls"""
- if not urls:
- urls = self.urls
+ if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+ return
+
+ svkroot = ud.host + ud.path
+
+ svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, ud.module)
+
+ if ud.revision:
+ svkcmd = "svk co -r %s/%s" % (ud.revision, svkroot, ud.module)
+
+ # create temp directory
localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "svk:%s" % data.getVar('OVERRIDES', localdata), localdata)
data.update_data(localdata)
-
- for loc in urls:
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
- if not "module" in parm:
- raise MissingParameterError("svk method needs a 'module' parameter")
- else:
- module = parm["module"]
-
- dlfile = self.localpath(loc, localdata)
- dldir = data.getVar('DL_DIR', localdata, 1)
-
-# setup svk options
- options = []
- if 'rev' in parm:
- revision = parm['rev']
- else:
- revision = ""
-
- date = Fetch.getSRCDate(d)
- tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
- data.setVar('TARFILES', dlfile, localdata)
- data.setVar('TARFN', tarfn, localdata)
-
- if Fetch.check_for_tarball(d, tarfn, dldir, date):
- continue
-
- olddir = os.path.abspath(os.getcwd())
- os.chdir(data.expand(dldir, localdata))
-
- svkroot = host + path
-
- data.setVar('SVKROOT', svkroot, localdata)
- data.setVar('SVKCOOPTS', " ".join(options), localdata)
- data.setVar('SVKMODULE', module, localdata)
- svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, module)
-
- if revision:
- svkcmd = "svk co -r %s/%s" % (revision, svkroot, module)
-
-# create temp directory
- bb.debug(2, "Fetch: creating temporary directory")
- bb.mkdirhier(data.expand('${WORKDIR}', localdata))
- data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
- tmpfile = tmppipe.readline().strip()
- if not tmpfile:
- bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
- raise FetchError(module)
-
-# check out sources there
- os.chdir(tmpfile)
- bb.note("Fetch " + loc)
- bb.debug(1, "Running %s" % svkcmd)
- myret = os.system(svkcmd)
- if myret != 0:
- try:
- os.rmdir(tmpfile)
- except OSError:
- pass
- raise FetchError(module)
-
- os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
-# tar them up to a defined filename
- myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module)))
- if myret != 0:
- try:
- os.unlink(tarfn)
- except OSError:
- pass
-# cleanup
- os.system('rm -rf %s' % tmpfile)
- os.chdir(olddir)
- del localdata
+ bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
+ bb.mkdirhier(data.expand('${WORKDIR}', localdata))
+ data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
+ tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
+ tmpfile = tmppipe.readline().strip()
+ if not tmpfile:
+ bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
+ raise FetchError(ud.module)
+
+ # check out sources there
+ os.chdir(tmpfile)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd)
+ myret = os.system(svkcmd)
+ if myret != 0:
+ try:
+ os.rmdir(tmpfile)
+ except OSError:
+ pass
+ raise FetchError(ud.module)
+
+ os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
+ # tar them up to a defined filename
+ myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
+ if myret != 0:
+ try:
+ os.unlink(ud.localpath)
+ except OSError:
+ pass
+ raise FetchError(ud.module)
+ # cleanup
+ os.system('rm -rf %s' % tmpfile)
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py
index d1a959371..b95de2a79 100644
--- a/bitbake/lib/bb/fetch/svn.py
+++ b/bitbake/lib/bb/fetch/svn.py
@@ -26,6 +26,7 @@ Based on functions from the base bb module, Copyright 2003 Holger Schurig
"""
import os, re
+import sys
import bb
from bb import data
from bb.fetch import Fetch
@@ -34,136 +35,98 @@ from bb.fetch import MissingParameterError
class Svn(Fetch):
"""Class to fetch a module or modules from svn repositories"""
- def supports(url, d):
- """Check to see if a given url can be fetched with svn.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ def supports(self, url, ud, d):
"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- return type in ['svn']
- supports = staticmethod(supports)
-
- def localpath(url, d):
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- if "localpath" in parm:
-# if user overrides local path, use it.
- return parm["localpath"]
+ Check to see if a given url can be fetched with svn.
+ """
+ return ud.type in ['svn']
- if not "module" in parm:
+ def localpath(self, url, ud, d):
+ if not "module" in ud.parm:
raise MissingParameterError("svn method needs a 'module' parameter")
else:
- module = parm["module"]
- if 'rev' in parm:
- revision = parm['rev']
- else:
- revision = ""
+ ud.module = ud.parm["module"]
+
+ ud.revision = ""
+ if 'rev' in ud.parm:
+ ud.revision = ud.parm['rev']
+
+ ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
+
+ return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
- date = Fetch.getSRCDate(d)
+ def forcefetch(self, url, ud, d):
+ if (ud.date == "now"):
+ return True
+ return False
- return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, path.replace('/', '.'), revision, date), d))
- localpath = staticmethod(localpath)
+ def go(self, loc, ud, d):
+ """Fetch url"""
- def go(self, d, urls = []):
- """Fetch urls"""
- if not urls:
- urls = self.urls
+ # try to use the tarball stash
+ if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath)
+ return
+
+ proto = "svn"
+ if "proto" in ud.parm:
+ proto = ud.parm["proto"]
+
+ svn_rsh = None
+ if proto == "svn+ssh" and "rsh" in ud.parm:
+ svn_rsh = ud.parm["rsh"]
+
+ svnroot = ud.host + ud.path
+
+ # either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now"
+ options = []
+ if ud.revision:
+ options.append("-r %s" % ud.revision)
+ elif ud.date != "now":
+ options.append("-r {%s}" % ud.date)
localdata = data.createCopy(d)
data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata)
data.update_data(localdata)
- for loc in urls:
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
- if not "module" in parm:
- raise MissingParameterError("svn method needs a 'module' parameter")
- else:
- module = parm["module"]
-
- dlfile = self.localpath(loc, localdata)
- dldir = data.getVar('DL_DIR', localdata, 1)
-# if local path contains the svn
-# module, consider the dir above it to be the
-# download directory
-# pos = dlfile.find(module)
-# if pos:
-# dldir = dlfile[:pos]
-# else:
-# dldir = os.path.dirname(dlfile)
-
-# setup svn options
- options = []
- if 'rev' in parm:
- revision = parm['rev']
- else:
- revision = ""
-
- date = Fetch.getSRCDate(d)
-
- if "proto" in parm:
- proto = parm["proto"]
- else:
- proto = "svn"
-
- svn_rsh = None
- if proto == "svn+ssh" and "rsh" in parm:
- svn_rsh = parm["rsh"]
-
- tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
- data.setVar('TARFILES', dlfile, localdata)
- data.setVar('TARFN', tarfn, localdata)
-
- if Fetch.check_for_tarball(d, tarfn, dldir, date):
- continue
-
- olddir = os.path.abspath(os.getcwd())
- os.chdir(data.expand(dldir, localdata))
-
- svnroot = host + path
-
- data.setVar('SVNROOT', svnroot, localdata)
- data.setVar('SVNCOOPTS', " ".join(options), localdata)
- data.setVar('SVNMODULE', module, localdata)
- svncmd = data.getVar('FETCHCOMMAND', localdata, 1)
- svncmd = "svn co -r {%s} %s://%s/%s" % (date, proto, svnroot, module)
-
- if revision:
- svncmd = "svn co -r %s %s://%s/%s" % (revision, proto, svnroot, module)
- elif date == "now":
- svncmd = "svn co %s://%s/%s" % (proto, svnroot, module)
-
- if svn_rsh:
- svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
-
-# create temp directory
- bb.debug(2, "Fetch: creating temporary directory")
- bb.mkdirhier(data.expand('${WORKDIR}', localdata))
- data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvn.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
- tmpfile = tmppipe.readline().strip()
- if not tmpfile:
- bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
- raise FetchError(module)
-
-# check out sources there
- os.chdir(tmpfile)
- bb.note("Fetch " + loc)
- bb.debug(1, "Running %s" % svncmd)
+ data.setVar('SVNROOT', "%s://%s/%s" % (proto, svnroot, ud.module), localdata)
+ data.setVar('SVNCOOPTS', " ".join(options), localdata)
+ data.setVar('SVNMODULE', ud.module, localdata)
+ svncmd = data.getVar('FETCHCOMMAND', localdata, 1)
+ svnupcmd = data.getVar('UPDATECOMMAND', localdata, 1)
+
+ if svn_rsh:
+ svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
+ svnupcmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svnupcmd)
+
+ pkg = data.expand('${PN}', d)
+ pkgdir = os.path.join(data.expand('${SVNDIR}', localdata), pkg)
+ moddir = os.path.join(pkgdir, ud.module)
+ bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + moddir + "'")
+
+ if os.access(os.path.join(moddir, '.svn'), os.R_OK):
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
+ # update sources there
+ os.chdir(moddir)
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupcmd)
+ myret = os.system(svnupcmd)
+ else:
+ bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+ # check out sources there
+ bb.mkdirhier(pkgdir)
+ os.chdir(pkgdir)
+ bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svncmd)
myret = os.system(svncmd)
- if myret != 0:
- try:
- os.rmdir(tmpfile)
- except OSError:
- pass
- raise FetchError(module)
-
- os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
-# tar them up to a defined filename
- myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module)))
- if myret != 0:
- try:
- os.unlink(tarfn)
- except OSError:
- pass
-# cleanup
- os.system('rm -rf %s' % tmpfile)
- os.chdir(olddir)
- del localdata
+
+ if myret != 0:
+ raise FetchError(ud.module)
+
+ os.chdir(pkgdir)
+ # tar them up to a defined filename
+ myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
+ if myret != 0:
+ try:
+ os.unlink(ud.localpath)
+ except OSError:
+ pass
+ raise FetchError(ud.module)
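
Svn.go() above picks its checkout options in a fixed order: an explicit rev parameter wins, otherwise the checkout is pinned to SRCDATE in braces, and SRCDATE == "now" leaves the options empty (forcefetch() then re-fetches every time). A minimal sketch of that selection:

# Sketch of the option selection in Svn.go().
def svn_options(revision="", date="now"):
    options = []
    if revision:
        options.append("-r %s" % revision)
    elif date != "now":
        options.append("-r {%s}" % date)
    return " ".join(options)

print(svn_options(revision="1234"))      # -> -r 1234
print(svn_options(date="20061116"))      # -> -r {20061116}
print(svn_options())                     # -> (empty: fetched every time)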
diff --git a/bitbake/lib/bb/fetch/wget.py b/bitbake/lib/bb/fetch/wget.py
index e47a8859b..9c9c1675a 100644
--- a/bitbake/lib/bb/fetch/wget.py
+++ b/bitbake/lib/bb/fetch/wget.py
@@ -30,138 +30,70 @@ import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
-from bb.fetch import MD5SumError
from bb.fetch import uri_replace
class Wget(Fetch):
"""Class to fetch urls via 'wget'"""
- def supports(url, d):
- """Check to see if a given url can be fetched using wget.
- Expects supplied url in list form, as outputted by bb.decodeurl().
+ def supports(self, url, ud, d):
"""
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- return type in ['http','https','ftp']
- supports = staticmethod(supports)
-
- def localpath(url, d):
-# strip off parameters
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
- if "localpath" in parm:
-# if user overrides local path, use it.
- return parm["localpath"]
- url = bb.encodeurl([type, host, path, user, pswd, {}])
-
- return os.path.join(data.getVar("DL_DIR", d), os.path.basename(url))
- localpath = staticmethod(localpath)
-
- def go(self, d, urls = []):
+ Check to see if a given url can be fetched with wget.
+ """
+ return ud.type in ['http','https','ftp']
+
+ def localpath(self, url, ud, d):
+
+ url = bb.encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
+ ud.basename = os.path.basename(ud.path)
+ ud.localfile = data.expand(os.path.basename(url), d)
+
+ return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+ def go(self, uri, ud, d):
"""Fetch urls"""
- def md5_sum(parm, d):
- """
- Return the MD5SUM associated with the to be downloaded
- file.
- It can return None if no md5sum is associated
- """
- try:
- return parm['md5sum']
- except:
- return None
-
- def verify_md5sum(wanted_sum, got_sum):
- """
- Verify the md5sum we wanted with the one we got
- """
- if not wanted_sum:
- return True
-
- return wanted_sum == got_sum
-
- def fetch_uri(uri, basename, dl, md5, parm, d):
- # the MD5 sum we want to verify
- wanted_md5sum = md5_sum(parm, d)
- if os.path.exists(dl):
-# file exists, but we didnt complete it.. trying again..
+ def fetch_uri(uri, ud, d):
+ if os.path.exists(ud.localpath):
+ # file exists, but we didnt complete it.. trying again..
fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
else:
fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
- bb.note("fetch " + uri)
+ bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
fetchcmd = fetchcmd.replace("${URI}", uri)
- fetchcmd = fetchcmd.replace("${FILE}", basename)
- bb.debug(2, "executing " + fetchcmd)
+ fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
+ bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
ret = os.system(fetchcmd)
if ret != 0:
return False
# check if sourceforge did send us to the mirror page
- dl_dir = data.getVar("DL_DIR", d, True)
- if not os.path.exists(dl):
- os.system("rm %s*" % dl) # FIXME shell quote it
- bb.debug(2,"sourceforge.net send us to the mirror on %s" % basename)
+ if not os.path.exists(ud.localpath):
+ os.system("rm %s*" % ud.localpath) # FIXME shell quote it
+ bb.msg.debug(2, bb.msg.domain.Fetcher, "sourceforge.net send us to the mirror on %s" % ud.basename)
return False
-# supposedly complete.. write out md5sum
- if bb.which(data.getVar('PATH', d), 'md5sum'):
- try:
- md5pipe = os.popen('md5sum ' + dl)
- md5data = (md5pipe.readline().split() or [ "" ])[0]
- md5pipe.close()
- except OSError:
- md5data = ""
-
- # verify the md5sum
- if not verify_md5sum(wanted_md5sum, md5data):
- raise MD5SumError(uri)
-
- md5out = file(md5, 'w')
- md5out.write(md5data)
- md5out.close()
return True
- if not urls:
- urls = self.urls
-
localdata = data.createCopy(d)
data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
data.update_data(localdata)
- for uri in urls:
- completed = 0
- (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(uri, localdata))
- basename = os.path.basename(path)
- dl = self.localpath(uri, d)
- dl = data.expand(dl, localdata)
- md5 = dl + '.md5'
-
- if os.path.exists(md5):
-# complete, nothing to see here..
- continue
-
- premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
- for (find, replace) in premirrors:
- newuri = uri_replace(uri, find, replace, d)
- if newuri != uri:
- if fetch_uri(newuri, basename, dl, md5, parm, localdata):
- completed = 1
- break
-
- if completed:
- continue
-
- if fetch_uri(uri, basename, dl, md5, parm, localdata):
- continue
-
-# try mirrors
- mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
- for (find, replace) in mirrors:
- newuri = uri_replace(uri, find, replace, d)
- if newuri != uri:
- if fetch_uri(newuri, basename, dl, md5, parm, localdata):
- completed = 1
- break
-
- if not completed:
- raise FetchError(uri)
-
- del localdata
+ premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
+ for (find, replace) in premirrors:
+ newuri = uri_replace(uri, find, replace, d)
+ if newuri != uri:
+ if fetch_uri(newuri, ud, localdata):
+ return
+
+ if fetch_uri(uri, ud, localdata):
+ return
+
+ # try mirrors
+ mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
+ for (find, replace) in mirrors:
+ newuri = uri_replace(uri, find, replace, d)
+ if newuri != uri:
+ if fetch_uri(newuri, ud, localdata):
+ return
+
+ raise FetchError(uri)
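
Wget.go() above tries PREMIRRORS first, then the original URI, then MIRRORS, rewriting the URL with uri_replace() for each (find, replace) pair. The sketch below reproduces that fallback order with a simplified prefix substitution standing in for uri_replace() and a fake fetch function; all hosts and paths are hypothetical.

# Sketch of the premirror / original / mirror fallback order in Wget.go().
def fetch_with_mirrors(uri, fetch_uri, premirrors, mirrors):
    def rewrite(uri, find, replace):
        return uri.replace(find, replace) if uri.startswith(find) else uri

    for find, replace in premirrors:          # 1. premirror copies first
        newuri = rewrite(uri, find, replace)
        if newuri != uri and fetch_uri(newuri):
            return newuri
    if fetch_uri(uri):                        # 2. then the upstream URI itself
        return uri
    for find, replace in mirrors:             # 3. finally the fallback mirrors
        newuri = rewrite(uri, find, replace)
        if newuri != uri and fetch_uri(newuri):
            return newuri
    raise RuntimeError("fetch failed: %s" % uri)

# Example with a fake fetcher that only "succeeds" for one mirror host.
ok_host = "http://mirror.example.org/"
result = fetch_with_mirrors(
    "http://upstream.example.org/pkg-1.0.tar.gz",
    lambda u: u.startswith(ok_host),
    premirrors=[("http://upstream.example.org/", "file:///srv/premirror/")],
    mirrors=[("http://upstream.example.org/", ok_host)])
print(result)   # -> http://mirror.example.org/pkg-1.0.tar.gz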