author | Chris Larson <chris_larson@mentor.com> | 2010-03-24 16:56:12 -0700
committer | Richard Purdie <rpurdie@linux.intel.com> | 2010-07-02 15:41:32 +0100
commit | 7acc132cac873e60005516272473a55a8160b9c4 (patch)
tree | 2e4122862ffd856803160b6089fcb979d3efd630 /bitbake/lib/bb/fetch
parent | bbf83fd988ca3cf9dae7d2b542a11a7c942b1702 (diff)
Formatting cleanups
(Bitbake rev: 2caf134b43a44dad30af4fbe33033b3c58deee57)
Signed-off-by: Chris Larson <chris_larson@mentor.com>
Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
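
The patch is formatting-only: it adds spaces after commas and around assignments and strips trailing whitespace and stray blank lines in the fetcher modules, without changing behaviour. A minimal before/after sketch of the spacing conventions being applied (the string literal is just for illustration; the spacing patterns are taken from the hunks below):

    # Before the cleanup: no space after the comma, none around '='.
    s1,s2 = "key=value".split('=')
    localcount= None

    # After the cleanup: PEP 8-style spacing; behaviour is identical.
    s1, s2 = "key=value".split('=')
    localcount = None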
Diffstat (limited to 'bitbake/lib/bb/fetch')
-rw-r--r-- | bitbake/lib/bb/fetch/__init__.py | 32
-rw-r--r-- | bitbake/lib/bb/fetch/bzr.py | 7
-rw-r--r-- | bitbake/lib/bb/fetch/cvs.py | 2
-rw-r--r-- | bitbake/lib/bb/fetch/git.py | 11
-rw-r--r-- | bitbake/lib/bb/fetch/hg.py | 7
-rw-r--r-- | bitbake/lib/bb/fetch/local.py | 6
-rw-r--r-- | bitbake/lib/bb/fetch/osc.py | 10
-rw-r--r-- | bitbake/lib/bb/fetch/perforce.py | 8
-rw-r--r-- | bitbake/lib/bb/fetch/svn.py | 2
9 files changed, 40 insertions, 45 deletions
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index b1b5eda35..09c83b026 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -85,7 +85,7 @@ def decodeurl(url):
     p = {}
     if parm:
         for s in parm.split(';'):
-            s1,s2 = s.split('=')
+            s1, s2 = s.split('=')
             p[s1] = s2

     return (type, host, path, user, pswd, p)
@@ -121,7 +121,7 @@ def uri_replace(uri, uri_find, uri_replace, d):
     uri_decoded = list(decodeurl(uri))
     uri_find_decoded = list(decodeurl(uri_find))
     uri_replace_decoded = list(decodeurl(uri_replace))
-    result_decoded = ['','','','','',{}]
+    result_decoded = ['', '', '', '', '', {}]
     for i in uri_find_decoded:
         loc = uri_find_decoded.index(i)
         result_decoded[loc] = uri_decoded[loc]
@@ -214,7 +214,7 @@ def init(urls, d, setup = True):
     if setup:
         for url in urldata:
             if not urldata[url].setup:
-                urldata[url].setup_localpath(d)
+                urldata[url].setup_localpath(d)

     urldata_cache[fn] = urldata
     return urldata
@@ -243,7 +243,7 @@ def go(d, urls = None):
             continue
         lf = bb.utils.lockfile(ud.lockfile)
         if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
-            # If someone else fetched this before we got the lock,
+            # If someone else fetched this before we got the lock,
             # notice and don't try again
             try:
                 os.utime(ud.md5, None)
@@ -309,7 +309,7 @@ def localpaths(d):
     urldata = init([], d, True)

     for u in urldata:
-        ud = urldata[u]
+        ud = urldata[u]
         local.append(ud.localpath)

     return local
@@ -321,15 +321,15 @@ def get_srcrev(d):
     Return the version string for the current package
     (usually to be used as PV)
     Most packages usually only have one SCM so we just pass on the call.
-    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
+    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
     have been set.
     """

     #
-    # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
+    # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
     # could translate into a call to here. If it does, we need to catch this
     # and provide some way so it knows get_srcrev is active instead of being
-    # some number etc. hence the srcrev_internal_call tracking and the magic
+    # some number etc. hence the srcrev_internal_call tracking and the magic
     # "SRCREVINACTION" return value.
     #
     # Neater solutions welcome!
@@ -339,7 +339,7 @@ def get_srcrev(d):

     scms = []

-    # Only call setup_localpath on URIs which suppports_srcrev()
+    # Only call setup_localpath on URIs which suppports_srcrev()
     urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
     for u in urldata:
         ud = urldata[u]
@@ -352,7 +352,7 @@ def get_srcrev(d):
         bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
         raise ParameterError

-    bb.data.setVar('__BB_DONT_CACHE','1', d)
+    bb.data.setVar('__BB_DONT_CACHE', '1', d)

     if len(scms) == 1:
         return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)
@@ -375,7 +375,7 @@

 def localpath(url, d, cache = True):
     """
-    Called from the parser with cache=False since the cache isn't ready
+    Called from the parser with cache=False since the cache isn't ready
     at this point. Also called from classed in OE e.g. patch.bbclass
     """
     ud = init([url], d)
@@ -538,7 +538,7 @@ class Fetch(object):
     def localpath(self, url, urldata, d):
         """
         Return the local filename of a given url assuming a successful fetch.
-        Can also setup variables in urldata for use in go (saving code duplication
+        Can also setup variables in urldata for use in go (saving code duplication
         and duplicate code execution)
         """
         return url
@@ -599,8 +599,8 @@ class Fetch(object):
         """
         Return:
             a) a source revision if specified
-            b) True if auto srcrev is in action
-            c) False otherwise
+            b) True if auto srcrev is in action
+            c) False otherwise
         """

         if 'rev' in ud.parm:
@@ -632,7 +632,7 @@ class Fetch(object):
             b) None otherwise
         """

-        localcount= None
+        localcount = None
         if 'name' in ud.parm:
             pn = data.getVar("PN", d, 1)
             localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
@@ -685,7 +685,7 @@ class Fetch(object):

     def sortable_revision(self, url, ud, d):
         """
-
+
         """
         if hasattr(self, "_sortable_revision"):
             return self._sortable_revision(url, ud, d)
diff --git a/bitbake/lib/bb/fetch/bzr.py b/bitbake/lib/bb/fetch/bzr.py
index c6e33c334..813d7d8c8 100644
--- a/bitbake/lib/bb/fetch/bzr.py
+++ b/bitbake/lib/bb/fetch/bzr.py
@@ -46,15 +46,15 @@ class Bzr(Fetch):

         revision = Fetch.srcrev_internal_helper(ud, d)
         if revision is True:
-            ud.revision = self.latest_revision(url, ud, d)
+            ud.revision = self.latest_revision(url, ud, d)
         elif revision:
             ud.revision = revision

         if not ud.revision:
-            ud.revision = self.latest_revision(url, ud, d)
+            ud.revision = self.latest_revision(url, ud, d)

         ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
-
+
         return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

     def _buildbzrcommand(self, ud, d, command):
@@ -145,4 +145,3 @@ class Bzr(Fetch):

     def _build_revision(self, url, ud, d):
         return ud.revision
-
diff --git a/bitbake/lib/bb/fetch/cvs.py b/bitbake/lib/bb/fetch/cvs.py
index 443f52131..c0d43618f 100644
--- a/bitbake/lib/bb/fetch/cvs.py
+++ b/bitbake/lib/bb/fetch/cvs.py
@@ -157,7 +157,7 @@ class Cvs(Fetch):
             try:
                 os.rmdir(moddir)
             except OSError:
-                pass
+                pass
             raise FetchError(ud.module)

         # tar them up to a defined filename
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
index 41ebc5b99..533268625 100644
--- a/bitbake/lib/bb/fetch/git.py
+++ b/bitbake/lib/bb/fetch/git.py
@@ -57,12 +57,12 @@ class Git(Fetch):

         tag = Fetch.srcrev_internal_helper(ud, d)
         if tag is True:
-            ud.tag = self.latest_revision(url, ud, d)
+            ud.tag = self.latest_revision(url, ud, d)
         elif tag:
             ud.tag = tag

         if not ud.tag or ud.tag == "master":
-            ud.tag = self.latest_revision(url, ud, d)
+            ud.tag = self.latest_revision(url, ud, d)

         subdir = ud.parm.get("subpath", "")
         if subdir != "":
@@ -114,7 +114,7 @@ class Git(Fetch):

             os.chdir(ud.clonedir)
             mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
-            if mirror_tarballs != "0" or 'fullclone' in ud.parm:
+            if mirror_tarballs != "0" or 'fullclone' in ud.parm:
                 bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
                 runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)

@@ -188,7 +188,7 @@ class Git(Fetch):

     def _sortable_buildindex_disabled(self, url, ud, d, rev):
         """
-        Return a suitable buildindex for the revision specified. This is done by counting revisions
+        Return a suitable buildindex for the revision specified. This is done by counting revisions
         using "git rev-list" which may or may not work in different circumstances.
         """

@@ -213,5 +213,4 @@ class Git(Fetch):

         buildindex = "%s" % output.split()[0]
         bb.msg.debug(1, bb.msg.domain.Fetcher, "GIT repository for %s in %s is returning %s revisions in rev-list before %s" % (url, ud.clonedir, buildindex, rev))
-        return buildindex
-
+        return buildindex
diff --git a/bitbake/lib/bb/fetch/hg.py b/bitbake/lib/bb/fetch/hg.py
index d0756382f..efb3b5c76 100644
--- a/bitbake/lib/bb/fetch/hg.py
+++ b/bitbake/lib/bb/fetch/hg.py
@@ -134,9 +134,9 @@ class Hg(Fetch):
             os.chdir(ud.pkgdir)
             bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % fetchcmd)
             runfetchcmd(fetchcmd, d)
-
-            # Even when we clone (fetch), we still need to update as hg's clone
-            # won't checkout the specified revision if its on a branch
+
+            # Even when we clone (fetch), we still need to update as hg's clone
+            # won't checkout the specified revision if its on a branch
             updatecmd = self._buildhgcommand(ud, d, "update")
             bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd)
             runfetchcmd(updatecmd, d)
@@ -170,4 +170,3 @@ class Hg(Fetch):
         Return a unique key for the url
         """
         return "hg:" + ud.moddir
-
diff --git a/bitbake/lib/bb/fetch/local.py b/bitbake/lib/bb/fetch/local.py
index f9bdf589c..a2abc8639 100644
--- a/bitbake/lib/bb/fetch/local.py
+++ b/bitbake/lib/bb/fetch/local.py
@@ -65,8 +65,8 @@ class Local(Fetch):
         Check the status of the url
         """
         if urldata.localpath.find("*") != -1:
-            bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s looks like a glob and was therefore not checked." % url)
-            return True
+            bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s looks like a glob and was therefore not checked." % url)
+            return True
         if os.path.exists(urldata.localpath):
-            return True
+            return True
         return False
diff --git a/bitbake/lib/bb/fetch/osc.py b/bitbake/lib/bb/fetch/osc.py
index 548dd9d07..ed773939b 100644
--- a/bitbake/lib/bb/fetch/osc.py
+++ b/bitbake/lib/bb/fetch/osc.py
@@ -16,7 +16,7 @@ from bb.fetch import MissingParameterError
 from bb.fetch import runfetchcmd

 class Osc(Fetch):
-    """Class to fetch a module or modules from Opensuse build server
+    """Class to fetch a module or modules from Opensuse build server
        repositories."""

     def supports(self, url, ud, d):
@@ -64,7 +64,7 @@ class Osc(Fetch):
         proto = "ocs"
         if "proto" in ud.parm:
             proto = ud.parm["proto"]
-
+
         options = []

         config = "-c %s" % self.generate_config(ud, d)
@@ -108,7 +108,7 @@ class Osc(Fetch):
             os.chdir(ud.pkgdir)
             bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscfetchcmd)
             runfetchcmd(oscfetchcmd, d)
-
+
         os.chdir(os.path.join(ud.pkgdir + ud.path))
         # tar them up to a defined filename
         try:
@@ -131,7 +131,7 @@ class Osc(Fetch):

         config_path = "%s/oscrc" % data.expand('${OSCDIR}', d)
         if (os.path.exists(config_path)):
-            os.remove(config_path)
+            os.remove(config_path)

         f = open(config_path, 'w')
         f.write("[general]\n")
@@ -146,5 +146,5 @@ class Osc(Fetch):
         f.write("user = %s\n" % ud.parm["user"])
         f.write("pass = %s\n" % ud.parm["pswd"])
         f.close()
-
+
         return config_path
diff --git a/bitbake/lib/bb/fetch/perforce.py b/bitbake/lib/bb/fetch/perforce.py
index 8bc3205c2..67de6f59f 100644
--- a/bitbake/lib/bb/fetch/perforce.py
+++ b/bitbake/lib/bb/fetch/perforce.py
@@ -95,7 +95,7 @@ class Perforce(Fetch):
         return cset.split(' ')[1]
     getcset = staticmethod(getcset)

-    def localpath(self, url, ud, d):
+    def localpath(self, url, ud, d):

         (host,path,user,pswd,parm) = Perforce.doparse(url,d)

@@ -180,7 +180,7 @@ class Perforce(Fetch):

         count = 0

-        for file in p4file:
+        for file in p4file:
             list = file.split()

             if list[2] == "delete":
@@ -191,7 +191,7 @@ class Perforce(Fetch):

                 os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module,dest[:where],list[0]))
                 count = count + 1
-
+
         if count == 0:
             bb.msg.error(bb.msg.domain.Fetcher, "Fetch: No files gathered from the P4 fetch")
             raise FetchError(module)
@@ -205,5 +205,3 @@ class Perforce(Fetch):
             raise FetchError(module)
         # cleanup
         os.system('rm -rf %s' % tmpfile)
-
-
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py
index ba9f6ab10..375e8df05 100644
--- a/bitbake/lib/bb/fetch/svn.py
+++ b/bitbake/lib/bb/fetch/svn.py
@@ -78,7 +78,7 @@ class Svn(Fetch):
                 ud.revision = rev
                 ud.date = ""
             else:
-                ud.revision = ""
+                ud.revision = ""

         ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)