 bitbake/lib/bb/cache.py  | 369
 bitbake/lib/bb/cooker.py |   3
 2 files changed, 197 insertions(+), 175 deletions(-)
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 707d81c97..3e2f698cf 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -30,7 +30,7 @@
 
 import os
 import logging
-from collections import defaultdict
+from collections import defaultdict, namedtuple
 
 import bb.data
 import bb.utils
@@ -43,7 +43,104 @@
 except ImportError:
     logger.info("Importing cPickle failed. "
                 "Falling back to a very slow implementation.")
-__cache_version__ = "132"
+__cache_version__ = "133"
+
+recipe_fields = (
+    'pn',
+    'pv',
+    'pr',
+    'pe',
+    'defaultpref',
+    'depends',
+    'provides',
+    'task_deps',
+    'stamp',
+    'broken',
+    'not_world',
+    'skipped',
+    'timestamp',
+    'packages',
+    'packages_dynamic',
+    'rdepends',
+    'rdepends_pkg',
+    'rprovides',
+    'rprovides_pkg',
+    'rrecommends',
+    'rrecommends_pkg',
+    'nocache',
+    'variants',
+    'file_depends',
+    'tasks',
+    'basetaskhashes',
+    'hashfilename',
+)
+
+
+class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)):
+    __slots__ = ()
+
+    @classmethod
+    def listvar(cls, var, metadata):
+        return cls.getvar(var, metadata).split()
+
+    @classmethod
+    def intvar(cls, var, metadata):
+        return int(cls.getvar(var, metadata) or 0)
+
+    @classmethod
+    def depvar(cls, var, metadata):
+        return bb.utils.explode_deps(cls.getvar(var, metadata))
+
+    @classmethod
+    def pkgvar(cls, var, packages, metadata):
+        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
+                    for pkg in packages)
+
+    @classmethod
+    def taskvar(cls, var, tasks, metadata):
+        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
+                    for task in tasks)
+    @classmethod
+    def getvar(cls, var, metadata):
+        return metadata.getVar(var, True) or ''
+
+    @classmethod
+    def from_metadata(cls, filename, metadata):
+        tasks = metadata.getVar('__BBTASKS', False)
+
+        packages = cls.listvar('PACKAGES', metadata)
+        return RecipeInfo(
+            tasks = tasks,
+            basetaskhashes = cls.taskvar('BB_BASEHASH', tasks, metadata),
+            hashfilename = cls.getvar('BB_HASHFILENAME', metadata),
+
+            file_depends = metadata.getVar('__depends', False),
+            task_deps = metadata.getVar('_task_deps', False) or
+                        {'tasks': [], 'parents': {}},
+            variants = cls.listvar('__VARIANTS', metadata) + [''],
+
+            skipped = cls.getvar('__SKIPPED', metadata),
+            timestamp = bb.parse.cached_mtime(filename),
+            packages = packages,
+            pn = cls.getvar('PN', metadata),
+            pe = cls.getvar('PE', metadata),
+            pv = cls.getvar('PV', metadata),
+            pr = cls.getvar('PR', metadata),
+            nocache = cls.getvar('__BB_DONT_CACHE', metadata),
+            defaultpref = cls.intvar('DEFAULT_PREFERENCE', metadata),
+            broken = cls.getvar('BROKEN', metadata),
+            not_world = cls.getvar('EXCLUDE_FROM_WORLD', metadata),
+            stamp = cls.getvar('STAMP', metadata),
+            packages_dynamic = cls.listvar('PACKAGES_DYNAMIC', metadata),
+            depends = cls.depvar('DEPENDS', metadata),
+            provides = cls.depvar('PROVIDES', metadata),
+            rdepends = cls.depvar('RDEPENDS', metadata),
+            rprovides = cls.depvar('RPROVIDES', metadata),
+            rrecommends = cls.depvar('RRECOMMENDS', metadata),
+            rprovides_pkg = cls.pkgvar('RPROVIDES', packages, metadata),
+            rdepends_pkg = cls.pkgvar('RDEPENDS', packages, metadata),
+            rrecommends_pkg = cls.pkgvar('RRECOMMENDS', packages, metadata),
+        )
 
 
 class Cache(object):
@@ -99,56 +196,6 @@ class Cache(object):
             if os.path.isfile(self.cachefile):
                 logger.info("Out of date cache found, rebuilding...")
 
-    def getVar(self, var, fn, exp=0):
-        """
-        Gets the value of a variable
-        (similar to getVar in the data class)
-
-        There are two scenarios:
-          1. We have cached data - serve from depends_cache[fn]
-          2. We're learning what data to cache - serve from data
-             backend but add a copy of the data to the cache.
-        """
-        if fn in self.clean:
-            return self.depends_cache[fn].get(var)
-
-        self.depends_cache.setdefault(fn, {})
-
-        if fn != self.data_fn:
-            # We're trying to access data in the cache which doesn't exist
-            # yet setData hasn't been called to set up the right access
-            logger.error("data_fn %s and fn %s don't match", self.data_fn, fn)
-
-        self.cacheclean = False
-        result = bb.data.getVar(var, self.data, exp)
-        if result is not None:
-            self.depends_cache[fn][var] = result
-        return result
-
-    def setData(self, virtualfn, fn, data):
-        """
-        Called to prime bb_cache ready to learn which variables to cache.
-        Will be followed by calls to self.getVar which aren't cached
-        but can be fulfilled from self.data.
-        """
-        self.data_fn = virtualfn
-        self.data = data
-
-        # Make sure __depends makes the depends_cache
-        # If we're a virtual class we need to make sure all our depends are
-        # appended to the depends of fn.
-        depends = self.getVar("__depends", virtualfn) or set()
-        self.depends_cache.setdefault(fn, {})
-        if "__depends" not in self.depends_cache[fn] or not self.depends_cache[fn]["__depends"]:
-            self.depends_cache[fn]["__depends"] = depends
-        else:
-            self.depends_cache[fn]["__depends"].update(depends)
-
-        # Make sure the variants always make it into the cache too
-        self.getVar('__VARIANTS', virtualfn, True)
-
-        self.depends_cache[virtualfn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)
-
     @staticmethod
     def virtualfn2realfn(virtualfn):
         """
@@ -193,38 +240,39 @@ class Cache(object):
         to record the variables accessed.
         Return the cache status and whether the file was skipped when parsed
         """
-        skipped = 0
-        virtuals = 0
+        skipped, virtuals = 0, 0
 
         if fn not in self.checked:
            self.cacheValidUpdate(fn)
 
-        if self.cacheValid(fn):
-            multi = self.getVar('__VARIANTS', fn, True)
-            for cls in (multi or "").split() + [""]:
-                virtualfn = self.realfn2virtual(fn, cls)
-                if self.depends_cache[virtualfn].get("__SKIPPED"):
-                    skipped += 1
-                    logger.debug(1, "Skipping %s", virtualfn)
-                    continue
-                self.handle_data(virtualfn, cacheData)
-                virtuals += 1
-            return True, skipped, virtuals
-
-        logger.debug(1, "Parsing %s", fn)
-
-        bb_data = self.load_bbfile(fn, appends, cfgData)
-
-        for data in bb_data:
-            virtualfn = self.realfn2virtual(fn, data)
-            self.setData(virtualfn, fn, bb_data[data])
-            if self.getVar("__SKIPPED", virtualfn):
-                skipped += 1
+        cached = self.cacheValid(fn)
+        if not cached:
+            self.cacheclean = False
+            logger.debug(1, "Parsing %s", fn)
+            datastores = self.load_bbfile(fn, appends, cfgData)
+            depends = set()
+            for variant, data in sorted(datastores.iteritems(),
+                                        key=lambda i: i[0],
+                                        reverse=True):
+                virtualfn = self.realfn2virtual(fn, variant)
+                depends |= (data.getVar("__depends", False) or set())
+                if depends and not variant:
+                    data.setVar("__depends", depends)
+                info = RecipeInfo.from_metadata(fn, data)
+                self.depends_cache[virtualfn] = info
+
+        info = self.depends_cache[fn]
+        for variant in info.variants:
+            virtualfn = self.realfn2virtual(fn, variant)
+            vinfo = self.depends_cache[virtualfn]
+            if vinfo.skipped:
                 logger.debug(1, "Skipping %s", virtualfn)
+                skipped += 1
             else:
-                self.handle_data(virtualfn, cacheData)
+                cacheData.add_from_recipeinfo(virtualfn, vinfo)
                 virtuals += 1
-        return False, skipped, virtuals
+
+        return cached, skipped, virtuals
 
     def cacheValid(self, fn):
         """
@@ -266,14 +314,15 @@ class Cache(object):
            self.remove(fn)
            return False
 
+        info = self.depends_cache[fn]
         # Check the file's timestamp
-        if mtime != self.getVar("CACHETIMESTAMP", fn, True):
+        if mtime != info.timestamp:
             logger.debug(2, "Cache: %s changed", fn)
             self.remove(fn)
             return False
 
         # Check dependencies are still valid
-        depends = self.getVar("__depends", fn, True)
+        depends = info.file_depends
         if depends:
             for f, old_mtime in depends:
                 fmtime = bb.parse.cached_mtime_noerror(f)
@@ -290,20 +339,17 @@
                     self.remove(fn)
                     return False
 
-        self.clean.add(fn)
         invalid = False
-        # Mark extended class data as clean too
-        multi = self.getVar('__VARIANTS', fn, True)
-        for cls in (multi or "").split():
+        for cls in info.variants:
             virtualfn = self.realfn2virtual(fn, cls)
             self.clean.add(virtualfn)
-            if not virtualfn in self.depends_cache:
+            if virtualfn not in self.depends_cache:
                 logger.debug(2, "Cache: %s is not cached", virtualfn)
                 invalid = True
 
         # If any one of the variants is not present, mark as invalid for all
         if invalid:
-            for cls in (multi or "").split():
+            for cls in info.variants:
                 virtualfn = self.realfn2virtual(fn, cls)
                 if virtualfn in self.clean:
                     logger.debug(2, "Cache: Removing %s from cache", virtualfn)
@@ -332,7 +378,6 @@
         Save the cache
         Called from the parser when complete (or exiting)
         """
-        import copy
 
         if not self.has_cache:
             return
@@ -345,13 +390,12 @@
         version_data['CACHE_VER'] = __cache_version__
         version_data['BITBAKE_VER'] = bb.__version__
 
-        cache_data = copy.copy(self.depends_cache)
-        for fn in self.depends_cache:
-            if '__BB_DONT_CACHE' in self.depends_cache[fn] and self.depends_cache[fn]['__BB_DONT_CACHE']:
+        cache_data = dict(self.depends_cache)
+        for fn, info in self.depends_cache.iteritems():
+            if info.nocache:
                 logger.debug(2, "Not caching %s, marked as not cacheable", fn)
                 del cache_data[fn]
-            elif ('PV' in self.depends_cache[fn] and
-                  'SRCREVINACTION' in self.depends_cache[fn]['PV']):
+            elif info.pv and 'SRCREVINACTION' in info.pv:
                 logger.error("Not caching %s as it had SRCREVINACTION in PV. "
                              "Please report this bug", fn)
                 del cache_data[fn]
@@ -364,90 +408,14 @@
     def mtime(cachefile):
         return bb.parse.cached_mtime_noerror(cachefile)
 
-    def handle_data(self, file_name, cacheData):
+    def add(self, file_name, data, cacheData):
         """
         Save data we need into the cache
         """
-
-        pn = self.getVar('PN', file_name, True)
-        pe = self.getVar('PE', file_name, True) or "0"
-        pv = self.getVar('PV', file_name, True)
-        if 'SRCREVINACTION' in pv:
-            logger.info("Found SRCREVINACTION in PV (%s) or %s. Please report this bug.", pv, file_name)
-        pr = self.getVar('PR', file_name, True)
-        dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
-        depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
-        packages = (self.getVar('PACKAGES', file_name, True) or "").split()
-        packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
-        rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()
-
-        cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name)
-
-        # build PackageName to FileName lookup table
-        cacheData.pkg_pn[pn].append(file_name)
-
-        cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True)
-
-        cacheData.tasks[file_name] = self.getVar('__BBTASKS', file_name, True)
-        for t in cacheData.tasks[file_name]:
-            cacheData.basetaskhash[file_name + "." + t] = self.getVar("BB_BASEHASH_task-%s" % t, file_name, True)
-
-        # build FileName to PackageName lookup table
-        cacheData.pkg_fn[file_name] = pn
-        cacheData.pkg_pepvpr[file_name] = (pe, pv, pr)
-        cacheData.pkg_dp[file_name] = dp
-
-        provides = [pn]
-        for provide in (self.getVar("PROVIDES", file_name, True) or "").split():
-            if provide not in provides:
-                provides.append(provide)
-
-        # Build forward and reverse provider hashes
-        # Forward: virtual -> [filenames]
-        # Reverse: PN -> [virtuals]
-        cacheData.fn_provides[file_name] = provides
-        for provide in provides:
-            cacheData.providers[provide].append(file_name)
-            if not provide in cacheData.pn_provides[pn]:
-                cacheData.pn_provides[pn].append(provide)
-
-        for dep in depends:
-            if not dep in cacheData.deps[file_name]:
-                cacheData.deps[file_name].append(dep)
-            if not dep in cacheData.all_depends:
-                cacheData.all_depends.append(dep)
-
-        # Build reverse hash for PACKAGES, so runtime dependencies
-        # can be resolved (RDEPENDS, RRECOMMENDS etc.)
-        for package in packages:
-            cacheData.packages[package].append(file_name)
-            rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
-
-        for package in packages_dynamic:
-            cacheData.packages_dynamic[package].append(file_name)
-
-        for rprovide in rprovides:
-            cacheData.rproviders[rprovide].append(file_name)
-
-        # Build hash of runtime depends and recommends
-        rdepends = bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or "")
-        rrecommends = bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or "")
-        for package in packages + [pn]:
-            rdeps_pkg = bb.utils.explode_deps(self.getVar('RDEPENDS_%s' % package, file_name, True) or "")
-            cacheData.rundeps[file_name][package] = rdepends + rdeps_pkg
-            rrecs_pkg = bb.utils.explode_deps(self.getVar('RRECOMMENDS_%s' % package, file_name, True) or "")
-            cacheData.runrecs[file_name][package] = rrecommends + rrecs_pkg
-
-        # Collect files we may need for possible world-dep
-        # calculations
-        if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True):
-            cacheData.possible_world.append(file_name)
-
-        cacheData.hashfn[file_name] = self.getVar('BB_HASHFILENAME', file_name, True)
-
-        # Touch this to make sure it's in the cache
-        self.getVar('__BB_DONT_CACHE', file_name, True)
-        self.getVar('__VARIANTS', file_name, True)
+        realfn = self.virtualfn2realfn(file_name)[0]
+        info = RecipeInfo.from_metadata(realfn, data)
+        self.depends_cache[file_name] = info
+        cacheData.add_from_recipeinfo(file_name, info)
 
     @staticmethod
     def load_bbfile(bbfile, appends, config):
@@ -514,7 +482,6 @@ class CacheData(object):
     def __init__(self):
         """
         Direct cache variables
-        (from Cache.handle_data)
         """
         self.providers = defaultdict(list)
         self.rproviders = defaultdict(list)
@@ -547,3 +514,59 @@ class CacheData(object):
         self.world_target = set()
         self.bbfile_priority = {}
         self.bbfile_config_priorities = []
+
+    def add_from_recipeinfo(self, fn, info):
+        self.task_deps[fn] = info.task_deps
+        self.pkg_fn[fn] = info.pn
+        self.pkg_pn[info.pn].append(fn)
+        self.pkg_pepvpr[fn] = (info.pe, info.pv, info.pr)
+        self.pkg_dp[fn] = info.defaultpref
+        self.stamp[fn] = info.stamp
+
+        provides = [info.pn]
+        for provide in info.provides:
+            if provide not in provides:
+                provides.append(provide)
+        self.fn_provides[fn] = provides
+
+        for provide in provides:
+            self.providers[provide].append(fn)
+            if provide not in self.pn_provides[info.pn]:
+                self.pn_provides[info.pn].append(provide)
+
+        for dep in info.depends:
+            if dep not in self.deps[fn]:
+                self.deps[fn].append(dep)
+            if dep not in self.all_depends:
+                self.all_depends.append(dep)
+
+        rprovides = info.rprovides
+        for package in info.packages:
+            self.packages[package].append(fn)
+            rprovides += info.rprovides_pkg[package]
+
+        for package in info.packages_dynamic:
+            self.packages_dynamic[package].append(fn)
+
+        for rprovide in rprovides:
+            self.rproviders[rprovide].append(fn)
+
+        # Build hash of runtime depends and recommends
+        for package in info.packages + [info.pn]:
+            rundeps, runrecs = list(info.rdepends), list(info.rrecommends)
+            if package in info.packages:
+                rundeps += info.rdepends_pkg[package]
+                runrecs += info.rrecommends_pkg[package]
+            self.rundeps[fn][package] = rundeps
+            self.runrecs[fn][package] = runrecs
+
+        # Collect files we may need for possible world-dep
+        # calculations
+        if not info.broken and not info.not_world:
+            self.possible_world.append(fn)
+
+        self.hashfn[fn] = info.hashfilename
+
+        for task, taskhash in info.basetaskhashes.iteritems():
+            identifier = '%s.%s' % (fn, task)
+            self.basetaskhash[identifier] = taskhash
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 2e139558b..6194919e4 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -648,8 +648,7 @@ class BBCooker:
         # Load data into the cache for fn and parse the loaded cache data
         the_data = self.bb_cache.loadDataFull(fn, self.get_file_appends(fn),
                                               self.configuration.data)
-        self.bb_cache.setData(fn, buildfile, the_data)
-        self.bb_cache.handle_data(fn, self.status)
+        self.bb_cache.add(fn, the_data, self.status)
 
         # Tweak some variables
         item = self.bb_cache.getVar('PN', fn, True)
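The heart of this change is replacing ad-hoc, per-variable cache lookups (the old getVar/setData scheme) with one immutable record per recipe, built once at parse time. A minimal, self-contained sketch of that pattern follows; it is not part of the commit. FakeMetadata, its values dict, and the trimmed field list are stand-ins invented for illustration (BitBake's real datastore API is richer), and the sketch uses Python 3 syntax although the patch itself targets Python 2.

    from collections import namedtuple

    recipe_fields = ('pn', 'pv', 'defaultpref', 'depends')


    class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)):
        # No per-instance __dict__: instances stay small and immutable,
        # which matters when thousands of recipes are cached and pickled.
        __slots__ = ()

        @classmethod
        def getvar(cls, var, metadata):
            # Normalize missing variables to '' so the helpers below
            # never have to deal with None.
            return metadata.getVar(var, True) or ''

        @classmethod
        def intvar(cls, var, metadata):
            return int(cls.getvar(var, metadata) or 0)

        @classmethod
        def listvar(cls, var, metadata):
            return cls.getvar(var, metadata).split()

        @classmethod
        def from_metadata(cls, metadata):
            # A single constructor gathers every field, so the set of
            # cached variables is defined in exactly one place.
            return cls(pn=cls.getvar('PN', metadata),
                       pv=cls.getvar('PV', metadata),
                       defaultpref=cls.intvar('DEFAULT_PREFERENCE', metadata),
                       depends=cls.listvar('DEPENDS', metadata))


    class FakeMetadata(object):
        # Hypothetical stand-in for a parsed recipe's datastore.
        def __init__(self, values):
            self.values = values

        def getVar(self, var, expand=False):
            return self.values.get(var)


    md = FakeMetadata({'PN': 'busybox', 'PV': '1.17.1',
                       'DEPENDS': 'virtual/libc ncurses'})
    info = RecipeInfo.from_metadata(md)
    print(info.pn, info.defaultpref, info.depends)
    # -> busybox 0 ['virtual/libc', 'ncurses']

The design gain, as the diff above suggests, is that once a RecipeInfo exists it is never mutated: cache validity checks read plain attributes (info.timestamp, info.file_depends, info.variants), CacheData population becomes a single add_from_recipeinfo call, and the whole record pickles as one unit instead of a dict of loosely-typed entries.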