diff options
-rw-r--r--  bitbake/lib/bb/fetch/__init__.py |  9 ++++-----
-rw-r--r--  bitbake/lib/bb/persist_data.py   | 11 +++--------
2 files changed, 7 insertions(+), 13 deletions(-)
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 668b78869..d8f5f167f 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -144,14 +144,13 @@ def uri_replace(uri, uri_find, uri_replace, d):
 methods = []
 urldata_cache = {}
 saved_headrevs = {}
-persistent_database_connection = {}
 
 def fetcher_init(d):
     """
     Called to initialize the fetchers once the configuration data is known.
     Calls before this must not hit the cache.
     """
-    pd = persist_data.PersistData(d, persistent_database_connection)
+    pd = persist_data.PersistData(d)
     # When to drop SCM head revisions controlled by user policy
     srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
     if srcrev_policy == "cache":
@@ -180,7 +179,7 @@ def fetcher_compare_revisions(d):
     return true/false on whether they've changed.
     """
 
-    pd = persist_data.PersistData(d, persistent_database_connection)
+    pd = persist_data.PersistData(d)
     data = pd.getKeyValues("BB_URI_HEADREVS")
     data2 = bb.fetch.saved_headrevs
 
@@ -758,7 +757,7 @@ class Fetch(object):
         if not hasattr(self, "_latest_revision"):
             raise ParameterError
 
-        pd = persist_data.PersistData(d, persistent_database_connection)
+        pd = persist_data.PersistData(d)
         key = self.generate_revision_key(url, ud, d)
         rev = pd.getValue("BB_URI_HEADREVS", key)
         if rev != None:
@@ -775,7 +774,7 @@ class Fetch(object):
         if hasattr(self, "_sortable_revision"):
             return self._sortable_revision(url, ud, d)
 
-        pd = persist_data.PersistData(d, persistent_database_connection)
+        pd = persist_data.PersistData(d)
         key = self.generate_revision_key(url, ud, d)
 
         latest_rev = self._build_revision(url, ud, d)
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
index 76bff1665..9558e7128 100644
--- a/bitbake/lib/bb/persist_data.py
+++ b/bitbake/lib/bb/persist_data.py
@@ -47,10 +47,7 @@ class PersistData:
 
     Why sqlite? It handles all the locking issues for us.
     """
-    def __init__(self, d, persistent_database_connection):
-        if "connection" in persistent_database_connection:
-            self.cursor = persistent_database_connection["connection"].cursor()
-            return
+    def __init__(self, d):
         self.cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True)
         if self.cachedir in [None, '']:
             bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'PERSISTENT_DIR' or 'CACHE' variable.")
@@ -62,9 +59,7 @@ class PersistData:
         self.cachefile = os.path.join(self.cachedir, "bb_persist_data.sqlite3")
         logger.debug(1, "Using '%s' as the persistent data cache", self.cachefile)
 
-        connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
-        persistent_database_connection["connection"] = connection
-        self.cursor = persistent_database_connection["connection"].cursor()
+        self.connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
 
     def addDomain(self, domain):
         """
@@ -127,7 +122,7 @@ class PersistData:
         count = 0
         while True:
             try:
-                return self.cursor.execute(*query)
+                return self.connection.execute(*query)
             except sqlite3.OperationalError as e:
                 if 'database is locked' in str(e) and count < 500:
                     count = count + 1