authorLianhao Lu <lianhao.lu@intel.com>2011-12-22 15:29:11 +0800
committerRichard Purdie <richard.purdie@linuxfoundation.org>2012-01-11 10:33:24 +0000
commit9979107d8eaf503efd921564385859b1e83dbb3c (patch)
treea79dd735499d19f0b5ebefc96ccb79c001d50d6b /meta/lib
parent0df0399677a6677fc810e32e9275ee9e79021e9a (diff)
meta/PRService: Added export/import functions.
[YOCTO #1556]
- Modified meta/class/package.bbclass and prserv.bbclass according to the
  change in the PR service by adding PACKAGE_ARCH into the query tuple.
- Added prexport.bbclass and primport.bbclass to export/import AUTOPR
  values from/to the PR service.
- Moved PR-service-related common code to lib/oe/prservice.py.
- Supported reading the AUTOPR values from the exported .inc file instead
  of reading them from the remote PR service.
- Created a new script, bitbake-prserv-tool, to export/import the AUTOPR
  values from/to the PR service.

A typical export/import scenario is:
1. Run "bitbake-prserv-tool export <file>" to export the AUTOPR values from
   the current PR service into an exported .inc file.
2. Others may include that exported .inc file in their local.conf to lock
   down and reproduce the same AUTOPR values when generating package feeds.
3. Others may run "bitbake-prserv-tool import <file>" to import the AUTOPR
   values into their own PR service; the AUTOPR values will then be
   incremented from there.

Signed-off-by: Lianhao Lu <lianhao.lu@intel.com>
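To make the scenario above concrete, a minimal sketch follows. The file name
prexport.inc and the version, architecture and AUTOPR values shown are
hypothetical; the variable layout mirrors what prserv_export_tofile() in the
diff below writes:

    $ bitbake-prserv-tool export prexport.inc    # dump AUTOPR values from the running PR service
    $ bitbake-prserv-tool import prexport.inc    # replay them into another PR service

    # Excerpt of a hypothetical exported .inc file (included from local.conf):
    PRSERV_LOCKDOWN = "1"
    PRAUTO$2.6.37-r0$qemux86$<checksum> = "5"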
Diffstat (limited to 'meta/lib')
-rw-r--r--   meta/lib/oe/prservice.py   113
1 file changed, 113 insertions(+), 0 deletions(-)
diff --git a/meta/lib/oe/prservice.py b/meta/lib/oe/prservice.py
new file mode 100644
index 000000000..fa6718e91
--- /dev/null
+++ b/meta/lib/oe/prservice.py
@@ -0,0 +1,113 @@
+import bb
+
+def prserv_make_conn(d):
+    import prserv.serv
+    host = d.getVar("PRSERV_HOST",True)
+    port = d.getVar("PRSERV_PORT",True)
+    try:
+        conn = None
+        conn = prserv.serv.PRServerConnection(host,int(port))
+        d.setVar("__PRSERV_CONN",conn)
+    except Exception, exc:
+        bb.fatal("Connecting to PR service %s:%s failed: %s" % (host, port, str(exc)))
+
+    return conn
+
+def prserv_dump_db(d):
+ if d.getVar('USE_PR_SERV', True) != "1":
+ bb.error("Not using network based PR service")
+ return None
+
+ conn = d.getVar("__PRSERV_CONN", True)
+ if conn is None:
+ conn = prserv_make_conn(d)
+ if conn is None:
+ bb.error("Making connection failed to remote PR service")
+ return None
+
+ #dump db
+ opt_version = d.getVar('PRSERV_DUMPOPT_VERSION', True)
+ opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH', True)
+ opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM', True)
+ opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL', True))
+ return conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col)
+
+def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None):
+ if d.getVar('USE_PR_SERV', True) != "1":
+ bb.error("Not using network based PR service")
+ return None
+
+ conn = d.getVar("__PRSERV_CONN", True)
+ if conn is None:
+ conn = prserv_make_conn(d)
+ if conn is None:
+ bb.error("Making connection failed to remote PR service")
+ return None
+ #get the entry values
+ imported = []
+ prefix = "PRAUTO$"
+ for v in d.keys():
+ if v.startswith(prefix):
+ (remain, sep, checksum) = v.rpartition('$')
+ (remain, sep, pkgarch) = remain.rpartition('$')
+ (remain, sep, version) = remain.rpartition('$')
+ if (remain + '$' != prefix) or \
+ (filter_version and filter_version != version) or \
+ (filter_pkgarch and filter_pkgarch != pkgarch) or \
+ (filter_checksum and filter_checksum != checksum):
+ continue
+ try:
+ value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum, True))
+ except BaseException as exc:
+ bb.debug("Not valid value of %s:%s" % (v,str(exc)))
+ continue
+ ret = conn.importone(version,pkgarch,checksum,value)
+ if ret != value:
+ bb.error("importing(%s,%s,%s,%d) failed. DB may have larger value %d" % (version,pkgarch,checksum,value,ret))
+ else:
+ imported.append((version,pkgarch,checksum,value))
+ return imported
+
+def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
+    import bb.utils
+    #initialize the output file
+    bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR', True))
+    df = d.getVar('PRSERV_DUMPFILE', True)
+    #write data
+    lf = bb.utils.lockfile("%s.lock" % df)
+    f = open(df, "a")
+    if metainfo:
+        #dump column info
+        f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver'])
+        f.write("#Table: %s\n" % metainfo['tbl_name'])
+        f.write("#Columns:\n")
+        f.write("#name      \t type    \t notn    \t dflt    \t pk\n")
+        f.write("#----------\t --------\t --------\t --------\t ----\n")
+        for i in range(len(metainfo['col_info'])):
+            f.write("#%10s\t %8s\t %8s\t %8s\t %4s\n" %
+                    (metainfo['col_info'][i]['name'],
+                     metainfo['col_info'][i]['type'],
+                     metainfo['col_info'][i]['notnull'],
+                     metainfo['col_info'][i]['dflt_value'],
+                     metainfo['col_info'][i]['pk']))
+        f.write("\n")
+
+    if lockdown:
+        f.write("PRSERV_LOCKDOWN = \"1\"\n\n")
+
+    if datainfo:
+        idx = {}
+        for i in range(len(datainfo)):
+            pkgarch = datainfo[i]['pkgarch']
+            value = datainfo[i]['value']
+            if not idx.has_key(pkgarch):
+                idx[pkgarch] = i
+            elif value > datainfo[idx[pkgarch]]['value']:
+                idx[pkgarch] = i
+            f.write("PRAUTO$%s$%s$%s = \"%s\"\n" %
+                    (str(datainfo[i]['version']), pkgarch, str(datainfo[i]['checksum']), str(value)))
+        if not nomax:
+            for i in idx:
+                f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value'])))
+    f.close()
+    bb.utils.unlockfile(lf)
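For orientation, here is a minimal sketch (not part of this commit) of how an
exporting helper, e.g. in prexport.bbclass, might chain the functions above.
The function name export_autopr and the assumption that prserv_dump_db()
returns a (metainfo, datainfo) pair matching prserv_export_tofile()'s
parameters are illustrative only:

    import bb
    import oe.prservice

    def export_autopr(d):
        # prserv_dump_db() returns whatever conn.export() produces; assumed
        # here to be a (metainfo, datainfo) pair.
        retval = oe.prservice.prserv_dump_db(d)
        if not retval:
            bb.error("PR service dump failed, nothing exported")
            return
        (metainfo, datainfo) = retval
        # Write the table description and the PRAUTO$... entries with lockdown
        # enabled, so consumers of the .inc file reuse the values verbatim.
        oe.prservice.prserv_export_tofile(d, metainfo, datainfo, True)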