From 951cbf3f65f347c7a7bbcae193218f9187a15fbf Mon Sep 17 00:00:00 2001
From: Khem Raj
Date: Wed, 23 Feb 2011 10:48:51 -0800
Subject: bitbake: Remove in-tree version

Bitbake should be used by checking it out from its own repo

Signed-off-by: Khem Raj
---
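Notes:

With the in-tree copy gone, builds need a standalone bitbake checkout on the
PATH. A minimal sketch of the intended workflow follows (the clone URL and
checkout location are illustrative, not mandated by this patch; use whatever
the bitbake project documents as canonical):

    # Fetch bitbake from its own upstream repository instead of the
    # copy this patch removes from the tree.
    git clone git://git.openembedded.org/bitbake
    # Make the standalone checkout visible to the build environment.
    export PATH="$PWD/bitbake/bin:$PATH"
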
 bitbake/lib/bb/COW.py                        |  323 -----
 bitbake/lib/bb/__init__.py                   |  139 ---
 bitbake/lib/bb/build.py                      |  472 --------
 bitbake/lib/bb/cache.py                      |  632 ----------
 bitbake/lib/bb/codeparser.py                 |  336 ------
 bitbake/lib/bb/command.py                    |  271 -----
 bitbake/lib/bb/cooker.py                     | 1078 -----------------
 bitbake/lib/bb/daemonize.py                  |  190 ---
 bitbake/lib/bb/data.py                       |  338 ------
 bitbake/lib/bb/data_smart.py                 |  428 -------
 bitbake/lib/bb/event.py                      |  386 ------
 bitbake/lib/bb/fetch/__init__.py             |  836 -------------
 bitbake/lib/bb/fetch/bzr.py                  |  148 ---
 bitbake/lib/bb/fetch/cvs.py                  |  172 ---
 bitbake/lib/bb/fetch/git.py                  |  339 ------
 bitbake/lib/bb/fetch/hg.py                   |  180 ---
 bitbake/lib/bb/fetch/local.py                |   73 --
 bitbake/lib/bb/fetch/osc.py                  |  143 ---
 bitbake/lib/bb/fetch/perforce.py             |  206 ----
 bitbake/lib/bb/fetch/repo.py                 |   98 --
 bitbake/lib/bb/fetch/ssh.py                  |  118 --
 bitbake/lib/bb/fetch/svk.py                  |  104 --
 bitbake/lib/bb/fetch/svn.py                  |  204 ----
 bitbake/lib/bb/fetch/wget.py                 |   93 --
 bitbake/lib/bb/fetch2/__init__.py            | 1074 -----------------
 bitbake/lib/bb/fetch2/bzr.py                 |  141 ---
 bitbake/lib/bb/fetch2/cvs.py                 |  181 ---
 bitbake/lib/bb/fetch2/git.py                 |  242 ----
 bitbake/lib/bb/fetch2/hg.py                  |  174 ---
 bitbake/lib/bb/fetch2/local.py               |   80 --
 bitbake/lib/bb/fetch2/osc.py                 |  135 ---
 bitbake/lib/bb/fetch2/perforce.py            |  196 ---
 bitbake/lib/bb/fetch2/repo.py                |   98 --
 bitbake/lib/bb/fetch2/ssh.py                 |  120 --
 bitbake/lib/bb/fetch2/svk.py                 |   97 --
 bitbake/lib/bb/fetch2/svn.py                 |  180 ---
 bitbake/lib/bb/fetch2/wget.py                |   91 --
 bitbake/lib/bb/methodpool.py                 |   84 --
 bitbake/lib/bb/msg.py                        |  200 ----
 bitbake/lib/bb/parse/__init__.py             |  123 --
 bitbake/lib/bb/parse/ast.py                  |  446 -------
 bitbake/lib/bb/parse/parse_py/BBHandler.py   |  254 ----
 bitbake/lib/bb/parse/parse_py/ConfHandler.py |  139 ---
 bitbake/lib/bb/parse/parse_py/__init__.py    |   33 -
 bitbake/lib/bb/persist_data.py               |  194 ---
 bitbake/lib/bb/process.py                    |  109 --
 bitbake/lib/bb/providers.py                  |  330 -----
 bitbake/lib/bb/pysh/__init__.py              |    0
 bitbake/lib/bb/pysh/builtin.py               |  710 -----------
 bitbake/lib/bb/pysh/interp.py                | 1367 ---------------------
 bitbake/lib/bb/pysh/lsprof.py                |  116 --
 bitbake/lib/bb/pysh/pysh.py                  |  167 ---
 bitbake/lib/bb/pysh/pyshlex.py               |  888 --------------
 bitbake/lib/bb/pysh/pyshyacc.py              |  779 ------------
 bitbake/lib/bb/pysh/sherrors.py              |   41 -
 bitbake/lib/bb/pysh/subprocess_fix.py        |   77 --
 bitbake/lib/bb/runqueue.py                   | 1663 --------------------------
 bitbake/lib/bb/server/__init__.py            |    0
 bitbake/lib/bb/server/none.py                |  195 ---
 bitbake/lib/bb/server/xmlrpc.py              |  260 ----
 bitbake/lib/bb/shell.py                      |  820 -------------
 bitbake/lib/bb/siggen.py                     |  298 -----
 bitbake/lib/bb/taskdata.py                   |  586 ---------
 bitbake/lib/bb/ui/__init__.py                |   17 -
 bitbake/lib/bb/ui/crumbs/__init__.py         |   17 -
 bitbake/lib/bb/ui/crumbs/buildmanager.py     |  455 -------
 bitbake/lib/bb/ui/crumbs/progress.py         |   17 -
 bitbake/lib/bb/ui/crumbs/puccho.glade        |  606 ----------
 bitbake/lib/bb/ui/crumbs/runningbuild.py     |  311 -----
 bitbake/lib/bb/ui/depexp.py                  |  307 -----
 bitbake/lib/bb/ui/goggle.py                  |  110 --
 bitbake/lib/bb/ui/knotty.py                  |  248 ----
 bitbake/lib/bb/ui/ncurses.py                 |  352 ------
 bitbake/lib/bb/ui/puccho.py                  |  425 -------
 bitbake/lib/bb/ui/uievent.py                 |  127 --
 bitbake/lib/bb/ui/uihelper.py                |   42 -
 bitbake/lib/bb/utils.py                      |  845 -------------
 77 files changed, 23874 deletions(-)
 delete mode 100644 bitbake/lib/bb/COW.py
 delete mode 100644 bitbake/lib/bb/__init__.py
 delete mode 100644 bitbake/lib/bb/build.py
 delete mode 100644 bitbake/lib/bb/cache.py
 delete mode 100644 bitbake/lib/bb/codeparser.py
 delete mode 100644 bitbake/lib/bb/command.py
 delete mode 100644 bitbake/lib/bb/cooker.py
 delete mode 100644 bitbake/lib/bb/daemonize.py
 delete mode 100644 bitbake/lib/bb/data.py
 delete mode 100644 bitbake/lib/bb/data_smart.py
 delete mode 100644 bitbake/lib/bb/event.py
 delete mode 100644 bitbake/lib/bb/fetch/__init__.py
 delete mode 100644 bitbake/lib/bb/fetch/bzr.py
 delete mode 100644 bitbake/lib/bb/fetch/cvs.py
 delete mode 100644 bitbake/lib/bb/fetch/git.py
 delete mode 100644 bitbake/lib/bb/fetch/hg.py
 delete mode 100644 bitbake/lib/bb/fetch/local.py
 delete mode 100644 bitbake/lib/bb/fetch/osc.py
 delete mode 100644 bitbake/lib/bb/fetch/perforce.py
 delete mode 100644 bitbake/lib/bb/fetch/repo.py
 delete mode 100644 bitbake/lib/bb/fetch/ssh.py
 delete mode 100644 bitbake/lib/bb/fetch/svk.py
 delete mode 100644 bitbake/lib/bb/fetch/svn.py
 delete mode 100644 bitbake/lib/bb/fetch/wget.py
 delete mode 100644 bitbake/lib/bb/fetch2/__init__.py
 delete mode 100644 bitbake/lib/bb/fetch2/bzr.py
 delete mode 100644 bitbake/lib/bb/fetch2/cvs.py
 delete mode 100644 bitbake/lib/bb/fetch2/git.py
 delete mode 100644 bitbake/lib/bb/fetch2/hg.py
 delete mode 100644 bitbake/lib/bb/fetch2/local.py
 delete mode 100644 bitbake/lib/bb/fetch2/osc.py
 delete mode 100644 bitbake/lib/bb/fetch2/perforce.py
 delete mode 100644 bitbake/lib/bb/fetch2/repo.py
 delete mode 100644 bitbake/lib/bb/fetch2/ssh.py
 delete mode 100644 bitbake/lib/bb/fetch2/svk.py
 delete mode 100644 bitbake/lib/bb/fetch2/svn.py
 delete mode 100644 bitbake/lib/bb/fetch2/wget.py
 delete mode 100644 bitbake/lib/bb/methodpool.py
 delete mode 100644 bitbake/lib/bb/msg.py
 delete mode 100644 bitbake/lib/bb/parse/__init__.py
 delete mode 100644 bitbake/lib/bb/parse/ast.py
 delete mode 100644 bitbake/lib/bb/parse/parse_py/BBHandler.py
 delete mode 100644 bitbake/lib/bb/parse/parse_py/ConfHandler.py
 delete mode 100644 bitbake/lib/bb/parse/parse_py/__init__.py
 delete mode 100644 bitbake/lib/bb/persist_data.py
 delete mode 100644 bitbake/lib/bb/process.py
 delete mode 100644 bitbake/lib/bb/providers.py
 delete mode 100644 bitbake/lib/bb/pysh/__init__.py
 delete mode 100644 bitbake/lib/bb/pysh/builtin.py
 delete mode 100644 bitbake/lib/bb/pysh/interp.py
 delete mode 100644 bitbake/lib/bb/pysh/lsprof.py
 delete mode 100644 bitbake/lib/bb/pysh/pysh.py
 delete mode 100644 bitbake/lib/bb/pysh/pyshlex.py
 delete mode 100644 bitbake/lib/bb/pysh/pyshyacc.py
 delete mode 100644 bitbake/lib/bb/pysh/sherrors.py
 delete mode 100644 bitbake/lib/bb/pysh/subprocess_fix.py
 delete mode 100644 bitbake/lib/bb/runqueue.py
 delete mode 100644 bitbake/lib/bb/server/__init__.py
 delete mode 100644 bitbake/lib/bb/server/none.py
 delete mode 100644 bitbake/lib/bb/server/xmlrpc.py
 delete mode 100644 bitbake/lib/bb/shell.py
 delete mode 100644 bitbake/lib/bb/siggen.py
 delete mode 100644 bitbake/lib/bb/taskdata.py
 delete mode 100644 bitbake/lib/bb/ui/__init__.py
 delete mode 100644 bitbake/lib/bb/ui/crumbs/__init__.py
 delete mode 100644 bitbake/lib/bb/ui/crumbs/buildmanager.py
 delete mode 100644 bitbake/lib/bb/ui/crumbs/progress.py
 delete mode 100644 bitbake/lib/bb/ui/crumbs/puccho.glade
 delete mode 100644 bitbake/lib/bb/ui/crumbs/runningbuild.py
 delete mode 100644 bitbake/lib/bb/ui/depexp.py
 delete mode 100644 bitbake/lib/bb/ui/goggle.py
 delete mode 100644 bitbake/lib/bb/ui/knotty.py
 delete mode 100644 bitbake/lib/bb/ui/ncurses.py
 delete mode 100644 bitbake/lib/bb/ui/puccho.py
 delete mode 100644 bitbake/lib/bb/ui/uievent.py
 delete mode 100644 bitbake/lib/bb/ui/uihelper.py
 delete mode 100644 bitbake/lib/bb/utils.py

diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
deleted file mode 100644
index 6917ec378..000000000
--- a/bitbake/lib/bb/COW.py
+++ /dev/null
@@ -1,323 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# This is a copy on write dictionary and set which abuses classes to try and be nice and fast.
-#
-# Copyright (C) 2006 Tim Amsell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-#Please Note:
-# Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW.
-# Assign a file to __warn__ to get warnings about slow operations.
-#
-
-from __future__ import print_function
-import copy
-import types
-ImmutableTypes = (
-    types.NoneType,
-    bool,
-    complex,
-    float,
-    int,
-    long,
-    tuple,
-    frozenset,
-    basestring
-)
-
-MUTABLE = "__mutable__"
-
-class COWMeta(type):
-    pass
-
-class COWDictMeta(COWMeta):
-    __warn__ = False
-    __hasmutable__ = False
-    __marker__ = tuple()
-
-    def __str__(cls):
-        # FIXME: I have magic numbers!
-        return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
-    __repr__ = __str__
-
-    def cow(cls):
-        class C(cls):
-            __count__ = cls.__count__ + 1
-        return C
-    copy = cow
-    __call__ = cow
-
-    def __setitem__(cls, key, value):
-        if not isinstance(value, ImmutableTypes):
-            if not isinstance(value, COWMeta):
-                cls.__hasmutable__ = True
-            key += MUTABLE
-        setattr(cls, key, value)
-
-    def __getmutable__(cls, key, readonly=False):
-        nkey = key + MUTABLE
-        try:
-            return cls.__dict__[nkey]
-        except KeyError:
-            pass
-
-        value = getattr(cls, nkey)
-        if readonly:
-            return value
-
-        if not cls.__warn__ is False and not isinstance(value, COWMeta):
-            print("Warning: Doing a copy because %s is a mutable type." % key, file=cls.__warn__)
-        try:
-            value = value.copy()
-        except AttributeError as e:
-            value = copy.copy(value)
-        setattr(cls, nkey, value)
-        return value
-
-    __getmarker__ = []
-    def __getreadonly__(cls, key, default=__getmarker__):
-        """\
-        Get a value (even if mutable) which you promise not to change.
-        """
-        return cls.__getitem__(key, default, True)
-
-    def __getitem__(cls, key, default=__getmarker__, readonly=False):
-        try:
-            try:
-                value = getattr(cls, key)
-            except AttributeError:
-                value = cls.__getmutable__(key, readonly)
-
-            # This is for values which have been deleted
-            if value is cls.__marker__:
-                raise AttributeError("key %s does not exist." % key)
-
-            return value
-        except AttributeError as e:
-            if not default is cls.__getmarker__:
-                return default
-
-            raise KeyError(str(e))
-
-    def __delitem__(cls, key):
-        cls.__setitem__(key, cls.__marker__)
-
-    def __revertitem__(cls, key):
-        if not cls.__dict__.has_key(key):
-            key += MUTABLE
-        delattr(cls, key)
-
-    def __contains__(cls, key):
-        return cls.has_key(key)
-
-    def has_key(cls, key):
-        value = cls.__getreadonly__(key, cls.__marker__)
-        if value is cls.__marker__:
-            return False
-        return True
-
-    def iter(cls, type, readonly=False):
-        for key in dir(cls):
-            if key.startswith("__"):
-                continue
-
-            if key.endswith(MUTABLE):
-                key = key[:-len(MUTABLE)]
-
-            if type == "keys":
-                yield key
-
-            try:
-                if readonly:
-                    value = cls.__getreadonly__(key)
-                else:
-                    value = cls[key]
-            except KeyError:
-                continue
-
-            if type == "values":
-                yield value
-            if type == "items":
-                yield (key, value)
-        raise StopIteration()
-
-    def iterkeys(cls):
-        return cls.iter("keys")
-    def itervalues(cls, readonly=False):
-        if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
-            print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
-        return cls.iter("values", readonly)
-    def iteritems(cls, readonly=False):
-        if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
-            print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
-        return cls.iter("items", readonly)
-
-class COWSetMeta(COWDictMeta):
-    def __str__(cls):
-        # FIXME: I have magic numbers!
-        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3)
-    __repr__ = __str__
-
-    def cow(cls):
-        class C(cls):
-            __count__ = cls.__count__ + 1
-        return C
-
-    def add(cls, value):
-        COWDictMeta.__setitem__(cls, repr(hash(value)), value)
-
-    def remove(cls, value):
-        COWDictMeta.__delitem__(cls, repr(hash(value)))
-
-    def __in__(cls, value):
-        return COWDictMeta.has_key(repr(hash(value)))
-
-    def iterkeys(cls):
-        raise TypeError("sets don't have keys")
-
-    def iteritems(cls):
-        raise TypeError("sets don't have 'items'")
-
-# These are the actual classes you use!
-class COWDictBase(object):
-    __metaclass__ = COWDictMeta
-    __count__ = 0
-
-class COWSetBase(object):
-    __metaclass__ = COWSetMeta
-    __count__ = 0
-
-if __name__ == "__main__":
-    import sys
-    COWDictBase.__warn__ = sys.stderr
-    a = COWDictBase()
-    print("a", a)
-
-    a['a'] = 'a'
-    a['b'] = 'b'
-    a['dict'] = {}
-
-    b = a.copy()
-    print("b", b)
-    b['c'] = 'b'
-
-    print()
-
-    print("a", a)
-    for x in a.iteritems():
-        print(x)
-    print("--")
-    print("b", b)
-    for x in b.iteritems():
-        print(x)
-    print()
-
-    b['dict']['a'] = 'b'
-    b['a'] = 'c'
-
-    print("a", a)
-    for x in a.iteritems():
-        print(x)
-    print("--")
-    print("b", b)
-    for x in b.iteritems():
-        print(x)
-    print()
-
-    try:
-        b['dict2']
-    except KeyError as e:
-        print("Okay!")
-
-    a['set'] = COWSetBase()
-    a['set'].add("o1")
-    a['set'].add("o1")
-    a['set'].add("o2")
-
-    print("a", a)
-    for x in a['set'].itervalues():
-        print(x)
-    print("--")
-    print("b", b)
-    for x in b['set'].itervalues():
-        print(x)
-    print()
-
-    b['set'].add('o3')
-
-    print("a", a)
-    for x in a['set'].itervalues():
-        print(x)
-    print("--")
-    print("b", b)
-    for x in b['set'].itervalues():
-        print(x)
-    print()
-
-    a['set2'] = set()
-    a['set2'].add("o1")
-    a['set2'].add("o1")
-    a['set2'].add("o2")
-
-    print("a", a)
-    for x in a.iteritems():
-        print(x)
-    print("--")
-    print("b", b)
-    for x in b.iteritems(readonly=True):
-        print(x)
-    print()
-
-    del b['b']
-    try:
-        print(b['b'])
-    except KeyError:
-        print("Yay! deleted key raises error")
-
-    if b.has_key('b'):
-        print("Boo!")
-    else:
-        print("Yay - has_key with delete works!")
-
-    print("a", a)
-    for x in a.iteritems():
-        print(x)
-    print("--")
-    print("b", b)
-    for x in b.iteritems(readonly=True):
-        print(x)
-    print()
-
-    b.__revertitem__('b')
-
-    print("a", a)
-    for x in a.iteritems():
-        print(x)
-    print("--")
-    print("b", b)
-    for x in b.iteritems(readonly=True):
-        print(x)
-    print()
-
-    b.__revertitem__('dict')
-    print("a", a)
-    for x in a.iteritems():
-        print(x)
-    print("--")
-    print("b", b)
-    for x in b.iteritems(readonly=True):
-        print(x)
-    print()
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
deleted file mode 100644
index 4c7afc9c2..000000000
--- a/bitbake/lib/bb/__init__.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake Build System Python Library
-#
-# Copyright (C) 2003  Holger Schurig
-# Copyright (C) 2003, 2004  Chris Larson
-#
-# Based on Gentoo's portage.py.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-__version__ = "1.11.0"
-
-import sys
-if sys.version_info < (2, 6, 0):
-    raise RuntimeError("Sorry, python 2.6.0 or later is required for this version of bitbake")
-
-import os
-import logging
-import traceback
-
-class NullHandler(logging.Handler):
-    def emit(self, record):
-        pass
-
-Logger = logging.getLoggerClass()
-class BBLogger(Logger):
-    def __init__(self, name):
-        if name.split(".")[0] == "BitBake":
-            self.debug = self.bbdebug
-        Logger.__init__(self, name)
-
-    def bbdebug(self, level, msg, *args, **kwargs):
-        return self.log(logging.DEBUG - level + 1, msg, *args, **kwargs)
-
-    def plain(self, msg, *args, **kwargs):
-        return self.log(logging.INFO + 1, msg, *args, **kwargs)
-
-    def verbose(self, msg, *args, **kwargs):
-        return self.log(logging.INFO - 1, msg, *args, **kwargs)
-
-    def exception(self, msg, *args, **kwargs):
-        return self.critical("%s\n%s" % (msg, traceback.format_exc()), *args, **kwargs)
-
-logging.raiseExceptions = False
-logging.setLoggerClass(BBLogger)
-
-logger = logging.getLogger("BitBake")
-logger.addHandler(NullHandler())
-logger.setLevel(logging.INFO)
-
-# This has to be imported after the setLoggerClass, as the import of bb.msg
-# can result in construction of the various loggers.
-import bb.msg
-
-if "BBDEBUG" in os.environ:
-    level = int(os.environ["BBDEBUG"])
-    if level:
-        bb.msg.set_debug_level(level)
-
-if True or os.environ.get("BBFETCH2"):
-    from bb import fetch2 as fetch
-    sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
-
-# Messaging convenience functions
-def plain(*args):
-    logger.plain(''.join(args))
-
-def debug(lvl, *args):
-    logger.debug(lvl, ''.join(args))
-
-def note(*args):
-    logger.info(''.join(args))
-
-def warn(*args):
-    logger.warn(''.join(args))
-
-def error(*args):
-    logger.error(''.join(args))
-
-def fatal(*args):
-    logger.critical(''.join(args))
-    sys.exit(1)
-
-
-def deprecated(func, name = None, advice = ""):
-    """This is a decorator which can be used to mark functions
-    as deprecated. It will result in a warning being emmitted
-    when the function is used."""
-    import warnings
-
-    if advice:
-        advice = ": %s" % advice
-    if name is None:
-        name = func.__name__
-
-    def newFunc(*args, **kwargs):
-        warnings.warn("Call to deprecated function %s%s." % (name,
-                      advice),
-                      category = PendingDeprecationWarning,
-                      stacklevel = 2)
-        return func(*args, **kwargs)
-    newFunc.__name__ = func.__name__
-    newFunc.__doc__ = func.__doc__
-    newFunc.__dict__.update(func.__dict__)
-    return newFunc
-
-# For compatibility
-def deprecate_import(current, modulename, fromlist, renames = None):
-    """Import objects from one module into another, wrapping them with a DeprecationWarning"""
-    import sys
-
-    module = __import__(modulename, fromlist = fromlist)
-    for position, objname in enumerate(fromlist):
-        obj = getattr(module, objname)
-        newobj = deprecated(obj, "{0}.{1}".format(current, objname),
-                            "Please use {0}.{1} instead".format(modulename, objname))
-        if renames:
-            newname = renames[position]
-        else:
-            newname = objname
-
-        setattr(sys.modules[current], newname, newobj)
-
-deprecate_import(__name__, "bb.fetch", ("MalformedUrl", "encodeurl", "decodeurl"))
-deprecate_import(__name__, "bb.utils", ("mkdirhier", "movefile", "copyfile", "which"))
-deprecate_import(__name__, "bb.utils", ["vercmp_string"], ["vercmp"])
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
deleted file mode 100644
index a7664bd36..000000000
--- a/bitbake/lib/bb/build.py
+++ /dev/null
@@ -1,472 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake 'Build' implementation
-#
-# Core code for function execution and task handling in the
-# BitBake build tools.
-#
-# Copyright (C) 2003, 2004  Chris Larson
-#
-# Based on Gentoo's portage.py.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-#Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import bb
-import bb.msg
-import bb.process
-from contextlib import nested
-from bb import data, event, mkdirhier, utils
-
-bblogger = logging.getLogger('BitBake')
-logger = logging.getLogger('BitBake.Build')
-
-NULL = open(os.devnull, 'r+')
-
-
-# When we execute a python function we'd like certain things
-# in all namespaces, hence we add them to __builtins__
-# If we do not do this and use the exec globals, they will
-# not be available to subfunctions.
-__builtins__['bb'] = bb
-__builtins__['os'] = os
-
-class FuncFailed(Exception):
-    def __init__(self, name = None, logfile = None):
-        self.logfile = logfile
-        self.name = name
-        if name:
-            self.msg = "Function '%s' failed" % name
-        else:
-            self.msg = "Function failed"
-
-    def __str__(self):
-        if self.logfile and os.path.exists(self.logfile):
-            msg = ("%s (see %s for further information)" %
-                   (self.msg, self.logfile))
-        else:
-            msg = self.msg
-        return msg
-
-class TaskBase(event.Event):
-    """Base class for task events"""
-
-    def __init__(self, t, d ):
-        self._task = t
-        self._package = bb.data.getVar("PF", d, 1)
-        event.Event.__init__(self)
-        self._message = "package %s: task %s: %s" % (bb.data.getVar("PF", d, 1), t, bb.event.getName(self)[4:])
-
-    def getTask(self):
-        return self._task
-
-    def setTask(self, task):
-        self._task = task
-
-    task = property(getTask, setTask, None, "task property")
-
-class TaskStarted(TaskBase):
-    """Task execution started"""
-
-class TaskSucceeded(TaskBase):
-    """Task execution completed"""
-
-class TaskFailed(TaskBase):
-    """Task execution failed"""
-
-    def __init__(self, task, logfile, metadata):
-        self.logfile = logfile
-        super(TaskFailed, self).__init__(task, metadata)
-
-class TaskInvalid(TaskBase):
-
-    def __init__(self, task, metadata):
-        super(TaskInvalid, self).__init__(task, metadata)
-        self._message = "No such task '%s'" % task
-
-
-class LogTee(object):
-    def __init__(self, logger, outfile):
-        self.outfile = outfile
-        self.logger = logger
-        self.name = self.outfile.name
-
-    def write(self, string):
-        self.logger.plain(string)
-        self.outfile.write(string)
-
-    def __enter__(self):
-        self.outfile.__enter__()
-        return self
-
-    def __exit__(self, *excinfo):
-        self.outfile.__exit__(*excinfo)
-
-    def __repr__(self):
-        return '<LogTee {0}>'.format(self.name)
-
-
-def exec_func(func, d, dirs = None):
-    """Execute an BB 'function'"""
-
-    body = data.getVar(func, d)
-    if not body:
-        if body is None:
-            logger.warn("Function %s doesn't exist", func)
-        return
-
-    flags = data.getVarFlags(func, d)
-    cleandirs = flags.get('cleandirs')
-    if cleandirs:
-        for cdir in data.expand(cleandirs, d).split():
-            bb.utils.remove(cdir, True)
-
-    if dirs is None:
-        dirs = flags.get('dirs')
-        if dirs:
-            dirs = data.expand(dirs, d).split()
-
-    if dirs:
-        for adir in dirs:
-            bb.utils.mkdirhier(adir)
-        adir = dirs[-1]
-    else:
-        adir = data.getVar('B', d, 1)
-        if not os.path.exists(adir):
-            adir = None
-
-    ispython = flags.get('python')
-    if flags.get('fakeroot') and not flags.get('task'):
-        bb.fatal("Function %s specifies fakeroot but isn't a task?!" % func)
-
-    lockflag = flags.get('lockfiles')
-    if lockflag:
-        lockfiles = [data.expand(f, d) for f in lockflag.split()]
-    else:
-        lockfiles = None
-
-    tempdir = data.getVar('T', d, 1)
-    runfile = os.path.join(tempdir, 'run.{0}.{1}'.format(func, os.getpid()))
-
-    with bb.utils.fileslocked(lockfiles):
-        if ispython:
-            exec_func_python(func, d, runfile, cwd=adir)
-        else:
-            exec_func_shell(func, d, runfile, cwd=adir)
-
-_functionfmt = """
-def {function}(d):
-{body}
-
-{function}(d)
-"""
-logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
-def exec_func_python(func, d, runfile, cwd=None):
-    """Execute a python BB 'function'"""
-
-    bbfile = d.getVar('FILE', True)
-    try:
-        olddir = os.getcwd()
-    except OSError:
-        olddir = None
-    code = _functionfmt.format(function=func, body=d.getVar(func, True))
-    bb.utils.mkdirhier(os.path.dirname(runfile))
-    with open(runfile, 'w') as script:
-        script.write(code)
-
-    if cwd:
-        os.chdir(cwd)
-
-    try:
-        comp = utils.better_compile(code, func, bbfile)
-        utils.better_exec(comp, {"d": d}, code, bbfile)
-    except:
-        if sys.exc_info()[0] in (bb.parse.SkipPackage, bb.build.FuncFailed):
-            raise
-
-        raise FuncFailed(func, None)
-    finally:
-        if olddir:
-            os.chdir(olddir)
-
-def exec_func_shell(function, d, runfile, cwd=None):
-    """Execute a shell function from the metadata
-
-    Note on directory behavior.  The 'dirs' varflag should contain a list
-    of the directories you need created prior to execution.  The last
-    item in the list is where we will chdir/cd to.
-    """
-
-    # Don't let the emitted shell script override PWD
-    d.delVarFlag('PWD', 'export')
-
-    with open(runfile, 'w') as script:
-        script.write('#!/bin/sh -e\n')
-        if logger.isEnabledFor(logging.DEBUG):
-            script.write("set -x\n")
-        data.emit_func(function, script, d)
-
-        script.write("%s\n" % function)
-        os.fchmod(script.fileno(), 0775)
-
-    env = {
-        'PATH': d.getVar('PATH', True),
-        'LC_ALL': 'C',
-    }
-
-    cmd = runfile
-
-    if logger.isEnabledFor(logging.DEBUG):
-        logfile = LogTee(logger, sys.stdout)
-    else:
-        logfile = sys.stdout
-
-    try:
-        bb.process.run(cmd, env=env, cwd=cwd, shell=False, stdin=NULL,
-                       log=logfile)
-    except bb.process.CmdError:
-        logfn = d.getVar('BB_LOGFILE', True)
-        raise FuncFailed(function, logfn)
-
-def _task_data(fn, task, d):
-    localdata = data.createCopy(d)
-    localdata.setVar('BB_FILENAME', fn)
-    localdata.setVar('BB_CURRENTTASK', task[3:])
-    localdata.setVar('OVERRIDES', 'task-%s:%s' %
-                     (task[3:], d.getVar('OVERRIDES', False)))
-    localdata.finalize()
-    data.expandKeys(localdata)
-    return localdata
-
-def _exec_task(fn, task, d, quieterr):
-    """Execute a BB 'task'
-
-    Execution of a task involves a bit more setup than executing a function,
-    running it with its own local metadata, and with some useful variables set.
- """ - if not data.getVarFlag(task, 'task', d): - event.fire(TaskInvalid(task, d), d) - logger.error("No such task: %s" % task) - return 1 - - logger.debug(1, "Executing task %s", task) - - localdata = _task_data(fn, task, d) - tempdir = localdata.getVar('T', True) - if not tempdir: - bb.fatal("T variable not set, unable to build") - - bb.utils.mkdirhier(tempdir) - loglink = os.path.join(tempdir, 'log.{0}'.format(task)) - logfn = os.path.join(tempdir, 'log.{0}.{1}'.format(task, os.getpid())) - if loglink: - bb.utils.remove(loglink) - - try: - os.symlink(logfn, loglink) - except OSError: - pass - - prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True) - postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True) - - # Handle logfiles - si = file('/dev/null', 'r') - try: - logfile = file(logfn, 'w') - except OSError: - logger.exception("Opening log file '%s'", logfn) - pass - - # Dup the existing fds so we dont lose them - osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()] - oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()] - ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()] - - # Replace those fds with our own - os.dup2(si.fileno(), osi[1]) - os.dup2(logfile.fileno(), oso[1]) - os.dup2(logfile.fileno(), ose[1]) - - # Ensure python logging goes to the logfile - handler = logging.StreamHandler(logfile) - handler.setFormatter(logformatter) - bblogger.addHandler(handler) - - localdata.setVar('BB_LOGFILE', logfn) - - event.fire(TaskStarted(task, localdata), localdata) - try: - for func in (prefuncs or '').split(): - exec_func(func, localdata) - exec_func(task, localdata) - for func in (postfuncs or '').split(): - exec_func(func, localdata) - except FuncFailed as exc: - if not quieterr: - logger.error(str(exc)) - event.fire(TaskFailed(task, logfn, localdata), localdata) - return 1 - finally: - sys.stdout.flush() - sys.stderr.flush() - - bblogger.removeHandler(handler) - - # Restore the backup fds - os.dup2(osi[0], osi[1]) - os.dup2(oso[0], oso[1]) - os.dup2(ose[0], ose[1]) - - # Close the backup fds - os.close(osi[0]) - os.close(oso[0]) - os.close(ose[0]) - si.close() - - logfile.close() - if os.path.exists(logfn) and os.path.getsize(logfn) == 0: - logger.debug(2, "Zero size logfn %s, removing", logfn) - bb.utils.remove(logfn) - bb.utils.remove(loglink) - event.fire(TaskSucceeded(task, localdata), localdata) - - if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'): - make_stamp(task, localdata) - - return 0 - -def exec_task(fn, task, d): - try: - quieterr = False - if d.getVarFlag(task, "quieterrors") is not None: - quieterr = True - - return _exec_task(fn, task, d, quieterr) - except Exception: - from traceback import format_exc - if not quieterr: - logger.error("Build of %s failed" % (task)) - logger.error(format_exc()) - failedevent = TaskFailed(task, None, d) - event.fire(failedevent, d) - return 1 - -def stamp_internal(taskname, d, file_name): - """ - Internal stamp helper function - Makes sure the stamp directory exists - Returns the stamp path+filename - - In the bitbake core, d can be a CacheData and file_name will be set. 
-    When called in task context, d will be a data store, file_name will not be set
-    """
-    taskflagname = taskname
-    if taskname.endswith("_setscene") and taskname != "do_setscene":
-        taskflagname = taskname.replace("_setscene", "")
-
-    if file_name:
-        stamp = d.stamp[file_name]
-        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
-    else:
-        stamp = d.getVar('STAMP', True)
-        file_name = d.getVar('BB_FILENAME', True)
-        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
-
-    if not stamp:
-        return
-
-    stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
-
-    bb.utils.mkdirhier(os.path.dirname(stamp))
-
-    return stamp
-
-def make_stamp(task, d, file_name = None):
-    """
-    Creates/updates a stamp for a given task
-    (d can be a data dict or dataCache)
-    """
-    stamp = stamp_internal(task, d, file_name)
-    # Remove the file and recreate to force timestamp
-    # change on broken NFS filesystems
-    if stamp:
-        bb.utils.remove(stamp)
-        f = open(stamp, "w")
-        f.close()
-
-def del_stamp(task, d, file_name = None):
-    """
-    Removes a stamp for a given task
-    (d can be a data dict or dataCache)
-    """
-    stamp = stamp_internal(task, d, file_name)
-    bb.utils.remove(stamp)
-
-def stampfile(taskname, d, file_name = None):
-    """
-    Return the stamp for a given task
-    (d can be a data dict or dataCache)
-    """
-    return stamp_internal(taskname, d, file_name)
-
-def add_tasks(tasklist, d):
-    task_deps = data.getVar('_task_deps', d)
-    if not task_deps:
-        task_deps = {}
-    if not 'tasks' in task_deps:
-        task_deps['tasks'] = []
-    if not 'parents' in task_deps:
-        task_deps['parents'] = {}
-
-    for task in tasklist:
-        task = data.expand(task, d)
-        data.setVarFlag(task, 'task', 1, d)
-
-        if not task in task_deps['tasks']:
-            task_deps['tasks'].append(task)
-
-        flags = data.getVarFlags(task, d)
-        def getTask(name):
-            if not name in task_deps:
-                task_deps[name] = {}
-            if name in flags:
-                deptask = data.expand(flags[name], d)
-                task_deps[name][task] = deptask
-        getTask('depends')
-        getTask('deptask')
-        getTask('rdeptask')
-        getTask('recrdeptask')
-        getTask('nostamp')
-        getTask('fakeroot')
-        getTask('noexec')
-        task_deps['parents'][task] = []
-        for dep in flags['deps']:
-            dep = data.expand(dep, d)
-            task_deps['parents'][task].append(dep)
-
-    # don't assume holding a reference
-    data.setVar('_task_deps', task_deps, d)
-
-def remove_task(task, kill, d):
-    """Remove an BB 'task'.
-
-       If kill is 1, also remove tasks that depend on this task."""
-
-    data.delVarFlag(task, 'task', d)
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
deleted file mode 100644
index 7ea04ac1a..000000000
--- a/bitbake/lib/bb/cache.py
+++ /dev/null
@@ -1,632 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake 'Event' implementation
-#
-# Caching of bitbake variables before task execution
-
-# Copyright (C) 2006        Richard Purdie
-
-# but small sections based on code from bin/bitbake:
-# Copyright (C) 2003, 2004  Chris Larson
-# Copyright (C) 2003, 2004  Phil Blundell
-# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
-# Copyright (C) 2005        Holger Hans Peter Freyther
-# Copyright (C) 2005        ROAD GmbH
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-
-import os
-import logging
-from collections import defaultdict, namedtuple
-import bb.data
-import bb.utils
-
-logger = logging.getLogger("BitBake.Cache")
-
-try:
-    import cPickle as pickle
-except ImportError:
-    import pickle
-    logger.info("Importing cPickle failed. "
-                "Falling back to a very slow implementation.")
-
-__cache_version__ = "136"
-
-recipe_fields = (
-    'pn',
-    'pv',
-    'pr',
-    'pe',
-    'defaultpref',
-    'depends',
-    'provides',
-    'task_deps',
-    'stamp',
-    'stamp_extrainfo',
-    'broken',
-    'not_world',
-    'skipped',
-    'timestamp',
-    'packages',
-    'packages_dynamic',
-    'rdepends',
-    'rdepends_pkg',
-    'rprovides',
-    'rprovides_pkg',
-    'rrecommends',
-    'rrecommends_pkg',
-    'nocache',
-    'variants',
-    'file_depends',
-    'tasks',
-    'basetaskhashes',
-    'hashfilename',
-)
-
-
-class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)):
-    __slots__ = ()
-
-    @classmethod
-    def listvar(cls, var, metadata):
-        return cls.getvar(var, metadata).split()
-
-    @classmethod
-    def intvar(cls, var, metadata):
-        return int(cls.getvar(var, metadata) or 0)
-
-    @classmethod
-    def depvar(cls, var, metadata):
-        return bb.utils.explode_deps(cls.getvar(var, metadata))
-
-    @classmethod
-    def pkgvar(cls, var, packages, metadata):
-        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
-                    for pkg in packages)
-
-    @classmethod
-    def taskvar(cls, var, tasks, metadata):
-        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
-                    for task in tasks)
-
-    @classmethod
-    def flaglist(cls, flag, varlist, metadata):
-        return dict((var, metadata.getVarFlag(var, flag, True))
-                    for var in varlist)
-
-    @classmethod
-    def getvar(cls, var, metadata):
-        return metadata.getVar(var, True) or ''
-
-    @classmethod
-    def make_optional(cls, default=None, **kwargs):
-        """Construct the namedtuple from the specified keyword arguments,
-        with every value considered optional, using the default value if
-        it was not specified."""
-        for field in cls._fields:
-            kwargs[field] = kwargs.get(field, default)
-        return cls(**kwargs)
-
-    @classmethod
-    def from_metadata(cls, filename, metadata):
-        if cls.getvar('__SKIPPED', metadata):
-            return cls.make_optional(skipped=True)
-
-        tasks = metadata.getVar('__BBTASKS', False)
-
-        pn = cls.getvar('PN', metadata)
-        packages = cls.listvar('PACKAGES', metadata)
-        if not pn in packages:
-            packages.append(pn)
-
-        return RecipeInfo(
-            tasks            = tasks,
-            basetaskhashes   = cls.taskvar('BB_BASEHASH', tasks, metadata),
-            hashfilename     = cls.getvar('BB_HASHFILENAME', metadata),
-
-            file_depends     = metadata.getVar('__depends', False),
-            task_deps        = metadata.getVar('_task_deps', False) or
-                               {'tasks': [], 'parents': {}},
-            variants         = cls.listvar('__VARIANTS', metadata) + [''],
-
-            skipped          = False,
-            timestamp        = bb.parse.cached_mtime(filename),
-            packages         = cls.listvar('PACKAGES', metadata),
-            pn               = pn,
-            pe               = cls.getvar('PE', metadata),
-            pv               = cls.getvar('PV', metadata),
-            pr               = cls.getvar('PR', metadata),
-            nocache          = cls.getvar('__BB_DONT_CACHE', metadata),
-            defaultpref      = cls.intvar('DEFAULT_PREFERENCE', metadata),
-            broken           = cls.getvar('BROKEN', metadata),
-            not_world        = cls.getvar('EXCLUDE_FROM_WORLD', metadata),
-            stamp            = cls.getvar('STAMP', metadata),
-            stamp_extrainfo  = cls.flaglist('stamp-extra-info', tasks, metadata),
-            packages_dynamic = cls.listvar('PACKAGES_DYNAMIC', metadata),
-            depends          = cls.depvar('DEPENDS', metadata),
-            provides         = cls.depvar('PROVIDES', metadata),
-            rdepends         = cls.depvar('RDEPENDS', metadata),
-            rprovides        = cls.depvar('RPROVIDES', metadata),
-            rrecommends      = cls.depvar('RRECOMMENDS', metadata),
-            rprovides_pkg    = cls.pkgvar('RPROVIDES', packages, metadata),
-            rdepends_pkg     = cls.pkgvar('RDEPENDS', packages, metadata),
-            rrecommends_pkg  = cls.pkgvar('RRECOMMENDS', packages, metadata),
-        )
-
-
-class Cache(object):
-    """
-    BitBake Cache implementation
-    """
-
-    def __init__(self, data):
-        self.cachedir = bb.data.getVar("CACHE", data, True)
-        self.clean = set()
-        self.checked = set()
-        self.depends_cache = {}
-        self.data = None
-        self.data_fn = None
-        self.cacheclean = True
-
-        if self.cachedir in [None, '']:
-            self.has_cache = False
-            logger.info("Not using a cache. "
-                        "Set CACHE = <directory> to enable.")
-            return
-
-        self.has_cache = True
-        self.cachefile = os.path.join(self.cachedir, "bb_cache.dat")
-
-        logger.debug(1, "Using cache in '%s'", self.cachedir)
-        bb.utils.mkdirhier(self.cachedir)
-
-        # If any of configuration.data's dependencies are newer than the
-        # cache there isn't even any point in loading it...
-        newest_mtime = 0
-        deps = bb.data.getVar("__base_depends", data)
-
-        old_mtimes = [old_mtime for _, old_mtime in deps]
-        old_mtimes.append(newest_mtime)
-        newest_mtime = max(old_mtimes)
-
-        if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
-            self.load_cachefile()
-        elif os.path.isfile(self.cachefile):
-            logger.info("Out of date cache found, rebuilding...")
-
-    def load_cachefile(self):
-        with open(self.cachefile, "rb") as cachefile:
-            pickled = pickle.Unpickler(cachefile)
-            try:
-                cache_ver = pickled.load()
-                bitbake_ver = pickled.load()
-            except Exception:
-                logger.info('Invalid cache, rebuilding...')
-                return
-
-            if cache_ver != __cache_version__:
-                logger.info('Cache version mismatch, rebuilding...')
-                return
-            elif bitbake_ver != bb.__version__:
-                logger.info('Bitbake version mismatch, rebuilding...')
-                return
-
-            cachesize = os.fstat(cachefile.fileno()).st_size
-            bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
-
-            previous_percent = 0
-            while cachefile:
-                try:
-                    key = pickled.load()
-                    value = pickled.load()
-                except Exception:
-                    break
-
-                self.depends_cache[key] = value
-
-                # only fire events on even percentage boundaries
-                current_progress = cachefile.tell()
-                current_percent = 100 * current_progress / cachesize
-                if current_percent > previous_percent:
-                    previous_percent = current_percent
-                    bb.event.fire(bb.event.CacheLoadProgress(current_progress),
-                                  self.data)
-
-            bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
-                                                      len(self.depends_cache)),
-                          self.data)
-
-    @staticmethod
-    def virtualfn2realfn(virtualfn):
-        """
-        Convert a virtual file name to a real one + the associated subclass keyword
-        """
-
-        fn = virtualfn
-        cls = ""
-        if virtualfn.startswith('virtual:'):
-            cls = virtualfn.split(':', 2)[1]
-            fn = virtualfn.replace('virtual:' + cls + ':', '')
-        return (fn, cls)
-
-    @staticmethod
-    def realfn2virtual(realfn, cls):
-        """
-        Convert a real filename + the associated subclass keyword to a virtual filename
-        """
-        if cls == "":
-            return realfn
-        return "virtual:" + cls + ":" + realfn
-
-    @classmethod
-    def loadDataFull(cls, virtualfn, appends, cfgData):
-        """
-        Return a complete set of data for fn.
-        To do this, we need to parse the file.
-        """
-
-        (fn, virtual) = cls.virtualfn2realfn(virtualfn)
-
-        logger.debug(1, "Parsing %s (full)", fn)
-
-        bb_data = cls.load_bbfile(fn, appends, cfgData)
-        return bb_data[virtual]
-
-    @classmethod
-    def parse(cls, filename, appends, configdata):
-        """Parse the specified filename, returning the recipe information"""
-        infos = []
-        datastores = cls.load_bbfile(filename, appends, configdata)
-        depends = set()
-        for variant, data in sorted(datastores.iteritems(),
-                                    key=lambda i: i[0],
-                                    reverse=True):
-            virtualfn = cls.realfn2virtual(filename, variant)
-            depends |= (data.getVar("__depends", False) or set())
-            if depends and not variant:
-                data.setVar("__depends", depends)
-            info = RecipeInfo.from_metadata(filename, data)
-            infos.append((virtualfn, info))
-        return infos
-
-    def load(self, filename, appends, configdata):
-        """Obtain the recipe information for the specified filename,
-        using cached values if available, otherwise parsing.
-
-        Note that if it does parse to obtain the info, it will not
-        automatically add the information to the cache or to your
-        CacheData.  Use the add or add_info method to do so after
-        running this, or use loadData instead."""
-        cached = self.cacheValid(filename)
-        if cached:
-            infos = []
-            info = self.depends_cache[filename]
-            for variant in info.variants:
-                virtualfn = self.realfn2virtual(filename, variant)
-                infos.append((virtualfn, self.depends_cache[virtualfn]))
-        else:
-            logger.debug(1, "Parsing %s", filename)
-            return self.parse(filename, appends, configdata)
-
-        return cached, infos
-
-    def loadData(self, fn, appends, cfgData, cacheData):
-        """Load the recipe info for the specified filename,
-        parsing and adding to the cache if necessary, and adding
-        the recipe information to the supplied CacheData instance."""
-        skipped, virtuals = 0, 0
-
-        cached, infos = self.load(fn, appends, cfgData)
-        for virtualfn, info in infos:
-            if info.skipped:
-                logger.debug(1, "Skipping %s", virtualfn)
-                skipped += 1
-            else:
-                self.add_info(virtualfn, info, cacheData, not cached)
-                virtuals += 1
-
-        return cached, skipped, virtuals
-
-    def cacheValid(self, fn):
-        """
-        Is the cache valid for fn?
-        Fast version, no timestamps checked.
-        """
-        if fn not in self.checked:
-            self.cacheValidUpdate(fn)
-
-        # Is cache enabled?
-        if not self.has_cache:
-            return False
-        if fn in self.clean:
-            return True
-        return False
-
-    def cacheValidUpdate(self, fn):
-        """
-        Is the cache valid for fn?
-        Make thorough (slower) checks including timestamps.
-        """
-        # Is cache enabled?
-        if not self.has_cache:
-            return False
-
-        self.checked.add(fn)
-
-        # File isn't in depends_cache
-        if not fn in self.depends_cache:
-            logger.debug(2, "Cache: %s is not cached", fn)
-            return False
-
-        mtime = bb.parse.cached_mtime_noerror(fn)
-
-        # Check file still exists
-        if mtime == 0:
-            logger.debug(2, "Cache: %s no longer exists", fn)
-            self.remove(fn)
-            return False
-
-        info = self.depends_cache[fn]
-        # Check the file's timestamp
-        if mtime != info.timestamp:
-            logger.debug(2, "Cache: %s changed", fn)
-            self.remove(fn)
-            return False
-
-        # Check dependencies are still valid
-        depends = info.file_depends
-        if depends:
-            for f, old_mtime in depends:
-                fmtime = bb.parse.cached_mtime_noerror(f)
-                # Check if file still exists
-                if old_mtime != 0 and fmtime == 0:
-                    logger.debug(2, "Cache: %s's dependency %s was removed",
-                                 fn, f)
-                    self.remove(fn)
-                    return False
-
-                if (fmtime != old_mtime):
-                    logger.debug(2, "Cache: %s's dependency %s changed",
-                                 fn, f)
-                    self.remove(fn)
-                    return False
-
-        invalid = False
-        for cls in info.variants:
-            virtualfn = self.realfn2virtual(fn, cls)
-            self.clean.add(virtualfn)
-            if virtualfn not in self.depends_cache:
-                logger.debug(2, "Cache: %s is not cached", virtualfn)
-                invalid = True
-
-        # If any one of the variants is not present, mark as invalid for all
-        if invalid:
-            for cls in info.variants:
-                virtualfn = self.realfn2virtual(fn, cls)
-                if virtualfn in self.clean:
-                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
-                    self.clean.remove(virtualfn)
-            if fn in self.clean:
-                logger.debug(2, "Cache: Marking %s as not clean", fn)
-                self.clean.remove(fn)
-            return False
-
-        self.clean.add(fn)
-        return True
-
-    def remove(self, fn):
-        """
-        Remove a fn from the cache
-        Called from the parser in error cases
-        """
-        if fn in self.depends_cache:
-            logger.debug(1, "Removing %s from cache", fn)
-            del self.depends_cache[fn]
-        if fn in self.clean:
-            logger.debug(1, "Marking %s as unclean", fn)
-            self.clean.remove(fn)
-
-    def sync(self):
-        """
-        Save the cache
-        Called from the parser when complete (or exiting)
-        """
-
-        if not self.has_cache:
-            return
-
-        if self.cacheclean:
-            logger.debug(2, "Cache is clean, not saving.")
-            return
-
-        with open(self.cachefile, "wb") as cachefile:
-            pickler = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
-            pickler.dump(__cache_version__)
-            pickler.dump(bb.__version__)
-            for key, value in self.depends_cache.iteritems():
-                pickler.dump(key)
-                pickler.dump(value)
-
-        del self.depends_cache
-
-    @staticmethod
-    def mtime(cachefile):
-        return bb.parse.cached_mtime_noerror(cachefile)
-
-    def add_info(self, filename, info, cacheData, parsed=None):
-        cacheData.add_from_recipeinfo(filename, info)
-        if not self.has_cache:
-            return
-
-        if 'SRCREVINACTION' not in info.pv and not info.nocache:
-            if parsed:
-                self.cacheclean = False
-            self.depends_cache[filename] = info
-
-    def add(self, file_name, data, cacheData, parsed=None):
-        """
-        Save data we need into the cache
-        """
-
-        realfn = self.virtualfn2realfn(file_name)[0]
-        info = RecipeInfo.from_metadata(realfn, data)
-        self.add_info(file_name, info, cacheData, parsed)
-
-    @staticmethod
-    def load_bbfile(bbfile, appends, config):
-        """
-        Load and parse one .bb build file
-        Return the data and whether parsing resulted in the file being skipped
-        """
-        chdir_back = False
-
-        from bb import data, parse
-
-        # expand tmpdir to include this topdir
-        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
-        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
-        oldpath = os.path.abspath(os.getcwd())
-        parse.cached_mtime_noerror(bbfile_loc)
-        bb_data = data.init_db(config)
-        # The ConfHandler first looks if there is a TOPDIR and if not
-        # then it would call getcwd().
-        # Previously, we chdir()ed to bbfile_loc, called the handler
-        # and finally chdir()ed back, a couple of thousand times. We now
-        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
-        if not data.getVar('TOPDIR', bb_data):
-            chdir_back = True
-            data.setVar('TOPDIR', bbfile_loc, bb_data)
-        try:
-            if appends:
-                data.setVar('__BBAPPEND', " ".join(appends), bb_data)
-            bb_data = parse.handle(bbfile, bb_data)
-            if chdir_back:
-                os.chdir(oldpath)
-            return bb_data
-        except:
-            if chdir_back:
-                os.chdir(oldpath)
-            raise
-
-
-def init(cooker):
-    """
-    The Objective: Cache the minimum amount of data possible yet get to the
-    stage of building packages (i.e. tryBuild) without reparsing any .bb files.
-
-    To do this, we intercept getVar calls and only cache the variables we see
-    being accessed. We rely on the cache getVar calls being made for all
-    variables bitbake might need to use to reach this stage. For each cached
-    file we need to track:
-
-    * Its mtime
-    * The mtimes of all its dependencies
-    * Whether it caused a parse.SkipPackage exception
-
-    Files causing parsing errors are evicted from the cache.
-
-    """
-    return Cache(cooker.configuration.data)
-
-
-class CacheData(object):
-    """
-    The data structures we compile from the cached data
-    """
-
-    def __init__(self):
-        # Direct cache variables
-        self.providers = defaultdict(list)
-        self.rproviders = defaultdict(list)
-        self.packages = defaultdict(list)
-        self.packages_dynamic = defaultdict(list)
-        self.possible_world = []
-        self.pkg_pn = defaultdict(list)
-        self.pkg_fn = {}
-        self.pkg_pepvpr = {}
-        self.pkg_dp = {}
-        self.pn_provides = defaultdict(list)
-        self.fn_provides = {}
-        self.all_depends = []
-        self.deps = defaultdict(list)
-        self.rundeps = defaultdict(lambda: defaultdict(list))
-        self.runrecs = defaultdict(lambda: defaultdict(list))
-        self.task_queues = {}
-        self.task_deps = {}
-        self.stamp = {}
-        self.stamp_extrainfo = {}
-        self.preferred = {}
-        self.tasks = {}
-        self.basetaskhash = {}
-        self.hashfn = {}
-
-        # Indirect Cache variables (set elsewhere)
-        self.ignored_dependencies = []
-        self.world_target = set()
-        self.bbfile_priority = {}
-        self.bbfile_config_priorities = []
-
-    def add_from_recipeinfo(self, fn, info):
-        self.task_deps[fn] = info.task_deps
-        self.pkg_fn[fn] = info.pn
-        self.pkg_pn[info.pn].append(fn)
-        self.pkg_pepvpr[fn] = (info.pe, info.pv, info.pr)
-        self.pkg_dp[fn] = info.defaultpref
-        self.stamp[fn] = info.stamp
-        self.stamp_extrainfo[fn] = info.stamp_extrainfo
-
-        provides = [info.pn]
-        for provide in info.provides:
-            if provide not in provides:
-                provides.append(provide)
-        self.fn_provides[fn] = provides
-
-        for provide in provides:
-            self.providers[provide].append(fn)
-            if provide not in self.pn_provides[info.pn]:
-                self.pn_provides[info.pn].append(provide)
-
-        for dep in info.depends:
-            if dep not in self.deps[fn]:
-                self.deps[fn].append(dep)
-            if dep not in self.all_depends:
-                self.all_depends.append(dep)
-
-        rprovides = info.rprovides
-        for package in info.packages:
-            self.packages[package].append(fn)
-            rprovides += info.rprovides_pkg[package]
-
-        for rprovide in rprovides:
-            self.rproviders[rprovide].append(fn)
-
-        for package in info.packages_dynamic:
-            self.packages_dynamic[package].append(fn)
-
-        # Build hash of runtime depends and rececommends
-        for package in info.packages + [info.pn]:
-            self.rundeps[fn][package] = list(info.rdepends) + info.rdepends_pkg[package]
-            self.runrecs[fn][package] = list(info.rrecommends) + info.rrecommends_pkg[package]
-
-        # Collect files we may need for possible world-dep
-        # calculations
-        if not info.broken and not info.not_world:
-            self.possible_world.append(fn)
-
-        self.hashfn[fn] = info.hashfilename
-        for task, taskhash in info.basetaskhashes.iteritems():
-            identifier = '%s.%s' % (fn, task)
-            self.basetaskhash[identifier] = taskhash
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
deleted file mode 100644
index bfffcacc3..000000000
--- a/bitbake/lib/bb/codeparser.py
+++ /dev/null
@@ -1,336 +0,0 @@
-import ast
-import codegen
-import logging
-import os.path
-import bb.utils, bb.data
-from itertools import chain
-from pysh import pyshyacc, pyshlex
-
-
-logger = logging.getLogger('BitBake.CodeParser')
-PARSERCACHE_VERSION = 2
-
-try:
-    import cPickle as pickle
-except ImportError:
-    import pickle
-    logger.info('Importing cPickle failed.  Falling back to a very slow implementation.')
-
-
-def check_indent(codestr):
-    """If the code is indented, add a top level piece of code to 'remove' the indentation"""
-
-    i = 0
-    while codestr[i] in ["\n", "	", " "]:
-        i = i + 1
-
-    if i == 0:
-        return codestr
-
-    if codestr[i-1] is "	" or codestr[i-1] is " ":
-        return "if 1:\n" + codestr
-
-    return codestr
-
-pythonparsecache = {}
-shellparsecache = {}
-
-def parser_cachefile(d):
-    cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
-                bb.data.getVar("CACHE", d, True))
-    if cachedir in [None, '']:
-        return None
-    bb.utils.mkdirhier(cachedir)
-    cachefile = os.path.join(cachedir, "bb_codeparser.dat")
-    logger.debug(1, "Using cache in '%s' for codeparser cache", cachefile)
-    return cachefile
-
-def parser_cache_init(d):
-    global pythonparsecache
-    global shellparsecache
-
-    cachefile = parser_cachefile(d)
-    if not cachefile:
-        return
-
-    try:
-        p = pickle.Unpickler(file(cachefile, "rb"))
-        data, version = p.load()
-    except:
-        return
-
-    if version != PARSERCACHE_VERSION:
-        return
-
-    pythonparsecache = data[0]
-    shellparsecache = data[1]
-
-def parser_cache_save(d):
-    cachefile = parser_cachefile(d)
-    if not cachefile:
-        return
-
-    p = pickle.Pickler(file(cachefile, "wb"), -1)
-    p.dump([[pythonparsecache, shellparsecache], PARSERCACHE_VERSION])
-
-class PythonParser():
-    class ValueVisitor():
-        """Visitor to traverse a python abstract syntax tree and obtain
-        the variables referenced via bitbake metadata APIs, and the external
-        functions called.
-        """
-
-        getvars = ("d.getVar", "bb.data.getVar", "data.getVar")
-        expands = ("d.expand", "bb.data.expand", "data.expand")
-        execs = ("bb.build.exec_func", "bb.build.exec_task")
-
-        @classmethod
-        def _compare_name(cls, strparts, node):
-            """Given a sequence of strings representing a python name,
-            where the last component is the actual Name and the prior
-            elements are Attribute nodes, determine if the supplied node
-            matches.
-            """
-
-            if not strparts:
-                return True
-
-            current, rest = strparts[0], strparts[1:]
-            if isinstance(node, ast.Attribute):
-                if current == node.attr:
-                    return cls._compare_name(rest, node.value)
-            elif isinstance(node, ast.Name):
-                if current == node.id:
-                    return True
-            return False
-
-        @classmethod
-        def compare_name(cls, value, node):
-            """Convenience function for the _compare_node method, which
-            can accept a string (which is split by '.' for you), or an
-            iterable of strings, in which case it checks to see if any of
-            them match, similar to isinstance.
-            """
-
-            if isinstance(value, basestring):
-                return cls._compare_name(tuple(reversed(value.split("."))),
-                                         node)
-            else:
-                return any(cls.compare_name(item, node) for item in value)
-
-        def __init__(self, value):
-            self.var_references = set()
-            self.var_execs = set()
-            self.direct_func_calls = set()
-            self.var_expands = set()
-            self.value = value
-
-        @classmethod
-        def warn(cls, func, arg):
-            """Warn about calls of bitbake APIs which pass a non-literal
-            argument for the variable name, as we're not able to track such
-            a reference.
-            """
-
-            try:
-                funcstr = codegen.to_source(func)
-                argstr = codegen.to_source(arg)
-            except TypeError:
-                logger.debug(2, 'Failed to convert function and argument to source form')
-            else:
-                logger.debug(1, "Warning: in call to '%s', argument '%s' is "
-                                "not a literal", funcstr, argstr)
-
-        def visit_Call(self, node):
-            if self.compare_name(self.getvars, node.func):
-                if isinstance(node.args[0], ast.Str):
-                    self.var_references.add(node.args[0].s)
-                else:
-                    self.warn(node.func, node.args[0])
-            elif self.compare_name(self.expands, node.func):
-                if isinstance(node.args[0], ast.Str):
-                    self.warn(node.func, node.args[0])
-                    self.var_expands.update(node.args[0].s)
-                elif isinstance(node.args[0], ast.Call) and \
-                     self.compare_name(self.getvars, node.args[0].func):
-                    pass
-                else:
-                    self.warn(node.func, node.args[0])
-            elif self.compare_name(self.execs, node.func):
-                if isinstance(node.args[0], ast.Str):
-                    self.var_execs.add(node.args[0].s)
-                else:
-                    self.warn(node.func, node.args[0])
-            elif isinstance(node.func, ast.Name):
-                self.direct_func_calls.add(node.func.id)
-            elif isinstance(node.func, ast.Attribute):
-                # We must have a qualified name.  Therefore we need
-                # to walk the chain of 'Attribute' nodes to determine
-                # the qualification.
-                attr_node = node.func.value
-                identifier = node.func.attr
-                while isinstance(attr_node, ast.Attribute):
-                    identifier = attr_node.attr + "." + identifier
-                    attr_node = attr_node.value
-                if isinstance(attr_node, ast.Name):
-                    identifier = attr_node.id + "." + identifier
-                self.direct_func_calls.add(identifier)
-
-    def __init__(self):
-        #self.funcdefs = set()
-        self.execs = set()
-        #self.external_cmds = set()
-        self.references = set()
-
-    def parse_python(self, node):
-
-        h = hash(str(node))
-
-        if h in pythonparsecache:
-            self.references = pythonparsecache[h]["refs"]
-            self.execs = pythonparsecache[h]["execs"]
-            return
-
-        code = compile(check_indent(str(node)), "<string>", "exec",
-                       ast.PyCF_ONLY_AST)
-
-        visitor = self.ValueVisitor(code)
-        for n in ast.walk(code):
-            if n.__class__.__name__ == "Call":
-                visitor.visit_Call(n)
-
-        self.references.update(visitor.var_references)
-        self.references.update(visitor.var_execs)
-        self.execs = visitor.direct_func_calls
-
-        pythonparsecache[h] = {}
-        pythonparsecache[h]["refs"] = self.references
-        pythonparsecache[h]["execs"] = self.execs
-
-class ShellParser():
-    def __init__(self):
-        self.funcdefs = set()
-        self.allexecs = set()
-        self.execs = set()
-
-    def parse_shell(self, value):
-        """Parse the supplied shell code in a string, returning the external
-        commands it executes.
- """ - - h = hash(str(value)) - - if h in shellparsecache: - self.execs = shellparsecache[h]["execs"] - return self.execs - - try: - tokens, _ = pyshyacc.parse(value, eof=True, debug=False) - except pyshlex.NeedMore: - raise ShellSyntaxError("Unexpected EOF") - - for token in tokens: - self.process_tokens(token) - self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs) - - shellparsecache[h] = {} - shellparsecache[h]["execs"] = self.execs - - return self.execs - - def process_tokens(self, tokens): - """Process a supplied portion of the syntax tree as returned by - pyshyacc.parse. - """ - - def function_definition(value): - self.funcdefs.add(value.name) - return [value.body], None - - def case_clause(value): - # Element 0 of each item in the case is the list of patterns, and - # Element 1 of each item in the case is the list of commands to be - # executed when that pattern matches. - words = chain(*[item[0] for item in value.items]) - cmds = chain(*[item[1] for item in value.items]) - return cmds, words - - def if_clause(value): - main = chain(value.cond, value.if_cmds) - rest = value.else_cmds - if isinstance(rest, tuple) and rest[0] == "elif": - return chain(main, if_clause(rest[1])) - else: - return chain(main, rest) - - def simple_command(value): - return None, chain(value.words, (assign[1] for assign in value.assigns)) - - token_handlers = { - "and_or": lambda x: ((x.left, x.right), None), - "async": lambda x: ([x], None), - "brace_group": lambda x: (x.cmds, None), - "for_clause": lambda x: (x.cmds, x.items), - "function_definition": function_definition, - "if_clause": lambda x: (if_clause(x), None), - "pipeline": lambda x: (x.commands, None), - "redirect_list": lambda x: ([x.cmd], None), - "subshell": lambda x: (x.cmds, None), - "while_clause": lambda x: (chain(x.condition, x.cmds), None), - "until_clause": lambda x: (chain(x.condition, x.cmds), None), - "simple_command": simple_command, - "case_clause": case_clause, - } - - for token in tokens: - name, value = token - try: - more_tokens, words = token_handlers[name](value) - except KeyError: - raise NotImplementedError("Unsupported token type " + name) - - if more_tokens: - self.process_tokens(more_tokens) - - if words: - self.process_words(words) - - def process_words(self, words): - """Process a set of 'words' in pyshyacc parlance, which includes - extraction of executed commands from $() blocks, as well as grabbing - the command name argument. 
- """ - - words = list(words) - for word in list(words): - wtree = pyshlex.make_wordtree(word[1]) - for part in wtree: - if not isinstance(part, list): - continue - - if part[0] in ('`', '$('): - command = pyshlex.wordtree_as_string(part[1:-1]) - self.parse_shell(command) - - if word[0] in ("cmd_name", "cmd_word"): - if word in words: - words.remove(word) - - usetoken = False - for word in words: - if word[0] in ("cmd_name", "cmd_word") or \ - (usetoken and word[0] == "TOKEN"): - if "=" in word[1]: - usetoken = True - continue - - cmd = word[1] - if cmd.startswith("$"): - logger.debug(1, "Warning: execution of non-literal " - "command '%s'", cmd) - elif cmd == "eval": - command = " ".join(word for _, word in words[1:]) - self.parse_shell(command) - else: - self.allexecs.add(cmd) - break diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py deleted file mode 100644 index b88089298..000000000 --- a/bitbake/lib/bb/command.py +++ /dev/null @@ -1,271 +0,0 @@ -""" -BitBake 'Command' module - -Provide an interface to interact with the bitbake server through 'commands' -""" - -# Copyright (C) 2006-2007 Richard Purdie -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -""" -The bitbake server takes 'commands' from its UI/commandline. -Commands are either synchronous or asynchronous. -Async commands return data to the client in the form of events. -Sync commands must only return data through the function return value -and must not trigger events, directly or indirectly. 
-Commands are queued in a CommandQueue -""" - -import bb.event -import bb.cooker -import bb.data - -async_cmds = {} -sync_cmds = {} - - -class CommandCompleted(bb.event.Event): - pass - -class CommandExit(bb.event.Event): - def __init__(self, exitcode): - bb.event.Event.__init__(self) - self.exitcode = int(exitcode) - -class CommandFailed(CommandExit): - def __init__(self, message): - self.error = message - CommandExit.__init__(self, 1) - -class Command: - """ - A queue of asynchronous commands for bitbake - """ - def __init__(self, cooker): - self.cooker = cooker - self.cmds_sync = CommandsSync() - self.cmds_async = CommandsAsync() - - # FIXME Add lock for this - self.currentAsyncCommand = None - - for attr in CommandsSync.__dict__: - command = attr[:].lower() - method = getattr(CommandsSync, attr) - sync_cmds[command] = (method) - - for attr in CommandsAsync.__dict__: - command = attr[:].lower() - method = getattr(CommandsAsync, attr) - async_cmds[command] = (method) - - def runCommand(self, commandline): - try: - command = commandline.pop(0) - if command in CommandsSync.__dict__: - # Can run synchronous commands straight away - return getattr(CommandsSync, command)(self.cmds_sync, self, commandline) - if self.currentAsyncCommand is not None: - return "Busy (%s in progress)" % self.currentAsyncCommand[0] - if command not in CommandsAsync.__dict__: - return "No such command" - self.currentAsyncCommand = (command, commandline) - self.cooker.server.register_idle_function(self.cooker.runCommands, self.cooker) - return True - except: - import traceback - return traceback.format_exc() - - def runAsyncCommand(self): - try: - if self.currentAsyncCommand is not None: - (command, options) = self.currentAsyncCommand - commandmethod = getattr(CommandsAsync, command) - needcache = getattr( commandmethod, "needcache" ) - if (needcache and self.cooker.state in - (bb.cooker.state.initial, bb.cooker.state.parsing)): - self.cooker.updateCache() - return True - else: - commandmethod(self.cmds_async, self, options) - return False - else: - return False - except KeyboardInterrupt as exc: - self.finishAsyncCommand("Interrupted") - return False - except SystemExit as exc: - arg = exc.args[0] - if isinstance(arg, basestring): - self.finishAsyncCommand(arg) - else: - self.finishAsyncCommand("Exited with %s" % arg) - return False - except Exception: - import traceback - self.finishAsyncCommand(traceback.format_exc()) - return False - - def finishAsyncCommand(self, msg=None, code=None): - if msg: - bb.event.fire(CommandFailed(msg), self.cooker.configuration.event_data) - elif code: - bb.event.fire(CommandExit(code), self.cooker.configuration.event_data) - else: - bb.event.fire(CommandCompleted(), self.cooker.configuration.event_data) - self.currentAsyncCommand = None - - -class CommandsSync: - """ - A class of synchronous commands - These should run quickly so as not to hurt interactive performance. - These must not influence any running synchronous command. 
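Command.__init__ above fills its dispatch tables by walking the class __dict__, and runAsyncCommand later reads a needcache attribute stamped onto each async handler to decide whether the metadata cache must be built first. A small sketch of that function-attribute idiom, with invented handlers:

class ToyCommands:
    def parse_files(self, params):
        return "parsed"
    parse_files.needcache = True      # needs parsed metadata first

    def get_variable(self, params):
        return "value"
    get_variable.needcache = False    # can run immediately

# Build the name -> method table the same way Command.__init__ does.
table = {}
for attr in ToyCommands.__dict__:
    if not attr.startswith("__"):
        table[attr.lower()] = getattr(ToyCommands, attr)

assert table["parse_files"].needcache is True
assert table["get_variable"].needcache is False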
- """ - - def stateShutdown(self, command, params): - """ - Trigger cooker 'shutdown' mode - """ - command.cooker.shutdown() - - def stateStop(self, command, params): - """ - Stop the cooker - """ - command.cooker.stop() - - def getCmdLineAction(self, command, params): - """ - Get any command parsed from the commandline - """ - return command.cooker.commandlineAction - - def getVariable(self, command, params): - """ - Read the value of a variable from configuration.data - """ - varname = params[0] - expand = True - if len(params) > 1: - expand = params[1] - - return bb.data.getVar(varname, command.cooker.configuration.data, expand) - - def setVariable(self, command, params): - """ - Set the value of variable in configuration.data - """ - varname = params[0] - value = params[1] - bb.data.setVar(varname, value, command.cooker.configuration.data) - - -class CommandsAsync: - """ - A class of asynchronous commands - These functions communicate via generated events. - Any function that requires metadata parsing should be here. - """ - - def buildFile(self, command, params): - """ - Build a single specified .bb file - """ - bfile = params[0] - task = params[1] - - command.cooker.buildFile(bfile, task) - buildFile.needcache = False - - def buildTargets(self, command, params): - """ - Build a set of targets - """ - pkgs_to_build = params[0] - task = params[1] - - command.cooker.buildTargets(pkgs_to_build, task) - buildTargets.needcache = True - - def generateDepTreeEvent(self, command, params): - """ - Generate an event containing the dependency information - """ - pkgs_to_build = params[0] - task = params[1] - - command.cooker.generateDepTreeEvent(pkgs_to_build, task) - command.finishAsyncCommand() - generateDepTreeEvent.needcache = True - - def generateDotGraph(self, command, params): - """ - Dump dependency information to disk as .dot files - """ - pkgs_to_build = params[0] - task = params[1] - - command.cooker.generateDotGraphFiles(pkgs_to_build, task) - command.finishAsyncCommand() - generateDotGraph.needcache = True - - def showVersions(self, command, params): - """ - Show the currently selected versions - """ - command.cooker.showVersions() - command.finishAsyncCommand() - showVersions.needcache = True - - def showEnvironmentTarget(self, command, params): - """ - Print the environment of a target recipe - (needs the cache to work out which recipe to use) - """ - pkg = params[0] - - command.cooker.showEnvironment(None, pkg) - command.finishAsyncCommand() - showEnvironmentTarget.needcache = True - - def showEnvironment(self, command, params): - """ - Print the standard environment - or if specified the environment for a specified recipe - """ - bfile = params[0] - - command.cooker.showEnvironment(bfile) - command.finishAsyncCommand() - showEnvironment.needcache = False - - def parseFiles(self, command, params): - """ - Parse the .bb files - """ - command.cooker.updateCache() - command.finishAsyncCommand() - parseFiles.needcache = True - - def compareRevisions(self, command, params): - """ - Parse the .bb files - """ - if bb.fetch.fetcher_compare_revisions(command.cooker.configuration.data): - command.finishAsyncCommand(code=1) - else: - command.finishAsyncCommand() - compareRevisions.needcache = True diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py deleted file mode 100644 index ff16daf83..000000000 --- a/bitbake/lib/bb/cooker.py +++ /dev/null @@ -1,1078 +0,0 @@ -#!/usr/bin/env python -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# 
-# Copyright (C) 2003, 2004 Chris Larson -# Copyright (C) 2003, 2004 Phil Blundell -# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer -# Copyright (C) 2005 Holger Hans Peter Freyther -# Copyright (C) 2005 ROAD GmbH -# Copyright (C) 2006 - 2007 Richard Purdie -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -from __future__ import print_function -import sys, os, glob, os.path, re, time -import atexit -import itertools -import logging -import multiprocessing -import signal -import sre_constants -import threading -from cStringIO import StringIO -from contextlib import closing -import bb -from bb import utils, data, parse, event, cache, providers, taskdata, command, runqueue - -logger = logging.getLogger("BitBake") -collectlog = logging.getLogger("BitBake.Collection") -buildlog = logging.getLogger("BitBake.Build") -parselog = logging.getLogger("BitBake.Parsing") -providerlog = logging.getLogger("BitBake.Provider") - -class MultipleMatches(Exception): - """ - Exception raised when multiple file matches are found - """ - -class NothingToBuild(Exception): - """ - Exception raised when there is nothing to build - """ - -class state: - initial, parsing, running, shutdown, stop = range(5) - -#============================================================================# -# BBCooker -#============================================================================# -class BBCooker: - """ - Manages one bitbake build run - """ - - def __init__(self, configuration, server): - self.status = None - self.appendlist = {} - - if server: - self.server = server.BitBakeServer(self) - - self.configuration = configuration - - self.configuration.data = bb.data.init() - - if not server: - bb.data.setVar("BB_WORKERCONTEXT", "1", self.configuration.data) - - bb.data.inheritFromOS(self.configuration.data) - - self.parseConfigurationFiles(self.configuration.file) - - if not self.configuration.cmd: - self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build" - - bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True) - if bbpkgs and len(self.configuration.pkgs_to_build) == 0: - self.configuration.pkgs_to_build.extend(bbpkgs.split()) - - # - # Special updated configuration we use for firing events - # - self.configuration.event_data = bb.data.createCopy(self.configuration.data) - bb.data.update_data(self.configuration.event_data) - - # TOSTOP must not be set or our children will hang when they output - fd = sys.stdout.fileno() - if os.isatty(fd): - import termios - tcattr = termios.tcgetattr(fd) - if tcattr[3] & termios.TOSTOP: - buildlog.info("The terminal had the TOSTOP bit set, clearing...") - tcattr[3] = tcattr[3] & ~termios.TOSTOP - termios.tcsetattr(fd, termios.TCSANOW, tcattr) - - self.command = bb.command.Command(self) - self.state = state.initial - - def parseConfiguration(self): - - - # Change nice level if we're asked to - nice = bb.data.getVar("BB_NICE_LEVEL", 
self.configuration.data, True) - if nice: - curnice = os.nice(0) - nice = int(nice) - curnice - buildlog.verbose("Renice to %s " % os.nice(nice)) - - def parseCommandLine(self): - # Parse any commandline into actions - if self.configuration.show_environment: - self.commandlineAction = None - - if 'world' in self.configuration.pkgs_to_build: - buildlog.error("'world' is not a valid target for --environment.") - elif len(self.configuration.pkgs_to_build) > 1: - buildlog.error("Only one target can be used with the --environment option.") - elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0: - buildlog.error("No target should be used with the --environment and --buildfile options.") - elif len(self.configuration.pkgs_to_build) > 0: - self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build] - else: - self.commandlineAction = ["showEnvironment", self.configuration.buildfile] - elif self.configuration.buildfile is not None: - self.commandlineAction = ["buildFile", self.configuration.buildfile, self.configuration.cmd] - elif self.configuration.revisions_changed: - self.commandlineAction = ["compareRevisions"] - elif self.configuration.show_versions: - self.commandlineAction = ["showVersions"] - elif self.configuration.parse_only: - self.commandlineAction = ["parseFiles"] - elif self.configuration.dot_graph: - if self.configuration.pkgs_to_build: - self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd] - else: - self.commandlineAction = None - buildlog.error("Please specify a package name for dependency graph generation.") - else: - if self.configuration.pkgs_to_build: - self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd] - else: - self.commandlineAction = None - buildlog.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.") - - def runCommands(self, server, data, abort): - """ - Run any queued asynchronous command - This is done by the idle handler so it runs in true context rather than - tied to any UI. 
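runCommands() is handed to the server with register_idle_function(), so an async command advances one slice per pass of the server's idle loop: returning True means "call me again", False means "done". A minimal sketch of such a loop (the real server machinery is assumed, not reproduced):

def run_idle(callbacks):
    # Keep cycling until every registered callback reports completion.
    pending = list(callbacks)
    while pending:
        pending = [cb for cb in pending if cb()]

state = {"slices": 0}
def toy_command():
    state["slices"] += 1
    return state["slices"] < 3    # pretend three slices of work remain

run_idle([toy_command])
assert state["slices"] == 3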
- """ - - return self.command.runAsyncCommand() - - def showVersions(self): - - # Need files parsed - self.updateCache() - - pkg_pn = self.status.pkg_pn - preferred_versions = {} - latest_versions = {} - - # Sort by priority - for pn in pkg_pn: - (last_ver, last_file, pref_ver, pref_file) = bb.providers.findBestProvider(pn, self.configuration.data, self.status) - preferred_versions[pn] = (pref_ver, pref_file) - latest_versions[pn] = (last_ver, last_file) - - logger.plain("%-35s %25s %25s", "Package Name", "Latest Version", "Preferred Version") - logger.plain("%-35s %25s %25s\n", "============", "==============", "=================") - - for p in sorted(pkg_pn): - pref = preferred_versions[p] - latest = latest_versions[p] - - prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2] - lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2] - - if pref == latest: - prefstr = "" - - logger.plain("%-35s %25s %25s", p, lateststr, prefstr) - - def showEnvironment(self, buildfile = None, pkgs_to_build = []): - """ - Show the outer or per-package environment - """ - fn = None - envdata = None - - if buildfile: - fn = self.matchFile(buildfile) - elif len(pkgs_to_build) == 1: - self.updateCache() - - localdata = data.createCopy(self.configuration.data) - bb.data.update_data(localdata) - bb.data.expandKeys(localdata) - - taskdata = bb.taskdata.TaskData(self.configuration.abort) - taskdata.add_provider(localdata, self.status, pkgs_to_build[0]) - taskdata.add_unresolved(localdata, self.status) - - targetid = taskdata.getbuild_id(pkgs_to_build[0]) - fnid = taskdata.build_targets[targetid][0] - fn = taskdata.fn_index[fnid] - else: - envdata = self.configuration.data - - if fn: - try: - envdata = bb.cache.Cache.loadDataFull(fn, self.get_file_appends(fn), self.configuration.data) - except Exception, e: - parselog.exception("Unable to read %s", fn) - raise - - # emit variables and shell functions - data.update_data(envdata) - with closing(StringIO()) as env: - data.emit_env(env, envdata, True) - logger.plain(env.getvalue()) - - # emit the metadata which isnt valid shell - data.expandKeys(envdata) - for e in envdata.keys(): - if data.getVarFlag( e, 'python', envdata ): - logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1)) - - def generateDepTreeData(self, pkgs_to_build, task): - """ - Create a dependency tree of pkgs_to_build, returning the data. 
- """ - - # Need files parsed - self.updateCache() - - # If we are told to do the None task then query the default task - if (task == None): - task = self.configuration.cmd - - pkgs_to_build = self.checkPackages(pkgs_to_build) - - localdata = data.createCopy(self.configuration.data) - bb.data.update_data(localdata) - bb.data.expandKeys(localdata) - taskdata = bb.taskdata.TaskData(self.configuration.abort) - - runlist = [] - for k in pkgs_to_build: - taskdata.add_provider(localdata, self.status, k) - runlist.append([k, "do_%s" % task]) - taskdata.add_unresolved(localdata, self.status) - - rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist) - rq.rqdata.prepare() - - seen_fnids = [] - depend_tree = {} - depend_tree["depends"] = {} - depend_tree["tdepends"] = {} - depend_tree["pn"] = {} - depend_tree["rdepends-pn"] = {} - depend_tree["packages"] = {} - depend_tree["rdepends-pkg"] = {} - depend_tree["rrecs-pkg"] = {} - - for task in xrange(len(rq.rqdata.runq_fnid)): - taskname = rq.rqdata.runq_task[task] - fnid = rq.rqdata.runq_fnid[task] - fn = taskdata.fn_index[fnid] - pn = self.status.pkg_fn[fn] - version = "%s:%s-%s" % self.status.pkg_pepvpr[fn] - if pn not in depend_tree["pn"]: - depend_tree["pn"][pn] = {} - depend_tree["pn"][pn]["filename"] = fn - depend_tree["pn"][pn]["version"] = version - for dep in rq.rqdata.runq_depends[task]: - depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]] - deppn = self.status.pkg_fn[depfn] - dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task]) - if not dotname in depend_tree["tdepends"]: - depend_tree["tdepends"][dotname] = [] - depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep])) - if fnid not in seen_fnids: - seen_fnids.append(fnid) - packages = [] - - depend_tree["depends"][pn] = [] - for dep in taskdata.depids[fnid]: - depend_tree["depends"][pn].append(taskdata.build_names_index[dep]) - - depend_tree["rdepends-pn"][pn] = [] - for rdep in taskdata.rdepids[fnid]: - depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep]) - - rdepends = self.status.rundeps[fn] - for package in rdepends: - depend_tree["rdepends-pkg"][package] = [] - for rdepend in rdepends[package]: - depend_tree["rdepends-pkg"][package].append(rdepend) - packages.append(package) - - rrecs = self.status.runrecs[fn] - for package in rrecs: - depend_tree["rrecs-pkg"][package] = [] - for rdepend in rrecs[package]: - depend_tree["rrecs-pkg"][package].append(rdepend) - if not package in packages: - packages.append(package) - - for package in packages: - if package not in depend_tree["packages"]: - depend_tree["packages"][package] = {} - depend_tree["packages"][package]["pn"] = pn - depend_tree["packages"][package]["filename"] = fn - depend_tree["packages"][package]["version"] = version - - return depend_tree - - - def generateDepTreeEvent(self, pkgs_to_build, task): - """ - Create a task dependency graph of pkgs_to_build. - Generate an event with the result - """ - depgraph = self.generateDepTreeData(pkgs_to_build, task) - bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.configuration.data) - - def generateDotGraphFiles(self, pkgs_to_build, task): - """ - Create a task dependency graph of pkgs_to_build. - Save the result to a set of .dot files. 
- """ - - depgraph = self.generateDepTreeData(pkgs_to_build, task) - - # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn - depends_file = file('pn-depends.dot', 'w' ) - print("digraph depends {", file=depends_file) - for pn in depgraph["pn"]: - fn = depgraph["pn"][pn]["filename"] - version = depgraph["pn"][pn]["version"] - print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file) - for pn in depgraph["depends"]: - for depend in depgraph["depends"][pn]: - print('"%s" -> "%s"' % (pn, depend), file=depends_file) - for pn in depgraph["rdepends-pn"]: - for rdepend in depgraph["rdepends-pn"][pn]: - print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file) - print("}", file=depends_file) - logger.info("PN dependencies saved to 'pn-depends.dot'") - - depends_file = file('package-depends.dot', 'w' ) - print("digraph depends {", file=depends_file) - for package in depgraph["packages"]: - pn = depgraph["packages"][package]["pn"] - fn = depgraph["packages"][package]["filename"] - version = depgraph["packages"][package]["version"] - if package == pn: - print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file) - else: - print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file) - for depend in depgraph["depends"][pn]: - print('"%s" -> "%s"' % (package, depend), file=depends_file) - for package in depgraph["rdepends-pkg"]: - for rdepend in depgraph["rdepends-pkg"][package]: - print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file) - for package in depgraph["rrecs-pkg"]: - for rdepend in depgraph["rrecs-pkg"][package]: - print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file) - print("}", file=depends_file) - logger.info("Package dependencies saved to 'package-depends.dot'") - - tdepends_file = file('task-depends.dot', 'w' ) - print("digraph depends {", file=tdepends_file) - for task in depgraph["tdepends"]: - (pn, taskname) = task.rsplit(".", 1) - fn = depgraph["pn"][pn]["filename"] - version = depgraph["pn"][pn]["version"] - print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file) - for dep in depgraph["tdepends"][task]: - print('"%s" -> "%s"' % (task, dep), file=tdepends_file) - print("}", file=tdepends_file) - logger.info("Task dependencies saved to 'task-depends.dot'") - - def buildDepgraph( self ): - all_depends = self.status.all_depends - pn_provides = self.status.pn_provides - - localdata = data.createCopy(self.configuration.data) - bb.data.update_data(localdata) - bb.data.expandKeys(localdata) - - matched = set() - def calc_bbfile_priority(filename): - for _, _, regex, pri in self.status.bbfile_config_priorities: - if regex.match(filename): - if not regex in matched: - matched.add(regex) - return pri - return 0 - - # Handle PREFERRED_PROVIDERS - for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, 1) or "").split(): - try: - (providee, provider) = p.split(':') - except: - providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p) - continue - if providee in self.status.preferred and self.status.preferred[providee] != provider: - providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.status.preferred[providee]) - self.status.preferred[providee] = provider - - # Calculate priorities for each file - for p in self.status.pkg_fn: - self.status.bbfile_priority[p] = calc_bbfile_priority(p) 
- - for collection, pattern, regex, _ in self.status.bbfile_config_priorities: - if not regex in matched: - collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern)) - - def buildWorldTargetList(self): - """ - Build package list for "bitbake world" - """ - all_depends = self.status.all_depends - pn_provides = self.status.pn_provides - parselog.debug(1, "collating packages for \"world\"") - for f in self.status.possible_world: - terminal = True - pn = self.status.pkg_fn[f] - - for p in pn_provides[pn]: - if p.startswith('virtual/'): - parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p) - terminal = False - break - for pf in self.status.providers[p]: - if self.status.pkg_fn[pf] != pn: - parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p) - terminal = False - break - if terminal: - self.status.world_target.add(pn) - - # drop reference count now - self.status.possible_world = None - self.status.all_depends = None - - def interactiveMode( self ): - """Drop off into a shell""" - try: - from bb import shell - except ImportError: - parselog.exception("Interactive mode not available") - sys.exit(1) - else: - shell.start( self ) - - def _findLayerConf(self): - path = os.getcwd() - while path != "/": - bblayers = os.path.join(path, "conf", "bblayers.conf") - if os.path.exists(bblayers): - return bblayers - - path, _ = os.path.split(path) - - def parseConfigurationFiles(self, files): - def _parse(f, data, include=False): - try: - return bb.parse.handle(f, data, include) - except (IOError, bb.parse.ParseError) as exc: - parselog.critical("Unable to parse %s: %s" % (f, exc)) - sys.exit(1) - - data = self.configuration.data - bb.parse.init_parser(data) - for f in files: - data = _parse(f, data) - - layerconf = self._findLayerConf() - if layerconf: - parselog.debug(2, "Found bblayers.conf (%s)", layerconf) - data = _parse(layerconf, data) - - layers = (bb.data.getVar('BBLAYERS', data, True) or "").split() - - data = bb.data.createCopy(data) - for layer in layers: - parselog.debug(2, "Adding layer %s", layer) - bb.data.setVar('LAYERDIR', layer, data) - data = _parse(os.path.join(layer, "conf", "layer.conf"), data) - data.expandVarref('LAYERDIR') - - bb.data.delVar('LAYERDIR', data) - - if not data.getVar("BBPATH", True): - raise SystemExit("The BBPATH variable is not set") - - data = _parse(os.path.join("conf", "bitbake.conf"), data) - - self.configuration.data = data - - # Handle any INHERITs and inherit the base class - inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True ) or "").split() - for inherit in inherits: - self.configuration.data = _parse(os.path.join('classes', '%s.bbclass' % inherit), self.configuration.data, True ) - - # Normally we only register event handlers at the end of parsing .bb files - # We register any handlers we've found so far here... 
- for var in bb.data.getVar('__BBHANDLERS', self.configuration.data) or []: - bb.event.register(var, bb.data.getVar(var, self.configuration.data)) - - if bb.data.getVar("BB_WORKERCONTEXT", self.configuration.data) is None: - bb.fetch.fetcher_init(self.configuration.data) - bb.codeparser.parser_cache_init(self.configuration.data) - bb.parse.init_parser(data) - bb.event.fire(bb.event.ConfigParsed(), self.configuration.data) - - def handleCollections( self, collections ): - """Handle collections""" - if collections: - collection_list = collections.split() - for c in collection_list: - regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1) - if regex == None: - parselog.error("BBFILE_PATTERN_%s not defined" % c) - continue - priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1) - if priority == None: - parselog.error("BBFILE_PRIORITY_%s not defined" % c) - continue - try: - cre = re.compile(regex) - except re.error: - parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex) - continue - try: - pri = int(priority) - self.status.bbfile_config_priorities.append((c, regex, cre, pri)) - except ValueError: - parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority) - - def buildSetVars(self): - """ - Setup any variables needed before starting a build - """ - if not bb.data.getVar("BUILDNAME", self.configuration.data): - bb.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'), self.configuration.data) - bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()), self.configuration.data) - - def matchFiles(self, buildfile): - """ - Find the .bb files which match the expression in 'buildfile'. - """ - - bf = os.path.abspath(buildfile) - filelist, masked = self.collect_bbfiles() - try: - os.stat(bf) - return [bf] - except OSError: - regexp = re.compile(buildfile) - matches = [] - for f in filelist: - if regexp.search(f) and os.path.isfile(f): - bf = f - matches.append(f) - return matches - - def matchFile(self, buildfile): - """ - Find the .bb file which matches the expression in 'buildfile'. - Raise an error if multiple files - """ - matches = self.matchFiles(buildfile) - if len(matches) != 1: - parselog.error("Unable to match %s (%s matches found):" % (buildfile, len(matches))) - for f in matches: - parselog.error(" %s" % f) - raise MultipleMatches - return matches[0] - - def buildFile(self, buildfile, task): - """ - Build the file matching regexp buildfile - """ - - # Parse the configuration here. 
We need to do it explicitly here since - # buildFile() doesn't use the cache - self.parseConfiguration() - - # If we are told to do the None task then query the default task - if (task == None): - task = self.configuration.cmd - - (fn, cls) = bb.cache.Cache.virtualfn2realfn(buildfile) - buildfile = self.matchFile(fn) - fn = bb.cache.Cache.realfn2virtual(buildfile, cls) - - self.buildSetVars() - - self.status = bb.cache.CacheData() - infos = bb.cache.Cache.parse(fn, self.get_file_appends(fn), \ - self.configuration.data) - maininfo = None - for vfn, info in infos: - self.status.add_from_recipeinfo(vfn, info) - if vfn == fn: - maininfo = info - - # Tweak some variables - item = maininfo.pn - self.status.ignored_dependencies = set() - self.status.bbfile_priority[fn] = 1 - - # Remove external dependencies - self.status.task_deps[fn]['depends'] = {} - self.status.deps[fn] = [] - self.status.rundeps[fn] = [] - self.status.runrecs[fn] = [] - - # Remove stamp for target if force mode active - if self.configuration.force: - logger.verbose("Remove stamp %s, %s", task, fn) - bb.build.del_stamp('do_%s' % task, self.status, fn) - - # Setup taskdata structure - taskdata = bb.taskdata.TaskData(self.configuration.abort) - taskdata.add_provider(self.configuration.data, self.status, item) - - buildname = bb.data.getVar("BUILDNAME", self.configuration.data) - bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.configuration.event_data) - - # Clear locks - bb.fetch.persistent_database_connection = {} - - # Execute the runqueue - runlist = [[item, "do_%s" % task]] - - rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist) - - def buildFileIdle(server, rq, abort): - - if abort or self.state == state.stop: - rq.finish_runqueue(True) - elif self.state == state.shutdown: - rq.finish_runqueue(False) - failures = 0 - try: - retval = rq.execute_runqueue() - except runqueue.TaskFailure as exc: - for fnid in exc.args: - buildlog.error("'%s' failed" % taskdata.fn_index[fnid]) - failures += len(exc.args) - retval = False - if not retval: - bb.event.fire(bb.event.BuildCompleted(buildname, item, failures), self.configuration.event_data) - self.command.finishAsyncCommand() - return False - if retval is True: - return True - return retval - - self.server.register_idle_function(buildFileIdle, rq) - - def buildTargets(self, targets, task): - """ - Attempt to build the targets specified - """ - - # Need files parsed - self.updateCache() - - # If we are told to do the NULL task then query the default task - if (task == None): - task = self.configuration.cmd - - targets = self.checkPackages(targets) - - def buildTargetsIdle(server, rq, abort): - if abort or self.state == state.stop: - rq.finish_runqueue(True) - elif self.state == state.shutdown: - rq.finish_runqueue(False) - failures = 0 - try: - retval = rq.execute_runqueue() - except runqueue.TaskFailure as exc: - for fnid in exc.args: - buildlog.error("'%s' failed" % taskdata.fn_index[fnid]) - failures += len(exc.args) - retval = False - if not retval: - bb.event.fire(bb.event.BuildCompleted(buildname, targets, failures), self.configuration.event_data) - self.command.finishAsyncCommand() - return False - if retval is True: - return True - return retval - - self.buildSetVars() - - buildname = bb.data.getVar("BUILDNAME", self.configuration.data) - bb.event.fire(bb.event.BuildStarted(buildname, targets), self.configuration.event_data) - - localdata = data.createCopy(self.configuration.data) - bb.data.update_data(localdata) - 
bb.data.expandKeys(localdata) - - taskdata = bb.taskdata.TaskData(self.configuration.abort) - - runlist = [] - for k in targets: - taskdata.add_provider(localdata, self.status, k) - runlist.append([k, "do_%s" % task]) - taskdata.add_unresolved(localdata, self.status) - - # Clear locks - bb.fetch.persistent_database_connection = {} - - rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist) - - self.server.register_idle_function(buildTargetsIdle, rq) - - def updateCache(self): - if self.state == state.running: - return - - if self.state != state.parsing: - self.parseConfiguration () - - # Import Psyco if available and not disabled - import platform - if platform.machine() in ['i386', 'i486', 'i586', 'i686']: - if not self.configuration.disable_psyco: - try: - import psyco - except ImportError: - collectlog.info("Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.") - else: - psyco.bind( CookerParser.parse_next ) - else: - collectlog.info("You have disabled Psyco. This decreases performance.") - - self.status = bb.cache.CacheData() - - ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or "" - self.status.ignored_dependencies = set(ignore.split()) - - for dep in self.configuration.extra_assume_provided: - self.status.ignored_dependencies.add(dep) - - self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) ) - - (filelist, masked) = self.collect_bbfiles() - bb.data.renameVar("__depends", "__base_depends", self.configuration.data) - - self.parser = CookerParser(self, filelist, masked) - self.state = state.parsing - - if not self.parser.parse_next(): - collectlog.debug(1, "parsing complete") - self.buildDepgraph() - self.state = state.running - return None - - return True - - def checkPackages(self, pkgs_to_build): - - if len(pkgs_to_build) == 0: - raise NothingToBuild - - if 'world' in pkgs_to_build: - self.buildWorldTargetList() - pkgs_to_build.remove('world') - for t in self.status.world_target: - pkgs_to_build.append(t) - - return pkgs_to_build - - def get_bbfiles( self, path = os.getcwd() ): - """Get list of default .bb files by reading out the current directory""" - contents = os.listdir(path) - bbfiles = [] - for f in contents: - (root, ext) = os.path.splitext(f) - if ext == ".bb": - bbfiles.append(os.path.abspath(os.path.join(os.getcwd(), f))) - return bbfiles - - def find_bbfiles( self, path ): - """Find all the .bb and .bbappend files in a directory""" - from os.path import join - - found = [] - for dir, dirs, files in os.walk(path): - for ignored in ('SCCS', 'CVS', '.svn'): - if ignored in dirs: - dirs.remove(ignored) - found += [join(dir, f) for f in files if (f.endswith('.bb') or f.endswith('.bbappend'))] - - return found - - def collect_bbfiles( self ): - """Collect all available .bb build files""" - parsed, cached, skipped, masked = 0, 0, 0, 0 - - collectlog.debug(1, "collecting .bb files") - - files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split() - data.setVar("BBFILES", " ".join(files), self.configuration.data) - - if not len(files): - files = self.get_bbfiles() - - if not len(files): - collectlog.error("no recipe files to build, check your BBPATH and BBFILES?") - bb.event.fire(CookerExit(), self.configuration.event_data) - - newfiles = set() - for f in files: - if os.path.isdir(f): - dirfiles = self.find_bbfiles(f) - newfiles.update(dirfiles) - else: - globbed = glob.glob(f) - if not globbed and os.path.exists(f): - globbed = [f] 
- newfiles.update(globbed) - - bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1) - - if bbmask: - try: - bbmask_compiled = re.compile(bbmask) - except sre_constants.error: - collectlog.critical("BBMASK is not a valid regular expression, ignoring.") - return list(newfiles), 0 - - bbfiles = [] - bbappend = [] - for f in newfiles: - if bbmask and bbmask_compiled.search(f): - collectlog.debug(1, "skipping masked file %s", f) - masked += 1 - continue - if f.endswith('.bb'): - bbfiles.append(f) - elif f.endswith('.bbappend'): - bbappend.append(f) - else: - collectlog.debug(1, "skipping %s: unknown file extension", f) - - # Build a list of .bbappend files for each .bb file - for f in bbappend: - base = os.path.basename(f).replace('.bbappend', '.bb') - if not base in self.appendlist: - self.appendlist[base] = [] - self.appendlist[base].append(f) - - return (bbfiles, masked) - - def get_file_appends(self, fn): - """ - Returns a list of .bbappend files to apply to fn - NB: collect_bbfiles() must have been called prior to this - """ - f = os.path.basename(fn) - if f in self.appendlist: - return self.appendlist[f] - return [] - - def pre_serve(self): - # Empty the environment. The environment will be populated as - # necessary from the data store. - #bb.utils.empty_environment() - return - - def post_serve(self): - bb.event.fire(CookerExit(), self.configuration.event_data) - - def shutdown(self): - self.state = state.shutdown - - def stop(self): - self.state = state.stop - -def server_main(cooker, func, *args): - cooker.pre_serve() - - if cooker.configuration.profile: - try: - import cProfile as profile - except: - import profile - prof = profile.Profile() - - ret = profile.Profile.runcall(prof, func, *args) - - prof.dump_stats("profile.log") - - # Redirect stdout to capture profile information - pout = open('profile.log.processed', 'w') - so = sys.stdout.fileno() - orig_so = os.dup(sys.stdout.fileno()) - os.dup2(pout.fileno(), so) - - import pstats - p = pstats.Stats('profile.log') - p.sort_stats('time') - p.print_stats() - p.print_callers() - p.sort_stats('cumulative') - p.print_stats() - - os.dup2(orig_so, so) - pout.flush() - pout.close() - - print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed") - - else: - ret = func(*args) - - cooker.post_serve() - - return ret - -class CookerExit(bb.event.Event): - """ - Notify clients of the Cooker shutdown - """ - - def __init__(self): - bb.event.Event.__init__(self) - -class ParsingFailure(Exception): - def __init__(self, realexception, recipe): - self.realexception = realexception - self.recipe = recipe - Exception.__init__(self, "Failure when parsing %s" % recipe) - self.args = (realexception, recipe) - -def parse_file(task): - filename, appends = task - try: - return True, bb.cache.Cache.parse(filename, appends, parse_file.cfg) - except Exception, exc: - exc.recipe = filename - raise exc - # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown - # and for example a worker thread doesn't just exit on its own in response to - # a SystemExit event for example. 
- except BaseException, exc: - raise ParsingFailure(exc, filename) - -class CookerParser(object): - def __init__(self, cooker, filelist, masked): - self.filelist = filelist - self.cooker = cooker - self.cfgdata = cooker.configuration.data - - # Accounting statistics - self.parsed = 0 - self.cached = 0 - self.error = 0 - self.masked = masked - - self.skipped = 0 - self.virtuals = 0 - self.total = len(filelist) - - self.current = 0 - self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or - multiprocessing.cpu_count()) - - self.bb_cache = bb.cache.Cache(self.cfgdata) - self.fromcache = [] - self.willparse = [] - for filename in self.filelist: - appends = self.cooker.get_file_appends(filename) - if not self.bb_cache.cacheValid(filename): - self.willparse.append((filename, appends)) - else: - self.fromcache.append((filename, appends)) - self.toparse = self.total - len(self.fromcache) - self.progress_chunk = max(self.toparse / 100, 1) - - self.start() - - def start(self): - def init(cfg): - signal.signal(signal.SIGINT, signal.SIG_IGN) - parse_file.cfg = cfg - - bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata) - - self.pool = multiprocessing.Pool(self.num_processes, init, [self.cfgdata]) - parsed = self.pool.imap(parse_file, self.willparse) - self.pool.close() - - self.results = itertools.chain(self.load_cached(), parsed) - - def shutdown(self, clean=True): - if clean: - event = bb.event.ParseCompleted(self.cached, self.parsed, - self.skipped, self.masked, - self.virtuals, self.error, - self.total) - bb.event.fire(event, self.cfgdata) - else: - self.pool.terminate() - self.pool.join() - - sync = threading.Thread(target=self.bb_cache.sync) - sync.start() - atexit.register(lambda: sync.join()) - - codesync = threading.Thread(target=bb.codeparser.parser_cache_save(self.cooker.configuration.data)) - codesync.start() - atexit.register(lambda: codesync.join()) - - def load_cached(self): - for filename, appends in self.fromcache: - cached, infos = self.bb_cache.load(filename, appends, self.cfgdata) - yield not cached, infos - - def parse_next(self): - try: - parsed, result = self.results.next() - except StopIteration: - self.shutdown() - return False - except KeyboardInterrupt: - self.shutdown(clean=False) - raise - except Exception as exc: - self.shutdown(clean=False) - bb.fatal('Error parsing %s: %s' % (exc.recipe, exc)) - - self.current += 1 - self.virtuals += len(result) - if parsed: - self.parsed += 1 - if self.parsed % self.progress_chunk == 0: - bb.event.fire(bb.event.ParseProgress(self.parsed), - self.cfgdata) - else: - self.cached += 1 - - for virtualfn, info in result: - if info.skipped: - self.skipped += 1 - else: - self.bb_cache.add_info(virtualfn, info, self.cooker.status, - parsed=parsed) - return True - - def reparse(self, filename): - infos = self.bb_cache.parse(filename, - self.cooker.get_file_appends(filename), - self.cfgdata) - for vfn, info in infos: - self.cooker.status.add_from_recipeinfo(vfn, info) diff --git a/bitbake/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py deleted file mode 100644 index f0714b3af..000000000 --- a/bitbake/lib/bb/daemonize.py +++ /dev/null @@ -1,190 +0,0 @@ -""" -Python Daemonizing helper - -Configurable daemon behaviors: - - 1.) The current working directory set to the "/" directory. - 2.) The current file creation mode mask set to 0. - 3.) Close all open files (1024). - 4.) Redirect standard I/O streams to "/dev/null". - -A failed call to fork() now raises an exception. 
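Stripped of the logging and descriptor bookkeeping handled below, the double-fork technique this module is built around fits in a few lines. A minimal sketch, not a drop-in replacement for the full createDaemon that follows:

import os, sys

def daemonize(work):
    if os.fork() > 0:
        return                      # original parent returns to its caller
    os.setsid()                     # first child: new session, no controlling tty
    if os.fork() > 0:
        os._exit(0)                 # first child exits, orphaning the grandchild
    # Grandchild: a non-session-leader that can never reacquire a tty.
    devnull = open(os.devnull, "r+")
    for stream in (sys.stdin, sys.stdout, sys.stderr):
        os.dup2(devnull.fileno(), stream.fileno())
    work()
    os._exit(0)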
- -References: - 1) Advanced Programming in the Unix Environment: W. Richard Stevens - 2) Unix Programming Frequently Asked Questions: - http://www.erlenstar.demon.co.uk/unix/faq_toc.html - -Modified to allow a function to be daemonized and return for -bitbake use by Richard Purdie -""" - -__author__ = "Chad J. Schroeder" -__copyright__ = "Copyright (C) 2005 Chad J. Schroeder" -__version__ = "0.2" - -# Standard Python modules. -import os # Miscellaneous OS interfaces. -import sys # System-specific parameters and functions. - -# Default daemon parameters. -# File mode creation mask of the daemon. -# For BitBake's children, we do want to inherit the parent umask. -UMASK = None - -# Default maximum for the number of available file descriptors. -MAXFD = 1024 - -# The standard I/O file descriptors are redirected to /dev/null by default. -if (hasattr(os, "devnull")): - REDIRECT_TO = os.devnull -else: - REDIRECT_TO = "/dev/null" - -def createDaemon(function, logfile): - """ - Detach a process from the controlling terminal and run it in the - background as a daemon, returning control to the caller. - """ - - try: - # Fork a child process so the parent can exit. This returns control to - # the command-line or shell. It also guarantees that the child will not - # be a process group leader, since the child receives a new process ID - # and inherits the parent's process group ID. This step is required - # to insure that the next call to os.setsid is successful. - pid = os.fork() - except OSError as e: - raise Exception("%s [%d]" % (e.strerror, e.errno)) - - if (pid == 0): # The first child. - # To become the session leader of this new session and the process group - # leader of the new process group, we call os.setsid(). The process is - # also guaranteed not to have a controlling terminal. - os.setsid() - - # Is ignoring SIGHUP necessary? - # - # It's often suggested that the SIGHUP signal should be ignored before - # the second fork to avoid premature termination of the process. The - # reason is that when the first child terminates, all processes, e.g. - # the second child, in the orphaned group will be sent a SIGHUP. - # - # "However, as part of the session management system, there are exactly - # two cases where SIGHUP is sent on the death of a process: - # - # 1) When the process that dies is the session leader of a session that - # is attached to a terminal device, SIGHUP is sent to all processes - # in the foreground process group of that terminal device. - # 2) When the death of a process causes a process group to become - # orphaned, and one or more processes in the orphaned group are - # stopped, then SIGHUP and SIGCONT are sent to all members of the - # orphaned group." [2] - # - # The first case can be ignored since the child is guaranteed not to have - # a controlling terminal. The second case isn't so easy to dismiss. - # The process group is orphaned when the first child terminates and - # POSIX.1 requires that every STOPPED process in an orphaned process - # group be sent a SIGHUP signal followed by a SIGCONT signal. Since the - # second child is not STOPPED though, we can safely forego ignoring the - # SIGHUP signal. In any case, there are no ill-effects if it is ignored. - # - # import signal # Set handlers for asynchronous events. - # signal.signal(signal.SIGHUP, signal.SIG_IGN) - - try: - # Fork a second child and exit immediately to prevent zombies. This - # causes the second child process to be orphaned, making the init - # process responsible for its cleanup. 
And, since the first child is - # a session leader without a controlling terminal, it's possible for - # it to acquire one by opening a terminal in the future (System V- - # based systems). This second fork guarantees that the child is no - # longer a session leader, preventing the daemon from ever acquiring - # a controlling terminal. - pid = os.fork() # Fork a second child. - except OSError as e: - raise Exception("%s [%d]" % (e.strerror, e.errno)) - - if (pid == 0): # The second child. - # We probably don't want the file mode creation mask inherited from - # the parent, so we give the child complete control over permissions. - if UMASK is not None: - os.umask(UMASK) - else: - # Parent (the first child) of the second child. - os._exit(0) - else: - # exit() or _exit()? - # _exit is like exit(), but it doesn't call any functions registered - # with atexit (and on_exit) or any registered signal handlers. It also - # closes any open file descriptors. Using exit() may cause all stdio - # streams to be flushed twice and any temporary files may be unexpectedly - # removed. It's therefore recommended that child branches of a fork() - # and the parent branch(es) of a daemon use _exit(). - return - - # Close all open file descriptors. This prevents the child from keeping - # open any file descriptors inherited from the parent. There is a variety - # of methods to accomplish this task. Three are listed below. - # - # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum - # number of open file descriptors to close. If it doesn't exist, use - # the default value (configurable). - # - # try: - # maxfd = os.sysconf("SC_OPEN_MAX") - # except (AttributeError, ValueError): - # maxfd = MAXFD - # - # OR - # - # if (os.sysconf_names.has_key("SC_OPEN_MAX")): - # maxfd = os.sysconf("SC_OPEN_MAX") - # else: - # maxfd = MAXFD - # - # OR - # - # Use the getrlimit method to retrieve the maximum file descriptor number - # that can be opened by this process. If there is no limit on the - # resource, use the default value. - # - import resource # Resource usage information. - maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1] - if (maxfd == resource.RLIM_INFINITY): - maxfd = MAXFD - - # Iterate through and close all file descriptors. -# for fd in range(0, maxfd): -# try: -# os.close(fd) -# except OSError: # ERROR, fd wasn't open to begin with (ignored) -# pass - - # Redirect the standard I/O file descriptors to the specified file. Since - # the daemon has no controlling terminal, most daemons redirect stdin, - # stdout, and stderr to /dev/null. This is done to prevent side-effects - # from reads and writes to the standard I/O file descriptors. - - # This call to open is guaranteed to return the lowest file descriptor, - # which will be 0 (stdin), since it was closed above. -# os.open(REDIRECT_TO, os.O_RDWR) # standard input (0) - - # Duplicate standard input to standard output and standard error. 
-# os.dup2(0, 1) # standard output (1) -# os.dup2(0, 2) # standard error (2) - - - si = file('/dev/null', 'r') - so = file(logfile, 'w') - se = so - - - # Replace those fds with our own - os.dup2(si.fileno(), sys.stdin.fileno()) - os.dup2(so.fileno(), sys.stdout.fileno()) - os.dup2(se.fileno(), sys.stderr.fileno()) - - function() - - os._exit(0) diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py deleted file mode 100644 index 50f2218a7..000000000 --- a/bitbake/lib/bb/data.py +++ /dev/null @@ -1,338 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Data' implementations - -Functions for interacting with the data structure used by the -BitBake build tools. - -The expandData and update_data are the most expensive -operations. At night the cookie monster came by and -suggested 'give me cookies on setting the variables and -things will work out'. Taking this suggestion into account, -and applying the skills from the not yet passed 'Entwurf und -Analyse von Algorithmen' lecture, the cookie -monster seems to be right. We will track setVar more carefully -to have faster update_data and expandKeys operations. - -This is a trade-off between speed and memory again, but -speed is more critical here. -""" - -# Copyright (C) 2003, 2004 Chris Larson -# Copyright (C) 2005 Holger Hans Peter Freyther -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -#Based on functions from the base bb module, Copyright 2003 Holger Schurig - -import sys, os, re -if sys.argv[0][-5:] == "pydoc": - path = os.path.dirname(os.path.dirname(sys.argv[1])) -else: - path = os.path.dirname(os.path.dirname(sys.argv[0])) -sys.path.insert(0, path) -from itertools import groupby - -from bb import data_smart -from bb import codeparser -import bb - -_dict_type = data_smart.DataSmart - -def init(): - """Return a new object representing the Bitbake data""" - return _dict_type() - -def init_db(parent = None): - """Return a new object representing the Bitbake data, - optionally based on an existing object""" - if parent: - return parent.createCopy() - else: - return _dict_type() - -def createCopy(source): - """Link the source set to the destination - If one does not find the value in the destination set, - the search will go on to the source set to get the value. - Values from the source are copy-on-write, i.e. any attempt to - modify one of them will end up putting the modified value - in the destination set. 
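A minimal sketch of that lookup chain, with an invented class standing in for bb.COW's COWDictBase: reads fall through to the source mapping, writes always land in the copy.

class ToyCow:
    def __init__(self, parent=None):
        self.parent = parent
        self.local = {}

    def __getitem__(self, key):
        if key in self.local:
            return self.local[key]
        if self.parent is not None:
            return self.parent[key]   # fall through to the source set
        raise KeyError(key)

    def __setitem__(self, key, value):
        self.local[key] = value       # never touches the source set

base = ToyCow()
base["CFLAGS"] = "-O2"
copy = ToyCow(base)
copy["CFLAGS"] = "-O2 -g"             # modified value lands in the copy
assert base["CFLAGS"] == "-O2" and copy["CFLAGS"] == "-O2 -g"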
- """ - return source.createCopy() - -def initVar(var, d): - """Non-destructive var init for data structure""" - d.initVar(var) - - -def setVar(var, value, d): - """Set a variable to a given value""" - d.setVar(var, value) - - -def getVar(var, d, exp = 0): - """Gets the value of a variable""" - return d.getVar(var, exp) - - -def renameVar(key, newkey, d): - """Renames a variable from key to newkey""" - d.renameVar(key, newkey) - -def delVar(var, d): - """Removes a variable from the data set""" - d.delVar(var) - -def setVarFlag(var, flag, flagvalue, d): - """Set a flag for a given variable to a given value""" - d.setVarFlag(var, flag, flagvalue) - -def getVarFlag(var, flag, d): - """Gets given flag from given var""" - return d.getVarFlag(var, flag) - -def delVarFlag(var, flag, d): - """Removes a given flag from the variable's flags""" - d.delVarFlag(var, flag) - -def setVarFlags(var, flags, d): - """Set the flags for a given variable - - Note: - setVarFlags will not clear previous - flags. Think of this method as - addVarFlags - """ - d.setVarFlags(var, flags) - -def getVarFlags(var, d): - """Gets a variable's flags""" - return d.getVarFlags(var) - -def delVarFlags(var, d): - """Removes a variable's flags""" - d.delVarFlags(var) - -def keys(d): - """Return a list of keys in d""" - return d.keys() - - -__expand_var_regexp__ = re.compile(r"\${[^{}]+}") -__expand_python_regexp__ = re.compile(r"\${@.+?}") - -def expand(s, d, varname = None): - """Variable expansion using the data store""" - return d.expand(s, varname) - -def expandKeys(alterdata, readdata = None): - if readdata == None: - readdata = alterdata - - todolist = {} - for key in keys(alterdata): - if not '${' in key: - continue - - ekey = expand(key, readdata) - if key == ekey: - continue - todolist[key] = ekey - - # These two for loops are split for performance to maximise the - # usefulness of the expand cache - - for key in todolist: - ekey = todolist[key] - renameVar(key, ekey, alterdata) - -def inheritFromOS(d): - """Inherit variables from the environment.""" - exportlist = bb.utils.preserved_envvars_exported() - for s in os.environ.keys(): - try: - setVar(s, os.environ[s], d) - if s in exportlist: - setVarFlag(s, "export", True, d) - except TypeError: - pass - -def emit_var(var, o=sys.__stdout__, d = init(), all=False): - """Emit a variable to be sourced by a shell.""" - if getVarFlag(var, "python", d): - return 0 - - export = getVarFlag(var, "export", d) - unexport = getVarFlag(var, "unexport", d) - func = getVarFlag(var, "func", d) - if not all and not export and not unexport and not func: - return 0 - - try: - if all: - oval = getVar(var, d, 0) - val = getVar(var, d, 1) - except (KeyboardInterrupt, bb.build.FuncFailed): - raise - except Exception, exc: - o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc))) - return 0 - - if all: - commentVal = re.sub('\n', '\n#', str(oval)) - o.write('# %s=%s\n' % (var, commentVal)) - - if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all: - return 0 - - varExpanded = expand(var, d) - - if unexport: - o.write('unset %s\n' % varExpanded) - return 0 - - if not val: - return 0 - - val = str(val) - - if func: - # NOTE: should probably check for unbalanced {} within the var - o.write("%s() {\n%s\n}\n" % (varExpanded, val)) - return 1 - - if export: - o.write('export ') - - # if we're going to output this within doublequotes, - # to a shell, we need to escape the quotes in the var - alter = 
re.sub('"', '\\"', val.strip()) - alter = re.sub('\n', ' \\\n', alter) - o.write('%s="%s"\n' % (varExpanded, alter)) - return 0 - -def emit_env(o=sys.__stdout__, d = init(), all=False): - """Emits all items in the data store in a format such that it can be sourced by a shell.""" - - isfunc = lambda key: bool(d.getVarFlag(key, "func")) - keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc) - grouped = groupby(keys, isfunc) - for isfunc, keys in grouped: - for key in keys: - emit_var(key, o, d, all and not isfunc) and o.write('\n') - -def export_vars(d): - keys = (key for key in d.keys() if d.getVarFlag(key, "export")) - ret = {} - for k in keys: - try: - v = d.getVar(k, True) - if v: - ret[k] = v - except (KeyboardInterrupt, bb.build.FuncFailed): - raise - except Exception, exc: - pass - return ret - -def export_envvars(v, d): - for s in os.environ.keys(): - if s not in v: - v[s] = os.environ[s] - return v - -def emit_func(func, o=sys.__stdout__, d = init()): - """Emits all items in the data store in a format such that it can be sourced by a shell.""" - - keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func")) - for key in keys: - emit_var(key, o, d, False) and o.write('\n') - - emit_var(func, o, d, False) and o.write('\n') - newdeps = bb.codeparser.ShellParser().parse_shell(d.getVar(func, True)) - seen = set() - while newdeps: - deps = newdeps - seen |= deps - newdeps = set() - for dep in deps: - if bb.data.getVarFlag(dep, "func", d): - emit_var(dep, o, d, False) and o.write('\n') - newdeps |= bb.codeparser.ShellParser().parse_shell(d.getVar(dep, True)) - newdeps -= seen - -def update_data(d): - """Performs final steps upon the datastore, including application of overrides""" - d.finalize() - -def build_dependencies(key, keys, shelldeps, d): - deps = set() - try: - if d.getVarFlag(key, "func"): - if d.getVarFlag(key, "python"): - parsedvar = d.expandWithRefs(d.getVar(key, False), key) - parser = bb.codeparser.PythonParser() - parser.parse_python(parsedvar.value) - deps = deps | parser.references - else: - parsedvar = d.expandWithRefs(d.getVar(key, False), key) - parser = bb.codeparser.ShellParser() - parser.parse_shell(parsedvar.value) - deps = deps | shelldeps - deps = deps | parsedvar.references - deps = deps | (keys & parser.execs) | (keys & parsedvar.execs) - else: - parser = d.expandWithRefs(d.getVar(key, False), key) - deps |= parser.references - deps = deps | (keys & parser.execs) - deps |= set((d.getVarFlag(key, "vardeps", True) or "").split()) - deps -= set((d.getVarFlag(key, "vardepsexclude", True) or "").split()) - except: - bb.note("Error expanding variable %s" % key) - raise - return deps - #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs))) - #d.setVarFlag(key, "vardeps", deps) - -def generate_dependencies(d): - - keys = set(key for key in d.keys() if not key.startswith("__")) - shelldeps = set(key for key in keys if d.getVarFlag(key, "export") and not d.getVarFlag(key, "unexport")) - - deps = {} - - tasklist = bb.data.getVar('__BBTASKS', d) or [] - for task in tasklist: - deps[task] = build_dependencies(task, keys, shelldeps, d) - newdeps = deps[task] - seen = set() - while newdeps: - nextdeps = newdeps - seen |= nextdeps - newdeps = set() - for dep in nextdeps: - if dep not in deps: - deps[dep] = build_dependencies(dep, keys, shelldeps, d) - newdeps |= deps[dep] - newdeps -= seen - #print "For %s: %s" % (task, str(taskdeps[task])) - return tasklist, deps - -def 
inherits_class(klass, d): - val = getVar('__inherit_cache', d) or [] - if os.path.join('classes', '%s.bbclass' % klass) in val: - return True - return False diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py deleted file mode 100644 index df9798ad5..000000000 --- a/bitbake/lib/bb/data_smart.py +++ /dev/null @@ -1,428 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake Smart Dictionary Implementation - -Functions for interacting with the data structure used by the -BitBake build tools. - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# Copyright (C) 2004, 2005 Seb Frankengul -# Copyright (C) 2005, 2006 Holger Hans Peter Freyther -# Copyright (C) 2005 Uli Luckas -# Copyright (C) 2005 ROAD GmbH -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -import copy, re -from collections import MutableMapping -import logging -import bb, bb.codeparser -from bb import utils -from bb.COW import COWDictBase - -logger = logging.getLogger("BitBake.Data") - -__setvar_keyword__ = ["_append", "_prepend"] -__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?') -__expand_var_regexp__ = re.compile(r"\${[^{}]+}") -__expand_python_regexp__ = re.compile(r"\${@.+?}") - - -class VariableParse: - def __init__(self, varname, d, val = None): - self.varname = varname - self.d = d - self.value = val - - self.references = set() - self.execs = set() - - def var_sub(self, match): - key = match.group()[2:-1] - if self.varname and key: - if self.varname == key: - raise Exception("variable %s references itself!"
% self.varname) - var = self.d.getVar(key, 1) - if var is not None: - self.references.add(key) - return var - else: - return match.group() - - def python_sub(self, match): - code = match.group()[3:-1] - codeobj = compile(code.strip(), self.varname or "<expansion>", "eval") - - parser = bb.codeparser.PythonParser() - parser.parse_python(code) - self.references |= parser.references - self.execs |= parser.execs - - value = utils.better_eval(codeobj, DataContext(self.d)) - return str(value) - - -class DataContext(dict): - def __init__(self, metadata, **kwargs): - self.metadata = metadata - dict.__init__(self, **kwargs) - self['d'] = metadata - - def __missing__(self, key): - value = self.metadata.getVar(key, True) - if value is None or self.metadata.getVarFlag(key, 'func'): - raise KeyError(key) - else: - return value - -class ExpansionError(Exception): - def __init__(self, varname, expression, exception): - self.expression = expression - self.variablename = varname - self.exception = exception - self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception) - Exception.__init__(self, self.msg) - self.args = (varname, expression, exception) - def __str__(self): - return self.msg - -class DataSmart(MutableMapping): - def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ): - self.dict = {} - - # cookie monster tribute - self._special_values = special - self._seen_overrides = seen - - self.expand_cache = {} - - def expandWithRefs(self, s, varname): - - if not isinstance(s, basestring): # sanity check - return VariableParse(varname, self, s) - - if varname and varname in self.expand_cache: - return self.expand_cache[varname] - - varparse = VariableParse(varname, self) - - while s.find('${') != -1: - olds = s - try: - s = __expand_var_regexp__.sub(varparse.var_sub, s) - s = __expand_python_regexp__.sub(varparse.python_sub, s) - if s == olds: - break - except ExpansionError: - raise - except Exception as exc: - raise ExpansionError(varname, s, exc) - - varparse.value = s - - if varname: - self.expand_cache[varname] = varparse - - return varparse - - def expand(self, s, varname): - return self.expandWithRefs(s, varname).value - - - def finalize(self): - """Performs final steps upon the datastore, including application of overrides""" - - overrides = (self.getVar("OVERRIDES", True) or "").split(":") or [] - - # - # Well let us see what breaks here. We used to iterate - # over each variable and apply the override and then - # do the line expanding. - # If we have bad luck - which we will have - the keys - # where in some order that is so important for this - # method which we don't have anymore. - # Anyway we will fix that and write test cases this - time.
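For context on the pass being deleted here: the override machinery below resolves names of the form VAR_<override>. When <override> appears in OVERRIDES, the value of VAR_<override> is copied over VAR, and any _append/_prepend fragments recorded by setVar() are applied afterwards. A minimal sketch of that resolution idea in plain Python follows; apply_overrides and the sample values are illustrative only, not BitBake's actual DataSmart API:

    # Sketch of conditional-override resolution, loosely modelled on
    # DataSmart.finalize() below. Hypothetical helper, not part of BitBake.
    def apply_overrides(variables, overrides):
        result = dict(variables)
        for o in overrides:
            suffix = "_" + o
            for name in [n for n in result if n.endswith(suffix)]:
                # VAR_<override> wins over plain VAR while <override> is active
                result[name[:-len(suffix)]] = result[name]
        return result

    print(apply_overrides({"CFLAGS": "-O2", "CFLAGS_arm": "-O2 -mthumb"}, ["arm"]))
    # -> {'CFLAGS': '-O2 -mthumb', 'CFLAGS_arm': '-O2 -mthumb'}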
- - # - # First we apply all overrides - # Then we will handle _append and _prepend - # - - for o in overrides: - # calculate '_'+override - l = len(o) + 1 - - # see if one should even try - if o not in self._seen_overrides: - continue - - vars = self._seen_overrides[o] - for var in vars: - name = var[:-l] - try: - self.setVar(name, self.getVar(var, False)) - except Exception: - logger.info("Untracked delVar") - - # now on to the appends and prepends - for op in __setvar_keyword__: - if op in self._special_values: - appends = self._special_values[op] or [] - for append in appends: - keep = [] - for (a, o) in self.getVarFlag(append, op) or []: - if o and not o in overrides: - keep.append((a ,o)) - continue - - if op is "_append": - sval = self.getVar(append, False) or "" - sval += a - self.setVar(append, sval) - elif op is "_prepend": - sval = a + (self.getVar(append, False) or "") - self.setVar(append, sval) - - # We save overrides that may be applied at some later stage - if keep: - self.setVarFlag(append, op, keep) - else: - self.delVarFlag(append, op) - - def initVar(self, var): - self.expand_cache = {} - if not var in self.dict: - self.dict[var] = {} - - def _findVar(self, var): - dest = self.dict - while dest: - if var in dest: - return dest[var] - - if "_data" not in dest: - break - dest = dest["_data"] - - def _makeShadowCopy(self, var): - if var in self.dict: - return - - local_var = self._findVar(var) - - if local_var: - self.dict[var] = copy.copy(local_var) - else: - self.initVar(var) - - def setVar(self, var, value): - self.expand_cache = {} - match = __setvar_regexp__.match(var) - if match and match.group("keyword") in __setvar_keyword__: - base = match.group('base') - keyword = match.group("keyword") - override = match.group('add') - l = self.getVarFlag(base, keyword) or [] - l.append([value, override]) - self.setVarFlag(base, keyword, l) - - # todo make sure keyword is not __doc__ or __module__ - # pay the cookie monster - try: - self._special_values[keyword].add( base ) - except KeyError: - self._special_values[keyword] = set() - self._special_values[keyword].add( base ) - - return - - if not var in self.dict: - self._makeShadowCopy(var) - - # more cookies for the cookie monster - if '_' in var: - override = var[var.rfind('_')+1:] - if override not in self._seen_overrides: - self._seen_overrides[override] = set() - self._seen_overrides[override].add( var ) - - # setting var - self.dict[var]["content"] = value - - def getVar(self, var, exp): - value = self.getVarFlag(var, "content") - - if exp and value: - return self.expand(value, var) - return value - - def renameVar(self, key, newkey): - """ - Rename the variable key to newkey - """ - val = self.getVar(key, 0) - if val is not None: - self.setVar(newkey, val) - - for i in ('_append', '_prepend'): - src = self.getVarFlag(key, i) - if src is None: - continue - - dest = self.getVarFlag(newkey, i) or [] - dest.extend(src) - self.setVarFlag(newkey, i, dest) - - if i in self._special_values and key in self._special_values[i]: - self._special_values[i].remove(key) - self._special_values[i].add(newkey) - - self.delVar(key) - - def delVar(self, var): - self.expand_cache = {} - self.dict[var] = {} - - def setVarFlag(self, var, flag, flagvalue): - if not var in self.dict: - self._makeShadowCopy(var) - self.dict[var][flag] = flagvalue - - def getVarFlag(self, var, flag, expand=False): - local_var = self._findVar(var) - value = None - if local_var: - if flag in local_var: - value = copy.copy(local_var[flag]) - elif flag == "content" and 
"defaultval" in local_var: - value = copy.copy(local_var["defaultval"]) - if expand and value: - value = self.expand(value, None) - return value - - def delVarFlag(self, var, flag): - local_var = self._findVar(var) - if not local_var: - return - if not var in self.dict: - self._makeShadowCopy(var) - - if var in self.dict and flag in self.dict[var]: - del self.dict[var][flag] - - def setVarFlags(self, var, flags): - if not var in self.dict: - self._makeShadowCopy(var) - - for i in flags: - if i == "content": - continue - self.dict[var][i] = flags[i] - - def getVarFlags(self, var): - local_var = self._findVar(var) - flags = {} - - if local_var: - for i in local_var: - if i == "content": - continue - flags[i] = local_var[i] - - if len(flags) == 0: - return None - return flags - - - def delVarFlags(self, var): - if not var in self.dict: - self._makeShadowCopy(var) - - if var in self.dict: - content = None - - # try to save the content - if "content" in self.dict[var]: - content = self.dict[var]["content"] - self.dict[var] = {} - self.dict[var]["content"] = content - else: - del self.dict[var] - - - def createCopy(self): - """ - Create a copy of self by setting _data to self - """ - # we really want this to be a DataSmart... - data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy()) - data.dict["_data"] = self.dict - - return data - - def expandVarref(self, variable, parents=False): - """Find all references to variable in the data and expand it - in place, optionally descending to parent datastores.""" - - if parents: - keys = iter(self) - else: - keys = self.localkeys() - - ref = '${%s}' % variable - value = self.getVar(variable, False) - for key in keys: - referrervalue = self.getVar(key, False) - if referrervalue and ref in referrervalue: - self.setVar(key, referrervalue.replace(ref, value)) - - def localkeys(self): - for key in self.dict: - if key != '_data': - yield key - - def __iter__(self): - seen = set() - def _keys(d): - if "_data" in d: - for key in _keys(d["_data"]): - yield key - - for key in d: - if key != "_data": - if not key in seen: - seen.add(key) - yield key - return _keys(self.dict) - - def __len__(self): - return len(frozenset(self)) - - def __getitem__(self, item): - value = self.getVar(item, False) - if value is None: - raise KeyError(item) - else: - return value - - def __setitem__(self, var, value): - self.setVar(var, value) - - def __delitem__(self, var): - self.delVar(var) diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py deleted file mode 100644 index 3467ddd61..000000000 --- a/bitbake/lib/bb/event.py +++ /dev/null @@ -1,386 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Event' implementation - -Classes and functions for manipulating 'events' in the -BitBake build tools. -""" - -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -import os, sys -import warnings -try: - import cPickle as pickle -except ImportError: - import pickle -import logging -import atexit -import bb.utils - -# This is the pid for which we should generate the event. This is set when -# the runqueue forks off. -worker_pid = 0 -worker_pipe = None - -class Event(object): - """Base class for events""" - - def __init__(self): - self.pid = worker_pid - -NotHandled = 0 -Handled = 1 - -Registered = 10 -AlreadyRegistered = 14 - -# Internal -_handlers = {} -_ui_handlers = {} -_ui_handler_seq = 0 - -# For compatibility -bb.utils._context["NotHandled"] = NotHandled -bb.utils._context["Handled"] = Handled - -def fire_class_handlers(event, d): - if isinstance(event, logging.LogRecord): - return - - for handler in _handlers: - h = _handlers[handler] - event.data = d - if type(h).__name__ == "code": - locals = {"e": event} - bb.utils.simple_exec(h, locals) - ret = bb.utils.better_eval("tmpHandler(e)", locals) - if ret is not None: - warnings.warn("Using Handled/NotHandled in event handlers is deprecated", - DeprecationWarning, stacklevel = 2) - else: - h(event) - del event.data - -ui_queue = [] -@atexit.register -def print_ui_queue(): - """If we're exiting before a UI has been spawned, display any queued - LogRecords to the console.""" - logger = logging.getLogger("BitBake") - if not _ui_handlers: - from bb.msg import BBLogFormatter - console = logging.StreamHandler(sys.stdout) - console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s")) - logger.handlers = [console] - while ui_queue: - event = ui_queue.pop() - if isinstance(event, logging.LogRecord): - logger.handle(event) - -def fire_ui_handlers(event, d): - if not _ui_handlers: - # No UI handlers registered yet, queue up the messages - ui_queue.append(event) - return - - errors = [] - for h in _ui_handlers: - #print "Sending event %s" % event - try: - # We use pickle here since it better handles object instances - # which xmlrpc's marshaller does not. Events *must* be serializable - # by pickle. - _ui_handlers[h].event.send((pickle.dumps(event))) - except: - errors.append(h) - for h in errors: - del _ui_handlers[h] - -def fire(event, d): - """Fire off an Event""" - - # We can fire class handlers in the worker process context and this is - # desired so they get the task based datastore. - # UI handlers need to be fired in the server context so we defer this. They - # don't have a datastore so the datastore context isn't a problem. 
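The comment above describes the worker/server split: class handlers run in the worker so they see the task datastore, while UI events are pickled and shipped to the server over a pipe, framed by <event>...</event> markers (see worker_fire() and fire_from_worker() below). A rough standalone illustration of that framing, using a hypothetical in-process round-trip instead of a real pipe:

    # Sketch of the <event>-framed pickle protocol used by worker_fire()
    # and fire_from_worker(). Names here are invented for the example.
    import pickle

    def frame(event):
        # pickle the event and wrap it in sentinel markers
        return b"<event>" + pickle.dumps(event) + b"</event>"

    def unframe(data):
        if not data.startswith(b"<event>") or not data.endswith(b"</event>"):
            raise ValueError("not an event")
        return pickle.loads(data[7:-8])   # strip 7-byte prefix, 8-byte suffix

    print(unframe(frame({"task": "do_compile", "pid": 42})))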
- - fire_class_handlers(event, d) - if worker_pid != 0: - worker_fire(event, d) - else: - fire_ui_handlers(event, d) - -def worker_fire(event, d): - data = "<event>" + pickle.dumps(event) + "</event>" - worker_pipe.write(data) - -def fire_from_worker(event, d): - if not event.startswith("<event>") or not event.endswith("</event>"): - print("Error, not an event %s" % event) - return - event = pickle.loads(event[7:-8]) - fire_ui_handlers(event, d) - -def register(name, handler): - """Register an Event handler""" - - # already registered - if name in _handlers: - return AlreadyRegistered - - if handler is not None: - # handle string containing python code - if isinstance(handler, basestring): - tmp = "def tmpHandler(e):\n%s" % handler - comp = bb.utils.better_compile(tmp, "tmpHandler(e)", "bb.event._registerCode") - _handlers[name] = comp - else: - _handlers[name] = handler - - return Registered - -def remove(name, handler): - """Remove an Event handler""" - _handlers.pop(name) - -def register_UIHhandler(handler): - bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1 - _ui_handlers[_ui_handler_seq] = handler - return _ui_handler_seq - -def unregister_UIHhandler(handlerNum): - if handlerNum in _ui_handlers: - del _ui_handlers[handlerNum] - return - -def getName(e): - """Returns the name of a class or class instance""" - if getattr(e, "__name__", None) == None: - return e.__class__.__name__ - else: - return e.__name__ - -class ConfigParsed(Event): - """Configuration Parsing Complete""" - -class RecipeParsed(Event): - """ Recipe Parsing Complete """ - - def __init__(self, fn): - self.fn = fn - Event.__init__(self) - -class StampUpdate(Event): - """Trigger for any adjustment of the stamp files to happen""" - - def __init__(self, targets, stampfns): - self._targets = targets - self._stampfns = stampfns - Event.__init__(self) - - def getStampPrefix(self): - return self._stampfns - - def getTargets(self): - return self._targets - - stampPrefix = property(getStampPrefix) - targets = property(getTargets) - -class BuildBase(Event): - """Base class for bbmake run events""" - - def __init__(self, n, p, failures = 0): - self._name = n - self._pkgs = p - Event.__init__(self) - self._failures = failures - - def getPkgs(self): - return self._pkgs - - def setPkgs(self, pkgs): - self._pkgs = pkgs - - def getName(self): - return self._name - - def setName(self, name): - self._name = name - - def getCfg(self): - return self.data - - def setCfg(self, cfg): - self.data = cfg - - def getFailures(self): - """ - Return the number of failed packages - """ - return self._failures - - pkgs = property(getPkgs, setPkgs, None, "pkgs property") - name = property(getName, setName, None, "name property") - cfg = property(getCfg, setCfg, None, "cfg property") - - - - - -class BuildStarted(BuildBase): - """bbmake build run started""" - - -class BuildCompleted(BuildBase): - """bbmake build run completed""" - - - - -class NoProvider(Event): - """No Provider for an Event""" - - def __init__(self, item, runtime=False, dependees=None): - Event.__init__(self) - self._item = item - self._runtime = runtime - self._dependees = dependees - - def getItem(self): - return self._item - - def isRuntime(self): - return self._runtime - -class MultipleProviders(Event): - """Multiple Providers""" - - def __init__(self, item, candidates, runtime = False): - Event.__init__(self) - self._item = item - self._candidates = candidates - self._is_runtime = runtime - - def isRuntime(self): - """ - Is this a runtime issue?
- """ - return self._is_runtime - - def getItem(self): - """ - The name for the to be build item - """ - return self._item - - def getCandidates(self): - """ - Get the possible Candidates for a PROVIDER. - """ - return self._candidates - -class ParseStarted(Event): - """Recipe parsing for the runqueue has begun""" - def __init__(self, total): - Event.__init__(self) - self.total = total - -class ParseCompleted(Event): - """Recipe parsing for the runqueue has completed""" - - def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total): - Event.__init__(self) - self.cached = cached - self.parsed = parsed - self.skipped = skipped - self.virtuals = virtuals - self.masked = masked - self.errors = errors - self.sofar = cached + parsed - self.total = total - -class ParseProgress(Event): - """Recipe parsing progress""" - - def __init__(self, current): - self.current = current - -class CacheLoadStarted(Event): - """Loading of the dependency cache has begun""" - def __init__(self, total): - Event.__init__(self) - self.total = total - -class CacheLoadProgress(Event): - """Cache loading progress""" - def __init__(self, current): - Event.__init__(self) - self.current = current - -class CacheLoadCompleted(Event): - """Cache loading is complete""" - def __init__(self, total, num_entries): - Event.__init__(self) - self.total = total - self.num_entries = num_entries - - -class DepTreeGenerated(Event): - """ - Event when a dependency tree has been generated - """ - - def __init__(self, depgraph): - Event.__init__(self) - self._depgraph = depgraph - -class MsgBase(Event): - """Base class for messages""" - - def __init__(self, msg): - self._message = msg - Event.__init__(self) - -class MsgDebug(MsgBase): - """Debug Message""" - -class MsgNote(MsgBase): - """Note Message""" - -class MsgWarn(MsgBase): - """Warning Message""" - -class MsgError(MsgBase): - """Error Message""" - -class MsgFatal(MsgBase): - """Fatal Message""" - -class MsgPlain(MsgBase): - """General output""" - -class LogHandler(logging.Handler): - """Dispatch logging messages as bitbake events""" - - def emit(self, record): - fire(record, None) - - def filter(self, record): - record.taskpid = worker_pid - return True diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py deleted file mode 100644 index 2f92d87d9..000000000 --- a/bitbake/lib/bb/fetch/__init__.py +++ /dev/null @@ -1,836 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementations - -Classes for obtaining upstream sources for the -BitBake build tools. -""" - -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
-# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -from __future__ import absolute_import -from __future__ import print_function -import os, re -import logging -import bb -from bb import data -from bb import persist_data -from bb import utils - -__version__ = "1" - -logger = logging.getLogger("BitBake.Fetch") - -class MalformedUrl(Exception): - """Exception raised when encountering an invalid url""" - -class FetchError(Exception): - """Exception raised when a download fails""" - -class NoMethodError(Exception): - """Exception raised when there is no method to obtain a supplied url or set of urls""" - -class MissingParameterError(Exception): - """Exception raised when a fetch method is missing a critical parameter in the url""" - -class ParameterError(Exception): - """Exception raised when a url cannot be proccessed due to invalid parameters.""" - -class MD5SumError(Exception): - """Exception raised when a MD5SUM of a file does not match the expected one""" - -class InvalidSRCREV(Exception): - """Exception raised when an invalid SRCREV is encountered""" - -def decodeurl(url): - """Decodes an URL into the tokens (scheme, network location, path, - user, password, parameters). - """ - - m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) - if not m: - raise MalformedUrl(url) - - type = m.group('type') - location = m.group('location') - if not location: - raise MalformedUrl(url) - user = m.group('user') - parm = m.group('parm') - - locidx = location.find('/') - if locidx != -1 and type.lower() != 'file': - host = location[:locidx] - path = location[locidx:] - else: - host = "" - path = location - if user: - m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user) - if m: - user = m.group('user') - pswd = m.group('pswd') - else: - user = '' - pswd = '' - - p = {} - if parm: - for s in parm.split(';'): - s1, s2 = s.split('=') - p[s1] = s2 - - return (type, host, path, user, pswd, p) - -def encodeurl(decoded): - """Encodes a URL from tokens (scheme, network location, path, - user, password, parameters). - """ - - (type, host, path, user, pswd, p) = decoded - - if not type or not path: - raise MissingParameterError("Type or path url components missing when encoding %s" % decoded) - url = '%s://' % type - if user: - url += "%s" % user - if pswd: - url += ":%s" % pswd - url += "@" - if host: - url += "%s" % host - url += "%s" % path - if p: - for parm in p: - url += ";%s=%s" % (parm, p[parm]) - - return url - -def uri_replace(uri, uri_find, uri_replace, d): - if not uri or not uri_find or not uri_replace: - logger.debug(1, "uri_replace: passed an undefined value, not replacing") - uri_decoded = list(decodeurl(uri)) - uri_find_decoded = list(decodeurl(uri_find)) - uri_replace_decoded = list(decodeurl(uri_replace)) - result_decoded = ['', '', '', '', '', {}] - for i in uri_find_decoded: - loc = uri_find_decoded.index(i) - result_decoded[loc] = uri_decoded[loc] - if isinstance(i, basestring): - if (re.match(i, uri_decoded[loc])): - result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc]) - if uri_find_decoded.index(i) == 2: - if d: - localfn = bb.fetch.localpath(uri, d) - if localfn: - result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(bb.fetch.localpath(uri, d))) - else: - return uri - return encodeurl(result_decoded) - -methods = [] -urldata_cache = {} -saved_headrevs = {} - -def fetcher_init(d): - """ - Called to initialize the fetchers once the configuration data is known.
- Calls before this must not hit the cache. - """ - pd = persist_data.persist(d) - # When to drop SCM head revisions controlled by user policy - srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear" - if srcrev_policy == "cache": - logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) - elif srcrev_policy == "clear": - logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) - try: - bb.fetch.saved_headrevs = pd['BB_URI_HEADREVS'].items() - except: - pass - del pd['BB_URI_HEADREVS'] - else: - raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) - - for m in methods: - if hasattr(m, "init"): - m.init(d) - -def fetcher_compare_revisions(d): - """ - Compare the revisions in the persistant cache with current values and - return true/false on whether they've changed. - """ - - pd = persist_data.persist(d) - data = pd['BB_URI_HEADREVS'].items() - data2 = bb.fetch.saved_headrevs - - changed = False - for key in data: - if key not in data2 or data2[key] != data[key]: - logger.debug(1, "%s changed", key) - changed = True - return True - else: - logger.debug(2, "%s did not change", key) - return False - -# Function call order is usually: -# 1. init -# 2. go -# 3. localpaths -# localpath can be called at any time - -def init(urls, d, setup = True): - urldata = {} - - fn = bb.data.getVar('FILE', d, 1) - if fn in urldata_cache: - urldata = urldata_cache[fn] - - for url in urls: - if url not in urldata: - urldata[url] = FetchData(url, d) - - if setup: - for url in urldata: - if not urldata[url].setup: - urldata[url].setup_localpath(d) - - urldata_cache[fn] = urldata - return urldata - -def mirror_from_string(data): - return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ] - -def verify_checksum(u, ud, d): - """ - verify the MD5 and SHA256 checksum for downloaded src - - return value: - - True: checksum matched - - False: checksum unmatched - - if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decide the return value. - if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as - matched - """ - - if not ud.type in ["http", "https", "ftp", "ftps"]: - return - - md5data = bb.utils.md5_file(ud.localpath) - sha256data = bb.utils.sha256_file(ud.localpath) - - if (ud.md5_expected == None or ud.sha256_expected == None): - logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n' - 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"', - ud.localpath, ud.md5_name, md5data, - ud.sha256_name, sha256data) - if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1": - raise FetchError("No checksum specified for %s." % u) - return - - if (ud.md5_expected != md5data or ud.sha256_expected != sha256data): - logger.error('The checksums for "%s" did not match.\n' - ' MD5: expected "%s", got "%s"\n' - ' SHA256: expected "%s", got "%s"\n', - ud.localpath, ud.md5_expected, md5data, - ud.sha256_expected, sha256data) - raise FetchError("%s checksum mismatch." 
% u) - -def go(d, urls = None): - """ - Fetch all urls - init must have previously been called - """ - if not urls: - urls = d.getVar("SRC_URI", 1).split() - urldata = init(urls, d, True) - - for u in urls: - ud = urldata[u] - m = ud.method - localpath = "" - - if not ud.localfile: - continue - - lf = bb.utils.lockfile(ud.lockfile) - - if m.try_premirror(u, ud, d): - # First try fetching uri, u, from PREMIRRORS - mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True)) - localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d)) - elif os.path.exists(ud.localfile): - localpath = ud.localfile - - # Need to re-test forcefetch() which will return true if our copy is too old - if m.forcefetch(u, ud, d) or not localpath: - # Next try fetching from the original uri, u - try: - m.go(u, ud, d) - localpath = ud.localpath - except FetchError: - # Remove any incomplete file - bb.utils.remove(ud.localpath) - # Finally, try fetching uri, u, from MIRRORS - mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True)) - localpath = try_mirrors (d, u, mirrors) - if not localpath or not os.path.exists(localpath): - raise FetchError("Unable to fetch URL %s from any source." % u) - - ud.localpath = localpath - - if os.path.exists(ud.md5): - # Touch the md5 file to show active use of the download - try: - os.utime(ud.md5, None) - except: - # Errors aren't fatal here - pass - else: - # Only check the checksums if we've not seen this item before - verify_checksum(u, ud, d) - Fetch.write_md5sum(u, ud, d) - - bb.utils.unlockfile(lf) - -def checkstatus(d, urls = None): - """ - Check all urls exist upstream - init must have previously been called - """ - urldata = init([], d, True) - - if not urls: - urls = urldata - - for u in urls: - ud = urldata[u] - m = ud.method - logger.debug(1, "Testing URL %s", u) - # First try checking uri, u, from PREMIRRORS - mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True)) - ret = try_mirrors(d, u, mirrors, True) - if not ret: - # Next try checking from the original uri, u - try: - ret = m.checkstatus(u, ud, d) - except: - # Finally, try checking uri, u, from MIRRORS - mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True)) - ret = try_mirrors (d, u, mirrors, True) - - if not ret: - raise FetchError("URL %s doesn't work" % u) - -def localpaths(d): - """ - Return a list of the local filenames, assuming successful fetch - """ - local = [] - urldata = init([], d, True) - - for u in urldata: - ud = urldata[u] - local.append(ud.localpath) - - return local - -srcrev_internal_call = False - -def get_autorev(d): - return get_srcrev(d) - -def get_srcrev(d): - """ - Return the version string for the current package - (usually to be used as PV) - Most packages usually only have one SCM so we just pass on the call. - In the multi SCM case, we build a value based on SRCREV_FORMAT which must - have been set. - """ - - # - # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which - # could translate into a call to here. If it does, we need to catch this - # and provide some way so it knows get_srcrev is active instead of being - # some number etc. hence the srcrev_internal_call tracking and the magic - # "SRCREVINACTION" return value. - # - # Neater solutions welcome! 
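The comment above amounts to a re-entrancy guard: while a fetcher's localpath() is being evaluated, any nested attempt to expand SRCREV receives the magic "SRCREVINACTION" sentinel instead of recursing, as the check that follows shows. A stripped-down illustration of that pattern (the names here are invented for the example, not BitBake API):

    # Re-entrancy guard in the style of srcrev_internal_call: a module-level
    # flag plus a sentinel return value. Illustrative only.
    _active = False

    def get_value():
        global _active
        if _active:                 # nested call: report "in progress"
            return "SENTINEL"
        _active = True
        try:                        # work that may indirectly re-enter
            return "real(%s)" % get_value()
        finally:
            _active = False

    print(get_value())              # -> real(SENTINEL)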
- # - if bb.fetch.srcrev_internal_call: - return "SRCREVINACTION" - - scms = [] - - # Only call setup_localpath on URIs which supports_srcrev() - urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False) - for u in urldata: - ud = urldata[u] - if ud.method.supports_srcrev(): - if not ud.setup: - ud.setup_localpath(d) - scms.append(u) - - if len(scms) == 0: - logger.error("SRCREV was used yet no valid SCM was found in SRC_URI") - raise ParameterError - - if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache": - bb.data.setVar('__BB_DONT_CACHE', '1', d) - - if len(scms) == 1: - return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d) - - # - # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT - # - format = bb.data.getVar('SRCREV_FORMAT', d, 1) - if not format: - logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.") - raise ParameterError - - for scm in scms: - if 'name' in urldata[scm].parm: - name = urldata[scm].parm["name"] - rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d) - format = format.replace(name, rev) - - return format - -def localpath(url, d, cache = True): - """ - Called from the parser with cache=False since the cache isn't ready - at this point. Also called from classed in OE e.g. patch.bbclass - """ - ud = init([url], d) - if ud[url].method: - return ud[url].localpath - return url - -def runfetchcmd(cmd, d, quiet = False): - """ - Run cmd returning the command output - Raise an error if interrupted or cmd fails - Optionally echo command output to stdout - """ - - # Need to export PATH as binary could be in metadata paths - # rather than host provided - # Also include some other variables. - # FIXME: Should really include all export varaiables? - exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST', - 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy', - 'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy', - 'KRB5CCNAME', 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME'] - - for var in exportvars: - val = data.getVar(var, d, True) - if val: - cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd) - - logger.debug(1, "Running %s", cmd) - - # redirect stderr to stdout - stdout_handle = os.popen(cmd + " 2>&1", "r") - output = "" - - while True: - line = stdout_handle.readline() - if not line: - break - if not quiet: - print(line, end=' ') - output += line - - status = stdout_handle.close() or 0 - signal = status >> 8 - exitstatus = status & 0xff - - if signal: - raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output)) - elif status != 0: - raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output)) - - return output - -def try_mirrors(d, uri, mirrors, check = False, force = False): - """ - Try to use a mirrored version of the sources. - This method will be automatically called before the fetchers go. 
- - d Is a bb.data instance - uri is the original uri we're trying to download - mirrors is the list of mirrors we're going to try - """ - fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri)) - if not check and os.access(fpath, os.R_OK) and not force: - logger.debug(1, "%s already exists, skipping checkout.", fpath) - return fpath - - ld = d.createCopy() - for (find, replace) in mirrors: - newuri = uri_replace(uri, find, replace, ld) - if newuri != uri: - try: - ud = FetchData(newuri, ld) - except bb.fetch.NoMethodError: - logger.debug(1, "No method for %s", uri) - continue - - ud.setup_localpath(ld) - - try: - if check: - found = ud.method.checkstatus(newuri, ud, ld) - if found: - return found - else: - ud.method.go(newuri, ud, ld) - return ud.localpath - except (bb.fetch.MissingParameterError, - bb.fetch.FetchError, - bb.fetch.MD5SumError): - import sys - (type, value, traceback) = sys.exc_info() - logger.debug(2, "Mirror fetch failure: %s", value) - bb.utils.remove(ud.localpath) - continue - return None - - -class FetchData(object): - """ - A class which represents the fetcher state for a given URI. - """ - def __init__(self, url, d): - self.localfile = "" - (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d)) - self.date = Fetch.getSRCDate(self, d) - self.url = url - if not self.user and "user" in self.parm: - self.user = self.parm["user"] - if not self.pswd and "pswd" in self.parm: - self.pswd = self.parm["pswd"] - self.setup = False - - if "name" in self.parm: - self.md5_name = "%s.md5sum" % self.parm["name"] - self.sha256_name = "%s.sha256sum" % self.parm["name"] - else: - self.md5_name = "md5sum" - self.sha256_name = "sha256sum" - self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d) - self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d) - - for m in methods: - if m.supports(url, self, d): - self.method = m - return - raise NoMethodError("Missing implementation for url %s" % url) - - def setup_localpath(self, d): - self.setup = True - if "localpath" in self.parm: - # if user sets localpath for file, use it instead. - self.localpath = self.parm["localpath"] - self.basename = os.path.basename(self.localpath) - else: - premirrors = bb.data.getVar('PREMIRRORS', d, True) - local = "" - if premirrors and self.url: - aurl = self.url.split(";")[0] - mirrors = mirror_from_string(premirrors) - for (find, replace) in mirrors: - if replace.startswith("file://"): - path = aurl.split("://")[1] - path = path.split(";")[0] - local = replace.split("://")[1] + os.path.basename(path) - if local == aurl or not os.path.exists(local) or os.path.isdir(local): - local = "" - self.localpath = local - if not local: - try: - bb.fetch.srcrev_internal_call = True - self.localpath = self.method.localpath(self.url, self, d) - finally: - bb.fetch.srcrev_internal_call = False - # We have to clear data's internal caches since the cached value of SRCREV is now wrong. - # Horrible... - bb.data.delVar("ISHOULDNEVEREXIST", d) - - if self.localpath is not None: - # Note: These files should always be in DL_DIR whereas localpath may not be. - basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d) - self.md5 = basepath + '.md5' - self.lockfile = basepath + '.lock' - - -class Fetch(object): - """Base class for 'fetch'ing data""" - - def __init__(self, urls = []): - self.urls = [] - - def supports(self, url, urldata, d): - """ - Check to see if this fetch class supports a given url. 
- """ - return 0 - - def localpath(self, url, urldata, d): - """ - Return the local filename of a given url assuming a successful fetch. - Can also setup variables in urldata for use in go (saving code duplication - and duplicate code execution) - """ - return url - def _strip_leading_slashes(self, relpath): - """ - Remove leading slash as os.path.join can't cope - """ - while os.path.isabs(relpath): - relpath = relpath[1:] - return relpath - - def setUrls(self, urls): - self.__urls = urls - - def getUrls(self): - return self.__urls - - urls = property(getUrls, setUrls, None, "Urls property") - - def forcefetch(self, url, urldata, d): - """ - Force a fetch, even if localpath exists? - """ - return False - - def supports_srcrev(self): - """ - The fetcher supports auto source revisions (SRCREV) - """ - return False - - def go(self, url, urldata, d): - """ - Fetch urls - Assumes localpath was called first - """ - raise NoMethodError("Missing implementation for url") - - def try_premirror(self, url, urldata, d): - """ - Should premirrors be used? - """ - if urldata.method.forcefetch(url, urldata, d): - return True - elif os.path.exists(urldata.md5) and os.path.exists(urldata.localfile): - return False - else: - return True - - def checkstatus(self, url, urldata, d): - """ - Check the status of a URL - Assumes localpath was called first - """ - logger.info("URL %s could not be checked for status since no method exists.", url) - return True - - def getSRCDate(urldata, d): - """ - Return the SRC Date for the component - - d the bb.data module - """ - if "srcdate" in urldata.parm: - return urldata.parm['srcdate'] - - pn = data.getVar("PN", d, 1) - - if pn: - return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1) - - return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1) - getSRCDate = staticmethod(getSRCDate) - - def srcrev_internal_helper(ud, d): - """ - Return: - a) a source revision if specified - b) True if auto srcrev is in action - c) False otherwise - """ - - if 'rev' in ud.parm: - return ud.parm['rev'] - - if 'tag' in ud.parm: - return ud.parm['tag'] - - rev = None - if 'name' in ud.parm: - pn = data.getVar("PN", d, 1) - rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1) - if not rev: - rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1) - if not rev: - rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1) - if not rev: - rev = data.getVar("SRCREV", d, 1) - if rev == "INVALID": - raise InvalidSRCREV("Please set SRCREV to a valid value") - if not rev: - return False - if rev == "SRCREVINACTION": - return True - return rev - - srcrev_internal_helper = staticmethod(srcrev_internal_helper) - - def localcount_internal_helper(ud, d): - """ - Return: - a) a locked localcount if specified - b) None otherwise - """ - - localcount = None - if 'name' in ud.parm: - pn = data.getVar("PN", d, 1) - localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1) - if not localcount: - localcount = data.getVar("LOCALCOUNT", d, 1) - return localcount - - localcount_internal_helper = staticmethod(localcount_internal_helper) - - def verify_md5sum(ud, got_sum): - """ - Verify the md5sum we wanted with the one we got - """ - wanted_sum = ud.parm.get('md5sum') - if not wanted_sum: - return True - - return wanted_sum == got_sum - verify_md5sum = staticmethod(verify_md5sum) - - def write_md5sum(url, ud, d): - md5data = 
bb.utils.md5_file(ud.localpath) - # verify the md5sum - if not Fetch.verify_md5sum(ud, md5data): - raise MD5SumError(url) - - md5out = file(ud.md5, 'w') - md5out.write(md5data) - md5out.close() - write_md5sum = staticmethod(write_md5sum) - - def latest_revision(self, url, ud, d): - """ - Look in the cache for the latest revision, if not present ask the SCM. - """ - if not hasattr(self, "_latest_revision"): - raise ParameterError - - pd = persist_data.persist(d) - revs = pd['BB_URI_HEADREVS'] - key = self.generate_revision_key(url, ud, d) - rev = revs[key] - if rev != None: - return str(rev) - - revs[key] = rev = self._latest_revision(url, ud, d) - return rev - - def sortable_revision(self, url, ud, d): - """ - - """ - if hasattr(self, "_sortable_revision"): - return self._sortable_revision(url, ud, d) - - pd = persist_data.persist(d) - localcounts = pd['BB_URI_LOCALCOUNT'] - key = self.generate_revision_key(url, ud, d) - - latest_rev = self._build_revision(url, ud, d) - last_rev = localcounts[key + '_rev'] - uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False - count = None - if uselocalcount: - count = Fetch.localcount_internal_helper(ud, d) - if count is None: - count = localcounts[key + '_count'] - - if last_rev == latest_rev: - return str(count + "+" + latest_rev) - - buildindex_provided = hasattr(self, "_sortable_buildindex") - if buildindex_provided: - count = self._sortable_buildindex(url, ud, d, latest_rev) - - if count is None: - count = "0" - elif uselocalcount or buildindex_provided: - count = str(count) - else: - count = str(int(count) + 1) - - localcounts[key + '_rev'] = latest_rev - localcounts[key + '_count'] = count - - return str(count + "+" + latest_rev) - - def generate_revision_key(self, url, ud, d): - key = self._revision_key(url, ud, d) - return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "") - -from . import cvs -from . import git -from . import local -from . import svn -from . import wget -from . import svk -from . import ssh -from . import perforce -from . import bzr -from . import hg -from . import osc -from . import repo - -methods.append(local.Local()) -methods.append(wget.Wget()) -methods.append(svn.Svn()) -methods.append(git.Git()) -methods.append(cvs.Cvs()) -methods.append(svk.Svk()) -methods.append(ssh.SSH()) -methods.append(perforce.Perforce()) -methods.append(bzr.Bzr()) -methods.append(hg.Hg()) -methods.append(osc.Osc()) -methods.append(repo.Repo()) diff --git a/bitbake/lib/bb/fetch/bzr.py b/bitbake/lib/bb/fetch/bzr.py deleted file mode 100644 index afaf79990..000000000 --- a/bitbake/lib/bb/fetch/bzr.py +++ /dev/null @@ -1,148 +0,0 @@ -""" -BitBake 'Fetch' implementation for bzr. - -""" - -# Copyright (C) 2007 Ross Burton -# Copyright (C) 2007 Richard Purdie -# -# Classes for obtaining upstream sources for the -# BitBake build tools. -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
- -import os -import sys -import logging -import bb -from bb import data -from bb.fetch import Fetch, FetchError, runfetchcmd, logger - -class Bzr(Fetch): - def supports(self, url, ud, d): - return ud.type in ['bzr'] - - def localpath (self, url, ud, d): - - # Create paths to bzr checkouts - relpath = self._strip_leading_slashes(ud.path) - ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath) - - revision = Fetch.srcrev_internal_helper(ud, d) - if revision is True: - ud.revision = self.latest_revision(url, ud, d) - elif revision: - ud.revision = revision - - if not ud.revision: - ud.revision = self.latest_revision(url, ud, d) - - ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d) - - return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) - - def _buildbzrcommand(self, ud, d, command): - """ - Build up an bzr commandline based on ud - command is "fetch", "update", "revno" - """ - - basecmd = data.expand('${FETCHCMD_bzr}', d) - - proto = ud.parm.get('proto', 'http') - - bzrroot = ud.host + ud.path - - options = [] - - if command is "revno": - bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) - else: - if ud.revision: - options.append("-r %s" % ud.revision) - - if command is "fetch": - bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) - elif command is "update": - bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options)) - else: - raise FetchError("Invalid bzr command %s" % command) - - return bzrcmd - - def go(self, loc, ud, d): - """Fetch url""" - - if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): - bzrcmd = self._buildbzrcommand(ud, d, "update") - logger.debug(1, "BZR Update %s", loc) - os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) - runfetchcmd(bzrcmd, d) - else: - bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) - bzrcmd = self._buildbzrcommand(ud, d, "fetch") - logger.debug(1, "BZR Checkout %s", loc) - bb.mkdirhier(ud.pkgdir) - os.chdir(ud.pkgdir) - logger.debug(1, "Running %s", bzrcmd) - runfetchcmd(bzrcmd, d) - - os.chdir(ud.pkgdir) - - scmdata = ud.parm.get("scmdata", "") - if scmdata == "keep": - tar_flags = "" - else: - tar_flags = "--exclude '.bzr' --exclude '.bzrtags'" - - # tar them up to a defined filename - try: - runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d) - except: - t, v, tb = sys.exc_info() - try: - os.unlink(ud.localpath) - except OSError: - pass - raise t, v, tb - - def supports_srcrev(self): - return True - - def _revision_key(self, url, ud, d): - """ - Return a unique key for the url - """ - return "bzr:" + ud.pkgdir - - def _latest_revision(self, url, ud, d): - """ - Return the latest upstream revision number - """ - logger.debug(2, "BZR fetcher hitting network for %s", url) - - output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True) - - return output.strip() - - def _sortable_revision(self, url, ud, d): - """ - Return a sortable revision number which in our case is the revision number - """ - - return self._build_revision(url, ud, d) - - def _build_revision(self, url, ud, d): - return ud.revision diff --git a/bitbake/lib/bb/fetch/cvs.py b/bitbake/lib/bb/fetch/cvs.py deleted file mode 100644 index 0edb794b0..000000000 --- a/bitbake/lib/bb/fetch/cvs.py +++ /dev/null @@ -1,172 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 
'Fetch' implementations - -Classes for obtaining upstream sources for the -BitBake build tools. - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -#Based on functions from the base bb module, Copyright 2003 Holger Schurig -# - -import os -import logging -import bb -from bb import data -from bb.fetch import Fetch, FetchError, MissingParameterError, logger - -class Cvs(Fetch): - """ - Class to fetch a module or modules from cvs repositories - """ - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with cvs. - """ - return ud.type in ['cvs'] - - def localpath(self, url, ud, d): - if not "module" in ud.parm: - raise MissingParameterError("cvs method needs a 'module' parameter") - ud.module = ud.parm["module"] - - ud.tag = ud.parm.get('tag', "") - - # Override the default date in certain cases - if 'date' in ud.parm: - ud.date = ud.parm['date'] - elif ud.tag: - ud.date = "" - - norecurse = '' - if 'norecurse' in ud.parm: - norecurse = '_norecurse' - - fullpath = '' - if 'fullpath' in ud.parm: - fullpath = '_fullpath' - - ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d) - - return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) - - def forcefetch(self, url, ud, d): - if (ud.date == "now"): - return True - return False - - def go(self, loc, ud, d): - - method = ud.parm.get('method', 'pserver') - localdir = ud.parm.get('localdir', ud.module) - cvs_port = ud.parm.get('port', '') - - cvs_rsh = None - if method == "ext": - if "rsh" in ud.parm: - cvs_rsh = ud.parm["rsh"] - - if method == "dir": - cvsroot = ud.path - else: - cvsroot = ":" + method - cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True) - if cvsproxyhost: - cvsroot += ";proxy=" + cvsproxyhost - cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True) - if cvsproxyport: - cvsroot += ";proxyport=" + cvsproxyport - cvsroot += ":" + ud.user - if ud.pswd: - cvsroot += ":" + ud.pswd - cvsroot += "@" + ud.host + ":" + cvs_port + ud.path - - options = [] - if 'norecurse' in ud.parm: - options.append("-l") - if ud.date: - # treat YYYYMMDDHHMM specially for CVS - if len(ud.date) == 12: - options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12])) - else: - options.append("-D \"%s UTC\"" % ud.date) - if ud.tag: - options.append("-r %s" % ud.tag) - - localdata = data.createCopy(d) - data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata) - data.update_data(localdata) - - data.setVar('CVSROOT', cvsroot, localdata) - data.setVar('CVSCOOPTS', " ".join(options), localdata) - data.setVar('CVSMODULE', ud.module, localdata) - cvscmd = data.getVar('FETCHCOMMAND', localdata, 1) - cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1) - - if cvs_rsh: - cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd) - cvsupdatecmd = "CVS_RSH=\"%s\" 
%s" % (cvs_rsh, cvsupdatecmd) - - # create module directory - logger.debug(2, "Fetch: checking for module directory") - pkg = data.expand('${PN}', d) - pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg) - moddir = os.path.join(pkgdir, localdir) - if os.access(os.path.join(moddir, 'CVS'), os.R_OK): - logger.info("Update " + loc) - # update sources there - os.chdir(moddir) - myret = os.system(cvsupdatecmd) - else: - logger.info("Fetch " + loc) - # check out sources there - bb.mkdirhier(pkgdir) - os.chdir(pkgdir) - logger.debug(1, "Running %s", cvscmd) - myret = os.system(cvscmd) - - if myret != 0 or not os.access(moddir, os.R_OK): - try: - os.rmdir(moddir) - except OSError: - pass - raise FetchError(ud.module) - - scmdata = ud.parm.get("scmdata", "") - if scmdata == "keep": - tar_flags = "" - else: - tar_flags = "--exclude 'CVS'" - - # tar them up to a defined filename - if 'fullpath' in ud.parm: - os.chdir(pkgdir) - myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)) - else: - os.chdir(moddir) - os.chdir('..') - myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))) - - if myret != 0: - try: - os.unlink(ud.localpath) - except OSError: - pass - raise FetchError(ud.module) diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py deleted file mode 100644 index b37a09743..000000000 --- a/bitbake/lib/bb/fetch/git.py +++ /dev/null @@ -1,339 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' git implementation - -""" - -#Copyright (C) 2005 Richard Purdie -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -import os -import bb -import bb.persist_data -from bb import data -from bb.fetch import Fetch -from bb.fetch import runfetchcmd -from bb.fetch import logger - -class Git(Fetch): - """Class to fetch a module or modules from git repositories""" - def init(self, d): - # - # Only enable _sortable revision if the key is set - # - if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True): - self._sortable_buildindex = self._sortable_buildindex_disabled - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with git. 
- """ - return ud.type in ['git'] - - def localpath(self, url, ud, d): - - if 'protocol' in ud.parm: - ud.proto = ud.parm['protocol'] - elif not ud.host: - ud.proto = 'file' - else: - ud.proto = "rsync" - - ud.branch = ud.parm.get("branch", "master") - - gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.')) - ud.mirrortarball = 'git_%s.tar.gz' % (gitsrcname) - ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname) - - tag = Fetch.srcrev_internal_helper(ud, d) - if tag is True: - ud.tag = self.latest_revision(url, ud, d) - elif tag: - ud.tag = tag - - if not ud.tag or ud.tag == "master": - ud.tag = self.latest_revision(url, ud, d) - - subdir = ud.parm.get("subpath", "") - if subdir != "": - if subdir.endswith("/"): - subdir = subdir[:-1] - subdirpath = os.path.join(ud.path, subdir); - else: - subdirpath = ud.path; - - if 'fullclone' in ud.parm: - ud.localfile = ud.mirrortarball - else: - ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, subdirpath.replace('/', '.'), ud.tag), d) - - ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git" - - if 'noclone' in ud.parm: - ud.localfile = None - return None - - return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) - - def forcefetch(self, url, ud, d): - if 'fullclone' in ud.parm: - return True - if 'noclone' in ud.parm: - return False - if os.path.exists(ud.localpath): - return False - if not self._contains_ref(ud.tag, d): - return True - return False - - def try_premirror(self, u, ud, d): - if 'noclone' in ud.parm: - return False - if os.path.exists(ud.clonedir): - return False - if os.path.exists(ud.localpath): - return False - - return True - - def go(self, loc, ud, d): - """Fetch url""" - - if ud.user: - username = ud.user + '@' - else: - username = "" - - repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball) - - - coname = '%s' % (ud.tag) - codir = os.path.join(ud.clonedir, coname) - - # If we have no existing clone and no mirror tarball, try and obtain one - if not os.path.exists(ud.clonedir) and not os.path.exists(repofile): - try: - Fetch.try_mirrors(ud.mirrortarball) - except: - pass - - # If the checkout doesn't exist and the mirror tarball does, extract it - if not os.path.exists(ud.clonedir) and os.path.exists(repofile): - bb.mkdirhier(ud.clonedir) - os.chdir(ud.clonedir) - runfetchcmd("tar -xzf %s" % (repofile), d) - - # If the repo still doesn't exist, fallback to cloning it - if not os.path.exists(ud.clonedir): - runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d) - - os.chdir(ud.clonedir) - # Update the checkout if needed - if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm: - # Remove all but the .git directory - runfetchcmd("rm * -Rf", d) - if 'fullclone' in ud.parm: - runfetchcmd("%s fetch --all" % (ud.basecmd), d) - else: - runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d) - runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d) - runfetchcmd("%s prune-packed" % ud.basecmd, d) - runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d) - - # Generate a mirror tarball if needed - os.chdir(ud.clonedir) - mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) - if mirror_tarballs != "0" or 'fullclone' in ud.parm: - logger.info("Creating tarball of git repository") - runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d) - - if 'fullclone' in ud.parm: - 
return - - if os.path.exists(codir): - bb.utils.prunedir(codir) - - subdir = ud.parm.get("subpath", "") - if subdir != "": - if subdir.endswith("/"): - subdirbase = os.path.basename(subdir[:-1]) - else: - subdirbase = os.path.basename(subdir) - else: - subdirbase = "" - - if subdir != "": - readpathspec = ":%s" % (subdir) - codir = os.path.join(codir, "git") - coprefix = os.path.join(codir, subdirbase, "") - else: - readpathspec = "" - coprefix = os.path.join(codir, "git", "") - - scmdata = ud.parm.get("scmdata", "") - if scmdata == "keep": - runfetchcmd("%s clone -n %s %s" % (ud.basecmd, ud.clonedir, coprefix), d) - os.chdir(coprefix) - runfetchcmd("%s checkout -q -f %s%s" % (ud.basecmd, ud.tag, readpathspec), d) - else: - bb.mkdirhier(codir) - os.chdir(ud.clonedir) - runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d) - runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d) - - os.chdir(codir) - logger.info("Creating tarball of git checkout") - runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d) - - os.chdir(ud.clonedir) - bb.utils.prunedir(codir) - - def supports_srcrev(self): - return True - - def _contains_ref(self, tag, d): - basecmd = data.getVar("FETCHCMD_git", d, True) or "git" - output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True) - return output.split()[0] != "0" - - def _revision_key(self, url, ud, d, branch=False): - """ - Return a unique key for the url - """ - key = 'git:' + ud.host + ud.path.replace('/', '.') - if branch: - return key + ud.branch - else: - return key - - def generate_revision_key(self, url, ud, d, branch=False): - key = self._revision_key(url, ud, d, branch) - return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "") - - def _latest_revision(self, url, ud, d): - """ - Compute the HEAD revision for the url - """ - if ud.user: - username = ud.user + '@' - else: - username = "" - - basecmd = data.getVar("FETCHCMD_git", d, True) or "git" - cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branch) - output = runfetchcmd(cmd, d, True) - if not output: - raise bb.fetch.FetchError("Fetch command %s gave empty output\n" % (cmd)) - return output.split()[0] - - def latest_revision(self, url, ud, d): - """ - Look in the cache for the latest revision, if not present ask the SCM. 
- """ - persisted = bb.persist_data.persist(d) - revs = persisted['BB_URI_HEADREVS'] - - key = self.generate_revision_key(url, ud, d, branch=True) - rev = revs[key] - if rev is None: - # Compatibility with old key format, no branch included - oldkey = self.generate_revision_key(url, ud, d, branch=False) - rev = revs[oldkey] - if rev is not None: - del revs[oldkey] - else: - rev = self._latest_revision(url, ud, d) - revs[key] = rev - - return str(rev) - - def sortable_revision(self, url, ud, d): - """ - - """ - pd = bb.persist_data.persist(d) - localcounts = pd['BB_URI_LOCALCOUNT'] - key = self.generate_revision_key(url, ud, d, branch=True) - oldkey = self.generate_revision_key(url, ud, d, branch=False) - - latest_rev = self._build_revision(url, ud, d) - last_rev = localcounts[key + '_rev'] - if last_rev is None: - last_rev = localcounts[oldkey + '_rev'] - if last_rev is not None: - del localcounts[oldkey + '_rev'] - localcounts[key + '_rev'] = last_rev - - uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False - count = None - if uselocalcount: - count = Fetch.localcount_internal_helper(ud, d) - if count is None: - count = localcounts[key + '_count'] - if count is None: - count = localcounts[oldkey + '_count'] - if count is not None: - del localcounts[oldkey + '_count'] - localcounts[key + '_count'] = count - - if last_rev == latest_rev: - return str(count + "+" + latest_rev) - - buildindex_provided = hasattr(self, "_sortable_buildindex") - if buildindex_provided: - count = self._sortable_buildindex(url, ud, d, latest_rev) - if count is None: - count = "0" - elif uselocalcount or buildindex_provided: - count = str(count) - else: - count = str(int(count) + 1) - - localcounts[key + '_rev'] = latest_rev - localcounts[key + '_count'] = count - - return str(count + "+" + latest_rev) - - def _build_revision(self, url, ud, d): - return ud.tag - - def _sortable_buildindex_disabled(self, url, ud, d, rev): - """ - Return a suitable buildindex for the revision specified. This is done by counting revisions - using "git rev-list" which may or may not work in different circumstances. - """ - - cwd = os.getcwd() - - # Check if we have the rev already - - if not os.path.exists(ud.clonedir): - print("no repo") - self.go(None, ud, d) - if not os.path.exists(ud.clonedir): - logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir) - return None - - - os.chdir(ud.clonedir) - if not self._contains_ref(rev, d): - self.go(None, ud, d) - - output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True) - os.chdir(cwd) - - buildindex = "%s" % output.split()[0] - logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev) - return buildindex diff --git a/bitbake/lib/bb/fetch/hg.py b/bitbake/lib/bb/fetch/hg.py deleted file mode 100644 index 3c649a6ad..000000000 --- a/bitbake/lib/bb/fetch/hg.py +++ /dev/null @@ -1,180 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementation for mercurial DRCS (hg). - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# Copyright (C) 2004 Marcin Juszkiewicz -# Copyright (C) 2007 Robert Schuster -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -import os -import sys -import logging -import bb -from bb import data -from bb.fetch import Fetch -from bb.fetch import FetchError -from bb.fetch import MissingParameterError -from bb.fetch import runfetchcmd -from bb.fetch import logger - -class Hg(Fetch): - """Class to fetch from mercurial repositories""" - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with mercurial. - """ - return ud.type in ['hg'] - - def forcefetch(self, url, ud, d): - revTag = ud.parm.get('rev', 'tip') - return revTag == "tip" - - def localpath(self, url, ud, d): - if not "module" in ud.parm: - raise MissingParameterError("hg method needs a 'module' parameter") - - ud.module = ud.parm["module"] - - # Create paths to mercurial checkouts - relpath = self._strip_leading_slashes(ud.path) - ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath) - ud.moddir = os.path.join(ud.pkgdir, ud.module) - - if 'rev' in ud.parm: - ud.revision = ud.parm['rev'] - else: - tag = Fetch.srcrev_internal_helper(ud, d) - if tag is True: - ud.revision = self.latest_revision(url, ud, d) - elif tag: - ud.revision = tag - else: - ud.revision = self.latest_revision(url, ud, d) - - ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) - - return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) - - def _buildhgcommand(self, ud, d, command): - """ - Build up an hg commandline based on ud - command is "fetch", "update", "info" - """ - - basecmd = data.expand('${FETCHCMD_hg}', d) - - proto = ud.parm.get('proto', 'http') - - host = ud.host - if proto == "file": - host = "/" - ud.host = "localhost" - - if not ud.user: - hgroot = host + ud.path - else: - hgroot = ud.user + "@" + host + ud.path - - if command is "info": - return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module) - - options = []; - if ud.revision: - options.append("-r %s" % ud.revision) - - if command is "fetch": - cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module) - elif command is "pull": - # do not pass options list; limiting pull to rev causes the local - # repo not to contain it and immediately following "update" command - # will crash - cmd = "%s pull" % (basecmd) - elif command is "update": - cmd = "%s update -C %s" % (basecmd, " ".join(options)) - else: - raise FetchError("Invalid hg command %s" % command) - - return cmd - - def go(self, loc, ud, d): - """Fetch url""" - - logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") - - if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): - updatecmd = self._buildhgcommand(ud, d, "pull") - logger.info("Update " + loc) - # update sources there - os.chdir(ud.moddir) - logger.debug(1, "Running %s", updatecmd) - runfetchcmd(updatecmd, d) - - else: - fetchcmd = self._buildhgcommand(ud, d, "fetch") - logger.info("Fetch " + loc) - # check out sources there - bb.mkdirhier(ud.pkgdir) 
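# [Editor's note] The `command is "fetch"` / `command is "info"` tests in
# _buildhgcommand() below (and likewise in the osc and svn fetchers) compare
# object identity, not string equality; they only pass because CPython
# interns short string literals. The robust spelling uses `==`:
#
#     def dispatch(command):
#         if command == "fetch":         # not: command is "fetch"
#             return "clone"
#         elif command == "update":
#             return "update -C"
#         raise ValueError("Invalid hg command %s" % command)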
- os.chdir(ud.pkgdir) - logger.debug(1, "Running %s", fetchcmd) - runfetchcmd(fetchcmd, d) - - # Even when we clone (fetch), we still need to update as hg's clone - # won't checkout the specified revision if its on a branch - updatecmd = self._buildhgcommand(ud, d, "update") - os.chdir(ud.moddir) - logger.debug(1, "Running %s", updatecmd) - runfetchcmd(updatecmd, d) - - scmdata = ud.parm.get("scmdata", "") - if scmdata == "keep": - tar_flags = "" - else: - tar_flags = "--exclude '.hg' --exclude '.hgrags'" - - os.chdir(ud.pkgdir) - try: - runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d) - except: - t, v, tb = sys.exc_info() - try: - os.unlink(ud.localpath) - except OSError: - pass - raise t, v, tb - - def supports_srcrev(self): - return True - - def _latest_revision(self, url, ud, d): - """ - Compute tip revision for the url - """ - output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d) - return output.strip() - - def _build_revision(self, url, ud, d): - return ud.revision - - def _revision_key(self, url, ud, d): - """ - Return a unique key for the url - """ - return "hg:" + ud.moddir diff --git a/bitbake/lib/bb/fetch/local.py b/bitbake/lib/bb/fetch/local.py deleted file mode 100644 index 6aa9e4576..000000000 --- a/bitbake/lib/bb/fetch/local.py +++ /dev/null @@ -1,73 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementations - -Classes for obtaining upstream sources for the -BitBake build tools. - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -import os -import bb -import bb.utils -from bb import data -from bb.fetch import Fetch - -class Local(Fetch): - def supports(self, url, urldata, d): - """ - Check to see if a given url represents a local fetch. - """ - return urldata.type in ['file'] - - def localpath(self, url, urldata, d): - """ - Return the local filename of a given url assuming a successful fetch. - """ - path = url.split("://")[1] - path = path.split(";")[0] - newpath = path - if path[0] != "/": - filespath = data.getVar('FILESPATH', d, 1) - if filespath: - newpath = bb.utils.which(filespath, path) - if not newpath: - filesdir = data.getVar('FILESDIR', d, 1) - if filesdir: - newpath = os.path.join(filesdir, path) - # We don't set localfile as for this fetcher the file is already local! - return newpath - - def go(self, url, urldata, d): - """Fetch urls (no-op for Local method)""" - # no need to fetch local files, we'll deal with them in place. 
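# [Editor's note] Local.localpath() below resolves relative file:// paths
# against the colon-separated FILESPATH list via bb.utils.which; the lookup
# reduces to roughly this (standalone sketch, not the bb.utils code):
#
#     import os
#
#     def which(filespath, item):
#         for directory in filespath.split(":"):
#             candidate = os.path.join(directory, item)
#             if os.path.exists(candidate):
#                 return candidate       # first hit on the search path wins
#         return ""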
- return 1 - - def checkstatus(self, url, urldata, d): - """ - Check the status of the url - """ - if urldata.localpath.find("*") != -1: - logger.info("URL %s looks like a glob and was therefore not checked.", url) - return True - if os.path.exists(urldata.localpath): - return True - return False diff --git a/bitbake/lib/bb/fetch/osc.py b/bitbake/lib/bb/fetch/osc.py deleted file mode 100644 index 8e0423d76..000000000 --- a/bitbake/lib/bb/fetch/osc.py +++ /dev/null @@ -1,143 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -Bitbake "Fetch" implementation for osc (Opensuse build service client). -Based on the svn "Fetch" implementation. - -""" - -import os -import sys -import logging -import bb -from bb import data -from bb import utils -from bb.fetch import Fetch -from bb.fetch import FetchError -from bb.fetch import MissingParameterError -from bb.fetch import runfetchcmd - -class Osc(Fetch): - """Class to fetch a module or modules from Opensuse build server - repositories.""" - - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with osc. - """ - return ud.type in ['osc'] - - def localpath(self, url, ud, d): - if not "module" in ud.parm: - raise MissingParameterError("osc method needs a 'module' parameter.") - - ud.module = ud.parm["module"] - - # Create paths to osc checkouts - relpath = self._strip_leading_slashes(ud.path) - ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host) - ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module) - - if 'rev' in ud.parm: - ud.revision = ud.parm['rev'] - else: - pv = data.getVar("PV", d, 0) - rev = Fetch.srcrev_internal_helper(ud, d) - if rev and rev != True: - ud.revision = rev - else: - ud.revision = "" - - ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d) - - return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) - - def _buildosccommand(self, ud, d, command): - """ - Build up an ocs commandline based on ud - command is "fetch", "update", "info" - """ - - basecmd = data.expand('${FETCHCMD_osc}', d) - - proto = ud.parm.get('proto', 'ocs') - - options = [] - - config = "-c %s" % self.generate_config(ud, d) - - if ud.revision: - options.append("-r %s" % ud.revision) - - coroot = self._strip_leading_slashes(ud.path) - - if command is "fetch": - osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options)) - elif command is "update": - osccmd = "%s %s up %s" % (basecmd, config, " ".join(options)) - else: - raise FetchError("Invalid osc command %s" % command) - - return osccmd - - def go(self, loc, ud, d): - """ - Fetch url - """ - - logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") - - if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK): - oscupdatecmd = self._buildosccommand(ud, d, "update") - logger.info("Update "+ loc) - # update sources there - os.chdir(ud.moddir) - logger.debug(1, "Running %s", oscupdatecmd) - runfetchcmd(oscupdatecmd, d) - else: - oscfetchcmd = self._buildosccommand(ud, d, "fetch") - logger.info("Fetch " + loc) - # check out sources there - bb.mkdirhier(ud.pkgdir) - os.chdir(ud.pkgdir) - logger.debug(1, "Running %s", oscfetchcmd) - runfetchcmd(oscfetchcmd, d) - - os.chdir(os.path.join(ud.pkgdir + ud.path)) - # tar them up to a defined filename - try: - runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) - except: - t, v, tb = sys.exc_info() - try: - 
os.unlink(ud.localpath) - except OSError: - pass - raise t, v, tb - - def supports_srcrev(self): - return False - - def generate_config(self, ud, d): - """ - Generate a .oscrc to be used for this run. - """ - - config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc") - bb.utils.remove(config_path) - - f = open(config_path, 'w') - f.write("[general]\n") - f.write("apisrv = %s\n" % ud.host) - f.write("scheme = http\n") - f.write("su-wrapper = su -c\n") - f.write("build-root = %s\n" % data.expand('${WORKDIR}', d)) - f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n") - f.write("extra-pkgs = gzip\n") - f.write("\n") - f.write("[%s]\n" % ud.host) - f.write("user = %s\n" % ud.parm["user"]) - f.write("pass = %s\n" % ud.parm["pswd"]) - f.close() - - return config_path diff --git a/bitbake/lib/bb/fetch/perforce.py b/bitbake/lib/bb/fetch/perforce.py deleted file mode 100644 index 222ed7eaa..000000000 --- a/bitbake/lib/bb/fetch/perforce.py +++ /dev/null @@ -1,206 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementations - -Classes for obtaining upstream sources for the -BitBake build tools. - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
-# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -from future_builtins import zip -import os -import logging -import bb -from bb import data -from bb.fetch import Fetch -from bb.fetch import FetchError -from bb.fetch import logger - -class Perforce(Fetch): - def supports(self, url, ud, d): - return ud.type in ['p4'] - - def doparse(url, d): - parm = {} - path = url.split("://")[1] - delim = path.find("@"); - if delim != -1: - (user, pswd, host, port) = path.split('@')[0].split(":") - path = path.split('@')[1] - else: - (host, port) = data.getVar('P4PORT', d).split(':') - user = "" - pswd = "" - - if path.find(";") != -1: - keys=[] - values=[] - plist = path.split(';') - for item in plist: - if item.count('='): - (key, value) = item.split('=') - keys.append(key) - values.append(value) - - parm = dict(zip(keys, values)) - path = "//" + path.split(';')[0] - host += ":%s" % (port) - parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm) - - return host, path, user, pswd, parm - doparse = staticmethod(doparse) - - def getcset(d, depot, host, user, pswd, parm): - p4opt = "" - if "cset" in parm: - return parm["cset"]; - if user: - p4opt += " -u %s" % (user) - if pswd: - p4opt += " -P %s" % (pswd) - if host: - p4opt += " -p %s" % (host) - - p4date = data.getVar("P4DATE", d, 1) - if "revision" in parm: - depot += "#%s" % (parm["revision"]) - elif "label" in parm: - depot += "@%s" % (parm["label"]) - elif p4date: - depot += "@%s" % (p4date) - - p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1) - logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot) - p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot)) - cset = p4file.readline().strip() - logger.debug(1, "READ %s", cset) - if not cset: - return -1 - - return cset.split(' ')[1] - getcset = staticmethod(getcset) - - def localpath(self, url, ud, d): - - (host, path, user, pswd, parm) = Perforce.doparse(url, d) - - # If a label is specified, we use that as our filename - - if "label" in parm: - ud.localfile = "%s.tar.gz" % (parm["label"]) - return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile) - - base = path - which = path.find('/...') - if which != -1: - base = path[:which] - - base = self._strip_leading_slashes(base) - - cset = Perforce.getcset(d, path, host, user, pswd, parm) - - ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d) - - return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile) - - def go(self, loc, ud, d): - """ - Fetch urls - """ - - (host, depot, user, pswd, parm) = Perforce.doparse(loc, d) - - if depot.find('/...') != -1: - path = depot[:depot.find('/...')] - else: - path = depot - - module = parm.get('module', os.path.basename(path)) - - localdata = data.createCopy(d) - data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata) - data.update_data(localdata) - - # Get the p4 command - p4opt = "" - if user: - p4opt += " -u %s" % (user) - - if pswd: - p4opt += " -P %s" % (pswd) - - if host: - p4opt += " -p %s" % (host) - - p4cmd = data.getVar('FETCHCOMMAND', localdata, 1) - - # create temp directory - logger.debug(2, "Fetch: creating temporary directory") - bb.mkdirhier(data.expand('${WORKDIR}', localdata)) - data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata) - tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") - tmpfile = tmppipe.readline().strip() - if not tmpfile: - logger.error("Fetch: unable to create temporary directory.. 
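# [Editor's note] Perforce.doparse() below hand-parses URLs of the shape
# "p4://user:pswd@host:port/path;key=value;...". An equivalent standalone
# parse, with the indentation ambiguities of the hunk resolved one plausible
# way (illustrative only; the P4PORT fallback and cset lookup are omitted):
#
#     def parse_p4(url):
#         rest = url.split("://", 1)[1]
#         user = pswd = host = port = ""
#         if "@" in rest:
#             user, pswd, host, port = rest.split("@", 1)[0].split(":")
#             rest = rest.split("@", 1)[1]
#         parm = {}
#         if ";" in rest:
#             rest, params = rest.split(";", 1)
#             parm = dict(p.split("=", 1) for p in params.split(";") if "=" in p)
#         return host + ":" + port, "//" + rest, user, pswd, parm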
make sure 'mktemp' is in the PATH.") - raise FetchError(module) - - if "label" in parm: - depot = "%s@%s" % (depot, parm["label"]) - else: - cset = Perforce.getcset(d, depot, host, user, pswd, parm) - depot = "%s@%s" % (depot, cset) - - os.chdir(tmpfile) - logger.info("Fetch " + loc) - logger.info("%s%s files %s", p4cmd, p4opt, depot) - p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot)) - - if not p4file: - logger.error("Fetch: unable to get the P4 files from %s", depot) - raise FetchError(module) - - count = 0 - - for file in p4file: - list = file.split() - - if list[2] == "delete": - continue - - dest = list[0][len(path)+1:] - where = dest.find("#") - - os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0])) - count = count + 1 - - if count == 0: - logger.error("Fetch: No files gathered from the P4 fetch") - raise FetchError(module) - - myret = os.system("tar -czf %s %s" % (ud.localpath, module)) - if myret != 0: - try: - os.unlink(ud.localpath) - except OSError: - pass - raise FetchError(module) - # cleanup - bb.utils.prunedir(tmpfile) diff --git a/bitbake/lib/bb/fetch/repo.py b/bitbake/lib/bb/fetch/repo.py deleted file mode 100644 index 03642e7a0..000000000 --- a/bitbake/lib/bb/fetch/repo.py +++ /dev/null @@ -1,98 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake "Fetch" repo (git) implementation - -""" - -# Copyright (C) 2009 Tom Rini -# -# Based on git.py which is: -#Copyright (C) 2005 Richard Purdie -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -import os -import bb -from bb import data -from bb.fetch import Fetch -from bb.fetch import runfetchcmd - -class Repo(Fetch): - """Class to fetch a module or modules from repo (git) repositories""" - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with repo. - """ - return ud.type in ["repo"] - - def localpath(self, url, ud, d): - """ - We don"t care about the git rev of the manifests repository, but - we do care about the manifest to use. The default is "default". - We also care about the branch or tag to be used. The default is - "master". - """ - - ud.proto = ud.parm.get('protocol', 'git') - ud.branch = ud.parm.get('branch', 'master') - ud.manifest = ud.parm.get('manifest', 'default.xml') - if not ud.manifest.endswith('.xml'): - ud.manifest += '.xml' - - ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d) - - return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) - - def go(self, loc, ud, d): - """Fetch url""" - - if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK): - logger.debug(1, "%s already exists (or was stashed). 
Skipping repo init / sync.", ud.localpath) - return - - gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", ".")) - repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo") - codir = os.path.join(repodir, gitsrcname, ud.manifest) - - if ud.user: - username = ud.user + "@" - else: - username = "" - - bb.mkdirhier(os.path.join(codir, "repo")) - os.chdir(os.path.join(codir, "repo")) - if not os.path.exists(os.path.join(codir, "repo", ".repo")): - runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d) - - runfetchcmd("repo sync", d) - os.chdir(codir) - - scmdata = ud.parm.get("scmdata", "") - if scmdata == "keep": - tar_flags = "" - else: - tar_flags = "--exclude '.repo' --exclude '.git'" - - # Create a cache - runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d) - - def supports_srcrev(self): - return False - - def _build_revision(self, url, ud, d): - return ud.manifest - - def _want_sortable_revision(self, url, ud, d): - return False diff --git a/bitbake/lib/bb/fetch/ssh.py b/bitbake/lib/bb/fetch/ssh.py deleted file mode 100644 index 86c76f4e4..000000000 --- a/bitbake/lib/bb/fetch/ssh.py +++ /dev/null @@ -1,118 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -''' -BitBake 'Fetch' implementations - -This implementation is for Secure Shell (SSH), and attempts to comply with the -IETF secsh internet draft: - http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/ - - Currently does not support the sftp parameters, as this uses scp - Also does not support the 'fingerprint' connection parameter. - -''' - -# Copyright (C) 2006 OpenedHand Ltd. -# -# -# Based in part on svk.py: -# Copyright (C) 2006 Holger Hans Peter Freyther -# Based on svn.py: -# Copyright (C) 2003, 2004 Chris Larson -# Based on functions from the base bb module: -# Copyright 2003 Holger Schurig -# -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -import re, os -from bb import data -from bb.fetch import Fetch -from bb.fetch import FetchError - - -__pattern__ = re.compile(r''' - \s* # Skip leading whitespace - ssh:// # scheme - ( # Optional username/password block - (?P\S+) # username - (:(?P\S+))? # colon followed by the password (optional) - )? - (?P(;[^;]+)*)? # connection parameters block (optional) - @ - (?P\S+?) # non-greedy match of the host - (:(?P[0-9]+))? # colon followed by the port (optional) - / - (?P[^;]+) # path on the remote system, may be absolute or relative, - # and may include the use of '~' to reference the remote home - # directory - (?P(;[^;]+)*)? 
# parameters block (optional) - $ -''', re.VERBOSE) - -class SSH(Fetch): - '''Class to fetch a module or modules via Secure Shell''' - - def supports(self, url, urldata, d): - return __pattern__.match(url) != None - - def localpath(self, url, urldata, d): - m = __pattern__.match(url) - path = m.group('path') - host = m.group('host') - lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path)) - return lpath - - def go(self, url, urldata, d): - dldir = data.getVar('DL_DIR', d, 1) - - m = __pattern__.match(url) - path = m.group('path') - host = m.group('host') - port = m.group('port') - user = m.group('user') - password = m.group('pass') - - ldir = os.path.join(dldir, host) - lpath = os.path.join(ldir, os.path.basename(path)) - - if not os.path.exists(ldir): - os.makedirs(ldir) - - if port: - port = '-P %s' % port - else: - port = '' - - if user: - fr = user - if password: - fr += ':%s' % password - fr += '@%s' % host - else: - fr = host - fr += ':%s' % path - - - import commands - cmd = 'scp -B -r %s %s %s/' % ( - port, - commands.mkarg(fr), - commands.mkarg(ldir) - ) - - (exitstatus, output) = commands.getstatusoutput(cmd) - if exitstatus != 0: - print(output) - raise FetchError('Unable to fetch %s' % url) diff --git a/bitbake/lib/bb/fetch/svk.py b/bitbake/lib/bb/fetch/svk.py deleted file mode 100644 index 595a9da25..000000000 --- a/bitbake/lib/bb/fetch/svk.py +++ /dev/null @@ -1,104 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementations - -This implementation is for svk. It is based on the svn implementation - -""" - -# Copyright (C) 2006 Holger Hans Peter Freyther -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -import os -import logging -import bb -from bb import data -from bb.fetch import Fetch -from bb.fetch import FetchError -from bb.fetch import MissingParameterError -from bb.fetch import logger - -class Svk(Fetch): - """Class to fetch a module or modules from svk repositories""" - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with svk. 
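# [Editor's note] SSH.go() below builds an scp command line and quotes its
# arguments with the long-deprecated commands.mkarg; pipes.quote is the
# safer Python 2 equivalent. A sketch of the command construction only:
#
#     import pipes
#
#     def scp_command(user, password, host, port, path, ldir):
#         fr = host
#         if user:
#             fr = user + ((":" + password) if password else "") + "@" + host
#         fr += ":" + path
#         portopt = ("-P %s" % port) if port else ""
#         return "scp -B -r %s %s %s/" % (portopt, pipes.quote(fr),
#                                         pipes.quote(ldir))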
- """ - return ud.type in ['svk'] - - def localpath(self, url, ud, d): - if not "module" in ud.parm: - raise MissingParameterError("svk method needs a 'module' parameter") - else: - ud.module = ud.parm["module"] - - ud.revision = ud.parm.get('rev', "") - - ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) - - return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) - - def forcefetch(self, url, ud, d): - return ud.date == "now" - - def go(self, loc, ud, d): - """Fetch urls""" - - svkroot = ud.host + ud.path - - svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module) - - if ud.revision: - svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module) - - # create temp directory - localdata = data.createCopy(d) - data.update_data(localdata) - logger.debug(2, "Fetch: creating temporary directory") - bb.mkdirhier(data.expand('${WORKDIR}', localdata)) - data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) - tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") - tmpfile = tmppipe.readline().strip() - if not tmpfile: - logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") - raise FetchError(ud.module) - - # check out sources there - os.chdir(tmpfile) - logger.info("Fetch " + loc) - logger.debug(1, "Running %s", svkcmd) - myret = os.system(svkcmd) - if myret != 0: - try: - os.rmdir(tmpfile) - except OSError: - pass - raise FetchError(ud.module) - - os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module))) - # tar them up to a defined filename - myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module))) - if myret != 0: - try: - os.unlink(ud.localpath) - except OSError: - pass - raise FetchError(ud.module) - # cleanup - bb.utils.prunedir(tmpfile) diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py deleted file mode 100644 index 8f053abf7..000000000 --- a/bitbake/lib/bb/fetch/svn.py +++ /dev/null @@ -1,204 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementation for svn. - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# Copyright (C) 2004 Marcin Juszkiewicz -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -import os -import sys -import logging -import bb -from bb import data -from bb.fetch import Fetch -from bb.fetch import FetchError -from bb.fetch import MissingParameterError -from bb.fetch import runfetchcmd -from bb.fetch import logger - -class Svn(Fetch): - """Class to fetch a module or modules from svn repositories""" - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with svn. 
- """ - return ud.type in ['svn'] - - def localpath(self, url, ud, d): - if not "module" in ud.parm: - raise MissingParameterError("svn method needs a 'module' parameter") - - ud.module = ud.parm["module"] - - # Create paths to svn checkouts - relpath = self._strip_leading_slashes(ud.path) - ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath) - ud.moddir = os.path.join(ud.pkgdir, ud.module) - - if 'rev' in ud.parm: - ud.date = "" - ud.revision = ud.parm['rev'] - elif 'date' in ud.date: - ud.date = ud.parm['date'] - ud.revision = "" - else: - # - # ***Nasty hack*** - # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE) - # Should warn people to switch to SRCREV here - # - pv = data.getVar("PV", d, 0) - if "DATE" in pv: - ud.revision = "" - else: - rev = Fetch.srcrev_internal_helper(ud, d) - if rev is True: - ud.revision = self.latest_revision(url, ud, d) - ud.date = "" - elif rev: - ud.revision = rev - ud.date = "" - else: - ud.revision = "" - - ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) - - return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) - - def _buildsvncommand(self, ud, d, command): - """ - Build up an svn commandline based on ud - command is "fetch", "update", "info" - """ - - basecmd = data.expand('${FETCHCMD_svn}', d) - - proto = ud.parm.get('proto', 'svn') - - svn_rsh = None - if proto == "svn+ssh" and "rsh" in ud.parm: - svn_rsh = ud.parm["rsh"] - - svnroot = ud.host + ud.path - - # either use the revision, or SRCDATE in braces, - options = [] - - if ud.user: - options.append("--username %s" % ud.user) - - if ud.pswd: - options.append("--password %s" % ud.pswd) - - if command is "info": - svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module) - else: - suffix = "" - if ud.revision: - options.append("-r %s" % ud.revision) - suffix = "@%s" % (ud.revision) - elif ud.date: - options.append("-r {%s}" % ud.date) - - if command is "fetch": - svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module) - elif command is "update": - svncmd = "%s update %s" % (basecmd, " ".join(options)) - else: - raise FetchError("Invalid svn command %s" % command) - - if svn_rsh: - svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd) - - return svncmd - - def go(self, loc, ud, d): - """Fetch url""" - - logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") - - if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK): - svnupdatecmd = self._buildsvncommand(ud, d, "update") - logger.info("Update " + loc) - # update sources there - os.chdir(ud.moddir) - logger.debug(1, "Running %s", svnupdatecmd) - runfetchcmd(svnupdatecmd, d) - else: - svnfetchcmd = self._buildsvncommand(ud, d, "fetch") - logger.info("Fetch " + loc) - # check out sources there - bb.mkdirhier(ud.pkgdir) - os.chdir(ud.pkgdir) - logger.debug(1, "Running %s", svnfetchcmd) - runfetchcmd(svnfetchcmd, d) - - scmdata = ud.parm.get("scmdata", "") - if scmdata == "keep": - tar_flags = "" - else: - tar_flags = "--exclude '.svn'" - - os.chdir(ud.pkgdir) - # tar them up to a defined filename - try: - runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d) - except: - t, v, tb = sys.exc_info() - try: - os.unlink(ud.localpath) - except OSError: - pass - raise t, v, tb - - def supports_srcrev(self): - return True - - def _revision_key(self, url, ud, d): - """ - Return a unique key for 
the url - """ - return "svn:" + ud.moddir - - def _latest_revision(self, url, ud, d): - """ - Return the latest upstream revision number - """ - logger.debug(2, "SVN fetcher hitting network for %s", url) - - output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True) - - revision = None - for line in output.splitlines(): - if "Last Changed Rev" in line: - revision = line.split(":")[1].strip() - - return revision - - def _sortable_revision(self, url, ud, d): - """ - Return a sortable revision number which in our case is the revision number - """ - - return self._build_revision(url, ud, d) - - def _build_revision(self, url, ud, d): - return ud.revision diff --git a/bitbake/lib/bb/fetch/wget.py b/bitbake/lib/bb/fetch/wget.py deleted file mode 100644 index 4d4bdfd49..000000000 --- a/bitbake/lib/bb/fetch/wget.py +++ /dev/null @@ -1,93 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementations - -Classes for obtaining upstream sources for the -BitBake build tools. - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -import os -import logging -import bb -import urllib -from bb import data -from bb.fetch import Fetch, FetchError, encodeurl, decodeurl, logger, runfetchcmd - -class Wget(Fetch): - """Class to fetch urls via 'wget'""" - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with wget. - """ - return ud.type in ['http', 'https', 'ftp'] - - def localpath(self, url, ud, d): - - url = encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}]) - ud.basename = os.path.basename(ud.path) - ud.localfile = data.expand(urllib.unquote(ud.basename), d) - - return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) - - def go(self, uri, ud, d, checkonly = False): - """Fetch urls""" - - def fetch_uri(uri, ud, d): - if checkonly: - fetchcmd = data.getVar("CHECKCOMMAND", d, 1) - elif os.path.exists(ud.localpath): - # file exists, but we didnt complete it.. trying again.. 
- fetchcmd = data.getVar("RESUMECOMMAND", d, 1) - else: - fetchcmd = data.getVar("FETCHCOMMAND", d, 1) - - uri = uri.split(";")[0] - uri_decoded = list(decodeurl(uri)) - uri_type = uri_decoded[0] - uri_host = uri_decoded[1] - - fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0]) - fetchcmd = fetchcmd.replace("${FILE}", ud.basename) - logger.info("fetch " + uri) - logger.debug(2, "executing " + fetchcmd) - runfetchcmd(fetchcmd, d) - - # Sanity check since wget can pretend it succeed when it didn't - # Also, this used to happen if sourceforge sent us to the mirror page - if not os.path.exists(ud.localpath) and not checkonly: - logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath) - return False - - return True - - localdata = data.createCopy(d) - data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata) - data.update_data(localdata) - - if fetch_uri(uri, ud, localdata): - return True - - raise FetchError(uri) - - - def checkstatus(self, uri, ud, d): - return self.go(uri, ud, d, True) diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py deleted file mode 100644 index 4e03fc988..000000000 --- a/bitbake/lib/bb/fetch2/__init__.py +++ /dev/null @@ -1,1074 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementations - -Classes for obtaining upstream sources for the -BitBake build tools. -""" - -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -from __future__ import absolute_import -from __future__ import print_function -import os, re -import logging -import bb -from bb import data -from bb import persist_data -from bb import utils - -__version__ = "2" - -logger = logging.getLogger("BitBake.Fetcher") - -class BBFetchException(Exception): - """Class all fetch exceptions inherit from""" - def __init__(self, message): - self.msg = message - Exception.__init__(self, message) - - def __str__(self): - return self.msg - -class MalformedUrl(BBFetchException): - """Exception raised when encountering an invalid url""" - def __init__(self, url): - msg = "The URL: '%s' is invalid and cannot be interpreted" % url - self.url = url - BBFetchException.__init__(self, msg) - self.args = url - -class FetchError(BBFetchException): - """General fetcher exception when something happens incorrectly""" - def __init__(self, message, url = None): - msg = "Fetcher failure for URL: '%s'. %s" % (url, message) - self.url = url - BBFetchException.__init__(self, msg) - self.args = (message, url) - -class UnpackError(BBFetchException): - """General fetcher exception when something happens incorrectly when unpacking""" - def __init__(self, message, url): - msg = "Unpack failure for URL: '%s'. 
%s" % (url, message) - self.url = url - BBFetchException.__init__(self, msg) - self.args = (message, url) - -class NoMethodError(BBFetchException): - """Exception raised when there is no method to obtain a supplied url or set of urls""" - def __init__(self, url): - msg = "Could not find a fetcher which supports the URL: '%s'" % url - self.url = url - BBFetchException.__init__(self, msg) - self.args = url - -class MissingParameterError(BBFetchException): - """Exception raised when a fetch method is missing a critical parameter in the url""" - def __init__(self, missing, url): - msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing) - self.url = url - self.missing = missing - BBFetchException.__init__(self, msg) - self.args = (missing, url) - -class ParameterError(BBFetchException): - """Exception raised when a url cannot be proccessed due to invalid parameters.""" - def __init__(self, message, url): - msg = "URL: '%s' has invalid parameters. %s" % (url, message) - self.url = url - BBFetchException.__init__(self, msg) - self.args = (message, url) - -class MD5SumError(BBFetchException): - """Exception raised when a MD5 checksum of a file does not match for a downloaded file""" - def __init__(self, path, wanted, got, url): - msg = "File: '%s' has md5 checksum %s when %s was expected (from URL: '%s')" % (path, got, wanted, url) - self.url = url - self.path = path - self.wanted = wanted - self.got = got - BBFetchException.__init__(self, msg) - self.args = (path, wanted, got, url) - -class SHA256SumError(MD5SumError): - """Exception raised when a SHA256 checksum of a file does not match for a downloaded file""" - def __init__(self, path, wanted, got, url): - msg = "File: '%s' has sha256 checksum %s when %s was expected (from URL: '%s')" % (path, got, wanted, url) - self.url = url - self.path = path - self.wanted = wanted - self.got = got - BBFetchException.__init__(self, msg) - self.args = (path, wanted, got, url) - -class NetworkAccess(BBFetchException): - """Exception raised when network access is disabled but it is required.""" - def __init__(self, url, cmd): - msg = "Network access disabled through BB_NO_NETWORK but access rquested with command %s (for url %s)" % (cmd, url) - self.url = url - self.cmd = cmd - BBFetchException.__init__(self, msg) - self.args = (url, cmd) - - -def decodeurl(url): - """Decodes an URL into the tokens (scheme, network location, path, - user, password, parameters). - """ - - m = re.compile('(?P[^:]*)://((?P.+)@)?(?P[^;]+)(;(?P.*))?').match(url) - if not m: - raise MalformedUrl(url) - - type = m.group('type') - location = m.group('location') - if not location: - raise MalformedUrl(url) - user = m.group('user') - parm = m.group('parm') - - locidx = location.find('/') - if locidx != -1 and type.lower() != 'file': - host = location[:locidx] - path = location[locidx:] - else: - host = "" - path = location - if user: - m = re.compile('(?P[^:]+)(:?(?P.*))').match(user) - if m: - user = m.group('user') - pswd = m.group('pswd') - else: - user = '' - pswd = '' - - p = {} - if parm: - for s in parm.split(';'): - s1, s2 = s.split('=') - p[s1] = s2 - - return (type, host, path, user, pswd, p) - -def encodeurl(decoded): - """Encodes a URL from tokens (scheme, network location, path, - user, password, parameters). 
- """ - - (type, host, path, user, pswd, p) = decoded - - if not path: - raise MissingParameterError('path', "encoded from the data %s" % str(decoded)) - if not type: - raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) - url = '%s://' % type - if user and type != "file": - url += "%s" % user - if pswd: - url += ":%s" % pswd - url += "@" - if host and type != "file": - url += "%s" % host - url += "%s" % path - if p: - for parm in p: - url += ";%s=%s" % (parm, p[parm]) - - return url - -def uri_replace(ud, uri_find, uri_replace, d): - if not ud.url or not uri_find or not uri_replace: - logger.debug(1, "uri_replace: passed an undefined value, not replacing") - uri_decoded = list(decodeurl(ud.url)) - uri_find_decoded = list(decodeurl(uri_find)) - uri_replace_decoded = list(decodeurl(uri_replace)) - result_decoded = ['', '', '', '', '', {}] - for i in uri_find_decoded: - loc = uri_find_decoded.index(i) - result_decoded[loc] = uri_decoded[loc] - if isinstance(i, basestring): - if (re.match(i, uri_decoded[loc])): - result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc]) - if uri_find_decoded.index(i) == 2: - if ud.mirrortarball: - result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(ud.mirrortarball)) - elif ud.localpath: - result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(ud.localpath)) - else: - return ud.url - return encodeurl(result_decoded) - -methods = [] -urldata_cache = {} -saved_headrevs = {} - -def fetcher_init(d): - """ - Called to initialize the fetchers once the configuration data is known. - Calls before this must not hit the cache. - """ - pd = persist_data.persist(d) - # When to drop SCM head revisions controlled by user policy - srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear" - if srcrev_policy == "cache": - logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) - elif srcrev_policy == "clear": - logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) - try: - bb.fetch2.saved_headrevs = pd['BB_URI_HEADREVS'].items() - except: - pass - del pd['BB_URI_HEADREVS'] - else: - raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) - - for m in methods: - if hasattr(m, "init"): - m.init(d) - -def fetcher_compare_revisions(d): - """ - Compare the revisions in the persistant cache with current values and - return true/false on whether they've changed. - """ - - pd = persist_data.persist(d) - data = pd['BB_URI_HEADREVS'].items() - data2 = bb.fetch2.saved_headrevs - - changed = False - for key in data: - if key not in data2 or data2[key] != data[key]: - logger.debug(1, "%s changed", key) - changed = True - return True - else: - logger.debug(2, "%s did not change", key) - return False - -def mirror_from_string(data): - return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ] - -def verify_checksum(u, ud, d): - """ - verify the MD5 and SHA256 checksum for downloaded src - - return value: - - True: checksum matched - - False: checksum unmatched - - if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decide the return value. 
- if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as - matched - """ - - if not ud.type in ["http", "https", "ftp", "ftps"]: - return - - md5data = bb.utils.md5_file(ud.localpath) - sha256data = bb.utils.sha256_file(ud.localpath) - - if (ud.md5_expected == None or ud.sha256_expected == None): - logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n' - 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"', - ud.localpath, ud.md5_name, md5data, - ud.sha256_name, sha256data) - if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1": - raise FetchError("No checksum specified for %s." % u, u) - return - - if ud.md5_expected != md5data: - raise MD5SumError(ud.localpath, ud.md5_expected, md5data, u) - - if ud.sha256_expected != sha256data: - raise SHA256SumError(ud.localpath, ud.sha256_expected, sha256data, u) - -def subprocess_setup(): - import signal - # Python installs a SIGPIPE handler by default. This is usually not what - # non-Python subprocesses expect. - # SIGPIPE errors are known issues with gzip/bash - signal.signal(signal.SIGPIPE, signal.SIG_DFL) - -def get_autorev(d): - # only not cache src rev in autorev case - if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache": - bb.data.setVar('__BB_DONT_CACHE', '1', d) - return "AUTOINC" - -def get_srcrev(d): - """ - Return the version string for the current package - (usually to be used as PV) - Most packages usually only have one SCM so we just pass on the call. - In the multi SCM case, we build a value based on SRCREV_FORMAT which must - have been set. - """ - - scms = [] - fetcher = Fetch(bb.data.getVar('SRC_URI', d, True).split(), d) - urldata = fetcher.ud - for u in urldata: - if urldata[u].method.supports_srcrev(): - scms.append(u) - - if len(scms) == 0: - raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") - - if len(scms) == 1 and len(urldata[scms[0]].names) == 1: - return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d, urldata[scms[0]].names[0]) - - # - # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT - # - format = bb.data.getVar('SRCREV_FORMAT', d, True) - if not format: - raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.") - - for scm in scms: - ud = urldata[scm] - for name in ud.names: - rev = ud.method.sortable_revision(scm, ud, d, name) - format = format.replace(name, rev) - - return format - -def localpath(url, d): - fetcher = bb.fetch2.Fetch([url], d) - return fetcher.localpath(url) - -def runfetchcmd(cmd, d, quiet = False, cleanup = []): - """ - Run cmd returning the command output - Raise an error if interrupted or cmd fails - Optionally echo command output to stdout - Optionally remove the files/directories listed in cleanup upon failure - """ - - # Need to export PATH as binary could be in metadata paths - # rather than host provided - # Also include some other variables. - # FIXME: Should really include all export varaiables? 
- exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST', - 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy', - 'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy', - 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME'] - - for var in exportvars: - val = data.getVar(var, d, True) - if val: - cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd) - - logger.debug(1, "Running %s", cmd) - - # redirect stderr to stdout - stdout_handle = os.popen(cmd + " 2>&1", "r") - output = "" - - while True: - line = stdout_handle.readline() - if not line: - break - if not quiet: - print(line, end=' ') - output += line - - status = stdout_handle.close() or 0 - signal = status >> 8 - exitstatus = status & 0xff - - if (signal or status != 0): - for f in cleanup: - try: - bb.utils.remove(f, True) - except OSError: - pass - - if signal: - raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output)) - elif status != 0: - raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output)) - - return output - -def check_network_access(d, info = "", url = None): - """ - log remote network access, and error if BB_NO_NETWORK is set - """ - if bb.data.getVar("BB_NO_NETWORK", d, True) == "1": - raise NetworkAccess(url, info) - else: - logger.debug(1, "Fetcher accessed the network with the command %s" % info) - -def try_mirrors(d, origud, mirrors, check = False): - """ - Try to use a mirrored version of the sources. - This method will be automatically called before the fetchers go. - - d Is a bb.data instance - uri is the original uri we're trying to download - mirrors is the list of mirrors we're going to try - """ - ld = d.createCopy() - for line in mirrors: - try: - (find, replace) = line - except ValueError: - continue - newuri = uri_replace(origud, find, replace, ld) - if newuri == origud.url: - continue - try: - ud = FetchData(newuri, ld) - ud.setup_localpath(ld) - - if check: - found = ud.method.checkstatus(newuri, ud, ld) - if found: - return found - continue - - if ud.method.need_update(newuri, ud, ld): - ud.method.download(newuri, ud, ld) - if hasattr(ud.method,"build_mirror_data"): - ud.method.build_mirror_data(newuri, ud, ld) - - if not ud.localpath or not os.path.exists(ud.localpath): - continue - - if ud.localpath == origud.localpath: - return ud.localpath - - # We may be obtaining a mirror tarball which needs further processing by the real fetcher - # If that tarball is a local file:// we need to provide a symlink to it - dldir = ld.getVar("DL_DIR", True) - if os.path.basename(ud.localpath) != os.path.basename(origud.localpath): - dest = os.path.join(dldir, os.path.basename(ud.localpath)) - if not os.path.exists(dest): - os.symlink(ud.localpath, dest) - return None - # Otherwise the result is a local file:// and we symlink to it - if not os.path.exists(origud.localpath): - os.symlink(ud.localpath, origud.localpath) - return ud.localpath - - except bb.fetch2.NetworkAccess: - raise - - except bb.fetch2.BBFetchException as e: - logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) - logger.debug(1, str(e)) - try: - if os.path.isfile(ud.localpath): - bb.utils.remove(ud.localpath) - except UnboundLocalError: - pass - continue - return None - -def srcrev_internal_helper(ud, d, name): - """ - Return: - a) a source revision if specified - b) latest revision if SRCREV="AUTOINC" - c) None if not specified - """ - - if 'rev' in ud.parm: - return ud.parm['rev'] - - if 'tag' in ud.parm: - return ud.parm['tag'] - - 
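# [Editor's note] runfetchcmd() above prefixes the shell command with export
# statements for the proxy/ssh variables listed. A reduced sketch that takes
# the values from the process environment instead of the datastore:
#
#     import os
#
#     def with_exports(cmd, names=("http_proxy", "https_proxy", "no_proxy")):
#         for var in names:
#             val = os.environ.get(var)
#             if val:
#                 cmd = 'export %s="%s"; %s' % (var, val, cmd)
#         return cmd
#
#     # with_exports("wget http://example.com/src.tar.gz")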
rev = None - pn = data.getVar("PN", d, True) - if name != '': - rev = data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, True) - if not rev: - rev = data.getVar("SRCREV_%s" % name, d, True) - if not rev: - rev = data.getVar("SRCREV_pn-%s" % pn, d, True) - if not rev: - rev = data.getVar("SRCREV", d, True) - if rev == "INVALID": - raise FetchError("Please set SRCREV to a valid value", ud.url) - if rev == "AUTOINC": - rev = ud.method.latest_revision(ud.url, ud, d, name) - - return rev - -class FetchData(object): - """ - A class which represents the fetcher state for a given URI. - """ - def __init__(self, url, d): - # localpath is the location of a downloaded result. If not set, the file is local. - self.donestamp = None - self.localfile = "" - self.localpath = None - self.lockfile = None - self.mirrortarball = None - (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d)) - self.date = self.getSRCDate(d) - self.url = url - if not self.user and "user" in self.parm: - self.user = self.parm["user"] - if not self.pswd and "pswd" in self.parm: - self.pswd = self.parm["pswd"] - self.setup = False - - if "name" in self.parm: - self.md5_name = "%s.md5sum" % self.parm["name"] - self.sha256_name = "%s.sha256sum" % self.parm["name"] - else: - self.md5_name = "md5sum" - self.sha256_name = "sha256sum" - self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d) - self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d) - - self.names = self.parm.get("name",'default').split(',') - - self.method = None - for m in methods: - if m.supports(url, self, d): - self.method = m - break - - if not self.method: - raise NoMethodError(url) - - if self.method.supports_srcrev(): - self.revisions = {} - for name in self.names: - self.revisions[name] = srcrev_internal_helper(self, d, name) - - # add compatibility code for non name specified case - if len(self.names) == 1: - self.revision = self.revisions[self.names[0]] - - if hasattr(self.method, "urldata_init"): - self.method.urldata_init(self, d) - - if "localpath" in self.parm: - # if user sets localpath for file, use it instead. - self.localpath = self.parm["localpath"] - self.basename = os.path.basename(self.localpath) - elif self.localfile: - self.localpath = self.method.localpath(self.url, self, d) - - if self.localfile and self.localpath: - # Note: These files should always be in DL_DIR whereas localpath may not be. - basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d) - self.donestamp = basepath + '.done' - self.lockfile = basepath + '.lock' - - def setup_localpath(self, d): - if not self.localpath: - self.localpath = self.method.localpath(self.url, self, d) - - def getSRCDate(self, d): - """ - Return the SRC Date for the component - - d the bb.data module - """ - if "srcdate" in self.parm: - return self.parm['srcdate'] - - pn = data.getVar("PN", d, True) - - if pn: - return data.getVar("SRCDATE_%s" % pn, d, True) or data.getVar("SRCDATE", d, True) or data.getVar("DATE", d, True) - - return data.getVar("SRCDATE", d, True) or data.getVar("DATE", d, True) - -class FetchMethod(object): - """Base class for 'fetch'ing data""" - - def __init__(self, urls = []): - self.urls = [] - - def supports(self, url, urldata, d): - """ - Check to see if this fetch class supports a given url. - """ - return 0 - - def localpath(self, url, urldata, d): - """ - Return the local filename of a given url assuming a successful fetch. 
- Can also setup variables in urldata for use in go (saving code duplication - and duplicate code execution) - """ - return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile) - - def _strip_leading_slashes(self, relpath): - """ - Remove leading slash as os.path.join can't cope - """ - while os.path.isabs(relpath): - relpath = relpath[1:] - return relpath - - def setUrls(self, urls): - self.__urls = urls - - def getUrls(self): - return self.__urls - - urls = property(getUrls, setUrls, None, "Urls property") - - def need_update(self, url, ud, d): - """ - Force a fetch, even if localpath exists? - """ - if os.path.exists(ud.localpath): - return False - return True - - def supports_srcrev(self): - """ - The fetcher supports auto source revisions (SRCREV) - """ - return False - - def download(self, url, urldata, d): - """ - Fetch urls - Assumes localpath was called first - """ - raise NoMethodError(url) - - def unpack(self, urldata, rootdir, data): - import subprocess - iterate = False - file = urldata.localpath - - try: - unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True) - except ValueError, exc: - bb.fatal("Invalid value for 'unpack' parameter for %s: %s" % - (file, urldata.parm.get('unpack'))) - - dots = file.split(".") - if dots[-1] in ['gz', 'bz2', 'Z']: - efile = os.path.join(bb.data.getVar('WORKDIR', data, True),os.path.basename('.'.join(dots[0:-1]))) - else: - efile = file - cmd = None - - if unpack: - if file.endswith('.tar'): - cmd = 'tar x --no-same-owner -f %s' % file - elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'): - cmd = 'tar xz --no-same-owner -f %s' % file - elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'): - cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file - elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'): - cmd = 'gzip -dc %s > %s' % (file, efile) - elif file.endswith('.bz2'): - cmd = 'bzip2 -dc %s > %s' % (file, efile) - elif file.endswith('.tar.xz'): - cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file - elif file.endswith('.xz'): - cmd = 'xz -dc %s > %s' % (file, efile) - elif file.endswith('.zip') or file.endswith('.jar'): - try: - dos = bb.utils.to_boolean(urldata.parm.get('dos'), False) - except ValueError, exc: - bb.fatal("Invalid value for 'dos' parameter for %s: %s" % - (file, urldata.parm.get('dos'))) - cmd = 'unzip -q -o' - if dos: - cmd = '%s -a' % cmd - cmd = "%s '%s'" % (cmd, file) - elif file.endswith('.src.rpm') or file.endswith('.srpm'): - if 'extract' in urldata.parm: - unpack_file = urldata.parm.get('extract') - cmd = 'rpm2cpio.sh %s | cpio -i %s' % (file, unpack_file) - iterate = True - iterate_file = unpack_file - else: - cmd = 'rpm2cpio.sh %s | cpio -i' % (file) - - if not unpack or not cmd: - # If file == dest, then avoid any copies, as we already put the file into dest! - dest = os.path.join(rootdir, os.path.basename(file)) - if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)): - if os.path.isdir(file): - filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, True)) - destdir = "." - if file[0:len(filesdir)] == filesdir: - destdir = file[len(filesdir):file.rfind('/')] - destdir = destdir.strip('/') - if len(destdir) < 1: - destdir = "." - elif not os.access("%s/%s" % (rootdir, destdir), os.F_OK): - os.makedirs("%s/%s" % (rootdir, destdir)) - cmd = 'cp -pPR %s %s/%s/' % (file, rootdir, destdir) - #cmd = 'tar -cf - -C "%d" -ps . 
| tar -xf - -C "%s/%s/"' % (file, rootdir, destdir) - else: - # The "destdir" handling was specifically done for FILESPATH - # items. So, only do so for file:// entries. - if urldata.type == "file" and urldata.path.find("/") != -1: - destdir = urldata.path.rsplit("/", 1)[0] - else: - destdir = "." - bb.mkdirhier("%s/%s" % (rootdir, destdir)) - cmd = 'cp %s %s/%s/' % (file, rootdir, destdir) - - if not cmd: - return - - # Change to subdir before executing command - save_cwd = os.getcwd(); - os.chdir(rootdir) - if 'subdir' in urldata.parm: - newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir'))) - bb.mkdirhier(newdir) - os.chdir(newdir) - - cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, True), cmd) - bb.note("Unpacking %s to %s/" % (file, os.getcwd())) - ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True) - - os.chdir(save_cwd) - - if ret != 0: - raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url) - - if iterate is True: - iterate_urldata = urldata - iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file) - self.unpack(urldata, rootdir, data) - - return - - def clean(self, urldata, d): - """ - Clean any existing full or partial download - """ - bb.utils.remove(urldata.localpath) - - def try_premirror(self, url, urldata, d): - """ - Should premirrors be used? - """ - return True - - def checkstatus(self, url, urldata, d): - """ - Check the status of a URL - Assumes localpath was called first - """ - logger.info("URL %s could not be checked for status since no method exists.", url) - return True - - def localcount_internal_helper(ud, d, name): - """ - Return: - a) a locked localcount if specified - b) None otherwise - """ - - localcount = None - if name != '': - pn = data.getVar("PN", d, True) - localcount = data.getVar("LOCALCOUNT_" + name, d, True) - if not localcount: - localcount = data.getVar("LOCALCOUNT", d, True) - return localcount - - localcount_internal_helper = staticmethod(localcount_internal_helper) - - def latest_revision(self, url, ud, d, name): - """ - Look in the cache for the latest revision, if not present ask the SCM. 
- """ - if not hasattr(self, "_latest_revision"): - raise ParameterError("The fetcher for this URL does not support _latest_revision", url) - - pd = persist_data.persist(d) - revs = pd['BB_URI_HEADREVS'] - key = self.generate_revision_key(url, ud, d, name) - rev = revs[key] - if rev != None: - return str(rev) - - revs[key] = rev = self._latest_revision(url, ud, d, name) - return rev - - def sortable_revision(self, url, ud, d, name): - """ - - """ - if hasattr(self, "_sortable_revision"): - return self._sortable_revision(url, ud, d) - - pd = persist_data.persist(d) - localcounts = pd['BB_URI_LOCALCOUNT'] - key = self.generate_revision_key(url, ud, d, name) - - latest_rev = self._build_revision(url, ud, d, name) - last_rev = localcounts[key + '_rev'] - uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False - count = None - if uselocalcount: - count = FetchMethod.localcount_internal_helper(ud, d, name) - if count is None: - count = localcounts[key + '_count'] or "0" - - if last_rev == latest_rev: - return str(count + "+" + latest_rev) - - buildindex_provided = hasattr(self, "_sortable_buildindex") - if buildindex_provided: - count = self._sortable_buildindex(url, ud, d, latest_rev) - - if count is None: - count = "0" - elif uselocalcount or buildindex_provided: - count = str(count) - else: - count = str(int(count) + 1) - - localcounts[key + '_rev'] = latest_rev - localcounts[key + '_count'] = count - - return str(count + "+" + latest_rev) - - def generate_revision_key(self, url, ud, d, name): - key = self._revision_key(url, ud, d, name) - return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "") - -class Fetch(object): - def __init__(self, urls, d, cache = True): - if len(urls) == 0: - urls = d.getVar("SRC_URI", True).split() - self.urls = urls - self.d = d - self.ud = {} - - fn = bb.data.getVar('FILE', d, True) - if cache and fn in urldata_cache: - self.ud = urldata_cache[fn] - - for url in urls: - if url not in self.ud: - self.ud[url] = FetchData(url, d) - - if cache: - urldata_cache[fn] = self.ud - - def localpath(self, url): - if url not in self.urls: - self.ud[url] = FetchData(url, self.d) - - self.ud[url].setup_localpath(self.d) - return bb.data.expand(self.ud[url].localpath, self.d) - - def localpaths(self): - """ - Return a list of the local filenames, assuming successful fetch - """ - local = [] - - for u in self.urls: - ud = self.ud[u] - ud.setup_localpath(self.d) - local.append(ud.localpath) - - return local - - def download(self, urls = []): - """ - Fetch all urls - """ - if len(urls) == 0: - urls = self.urls - - network = bb.data.getVar("BB_NO_NETWORK", self.d, True) - premirroronly = (bb.data.getVar("BB_FETCH_PREMIRRORONLY", self.d, True) == "1") - - for u in urls: - ud = self.ud[u] - ud.setup_localpath(self.d) - m = ud.method - localpath = "" - - if not ud.localfile: - continue - - lf = bb.utils.lockfile(ud.lockfile) - - try: - bb.data.setVar("BB_NO_NETWORK", network, self.d) - - if not m.need_update(u, ud, self.d): - localpath = ud.localpath - elif m.try_premirror(u, ud, self.d): - logger.debug(1, "Trying PREMIRRORS") - mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True)) - localpath = try_mirrors(self.d, ud, mirrors, False) - - if premirroronly: - bb.data.setVar("BB_NO_NETWORK", "1", self.d) - - if not localpath and m.need_update(u, ud, self.d): - try: - logger.debug(1, "Trying Upstream") - m.download(u, ud, self.d) - if hasattr(m, "build_mirror_data"): - m.build_mirror_data(u, ud, self.d) - localpath = ud.localpath - - except 
bb.fetch2.NetworkAccess: - raise - - except BBFetchException as e: - logger.debug(1, str(e)) - # Remove any incomplete fetch - if os.path.isfile(ud.localpath): - bb.utils.remove(ud.localpath) - logger.debug(1, "Trying MIRRORS") - mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True)) - localpath = try_mirrors (self.d, ud, mirrors) - - if not localpath or not os.path.exists(localpath): - raise FetchError("Unable to fetch URL %s from any source." % u, u) - - if os.path.exists(ud.donestamp): - # Touch the done stamp file to show active use of the download - try: - os.utime(ud.donestamp, None) - except: - # Errors aren't fatal here - pass - else: - # Only check the checksums if we've not seen this item before, then create the stamp - verify_checksum(u, ud, self.d) - open(ud.donestamp, 'w').close() - - finally: - bb.utils.unlockfile(lf) - - def checkstatus(self, urls = []): - """ - Check all urls exist upstream - """ - - if len(urls) == 0: - urls = self.urls - - for u in urls: - ud = self.ud[u] - ud.setup_localpath(self.d) - m = ud.method - logger.debug(1, "Testing URL %s", u) - # First try checking uri, u, from PREMIRRORS - mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True)) - ret = try_mirrors(self.d, ud, mirrors, True) - if not ret: - # Next try checking from the original uri, u - try: - ret = m.checkstatus(u, ud, self.d) - except: - # Finally, try checking uri, u, from MIRRORS - mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True)) - ret = try_mirrors (self.d, ud, mirrors, True) - - if not ret: - raise FetchError("URL %s doesn't work" % u, u) - - def unpack(self, root, urls = []): - """ - Check all urls exist upstream - """ - - if len(urls) == 0: - urls = self.urls - - for u in urls: - ud = self.ud[u] - ud.setup_localpath(self.d) - - if bb.data.expand(self.localpath, self.d) is None: - continue - - if ud.lockfile: - lf = bb.utils.lockfile(ud.lockfile) - - ud.method.unpack(ud, root, self.d) - - if ud.lockfile: - bb.utils.unlockfile(lf) - - def clean(self, urls = []): - """ - Clean files that the fetcher gets or places - """ - - if len(urls) == 0: - urls = self.urls - - for url in urls: - if url not in self.ud: - self.ud[url] = FetchData(url, d) - ud = self.ud[url] - ud.setup_localpath(self.d) - - if not ud.localfile or self.localpath is None: - continue - - if ud.lockfile: - lf = bb.utils.lockfile(ud.lockfile) - - ud.method.clean(ud, self.d) - if ud.donestamp: - bb.utils.remove(ud.donestamp) - - if ud.lockfile: - bb.utils.unlockfile(lf) - -from . import cvs -from . import git -from . import local -from . import svn -from . import wget -from . import svk -from . import ssh -from . import perforce -from . import bzr -from . import hg -from . import osc -from . import repo - -methods.append(local.Local()) -methods.append(wget.Wget()) -methods.append(svn.Svn()) -methods.append(git.Git()) -methods.append(cvs.Cvs()) -methods.append(svk.Svk()) -methods.append(ssh.SSH()) -methods.append(perforce.Perforce()) -methods.append(bzr.Bzr()) -methods.append(hg.Hg()) -methods.append(osc.Osc()) -methods.append(repo.Repo()) diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py deleted file mode 100644 index 454961eff..000000000 --- a/bitbake/lib/bb/fetch2/bzr.py +++ /dev/null @@ -1,141 +0,0 @@ -""" -BitBake 'Fetch' implementation for bzr. - -""" - -# Copyright (C) 2007 Ross Burton -# Copyright (C) 2007 Richard Purdie -# -# Classes for obtaining upstream sources for the -# BitBake build tools. 
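Stepping back to the methods registry that closed out fetch2/__init__.py above: an out-of-tree fetcher would plug into it the same way as the built-in ones. A hypothetical sketch (the Example class and the example:// scheme are invented names):

    from bb.fetch2 import FetchMethod, methods

    class Example(FetchMethod):
        def supports(self, url, ud, d):
            # claim URLs of the form example://host/path
            return ud.type in ['example']

        def download(self, url, ud, d):
            # a real fetcher would populate ud.localpath here
            raise NotImplementedError

    methods.append(Example())  # FetchData.__init__ scans this list in order

FetchData picks the first method whose supports() returns true, so registration order matters when schemes overlap.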
-# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -import os -import sys -import logging -import bb -from bb import data -from bb.fetch2 import FetchMethod -from bb.fetch2 import FetchError -from bb.fetch2 import runfetchcmd -from bb.fetch2 import logger - -class Bzr(FetchMethod): - def supports(self, url, ud, d): - return ud.type in ['bzr'] - - def urldata_init(self, ud, d): - """ - init bzr specific variable within url data - """ - # Create paths to bzr checkouts - relpath = self._strip_leading_slashes(ud.path) - ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath) - - if not ud.revision: - ud.revision = self.latest_revision(ud.url, ud, d) - - ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d) - - def _buildbzrcommand(self, ud, d, command): - """ - Build up an bzr commandline based on ud - command is "fetch", "update", "revno" - """ - - basecmd = data.expand('${FETCHCMD_bzr}', d) - - proto = ud.parm.get('proto', 'http') - - bzrroot = ud.host + ud.path - - options = [] - - if command is "revno": - bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) - else: - if ud.revision: - options.append("-r %s" % ud.revision) - - if command is "fetch": - bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) - elif command is "update": - bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options)) - else: - raise FetchError("Invalid bzr command %s" % command, ud.url) - - return bzrcmd - - def download(self, loc, ud, d): - """Fetch url""" - - if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): - bzrcmd = self._buildbzrcommand(ud, d, "update") - logger.debug(1, "BZR Update %s", loc) - bb.fetch2.check_network_access(d, bzrcmd, ud.url) - os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) - runfetchcmd(bzrcmd, d) - else: - bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) - bzrcmd = self._buildbzrcommand(ud, d, "fetch") - bb.fetch2.check_network_access(d, bzrcmd, ud.url) - logger.debug(1, "BZR Checkout %s", loc) - bb.mkdirhier(ud.pkgdir) - os.chdir(ud.pkgdir) - logger.debug(1, "Running %s", bzrcmd) - runfetchcmd(bzrcmd, d) - - os.chdir(ud.pkgdir) - - scmdata = ud.parm.get("scmdata", "") - if scmdata == "keep": - tar_flags = "" - else: - tar_flags = "--exclude '.bzr' --exclude '.bzrtags'" - - # tar them up to a defined filename - runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath]) - - def supports_srcrev(self): - return True - - def _revision_key(self, url, ud, d, name): - """ - Return a unique key for the url - """ - return "bzr:" + ud.pkgdir - - def _latest_revision(self, url, ud, d, name): - """ - Return the latest upstream revision number - """ - logger.debug(2, "BZR fetcher hitting network for %s", url) - - 
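One observation on the dispatch in _buildbzrcommand() above: comparing with identity (command is "revno") only works because CPython happens to reuse interned string literals; == would be the robust spelling. For a sense of the command it builds, with invented values:

    basecmd = "bzr"                              # assumed ${FETCHCMD_bzr} expansion
    proto = "http"                               # the default proto parameter
    bzrroot = "bzr.example.com/project/trunk"    # host + path, invented
    bzrcmd = "%s revno %s %s://%s" % (basecmd, "", proto, bzrroot)
    # -> "bzr revno  http://bzr.example.com/project/trunk"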
bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url) - - output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True) - - return output.strip() - - def _sortable_revision(self, url, ud, d): - """ - Return a sortable revision number which in our case is the revision number - """ - - return self._build_revision(url, ud, d) - - def _build_revision(self, url, ud, d): - return ud.revision diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py deleted file mode 100644 index 12d11e0d5..000000000 --- a/bitbake/lib/bb/fetch2/cvs.py +++ /dev/null @@ -1,181 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementations - -Classes for obtaining upstream sources for the -BitBake build tools. - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -#Based on functions from the base bb module, Copyright 2003 Holger Schurig -# - -import os -import logging -import bb -from bb import data -from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger -from bb.fetch2 import runfetchcmd - -class Cvs(FetchMethod): - """ - Class to fetch a module or modules from cvs repositories - """ - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with cvs. 
- """ - return ud.type in ['cvs'] - - def urldata_init(self, ud, d): - if not "module" in ud.parm: - raise MissingParameterError("module", ud.url) - ud.module = ud.parm["module"] - - ud.tag = ud.parm.get('tag', "") - - # Override the default date in certain cases - if 'date' in ud.parm: - ud.date = ud.parm['date'] - elif ud.tag: - ud.date = "" - - norecurse = '' - if 'norecurse' in ud.parm: - norecurse = '_norecurse' - - fullpath = '' - if 'fullpath' in ud.parm: - fullpath = '_fullpath' - - ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d) - - def need_update(self, url, ud, d): - if (ud.date == "now"): - return True - if not os.path.exists(ud.localpath): - return True - return False - - def download(self, loc, ud, d): - - method = ud.parm.get('method', 'pserver') - localdir = ud.parm.get('localdir', ud.module) - cvs_port = ud.parm.get('port', '') - - cvs_rsh = None - if method == "ext": - if "rsh" in ud.parm: - cvs_rsh = ud.parm["rsh"] - - if method == "dir": - cvsroot = ud.path - else: - cvsroot = ":" + method - cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True) - if cvsproxyhost: - cvsroot += ";proxy=" + cvsproxyhost - cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True) - if cvsproxyport: - cvsroot += ";proxyport=" + cvsproxyport - cvsroot += ":" + ud.user - if ud.pswd: - cvsroot += ":" + ud.pswd - cvsroot += "@" + ud.host + ":" + cvs_port + ud.path - - options = [] - if 'norecurse' in ud.parm: - options.append("-l") - if ud.date: - # treat YYYYMMDDHHMM specially for CVS - if len(ud.date) == 12: - options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12])) - else: - options.append("-D \"%s UTC\"" % ud.date) - if ud.tag: - options.append("-r %s" % ud.tag) - - localdata = data.createCopy(d) - data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata) - data.update_data(localdata) - - data.setVar('CVSROOT', cvsroot, localdata) - data.setVar('CVSCOOPTS', " ".join(options), localdata) - data.setVar('CVSMODULE', ud.module, localdata) - cvscmd = data.getVar('FETCHCOMMAND', localdata, True) - cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, True) - - if cvs_rsh: - cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd) - cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) - - # create module directory - logger.debug(2, "Fetch: checking for module directory") - pkg = data.expand('${PN}', d) - pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg) - moddir = os.path.join(pkgdir, localdir) - if os.access(os.path.join(moddir, 'CVS'), os.R_OK): - logger.info("Update " + loc) - bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url) - # update sources there - os.chdir(moddir) - cmd = cvsupdatecmd - else: - logger.info("Fetch " + loc) - # check out sources there - bb.mkdirhier(pkgdir) - os.chdir(pkgdir) - logger.debug(1, "Running %s", cvscmd) - bb.fetch2.check_network_access(d, cvscmd, ud.url) - cmd = cvscmd - - runfetchcmd(cmd, d, cleanup = [moddir]) - - if not os.access(moddir, os.R_OK): - raise FetchError("Directory %s was not readable despite sucessful fetch?!" 
% moddir, ud.url) - - scmdata = ud.parm.get("scmdata", "") - if scmdata == "keep": - tar_flags = "" - else: - tar_flags = "--exclude 'CVS'" - - # tar them up to a defined filename - if 'fullpath' in ud.parm: - os.chdir(pkgdir) - cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir) - else: - os.chdir(moddir) - os.chdir('..') - cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir)) - - runfetchcmd(cmd, d, cleanup = [ud.localpath]) - - def clean(self, ud, d): - """ Clean CVS Files and tarballs """ - - pkg = data.expand('${PN}', d) - localdata = data.createCopy(d) - data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata) - data.update_data(localdata) - pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg) - - bb.utils.remove(pkgdir, True) - bb.utils.remove(ud.localpath) - diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py deleted file mode 100644 index f2c27e42a..000000000 --- a/bitbake/lib/bb/fetch2/git.py +++ /dev/null @@ -1,242 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' git implementation - -""" - -#Copyright (C) 2005 Richard Purdie -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -import os -import bb -from bb import data -from bb.fetch2 import FetchMethod -from bb.fetch2 import runfetchcmd -from bb.fetch2 import logger - -class Git(FetchMethod): - """Class to fetch a module or modules from git repositories""" - def init(self, d): - # - # Only enable _sortable revision if the key is set - # - if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True): - self._sortable_buildindex = self._sortable_buildindex_disabled - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with git. 
- """ - return ud.type in ['git'] - - def urldata_init(self, ud, d): - """ - init git specific variable within url data - so that the git method like latest_revision() can work - """ - if 'protocol' in ud.parm: - ud.proto = ud.parm['protocol'] - elif not ud.host: - ud.proto = 'file' - else: - ud.proto = "rsync" - - ud.nocheckout = False - if 'nocheckout' in ud.parm: - ud.nocheckout = True - - branches = ud.parm.get("branch", "master").split(',') - if len(branches) != len(ud.names): - raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url) - ud.branches = {} - for name in ud.names: - branch = branches[ud.names.index(name)] - ud.branches[name] = branch - - gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.')) - ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname) - ud.fullmirror = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball) - ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname) - - ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git" - - for name in ud.names: - # Ensure anything that doesn't look like a sha256 checksum/revision is translated into one - if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]): - ud.revisions[name] = self.latest_revision(ud.url, ud, d, name) - - ud.write_tarballs = (data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0" - - ud.localfile = ud.clonedir - - def localpath(self, url, ud, d): - return ud.clonedir - - def need_update(self, u, ud, d): - if not os.path.exists(ud.clonedir): - return True - os.chdir(ud.clonedir) - for name in ud.names: - if not self._contains_ref(ud.revisions[name], d): - return True - if ud.write_tarballs and not os.path.exists(ud.fullmirror): - return True - return False - - def try_premirror(self, u, ud, d): - # If we don't do this, updating an existing checkout with only premirrors - # is not possible - if bb.data.getVar("BB_FETCH_PREMIRRORONLY", d, True) is not None: - return True - if os.path.exists(ud.clonedir): - return False - return True - - def download(self, loc, ud, d): - """Fetch url""" - - if ud.user: - username = ud.user + '@' - else: - username = "" - - ud.repochanged = not os.path.exists(ud.fullmirror) - - # If the checkout doesn't exist and the mirror tarball does, extract it - if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror): - bb.mkdirhier(ud.clonedir) - os.chdir(ud.clonedir) - runfetchcmd("tar -xzf %s" % (ud.fullmirror), d) - - # If the repo still doesn't exist, fallback to cloning it - if not os.path.exists(ud.clonedir): - bb.fetch2.check_network_access(d, "git clone --bare %s%s" % (ud.host, ud.path)) - runfetchcmd("%s clone --bare %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d) - - os.chdir(ud.clonedir) - # Update the checkout if needed - needupdate = False - for name in ud.names: - if not self._contains_ref(ud.revisions[name], d): - needupdate = True - if needupdate: - bb.fetch2.check_network_access(d, "git fetch %s%s" % (ud.host, ud.path), ud.url) - try: - runfetchcmd("%s remote prune origin" % ud.basecmd, d) - runfetchcmd("%s remote rm origin" % ud.basecmd, d) - except bb.fetch2.FetchError: - logger.debug(1, "No Origin") - - runfetchcmd("%s remote add origin %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d) - runfetchcmd("%s fetch --all -t" % ud.basecmd, d) - runfetchcmd("%s prune-packed" % ud.basecmd, d) - runfetchcmd("%s pack-redundant --all | xargs -r rm" % 
ud.basecmd, d) - ud.repochanged = True - - def build_mirror_data(self, url, ud, d): - # Generate a mirror tarball if needed - if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)): - os.chdir(ud.clonedir) - logger.info("Creating tarball of git repository") - runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d) - - def unpack(self, ud, destdir, d): - """ unpack the downloaded src to destdir""" - - subdir = ud.parm.get("subpath", "") - if subdir != "": - readpathspec = ":%s" % (subdir) - else: - readpathspec = "" - - destdir = os.path.join(destdir, "git/") - if os.path.exists(destdir): - bb.utils.prunedir(destdir) - - runfetchcmd("git clone -s -n %s %s" % (ud.clonedir, destdir), d) - if not ud.nocheckout: - os.chdir(destdir) - runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d) - runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d) - return True - - def clean(self, ud, d): - """ clean the git directory """ - - bb.utils.remove(ud.localpath, True) - bb.utils.remove(ud.fullmirror) - - def supports_srcrev(self): - return True - - def _contains_ref(self, tag, d): - basecmd = data.getVar("FETCHCMD_git", d, True) or "git" - output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True) - return output.split()[0] != "0" - - def _revision_key(self, url, ud, d, name): - """ - Return a unique key for the url - """ - return "git:" + ud.host + ud.path.replace('/', '.') + ud.branches[name] - - def _latest_revision(self, url, ud, d, name): - """ - Compute the HEAD revision for the url - """ - if ud.user: - username = ud.user + '@' - else: - username = "" - - bb.fetch2.check_network_access(d, "git ls-remote %s%s %s" % (ud.host, ud.path, ud.branches[name])) - basecmd = data.getVar("FETCHCMD_git", d, True) or "git" - cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branches[name]) - output = runfetchcmd(cmd, d, True) - if not output: - raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, url) - return output.split()[0] - - def _build_revision(self, url, ud, d, name): - return ud.revisions[name] - - def _sortable_buildindex_disabled(self, url, ud, d, rev): - """ - Return a suitable buildindex for the revision specified. This is done by counting revisions - using "git rev-list" which may or may not work in different circumstances. - """ - - cwd = os.getcwd() - - # Check if we have the rev already - - if not os.path.exists(ud.clonedir): - print("no repo") - self.download(None, ud, d) - if not os.path.exists(ud.clonedir): - logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir) - return None - - - os.chdir(ud.clonedir) - if not self._contains_ref(rev, d): - self.download(None, ud, d) - - output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True) - os.chdir(cwd) - - buildindex = "%s" % output.split()[0] - logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev) - return buildindex diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py deleted file mode 100644 index 6a56f8d0c..000000000 --- a/bitbake/lib/bb/fetch2/hg.py +++ /dev/null @@ -1,174 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementation for mercurial DRCS (hg). 
- -""" - -# Copyright (C) 2003, 2004 Chris Larson -# Copyright (C) 2004 Marcin Juszkiewicz -# Copyright (C) 2007 Robert Schuster -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -import os -import sys -import logging -import bb -from bb import data -from bb.fetch2 import FetchMethod -from bb.fetch2 import FetchError -from bb.fetch2 import MissingParameterError -from bb.fetch2 import runfetchcmd -from bb.fetch2 import logger - -class Hg(FetchMethod): - """Class to fetch from mercurial repositories""" - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with mercurial. - """ - return ud.type in ['hg'] - - def urldata_init(self, ud, d): - """ - init hg specific variable within url data - """ - if not "module" in ud.parm: - raise MissingParameterError('module', ud.url) - - ud.module = ud.parm["module"] - - # Create paths to mercurial checkouts - relpath = self._strip_leading_slashes(ud.path) - ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath) - ud.moddir = os.path.join(ud.pkgdir, ud.module) - - if 'rev' in ud.parm: - ud.revision = ud.parm['rev'] - elif not ud.revision: - ud.revision = self.latest_revision(ud.url, ud, d) - - ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) - - def need_update(self, url, ud, d): - revTag = ud.parm.get('rev', 'tip') - if revTag == "tip": - return True - if not os.path.exists(ud.localpath): - return True - return False - - def _buildhgcommand(self, ud, d, command): - """ - Build up an hg commandline based on ud - command is "fetch", "update", "info" - """ - - basecmd = data.expand('${FETCHCMD_hg}', d) - - proto = ud.parm.get('proto', 'http') - - host = ud.host - if proto == "file": - host = "/" - ud.host = "localhost" - - if not ud.user: - hgroot = host + ud.path - else: - hgroot = ud.user + "@" + host + ud.path - - if command is "info": - return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module) - - options = []; - if ud.revision: - options.append("-r %s" % ud.revision) - - if command is "fetch": - cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module) - elif command is "pull": - # do not pass options list; limiting pull to rev causes the local - # repo not to contain it and immediately following "update" command - # will crash - cmd = "%s pull" % (basecmd) - elif command is "update": - cmd = "%s update -C %s" % (basecmd, " ".join(options)) - else: - raise FetchError("Invalid hg command %s" % command, ud.url) - - return cmd - - def download(self, loc, ud, d): - """Fetch url""" - - logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") - - if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): - updatecmd = self._buildhgcommand(ud, d, "pull") - logger.info("Update " + loc) - 
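For a sense of what _buildhgcommand() above yields, with an invented host, module=src and rev=abc123:

    basecmd = "hg"                        # assumed ${FETCHCMD_hg} expansion
    hgroot = "hg.example.com/repo"        # host + path, invented
    module, options = "src", ["-r abc123"]
    fetchcmd = "%s clone %s %s://%s/%s %s" % (
        basecmd, " ".join(options), "http", hgroot, module, module)
    # -> "hg clone -r abc123 http://hg.example.com/repo/src src"

The same identity-comparison caveat applies here as in the bzr fetcher: command is "fetch" relies on string interning, and == would be the safer comparison.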
# update sources there - os.chdir(ud.moddir) - logger.debug(1, "Running %s", updatecmd) - bb.fetch2.check_network_access(d, updatecmd, ud.url) - runfetchcmd(updatecmd, d) - - else: - fetchcmd = self._buildhgcommand(ud, d, "fetch") - logger.info("Fetch " + loc) - # check out sources there - bb.mkdirhier(ud.pkgdir) - os.chdir(ud.pkgdir) - logger.debug(1, "Running %s", fetchcmd) - bb.fetch2.check_network_access(d, fetchcmd, ud.url) - runfetchcmd(fetchcmd, d) - - # Even when we clone (fetch), we still need to update as hg's clone - # won't checkout the specified revision if its on a branch - updatecmd = self._buildhgcommand(ud, d, "update") - os.chdir(ud.moddir) - logger.debug(1, "Running %s", updatecmd) - runfetchcmd(updatecmd, d) - - scmdata = ud.parm.get("scmdata", "") - if scmdata == "keep": - tar_flags = "" - else: - tar_flags = "--exclude '.hg' --exclude '.hgrags'" - - os.chdir(ud.pkgdir) - runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath]) - - def supports_srcrev(self): - return True - - def _latest_revision(self, url, ud, d, name): - """ - Compute tip revision for the url - """ - bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info")) - output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d) - return output.strip() - - def _build_revision(self, url, ud, d): - return ud.revision - - def _revision_key(self, url, ud, d, name): - """ - Return a unique key for the url - """ - return "hg:" + ud.moddir diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py deleted file mode 100644 index 77a296ec6..000000000 --- a/bitbake/lib/bb/fetch2/local.py +++ /dev/null @@ -1,80 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementations - -Classes for obtaining upstream sources for the -BitBake build tools. - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -import os -import bb -import bb.utils -from bb import data -from bb.fetch2 import FetchMethod - -class Local(FetchMethod): - def supports(self, url, urldata, d): - """ - Check to see if a given url represents a local fetch. - """ - return urldata.type in ['file'] - - def urldata_init(self, ud, d): - # We don't set localfile as for this fetcher the file is already local! - return - - def localpath(self, url, urldata, d): - """ - Return the local filename of a given url assuming a successful fetch. 
- """ - path = url.split("://")[1] - path = path.split(";")[0] - newpath = path - if path[0] != "/": - filespath = data.getVar('FILESPATH', d, True) - if filespath: - newpath = bb.utils.which(filespath, path) - if not newpath: - filesdir = data.getVar('FILESDIR', d, True) - if filesdir: - newpath = os.path.join(filesdir, path) - return newpath - - def download(self, url, urldata, d): - """Fetch urls (no-op for Local method)""" - # no need to fetch local files, we'll deal with them in place. - return 1 - - def checkstatus(self, url, urldata, d): - """ - Check the status of the url - """ - if urldata.localpath.find("*") != -1: - logger.info("URL %s looks like a glob and was therefore not checked.", url) - return True - if os.path.exists(urldata.localpath): - return True - return False - - def clean(self, urldata, d): - return - diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py deleted file mode 100644 index 4bf411c24..000000000 --- a/bitbake/lib/bb/fetch2/osc.py +++ /dev/null @@ -1,135 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -Bitbake "Fetch" implementation for osc (Opensuse build service client). -Based on the svn "Fetch" implementation. - -""" - -import os -import sys -import logging -import bb -from bb import data -from bb.fetch2 import FetchMethod -from bb.fetch2 import FetchError -from bb.fetch2 import MissingParameterError -from bb.fetch2 import runfetchcmd - -class Osc(FetchMethod): - """Class to fetch a module or modules from Opensuse build server - repositories.""" - - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with osc. - """ - return ud.type in ['osc'] - - def urldata_init(self, ud, d): - if not "module" in ud.parm: - raise MissingParameterError('module', ud.url) - - ud.module = ud.parm["module"] - - # Create paths to osc checkouts - relpath = self._strip_leading_slashes(ud.path) - ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host) - ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module) - - if 'rev' in ud.parm: - ud.revision = ud.parm['rev'] - else: - pv = data.getVar("PV", d, 0) - rev = bb.fetch2.srcrev_internal_helper(ud, d) - if rev and rev != True: - ud.revision = rev - else: - ud.revision = "" - - ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d) - - def _buildosccommand(self, ud, d, command): - """ - Build up an ocs commandline based on ud - command is "fetch", "update", "info" - """ - - basecmd = data.expand('${FETCHCMD_osc}', d) - - proto = ud.parm.get('proto', 'ocs') - - options = [] - - config = "-c %s" % self.generate_config(ud, d) - - if ud.revision: - options.append("-r %s" % ud.revision) - - coroot = self._strip_leading_slashes(ud.path) - - if command is "fetch": - osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options)) - elif command is "update": - osccmd = "%s %s up %s" % (basecmd, config, " ".join(options)) - else: - raise FetchError("Invalid osc command %s" % command, ud.url) - - return osccmd - - def download(self, loc, ud, d): - """ - Fetch url - """ - - logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") - - if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK): - oscupdatecmd = self._buildosccommand(ud, d, "update") - logger.info("Update "+ loc) - # update sources there - os.chdir(ud.moddir) - logger.debug(1, "Running %s", oscupdatecmd) - 
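Note that _buildosccommand() above defaults the proto parameter to 'ocs', which appears to be a transposition of 'osc'. For a sense of the checkout command it builds, with invented values:

    basecmd = "osc"                      # assumed ${FETCHCMD_osc} expansion
    config = "-c /tmp/oscrc"             # invented path; see generate_config()
    coroot, module = "some/project", "home:user"   # invented
    options = ["-r 5"]
    osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, module, " ".join(options))
    # -> "osc -c /tmp/oscrc co some/project/home:user -r 5"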
bb.fetch2.check_network_access(d, oscupdatecmd, ud.url) - runfetchcmd(oscupdatecmd, d) - else: - oscfetchcmd = self._buildosccommand(ud, d, "fetch") - logger.info("Fetch " + loc) - # check out sources there - bb.mkdirhier(ud.pkgdir) - os.chdir(ud.pkgdir) - logger.debug(1, "Running %s", oscfetchcmd) - bb.fetch2.check_network_access(d, oscfetchcmd, ud.url) - runfetchcmd(oscfetchcmd, d) - - os.chdir(os.path.join(ud.pkgdir + ud.path)) - # tar them up to a defined filename - runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath]) - - def supports_srcrev(self): - return False - - def generate_config(self, ud, d): - """ - Generate a .oscrc to be used for this run. - """ - - config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc") - if (os.path.exists(config_path)): - os.remove(config_path) - - f = open(config_path, 'w') - f.write("[general]\n") - f.write("apisrv = %s\n" % ud.host) - f.write("scheme = http\n") - f.write("su-wrapper = su -c\n") - f.write("build-root = %s\n" % data.expand('${WORKDIR}', d)) - f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n") - f.write("extra-pkgs = gzip\n") - f.write("\n") - f.write("[%s]\n" % ud.host) - f.write("user = %s\n" % ud.parm["user"]) - f.write("pass = %s\n" % ud.parm["pswd"]) - f.close() - - return config_path diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py deleted file mode 100644 index 6347834c7..000000000 --- a/bitbake/lib/bb/fetch2/perforce.py +++ /dev/null @@ -1,196 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementations - -Classes for obtaining upstream sources for the -BitBake build tools. - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
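Before moving on to the perforce fetcher, for reference: the oscrc that generate_config() above writes comes out as below. The host, build-root and credentials are placeholders; the urllist line is verbatim from the code:

    [general]
    apisrv = api.example.org
    scheme = http
    su-wrapper = su -c
    build-root = /build/workdir
    urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm
    extra-pkgs = gzip

    [api.example.org]
    user = jdoe
    pass = secret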
-# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -from future_builtins import zip -import os -import logging -import bb -from bb import data -from bb.fetch2 import FetchMethod -from bb.fetch2 import FetchError -from bb.fetch2 import logger -from bb.fetch2 import runfetchcmd - -class Perforce(FetchMethod): - def supports(self, url, ud, d): - return ud.type in ['p4'] - - def doparse(url, d): - parm = {} - path = url.split("://")[1] - delim = path.find("@"); - if delim != -1: - (user, pswd, host, port) = path.split('@')[0].split(":") - path = path.split('@')[1] - else: - (host, port) = data.getVar('P4PORT', d).split(':') - user = "" - pswd = "" - - if path.find(";") != -1: - keys=[] - values=[] - plist = path.split(';') - for item in plist: - if item.count('='): - (key, value) = item.split('=') - keys.append(key) - values.append(value) - - parm = dict(zip(keys, values)) - path = "//" + path.split(';')[0] - host += ":%s" % (port) - parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm) - - return host, path, user, pswd, parm - doparse = staticmethod(doparse) - - def getcset(d, depot, host, user, pswd, parm): - p4opt = "" - if "cset" in parm: - return parm["cset"]; - if user: - p4opt += " -u %s" % (user) - if pswd: - p4opt += " -P %s" % (pswd) - if host: - p4opt += " -p %s" % (host) - - p4date = data.getVar("P4DATE", d, True) - if "revision" in parm: - depot += "#%s" % (parm["revision"]) - elif "label" in parm: - depot += "@%s" % (parm["label"]) - elif p4date: - depot += "@%s" % (p4date) - - p4cmd = data.getVar('FETCHCOMMAND_p4', d, True) - logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot) - p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot)) - cset = p4file.readline().strip() - logger.debug(1, "READ %s", cset) - if not cset: - return -1 - - return cset.split(' ')[1] - getcset = staticmethod(getcset) - - def urldata_init(self, ud, d): - (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d) - - # If a label is specified, we use that as our filename - - if "label" in parm: - ud.localfile = "%s.tar.gz" % (parm["label"]) - return - - base = path - which = path.find('/...') - if which != -1: - base = path[:which] - - base = self._strip_leading_slashes(base) - - cset = Perforce.getcset(d, path, host, user, pswd, parm) - - ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d) - - def download(self, loc, ud, d): - """ - Fetch urls - """ - - (host, depot, user, pswd, parm) = Perforce.doparse(loc, d) - - if depot.find('/...') != -1: - path = depot[:depot.find('/...')] - else: - path = depot - - module = parm.get('module', os.path.basename(path)) - - localdata = data.createCopy(d) - data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata) - data.update_data(localdata) - - # Get the p4 command - p4opt = "" - if user: - p4opt += " -u %s" % (user) - - if pswd: - p4opt += " -P %s" % (pswd) - - if host: - p4opt += " -p %s" % (host) - - p4cmd = data.getVar('FETCHCOMMAND', localdata, True) - - # create temp directory - logger.debug(2, "Fetch: creating temporary directory") - bb.mkdirhier(data.expand('${WORKDIR}', localdata)) - data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata) - tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false") - tmpfile = tmppipe.readline().strip() - if not tmpfile: - raise FetchError("Fetch: unable to create temporary directory.. 
make sure 'mktemp' is in the PATH.", loc) - - if "label" in parm: - depot = "%s@%s" % (depot, parm["label"]) - else: - cset = Perforce.getcset(d, depot, host, user, pswd, parm) - depot = "%s@%s" % (depot, cset) - - os.chdir(tmpfile) - logger.info("Fetch " + loc) - logger.info("%s%s files %s", p4cmd, p4opt, depot) - p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot)) - - if not p4file: - raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc) - - count = 0 - - for file in p4file: - list = file.split() - - if list[2] == "delete": - continue - - dest = list[0][len(path)+1:] - where = dest.find("#") - - os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0])) - count = count + 1 - - if count == 0: - logger.error() - raise FetchError("Fetch: No files gathered from the P4 fetch", loc) - - runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath]) - # cleanup - bb.utils.prunedir(tmpfile) diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py deleted file mode 100644 index 54130a8c3..000000000 --- a/bitbake/lib/bb/fetch2/repo.py +++ /dev/null @@ -1,98 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake "Fetch" repo (git) implementation - -""" - -# Copyright (C) 2009 Tom Rini -# -# Based on git.py which is: -#Copyright (C) 2005 Richard Purdie -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -import os -import bb -from bb import data -from bb.fetch2 import FetchMethod -from bb.fetch2 import runfetchcmd - -class Repo(FetchMethod): - """Class to fetch a module or modules from repo (git) repositories""" - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with repo. - """ - return ud.type in ["repo"] - - def urldata_init(self, ud, d): - """ - We don"t care about the git rev of the manifests repository, but - we do care about the manifest to use. The default is "default". - We also care about the branch or tag to be used. The default is - "master". - """ - - ud.proto = ud.parm.get('protocol', 'git') - ud.branch = ud.parm.get('branch', 'master') - ud.manifest = ud.parm.get('manifest', 'default.xml') - if not ud.manifest.endswith('.xml'): - ud.manifest += '.xml' - - ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d) - - def download(self, loc, ud, d): - """Fetch url""" - - if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK): - logger.debug(1, "%s already exists (or was stashed). 
Skipping repo init / sync.", ud.localpath) - return - - gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", ".")) - repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo") - codir = os.path.join(repodir, gitsrcname, ud.manifest) - - if ud.user: - username = ud.user + "@" - else: - username = "" - - bb.mkdirhier(os.path.join(codir, "repo")) - os.chdir(os.path.join(codir, "repo")) - if not os.path.exists(os.path.join(codir, "repo", ".repo")): - bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url) - runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d) - - bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url) - runfetchcmd("repo sync", d) - os.chdir(codir) - - scmdata = ud.parm.get("scmdata", "") - if scmdata == "keep": - tar_flags = "" - else: - tar_flags = "--exclude '.repo' --exclude '.git'" - - # Create a cache - runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d) - - def supports_srcrev(self): - return False - - def _build_revision(self, url, ud, d): - return ud.manifest - - def _want_sortable_revision(self, url, ud, d): - return False diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py deleted file mode 100644 index 91ac15faa..000000000 --- a/bitbake/lib/bb/fetch2/ssh.py +++ /dev/null @@ -1,120 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -''' -BitBake 'Fetch' implementations - -This implementation is for Secure Shell (SSH), and attempts to comply with the -IETF secsh internet draft: - http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/ - - Currently does not support the sftp parameters, as this uses scp - Also does not support the 'fingerprint' connection parameter. - -''' - -# Copyright (C) 2006 OpenedHand Ltd. - # - # - # Based in part on svk.py: - # Copyright (C) 2006 Holger Hans Peter Freyther - # Based on svn.py: - # Copyright (C) 2003, 2004 Chris Larson - # Based on functions from the base bb module: - # Copyright 2003 Holger Schurig - # - # - # This program is free software; you can redistribute it and/or modify - # it under the terms of the GNU General Public License version 2 as - # published by the Free Software Foundation. - # - # This program is distributed in the hope that it will be useful, - # but WITHOUT ANY WARRANTY; without even the implied warranty of - # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - # GNU General Public License for more details. - # - # You should have received a copy of the GNU General Public License along - # with this program; if not, write to the Free Software Foundation, Inc., - # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -import re, os -import bb -from bb import data -from bb.fetch2 import FetchMethod -from bb.fetch2 import FetchError -from bb.fetch2 import logger -from bb.fetch2 import runfetchcmd - - -__pattern__ = re.compile(r''' - \s* # Skip leading whitespace - ssh:// # scheme - ( # Optional username/password block - (?P<user>\S+) # username - (:(?P<pass>\S+))? # colon followed by the password (optional) - )? - (?P<cparam>(;[^;]+)*)? # connection parameters block (optional) - @ - (?P<host>\S+?) # non-greedy match of the host - (:(?P<port>[0-9]+))? # colon followed by the port (optional) - / - (?P<path>[^;]+) # path on the remote system, may be absolute or relative, - # and may include the use of '~' to reference the remote home - # directory - (?P<param>(;[^;]+)*)? # parameters block (optional) - $ -''', re.VERBOSE) - -class SSH(FetchMethod): - '''Class to fetch a module or modules via Secure Shell''' - - def supports(self, url, urldata, d): - return __pattern__.match(url) != None - - def localpath(self, url, urldata, d): - m = __pattern__.match(urldata.url) - path = m.group('path') - host = m.group('host') - lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path)) - return lpath - - def download(self, url, urldata, d): - dldir = data.getVar('DL_DIR', d, True) - - m = __pattern__.match(url) - path = m.group('path') - host = m.group('host') - port = m.group('port') - user = m.group('user') - password = m.group('pass') - - ldir = os.path.join(dldir, host) - lpath = os.path.join(ldir, os.path.basename(path)) - - if not os.path.exists(ldir): - os.makedirs(ldir) - - if port: - port = '-P %s' % port - else: - port = '' - - if user: - fr = user - if password: - fr += ':%s' % password - fr += '@%s' % host - else: - fr = host - fr += ':%s' % path - - - import commands - cmd = 'scp -B -r %s %s %s/' % ( - port, - commands.mkarg(fr), - commands.mkarg(ldir) - ) - - bb.fetch2.check_network_access(d, cmd, urldata.url) - - runfetchcmd(cmd, d) - diff --git a/bitbake/lib/bb/fetch2/svk.py b/bitbake/lib/bb/fetch2/svk.py deleted file mode 100644 index 6211cac8d..000000000 --- a/bitbake/lib/bb/fetch2/svk.py +++ /dev/null @@ -1,97 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementations - -This implementation is for svk. It is based on the svn implementation - -""" - -# Copyright (C) 2006 Holger Hans Peter Freyther -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -import os -import logging -import bb -from bb import data -from bb.fetch2 import FetchMethod -from bb.fetch2 import FetchError -from bb.fetch2 import MissingParameterError -from bb.fetch2 import logger -from bb.fetch2 import runfetchcmd - -class Svk(FetchMethod): - """Class to fetch a module or modules from svk repositories""" - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with svk.
- """ - return ud.type in ['svk'] - - def urldata_init(self, ud, d): - - if not "module" in ud.parm: - raise MissingParameterError('module', ud.url) - else: - ud.module = ud.parm["module"] - - ud.revision = ud.parm.get('rev', "") - - ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) - - def need_update(self, url, ud, d): - if ud.date == "now": - return True - if not os.path.exists(ud.localpath): - return True - return False - - def download(self, loc, ud, d): - """Fetch urls""" - - svkroot = ud.host + ud.path - - svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module) - - if ud.revision: - svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module) - - # create temp directory - localdata = data.createCopy(d) - data.update_data(localdata) - logger.debug(2, "Fetch: creating temporary directory") - bb.mkdirhier(data.expand('${WORKDIR}', localdata)) - data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) - tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false") - tmpfile = tmppipe.readline().strip() - if not tmpfile: - logger.error() - raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc) - - # check out sources there - os.chdir(tmpfile) - logger.info("Fetch " + loc) - logger.debug(1, "Running %s", svkcmd) - runfetchcmd(svkcmd, d, cleanup = [tmpfile]) - - os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module))) - # tar them up to a defined filename - runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath]) - - # cleanup - bb.utils.prunedir(tmpfile) diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py deleted file mode 100644 index ac4fd27e1..000000000 --- a/bitbake/lib/bb/fetch2/svn.py +++ /dev/null @@ -1,180 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementation for svn. - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# Copyright (C) 2004 Marcin Juszkiewicz -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -import os -import sys -import logging -import bb -from bb import data -from bb.fetch2 import FetchMethod -from bb.fetch2 import FetchError -from bb.fetch2 import MissingParameterError -from bb.fetch2 import runfetchcmd -from bb.fetch2 import logger - -class Svn(FetchMethod): - """Class to fetch a module or modules from svn repositories""" - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with svn. 
- """ - return ud.type in ['svn'] - - def urldata_init(self, ud, d): - """ - init svn specific variable within url data - """ - if not "module" in ud.parm: - raise MissingParameterError('module', ud.url) - - ud.module = ud.parm["module"] - - # Create paths to svn checkouts - relpath = self._strip_leading_slashes(ud.path) - ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath) - ud.moddir = os.path.join(ud.pkgdir, ud.module) - - if 'rev' in ud.parm: - ud.revision = ud.parm['rev'] - - ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) - - def _buildsvncommand(self, ud, d, command): - """ - Build up an svn commandline based on ud - command is "fetch", "update", "info" - """ - - basecmd = data.expand('${FETCHCMD_svn}', d) - - proto = ud.parm.get('proto', 'svn') - - svn_rsh = None - if proto == "svn+ssh" and "rsh" in ud.parm: - svn_rsh = ud.parm["rsh"] - - svnroot = ud.host + ud.path - - options = [] - - if ud.user: - options.append("--username %s" % ud.user) - - if ud.pswd: - options.append("--password %s" % ud.pswd) - - if command is "info": - svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module) - else: - suffix = "" - if ud.revision: - options.append("-r %s" % ud.revision) - suffix = "@%s" % (ud.revision) - - if command is "fetch": - svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module) - elif command is "update": - svncmd = "%s update %s" % (basecmd, " ".join(options)) - else: - raise FetchError("Invalid svn command %s" % command, ud.url) - - if svn_rsh: - svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd) - - return svncmd - - def download(self, loc, ud, d): - """Fetch url""" - - logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") - - if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK): - svnupdatecmd = self._buildsvncommand(ud, d, "update") - logger.info("Update " + loc) - # update sources there - os.chdir(ud.moddir) - logger.debug(1, "Running %s", svnupdatecmd) - bb.fetch2.check_network_access(d, svnupdatecmd, ud.url) - runfetchcmd(svnupdatecmd, d) - else: - svnfetchcmd = self._buildsvncommand(ud, d, "fetch") - logger.info("Fetch " + loc) - # check out sources there - bb.mkdirhier(ud.pkgdir) - os.chdir(ud.pkgdir) - logger.debug(1, "Running %s", svnfetchcmd) - bb.fetch2.check_network_access(d, svnfetchcmd, ud.url) - runfetchcmd(svnfetchcmd, d) - - scmdata = ud.parm.get("scmdata", "") - if scmdata == "keep": - tar_flags = "" - else: - tar_flags = "--exclude '.svn'" - - os.chdir(ud.pkgdir) - # tar them up to a defined filename - runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath]) - - def clean(self, ud, d): - """ Clean SVN specific files and dirs """ - - bb.utils.remove(ud.localpath) - bb.utils.remove(ud.moddir, True) - - - def supports_srcrev(self): - return True - - def _revision_key(self, url, ud, d, name): - """ - Return a unique key for the url - """ - return "svn:" + ud.moddir - - def _latest_revision(self, url, ud, d, name): - """ - Return the latest upstream revision number - """ - bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "info")) - - output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True) - - revision = None - for line in output.splitlines(): - if "Last Changed Rev" in line: - revision = line.split(":")[1].strip() - - return revision - - def 
_sortable_revision(self, url, ud, d): - """ - Return a sortable revision number which in our case is the revision number - """ - - return self._build_revision(url, ud, d) - - def _build_revision(self, url, ud, d): - return ud.revision diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py deleted file mode 100644 index 7bd027adc..000000000 --- a/bitbake/lib/bb/fetch2/wget.py +++ /dev/null @@ -1,91 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'Fetch' implementations - -Classes for obtaining upstream sources for the -BitBake build tools. - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -import os -import logging -import bb -import urllib -from bb import data -from bb.fetch2 import FetchMethod -from bb.fetch2 import FetchError -from bb.fetch2 import encodeurl -from bb.fetch2 import decodeurl -from bb.fetch2 import logger -from bb.fetch2 import runfetchcmd - -class Wget(FetchMethod): - """Class to fetch urls via 'wget'""" - def supports(self, url, ud, d): - """ - Check to see if a given url can be fetched with wget. - """ - return ud.type in ['http', 'https', 'ftp'] - - def urldata_init(self, ud, d): - - ud.basename = os.path.basename(ud.path) - ud.localfile = data.expand(urllib.unquote(ud.basename), d) - - def download(self, uri, ud, d, checkonly = False): - """Fetch urls""" - - def fetch_uri(uri, ud, d): - if checkonly: - fetchcmd = data.getVar("CHECKCOMMAND", d, True) - elif os.path.exists(ud.localpath): - # file exists, but we didnt complete it.. trying again.. - fetchcmd = data.getVar("RESUMECOMMAND", d, True) - else: - fetchcmd = data.getVar("FETCHCOMMAND", d, True) - - uri = uri.split(";")[0] - uri_decoded = list(decodeurl(uri)) - uri_type = uri_decoded[0] - uri_host = uri_decoded[1] - - fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0]) - fetchcmd = fetchcmd.replace("${FILE}", ud.basename) - logger.info("fetch " + uri) - logger.debug(2, "executing " + fetchcmd) - bb.fetch2.check_network_access(d, fetchcmd) - runfetchcmd(fetchcmd, d) - - # Sanity check since wget can pretend it succeed when it didn't - # Also, this used to happen if sourceforge sent us to the mirror page - if not os.path.exists(ud.localpath) and not checkonly: - raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" 
% (uri, ud.localpath), uri) - - localdata = data.createCopy(d) - data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata) - data.update_data(localdata) - - fetch_uri(uri, ud, localdata) - - return True - - def checkstatus(self, uri, ud, d): - return self.download(uri, ud, d, True) diff --git a/bitbake/lib/bb/methodpool.py b/bitbake/lib/bb/methodpool.py deleted file mode 100644 index 1485b1357..000000000 --- a/bitbake/lib/bb/methodpool.py +++ /dev/null @@ -1,84 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# -# Copyright (C) 2006 Holger Hans Peter Freyther -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - - -""" - What is a method pool? - - BitBake has a global method scope where .bb, .inc and .bbclass - files can install methods. These methods are parsed from strings. - To avoid recompiling and executing these string we introduce - a method pool to do this task. - - This pool will be used to compile and execute the functions. It - will be smart enough to -""" - -from bb.utils import better_compile, better_exec -from bb import error - -# A dict of modules we have handled -# it is the number of .bbclasses + x in size -_parsed_methods = { } -_parsed_fns = { } - -def insert_method(modulename, code, fn): - """ - Add code of a module should be added. The methods - will be simply added, no checking will be done - """ - comp = better_compile(code, modulename, fn ) - better_exec(comp, None, code, fn) - - # now some instrumentation - code = comp.co_names - for name in code: - if name in ['None', 'False']: - continue - elif name in _parsed_fns and not _parsed_fns[name] == modulename: - error( "Error Method already seen: %s in' %s' now in '%s'" % (name, _parsed_fns[name], modulename)) - else: - _parsed_fns[name] = modulename - -def check_insert_method(modulename, code, fn): - """ - Add the code if it wasnt added before. The module - name will be used for that - - Variables: - @modulename a short name e.g. 
base.bbclass - @code The actual python code - @fn The filename from the outer file - """ - if not modulename in _parsed_methods: - return insert_method(modulename, code, fn) - _parsed_methods[modulename] = 1 - -def parsed_module(modulename): - """ - Inform me file xyz was parsed - """ - return modulename in _parsed_methods - - -def get_parsed_dict(): - """ - shortcut - """ - return _parsed_methods diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py deleted file mode 100644 index 1f9ff904a..000000000 --- a/bitbake/lib/bb/msg.py +++ /dev/null @@ -1,200 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake 'msg' implementation - -Message handling infrastructure for bitbake - -""" - -# Copyright (C) 2006 Richard Purdie -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -import sys -import logging -import collections -from itertools import groupby -import warnings -import bb -import bb.event - -class BBLogFormatter(logging.Formatter): - """Formatter which ensures that our 'plain' messages (logging.INFO + 1) are used as is""" - - DEBUG3 = logging.DEBUG - 2 - DEBUG2 = logging.DEBUG - 1 - DEBUG = logging.DEBUG - VERBOSE = logging.INFO - 1 - NOTE = logging.INFO - PLAIN = logging.INFO + 1 - ERROR = logging.ERROR - WARNING = logging.WARNING - CRITICAL = logging.CRITICAL - - levelnames = { - DEBUG3 : 'DEBUG', - DEBUG2 : 'DEBUG', - DEBUG : 'DEBUG', - VERBOSE: 'NOTE', - NOTE : 'NOTE', - PLAIN : '', - WARNING : 'WARNING', - ERROR : 'ERROR', - CRITICAL: 'ERROR', - } - - def getLevelName(self, levelno): - try: - return self.levelnames[levelno] - except KeyError: - self.levelnames[levelno] = value = 'Level %d' % levelno - return value - - def format(self, record): - record.levelname = self.getLevelName(record.levelno) - if record.levelno == self.PLAIN: - return record.getMessage() - else: - return logging.Formatter.format(self, record) - -class Loggers(dict): - def __getitem__(self, key): - if key in self: - return dict.__getitem__(self, key) - else: - log = logging.getLogger("BitBake.%s" % domain._fields[key]) - dict.__setitem__(self, key, log) - return log - -class DebugLevel(dict): - def __getitem__(self, key): - if key == "default": - key = domain.Default - return get_debug_level(key) - -def _NamedTuple(name, fields): - Tuple = collections.namedtuple(name, " ".join(fields)) - return Tuple(*range(len(fields))) - -domain = _NamedTuple("Domain", ( - "Default", - "Build", - "Cache", - "Collection", - "Data", - "Depends", - "Fetcher", - "Parsing", - "PersistData", - "Provider", - "RunQueue", - "TaskData", - "Util")) -logger = logging.getLogger("BitBake") -loggers = Loggers() -debug_level = DebugLevel() - -# Message control functions -# - -def set_debug_level(level): - for log in loggers.itervalues(): - log.setLevel(logging.NOTSET) - - if level: - logger.setLevel(logging.DEBUG - level + 1) - else: - logger.setLevel(logging.INFO) - -def 
get_debug_level(msgdomain = domain.Default): - if not msgdomain: - level = logger.getEffectiveLevel() - else: - level = loggers[msgdomain].getEffectiveLevel() - return max(0, logging.DEBUG - level + 1) - -def set_verbose(level): - if level: - logger.setLevel(BBLogFormatter.VERBOSE) - else: - logger.setLevel(BBLogFormatter.NOTE) - -def set_debug_domains(domainargs): - for (domainarg, iterator) in groupby(domainargs): - for index, msgdomain in enumerate(domain._fields): - if msgdomain == domainarg: - level = len(tuple(iterator)) - if level: - loggers[index].setLevel(logging.DEBUG - level + 1) - break - else: - warn(None, "Logging domain %s is not valid, ignoring" % domainarg) - -# -# Message handling functions -# - -def debug(level, msgdomain, msg): - warnings.warn("bb.msg.debug will soon be deprecated in favor of the python 'logging' module", - PendingDeprecationWarning, stacklevel=2) - level = logging.DEBUG - (level - 1) - if not msgdomain: - logger.debug(level, msg) - else: - loggers[msgdomain].debug(level, msg) - -def plain(msg): - warnings.warn("bb.msg.plain will soon be deprecated in favor of the python 'logging' module", - PendingDeprecationWarning, stacklevel=2) - logger.plain(msg) - -def note(level, msgdomain, msg): - warnings.warn("bb.msg.note will soon be deprecated in favor of the python 'logging' module", - PendingDeprecationWarning, stacklevel=2) - if level > 1: - if not msgdomain: - logger.verbose(msg) - else: - loggers[msgdomain].verbose(msg) - else: - if not msgdomain: - logger.info(msg) - else: - loggers[msgdomain].info(msg) - -def warn(msgdomain, msg): - warnings.warn("bb.msg.warn will soon be deprecated in favor of the python 'logging' module", - PendingDeprecationWarning, stacklevel=2) - if not msgdomain: - logger.warn(msg) - else: - loggers[msgdomain].warn(msg) - -def error(msgdomain, msg): - warnings.warn("bb.msg.error will soon be deprecated in favor of the python 'logging' module", - PendingDeprecationWarning, stacklevel=2) - if not msgdomain: - logger.error(msg) - else: - loggers[msgdomain].error(msg) - -def fatal(msgdomain, msg): - warnings.warn("bb.msg.fatal will soon be deprecated in favor of raising appropriate exceptions", - PendingDeprecationWarning, stacklevel=2) - if not msgdomain: - logger.critical(msg) - else: - loggers[msgdomain].critical(msg) - sys.exit(1) diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py deleted file mode 100644 index eee8d9cdd..000000000 --- a/bitbake/lib/bb/parse/__init__.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -BitBake Parsers - -File parsers for the BitBake build tools. - -""" - - -# Copyright (C) 2003, 2004 Chris Larson -# Copyright (C) 2003, 2004 Phil Blundell -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
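For context on the msg module deleted above: BBLogFormatter leaves PLAIN records (logging.INFO + 1) unformatted and maps the extra DEBUG/VERBOSE levels back onto BitBake's level names. A minimal sketch of wiring it to a stdlib handler; the handler setup here is an illustrative assumption, not code from this patch:

    import logging
    from bb.msg import BBLogFormatter

    handler = logging.StreamHandler()
    handler.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
    log = logging.getLogger("BitBake")
    log.addHandler(handler)
    log.setLevel(BBLogFormatter.VERBOSE)
    log.info("building")                       # rendered as "NOTE: building"
    log.log(BBLogFormatter.PLAIN, "raw text")  # PLAIN bypasses the format string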
-# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -handlers = [] - -import os -import stat -import logging -import bb -import bb.utils -import bb.siggen - -logger = logging.getLogger("BitBake.Parsing") - -class ParseError(Exception): - """Exception raised when parsing fails""" - -class SkipPackage(Exception): - """Exception raised to skip this package""" - -__mtime_cache = {} -def cached_mtime(f): - if f not in __mtime_cache: - __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] - return __mtime_cache[f] - -def cached_mtime_noerror(f): - if f not in __mtime_cache: - try: - __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] - except OSError: - return 0 - return __mtime_cache[f] - -def update_mtime(f): - __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] - return __mtime_cache[f] - -def mark_dependency(d, f): - if f.startswith('./'): - f = "%s/%s" % (os.getcwd(), f[2:]) - deps = bb.data.getVar('__depends', d) or set() - deps.update([(f, cached_mtime(f))]) - bb.data.setVar('__depends', deps, d) - -def supports(fn, data): - """Returns true if we have a handler for this file, false otherwise""" - for h in handlers: - if h['supports'](fn, data): - return 1 - return 0 - -def handle(fn, data, include = 0): - """Call the handler that is appropriate for this file""" - for h in handlers: - if h['supports'](fn, data): - return h['handle'](fn, data, include) - raise ParseError("%s is not a BitBake file" % fn) - -def init(fn, data): - for h in handlers: - if h['supports'](fn): - return h['init'](data) - -def init_parser(d): - bb.parse.siggen = bb.siggen.init(d) - -def resolve_file(fn, d): - if not os.path.isabs(fn): - bbpath = bb.data.getVar("BBPATH", d, True) - newfn = bb.utils.which(bbpath, fn) - if not newfn: - raise IOError("file %s not found in %s" % (fn, bbpath)) - fn = newfn - - logger.debug(2, "LOAD %s", fn) - return fn - -# Used by OpenEmbedded metadata -__pkgsplit_cache__={} -def vars_from_file(mypkg, d): - if not mypkg: - return (None, None, None) - if mypkg in __pkgsplit_cache__: - return __pkgsplit_cache__[mypkg] - - myfile = os.path.splitext(os.path.basename(mypkg)) - parts = myfile[0].split('_') - __pkgsplit_cache__[mypkg] = parts - if len(parts) > 3: - raise ParseError("Unable to generate default variables from the filename: %s (too many underscores)" % mypkg) - exp = 3 - len(parts) - tmplist = [] - while exp != 0: - exp -= 1 - tmplist.append(None) - parts.extend(tmplist) - return parts - -from bb.parse.parse_py import __version__, ConfHandler, BBHandler diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py deleted file mode 100644 index b968db40b..000000000 --- a/bitbake/lib/bb/parse/ast.py +++ /dev/null @@ -1,446 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" - AbstractSyntaxTree classes for the Bitbake language -""" - -# Copyright (C) 2003, 2004 Chris Larson -# Copyright (C) 2003, 2004 Phil Blundell -# Copyright (C) 2009 Holger Hans Peter Freyther -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -from __future__ import absolute_import -from future_builtins import filter -import re -import string -import logging -import bb -import itertools -from bb import methodpool -from bb.parse import logger - -__parsed_methods__ = bb.methodpool.get_parsed_dict() -_bbversions_re = re.compile(r"\[(?P<from>[0-9]+)-(?P<to>[0-9]+)\]") - -class StatementGroup(list): - def eval(self, data): - for statement in self: - statement.eval(data) - -class AstNode(object): - def __init__(self, filename, lineno): - self.filename = filename - self.lineno = lineno - -class IncludeNode(AstNode): - def __init__(self, filename, lineno, what_file, force): - AstNode.__init__(self, filename, lineno) - self.what_file = what_file - self.force = force - - def eval(self, data): - """ - Include the file and evaluate the statements - """ - s = bb.data.expand(self.what_file, data) - logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s) - - # TODO: Cache those includes... maybe not here though - if self.force: - bb.parse.ConfHandler.include(self.filename, s, data, "include required") - else: - bb.parse.ConfHandler.include(self.filename, s, data, False) - -class ExportNode(AstNode): - def __init__(self, filename, lineno, var): - AstNode.__init__(self, filename, lineno) - self.var = var - - def eval(self, data): - bb.data.setVarFlag(self.var, "export", 1, data) - -class DataNode(AstNode): - """ - Various data related updates. For the sake of sanity - we have one class doing all this. This means that all - this need to be re-evaluated... we might be able to do - that faster with multiple classes.
- """ - def __init__(self, filename, lineno, groupd): - AstNode.__init__(self, filename, lineno) - self.groupd = groupd - - def getFunc(self, key, data): - if 'flag' in self.groupd and self.groupd['flag'] != None: - return bb.data.getVarFlag(key, self.groupd['flag'], data) - else: - return bb.data.getVar(key, data) - - def eval(self, data): - groupd = self.groupd - key = groupd["var"] - if "exp" in groupd and groupd["exp"] != None: - bb.data.setVarFlag(key, "export", 1, data) - if "ques" in groupd and groupd["ques"] != None: - val = self.getFunc(key, data) - if val == None: - val = groupd["value"] - elif "colon" in groupd and groupd["colon"] != None: - e = data.createCopy() - bb.data.update_data(e) - val = bb.data.expand(groupd["value"], e) - elif "append" in groupd and groupd["append"] != None: - val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"]) - elif "prepend" in groupd and groupd["prepend"] != None: - val = "%s %s" % (groupd["value"], (self.getFunc(key, data) or "")) - elif "postdot" in groupd and groupd["postdot"] != None: - val = "%s%s" % ((self.getFunc(key, data) or ""), groupd["value"]) - elif "predot" in groupd and groupd["predot"] != None: - val = "%s%s" % (groupd["value"], (self.getFunc(key, data) or "")) - else: - val = groupd["value"] - - if 'flag' in groupd and groupd['flag'] != None: - bb.data.setVarFlag(key, groupd['flag'], val, data) - elif groupd["lazyques"]: - bb.data.setVarFlag(key, "defaultval", val, data) - else: - bb.data.setVar(key, val, data) - -class MethodNode(AstNode): - def __init__(self, filename, lineno, func_name, body): - AstNode.__init__(self, filename, lineno) - self.func_name = func_name - self.body = body - - def eval(self, data): - if self.func_name == "__anonymous": - funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(string.maketrans('/.+-', '____')))) - if not funcname in bb.methodpool._parsed_fns: - text = "def %s(d):\n" % (funcname) + '\n'.join(self.body) - bb.methodpool.insert_method(funcname, text, self.filename) - anonfuncs = bb.data.getVar('__BBANONFUNCS', data) or [] - anonfuncs.append(funcname) - bb.data.setVar('__BBANONFUNCS', anonfuncs, data) - else: - bb.data.setVarFlag(self.func_name, "func", 1, data) - bb.data.setVar(self.func_name, '\n'.join(self.body), data) - -class PythonMethodNode(AstNode): - def __init__(self, filename, lineno, function, define, body): - AstNode.__init__(self, filename, lineno) - self.function = function - self.define = define - self.body = body - - def eval(self, data): - # Note we will add root to parsedmethods after having parse - # 'this' file. 
This means we will not parse methods from - # bb classes twice - text = '\n'.join(self.body) - if not bb.methodpool.parsed_module(self.define): - bb.methodpool.insert_method(self.define, text, self.filename) - bb.data.setVarFlag(self.function, "func", 1, data) - bb.data.setVarFlag(self.function, "python", 1, data) - bb.data.setVar(self.function, text, data) - -class MethodFlagsNode(AstNode): - def __init__(self, filename, lineno, key, m): - AstNode.__init__(self, filename, lineno) - self.key = key - self.m = m - - def eval(self, data): - if bb.data.getVar(self.key, data): - # clean up old version of this piece of metadata, as its - # flags could cause problems - bb.data.setVarFlag(self.key, 'python', None, data) - bb.data.setVarFlag(self.key, 'fakeroot', None, data) - if self.m.group("py") is not None: - bb.data.setVarFlag(self.key, "python", "1", data) - else: - bb.data.delVarFlag(self.key, "python", data) - if self.m.group("fr") is not None: - bb.data.setVarFlag(self.key, "fakeroot", "1", data) - else: - bb.data.delVarFlag(self.key, "fakeroot", data) - -class ExportFuncsNode(AstNode): - def __init__(self, filename, lineno, fns, classes): - AstNode.__init__(self, filename, lineno) - self.n = fns.split() - self.classes = classes - - def eval(self, data): - for f in self.n: - allvars = [] - allvars.append(f) - allvars.append(self.classes[-1] + "_" + f) - - vars = [[ allvars[0], allvars[1] ]] - if len(self.classes) > 1 and self.classes[-2] is not None: - allvars.append(self.classes[-2] + "_" + f) - vars = [] - vars.append([allvars[2], allvars[1]]) - vars.append([allvars[0], allvars[2]]) - - for (var, calledvar) in vars: - if bb.data.getVar(var, data) and not bb.data.getVarFlag(var, 'export_func', data): - continue - - if bb.data.getVar(var, data): - bb.data.setVarFlag(var, 'python', None, data) - bb.data.setVarFlag(var, 'func', None, data) - - for flag in [ "func", "python" ]: - if bb.data.getVarFlag(calledvar, flag, data): - bb.data.setVarFlag(var, flag, bb.data.getVarFlag(calledvar, flag, data), data) - for flag in [ "dirs" ]: - if bb.data.getVarFlag(var, flag, data): - bb.data.setVarFlag(calledvar, flag, bb.data.getVarFlag(var, flag, data), data) - - if bb.data.getVarFlag(calledvar, "python", data): - bb.data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", data) - else: - bb.data.setVar(var, "\t" + calledvar + "\n", data) - bb.data.setVarFlag(var, 'export_func', '1', data) - -class AddTaskNode(AstNode): - def __init__(self, filename, lineno, func, before, after): - AstNode.__init__(self, filename, lineno) - self.func = func - self.before = before - self.after = after - - def eval(self, data): - var = self.func - if self.func[:3] != "do_": - var = "do_" + self.func - - bb.data.setVarFlag(var, "task", 1, data) - bbtasks = bb.data.getVar('__BBTASKS', data) or [] - if not var in bbtasks: - bbtasks.append(var) - bb.data.setVar('__BBTASKS', bbtasks, data) - - existing = bb.data.getVarFlag(var, "deps", data) or [] - if self.after is not None: - # set up deps for function - for entry in self.after.split(): - if entry not in existing: - existing.append(entry) - bb.data.setVarFlag(var, "deps", existing, data) - if self.before is not None: - # set up things that depend on this func - for entry in self.before.split(): - existing = bb.data.getVarFlag(entry, "deps", data) or [] - if var not in existing: - bb.data.setVarFlag(entry, "deps", [var] + existing, data) - -class BBHandlerNode(AstNode): - def __init__(self, filename, lineno, fns): - AstNode.__init__(self, filename, lineno) - 
self.hs = fns.split() - - def eval(self, data): - bbhands = bb.data.getVar('__BBHANDLERS', data) or [] - for h in self.hs: - bbhands.append(h) - bb.data.setVarFlag(h, "handler", 1, data) - bb.data.setVar('__BBHANDLERS', bbhands, data) - -class InheritNode(AstNode): - def __init__(self, filename, lineno, classes): - AstNode.__init__(self, filename, lineno) - self.classes = classes - - def eval(self, data): - bb.parse.BBHandler.inherit(self.classes, data) - -def handleInclude(statements, filename, lineno, m, force): - statements.append(IncludeNode(filename, lineno, m.group(1), force)) - -def handleExport(statements, filename, lineno, m): - statements.append(ExportNode(filename, lineno, m.group(1))) - -def handleData(statements, filename, lineno, groupd): - statements.append(DataNode(filename, lineno, groupd)) - -def handleMethod(statements, filename, lineno, func_name, body): - statements.append(MethodNode(filename, lineno, func_name, body)) - -def handlePythonMethod(statements, filename, lineno, funcname, root, body): - statements.append(PythonMethodNode(filename, lineno, funcname, root, body)) - -def handleMethodFlags(statements, filename, lineno, key, m): - statements.append(MethodFlagsNode(filename, lineno, key, m)) - -def handleExportFuncs(statements, filename, lineno, m, classes): - statements.append(ExportFuncsNode(filename, lineno, m.group(1), classes)) - -def handleAddTask(statements, filename, lineno, m): - func = m.group("func") - before = m.group("before") - after = m.group("after") - if func is None: - return - - statements.append(AddTaskNode(filename, lineno, func, before, after)) - -def handleBBHandlers(statements, filename, lineno, m): - statements.append(BBHandlerNode(filename, lineno, m.group(1))) - -def handleInherit(statements, filename, lineno, m): - classes = m.group(1) - statements.append(InheritNode(filename, lineno, classes.split())) - -def finalize(fn, d, variant = None): - bb.data.expandKeys(d) - bb.data.update_data(d) - code = [] - for funcname in bb.data.getVar("__BBANONFUNCS", d) or []: - code.append("%s(d)" % funcname) - bb.utils.simple_exec("\n".join(code), {"d": d}) - bb.data.update_data(d) - - all_handlers = {} - for var in bb.data.getVar('__BBHANDLERS', d) or []: - # try to add the handler - handler = bb.data.getVar(var, d) - bb.event.register(var, handler) - - tasklist = bb.data.getVar('__BBTASKS', d) or [] - bb.build.add_tasks(tasklist, d) - - bb.parse.siggen.finalise(fn, d, variant) - - bb.event.fire(bb.event.RecipeParsed(fn), d) - -def _create_variants(datastores, names, function): - def create_variant(name, orig_d, arg = None): - new_d = bb.data.createCopy(orig_d) - function(arg or name, new_d) - datastores[name] = new_d - - for variant, variant_d in datastores.items(): - for name in names: - if not variant: - # Based on main recipe - create_variant(name, variant_d) - else: - create_variant("%s-%s" % (variant, name), variant_d, name) - -def _expand_versions(versions): - def expand_one(version, start, end): - for i in xrange(start, end + 1): - ver = _bbversions_re.sub(str(i), version, 1) - yield ver - - versions = iter(versions) - while True: - try: - version = next(versions) - except StopIteration: - break - - range_ver = _bbversions_re.search(version) - if not range_ver: - yield version - else: - newversions = expand_one(version, int(range_ver.group("from")), - int(range_ver.group("to"))) - versions = itertools.chain(newversions, versions) - -def multi_finalize(fn, d): - appends = (d.getVar("__BBAPPEND", True) or "").split() - for append in appends: - 
logger.debug(2, "Appending .bbappend file %s to %s", append, fn) - bb.parse.BBHandler.handle(append, d, True) - - safe_d = d - d = bb.data.createCopy(safe_d) - try: - finalize(fn, d) - except bb.parse.SkipPackage: - bb.data.setVar("__SKIPPED", True, d) - datastores = {"": safe_d} - - versions = (d.getVar("BBVERSIONS", True) or "").split() - if versions: - pv = orig_pv = d.getVar("PV", True) - baseversions = {} - - def verfunc(ver, d, pv_d = None): - if pv_d is None: - pv_d = d - - overrides = d.getVar("OVERRIDES", True).split(":") - pv_d.setVar("PV", ver) - overrides.append(ver) - bpv = baseversions.get(ver) or orig_pv - pv_d.setVar("BPV", bpv) - overrides.append(bpv) - d.setVar("OVERRIDES", ":".join(overrides)) - - versions = list(_expand_versions(versions)) - for pos, version in enumerate(list(versions)): - try: - pv, bpv = version.split(":", 2) - except ValueError: - pass - else: - versions[pos] = pv - baseversions[pv] = bpv - - if pv in versions and not baseversions.get(pv): - versions.remove(pv) - else: - pv = versions.pop() - - # This is necessary because our existing main datastore - # has already been finalized with the old PV, we need one - # that's been finalized with the new PV. - d = bb.data.createCopy(safe_d) - verfunc(pv, d, safe_d) - try: - finalize(fn, d) - except bb.parse.SkipPackage: - bb.data.setVar("__SKIPPED", True, d) - - _create_variants(datastores, versions, verfunc) - - extended = d.getVar("BBCLASSEXTEND", True) or "" - if extended: - pn = d.getVar("PN", True) - def extendfunc(name, d): - d.setVar("PN", "%s-%s" % (pn, name)) - bb.parse.BBHandler.inherit([name], d) - - safe_d.setVar("BBCLASSEXTEND", extended) - _create_variants(datastores, extended.split(), extendfunc) - - for variant, variant_d in datastores.iteritems(): - if variant: - try: - finalize(fn, variant_d, variant) - except bb.parse.SkipPackage: - bb.data.setVar("__SKIPPED", True, variant_d) - - if len(datastores) > 1: - variants = filter(None, datastores.iterkeys()) - safe_d.setVar("__VARIANTS", " ".join(variants)) - - datastores[""] = d - return datastores diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py deleted file mode 100644 index 402cd07e2..000000000 --- a/bitbake/lib/bb/parse/parse_py/BBHandler.py +++ /dev/null @@ -1,254 +0,0 @@ -#!/usr/bin/env python -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" - class for handling .bb files - - Reads a .bb file and obtains its metadata - -""" - - -# Copyright (C) 2003, 2004 Chris Larson -# Copyright (C) 2003, 2004 Phil Blundell -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -from __future__ import absolute_import -import re, bb, os -import logging -import bb.build, bb.utils -from bb import data - -from . import ConfHandler -from .. 
import resolve_file, ast, logger -from .ConfHandler import include, init - -# For compatibility -bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"]) - -__func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" ) -__inherit_regexp__ = re.compile( r"inherit\s+(.+)" ) -__export_func_regexp__ = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" ) -__addtask_regexp__ = re.compile("addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*") -__addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" ) -__def_regexp__ = re.compile( r"def\s+(\w+).*:" ) -__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" ) - - -__infunc__ = "" -__inpython__ = False -__body__ = [] -__classname__ = "" -classes = [ None, ] - -cached_statements = {} - -# We need to indicate EOF to the feeder. This code is so messy that -# factoring it out to a close_parse_file method is out of question. -# We will use the IN_PYTHON_EOF as an indicator to just close the method -# -# The two parts using it are tightly integrated anyway -IN_PYTHON_EOF = -9999999999999 - - - -def supports(fn, d): - """Return True if fn has a supported extension""" - return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"] - -def inherit(files, d): - __inherit_cache = data.getVar('__inherit_cache', d) or [] - fn = "" - lineno = 0 - for file in files: - file = data.expand(file, d) - if not os.path.isabs(file) and not file.endswith(".bbclass"): - file = os.path.join('classes', '%s.bbclass' % file) - - if not file in __inherit_cache: - logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file) - __inherit_cache.append( file ) - data.setVar('__inherit_cache', __inherit_cache, d) - include(fn, file, d, "inherit") - __inherit_cache = data.getVar('__inherit_cache', d) or [] - -def get_statements(filename, absolute_filename, base_name): - global cached_statements - - try: - return cached_statements[absolute_filename] - except KeyError: - file = open(absolute_filename, 'r') - statements = ast.StatementGroup() - - lineno = 0 - while True: - lineno = lineno + 1 - s = file.readline() - if not s: break - s = s.rstrip() - feeder(lineno, s, filename, base_name, statements) - if __inpython__: - # add a blank line to close out any python definition - feeder(IN_PYTHON_EOF, "", filename, base_name, statements) - - if filename.endswith(".bbclass") or filename.endswith(".inc"): - cached_statements[absolute_filename] = statements - return statements - -def handle(fn, d, include): - global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__ - __body__ = [] - __infunc__ = "" - __classname__ = "" - __residue__ = [] - - - if include == 0: - logger.debug(2, "BB %s: handle(data)", fn) - else: - logger.debug(2, "BB %s: handle(data, include)", fn) - - base_name = os.path.basename(fn) - (root, ext) = os.path.splitext(base_name) - init(d) - - if ext == ".bbclass": - __classname__ = root - classes.append(__classname__) - __inherit_cache = data.getVar('__inherit_cache', d) or [] - if not fn in __inherit_cache: - __inherit_cache.append(fn) - data.setVar('__inherit_cache', __inherit_cache, d) - - if include != 0: - oldfile = data.getVar('FILE', d) - else: - oldfile = None - - abs_fn = resolve_file(fn, d) - - if include: - bb.parse.mark_dependency(d, abs_fn) - - # actual loading - statements = get_statements(fn, abs_fn, base_name) - - # DONE WITH PARSING...
time to evaluate - if ext != ".bbclass": - data.setVar('FILE', fn, d) - - statements.eval(d) - - if ext == ".bbclass": - classes.remove(__classname__) - else: - if include == 0: - return ast.multi_finalize(fn, d) - - if oldfile: - bb.data.setVar("FILE", oldfile, d) - - # we have parsed the bb class now - if ext == ".bbclass" or ext == ".inc": - bb.methodpool.get_parsed_dict()[base_name] = 1 - - return d - -def feeder(lineno, s, fn, root, statements): - global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, classes, bb, __residue__ - if __infunc__: - if s == '}': - __body__.append('') - ast.handleMethod(statements, fn, lineno, __infunc__, __body__) - __infunc__ = "" - __body__ = [] - else: - __body__.append(s) - return - - if __inpython__: - m = __python_func_regexp__.match(s) - if m and lineno != IN_PYTHON_EOF: - __body__.append(s) - return - else: - ast.handlePythonMethod(statements, fn, lineno, __inpython__, - root, __body__) - __body__ = [] - __inpython__ = False - - if lineno == IN_PYTHON_EOF: - return - - - # Skip empty lines - if s == '': - return - - if s[0] == '#': - if len(__residue__) != 0 and __residue__[0][0] != "#": - bb.error("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s)) - - if s[-1] == '\\': - __residue__.append(s[:-1]) - return - - s = "".join(__residue__) + s - __residue__ = [] - - # Skip comments - if s[0] == '#': - return - - m = __func_start_regexp__.match(s) - if m: - __infunc__ = m.group("func") or "__anonymous" - ast.handleMethodFlags(statements, fn, lineno, __infunc__, m) - return - - m = __def_regexp__.match(s) - if m: - __body__.append(s) - __inpython__ = m.group(1) - - return - - m = __export_func_regexp__.match(s) - if m: - ast.handleExportFuncs(statements, fn, lineno, m, classes) - return - - m = __addtask_regexp__.match(s) - if m: - ast.handleAddTask(statements, fn, lineno, m) - return - - m = __addhandler_regexp__.match(s) - if m: - ast.handleBBHandlers(statements, fn, lineno, m) - return - - m = __inherit_regexp__.match(s) - if m: - ast.handleInherit(statements, fn, lineno, m) - return - - return ConfHandler.feeder(lineno, s, fn, statements) - -# Add us to the handlers list -from .. import handlers -handlers.append({'supports': supports, 'handle': handle, 'init': init}) -del handlers diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py deleted file mode 100644 index fc239a354..000000000 --- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py +++ /dev/null @@ -1,139 +0,0 @@ -#!/usr/bin/env python -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" - class for handling configuration data files - - Reads a .conf file and obtains its metadata - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# Copyright (C) 2003, 2004 Phil Blundell -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -import re, bb.data, os -import logging -import bb.utils -from bb.parse import ParseError, resolve_file, ast, logger - -#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$") -__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$") -__include_regexp__ = re.compile( r"include\s+(.+)" ) -__require_regexp__ = re.compile( r"require\s+(.+)" ) -__export_regexp__ = re.compile( r"export\s+(.+)" ) - -def init(data): - topdir = bb.data.getVar('TOPDIR', data) - if not topdir: - bb.data.setVar('TOPDIR', os.getcwd(), data) - - -def supports(fn, d): - return fn[-5:] == ".conf" - -def include(oldfn, fn, data, error_out): - """ - error_out If True a ParseError will be raised if the to be included - config-files could not be included. - """ - if oldfn == fn: # prevent infinite recursion - return None - - import bb - fn = bb.data.expand(fn, data) - oldfn = bb.data.expand(oldfn, data) - - if not os.path.isabs(fn): - dname = os.path.dirname(oldfn) - bbpath = "%s:%s" % (dname, bb.data.getVar("BBPATH", data, 1)) - abs_fn = bb.utils.which(bbpath, fn) - if abs_fn: - fn = abs_fn - - from bb.parse import handle - try: - ret = handle(fn, data, True) - except IOError: - if error_out: - raise ParseError("Could not %(error_out)s file %(fn)s" % vars() ) - logger.debug(2, "CONF file '%s' not found", fn) - -def handle(fn, data, include): - init(data) - - if include == 0: - oldfile = None - else: - oldfile = bb.data.getVar('FILE', data) - - abs_fn = resolve_file(fn, data) - f = open(abs_fn, 'r') - - if include: - bb.parse.mark_dependency(data, abs_fn) - - statements = ast.StatementGroup() - lineno = 0 - while True: - lineno = lineno + 1 - s = f.readline() - if not s: break - w = s.strip() - if not w: continue # skip empty lines - s = s.rstrip() - if s[0] == '#': continue # skip comments - while s[-1] == '\\': - s2 = f.readline()[:-1].strip() - lineno = lineno + 1 - s = s[:-1] + s2 - feeder(lineno, s, fn, statements) - - # DONE WITH PARSING...
time to evaluate - bb.data.setVar('FILE', fn, data) - statements.eval(data) - if oldfile: - bb.data.setVar('FILE', oldfile, data) - - return data - -def feeder(lineno, s, fn, statements): - m = __config_regexp__.match(s) - if m: - groupd = m.groupdict() - ast.handleData(statements, fn, lineno, groupd) - return - - m = __include_regexp__.match(s) - if m: - ast.handleInclude(statements, fn, lineno, m, False) - return - - m = __require_regexp__.match(s) - if m: - ast.handleInclude(statements, fn, lineno, m, True) - return - - m = __export_regexp__.match(s) - if m: - ast.handleExport(statements, fn, lineno, m) - return - - raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s)); - -# Add us to the handlers list -from bb.parse import handlers -handlers.append({'supports': supports, 'handle': handle, 'init': init}) -del handlers diff --git a/bitbake/lib/bb/parse/parse_py/__init__.py b/bitbake/lib/bb/parse/parse_py/__init__.py deleted file mode 100644 index 3e658d0de..000000000 --- a/bitbake/lib/bb/parse/parse_py/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -""" -BitBake Parsers - -File parsers for the BitBake build tools. - -""" - -# Copyright (C) 2003, 2004 Chris Larson -# Copyright (C) 2003, 2004 Phil Blundell -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Based on functions from the base bb module, Copyright 2003 Holger Schurig - -from __future__ import absolute_import -from . import ConfHandler -from . import BBHandler - -__version__ = '1.0' diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py deleted file mode 100644 index da0575231..000000000 --- a/bitbake/lib/bb/persist_data.py +++ /dev/null @@ -1,194 +0,0 @@ -"""BitBake Persistent Data Store - -Used to store data in a central location such that other threads/tasks can -access them at some future date. Acts as a convenience wrapper around sqlite, -currently, providing a key/value store accessed by 'domain'. -""" - -# Copyright (C) 2007 Richard Purdie -# Copyright (C) 2010 Chris Larson -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
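The docstring above describes persist_data as a sqlite-backed key/value store addressed by 'domain'. A minimal usage sketch against the API deleted below; the datastore d and the domain name are illustrative assumptions, not taken from this patch:

    import bb.persist_data

    # d is assumed to be a populated BitBake datastore that sets
    # PERSISTENT_DIR or CACHE, which persist() uses to locate the database.
    psd = bb.persist_data.persist(d)              # SQLData over bb_persist_data.sqlite3
    table = psd["EXAMPLE_DOMAIN"]                 # SQLTable: dict-like view of one table
    table["git://example.com/repo.git"] = "1234"  # INSERT or UPDATE by key
    assert table["git://example.com/repo.git"] == "1234"
    del table["git://example.com/repo.git"]       # DELETE by key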
- -import collections -import logging -import os.path -import sys -import warnings -import bb.msg, bb.data, bb.utils - -try: - import sqlite3 -except ImportError: - from pysqlite2 import dbapi2 as sqlite3 - -sqlversion = sqlite3.sqlite_version_info -if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3): - raise Exception("sqlite3 version 3.3.0 or later is required.") - - -logger = logging.getLogger("BitBake.PersistData") - - -class SQLTable(collections.MutableMapping): - """Object representing a table/domain in the database""" - def __init__(self, cursor, table): - self.cursor = cursor - self.table = table - - self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" - % table) - - def _execute(self, *query): - """Execute a query, waiting to acquire a lock if necessary""" - count = 0 - while True: - try: - return self.cursor.execute(*query) - except sqlite3.OperationalError as exc: - if 'database is locked' in str(exc) and count < 500: - count = count + 1 - continue - raise - - def __getitem__(self, key): - data = self._execute("SELECT * from %s where key=?;" % - self.table, [key]) - for row in data: - return row[1] - - def __delitem__(self, key): - self._execute("DELETE from %s where key=?;" % self.table, [key]) - - def __setitem__(self, key, value): - data = self._execute("SELECT * from %s where key=?;" % - self.table, [key]) - exists = len(list(data)) - if exists: - self._execute("UPDATE %s SET value=? WHERE key=?;" % self.table, - [value, key]) - else: - self._execute("INSERT into %s(key, value) values (?, ?);" % - self.table, [key, value]) - - def __contains__(self, key): - return key in set(self) - - def __len__(self): - data = self._execute("SELECT COUNT(key) FROM %s;" % self.table) - for row in data: - return row[0] - - def __iter__(self): - data = self._execute("SELECT key FROM %s;" % self.table) - for row in data: - yield row[0] - - def iteritems(self): - data = self._execute("SELECT * FROM %s;" % self.table) - for row in data: - yield row[0], row[1] - - def itervalues(self): - data = self._execute("SELECT value FROM %s;" % self.table) - for row in data: - yield row[0] - - -class SQLData(object): - """Object representing the persistent data""" - def __init__(self, filename): - bb.utils.mkdirhier(os.path.dirname(filename)) - - self.filename = filename - self.connection = sqlite3.connect(filename, timeout=5, - isolation_level=None) - self.cursor = self.connection.cursor() - self._tables = {} - - def __getitem__(self, table): - if not isinstance(table, basestring): - raise TypeError("table argument must be a string, not '%s'" % - type(table)) - - if table in self._tables: - return self._tables[table] - else: - tableobj = self._tables[table] = SQLTable(self.cursor, table) - return tableobj - - def __delitem__(self, table): - if table in self._tables: - del self._tables[table] - self.cursor.execute("DROP TABLE IF EXISTS %s;" % table) - - -class PersistData(object): - """Deprecated representation of the bitbake persistent data store""" - def __init__(self, d): - warnings.warn("Use of PersistData will be deprecated in the future", - category=PendingDeprecationWarning, - stacklevel=2) - - self.data = persist(d) - logger.debug(1, "Using '%s' as the persistent data cache", - self.data.filename) - - def addDomain(self, domain): - """ - Add a domain (pending deprecation) - """ - return self.data[domain] - - def delDomain(self, domain): - """ - Removes a domain and all the data it contains - """ - del self.data[domain] - - def getKeyValues(self, domain): - """ - Return 
a list of key + value pairs for a domain - """ - return self.data[domain].items() - - def getValue(self, domain, key): - """ - Return the value of a key for a domain - """ - return self.data[domain][key] - - def setValue(self, domain, key, value): - """ - Sets the value of a key for a domain - """ - self.data[domain][key] = value - - def delValue(self, domain, key): - """ - Deletes a key/value pair - """ - del self.data[domain][key] - - -def persist(d): - """Convenience factory for construction of SQLData based upon metadata""" - cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or - bb.data.getVar("CACHE", d, True)) - if not cachedir: - logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable") - sys.exit(1) - - cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3") - return SQLData(cachefile) diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py deleted file mode 100644 index 4150d80e0..000000000 --- a/bitbake/lib/bb/process.py +++ /dev/null @@ -1,109 +0,0 @@ -import logging -import signal -import subprocess - -logger = logging.getLogger('BitBake.Process') - -def subprocess_setup(): - # Python installs a SIGPIPE handler by default. This is usually not what - # non-Python subprocesses expect. - signal.signal(signal.SIGPIPE, signal.SIG_DFL) - -class CmdError(RuntimeError): - def __init__(self, command, msg=None): - self.command = command - self.msg = msg - - def __str__(self): - if not isinstance(self.command, basestring): - cmd = subprocess.list2cmdline(self.command) - else: - cmd = self.command - - msg = "Execution of '%s' failed" % cmd - if self.msg: - msg += ': %s' % self.msg - return msg - -class NotFoundError(CmdError): - def __str__(self): - return CmdError.__str__(self) + ": command not found" - -class ExecutionError(CmdError): - def __init__(self, command, exitcode, stdout = None, stderr = None): - CmdError.__init__(self, command) - self.exitcode = exitcode - self.stdout = stdout - self.stderr = stderr - - def __str__(self): - message = "" - if self.stderr: - message += self.stderr - if self.stdout: - message += self.stdout - if message: - message = ":\n" + message - return (CmdError.__str__(self) + - " with exit code %s" % self.exitcode + message) - -class Popen(subprocess.Popen): - defaults = { - "close_fds": True, - "preexec_fn": subprocess_setup, - "stdout": subprocess.PIPE, - "stderr": subprocess.STDOUT, - "stdin": subprocess.PIPE, - "shell": False, - } - - def __init__(self, *args, **kwargs): - options = dict(self.defaults) - options.update(kwargs) - subprocess.Popen.__init__(self, *args, **options) - -def _logged_communicate(pipe, log, input): - if pipe.stdin: - if input is not None: - pipe.stdin.write(input) - pipe.stdin.close() - - bufsize = 512 - outdata, errdata = [], [] - while pipe.poll() is None: - if pipe.stdout is not None: - data = pipe.stdout.read(bufsize) - if data is not None: - outdata.append(data) - log.write(data) - - if pipe.stderr is not None: - data = pipe.stderr.read(bufsize) - if data is not None: - errdata.append(data) - log.write(data) - return ''.join(outdata), ''.join(errdata) - -def run(cmd, input=None, log=None, **options): - """Convenience function to run a command and return its output, raising an - exception when the command fails""" - - if isinstance(cmd, basestring) and not "shell" in options: - options["shell"] = True - - try: - pipe = Popen(cmd, **options) - except OSError, exc: - if exc.errno == 2: - raise NotFoundError(cmd) - else: - raise CmdError(cmd, exc) - - if log: - stdout, stderr = 
_logged_communicate(pipe, log, input) - else: - stdout, stderr = pipe.communicate(input) - - if pipe.returncode != 0: - raise ExecutionError(cmd, pipe.returncode, stdout, stderr) - return stdout, stderr diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py deleted file mode 100644 index dcba9ae25..000000000 --- a/bitbake/lib/bb/providers.py +++ /dev/null @@ -1,330 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# Copyright (C) 2003, 2004 Chris Larson -# Copyright (C) 2003, 2004 Phil Blundell -# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer -# Copyright (C) 2005 Holger Hans Peter Freyther -# Copyright (C) 2005 ROAD GmbH -# Copyright (C) 2006 Richard Purdie -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -import re -import logging -from bb import data, utils -import bb - -logger = logging.getLogger("BitBake.Provider") - -class NoProvider(Exception): - """Exception raised when no provider of a build dependency can be found""" - -class NoRProvider(Exception): - """Exception raised when no provider of a runtime dependency can be found""" - - -def sortPriorities(pn, dataCache, pkg_pn = None): - """ - Reorder pkg_pn by file priority and default preference - """ - - if not pkg_pn: - pkg_pn = dataCache.pkg_pn - - files = pkg_pn[pn] - priorities = {} - for f in files: - priority = dataCache.bbfile_priority[f] - preference = dataCache.pkg_dp[f] - if priority not in priorities: - priorities[priority] = {} - if preference not in priorities[priority]: - priorities[priority][preference] = [] - priorities[priority][preference].append(f) - tmp_pn = [] - for pri in sorted(priorities, lambda a, b: a - b): - tmp_pref = [] - for pref in sorted(priorities[pri], lambda a, b: b - a): - tmp_pref.extend(priorities[pri][pref]) - tmp_pn = [tmp_pref] + tmp_pn - - return tmp_pn - -def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r): - """ - Check if the version pe,pv,pr is the preferred one. - If there is preferred version defined and ends with '%', then pv has to start with that version after removing the '%' - """ - if (pr == preferred_r or preferred_r == None): - if (pe == preferred_e or preferred_e == None): - if preferred_v == pv: - return True - if preferred_v != None and preferred_v.endswith('%') and pv.startswith(preferred_v[:len(preferred_v)-1]): - return True - return False - -def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): - """ - Find the first provider in pkg_pn with a PREFERRED_VERSION set. 
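
The '%' suffix rule above is the one subtle part of preferredVersionMatch: a trailing '%' turns the preferred version into a prefix match. A standalone sketch of the comparison (Python 3, with made-up version tuples rather than real dataCache entries):

    def version_match(pe, pv, pr, pref_e, pref_v, pref_r):
        # None acts as a wildcard for the preferred epoch/revision.
        if pref_r is not None and pr != pref_r:
            return False
        if pref_e is not None and pe != pref_e:
            return False
        if pv == pref_v:
            return True
        # Trailing '%': prefix match on the version string.
        return bool(pref_v) and pref_v.endswith('%') and pv.startswith(pref_v[:-1])

    assert version_match(None, '1.4.1', 'r0', None, '1.4%', None)
    assert version_match(None, '1.40',  'r0', None, '1.4%', None)  # also matches!

Note that the prefix match is purely textual, so PREFERRED_VERSION = "1.4%" selects 1.40 as readily as 1.4.1.
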
- """ - - preferred_file = None - preferred_ver = None - - localdata = data.createCopy(cfgData) - bb.data.setVar('OVERRIDES', "pn-%s:%s:%s" % (pn, pn, data.getVar('OVERRIDES', localdata)), localdata) - bb.data.update_data(localdata) - - preferred_v = bb.data.getVar('PREFERRED_VERSION_%s' % pn, localdata, True) - if preferred_v: - m = re.match('(\d+:)*(.*)(_.*)*', preferred_v) - if m: - if m.group(1): - preferred_e = int(m.group(1)[:-1]) - else: - preferred_e = None - preferred_v = m.group(2) - if m.group(3): - preferred_r = m.group(3)[1:] - else: - preferred_r = None - else: - preferred_e = None - preferred_r = None - - for file_set in pkg_pn: - for f in file_set: - pe, pv, pr = dataCache.pkg_pepvpr[f] - if preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r): - preferred_file = f - preferred_ver = (pe, pv, pr) - break - if preferred_file: - break; - if preferred_r: - pv_str = '%s-%s' % (preferred_v, preferred_r) - else: - pv_str = preferred_v - if not (preferred_e is None): - pv_str = '%s:%s' % (preferred_e, pv_str) - itemstr = "" - if item: - itemstr = " (for item %s)" % item - if preferred_file is None: - logger.info("preferred version %s of %s not available%s", pv_str, pn, itemstr) - else: - logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr) - - return (preferred_ver, preferred_file) - - -def findLatestProvider(pn, cfgData, dataCache, file_set): - """ - Return the highest version of the providers in file_set. - Take default preferences into account. - """ - latest = None - latest_p = 0 - latest_f = None - for file_name in file_set: - pe, pv, pr = dataCache.pkg_pepvpr[file_name] - dp = dataCache.pkg_dp[file_name] - - if (latest is None) or ((latest_p == dp) and (utils.vercmp(latest, (pe, pv, pr)) < 0)) or (dp > latest_p): - latest = (pe, pv, pr) - latest_f = file_name - latest_p = dp - - return (latest, latest_f) - - -def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): - """ - If there is a PREFERRED_VERSION, find the highest-priority bbfile - providing that version. If not, find the latest version provided by - an bbfile in the highest-priority set. - """ - - sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn) - # Find the highest priority provider with a PREFERRED_VERSION set - (preferred_ver, preferred_file) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item) - # Find the latest version of the highest priority provider - (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0]) - - if preferred_file is None: - preferred_file = latest_f - preferred_ver = latest - - return (latest, latest_f, preferred_ver, preferred_file) - - -def _filterProviders(providers, item, cfgData, dataCache): - """ - Take a list of providers and filter/reorder according to the - environment variables and previous build results - """ - eligible = [] - preferred_versions = {} - sortpkg_pn = {} - - # The order of providers depends on the order of the files on the disk - # up to here. Sort pkg_pn to make dependency issues reproducible rather - # than effectively random. 
- providers.sort() - - # Collate providers by PN - pkg_pn = {} - for p in providers: - pn = dataCache.pkg_fn[p] - if pn not in pkg_pn: - pkg_pn[pn] = [] - pkg_pn[pn].append(p) - - logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys()) - - # First add PREFERRED_VERSIONS - for pn in pkg_pn: - sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn) - preferred_versions[pn] = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item) - if preferred_versions[pn][1]: - eligible.append(preferred_versions[pn][1]) - - # Now add latest versions - for pn in sortpkg_pn: - if pn in preferred_versions and preferred_versions[pn][1]: - continue - preferred_versions[pn] = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[pn][0]) - eligible.append(preferred_versions[pn][1]) - - if len(eligible) == 0: - logger.error("no eligible providers for %s", item) - return 0 - - # If pn == item, give it a slight default preference - # This means PREFERRED_PROVIDER_foobar defaults to foobar if available - for p in providers: - pn = dataCache.pkg_fn[p] - if pn != item: - continue - (newvers, fn) = preferred_versions[pn] - if not fn in eligible: - continue - eligible.remove(fn) - eligible = [fn] + eligible - - return eligible - - -def filterProviders(providers, item, cfgData, dataCache): - """ - Take a list of providers and filter/reorder according to the - environment variables and previous build results - Takes a "normal" target item - """ - - eligible = _filterProviders(providers, item, cfgData, dataCache) - - prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % item, cfgData, 1) - if prefervar: - dataCache.preferred[item] = prefervar - - foundUnique = False - if item in dataCache.preferred: - for p in eligible: - pn = dataCache.pkg_fn[p] - if dataCache.preferred[item] == pn: - logger.verbose("selecting %s to satisfy %s due to PREFERRED_PROVIDERS", pn, item) - eligible.remove(p) - eligible = [p] + eligible - foundUnique = True - break - - logger.debug(1, "sorted providers for %s are: %s", item, eligible) - - return eligible, foundUnique - -def filterProvidersRunTime(providers, item, cfgData, dataCache): - """ - Take a list of providers and filter/reorder according to the - environment variables and previous build results - Takes a "runtime" target item - """ - - eligible = _filterProviders(providers, item, cfgData, dataCache) - - # Should use dataCache.preferred here? 
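
The reordering trick used here (and again in filterProviders below) is a stable move-to-front; a compact restatement with hypothetical recipe file names:

    def prefer(eligible, fn):
        # Stable move-to-front: fn becomes the first choice, everything
        # else keeps its relative order.
        if fn in eligible:
            eligible = [fn] + [e for e in eligible if e != fn]
        return eligible

    print(prefer(['b_2.0.bb', 'a_1.0.bb', 'c_1.0.bb'], 'a_1.0.bb'))
    # ['a_1.0.bb', 'b_2.0.bb', 'c_1.0.bb']
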
- preferred = [] - preferred_vars = [] - pns = {} - for p in eligible: - pns[dataCache.pkg_fn[p]] = p - for p in eligible: - pn = dataCache.pkg_fn[p] - provides = dataCache.pn_provides[pn] - for provide in provides: - prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1) - logger.verbose("checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys()) - if prefervar in pns and pns[prefervar] not in preferred: - var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar) - logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var) - preferred_vars.append(var) - pref = pns[prefervar] - eligible.remove(pref) - eligible = [pref] + eligible - preferred.append(pref) - break - - numberPreferred = len(preferred) - - if numberPreferred > 1: - logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s", item, preferred, preferred_vars) - - logger.debug(1, "sorted providers for %s are: %s", item, eligible) - - return eligible, numberPreferred - -regexp_cache = {} - -def getRuntimeProviders(dataCache, rdepend): - """ - Return any providers of runtime dependency - """ - rproviders = [] - - if rdepend in dataCache.rproviders: - rproviders += dataCache.rproviders[rdepend] - - if rdepend in dataCache.packages: - rproviders += dataCache.packages[rdepend] - - if rproviders: - return rproviders - - # Only search dynamic packages if we can't find anything in other variables - for pattern in dataCache.packages_dynamic: - pattern = pattern.replace('+', "\+") - if pattern in regexp_cache: - regexp = regexp_cache[pattern] - else: - try: - regexp = re.compile(pattern) - except: - logger.error("Error parsing regular expression '%s'", pattern) - raise - regexp_cache[pattern] = regexp - if regexp.match(rdepend): - rproviders += dataCache.packages_dynamic[pattern] - - return rproviders diff --git a/bitbake/lib/bb/pysh/__init__.py b/bitbake/lib/bb/pysh/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/bitbake/lib/bb/pysh/builtin.py b/bitbake/lib/bb/pysh/builtin.py deleted file mode 100644 index 25ad22eb7..000000000 --- a/bitbake/lib/bb/pysh/builtin.py +++ /dev/null @@ -1,710 +0,0 @@ -# builtin.py - builtins and utilities definitions for pysh. -# -# Copyright 2007 Patrick Mezard -# -# This software may be used and distributed according to the terms -# of the GNU General Public License, incorporated herein by reference. - -"""Builtin and internal utilities implementations. - -- Beware not to use python interpreter environment as if it were the shell -environment. For instance, commands working directory must be explicitely handled -through env['PWD'] instead of relying on python working directory. 
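
getRuntimeProviders below compiles each PACKAGES_DYNAMIC pattern once and memoizes it in regexp_cache; the pattern-cache idea in isolation (hypothetical package names):

    import re

    regexp_cache = {}

    def match_dynamic(pattern, rdepend):
        # '+' is common in package names (gtk+) and must not be read
        # as a regex quantifier, hence the escaping.
        pattern = pattern.replace('+', r'\+')
        regexp = regexp_cache.get(pattern)
        if regexp is None:
            regexp = regexp_cache[pattern] = re.compile(pattern)
        return bool(regexp.match(rdepend))

    print(match_dynamic('gtk+-.*', 'gtk+-locale-fr'))   # True
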
-""" -import errno -import optparse -import os -import re -import subprocess -import sys -import time - -def has_subprocess_bug(): - return getattr(subprocess, 'list2cmdline') and \ - ( subprocess.list2cmdline(['']) == '' or \ - subprocess.list2cmdline(['foo|bar']) == 'foo|bar') - -# Detect python bug 1634343: "subprocess swallows empty arguments under win32" -# -# Also detect: "[ 1710802 ] subprocess must escape redirection characters under win32" -# -if has_subprocess_bug(): - import subprocess_fix - subprocess.list2cmdline = subprocess_fix.list2cmdline - -from sherrors import * - -class NonExitingParser(optparse.OptionParser): - """OptionParser default behaviour upon error is to print the error message and - exit. Raise a utility error instead. - """ - def error(self, msg): - raise UtilityError(msg) - -#------------------------------------------------------------------------------- -# set special builtin -#------------------------------------------------------------------------------- -OPT_SET = NonExitingParser(usage="set - set or unset options and positional parameters") -OPT_SET.add_option( '-f', action='store_true', dest='has_f', default=False, - help='The shell shall disable pathname expansion.') -OPT_SET.add_option('-e', action='store_true', dest='has_e', default=False, - help="""When this option is on, if a simple command fails for any of the \ - reasons listed in Consequences of Shell Errors or returns an exit status \ - value >0, and is not part of the compound list following a while, until, \ - or if keyword, and is not a part of an AND or OR list, and is not a \ - pipeline preceded by the ! reserved word, then the shell shall immediately \ - exit.""") -OPT_SET.add_option('-x', action='store_true', dest='has_x', default=False, - help="""The shell shall write to standard error a trace for each command \ - after it expands the command and before it executes it. 
It is unspecified \ - whether the command that turns tracing off is traced.""") - -def builtin_set(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - option, args = OPT_SET.parse_args(args) - env = interp.get_env() - - if option.has_f: - env.set_opt('-f') - if option.has_e: - env.set_opt('-e') - if option.has_x: - env.set_opt('-x') - return 0 - -#------------------------------------------------------------------------------- -# shift special builtin -#------------------------------------------------------------------------------- -def builtin_shift(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - params = interp.get_env().get_positional_args() - if args: - try: - n = int(args[0]) - if n > len(params): - raise ValueError() - except ValueError: - return 1 - else: - n = 1 - - params[:n] = [] - interp.get_env().set_positional_args(params) - return 0 - -#------------------------------------------------------------------------------- -# export special builtin -#------------------------------------------------------------------------------- -OPT_EXPORT = NonExitingParser(usage="set - set or unset options and positional parameters") -OPT_EXPORT.add_option('-p', action='store_true', dest='has_p', default=False) - -def builtin_export(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - option, args = OPT_EXPORT.parse_args(args) - if option.has_p: - raise NotImplementedError() - - for arg in args: - try: - name, value = arg.split('=', 1) - except ValueError: - name, value = arg, None - env = interp.get_env().export(name, value) - - return 0 - -#------------------------------------------------------------------------------- -# return special builtin -#------------------------------------------------------------------------------- -def builtin_return(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - res = 0 - if args: - try: - res = int(args[0]) - except ValueError: - res = 0 - if not 0<=res<=255: - res = 0 - - # BUG: should be last executed command exit code - raise ReturnSignal(res) - -#------------------------------------------------------------------------------- -# trap special builtin -#------------------------------------------------------------------------------- -def builtin_trap(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - if len(args) < 2: - stderr.write('trap: usage: trap [[arg] signal_spec ...]\n') - return 2 - - action = args[0] - for sig in args[1:]: - try: - env.traps[sig] = action - except Exception, e: - stderr.write('trap: %s\n' % str(e)) - return 0 - -#------------------------------------------------------------------------------- -# unset special builtin -#------------------------------------------------------------------------------- -OPT_UNSET = NonExitingParser("unset - unset values and attributes of variables and functions") -OPT_UNSET.add_option( '-f', action='store_true', dest='has_f', default=False) -OPT_UNSET.add_option( '-v', action='store_true', dest='has_v', default=False) - -def 
builtin_unset(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - option, args = OPT_UNSET.parse_args(args) - - status = 0 - env = interp.get_env() - for arg in args: - try: - if option.has_f: - env.remove_function(arg) - else: - del env[arg] - except KeyError: - pass - except VarAssignmentError: - status = 1 - - return status - -#------------------------------------------------------------------------------- -# wait special builtin -#------------------------------------------------------------------------------- -def builtin_wait(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - return interp.wait([int(arg) for arg in args]) - -#------------------------------------------------------------------------------- -# cat utility -#------------------------------------------------------------------------------- -def utility_cat(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - if not args: - args = ['-'] - - status = 0 - for arg in args: - if arg == '-': - data = stdin.read() - else: - path = os.path.join(env['PWD'], arg) - try: - f = file(path, 'rb') - try: - data = f.read() - finally: - f.close() - except IOError, e: - if e.errno != errno.ENOENT: - raise - status = 1 - continue - stdout.write(data) - stdout.flush() - return status - -#------------------------------------------------------------------------------- -# cd utility -#------------------------------------------------------------------------------- -OPT_CD = NonExitingParser("cd - change the working directory") - -def utility_cd(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - option, args = OPT_CD.parse_args(args) - env = interp.get_env() - - directory = None - printdir = False - if not args: - home = env.get('HOME') - if home: - # Unspecified, do nothing - return 0 - else: - directory = home - elif len(args)==1: - directory = args[0] - if directory=='-': - if 'OLDPWD' not in env: - raise UtilityError("OLDPWD not set") - printdir = True - directory = env['OLDPWD'] - else: - raise UtilityError("too many arguments") - - curpath = None - # Absolute directories will be handled correctly by the os.path.join call. 
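
utility_cat above illustrates the convention every pysh utility follows: paths resolve against the shell's env['PWD'], never the Python process's working directory, and '-' (or no arguments at all) means the wrapped stdin. A Python 3 condensation of the same logic:

    import os, sys

    def cat(args, stdin=sys.stdin, stdout=sys.stdout, pwd='.'):
        status = 0
        for arg in (args or ['-']):
            if arg == '-':
                data = stdin.read()
            else:
                try:
                    # Resolve against the shell's PWD, not os.getcwd().
                    with open(os.path.join(pwd, arg)) as f:
                        data = f.read()
                except FileNotFoundError:
                    status = 1
                    continue
            stdout.write(data)
        return status
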
- if not directory.startswith('.') and not directory.startswith('..'): - cdpaths = env.get('CDPATH', '.').split(';') - for cdpath in cdpaths: - p = os.path.join(cdpath, directory) - if os.path.isdir(p): - curpath = p - break - - if curpath is None: - curpath = directory - curpath = os.path.join(env['PWD'], directory) - - env['OLDPWD'] = env['PWD'] - env['PWD'] = curpath - if printdir: - stdout.write('%s\n' % curpath) - return 0 - -#------------------------------------------------------------------------------- -# colon utility -#------------------------------------------------------------------------------- -def utility_colon(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - return 0 - -#------------------------------------------------------------------------------- -# echo utility -#------------------------------------------------------------------------------- -def utility_echo(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - # Echo only takes arguments, no options. Use printf if you need fancy stuff. - output = ' '.join(args) + '\n' - stdout.write(output) - stdout.flush() - return 0 - -#------------------------------------------------------------------------------- -# egrep utility -#------------------------------------------------------------------------------- -# egrep is usually a shell script. -# Unfortunately, pysh does not support shell scripts *with arguments* right now, -# so the redirection is implemented here, assuming grep is available. -def utility_egrep(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - return run_command('grep', ['-E'] + args, interp, env, stdin, stdout, - stderr, debugflags) - -#------------------------------------------------------------------------------- -# env utility -#------------------------------------------------------------------------------- -def utility_env(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - if args and args[0]=='-i': - raise NotImplementedError('env: -i option is not implemented') - - i = 0 - for arg in args: - if '=' not in arg: - break - # Update the current environment - name, value = arg.split('=', 1) - env[name] = value - i += 1 - - if args[i:]: - # Find then execute the specified interpreter - utility = env.find_in_path(args[i]) - if not utility: - return 127 - args[i:i+1] = utility - name = args[i] - args = args[i+1:] - try: - return run_command(name, args, interp, env, stdin, stdout, stderr, - debugflags) - except UtilityError: - stderr.write('env: failed to execute %s' % ' '.join([name]+args)) - return 126 - else: - for pair in env.get_variables().iteritems(): - stdout.write('%s=%s\n' % pair) - return 0 - -#------------------------------------------------------------------------------- -# exit utility -#------------------------------------------------------------------------------- -def utility_exit(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - res = None - if args: - try: - res = int(args[0]) - except ValueError: - res = 
None - if not 0<=res<=255: - res = None - - if res is None: - # BUG: should be last executed command exit code - res = 0 - - raise ExitSignal(res) - -#------------------------------------------------------------------------------- -# fgrep utility -#------------------------------------------------------------------------------- -# see egrep -def utility_fgrep(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - return run_command('grep', ['-F'] + args, interp, env, stdin, stdout, - stderr, debugflags) - -#------------------------------------------------------------------------------- -# gunzip utility -#------------------------------------------------------------------------------- -# see egrep -def utility_gunzip(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - return run_command('gzip', ['-d'] + args, interp, env, stdin, stdout, - stderr, debugflags) - -#------------------------------------------------------------------------------- -# kill utility -#------------------------------------------------------------------------------- -def utility_kill(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - for arg in args: - pid = int(arg) - status = subprocess.call(['pskill', '/T', str(pid)], - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - # pskill is asynchronous, hence the stupid polling loop - while 1: - p = subprocess.Popen(['pslist', str(pid)], - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - output = p.communicate()[0] - if ('process %d was not' % pid) in output: - break - time.sleep(1) - return status - -#------------------------------------------------------------------------------- -# mkdir utility -#------------------------------------------------------------------------------- -OPT_MKDIR = NonExitingParser("mkdir - make directories.") -OPT_MKDIR.add_option('-p', action='store_true', dest='has_p', default=False) - -def utility_mkdir(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - # TODO: implement umask - # TODO: implement proper utility error report - option, args = OPT_MKDIR.parse_args(args) - for arg in args: - path = os.path.join(env['PWD'], arg) - if option.has_p: - try: - os.makedirs(path) - except IOError, e: - if e.errno != errno.EEXIST: - raise - else: - os.mkdir(path) - return 0 - -#------------------------------------------------------------------------------- -# netstat utility -#------------------------------------------------------------------------------- -def utility_netstat(name, args, interp, env, stdin, stdout, stderr, debugflags): - # Do you really expect me to implement netstat ? - # This empty form is enough for Mercurial tests since it's - # supposed to generate nothing upon success. Faking this test - # is not a big deal either. 
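
One detail worth flagging in utility_mkdir above: os.makedirs raises OSError, not IOError, when the target already exists, so on Python 2 the except clause shown would not actually swallow EEXIST. Python 3 spells the whole -p behaviour directly:

    import os

    def mkdir_p(path):
        # Idempotent "mkdir -p": no error if path already exists.
        os.makedirs(path, exist_ok=True)
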
- if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - return 0 - -#------------------------------------------------------------------------------- -# pwd utility -#------------------------------------------------------------------------------- -OPT_PWD = NonExitingParser("pwd - return working directory name") -OPT_PWD.add_option('-L', action='store_true', dest='has_L', default=True, - help="""If the PWD environment variable contains an absolute pathname of \ - the current directory that does not contain the filenames dot or dot-dot, \ - pwd shall write this pathname to standard output. Otherwise, the -L option \ - shall behave as the -P option.""") -OPT_PWD.add_option('-P', action='store_true', dest='has_L', default=False, - help="""The absolute pathname written shall not contain filenames that, in \ - the context of the pathname, refer to files of type symbolic link.""") - -def utility_pwd(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - option, args = OPT_PWD.parse_args(args) - stdout.write('%s\n' % env['PWD']) - return 0 - -#------------------------------------------------------------------------------- -# printf utility -#------------------------------------------------------------------------------- -RE_UNESCAPE = re.compile(r'(\\x[a-zA-Z0-9]{2}|\\[0-7]{1,3}|\\.)') - -def utility_printf(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - def replace(m): - assert m.group() - g = m.group()[1:] - if g.startswith('x'): - return chr(int(g[1:], 16)) - if len(g) <= 3 and len([c for c in g if c in '01234567']) == len(g): - # Yay, an octal number - return chr(int(g, 8)) - return { - 'a': '\a', - 'b': '\b', - 'f': '\f', - 'n': '\n', - 'r': '\r', - 't': '\t', - 'v': '\v', - '\\': '\\', - }.get(g) - - # Convert escape sequences - format = re.sub(RE_UNESCAPE, replace, args[0]) - stdout.write(format % tuple(args[1:])) - return 0 - -#------------------------------------------------------------------------------- -# true utility -#------------------------------------------------------------------------------- -def utility_true(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - return 0 - -#------------------------------------------------------------------------------- -# sed utility -#------------------------------------------------------------------------------- -RE_SED = re.compile(r'^s(.).*\1[a-zA-Z]*$') - -# cygwin sed fails with some expressions when they do not end with a single space. -# see unit tests for details. Interestingly, the same expressions works perfectly -# in cygwin shell. 
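
The RE_UNESCAPE substitution in utility_printf above is a compact way to decode C-style escapes before handing the format string to Python's % operator. The same decoder, runnable on its own (unknown escapes pass through here, where the original's dict lookup would return None):

    import re

    RE_UNESCAPE = re.compile(r'(\\x[a-zA-Z0-9]{2}|\\[0-7]{1,3}|\\.)')

    def unescape(m):
        g = m.group()[1:]
        if g.startswith('x'):
            return chr(int(g[1:], 16))                    # \x41 -> 'A'
        if len(g) <= 3 and all(c in '01234567' for c in g):
            return chr(int(g, 8))                         # \101 -> 'A'
        return {'a': '\a', 'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r',
                't': '\t', 'v': '\v', '\\': '\\'}.get(g, g)

    print(re.sub(RE_UNESCAPE, unescape, r'a\tb\n'), end='')   # "a<TAB>b"
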
-def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - # Scan pattern arguments and append a space if necessary - for i in xrange(len(args)): - if not RE_SED.search(args[i]): - continue - args[i] = args[i] + ' ' - - return run_command(name, args, interp, env, stdin, stdout, - stderr, debugflags) - -#------------------------------------------------------------------------------- -# sleep utility -#------------------------------------------------------------------------------- -def utility_sleep(name, args, interp, env, stdin, stdout, stderr, debugflags): - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - time.sleep(int(args[0])) - return 0 - -#------------------------------------------------------------------------------- -# sort utility -#------------------------------------------------------------------------------- -OPT_SORT = NonExitingParser("sort - sort, merge, or sequence check text files") - -def utility_sort(name, args, interp, env, stdin, stdout, stderr, debugflags): - - def sort(path): - if path == '-': - lines = stdin.readlines() - else: - try: - f = file(path) - try: - lines = f.readlines() - finally: - f.close() - except IOError, e: - stderr.write(str(e) + '\n') - return 1 - - if lines and lines[-1][-1]!='\n': - lines[-1] = lines[-1] + '\n' - return lines - - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - option, args = OPT_SORT.parse_args(args) - alllines = [] - - if len(args)<=0: - args += ['-'] - - # Load all files lines - curdir = os.getcwd() - try: - os.chdir(env['PWD']) - for path in args: - alllines += sort(path) - finally: - os.chdir(curdir) - - alllines.sort() - for line in alllines: - stdout.write(line) - return 0 - -#------------------------------------------------------------------------------- -# hg utility -#------------------------------------------------------------------------------- - -hgcommands = [ - 'add', - 'addremove', - 'commit', 'ci', - 'debugrename', - 'debugwalk', - 'falabala', # Dummy command used in a mercurial test - 'incoming', - 'locate', - 'pull', - 'push', - 'qinit', - 'remove', 'rm', - 'rename', 'mv', - 'revert', - 'showconfig', - 'status', 'st', - 'strip', - ] - -def rewriteslashes(name, args): - # Several hg commands output file paths, rewrite the separators - if len(args) > 1 and name.lower().endswith('python') \ - and args[0].endswith('hg'): - for cmd in hgcommands: - if cmd in args[1:]: - return True - - # svn output contains many paths with OS specific separators. - # Normalize these to unix paths. 
- base = os.path.basename(name) - if base.startswith('svn'): - return True - - return False - -def rewritehg(output): - if not output: - return output - # Rewrite os specific messages - output = output.replace(': The system cannot find the file specified', - ': No such file or directory') - output = re.sub(': Access is denied.*$', ': Permission denied', output) - output = output.replace(': No connection could be made because the target machine actively refused it', - ': Connection refused') - return output - - -def run_command(name, args, interp, env, stdin, stdout, - stderr, debugflags): - # Execute the command - if 'debug-utility' in debugflags: - print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') - - hgbin = interp.options().hgbinary - ishg = hgbin and ('hg' in name or args and 'hg' in args[0]) - unixoutput = 'cygwin' in name or ishg - - exec_env = env.get_variables() - try: - # BUG: comparing file descriptor is clearly not a reliable way to tell - # whether they point on the same underlying object. But in pysh limited - # scope this is usually right, we do not expect complicated redirections - # besides usual 2>&1. - # Still there is one case we have but cannot deal with is when stdout - # and stderr are redirected *by pysh caller*. This the reason for the - # --redirect pysh() option. - # Now, we want to know they are the same because we sometimes need to - # transform the command output, mostly remove CR-LF to ensure that - # command output is unix-like. Cygwin utilies are a special case because - # they explicitely set their output streams to binary mode, so we have - # nothing to do. For all others commands, we have to guess whether they - # are sending text data, in which case the transformation must be done. - # Again, the NUL character test is unreliable but should be enough for - # hg tests. - redirected = stdout.fileno()==stderr.fileno() - if not redirected: - p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env, - stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - else: - p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env, - stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, err = p.communicate() - except WindowsError, e: - raise UtilityError(str(e)) - - if not unixoutput: - def encode(s): - if '\0' in s: - return s - return s.replace('\r\n', '\n') - else: - encode = lambda s: s - - if rewriteslashes(name, args): - encode1_ = encode - def encode(s): - s = encode1_(s) - s = s.replace('\\\\', '\\') - s = s.replace('\\', '/') - return s - - if ishg: - encode2_ = encode - def encode(s): - return rewritehg(encode2_(s)) - - stdout.write(encode(out)) - if not redirected: - stderr.write(encode(err)) - return p.returncode - diff --git a/bitbake/lib/bb/pysh/interp.py b/bitbake/lib/bb/pysh/interp.py deleted file mode 100644 index efe5181e1..000000000 --- a/bitbake/lib/bb/pysh/interp.py +++ /dev/null @@ -1,1367 +0,0 @@ -# interp.py - shell interpreter for pysh. -# -# Copyright 2007 Patrick Mezard -# -# This software may be used and distributed according to the terms -# of the GNU General Public License, incorporated herein by reference. - -"""Implement the shell interpreter. - -Most references are made to "The Open Group Base Specifications Issue 6". - -""" -# TODO: document the fact input streams must implement fileno() so Popen will work correctly. -# it requires non-stdin stream to be implemented as files. Still to be tested... -# DOC: pathsep is used in PATH instead of ':'. 
Clearly, there are path syntax issues here. -# TODO: stop command execution upon error. -# TODO: sort out the filename/io_number mess. It should be possible to use filenames only. -# TODO: review subshell implementation -# TODO: test environment cloning for non-special builtins -# TODO: set -x should not rebuild commands from tokens, assignments/redirections are lost -# TODO: unit test for variable assignment -# TODO: test error management wrt error type/utility type -# TODO: test for binary output everywhere -# BUG: debug-parsing does not pass log file to PLY. Maybe a PLY upgrade is necessary. -import base64 -import cPickle as pickle -import errno -import glob -import os -import re -import subprocess -import sys -import tempfile - -try: - s = set() - del s -except NameError: - from Set import Set as set - -import builtin -from sherrors import * -import pyshlex -import pyshyacc - -def mappend(func, *args, **kargs): - """Like map but assume func returns a list. Returned lists are merged into - a single one. - """ - return reduce(lambda a,b: a+b, map(func, *args, **kargs), []) - -class FileWrapper: - """File object wrapper to ease debugging. - - Allow mode checking and implement file duplication through a simple - reference counting scheme. Not sure the latter is really useful since - only real file descriptors can be used. - """ - def __init__(self, mode, file, close=True): - if mode not in ('r', 'w', 'a'): - raise IOError('invalid mode: %s' % mode) - self._mode = mode - self._close = close - if isinstance(file, FileWrapper): - if file._refcount[0] <= 0: - raise IOError(0, 'Error') - self._refcount = file._refcount - self._refcount[0] += 1 - self._file = file._file - else: - self._refcount = [1] - self._file = file - - def dup(self): - return FileWrapper(self._mode, self, self._close) - - def fileno(self): - """fileno() should be only necessary for input streams.""" - return self._file.fileno() - - def read(self, size=-1): - if self._mode!='r': - raise IOError(0, 'Error') - return self._file.read(size) - - def readlines(self, *args, **kwargs): - return self._file.readlines(*args, **kwargs) - - def write(self, s): - if self._mode not in ('w', 'a'): - raise IOError(0, 'Error') - return self._file.write(s) - - def flush(self): - self._file.flush() - - def close(self): - if not self._refcount: - return - assert self._refcount[0] > 0 - - self._refcount[0] -= 1 - if self._refcount[0] == 0: - self._mode = 'c' - if self._close: - self._file.close() - self._refcount = None - - def mode(self): - return self._mode - - def __getattr__(self, name): - if name == 'name': - self.name = getattr(self._file, name) - return self.name - else: - raise AttributeError(name) - - def __del__(self): - self.close() - - -def win32_open_devnull(mode): - return open('NUL', mode) - - -class Redirections: - """Stores open files and their mapping to pseudo-sh file descriptor. 
- """ - # BUG: redirections are not handled correctly: 1>&3 2>&3 3>&4 does - # not make 1 to redirect to 4 - def __init__(self, stdin=None, stdout=None, stderr=None): - self._descriptors = {} - if stdin is not None: - self._add_descriptor(0, stdin) - if stdout is not None: - self._add_descriptor(1, stdout) - if stderr is not None: - self._add_descriptor(2, stderr) - - def add_here_document(self, interp, name, content, io_number=None): - if io_number is None: - io_number = 0 - - if name==pyshlex.unquote_wordtree(name): - content = interp.expand_here_document(('TOKEN', content)) - - # Write document content in a temporary file - tmp = tempfile.TemporaryFile() - try: - tmp.write(content) - tmp.flush() - tmp.seek(0) - self._add_descriptor(io_number, FileWrapper('r', tmp)) - except: - tmp.close() - raise - - def add(self, interp, op, filename, io_number=None): - if op not in ('<', '>', '>|', '>>', '>&'): - # TODO: add descriptor duplication and here_documents - raise RedirectionError('Unsupported redirection operator "%s"' % op) - - if io_number is not None: - io_number = int(io_number) - - if (op == '>&' and filename.isdigit()) or filename=='-': - # No expansion for file descriptors, quote them if you want a filename - fullname = filename - else: - if filename.startswith('/'): - # TODO: win32 kludge - if filename=='/dev/null': - fullname = 'NUL' - else: - # TODO: handle absolute pathnames, they are unlikely to exist on the - # current platform (win32 for instance). - raise NotImplementedError() - else: - fullname = interp.expand_redirection(('TOKEN', filename)) - if not fullname: - raise RedirectionError('%s: ambiguous redirect' % filename) - # Build absolute path based on PWD - fullname = os.path.join(interp.get_env()['PWD'], fullname) - - if op=='<': - return self._add_input_redirection(interp, fullname, io_number) - elif op in ('>', '>|'): - clobber = ('>|'==op) - return self._add_output_redirection(interp, fullname, io_number, clobber) - elif op=='>>': - return self._add_output_appending(interp, fullname, io_number) - elif op=='>&': - return self._dup_output_descriptor(fullname, io_number) - - def close(self): - if self._descriptors is not None: - for desc in self._descriptors.itervalues(): - desc.flush() - desc.close() - self._descriptors = None - - def stdin(self): - return self._descriptors[0] - - def stdout(self): - return self._descriptors[1] - - def stderr(self): - return self._descriptors[2] - - def clone(self): - clone = Redirections() - for desc, fileobj in self._descriptors.iteritems(): - clone._descriptors[desc] = fileobj.dup() - return clone - - def _add_output_redirection(self, interp, filename, io_number, clobber): - if io_number is None: - # io_number default to standard output - io_number = 1 - - if not clobber and interp.get_env().has_opt('-C') and os.path.isfile(filename): - # File already exist in no-clobber mode, bail out - raise RedirectionError('File "%s" already exists' % filename) - - # Open and register - self._add_file_descriptor(io_number, filename, 'w') - - def _add_output_appending(self, interp, filename, io_number): - if io_number is None: - io_number = 1 - self._add_file_descriptor(io_number, filename, 'a') - - def _add_input_redirection(self, interp, filename, io_number): - if io_number is None: - io_number = 0 - self._add_file_descriptor(io_number, filename, 'r') - - def _add_file_descriptor(self, io_number, filename, mode): - try: - if filename.startswith('/'): - if filename=='/dev/null': - f = win32_open_devnull(mode+'b') - else: - # TODO: handle absolute 
pathnames, they are unlikely to exist on the - # current platform (win32 for instance). - raise NotImplementedError('cannot open absolute path %s' % repr(filename)) - else: - f = file(filename, mode+'b') - except IOError, e: - raise RedirectionError(str(e)) - - wrapper = None - try: - wrapper = FileWrapper(mode, f) - f = None - self._add_descriptor(io_number, wrapper) - except: - if f: f.close() - if wrapper: wrapper.close() - raise - - def _dup_output_descriptor(self, source_fd, dest_fd): - if source_fd is None: - source_fd = 1 - self._dup_file_descriptor(source_fd, dest_fd, 'w') - - def _dup_file_descriptor(self, source_fd, dest_fd, mode): - source_fd = int(source_fd) - if source_fd not in self._descriptors: - raise RedirectionError('"%s" is not a valid file descriptor' % str(source_fd)) - source = self._descriptors[source_fd] - - if source.mode()!=mode: - raise RedirectionError('Descriptor %s cannot be duplicated in mode "%s"' % (str(source), mode)) - - if dest_fd=='-': - # Close the source descriptor - del self._descriptors[source_fd] - source.close() - else: - dest_fd = int(dest_fd) - if dest_fd not in self._descriptors: - raise RedirectionError('Cannot replace file descriptor %s' % str(dest_fd)) - - dest = self._descriptors[dest_fd] - if dest.mode()!=mode: - raise RedirectionError('Descriptor %s cannot be cannot be redirected in mode "%s"' % (str(dest), mode)) - - self._descriptors[dest_fd] = source.dup() - dest.close() - - def _add_descriptor(self, io_number, file): - io_number = int(io_number) - - if io_number in self._descriptors: - # Close the current descriptor - d = self._descriptors[io_number] - del self._descriptors[io_number] - d.close() - - self._descriptors[io_number] = file - - def __str__(self): - names = [('%d=%r' % (k, getattr(v, 'name', None))) for k,v - in self._descriptors.iteritems()] - names = ','.join(names) - return 'Redirections(%s)' % names - - def __del__(self): - self.close() - -def cygwin_to_windows_path(path): - """Turn /cygdrive/c/foo into c:/foo, or return path if it - is not a cygwin path. - """ - if not path.startswith('/cygdrive/'): - return path - path = path[len('/cygdrive/'):] - path = path[:1] + ':' + path[1:] - return path - -def win32_to_unix_path(path): - if path is not None: - path = path.replace('\\', '/') - return path - -_RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?') -_SHEBANG_CMDS = { - '/usr/bin/env': 'env', - '/bin/sh': 'pysh', - 'python': 'python', -} - -def resolve_shebang(path, ignoreshell=False): - """Return a list of arguments as shebang interpreter call or an empty list - if path does not refer to an executable script. - See . - - ignoreshell - set to True to ignore sh shebangs. Return an empty list instead. - """ - try: - f = file(path) - try: - # At most 80 characters in the first line - header = f.read(80).splitlines()[0] - finally: - f.close() - - m = _RE_SHEBANG.search(header) - if not m: - return [] - cmd, arg = m.group(1,2) - if os.path.isfile(cmd): - # Keep this one, the hg script for instance contains a weird windows - # shebang referencing the current python install. 
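
resolve_shebang builds its argument list from whatever _RE_SHEBANG extracts; for reference, the two capture groups behave like this:

    import re

    _RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?')

    for line in ('#!/bin/sh', '#!/usr/bin/env python', '#! /usr/bin/python -u'):
        print(_RE_SHEBANG.search(line).group(1, 2))
    # ('/bin/sh', None)
    # ('/usr/bin/env', 'python')
    # ('/usr/bin/python', '-u')
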
- cmdfile = os.path.basename(cmd).lower() - if cmdfile == 'python.exe': - cmd = 'python' - pass - elif cmd not in _SHEBANG_CMDS: - raise CommandNotFound('Unknown interpreter "%s" referenced in '\ - 'shebang' % header) - cmd = _SHEBANG_CMDS.get(cmd) - if cmd is None or (ignoreshell and cmd == 'pysh'): - return [] - if arg is None: - return [cmd, win32_to_unix_path(path)] - return [cmd, arg, win32_to_unix_path(path)] - except IOError, e: - if e.errno!=errno.ENOENT and \ - (e.errno!=errno.EPERM and not os.path.isdir(path)): # Opening a directory raises EPERM - raise - return [] - -def win32_find_in_path(name, path): - if isinstance(path, str): - path = path.split(os.pathsep) - - exts = os.environ.get('PATHEXT', '').lower().split(os.pathsep) - for p in path: - p_name = os.path.join(p, name) - - prefix = resolve_shebang(p_name) - if prefix: - return prefix - - for ext in exts: - p_name_ext = p_name + ext - if os.path.exists(p_name_ext): - return [win32_to_unix_path(p_name_ext)] - return [] - -class Traps(dict): - def __setitem__(self, key, value): - if key not in ('EXIT',): - raise NotImplementedError() - super(Traps, self).__setitem__(key, value) - -# IFS white spaces character class -_IFS_WHITESPACES = (' ', '\t', '\n') - -class Environment: - """Environment holds environment variables, export table, function - definitions and whatever is defined in 2.12 "Shell Execution Environment", - redirection excepted. - """ - def __init__(self, pwd): - self._opt = set() #Shell options - - self._functions = {} - self._env = {'?': '0', '#': '0'} - self._exported = set([ - 'HOME', 'IFS', 'PATH' - ]) - - # Set environment vars with side-effects - self._ifs_ws = None # Set of IFS whitespace characters - self._ifs_re = None # Regular expression used to split between words using IFS classes - self['IFS'] = ''.join(_IFS_WHITESPACES) #Default environment values - self['PWD'] = pwd - self.traps = Traps() - - def clone(self, subshell=False): - env = Environment(self['PWD']) - env._opt = set(self._opt) - for k,v in self.get_variables().iteritems(): - if k in self._exported: - env.export(k,v) - elif subshell: - env[k] = v - - if subshell: - env._functions = dict(self._functions) - - return env - - def __getitem__(self, key): - if key in ('@', '*', '-', '$'): - raise NotImplementedError('%s is not implemented' % repr(key)) - return self._env[key] - - def get(self, key, defval=None): - try: - return self[key] - except KeyError: - return defval - - def __setitem__(self, key, value): - if key=='IFS': - # Update the whitespace/non-whitespace classes - self._update_ifs(value) - elif key=='PWD': - pwd = os.path.abspath(value) - if not os.path.isdir(pwd): - raise VarAssignmentError('Invalid directory %s' % value) - value = pwd - elif key in ('?', '!'): - value = str(int(value)) - self._env[key] = value - - def __delitem__(self, key): - if key in ('IFS', 'PWD', '?'): - raise VarAssignmentError('%s cannot be unset' % key) - del self._env[key] - - def __contains__(self, item): - return item in self._env - - def set_positional_args(self, args): - """Set the content of 'args' as positional argument from 1 to len(args). - Return previous argument as a list of strings. 
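
set_positional_args above shows how pysh keeps $1..$N and $# in the same dictionary as ordinary variables, all as strings. A toy version of the save-and-replace dance:

    class Positionals:
        def __init__(self):
            self.env = {'#': '0'}

        def set_args(self, args):
            # Pop the old $1..$N (returned so the caller can restore
            # them later), then install the new ones and update $#.
            prev = [self.env.pop(str(i + 1))
                    for i in range(int(self.env['#']))]
            for i, arg in enumerate(args):
                self.env[str(i + 1)] = str(arg)
            self.env['#'] = str(len(args))
            return prev

    p = Positionals()
    p.set_args(['a', 'b'])
    print(sorted(p.env.items()))   # [('#', '2'), ('1', 'a'), ('2', 'b')]
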
- """ - # Save and remove previous arguments - prevargs = [] - for i in xrange(int(self._env['#'])): - i = str(i+1) - prevargs.append(self._env[i]) - del self._env[i] - self._env['#'] = '0' - - #Set new ones - for i,arg in enumerate(args): - self._env[str(i+1)] = str(arg) - self._env['#'] = str(len(args)) - - return prevargs - - def get_positional_args(self): - return [self._env[str(i+1)] for i in xrange(int(self._env['#']))] - - def get_variables(self): - return dict(self._env) - - def export(self, key, value=None): - if value is not None: - self[key] = value - self._exported.add(key) - - def get_exported(self): - return [(k,self._env.get(k)) for k in self._exported] - - def split_fields(self, word): - if not self._ifs_ws or not word: - return [word] - return re.split(self._ifs_re, word) - - def _update_ifs(self, value): - """Update the split_fields related variables when IFS character set is - changed. - """ - # TODO: handle NULL IFS - - # Separate characters in whitespace and non-whitespace - chars = set(value) - ws = [c for c in chars if c in _IFS_WHITESPACES] - nws = [c for c in chars if c not in _IFS_WHITESPACES] - - # Keep whitespaces in a string for left and right stripping - self._ifs_ws = ''.join(ws) - - # Build a regexp to split fields - trailing = '[' + ''.join([re.escape(c) for c in ws]) + ']' - if nws: - # First, the single non-whitespace occurence. - nws = '[' + ''.join([re.escape(c) for c in nws]) + ']' - nws = '(?:' + trailing + '*' + nws + trailing + '*' + '|' + trailing + '+)' - else: - # Then mix all parts with quantifiers - nws = trailing + '+' - self._ifs_re = re.compile(nws) - - def has_opt(self, opt, val=None): - return (opt, val) in self._opt - - def set_opt(self, opt, val=None): - self._opt.add((opt, val)) - - def find_in_path(self, name, pwd=False): - path = self._env.get('PATH', '').split(os.pathsep) - if pwd: - path[:0] = [self['PWD']] - if os.name == 'nt': - return win32_find_in_path(name, self._env.get('PATH', '')) - else: - raise NotImplementedError() - - def define_function(self, name, body): - if not is_name(name): - raise ShellSyntaxError('%s is not a valid function name' % repr(name)) - self._functions[name] = body - - def remove_function(self, name): - del self._functions[name] - - def is_function(self, name): - return name in self._functions - - def get_function(self, name): - return self._functions.get(name) - - -name_charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_' -name_charset = dict(zip(name_charset,name_charset)) - -def match_name(s): - """Return the length in characters of the longest prefix made of name - allowed characters in s. - """ - for i,c in enumerate(s): - if c not in name_charset: - return s[:i] - return s - -def is_name(s): - return len([c for c in s if c not in name_charset])<=0 - -def is_special_param(c): - return len(c)==1 and c in ('@','*','#','?','-','$','!','0') - -def utility_not_implemented(name, *args, **kwargs): - raise NotImplementedError('%s utility is not implemented' % name) - - -class Utility: - """Define utilities properties: - func -- utility callable. See builtin module for utility samples. - is_special -- see XCU 2.8. 
- """ - def __init__(self, func, is_special=0): - self.func = func - self.is_special = bool(is_special) - - -def encodeargs(args): - def encodearg(s): - lines = base64.encodestring(s) - lines = [l.splitlines()[0] for l in lines] - return ''.join(lines) - - s = pickle.dumps(args) - return encodearg(s) - -def decodeargs(s): - s = base64.decodestring(s) - return pickle.loads(s) - - -class GlobError(Exception): - pass - -class Options: - def __init__(self): - # True if Mercurial operates with binary streams - self.hgbinary = True - -class Interpreter: - # Implementation is very basic: the execute() method just makes a DFS on the - # AST and execute nodes one by one. Nodes are tuple (name,obj) where name - # is a string identifier and obj the AST element returned by the parser. - # - # Handler are named after the node identifiers. - # TODO: check node names and remove the switch in execute with some - # dynamic getattr() call to find node handlers. - """Shell interpreter. - - The following debugging flags can be passed: - debug-parsing - enable PLY debugging. - debug-tree - print the generated AST. - debug-cmd - trace command execution before word expansion, plus exit status. - debug-utility - trace utility execution. - """ - - # List supported commands. - COMMANDS = { - 'cat': Utility(builtin.utility_cat,), - 'cd': Utility(builtin.utility_cd,), - ':': Utility(builtin.utility_colon,), - 'echo': Utility(builtin.utility_echo), - 'env': Utility(builtin.utility_env), - 'exit': Utility(builtin.utility_exit), - 'export': Utility(builtin.builtin_export, is_special=1), - 'egrep': Utility(builtin.utility_egrep), - 'fgrep': Utility(builtin.utility_fgrep), - 'gunzip': Utility(builtin.utility_gunzip), - 'kill': Utility(builtin.utility_kill), - 'mkdir': Utility(builtin.utility_mkdir), - 'netstat': Utility(builtin.utility_netstat), - 'printf': Utility(builtin.utility_printf), - 'pwd': Utility(builtin.utility_pwd), - 'return': Utility(builtin.builtin_return, is_special=1), - 'sed': Utility(builtin.utility_sed,), - 'set': Utility(builtin.builtin_set,), - 'shift': Utility(builtin.builtin_shift,), - 'sleep': Utility(builtin.utility_sleep,), - 'sort': Utility(builtin.utility_sort,), - 'trap': Utility(builtin.builtin_trap, is_special=1), - 'true': Utility(builtin.utility_true), - 'unset': Utility(builtin.builtin_unset, is_special=1), - 'wait': Utility(builtin.builtin_wait, is_special=1), - } - - def __init__(self, pwd, debugflags = [], env=None, redirs=None, stdin=None, - stdout=None, stderr=None, opts=Options()): - self._env = env - if self._env is None: - self._env = Environment(pwd) - self._children = {} - - self._redirs = redirs - self._close_redirs = False - - if self._redirs is None: - if stdin is None: - stdin = sys.stdin - if stdout is None: - stdout = sys.stdout - if stderr is None: - stderr = sys.stderr - stdin = FileWrapper('r', stdin, False) - stdout = FileWrapper('w', stdout, False) - stderr = FileWrapper('w', stderr, False) - self._redirs = Redirections(stdin, stdout, stderr) - self._close_redirs = True - - self._debugflags = list(debugflags) - self._logfile = sys.stderr - self._options = opts - - def close(self): - """Must be called when the interpreter is no longer used.""" - script = self._env.traps.get('EXIT') - if script: - try: - self.execute_script(script=script) - except: - pass - - if self._redirs is not None and self._close_redirs: - self._redirs.close() - self._redirs = None - - def log(self, s): - self._logfile.write(s) - self._logfile.flush() - - def __getitem__(self, key): - return 
-
-
-class GlobError(Exception):
-    pass
-
-class Options:
-    def __init__(self):
-        # True if Mercurial operates with binary streams
-        self.hgbinary = True
-
-class Interpreter:
-    # The implementation is very basic: the execute() method just does a DFS
-    # over the AST and executes the nodes one by one. Nodes are tuples
-    # (name, obj) where name is a string identifier and obj the AST element
-    # returned by the parser.
-    #
-    # Handlers are named after the node identifiers.
-    # TODO: check node names and replace the switch in execute() with some
-    # dynamic getattr() call to find node handlers.
-    """Shell interpreter.
-
-    The following debugging flags can be passed:
-    debug-parsing - enable PLY debugging.
-    debug-tree - print the generated AST.
-    debug-cmd - trace command execution before word expansion, plus exit status.
-    debug-utility - trace utility execution.
-    """
-
-    # List of supported commands.
-    COMMANDS = {
-        'cat': Utility(builtin.utility_cat,),
-        'cd': Utility(builtin.utility_cd,),
-        ':': Utility(builtin.utility_colon,),
-        'echo': Utility(builtin.utility_echo),
-        'env': Utility(builtin.utility_env),
-        'exit': Utility(builtin.utility_exit),
-        'export': Utility(builtin.builtin_export, is_special=1),
-        'egrep': Utility(builtin.utility_egrep),
-        'fgrep': Utility(builtin.utility_fgrep),
-        'gunzip': Utility(builtin.utility_gunzip),
-        'kill': Utility(builtin.utility_kill),
-        'mkdir': Utility(builtin.utility_mkdir),
-        'netstat': Utility(builtin.utility_netstat),
-        'printf': Utility(builtin.utility_printf),
-        'pwd': Utility(builtin.utility_pwd),
-        'return': Utility(builtin.builtin_return, is_special=1),
-        'sed': Utility(builtin.utility_sed,),
-        'set': Utility(builtin.builtin_set,),
-        'shift': Utility(builtin.builtin_shift,),
-        'sleep': Utility(builtin.utility_sleep,),
-        'sort': Utility(builtin.utility_sort,),
-        'trap': Utility(builtin.builtin_trap, is_special=1),
-        'true': Utility(builtin.utility_true),
-        'unset': Utility(builtin.builtin_unset, is_special=1),
-        'wait': Utility(builtin.builtin_wait, is_special=1),
-    }
-
-    def __init__(self, pwd, debugflags=[], env=None, redirs=None, stdin=None,
-                 stdout=None, stderr=None, opts=Options()):
-        self._env = env
-        if self._env is None:
-            self._env = Environment(pwd)
-        self._children = {}
-
-        self._redirs = redirs
-        self._close_redirs = False
-
-        if self._redirs is None:
-            if stdin is None:
-                stdin = sys.stdin
-            if stdout is None:
-                stdout = sys.stdout
-            if stderr is None:
-                stderr = sys.stderr
-            stdin = FileWrapper('r', stdin, False)
-            stdout = FileWrapper('w', stdout, False)
-            stderr = FileWrapper('w', stderr, False)
-            self._redirs = Redirections(stdin, stdout, stderr)
-            self._close_redirs = True
-
-        self._debugflags = list(debugflags)
-        self._logfile = sys.stderr
-        self._options = opts
-
-    def close(self):
-        """Must be called when the interpreter is no longer used."""
-        script = self._env.traps.get('EXIT')
-        if script:
-            try:
-                self.execute_script(script=script)
-            except:
-                pass
-
-        if self._redirs is not None and self._close_redirs:
-            self._redirs.close()
-            self._redirs = None
-
-    def log(self, s):
-        self._logfile.write(s)
-        self._logfile.flush()
-
-    def __getitem__(self, key):
-        return self._env[key]
-
-    def __setitem__(self, key, value):
-        self._env[key] = value
-
-    def options(self):
-        return self._options
-
-    def redirect(self, redirs, ios):
-        def add_redir(io):
-            if isinstance(io, pyshyacc.IORedirect):
-                redirs.add(self, io.op, io.filename, io.io_number)
-            else:
-                redirs.add_here_document(self, io.name, io.content, io.io_number)
-
-        map(add_redir, ios)
-        return redirs
-
-    def execute_script(self, script=None, ast=None, sourced=False,
-                       scriptpath=None):
-        """If script is not None, parse it; otherwise use the supplied AST.
-        Then execute it and return the script exit status.
-        """
-        try:
-            if scriptpath is not None:
-                self._env['0'] = os.path.abspath(scriptpath)
-
-            if script is not None:
-                debug_parsing = ('debug-parsing' in self._debugflags)
-                cmds, script = pyshyacc.parse(script, True, debug_parsing)
-                if 'debug-tree' in self._debugflags:
-                    pyshyacc.print_commands(cmds, self._logfile)
-                    self._logfile.flush()
-            else:
-                cmds, script = ast, ''
-
-            status = 0
-            for cmd in cmds:
-                try:
-                    status = self.execute(cmd)
-                except ExitSignal, e:
-                    if sourced:
-                        raise
-                    status = int(e.args[0])
-                    return status
-                except ShellError:
-                    self._env['?'] = 1
-                    raise
-                if 'debug-utility' in self._debugflags or 'debug-cmd' in self._debugflags:
-                    self.log('returncode ' + str(status) + '\n')
-            return status
-        except CommandNotFound, e:
-            print >>self._redirs.stderr, str(e)
-            self._redirs.stderr.flush()
-            # Command not found by a non-interactive shell returns 127
-            raise
-        except RedirectionError, e:
-            # TODO: should be handled depending on the utility status
-            print >>self._redirs.stderr, str(e)
-            self._redirs.stderr.flush()
-            # Command not found by a non-interactive shell returns 127
-            raise
-
-    def dotcommand(self, env, args):
-        if len(args) < 1:
-            raise ShellError('. expects at least one argument')
-        path = args[0]
-        if '/' not in path:
-            found = env.find_in_path(args[0], True)
-            if found:
-                path = found[0]
-        script = file(path).read()
-        return self.execute_script(script=script, sourced=True)
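[Editor's note: putting execute_script() together with the constructor above,
a typical driver looks roughly like this. A sketch only: it assumes the
surrounding pysh modules are importable and that the pwd argument names an
existing directory.]

    import sys

    interp = Interpreter('/tmp', debugflags=['debug-tree'],
                         stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr)
    try:
        # Parse and run a small script; the return value is the exit status
        status = interp.execute_script(script='X=42; echo "$X"\n')
    finally:
        interp.close()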
- """ - if not token: - return 0 - - if redirs is None: - redirs = self._redirs - - if isinstance(token, list): - # Commands sequence - res = 0 - for t in token: - res = self.execute(t, redirs) - return res - - type, value = token - status = 0 - if type=='simple_command': - redirs_copy = redirs.clone() - try: - # TODO: define and handle command return values - # TODO: implement set -e - status = self._execute_simple_command(value, redirs_copy) - finally: - redirs_copy.close() - elif type=='pipeline': - status = self._execute_pipeline(value, redirs) - elif type=='and_or': - status = self._execute_and_or(value, redirs) - elif type=='for_clause': - status = self._execute_for_clause(value, redirs) - elif type=='while_clause': - status = self._execute_while_clause(value, redirs) - elif type=='function_definition': - status = self._execute_function_definition(value, redirs) - elif type=='brace_group': - status = self._execute_brace_group(value, redirs) - elif type=='if_clause': - status = self._execute_if_clause(value, redirs) - elif type=='subshell': - status = self.subshell(ast=value.cmds, redirs=redirs) - elif type=='async': - status = self._asynclist(value) - elif type=='redirect_list': - redirs_copy = self.redirect(redirs.clone(), value.redirs) - try: - status = self.execute(value.cmd, redirs_copy) - finally: - redirs_copy.close() - else: - raise NotImplementedError('Unsupported token type ' + type) - - if status < 0: - status = 255 - return status - - def _execute_if_clause(self, if_clause, redirs): - cond_status = self.execute(if_clause.cond, redirs) - if cond_status==0: - return self.execute(if_clause.if_cmds, redirs) - else: - return self.execute(if_clause.else_cmds, redirs) - - def _execute_brace_group(self, group, redirs): - status = 0 - for cmd in group.cmds: - status = self.execute(cmd, redirs) - return status - - def _execute_function_definition(self, fundef, redirs): - self._env.define_function(fundef.name, fundef.body) - return 0 - - def _execute_while_clause(self, while_clause, redirs): - status = 0 - while 1: - cond_status = 0 - for cond in while_clause.condition: - cond_status = self.execute(cond, redirs) - - if cond_status: - break - - for cmd in while_clause.cmds: - status = self.execute(cmd, redirs) - - return status - - def _execute_for_clause(self, for_clause, redirs): - if not is_name(for_clause.name): - raise ShellSyntaxError('%s is not a valid name' % repr(for_clause.name)) - items = mappend(self.expand_token, for_clause.items) - - status = 0 - for item in items: - self._env[for_clause.name] = item - for cmd in for_clause.cmds: - status = self.execute(cmd, redirs) - return status - - def _execute_and_or(self, or_and, redirs): - res = self.execute(or_and.left, redirs) - if (or_and.op=='&&' and res==0) or (or_and.op!='&&' and res!=0): - res = self.execute(or_and.right, redirs) - return res - - def _execute_pipeline(self, pipeline, redirs): - if len(pipeline.commands)==1: - status = self.execute(pipeline.commands[0], redirs) - else: - # Execute all commands one after the other - status = 0 - inpath, outpath = None, None - try: - # Commands inputs and outputs cannot really be plugged as done - # by a real shell. Run commands sequentially and chain their - # input/output throught temporary files. 
-
-    def _execute_pipeline(self, pipeline, redirs):
-        if len(pipeline.commands) == 1:
-            status = self.execute(pipeline.commands[0], redirs)
-        else:
-            # Execute all commands one after the other
-            status = 0
-            inpath, outpath = None, None
-            try:
-                # Command inputs and outputs cannot really be connected the
-                # way a real shell does it. Run the commands sequentially and
-                # chain their input/output through temporary files.
-                tmpfd, inpath = tempfile.mkstemp()
-                os.close(tmpfd)
-                tmpfd, outpath = tempfile.mkstemp()
-                os.close(tmpfd)
-
-                inpath = win32_to_unix_path(inpath)
-                outpath = win32_to_unix_path(outpath)
-
-                for i, cmd in enumerate(pipeline.commands):
-                    call_redirs = redirs.clone()
-                    try:
-                        if i != 0:
-                            call_redirs.add(self, '<', inpath)
-                        if i != len(pipeline.commands) - 1:
-                            call_redirs.add(self, '>', outpath)
-
-                        status = self.execute(cmd, call_redirs)
-
-                        # Chain inputs/outputs
-                        inpath, outpath = outpath, inpath
-                    finally:
-                        call_redirs.close()
-            finally:
-                if inpath: os.remove(inpath)
-                if outpath: os.remove(outpath)
-
-        if pipeline.reverse_status:
-            status = int(not status)
-        self._env['?'] = status
-        return status
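[Editor's note: the temporary-file chaining in _execute_pipeline can be shown
in isolation. This sketch uses subprocess instead of the interpreter's own
redirection objects, and it also redirects the last command, which the
deleted code deliberately does not:]

    import os, subprocess, tempfile

    def run_pipeline(commands):
        fd, inpath = tempfile.mkstemp(); os.close(fd)
        fd, outpath = tempfile.mkstemp(); os.close(fd)
        status = 0
        try:
            for cmd in commands:
                stdin, stdout = open(inpath, 'rb'), open(outpath, 'wb')
                try:
                    status = subprocess.call(cmd, stdin=stdin, stdout=stdout)
                finally:
                    stdin.close(); stdout.close()
                # Chain: this command's output is the next one's input
                inpath, outpath = outpath, inpath
            result = open(inpath, 'rb')
            print result.read(),
            result.close()
        finally:
            os.remove(inpath); os.remove(outpath)
        return status

    run_pipeline([['echo', 'b\na\nc'], ['sort']])   # prints a, b, c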
-
-    def _execute_function(self, name, args, interp, env, stdin, stdout, stderr, *others):
-        assert interp is self
-
-        func = env.get_function(name)
-        # Set positional parameters
-        prevargs = None
-        try:
-            prevargs = env.set_positional_args(args)
-            try:
-                redirs = Redirections(stdin.dup(), stdout.dup(), stderr.dup())
-                try:
-                    status = self.execute(func, redirs)
-                finally:
-                    redirs.close()
-            except ReturnSignal, e:
-                status = int(e.args[0])
-            env['?'] = status
-            return status
-        finally:
-            # Reset positional parameters
-            if prevargs is not None:
-                env.set_positional_args(prevargs)
-
-    def _execute_simple_command(self, token, redirs):
-        """Can raise ReturnSignal when the return builtin is called, ExitSignal
-        when exit is called, and other shell exceptions upon builtin failures.
-        """
-        debug_command = 'debug-cmd' in self._debugflags
-        if debug_command:
-            self.log('word' + repr(token.words) + '\n')
-            self.log('assigns' + repr(token.assigns) + '\n')
-            self.log('redirs' + repr(token.redirs) + '\n')
-
-        is_special = None
-        env = self._env
-
-        try:
-            # Word expansion
-            args = []
-            for word in token.words:
-                args += self.expand_token(word)
-                if is_special is None and args:
-                    is_special = env.is_function(args[0]) or \
-                        (args[0] in self.COMMANDS and self.COMMANDS[args[0]].is_special)
-
-            if debug_command:
-                self.log('_execute_simple_command' + str(args) + '\n')
-
-            if not args:
-                # Redirections happen in a subshell
-                redirs = redirs.clone()
-            elif not is_special:
-                env = self._env.clone()
-
-            # Redirections
-            self.redirect(redirs, token.redirs)
-
-            # Variable assignments
-            res = 0
-            for type, (k, v) in token.assigns:
-                status, expanded = self.expand_variable((k, v))
-                if status is not None:
-                    res = status
-                if args:
-                    env.export(k, expanded)
-                else:
-                    env[k] = expanded
-
-            if args and args[0] in ('.', 'source'):
-                res = self.dotcommand(env, args[1:])
-            elif args:
-                if args[0] in self.COMMANDS:
-                    command = self.COMMANDS[args[0]]
-                elif env.is_function(args[0]):
-                    command = Utility(self._execute_function, is_special=True)
-                else:
-                    if '/' not in args[0].replace('\\', '/'):
-                        cmd = env.find_in_path(args[0])
-                        if not cmd:
-                            # TODO: test error code on unknown command => 127
-                            raise CommandNotFound('Unknown command: "%s"' % args[0])
-                    else:
-                        # Handle commands like '/cygdrive/c/foo.bat'
-                        cmd = cygwin_to_windows_path(args[0])
-                        if not os.path.exists(cmd):
-                            raise CommandNotFound('%s: No such file or directory' % args[0])
-                        shebang = resolve_shebang(cmd)
-                        if shebang:
-                            cmd = shebang
-                        else:
-                            cmd = [cmd]
-                    args[0:1] = cmd
-                    command = Utility(builtin.run_command)
-
-                # Command execution
-                if 'debug-cmd' in self._debugflags:
-                    self.log('redirections ' + str(redirs) + '\n')
-
-                res = command.func(args[0], args[1:], self, env,
-                                   redirs.stdin(), redirs.stdout(),
-                                   redirs.stderr(), self._debugflags)
-
-                if self._env.has_opt('-x'):
-                    # Trace command execution in the shell environment
-                    # BUG: it would be hard to reproduce real shell behaviour
-                    # since the AST is not annotated with source lines/tokens.
-                    self._redirs.stdout().write(' '.join(args))
-
-        except ReturnSignal:
-            raise
-        except ShellError, e:
-            if is_special or isinstance(e, (ExitSignal,
-                                            ShellSyntaxError, ExpansionError)):
-                raise e
-            self._redirs.stderr().write(str(e) + '\n')
-            return 1
-
-        return res
-
-    def expand_token(self, word):
-        """Expand a word as specified in [2.6 Word Expansions]. Return the
-        list of expanded words.
-        """
-        status, wtrees = self._expand_word(word)
-        return map(pyshlex.wordtree_as_string, wtrees)
-
-    def expand_variable(self, word):
-        """Return a status code (or None if no command expansion occurred)
-        and a single word.
-        """
-        status, wtrees = self._expand_word(word, pathname=False, split=False)
-        words = map(pyshlex.wordtree_as_string, wtrees)
-        assert len(words) == 1
-        return status, words[0]
-
-    def expand_here_document(self, word):
-        """Return the expanded document as a single word. The here document
-        is assumed to be unquoted.
-        """
-        status, wtrees = self._expand_word(word, pathname=False,
-                                           split=False, here_document=True)
-        words = map(pyshlex.wordtree_as_string, wtrees)
-        assert len(words) == 1
-        return words[0]
-
-    def expand_redirection(self, word):
-        """Return a single word."""
-        return self.expand_variable(word)[1]
-
-    def get_env(self):
-        return self._env
- """ - status = None - for part in wtree: - if not isinstance(part, list): - continue - if part[0]in ("'", '\\'): - continue - elif part[0] in ('`', '$('): - status, result = self._expand_command(part) - part[:] = result - elif part[0] in ('$', '${'): - part[:] = self._expand_parameter(part, wtree[0]=='"', split) - elif part[0] in ('', '"'): - status, result = expand(part) - part[:] = result - else: - raise NotImplementedError('%s expansion is not implemented' - % part[0]) - # [] is returned when an expansion result in no-field, - # like an empty $@ - wtree = [p for p in wtree if p != []] - if len(wtree) < 3: - return status, [] - return status, wtree - - status, wtree = expand(wtree) - if len(wtree) == 0: - return status, wtree - wtree = pyshlex.normalize_wordtree(wtree) - - if split: - wtrees = self._split_fields(wtree) - else: - wtrees = [wtree] - - if pathname: - wtrees = mappend(self._expand_pathname, wtrees) - - wtrees = map(self._remove_quotes, wtrees) - return status, wtrees - - def _expand_command(self, wtree): - # BUG: there is something to do with backslashes and quoted - # characters here - command = pyshlex.wordtree_as_string(wtree[1:-1]) - status, output = self.subshell_output(command) - return status, ['', output, ''] - - def _expand_parameter(self, wtree, quoted=False, split=False): - """Return a valid wtree or an empty list when no parameter results.""" - # Get the parameter name - # TODO: implement weird expansion rules with ':' - name = pyshlex.wordtree_as_string(wtree[1:-1]) - if not is_name(name) and not is_special_param(name): - raise ExpansionError('Bad substitution "%s"' % name) - # TODO: implement special parameters - if name in ('@', '*'): - args = self._env.get_positional_args() - if len(args) == 0: - return [] - if len(args)<2: - return ['', ''.join(args), ''] - - sep = self._env.get('IFS', '')[:1] - if split and quoted and name=='@': - # Introduce a new token to tell the caller that these parameters - # cause a split as specified in 2.5.2 - return ['@'] + args + [''] - else: - return ['', sep.join(args), ''] - - return ['', self._env.get(name, ''), ''] - - def _split_fields(self, wtree): - def is_empty(split): - return split==['', '', ''] - - def split_positional(quoted): - # Return a list of wtree split according positional parameters rules. - # All remaining '@' groups are removed. - assert quoted[0]=='"' - - splits = [[]] - for part in quoted: - if not isinstance(part, list) or part[0]!='@': - splits[-1].append(part) - else: - # Empty or single argument list were dealt with already - assert len(part)>3 - # First argument must join with the beginning part of the original word - splits[-1].append(part[1]) - # Create double-quotes expressions for every argument after the first - for arg in part[2:-1]: - splits[-1].append('"') - splits.append(['"', arg]) - return splits - - # At this point, all expansions but pathnames have occured. Only quoted - # and positional sequences remain. Thus, all candidates for field splitting - # are in the tree root, or are positional splits ('@') and lie in root - # children. 
-
-    def _split_fields(self, wtree):
-        def is_empty(split):
-            return split == ['', '', '']
-
-        def split_positional(quoted):
-            # Return a list of wtrees split according to positional parameter
-            # rules. All remaining '@' groups are removed.
-            assert quoted[0] == '"'
-
-            splits = [[]]
-            for part in quoted:
-                if not isinstance(part, list) or part[0] != '@':
-                    splits[-1].append(part)
-                else:
-                    # Empty and single-argument lists were dealt with already
-                    assert len(part) > 3
-                    # The first argument must join with the beginning part of
-                    # the original word
-                    splits[-1].append(part[1])
-                    # Create double-quote expressions for every argument after
-                    # the first one
-                    for arg in part[2:-1]:
-                        splits[-1].append('"')
-                        splits.append(['"', arg])
-            return splits
-
-        # At this point, all expansions but pathnames have occurred. Only
-        # quoted and positional sequences remain. Thus, all candidates for
-        # field splitting are in the tree root, or are positional splits
-        # ('@') and lie in root children.
-        if not wtree or wtree[0] not in ('', '"'):
-            # The whole token is quoted or empty, nothing to split
-            return [wtree]
-
-        if wtree[0] == '"':
-            wtree = ['', wtree, '']
-
-        result = [['', '']]
-        for part in wtree[1:-1]:
-            if isinstance(part, list):
-                if part[0] == '"':
-                    splits = split_positional(part)
-                    if len(splits) <= 1:
-                        result[-1] += [part, '']
-                    else:
-                        # Terminate the current split
-                        result[-1] += [splits[0], '']
-                        result += splits[1:-1]
-                        # Create a new split
-                        result += [['', splits[-1], '']]
-                else:
-                    result[-1] += [part, '']
-            else:
-                splits = self._env.split_fields(part)
-                if len(splits) <= 1:
-                    # No split
-                    result[-1][-1] += part
-                else:
-                    # Terminate the current resulting part and create a new one
-                    result[-1][-1] += splits[0]
-                    result[-1].append('')
-                    result += [['', r, ''] for r in splits[1:-1]]
-                    result += [['', splits[-1]]]
-                    result[-1].append('')
-
-        # Leading and trailing empty groups come from leading/trailing blanks
-        if result and is_empty(result[-1]):
-            result[-1:] = []
-        if result and is_empty(result[0]):
-            result[:1] = []
-        return result
-
-    def _expand_pathname(self, wtree):
-        """See [2.6.6 Pathname Expansion]."""
-        if self._env.has_opt('-f'):
-            return [wtree]
-
-        # All expansions have been performed, only quoted sequences should
-        # remain in the tree. Generate the pattern by folding the tree,
-        # escaping special characters when they appear quoted.
-        special_chars = '*?[]'
-
-        def make_pattern(wtree):
-            subpattern = []
-            for part in wtree[1:-1]:
-                if isinstance(part, list):
-                    part = make_pattern(part)
-                elif wtree[0] != '':
-                    for c in part:
-                        # Meta-characters cannot be quoted
-                        if c in special_chars:
-                            raise GlobError()
-                subpattern.append(part)
-            return ''.join(subpattern)
-
-        def pwd_glob(pattern):
-            cwd = os.getcwd()
-            os.chdir(self._env['PWD'])
-            try:
-                return glob.glob(pattern)
-            finally:
-                os.chdir(cwd)
-
-        # TODO: check working directory issues here wrt relative patterns
-        try:
-            pattern = make_pattern(wtree)
-            paths = pwd_glob(pattern)
-        except GlobError:
-            # BUG: meta-characters were found in quoted sequences. They should
-            # have been matched literally, but this is unsupported by the
-            # current glob module. Instead we consider that the whole tree
-            # must be used literally, so there is no point in globbing. This
-            # is wrong when meta-characters are mixed with quoted ones in the
-            # same pattern, like: < foo*"py*" >
-            paths = []
-
-        if not paths:
-            return [wtree]
-        return [['', path, ''] for path in paths]
-
-    def _remove_quotes(self, wtree):
-        """See [2.6.7 Quote Removal]."""
-
-        def unquote(wtree):
-            unquoted = []
-            for part in wtree[1:-1]:
-                if isinstance(part, list):
-                    part = unquote(part)
-                unquoted.append(part)
-            return ''.join(unquoted)
-
-        return ['', unquote(wtree), '']
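[Editor's note: one detail worth keeping in mind from _expand_pathname is that
globbing must happen relative to the shell's logical $PWD, which need not be
the Python process's working directory. The helper does this by temporarily
chdir'ing around the glob call; a standalone version:]

    import glob, os

    def pwd_glob(pattern, pwd):
        cwd = os.getcwd()
        os.chdir(pwd)
        try:
            return glob.glob(pattern)
        finally:
            os.chdir(cwd)    # always restore the real working directory

    print pwd_glob('*.py', os.getcwd())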
- """ - if redirs: - sub_redirs = redirs - else: - sub_redirs = redirs.clone() - - subshell = None - try: - subshell = Interpreter(None, self._debugflags, self._env.clone(True), - sub_redirs, opts=self._options) - return subshell.execute_script(script, ast) - finally: - if not redirs: sub_redirs.close() - if subshell: subshell.close() - - def subshell_output(self, script): - """Execute the script in a subshell and return the captured output.""" - # Create temporary file to capture subshell output - tmpfd, tmppath = tempfile.mkstemp() - try: - tmpfile = os.fdopen(tmpfd, 'wb') - stdout = FileWrapper('w', tmpfile) - - redirs = Redirections(self._redirs.stdin().dup(), - stdout, - self._redirs.stderr().dup()) - try: - status = self.subshell(script=script, redirs=redirs) - finally: - redirs.close() - redirs = None - - # Extract subshell standard output - tmpfile = open(tmppath, 'rb') - try: - output = tmpfile.read() - return status, output.rstrip('\n') - finally: - tmpfile.close() - finally: - os.remove(tmppath) - - def _asynclist(self, cmd): - args = (self._env.get_variables(), cmd) - arg = encodeargs(args) - assert len(args) < 30*1024 - cmd = ['pysh.bat', '--ast', '-c', arg] - p = subprocess.Popen(cmd, cwd=self._env['PWD']) - self._children[p.pid] = p - self._env['!'] = p.pid - return 0 - - def wait(self, pids=None): - if not pids: - pids = self._children.keys() - - status = 127 - for pid in pids: - if pid not in self._children: - continue - p = self._children.pop(pid) - status = p.wait() - - return status - diff --git a/bitbake/lib/bb/pysh/lsprof.py b/bitbake/lib/bb/pysh/lsprof.py deleted file mode 100644 index b1831c22a..000000000 --- a/bitbake/lib/bb/pysh/lsprof.py +++ /dev/null @@ -1,116 +0,0 @@ -#! /usr/bin/env python - -import sys -from _lsprof import Profiler, profiler_entry - -__all__ = ['profile', 'Stats'] - -def profile(f, *args, **kwds): - """XXX docstring""" - p = Profiler() - p.enable(subcalls=True, builtins=True) - try: - f(*args, **kwds) - finally: - p.disable() - return Stats(p.getstats()) - - -class Stats(object): - """XXX docstring""" - - def __init__(self, data): - self.data = data - - def sort(self, crit="inlinetime"): - """XXX docstring""" - if crit not in profiler_entry.__dict__: - raise ValueError("Can't sort by %s" % crit) - self.data.sort(lambda b, a: cmp(getattr(a, crit), - getattr(b, crit))) - for e in self.data: - if e.calls: - e.calls.sort(lambda b, a: cmp(getattr(a, crit), - getattr(b, crit))) - - def pprint(self, top=None, file=None, limit=None, climit=None): - """XXX docstring""" - if file is None: - file = sys.stdout - d = self.data - if top is not None: - d = d[:top] - cols = "% 12s %12s %11.4f %11.4f %s\n" - hcols = "% 12s %12s %12s %12s %s\n" - cols2 = "+%12s %12s %11.4f %11.4f + %s\n" - file.write(hcols % ("CallCount", "Recursive", "Total(ms)", - "Inline(ms)", "module:lineno(function)")) - count = 0 - for e in d: - file.write(cols % (e.callcount, e.reccallcount, e.totaltime, - e.inlinetime, label(e.code))) - count += 1 - if limit is not None and count == limit: - return - ccount = 0 - if e.calls: - for se in e.calls: - file.write(cols % ("+%s" % se.callcount, se.reccallcount, - se.totaltime, se.inlinetime, - "+%s" % label(se.code))) - count += 1 - ccount += 1 - if limit is not None and count == limit: - return - if climit is not None and ccount == climit: - break - - def freeze(self): - """Replace all references to code objects with string - descriptions; this makes it possible to pickle the instance.""" - - # this code is probably rather ickier than it 
diff --git a/bitbake/lib/bb/pysh/lsprof.py b/bitbake/lib/bb/pysh/lsprof.py
deleted file mode 100644
index b1831c22a..000000000
--- a/bitbake/lib/bb/pysh/lsprof.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#! /usr/bin/env python
-
-import sys
-from _lsprof import Profiler, profiler_entry
-
-__all__ = ['profile', 'Stats']
-
-def profile(f, *args, **kwds):
-    """Run f under the profiler and return the collected statistics."""
-    p = Profiler()
-    p.enable(subcalls=True, builtins=True)
-    try:
-        f(*args, **kwds)
-    finally:
-        p.disable()
-    return Stats(p.getstats())
-
-
-class Stats(object):
-    """Wrapper around the profiler entries, with sorting and printing."""
-
-    def __init__(self, data):
-        self.data = data
-
-    def sort(self, crit="inlinetime"):
-        """Sort the entries and their sub-calls by the given criterion."""
-        if crit not in profiler_entry.__dict__:
-            raise ValueError("Can't sort by %s" % crit)
-        self.data.sort(lambda b, a: cmp(getattr(a, crit),
-                                        getattr(b, crit)))
-        for e in self.data:
-            if e.calls:
-                e.calls.sort(lambda b, a: cmp(getattr(a, crit),
-                                              getattr(b, crit)))
-
-    def pprint(self, top=None, file=None, limit=None, climit=None):
-        """Print the statistics, one entry per line."""
-        if file is None:
-            file = sys.stdout
-        d = self.data
-        if top is not None:
-            d = d[:top]
-        cols = "% 12s %12s %11.4f %11.4f   %s\n"
-        hcols = "% 12s %12s %12s %12s %s\n"
-        cols2 = "+%12s %12s %11.4f %11.4f +  %s\n"
-        file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
-                            "Inline(ms)", "module:lineno(function)"))
-        count = 0
-        for e in d:
-            file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
-                               e.inlinetime, label(e.code)))
-            count += 1
-            if limit is not None and count == limit:
-                return
-            ccount = 0
-            if e.calls:
-                for se in e.calls:
-                    file.write(cols % ("+%s" % se.callcount, se.reccallcount,
-                                       se.totaltime, se.inlinetime,
-                                       "+%s" % label(se.code)))
-                    count += 1
-                    ccount += 1
-                    if limit is not None and count == limit:
-                        return
-                    if climit is not None and ccount == climit:
-                        break
-
-    def freeze(self):
-        """Replace all references to code objects with string
-        descriptions; this makes it possible to pickle the instance."""
-
-        # this code is probably rather ickier than it needs to be!
-        for i in range(len(self.data)):
-            e = self.data[i]
-            if not isinstance(e.code, str):
-                self.data[i] = type(e)((label(e.code),) + e[1:])
-            if e.calls:
-                for j in range(len(e.calls)):
-                    se = e.calls[j]
-                    if not isinstance(se.code, str):
-                        e.calls[j] = type(se)((label(se.code),) + se[1:])
-
-_fn2mod = {}
-
-def label(code):
-    if isinstance(code, str):
-        return code
-    try:
-        mname = _fn2mod[code.co_filename]
-    except KeyError:
-        for k, v in sys.modules.items():
-            if v is None:
-                continue
-            if not hasattr(v, '__file__'):
-                continue
-            if not isinstance(v.__file__, str):
-                continue
-            if v.__file__.startswith(code.co_filename):
-                mname = _fn2mod[code.co_filename] = k
-                break
-        else:
-            mname = _fn2mod[code.co_filename] = '<%s>' % code.co_filename
-
-    return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
-
-
-if __name__ == '__main__':
-    import os
-    sys.argv = sys.argv[1:]
-    if not sys.argv:
-        print >> sys.stderr, "usage: lsprof.py