Diffstat (limited to 'bitbake')
-rw-r--r--  bitbake/AUTHORS | 2
-rwxr-xr-x  bitbake/bin/bitbake | 87
-rwxr-xr-x  bitbake/bin/bitdoc | 2
-rw-r--r--  bitbake/contrib/vim/ftdetect/bitbake.vim | 28
-rw-r--r--  bitbake/contrib/vim/ftplugin/bitbake.vim | 1
-rwxr-xr-x  bitbake/contrib/vim/plugin/newbb.vim | 85
-rw-r--r--  bitbake/contrib/vim/syntax/bitbake.vim | 192
-rw-r--r--  bitbake/doc/manual/usermanual.xml | 6
-rw-r--r--  bitbake/lib/bb/__init__.py | 51
-rw-r--r--  bitbake/lib/bb/build.py | 488
-rw-r--r--  bitbake/lib/bb/cache.py | 664
-rw-r--r--  bitbake/lib/bb/codeparser.py | 41
-rw-r--r--  bitbake/lib/bb/command.py | 65
-rw-r--r--  bitbake/lib/bb/cooker.py | 436
-rw-r--r--  bitbake/lib/bb/data.py | 19
-rw-r--r--  bitbake/lib/bb/data_smart.py | 99
-rw-r--r--  bitbake/lib/bb/event.py | 112
-rw-r--r--  bitbake/lib/bb/fetch/__init__.py | 127
-rw-r--r--  bitbake/lib/bb/fetch/bzr.py | 35
-rw-r--r--  bitbake/lib/bb/fetch/cvs.py | 41
-rw-r--r--  bitbake/lib/bb/fetch/git.py | 107
-rw-r--r--  bitbake/lib/bb/fetch/hg.py | 40
-rw-r--r--  bitbake/lib/bb/fetch/local.py | 2
-rw-r--r--  bitbake/lib/bb/fetch/osc.py | 31
-rw-r--r--  bitbake/lib/bb/fetch/perforce.py | 28
-rw-r--r--  bitbake/lib/bb/fetch/repo.py | 35
-rw-r--r--  bitbake/lib/bb/fetch/svk.py | 20
-rw-r--r--  bitbake/lib/bb/fetch/svn.py | 33
-rw-r--r--  bitbake/lib/bb/fetch/wget.py | 16
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py | 985
-rw-r--r--  bitbake/lib/bb/fetch2/bzr.py | 141
-rw-r--r--  bitbake/lib/bb/fetch2/cvs.py | 169
-rw-r--r--  bitbake/lib/bb/fetch2/git.py | 236
-rw-r--r--  bitbake/lib/bb/fetch2/hg.py | 174
-rw-r--r--  bitbake/lib/bb/fetch2/local.py | 76
-rw-r--r--  bitbake/lib/bb/fetch2/osc.py | 135
-rw-r--r--  bitbake/lib/bb/fetch2/perforce.py | 196
-rw-r--r--  bitbake/lib/bb/fetch2/repo.py | 98
-rw-r--r--  bitbake/lib/bb/fetch2/ssh.py | 120
-rw-r--r--  bitbake/lib/bb/fetch2/svk.py | 97
-rw-r--r--  bitbake/lib/bb/fetch2/svn.py | 173
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py | 91
-rw-r--r--  bitbake/lib/bb/msg.py | 204
-rw-r--r--  bitbake/lib/bb/parse/__init__.py | 21
-rw-r--r--  bitbake/lib/bb/parse/ast.py | 127
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py | 35
-rw-r--r--  bitbake/lib/bb/parse/parse_py/ConfHandler.py | 19
-rw-r--r--  bitbake/lib/bb/persist_data.py | 226
-rw-r--r--  bitbake/lib/bb/process.py | 109
-rw-r--r--  bitbake/lib/bb/providers.py | 37
-rw-r--r--  bitbake/lib/bb/pysh/__init__.py (renamed from bitbake/lib/pysh/__init__.py) | 0
-rw-r--r--  bitbake/lib/bb/pysh/builtin.py (renamed from bitbake/lib/pysh/builtin.py) | 0
-rw-r--r--  bitbake/lib/bb/pysh/interp.py (renamed from bitbake/lib/pysh/interp.py) | 0
-rw-r--r--  bitbake/lib/bb/pysh/lsprof.py (renamed from bitbake/lib/pysh/lsprof.py) | 0
-rw-r--r--  bitbake/lib/bb/pysh/pysh.py (renamed from bitbake/lib/pysh/pysh.py) | 0
-rw-r--r--  bitbake/lib/bb/pysh/pyshlex.py (renamed from bitbake/lib/pysh/pyshlex.py) | 0
-rw-r--r--  bitbake/lib/bb/pysh/pyshyacc.py (renamed from bitbake/lib/pysh/pyshyacc.py) | 9
-rw-r--r--  bitbake/lib/bb/pysh/sherrors.py (renamed from bitbake/lib/pysh/sherrors.py) | 0
-rw-r--r--  bitbake/lib/bb/pysh/subprocess_fix.py (renamed from bitbake/lib/pysh/subprocess_fix.py) | 0
-rw-r--r--  bitbake/lib/bb/runqueue.py | 443
-rw-r--r--  bitbake/lib/bb/server/none.py | 2
-rw-r--r--  bitbake/lib/bb/server/xmlrpc.py | 2
-rw-r--r--  bitbake/lib/bb/shell.py | 12
-rw-r--r--  bitbake/lib/bb/siggen.py | 98
-rw-r--r--  bitbake/lib/bb/taskdata.py | 83
-rw-r--r--  bitbake/lib/bb/ui/crumbs/progress.py | 2
-rw-r--r--  bitbake/lib/bb/ui/crumbs/runningbuild.py | 225
-rw-r--r--  bitbake/lib/bb/ui/depexp.py | 77
-rw-r--r--  bitbake/lib/bb/ui/goggle.py | 33
-rw-r--r--  bitbake/lib/bb/ui/knotty.py | 170
-rw-r--r--  bitbake/lib/bb/ui/ncurses.py | 66
-rw-r--r--  bitbake/lib/bb/ui/puccho.py | 2
-rw-r--r--  bitbake/lib/bb/ui/uievent.py | 25
-rw-r--r--  bitbake/lib/bb/utils.py | 177
-rw-r--r--  bitbake/lib/progressbar.py | 384
75 files changed, 6167 insertions, 2005 deletions
diff --git a/bitbake/AUTHORS b/bitbake/AUTHORS
index a4014b1e3..91fd78fd2 100644
--- a/bitbake/AUTHORS
+++ b/bitbake/AUTHORS
@@ -1,7 +1,7 @@
Tim Ansell <mithro@mithis.net>
Phil Blundell <pb@handhelds.org>
Seb Frankengul <seb@frankengul.org>
-Holger Freyther <zecke@handhelds.org>
+Holger Freyther <holger@moiji-mobile.com>
Marcin Juszkiewicz <marcin@juszkiewicz.com.pl>
Chris Larson <kergoth@handhelds.org>
Ulrich Luckas <luckas@musoft.de>
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index 797b5a8d6..6d0528953 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -23,14 +23,18 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
-import sys
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])),
+import sys, logging
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
'lib'))
import optparse
import warnings
from traceback import format_exception
-import bb
+try:
+ import bb
+except RuntimeError, exc:
+ sys.exit(str(exc))
+from bb import event
import bb.msg
from bb import cooker
from bb import ui
@@ -39,12 +43,9 @@ from bb.server import none
#from bb.server import xmlrpc
__version__ = "1.11.0"
+logger = logging.getLogger("BitBake")
-
-#============================================================================#
-# BBOptions
-#============================================================================#
class BBConfiguration(object):
"""
Manages build options and configurations for one run
@@ -56,34 +57,44 @@ class BBConfiguration(object):
self.pkgs_to_build = []
-def print_exception(exc, value, tb):
- """Send exception information through bb.msg"""
- bb.fatal("".join(format_exception(exc, value, tb, limit=8)))
+def get_ui(config):
+ if config.ui:
+ interface = config.ui
+ else:
+ interface = 'knotty'
-sys.excepthook = print_exception
+ try:
+ # Dynamically load the UI based on the ui name. Although we
+ # suggest a fixed set this allows you to have flexibility in which
+ # ones are available.
+ module = __import__("bb.ui", fromlist = [interface])
+ return getattr(module, interface).main
+ except AttributeError:
+ sys.exit("FATAL: Invalid user interface '%s' specified.\n"
+ "Valid interfaces: depexp, goggle, ncurses, knotty [default]." % interface)
+# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others
+warnlog = logging.getLogger("BitBake.Warnings")
_warnings_showwarning = warnings.showwarning
def _showwarning(message, category, filename, lineno, file=None, line=None):
- """Display python warning messages using bb.msg"""
if file is not None:
if _warnings_showwarning is not None:
_warnings_showwarning(message, category, filename, lineno, file, line)
else:
s = warnings.formatwarning(message, category, filename, lineno)
- s = s.split("\n")[0]
- bb.msg.warn(None, s)
+ warnlog.warn(s)
warnings.showwarning = _showwarning
-warnings.simplefilter("ignore", DeprecationWarning)
+warnings.filterwarnings("ignore")
+warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
+warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
+warnings.filterwarnings("ignore", category=ImportWarning)
+warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
+warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")
-#============================================================================#
-# main
-#============================================================================#
def main():
- return_value = 1
-
parser = optparse.OptionParser(
version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__),
usage = """%prog [options] [package ...]
@@ -159,6 +170,11 @@ Default BBFILES are the .bb files in the current directory.""")
configuration.pkgs_to_build.extend(args[1:])
configuration.initial_path = os.environ['PATH']
+ ui_main = get_ui(configuration)
+
+ loghandler = event.LogHandler()
+ logger.addHandler(loghandler)
+
#server = bb.server.xmlrpc
server = bb.server.none
@@ -175,7 +191,6 @@ Default BBFILES are the .bb files in the current directory.""")
bb.utils.clean_environment()
cooker = bb.cooker.BBCooker(configuration, server)
-
cooker.parseCommandLine()
serverinfo = server.BitbakeServerInfo(cooker.server)
@@ -183,8 +198,10 @@ Default BBFILES are the .bb files in the current directory.""")
server.BitBakeServerFork(cooker, cooker.server, serverinfo, cooker_logfile)
del cooker
+ logger.removeHandler(loghandler)
+
# Setup a connection to the server (cooker)
- serverConnection = server.BitBakeServerConnection(serverinfo)
+ server_connection = server.BitBakeServerConnection(serverinfo)
# Launch the UI
if configuration.ui:
@@ -193,25 +210,15 @@ Default BBFILES are the .bb files in the current directory.""")
ui = "knotty"
try:
- # Dynamically load the UI based on the ui name. Although we
- # suggest a fixed set this allows you to have flexibility in which
- # ones are available.
- uimodule = __import__("bb.ui", fromlist = [ui])
- ui_init = getattr(uimodule, ui).init
- except AttributeError:
- print("FATAL: Invalid user interface '%s' specified. " % ui)
- print("Valid interfaces are 'ncurses', 'depexp' or the default, 'knotty'.")
- else:
- try:
- return_value = server.BitbakeUILauch().launch(serverinfo, ui_init, serverConnection.connection, serverConnection.events)
- except Exception as e:
- print("FATAL: Unable to start to '%s' UI: %s" % (ui, e))
- raise
+ return server.BitbakeUILauch().launch(serverinfo, ui_main, server_connection.connection, server_connection.events)
finally:
- serverConnection.terminate()
-
- return return_value
+ server_connection.terminate()
if __name__ == "__main__":
- ret = main()
+ try:
+ ret = main()
+ except Exception:
+ ret = 1
+ import traceback
+ traceback.print_exc(5)
sys.exit(ret)
diff --git a/bitbake/bin/bitdoc b/bitbake/bin/bitdoc
index 8043b2bd1..c2a7061d1 100755
--- a/bitbake/bin/bitdoc
+++ b/bitbake/bin/bitdoc
@@ -20,7 +20,7 @@
import optparse, os, sys
# bitbake
-sys.path.append(os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
+sys.path.append(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
import bb
import bb.parse
from string import split, join
diff --git a/bitbake/contrib/vim/ftdetect/bitbake.vim b/bitbake/contrib/vim/ftdetect/bitbake.vim
index 3882a9a08..179e4d988 100644
--- a/bitbake/contrib/vim/ftdetect/bitbake.vim
+++ b/bitbake/contrib/vim/ftdetect/bitbake.vim
@@ -1,4 +1,24 @@
-au BufNewFile,BufRead *.bb setfiletype bitbake
-au BufNewFile,BufRead *.bbclass setfiletype bitbake
-au BufNewFile,BufRead *.inc setfiletype bitbake
-" au BufNewFile,BufRead *.conf setfiletype bitbake
+" Vim filetype detection file
+" Language: BitBake
+" Author: Ricardo Salveti <rsalveti@rsalveti.net>
+" Copyright: Copyright (C) 2008 Ricardo Salveti <rsalveti@rsalveti.net>
+" Licence: You may redistribute this under the same terms as Vim itself
+"
+" This sets up the syntax highlighting for BitBake files, like .bb, .bbclass and .inc
+
+if &compatible || version < 600
+ finish
+endif
+
+" .bb and .bbclass
+au BufNewFile,BufRead *.b{b,bclass} set filetype=bitbake
+
+" .inc
+au BufNewFile,BufRead *.inc set filetype=bitbake
+
+" .conf
+au BufNewFile,BufRead *.conf
+ \ if (match(expand("%:p:h"), "conf") > 0) |
+ \ set filetype=bitbake |
+ \ endif
+
diff --git a/bitbake/contrib/vim/ftplugin/bitbake.vim b/bitbake/contrib/vim/ftplugin/bitbake.vim
new file mode 100644
index 000000000..ed69d3b1b
--- /dev/null
+++ b/bitbake/contrib/vim/ftplugin/bitbake.vim
@@ -0,0 +1 @@
+set sts=4 sw=4 et
diff --git a/bitbake/contrib/vim/plugin/newbb.vim b/bitbake/contrib/vim/plugin/newbb.vim
new file mode 100755
index 000000000..afba1d9aa
--- /dev/null
+++ b/bitbake/contrib/vim/plugin/newbb.vim
@@ -0,0 +1,85 @@
+" Vim plugin file
+" Purpose: Create a template for new bb files
+" Author: Ricardo Salveti <rsalveti@gmail.com>
+" Copyright: Copyright (C) 2008 Ricardo Salveti <rsalveti@gmail.com>
+"
+" This file is licensed under the MIT license, see COPYING.MIT in
+" this source distribution for the terms.
+"
+" Based on the gentoo-syntax package
+"
+" Will try to use git to find the user name and email
+
+if &compatible || v:version < 600
+ finish
+endif
+
+fun! <SID>GetUserName()
+ let l:user_name = system("git-config --get user.name")
+ if v:shell_error
+        return "Unknown User"
+ else
+ return substitute(l:user_name, "\n", "", "")
+endfun
+
+fun! <SID>GetUserEmail()
+ let l:user_email = system("git-config --get user.email")
+ if v:shell_error
+        return "unknown@user.org"
+ else
+ return substitute(l:user_email, "\n", "", "")
+endfun
+
+fun! BBHeader()
+ let l:current_year = strftime("%Y")
+ let l:user_name = <SID>GetUserName()
+ let l:user_email = <SID>GetUserEmail()
+ 0 put ='# Copyright (C) ' . l:current_year .
+ \ ' ' . l:user_name . ' <' . l:user_email . '>'
+ put ='# Released under the MIT license (see COPYING.MIT for the terms)'
+ $
+endfun
+
+fun! NewBBTemplate()
+ let l:paste = &paste
+ set nopaste
+
+ " Get the header
+ call BBHeader()
+
+ " New the bb template
+ put ='DESCRIPTION = \"\"'
+ put ='HOMEPAGE = \"\"'
+ put ='LICENSE = \"\"'
+ put ='SECTION = \"\"'
+ put ='DEPENDS = \"\"'
+ put ='PR = \"r0\"'
+ put =''
+ put ='SRC_URI = \"\"'
+
+ " Go to the first place to edit
+ 0
+ /^DESCRIPTION =/
+ exec "normal 2f\""
+
+ if paste == 1
+ set paste
+ endif
+endfun
+
+if !exists("g:bb_create_on_empty")
+ let g:bb_create_on_empty = 1
+endif
+
+" disable in case of vimdiff
+if v:progname =~ "vimdiff"
+ let g:bb_create_on_empty = 0
+endif
+
+augroup NewBB
+ au BufNewFile *.bb
+ \ if g:bb_create_on_empty |
+ \ call NewBBTemplate() |
+ \ endif
+augroup END
+
diff --git a/bitbake/contrib/vim/syntax/bitbake.vim b/bitbake/contrib/vim/syntax/bitbake.vim
index be55980b3..a06dd9e0a 100644
--- a/bitbake/contrib/vim/syntax/bitbake.vim
+++ b/bitbake/contrib/vim/syntax/bitbake.vim
@@ -1,127 +1,123 @@
" Vim syntax file
+" Language: BitBake bb/bbclasses/inc
+" Author: Chris Larson <kergoth@handhelds.org>
+" Ricardo Salveti <rsalveti@rsalveti.net>
+" Copyright: Copyright (C) 2004 Chris Larson <kergoth@handhelds.org>
+" Copyright (C) 2008 Ricardo Salveti <rsalveti@rsalveti.net>
"
-" Copyright (C) 2004 Chris Larson <kergoth@handhelds.org>
" This file is licensed under the MIT license, see COPYING.MIT in
" this source distribution for the terms.
"
-" Language: BitBake
-" Maintainer: Chris Larson <kergoth@handhelds.org>
-" Filenames: *.bb, *.bbclass
-
-if version < 600
- syntax clear
-elseif exists("b:current_syntax")
- finish
-endif
-
-syn case match
-
-" Catch incorrect syntax (only matches if nothing else does)
+" Syntax highlighting for bb, bbclasses and inc files.
"
-syn match bbUnmatched "."
+" It's an entirely new type, just has specific syntax in shell and python code
+if &compatible || v:version < 600
+ finish
+endif
+if exists("b:current_syntax")
+ finish
+endif
syn include @python syntax/python.vim
if exists("b:current_syntax")
unlet b:current_syntax
endif
+" BitBake syntax
-" Other
-
-syn match bbComment "^#.*$" display contains=bbTodo
-syn keyword bbTodo TODO FIXME XXX contained
-syn match bbDelimiter "[(){}=]" contained
-syn match bbQuote /['"]/ contained
-syn match bbArrayBrackets "[\[\]]" contained
-
-
-" BitBake strings
-
-syn match bbContinue "\\$"
-syn region bbString matchgroup=bbQuote start=/"/ skip=/\\$/ excludenl end=/"/ contained keepend contains=bbTodo,bbContinue,bbVarInlinePy,bbVarDeref
-syn region bbString matchgroup=bbQuote start=/'/ skip=/\\$/ excludenl end=/'/ contained keepend contains=bbTodo,bbContinue,bbVarInlinePy,bbVarDeref
-
-" BitBake variable metadata
-
-syn match bbVarBraces "[\${}]"
-syn region bbVarDeref matchgroup=bbVarBraces start="${" end="}" contained
-" syn region bbVarDeref start="${" end="}" contained
-" syn region bbVarInlinePy start="${@" end="}" contained contains=@python
-syn region bbVarInlinePy matchgroup=bbVarBraces start="${@" end="}" contained contains=@python
-
-syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite
-" syn match bbVarDeref "${[a-zA-Z0-9\-_\.]\+}" contained
-syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.]\+\(_[${}a-zA/-Z0-9\-_\.]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
-
-syn match bbIdentifier "[a-zA-Z0-9\-_\./]\+" display contained
-"syn keyword bbVarEq = display contained nextgroup=bbVarValue
-syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)" contained nextgroup=bbVarValue
-syn match bbVarValue ".*$" contained contains=bbString
-
-" BitBake variable metadata flags
-syn match bbVarFlagDef "^\([a-zA-Z0-9\-_\.]\+\)\(\[[a-zA-Z0-9\-_\.]\+\]\)\@=" contains=bbIdentifier nextgroup=bbVarFlagFlag
-syn region bbVarFlagFlag matchgroup=bbArrayBrackets start="\[" end="\]\s*\(=\)\@=" keepend excludenl contained contains=bbIdentifier nextgroup=bbVarEq
-"syn match bbVarFlagFlag "\[\([a-zA-Z0-9\-_\.]\+\)\]\s*\(=\)\@=" contains=bbIdentifier nextgroup=bbVarEq
-
-
-" Functions!
-syn match bbFunction "\h\w*" display contained
+" Matching case
+syn case match
+" Indicates the error when nothing is matched
+syn match bbUnmatched "."
-" BitBake python metadata
+" Comments
+syn cluster bbCommentGroup contains=bbTodo,@Spell
+syn keyword bbTodo COMBAK FIXME TODO XXX contained
+syn match bbComment "#.*$" contains=@bbCommentGroup
-syn keyword bbPythonFlag python contained nextgroup=bbFunction
-syn match bbPythonFuncDef "^\(python\s\+\)\(\w\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbPythonFlag,bbFunction,bbDelimiter nextgroup=bbPythonFuncRegion skipwhite
-syn region bbPythonFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" keepend contained contains=@python
-"hi def link bbPythonFuncRegion Comment
+" String helpers
+syn match bbQuote +['"]+ contained
+syn match bbDelimiter "[(){}=]" contained
+syn match bbArrayBrackets "[\[\]]" contained
+" BitBake strings
+syn match bbContinue "\\$"
+syn region bbString matchgroup=bbQuote start=+"+ skip=+\\$+ excludenl end=+"+ contained keepend contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell
+syn region bbString matchgroup=bbQuote start=+'+ skip=+\\$+ excludenl end=+'+ contained keepend contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell
+
+" Vars definition
+syn match bbExport "^export" nextgroup=bbIdentifier skipwhite
+syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite
+syn match bbIdentifier "[a-zA-Z0-9\-_\.\/\+]\+" display contained
+syn match bbVarDeref "${[a-zA-Z0-9\-_\.\/\+]\+}" contained
+syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)" contained nextgroup=bbVarValue
+syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+]\+\(_[${}a-zA-Z0-9\-_\.\/\+]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
+syn match bbVarValue ".*$" contained contains=bbString,bbVarDeref,bbVarPyValue
+syn region bbVarPyValue start=+${@+ skip=+\\$+ excludenl end=+}+ contained contains=@python
+
+" Vars metadata flags
+syn match bbVarFlagDef "^\([a-zA-Z0-9\-_\.]\+\)\(\[[a-zA-Z0-9\-_\.]\+\]\)\@=" contains=bbIdentifier nextgroup=bbVarFlagFlag
+syn region bbVarFlagFlag matchgroup=bbArrayBrackets start="\[" end="\]\s*\(=\)\@=" keepend excludenl contained contains=bbIdentifier nextgroup=bbVarEq
+
+" Includes and requires
+syn keyword bbInclude inherit include require contained
+syn match bbIncludeRest ".*$" contained contains=bbString,bbVarDeref
+syn match bbIncludeLine "^\(inherit\|include\|require\)\s\+" contains=bbInclude nextgroup=bbIncludeRest
+
+" Add taks and similar
+syn keyword bbStatement addtask addhandler after before EXPORT_FUNCTIONS contained
+syn match bbStatementRest ".*$" skipwhite contained contains=bbStatement
+syn match bbStatementLine "^\(addtask\|addhandler\|after\|before\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest
+
+" OE Important Functions
+syn keyword bbOEFunctions do_fetch do_unpack do_patch do_configure do_compile do_stage do_install do_package contained
+
+" Generic Functions
+syn match bbFunction "\h[0-9A-Za-z_-]*" display contained contains=bbOEFunctions
" BitBake shell metadata
syn include @shell syntax/sh.vim
if exists("b:current_syntax")
unlet b:current_syntax
endif
+syn keyword bbShFakeRootFlag fakeroot contained
+syn match bbShFuncDef "^\(fakeroot\s*\)\?\([0-9A-Za-z_-]\+\)\(python\)\@<!\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbFunction,bbDelimiter nextgroup=bbShFuncRegion skipwhite
+syn region bbShFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" keepend contained contains=@shell
-syn keyword bbFakerootFlag fakeroot contained nextgroup=bbFunction
-syn match bbShellFuncDef "^\(fakeroot\s*\)\?\(\w\+\)\(python\)\@<!\(\s*()\s*\)\({\)\@=" contains=bbFakerootFlag,bbFunction,bbDelimiter nextgroup=bbShellFuncRegion skipwhite
-syn region bbShellFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" keepend contained contains=@shell
-"hi def link bbShellFuncRegion Comment
-
+" BitBake python metadata
+syn keyword bbPyFlag python contained
+syn match bbPyFuncDef "^\(python\s\+\)\([0-9A-Za-z_-]\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbPyFlag,bbFunction,bbDelimiter nextgroup=bbPyFuncRegion skipwhite
+syn region bbPyFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" keepend contained contains=@python
" BitBake 'def'd python functions
-syn keyword bbDef def contained
-syn region bbDefRegion start='^def\s\+\w\+\s*([^)]*)\s*:\s*$' end='^\(\s\|$\)\@!' contains=@python
-
-
-" BitBake statements
-syn keyword bbStatement include inherit require addtask addhandler EXPORT_FUNCTIONS display contained
-syn match bbStatementLine "^\(include\|inherit\|require\|addtask\|addhandler\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest
-syn match bbStatementRest ".*$" contained contains=bbString,bbVarDeref
-
-" Highlight
-"
-hi def link bbArrayBrackets Statement
-hi def link bbUnmatched Error
-hi def link bbContinue Special
-hi def link bbDef Statement
-hi def link bbPythonFlag Type
-hi def link bbExportFlag Type
-hi def link bbFakerootFlag Type
-hi def link bbStatement Statement
-hi def link bbString String
-hi def link bbTodo Todo
-hi def link bbComment Comment
-hi def link bbOperator Operator
-hi def link bbError Error
-hi def link bbFunction Function
-hi def link bbDelimiter Delimiter
-hi def link bbIdentifier Identifier
-hi def link bbVarEq Operator
-hi def link bbQuote String
-hi def link bbVarValue String
-" hi def link bbVarInlinePy PreProc
-hi def link bbVarDeref PreProc
-hi def link bbVarBraces PreProc
+syn keyword bbPyDef def contained
+syn region bbPyDefRegion start='^\(def\s\+\)\([0-9A-Za-z_-]\+\)\(\s*(.*)\s*\):\s*$' end='^\(\s\|$\)\@!' contains=@python
+
+" Highlighting Definitions
+hi def link bbUnmatched Error
+hi def link bbInclude Include
+hi def link bbTodo Todo
+hi def link bbComment Comment
+hi def link bbQuote String
+hi def link bbString String
+hi def link bbDelimiter Keyword
+hi def link bbArrayBrackets Statement
+hi def link bbContinue Special
+hi def link bbExport Type
+hi def link bbExportFlag Type
+hi def link bbIdentifier Identifier
+hi def link bbVarDeref PreProc
+hi def link bbVarDef Identifier
+hi def link bbVarValue String
+hi def link bbShFakeRootFlag Type
+hi def link bbFunction Function
+hi def link bbPyFlag Type
+hi def link bbPyDef Statement
+hi def link bbStatement Statement
+hi def link bbStatementRest Identifier
+hi def link bbOEFunctions Special
+hi def link bbVarPyValue PreProc
let b:current_syntax = "bb"
diff --git a/bitbake/doc/manual/usermanual.xml b/bitbake/doc/manual/usermanual.xml
index 748ac319e..32b40eee5 100644
--- a/bitbake/doc/manual/usermanual.xml
+++ b/bitbake/doc/manual/usermanual.xml
@@ -318,7 +318,7 @@ a per URI parameters separated by a <quote>;</quote> consisting of a key and a v
<title>CVS File Fetcher</title>
<para>The URN for the CVS Fetcher is <emphasis>cvs</emphasis>. This Fetcher honors the variables <varname>DL_DIR</varname>, <varname>SRCDATE</varname>, <varname>FETCHCOMMAND_cvs</varname>, <varname>UPDATECOMMAND_cvs</varname>. <varname>DL_DIR</varname> specifies where a temporary checkout is saved, <varname>SRCDATE</varname> specifies which date to use when doing the fetching (the special value of "now" will cause the checkout to be updated on every build), <varname>FETCHCOMMAND</varname> and <varname>UPDATECOMMAND</varname> specify which executables should be used when doing the CVS checkout or update.
</para>
- <para>The supported Parameters are <varname>module</varname>, <varname>tag</varname>, <varname>date</varname>, <varname>method</varname>, <varname>localdir</varname>, <varname>rsh</varname>. The <varname>module</varname> specifies which module to check out, the <varname>tag</varname> describes which CVS TAG should be used for the checkout by default the TAG is empty. A <varname>date</varname> can be specified to override the SRCDATE of the configuration to checkout a specific date. The special value of "now" will cause the checkout to be updated on every build.<varname>method</varname> is by default <emphasis>pserver</emphasis>, if <emphasis>ext</emphasis> is used the <varname>rsh</varname> parameter will be evaluated and <varname>CVS_RSH</varname> will be set. Finally <varname>localdir</varname> is used to checkout into a special directory relative to <varname>CVSDIR</varname>.
+    <para>The supported Parameters are <varname>module</varname>, <varname>tag</varname>, <varname>date</varname>, <varname>method</varname>, <varname>localdir</varname>, <varname>rsh</varname> and <varname>scmdata</varname>. The <varname>module</varname> specifies which module to check out, the <varname>tag</varname> describes which CVS TAG should be used for the checkout. By default the TAG is empty. A <varname>date</varname> can be specified to override the SRCDATE of the configuration to check out a specific date. The special value of "now" will cause the checkout to be updated on every build. <varname>method</varname> is by default <emphasis>pserver</emphasis>; if <emphasis>ext</emphasis> is used, the <varname>rsh</varname> parameter will be evaluated and <varname>CVS_RSH</varname> will be set. Finally, <varname>localdir</varname> is used to check out into a special directory relative to <varname>CVSDIR</varname>. If <varname>scmdata</varname> is set to <quote>keep</quote>, the <quote>CVS</quote> directories will be available during compile-time.
<screen><varname>SRC_URI</varname> = "cvs://CVSROOT;module=mymodule;tag=some-version;method=ext"
<varname>SRC_URI</varname> = "cvs://CVSROOT;module=mymodule;date=20060126;localdir=usethat"
</screen>
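For illustration only (not part of the patch): with the new scmdata parameter documented above, a CVS checkout that keeps its CVS metadata directories could be requested like this, where the server path and module name are placeholders:

    SRC_URI = "cvs://anonymous@cvs.example.org/cvsroot;module=mymodule;tag=some-version;scmdata=keep"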
@@ -351,7 +351,7 @@ will be tried first when fetching a file if that fails the actual file will be t
</para>
<para>This Fetcher honors the variables <varname>FETCHCOMMAND_svn</varname>, <varname>DL_DIR</varname>, <varname>SRCDATE</varname>. <varname>FETCHCOMMAND</varname> contains the subversion command, <varname>DL_DIR</varname> is the directory where tarballs will be saved, <varname>SRCDATE</varname> specifies which date to use when doing the fetching (the special value of "now" will cause the checkout to be updated on every build).
</para>
- <para>The supported Parameters are <varname>proto</varname>, <varname>rev</varname>. <varname>proto</varname> is the subversion prototype, <varname>rev</varname> is the subversions revision.
+ <para>The supported Parameters are <varname>proto</varname>, <varname>rev</varname> and <varname>scmdata</varname>. <varname>proto</varname> is the subversion protocol, <varname>rev</varname> is the subversion revision. If <varname>scmdata</varname> is set to <quote>keep</quote>, the <quote>.svn</quote> directories will be available during compile-time.
</para>
<para><screen><varname>SRC_URI</varname> = "svn://svn.oe.handhelds.org/svn;module=vip;proto=http;rev=667"
<varname>SRC_URI</varname> = "svn://svn.oe.handhelds.org/svn/;module=opie;proto=svn+ssh;date=20060126"
@@ -364,7 +364,7 @@ will be tried first when fetching a file if that fails the actual file will be t
</para>
<para>The Variables <varname>DL_DIR</varname>, <varname>GITDIR</varname> are used. <varname>DL_DIR</varname> will be used to store the checkedout version. <varname>GITDIR</varname> will be used as the base directory where the git tree is cloned to.
</para>
- <para>The Parameters are <emphasis>tag</emphasis>, <emphasis>protocol</emphasis>. <emphasis>tag</emphasis> is a git tag, the default is <quote>master</quote>. <emphasis>protocol</emphasis> is the git protocol to use and defaults to <quote>rsync</quote>.
+ <para>The Parameters are <emphasis>tag</emphasis>, <emphasis>protocol</emphasis> and <emphasis>scmdata</emphasis>. <emphasis>tag</emphasis> is a git tag, the default is <quote>master</quote>. <emphasis>protocol</emphasis> is the git protocol to use and defaults to <quote>rsync</quote>. If <emphasis>scmdata</emphasis> is set to <quote>keep</quote>, the <quote>.git</quote> directory will be available during compile-time.
</para>
<para><screen><varname>SRC_URI</varname> = "git://git.oe.handhelds.org/git/vip.git;tag=version-1"
<varname>SRC_URI</varname> = "git://git.oe.handhelds.org/git/vip.git;protocol=http"
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 88adfc1df..4c7afc9c2 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -28,6 +28,41 @@ if sys.version_info < (2, 6, 0):
raise RuntimeError("Sorry, python 2.6.0 or later is required for this version of bitbake")
import os
+import logging
+import traceback
+
+class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+Logger = logging.getLoggerClass()
+class BBLogger(Logger):
+ def __init__(self, name):
+ if name.split(".")[0] == "BitBake":
+ self.debug = self.bbdebug
+ Logger.__init__(self, name)
+
+ def bbdebug(self, level, msg, *args, **kwargs):
+ return self.log(logging.DEBUG - level + 1, msg, *args, **kwargs)
+
+ def plain(self, msg, *args, **kwargs):
+ return self.log(logging.INFO + 1, msg, *args, **kwargs)
+
+ def verbose(self, msg, *args, **kwargs):
+ return self.log(logging.INFO - 1, msg, *args, **kwargs)
+
+ def exception(self, msg, *args, **kwargs):
+ return self.critical("%s\n%s" % (msg, traceback.format_exc()), *args, **kwargs)
+
+logging.raiseExceptions = False
+logging.setLoggerClass(BBLogger)
+
+logger = logging.getLogger("BitBake")
+logger.addHandler(NullHandler())
+logger.setLevel(logging.INFO)
+
+# This has to be imported after the setLoggerClass, as the import of bb.msg
+# can result in construction of the various loggers.
import bb.msg
if "BBDEBUG" in os.environ:
@@ -35,25 +70,29 @@ if "BBDEBUG" in os.environ:
if level:
bb.msg.set_debug_level(level)
+if True or os.environ.get("BBFETCH2"):
+ from bb import fetch2 as fetch
+ sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
# Messaging convenience functions
def plain(*args):
- bb.msg.plain(''.join(args))
+ logger.plain(''.join(args))
def debug(lvl, *args):
- bb.msg.debug(lvl, None, ''.join(args))
+ logger.debug(lvl, ''.join(args))
def note(*args):
- bb.msg.note(1, None, ''.join(args))
+ logger.info(''.join(args))
def warn(*args):
- bb.msg.warn(None, ''.join(args))
+ logger.warn(''.join(args))
def error(*args):
- bb.msg.error(None, ''.join(args))
+ logger.error(''.join(args))
def fatal(*args):
- bb.msg.fatal(None, ''.join(args))
+ logger.critical(''.join(args))
+ sys.exit(1)
def deprecated(func, name = None, advice = ""):
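A rough sketch, not part of the patch, of how the logging rework above is meant to behave: the convenience functions in bb now forward to the "BitBake" logger, whose BBLogger class maps BitBake's numeric debug levels onto standard logging levels. This assumes bitbake's lib directory is on sys.path and that a handler has been attached somewhere (here via logging.basicConfig for the sketch):

    import logging
    import bb   # assumes bitbake's lib/ directory is on sys.path

    logging.basicConfig()                 # simple root handler, just for this sketch
    bb.plain("always printed")            # BBLogger.plain   -> level INFO + 1
    bb.note("informational message")      # logger.info      -> level INFO
    bb.warn("something looks wrong")      # logger.warn      -> level WARNING
    bb.debug(2, "detailed debug output")  # BBLogger.bbdebug -> level DEBUG - 2 + 1,
                                          # only emitted if the "BitBake" logger level is lowered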
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 18a75edca..07bd35afc 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -25,9 +25,20 @@
#
#Based on functions from the base bb module, Copyright 2003 Holger Schurig
+import os
+import sys
+import logging
+import bb
+import bb.msg
+import bb.process
+from contextlib import nested
from bb import data, event, mkdirhier, utils
-import bb, os, sys
-import bb.utils
+
+bblogger = logging.getLogger('BitBake')
+logger = logging.getLogger('BitBake.Build')
+
+NULL = open(os.devnull, 'r+')
+
# When we execute a python function we'd like certain things
# in all namespaces, hence we add them to __builtins__
@@ -36,13 +47,22 @@ import bb.utils
__builtins__['bb'] = bb
__builtins__['os'] = os
-# events
class FuncFailed(Exception):
- """
- Executed function failed
- First parameter a message
- Second paramter is a logfile (optional)
- """
+ def __init__(self, name = None, logfile = None):
+ self.logfile = logfile
+ self.name = name
+ if name:
+ self.msg = "Function '%s' failed" % name
+ else:
+ self.msg = "Function failed"
+
+ def __str__(self):
+ if self.logfile and os.path.exists(self.logfile):
+ msg = ("%s (see %s for further information)" %
+ (self.msg, self.logfile))
+ else:
+ msg = self.msg
+ return msg
class TaskBase(event.Event):
"""Base class for task events"""
@@ -69,38 +89,56 @@ class TaskSucceeded(TaskBase):
class TaskFailed(TaskBase):
"""Task execution failed"""
- def __init__(self, msg, logfile, t, d ):
+
+ def __init__(self, task, logfile, metadata):
self.logfile = logfile
- self.msg = msg
- TaskBase.__init__(self, t, d)
+ super(TaskFailed, self).__init__(task, metadata)
class TaskInvalid(TaskBase):
- """Invalid Task"""
-# functions
+ def __init__(self, task, metadata):
+ super(TaskInvalid, self).__init__(task, metadata)
+ self._message = "No such task '%s'" % task
+
+
+class LogTee(object):
+ def __init__(self, logger, outfile):
+ self.outfile = outfile
+ self.logger = logger
+ self.name = self.outfile.name
+
+ def write(self, string):
+ self.logger.plain(string)
+ self.outfile.write(string)
+
+ def __enter__(self):
+ self.outfile.__enter__()
+ return self
+
+ def __exit__(self, *excinfo):
+ self.outfile.__exit__(*excinfo)
+
+ def __repr__(self):
+ return '<LogTee {0}>'.format(self.name)
+
def exec_func(func, d, dirs = None):
"""Execute an BB 'function'"""
body = data.getVar(func, d)
if not body:
- bb.warn("Function %s doesn't exist" % func)
+ if body is None:
+ logger.warn("Function %s doesn't exist", func)
return
flags = data.getVarFlags(func, d)
- for item in ['deps', 'check', 'interactive', 'python', 'cleandirs', 'dirs', 'lockfiles', 'fakeroot', 'task']:
- if not item in flags:
- flags[item] = None
-
- ispython = flags['python']
-
- cleandirs = flags['cleandirs']
+ cleandirs = flags.get('cleandirs')
if cleandirs:
for cdir in data.expand(cleandirs, d).split():
- os.system("rm -rf %s" % cdir)
+ bb.utils.remove(cdir, True)
if dirs is None:
- dirs = flags['dirs']
+ dirs = flags.get('dirs')
if dirs:
dirs = data.expand(dirs, d).split()
@@ -110,277 +148,254 @@ def exec_func(func, d, dirs = None):
adir = dirs[-1]
else:
adir = data.getVar('B', d, 1)
+ if not os.path.exists(adir):
+ adir = None
- # Save current directory
- try:
- prevdir = os.getcwd()
- except OSError:
- prevdir = data.getVar('TOPDIR', d, True)
-
- # Setup scriptfile
- t = data.getVar('T', d, 1)
- if not t:
- raise SystemExit("T variable not set, unable to build")
- bb.utils.mkdirhier(t)
- runfile = "%s/run.%s.%s" % (t, func, str(os.getpid()))
- logfile = d.getVar("BB_LOGFILE", True)
-
- # Change to correct directory (if specified)
- if adir and os.access(adir, os.F_OK):
- os.chdir(adir)
-
- locks = []
- lockfiles = flags['lockfiles']
- if lockfiles:
- for lock in data.expand(lockfiles, d).split():
- locks.append(bb.utils.lockfile(lock))
+ ispython = flags.get('python')
+ if flags.get('fakeroot') and not flags.get('task'):
+ bb.fatal("Function %s specifies fakeroot but isn't a task?!" % func)
- try:
- # Run the function
- if ispython:
- exec_func_python(func, d, runfile, logfile)
- else:
- exec_func_shell(func, d, runfile, logfile, flags)
+ lockflag = flags.get('lockfiles')
+ if lockflag:
+ lockfiles = [data.expand(f, d) for f in lockflag.split()]
+ else:
+ lockfiles = None
- # Restore original directory
- try:
- os.chdir(prevdir)
- except:
- pass
+ tempdir = data.getVar('T', d, 1)
+ runfile = os.path.join(tempdir, 'run.{0}.{1}'.format(func, os.getpid()))
- finally:
+ with bb.utils.fileslocked(lockfiles):
+ if ispython:
+ exec_func_python(func, d, runfile, cwd=adir)
+ else:
+ exec_func_shell(func, d, runfile, cwd=adir)
- # Unlock any lockfiles
- for lock in locks:
- bb.utils.unlockfile(lock)
+_functionfmt = """
+def {function}(d):
+{body}
-def exec_func_python(func, d, runfile, logfile):
+{function}(d)
+"""
+logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
+def exec_func_python(func, d, runfile, cwd=None):
"""Execute a python BB 'function'"""
- bbfile = bb.data.getVar('FILE', d, 1)
- tmp = "def " + func + "(d):\n%s" % data.getVar(func, d)
- tmp += '\n' + func + '(d)'
+ bbfile = d.getVar('FILE', True)
+ try:
+ olddir = os.getcwd()
+ except OSError:
+ olddir = None
+ code = _functionfmt.format(function=func, body=d.getVar(func, True))
+ bb.utils.mkdirhier(os.path.dirname(runfile))
+ with open(runfile, 'w') as script:
+ script.write(code)
+
+ if cwd:
+ os.chdir(cwd)
- f = open(runfile, "w")
- f.write(tmp)
- comp = utils.better_compile(tmp, func, bbfile)
try:
- utils.better_exec(comp, {"d": d}, tmp, bbfile)
+ comp = utils.better_compile(code, func, bbfile)
+ utils.better_exec(comp, {"d": d}, code, bbfile)
except:
- (t, value, tb) = sys.exc_info()
-
- if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
+ if sys.exc_info()[0] in (bb.parse.SkipPackage, bb.build.FuncFailed):
raise
- raise FuncFailed("Function %s failed" % func, logfile)
+ raise FuncFailed(func, None)
+ finally:
+ if olddir:
+ os.chdir(olddir)
-def exec_func_shell(func, d, runfile, logfile, flags):
- """Execute a shell BB 'function' Returns true if execution was successful.
-
- For this, it creates a bash shell script in the tmp dectory, writes the local
- data into it and finally executes. The output of the shell will end in a log file and stdout.
+def exec_func_shell(function, d, runfile, cwd=None):
+ """Execute a shell function from the metadata
Note on directory behavior. The 'dirs' varflag should contain a list
of the directories you need created prior to execution. The last
item in the list is where we will chdir/cd to.
"""
- deps = flags['deps']
- check = flags['check']
- if check in globals():
- if globals()[check](func, deps):
- return
-
- f = open(runfile, "w")
- f.write("#!/bin/sh -e\n")
- if bb.msg.debug_level['default'] > 0: f.write("set -x\n")
- data.emit_func(func, f, d)
-
- f.write("cd %s\n" % os.getcwd())
- if func: f.write("%s\n" % func)
- f.close()
- os.chmod(runfile, 0775)
- if not func:
- raise FuncFailed("Function not specified for exec_func_shell")
-
- # execute function
- if flags['fakeroot'] and not flags['task']:
- bb.fatal("Function %s specifies fakeroot but isn't a task?!" % func)
-
- lang_environment = "LC_ALL=C "
- ret = os.system('%ssh -e %s' % (lang_environment, runfile))
+ # Don't let the emitted shell script override PWD
+ d.delVarFlag('PWD', 'export')
- if ret == 0:
- return
+ with open(runfile, 'w') as script:
+ script.write('#!/bin/sh -e\n')
+ if logger.isEnabledFor(logging.DEBUG):
+ script.write("set -x\n")
+ data.emit_func(function, script, d)
- raise FuncFailed("function %s failed" % func, logfile)
+ script.write("%s\n" % function)
+ os.fchmod(script.fileno(), 0775)
+ env = {
+ 'PATH': d.getVar('PATH', True),
+ 'LC_ALL': 'C',
+ }
-def exec_task(fn, task, d):
- """Execute an BB 'task'
+ cmd = runfile
- The primary difference between executing a task versus executing
- a function is that a task exists in the task digraph, and therefore
- has dependencies amongst other tasks."""
+ if logger.isEnabledFor(logging.DEBUG):
+ logfile = LogTee(logger, sys.stdout)
+ else:
+ logfile = sys.stdout
- # Check whther this is a valid task
+ try:
+ bb.process.run(cmd, env=env, cwd=cwd, shell=False, stdin=NULL,
+ log=logfile)
+ except bb.process.CmdError:
+ logfn = d.getVar('BB_LOGFILE', True)
+ raise FuncFailed(function, logfn)
+
+def _task_data(fn, task, d):
+ localdata = data.createCopy(d)
+ localdata.setVar('BB_FILENAME', fn)
+ localdata.setVar('BB_CURRENTTASK', task[3:])
+ localdata.setVar('OVERRIDES', 'task-%s:%s' %
+ (task[3:], d.getVar('OVERRIDES', False)))
+ localdata.finalize()
+ data.expandKeys(localdata)
+ return localdata
+
+def _exec_task(fn, task, d, quieterr):
+ """Execute a BB 'task'
+
+ Execution of a task involves a bit more setup than executing a function,
+ running it with its own local metadata, and with some useful variables set.
+ """
if not data.getVarFlag(task, 'task', d):
event.fire(TaskInvalid(task, d), d)
- bb.msg.error(bb.msg.domain.Build, "No such task: %s" % task)
+ logger.error("No such task: %s" % task)
return 1
- quieterr = False
- if d.getVarFlag(task, "quieterrors") is not None:
- quieterr = True
+ logger.debug(1, "Executing task %s", task)
+
+ localdata = _task_data(fn, task, d)
+ tempdir = localdata.getVar('T', True)
+ if not tempdir:
+ bb.fatal("T variable not set, unable to build")
+
+ bb.utils.mkdirhier(tempdir)
+ loglink = os.path.join(tempdir, 'log.{0}'.format(task))
+ logfn = os.path.join(tempdir, 'log.{0}.{1}'.format(task, os.getpid()))
+ if loglink:
+ bb.utils.remove(loglink)
- try:
- bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % task)
- old_overrides = data.getVar('OVERRIDES', d, 0)
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata)
- data.update_data(localdata)
- data.expandKeys(localdata)
- data.setVar('BB_FILENAME', fn, d)
- data.setVar('BB_CURRENTTASK', task[3:], d)
- event.fire(TaskStarted(task, localdata), localdata)
-
- # Setup logfiles
- t = data.getVar('T', d, 1)
- if not t:
- raise SystemExit("T variable not set, unable to build")
- bb.utils.mkdirhier(t)
- loglink = "%s/log.%s" % (t, task)
- logfile = "%s/log.%s.%s" % (t, task, str(os.getpid()))
- d.setVar("BB_LOGFILE", logfile)
-
- # Even though the log file has not yet been opened, lets create the link
- if loglink:
- try:
- os.remove(loglink)
- except OSError as e:
- pass
-
- try:
- os.symlink(logfile, loglink)
- except OSError as e:
- pass
-
- # Handle logfiles
- si = file('/dev/null', 'r')
try:
- so = file(logfile, 'w')
- except OSError as e:
- bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e)
- pass
- se = so
-
- # Dup the existing fds so we dont lose them
- osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
- oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
- ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
-
- # Replace those fds with our own
- os.dup2(si.fileno(), osi[1])
- os.dup2(so.fileno(), oso[1])
- os.dup2(se.fileno(), ose[1])
-
- # Since we've remapped stdout and stderr, its safe for log messages to be printed there now
- # exec_func can nest so we have to save state
- origstdout = bb.event.useStdout
- bb.event.useStdout = True
-
-
- prefuncs = (data.getVarFlag(task, 'prefuncs', localdata) or "").split()
- for func in prefuncs:
- exec_func(func, localdata)
- exec_func(task, localdata)
- postfuncs = (data.getVarFlag(task, 'postfuncs', localdata) or "").split()
- for func in postfuncs:
- exec_func(func, localdata)
+ os.symlink(logfn, loglink)
+ except OSError:
+ pass
- event.fire(TaskSucceeded(task, localdata), localdata)
+ prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
+ postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)
- # make stamp, or cause event and raise exception
- if not data.getVarFlag(task, 'nostamp', d) and not data.getVarFlag(task, 'selfstamp', d):
- make_stamp(task, d)
+ # Handle logfiles
+ si = file('/dev/null', 'r')
+ try:
+ logfile = file(logfn, 'w')
+ except OSError:
+ logger.exception("Opening log file '%s'", logfn)
+ pass
- except FuncFailed as message:
- # Try to extract the optional logfile
- try:
- (msg, logfile) = message
- except:
- logfile = None
- msg = message
- if not quieterr:
- bb.msg.error(bb.msg.domain.Build, "Task failed: %s" % message )
- failedevent = TaskFailed(msg, logfile, task, d)
- event.fire(failedevent, d)
- return 1
+ # Dup the existing fds so we dont lose them
+ osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
+ oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
+ ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
- except Exception:
- from traceback import format_exc
+ # Replace those fds with our own
+ os.dup2(si.fileno(), osi[1])
+ os.dup2(logfile.fileno(), oso[1])
+ os.dup2(logfile.fileno(), ose[1])
+
+ # Ensure python logging goes to the logfile
+ handler = logging.StreamHandler(logfile)
+ handler.setFormatter(logformatter)
+ bblogger.addHandler(handler)
+
+ localdata.setVar('BB_LOGFILE', logfn)
+
+ event.fire(TaskStarted(task, localdata), localdata)
+ try:
+ for func in (prefuncs or '').split():
+ exec_func(func, localdata)
+ exec_func(task, localdata)
+ for func in (postfuncs or '').split():
+ exec_func(func, localdata)
+ except FuncFailed as exc:
if not quieterr:
- bb.msg.error(bb.msg.domain.Build, "Build of %s failed" % (task))
- bb.msg.error(bb.msg.domain.Build, format_exc())
- failedevent = TaskFailed("Task Failed", None, task, d)
- event.fire(failedevent, d)
+ logger.error(str(exc))
+ event.fire(TaskFailed(exc.name, logfn, localdata), localdata)
return 1
finally:
sys.stdout.flush()
sys.stderr.flush()
- bb.event.useStdout = origstdout
+ bblogger.removeHandler(handler)
# Restore the backup fds
os.dup2(osi[0], osi[1])
os.dup2(oso[0], oso[1])
os.dup2(ose[0], ose[1])
- # Close our logs
- si.close()
- so.close()
- se.close()
-
- if logfile and os.path.exists(logfile) and os.path.getsize(logfile) == 0:
- bb.msg.debug(2, bb.msg.domain.Build, "Zero size logfile %s, removing" % logfile)
- os.remove(logfile)
- try:
- os.remove(loglink)
- except OSError as e:
- pass
-
# Close the backup fds
os.close(osi[0])
os.close(oso[0])
os.close(ose[0])
+ si.close()
+
+ logfile.close()
+ if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
+ logger.debug(2, "Zero size logfn %s, removing", logfn)
+ bb.utils.remove(logfn)
+ bb.utils.remove(loglink)
+ event.fire(TaskSucceeded(task, localdata), localdata)
+
+ if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'):
+ make_stamp(task, localdata)
return 0
-def extract_stamp(d, fn):
- """
- Extracts stamp format which is either a data dictionary (fn unset)
- or a dataCache entry (fn set).
- """
- if fn:
- return d.stamp[fn]
- return data.getVar('STAMP', d, 1)
+def exec_task(fn, task, d):
+ try:
+ quieterr = False
+ if d.getVarFlag(task, "quieterrors") is not None:
+ quieterr = True
+
+ return _exec_task(fn, task, d, quieterr)
+ except Exception:
+ from traceback import format_exc
+ if not quieterr:
+ logger.error("Build of %s failed" % (task))
+ logger.error(format_exc())
+ failedevent = TaskFailed("Task Failed", None, task, d)
+ event.fire(failedevent, d)
+ return 1
-def stamp_internal(task, d, file_name):
+def stamp_internal(taskname, d, file_name):
"""
Internal stamp helper function
- Removes any stamp for the given task
Makes sure the stamp directory exists
Returns the stamp path+filename
+
+ In the bitbake core, d can be a CacheData and file_name will be set.
+ When called in task context, d will be a data store, file_name will not be set
"""
- stamp = extract_stamp(d, file_name)
+ taskflagname = taskname
+ if taskname.endswith("_setscene") and taskname != "do_setscene":
+ taskflagname = taskname.replace("_setscene", "")
+
+ if file_name:
+ stamp = d.stamp[file_name]
+ extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
+ else:
+ stamp = d.getVar('STAMP', True)
+ file_name = d.getVar('BB_FILENAME', True)
+ extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
+
if not stamp:
return
- stamp = "%s.%s" % (stamp, task)
+
+ stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
+
bb.utils.mkdirhier(os.path.dirname(stamp))
- # Remove the file and recreate to force timestamp
- # change on broken NFS filesystems
- if os.access(stamp, os.F_OK):
- os.remove(stamp)
+
return stamp
def make_stamp(task, d, file_name = None):
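The reworked exec_func above now drives its behaviour from task varflags: cleandirs are removed, dirs are created (the last entry becomes the working directory), lockfiles are held for the duration, and fakeroot is only honoured on real tasks. A hedged sketch of how a recipe might set these flags; the task name and paths are purely illustrative:

    do_deploy[dirs] = "${WORKDIR}/deploy ${B}"
    do_deploy[cleandirs] = "${WORKDIR}/deploy"
    do_deploy[lockfiles] = "${WORKDIR}/deploy.lock"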
@@ -389,7 +404,10 @@ def make_stamp(task, d, file_name = None):
(d can be a data dict or dataCache)
"""
stamp = stamp_internal(task, d, file_name)
+ # Remove the file and recreate to force timestamp
+ # change on broken NFS filesystems
if stamp:
+ bb.utils.remove(stamp)
f = open(stamp, "w")
f.close()
@@ -398,7 +416,15 @@ def del_stamp(task, d, file_name = None):
Removes a stamp for a given task
(d can be a data dict or dataCache)
"""
- stamp_internal(task, d, file_name)
+ stamp = stamp_internal(task, d, file_name)
+ bb.utils.remove(stamp)
+
+def stampfile(taskname, d, file_name = None):
+ """
+ Return the stamp for a given task
+ (d can be a data dict or dataCache)
+ """
+ return stamp_internal(taskname, d, file_name)
def add_tasks(tasklist, d):
task_deps = data.getVar('_task_deps', d)
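stamp_internal above also consults a per-task stamp-extra-info varflag (or, in the cooker case, the cache's stamp_extrainfo table) when building the stamp file name via bb.parse.siggen.stampfile. An illustrative, hypothetical recipe usage:

    do_populate_sysroot[stamp-extra-info] = "${MACHINE}"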
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index c6f3794d5..ff42a37b4 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -29,27 +29,153 @@
import os
+import logging
+from collections import defaultdict, namedtuple
import bb.data
import bb.utils
+logger = logging.getLogger("BitBake.Cache")
+
try:
import cPickle as pickle
except ImportError:
import pickle
- bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
-
-__cache_version__ = "132"
-
-class Cache:
+ logger.info("Importing cPickle failed. "
+ "Falling back to a very slow implementation.")
+
+__cache_version__ = "136"
+
+recipe_fields = (
+ 'pn',
+ 'pv',
+ 'pr',
+ 'pe',
+ 'defaultpref',
+ 'depends',
+ 'provides',
+ 'task_deps',
+ 'stamp',
+ 'stamp_extrainfo',
+ 'broken',
+ 'not_world',
+ 'skipped',
+ 'timestamp',
+ 'packages',
+ 'packages_dynamic',
+ 'rdepends',
+ 'rdepends_pkg',
+ 'rprovides',
+ 'rprovides_pkg',
+ 'rrecommends',
+ 'rrecommends_pkg',
+ 'nocache',
+ 'variants',
+ 'file_depends',
+ 'tasks',
+ 'basetaskhashes',
+ 'hashfilename',
+)
+
+
+class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)):
+ __slots__ = ()
+
+ @classmethod
+ def listvar(cls, var, metadata):
+ return cls.getvar(var, metadata).split()
+
+ @classmethod
+ def intvar(cls, var, metadata):
+ return int(cls.getvar(var, metadata) or 0)
+
+ @classmethod
+ def depvar(cls, var, metadata):
+ return bb.utils.explode_deps(cls.getvar(var, metadata))
+
+ @classmethod
+ def pkgvar(cls, var, packages, metadata):
+ return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
+ for pkg in packages)
+
+ @classmethod
+ def taskvar(cls, var, tasks, metadata):
+ return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
+ for task in tasks)
+
+ @classmethod
+ def flaglist(cls, flag, varlist, metadata):
+ return dict((var, metadata.getVarFlag(var, flag, True))
+ for var in varlist)
+
+ @classmethod
+ def getvar(cls, var, metadata):
+ return metadata.getVar(var, True) or ''
+
+ @classmethod
+ def make_optional(cls, default=None, **kwargs):
+ """Construct the namedtuple from the specified keyword arguments,
+ with every value considered optional, using the default value if
+ it was not specified."""
+ for field in cls._fields:
+ kwargs[field] = kwargs.get(field, default)
+ return cls(**kwargs)
+
+ @classmethod
+ def from_metadata(cls, filename, metadata):
+ if cls.getvar('__SKIPPED', metadata):
+ return cls.make_optional(skipped=True)
+
+ tasks = metadata.getVar('__BBTASKS', False)
+
+ pn = cls.getvar('PN', metadata)
+ packages = cls.listvar('PACKAGES', metadata)
+ if not pn in packages:
+ packages.append(pn)
+
+ return RecipeInfo(
+ tasks = tasks,
+ basetaskhashes = cls.taskvar('BB_BASEHASH', tasks, metadata),
+ hashfilename = cls.getvar('BB_HASHFILENAME', metadata),
+
+ file_depends = metadata.getVar('__depends', False),
+ task_deps = metadata.getVar('_task_deps', False) or
+ {'tasks': [], 'parents': {}},
+ variants = cls.listvar('__VARIANTS', metadata) + [''],
+
+ skipped = False,
+ timestamp = bb.parse.cached_mtime(filename),
+ packages = cls.listvar('PACKAGES', metadata),
+ pn = pn,
+ pe = cls.getvar('PE', metadata),
+ pv = cls.getvar('PV', metadata),
+ pr = cls.getvar('PR', metadata),
+ nocache = cls.getvar('__BB_DONT_CACHE', metadata),
+ defaultpref = cls.intvar('DEFAULT_PREFERENCE', metadata),
+ broken = cls.getvar('BROKEN', metadata),
+ not_world = cls.getvar('EXCLUDE_FROM_WORLD', metadata),
+ stamp = cls.getvar('STAMP', metadata),
+ stamp_extrainfo = cls.flaglist('stamp-extra-info', tasks, metadata),
+ packages_dynamic = cls.listvar('PACKAGES_DYNAMIC', metadata),
+ depends = cls.depvar('DEPENDS', metadata),
+ provides = cls.depvar('PROVIDES', metadata),
+ rdepends = cls.depvar('RDEPENDS', metadata),
+ rprovides = cls.depvar('RPROVIDES', metadata),
+ rrecommends = cls.depvar('RRECOMMENDS', metadata),
+ rprovides_pkg = cls.pkgvar('RPROVIDES', packages, metadata),
+ rdepends_pkg = cls.pkgvar('RDEPENDS', packages, metadata),
+ rrecommends_pkg = cls.pkgvar('RRECOMMENDS', packages, metadata),
+ )
+
+
+class Cache(object):
"""
BitBake Cache implementation
"""
- def __init__(self, data):
-
+ def __init__(self, data):
self.cachedir = bb.data.getVar("CACHE", data, True)
- self.clean = {}
- self.checked = {}
+ self.clean = set()
+ self.checked = set()
self.depends_cache = {}
self.data = None
self.data_fn = None
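A minimal sketch, not from the patch, of how the RecipeInfo helpers above are meant to be consumed; the recipe path is a placeholder and d stands for an already-parsed datastore for that recipe:

    info = bb.cache.RecipeInfo.from_metadata('/path/to/example.bb', d)
    if not info.skipped:
        bb.note("%s-%s depends on: %s" % (info.pn, info.pv, " ".join(info.depends)))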
@@ -57,92 +183,74 @@ class Cache:
if self.cachedir in [None, '']:
self.has_cache = False
- bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
+ logger.info("Not using a cache. "
+ "Set CACHE = <directory> to enable.")
return
self.has_cache = True
self.cachefile = os.path.join(self.cachedir, "bb_cache.dat")
- bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
+ logger.debug(1, "Using cache in '%s'", self.cachedir)
bb.utils.mkdirhier(self.cachedir)
# If any of configuration.data's dependencies are newer than the
# cache there isn't even any point in loading it...
newest_mtime = 0
- deps = bb.data.getVar("__depends", data)
+ deps = bb.data.getVar("__base_depends", data)
- old_mtimes = [old_mtime for f, old_mtime in deps]
+ old_mtimes = [old_mtime for _, old_mtime in deps]
old_mtimes.append(newest_mtime)
newest_mtime = max(old_mtimes)
if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
- try:
- p = pickle.Unpickler(file(self.cachefile, "rb"))
- self.depends_cache, version_data = p.load()
- if version_data['CACHE_VER'] != __cache_version__:
- raise ValueError('Cache Version Mismatch')
- if version_data['BITBAKE_VER'] != bb.__version__:
- raise ValueError('Bitbake Version Mismatch')
- except EOFError:
- bb.msg.note(1, bb.msg.domain.Cache, "Truncated cache found, rebuilding...")
- self.depends_cache = {}
- except:
- bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
- self.depends_cache = {}
- else:
- if os.path.isfile(self.cachefile):
- bb.msg.note(1, bb.msg.domain.Cache, "Out of date cache found, rebuilding...")
-
- def getVar(self, var, fn, exp = 0):
- """
- Gets the value of a variable
- (similar to getVar in the data class)
-
- There are two scenarios:
- 1. We have cached data - serve from depends_cache[fn]
- 2. We're learning what data to cache - serve from data
- backend but add a copy of the data to the cache.
- """
- if fn in self.clean:
- return self.depends_cache[fn][var]
-
- self.depends_cache.setdefault(fn, {})
-
- if fn != self.data_fn:
- # We're trying to access data in the cache which doesn't exist
- # yet setData hasn't been called to setup the right access. Very bad.
- bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
-
- self.cacheclean = False
- result = bb.data.getVar(var, self.data, exp)
- self.depends_cache[fn][var] = result
- return result
-
- def setData(self, virtualfn, fn, data):
- """
- Called to prime bb_cache ready to learn which variables to cache.
- Will be followed by calls to self.getVar which aren't cached
- but can be fulfilled from self.data.
- """
- self.data_fn = virtualfn
- self.data = data
-
- # Make sure __depends makes the depends_cache
- # If we're a virtual class we need to make sure all our depends are appended
- # to the depends of fn.
- depends = self.getVar("__depends", virtualfn) or set()
- self.depends_cache.setdefault(fn, {})
- if "__depends" not in self.depends_cache[fn] or not self.depends_cache[fn]["__depends"]:
- self.depends_cache[fn]["__depends"] = depends
- else:
- self.depends_cache[fn]["__depends"].update(depends)
-
- # Make sure the variants always make it into the cache too
- self.getVar('__VARIANTS', virtualfn, True)
+ self.load_cachefile()
+ elif os.path.isfile(self.cachefile):
+ logger.info("Out of date cache found, rebuilding...")
- self.depends_cache[virtualfn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)
-
- def virtualfn2realfn(self, virtualfn):
+ def load_cachefile(self):
+ with open(self.cachefile, "rb") as cachefile:
+ pickled = pickle.Unpickler(cachefile)
+ try:
+ cache_ver = pickled.load()
+ bitbake_ver = pickled.load()
+ except Exception:
+ logger.info('Invalid cache, rebuilding...')
+ return
+
+ if cache_ver != __cache_version__:
+ logger.info('Cache version mismatch, rebuilding...')
+ return
+ elif bitbake_ver != bb.__version__:
+ logger.info('Bitbake version mismatch, rebuilding...')
+ return
+
+ cachesize = os.fstat(cachefile.fileno()).st_size
+ bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
+
+ previous_percent = 0
+ while cachefile:
+ try:
+ key = pickled.load()
+ value = pickled.load()
+ except Exception:
+ break
+
+ self.depends_cache[key] = value
+
+ # only fire events on even percentage boundaries
+ current_progress = cachefile.tell()
+ current_percent = 100 * current_progress / cachesize
+ if current_percent > previous_percent:
+ previous_percent = current_percent
+ bb.event.fire(bb.event.CacheLoadProgress(current_progress),
+ self.data)
+
+ bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
+ len(self.depends_cache)),
+ self.data)
+
+ @staticmethod
+ def virtualfn2realfn(virtualfn):
"""
Convert a virtual file name to a real one + the associated subclass keyword
"""
@@ -152,79 +260,94 @@ class Cache:
if virtualfn.startswith('virtual:'):
cls = virtualfn.split(':', 2)[1]
fn = virtualfn.replace('virtual:' + cls + ':', '')
- #bb.msg.debug(2, bb.msg.domain.Cache, "virtualfn2realfn %s to %s %s" % (virtualfn, fn, cls))
return (fn, cls)
- def realfn2virtual(self, realfn, cls):
+ @staticmethod
+ def realfn2virtual(realfn, cls):
"""
Convert a real filename + the associated subclass keyword to a virtual filename
"""
if cls == "":
- #bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and '%s' to %s" % (realfn, cls, realfn))
return realfn
- #bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and %s to %s" % (realfn, cls, "virtual:" + cls + ":" + realfn))
return "virtual:" + cls + ":" + realfn
- def loadDataFull(self, virtualfn, appends, cfgData):
+ @classmethod
+ def loadDataFull(cls, virtualfn, appends, cfgData):
"""
Return a complete set of data for fn.
To do this, we need to parse the file.
"""
- (fn, cls) = self.virtualfn2realfn(virtualfn)
-
- bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s (full)" % fn)
+ (fn, virtual) = cls.virtualfn2realfn(virtualfn)
+
+ logger.debug(1, "Parsing %s (full)", fn)
+
+ bb_data = cls.load_bbfile(fn, appends, cfgData)
+ return bb_data[virtual]
+
+ @classmethod
+ def parse(cls, filename, appends, configdata):
+ """Parse the specified filename, returning the recipe information"""
+ infos = []
+ datastores = cls.load_bbfile(filename, appends, configdata)
+ depends = set()
+ for variant, data in sorted(datastores.iteritems(),
+ key=lambda i: i[0],
+ reverse=True):
+ virtualfn = cls.realfn2virtual(filename, variant)
+ depends |= (data.getVar("__depends", False) or set())
+ if depends and not variant:
+ data.setVar("__depends", depends)
+ info = RecipeInfo.from_metadata(filename, data)
+ infos.append((virtualfn, info))
+ return infos
+
+ def load(self, filename, appends, configdata):
+ """Obtain the recipe information for the specified filename,
+ using cached values if available, otherwise parsing.
+
+ Note that if it does parse to obtain the info, it will not
+ automatically add the information to the cache or to your
+ CacheData. Use the add or add_info method to do so after
+ running this, or use loadData instead."""
+ cached = self.cacheValid(filename)
+ if cached:
+ infos = []
+ info = self.depends_cache[filename]
+ for variant in info.variants:
+ virtualfn = self.realfn2virtual(filename, variant)
+ infos.append((virtualfn, self.depends_cache[virtualfn]))
+ else:
+ logger.debug(1, "Parsing %s", filename)
+ return self.parse(filename, appends, configdata)
- bb_data = self.load_bbfile(fn, appends, cfgData)
- return bb_data[cls]
+ return cached, infos
def loadData(self, fn, appends, cfgData, cacheData):
- """
- Load a subset of data for fn.
- If the cached data is valid we do nothing,
- To do this, we need to parse the file and set the system
- to record the variables accessed.
- Return the cache status and whether the file was skipped when parsed
- """
- skipped = 0
- virtuals = 0
-
- if fn not in self.checked:
- self.cacheValidUpdate(fn)
-
- if self.cacheValid(fn):
- multi = self.getVar('__VARIANTS', fn, True)
- for cls in (multi or "").split() + [""]:
- virtualfn = self.realfn2virtual(fn, cls)
- if self.depends_cache[virtualfn]["__SKIPPED"]:
- skipped += 1
- bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn)
- continue
- self.handle_data(virtualfn, cacheData)
- virtuals += 1
- return True, skipped, virtuals
-
- bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s" % fn)
-
- bb_data = self.load_bbfile(fn, appends, cfgData)
-
- for data in bb_data:
- virtualfn = self.realfn2virtual(fn, data)
- self.setData(virtualfn, fn, bb_data[data])
- if self.getVar("__SKIPPED", virtualfn):
+ """Load the recipe info for the specified filename,
+ parsing and adding to the cache if necessary, and adding
+ the recipe information to the supplied CacheData instance."""
+ skipped, virtuals = 0, 0
+
+ cached, infos = self.load(fn, appends, cfgData)
+ for virtualfn, info in infos:
+ if info.skipped:
+ logger.debug(1, "Skipping %s", virtualfn)
skipped += 1
- bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn)
else:
- self.handle_data(virtualfn, cacheData)
+ self.add_info(virtualfn, info, cacheData, not cached)
virtuals += 1
- return False, skipped, virtuals
+ return cached, skipped, virtuals
def cacheValid(self, fn):
"""
Is the cache valid for fn?
Fast version, no timestamps checked.
"""
+ if fn not in self.checked:
+ self.cacheValidUpdate(fn)
+
# Is cache enabled?
if not self.has_cache:
return False
@@ -241,70 +364,67 @@ class Cache:
if not self.has_cache:
return False
- self.checked[fn] = ""
-
- # Pretend we're clean so getVar works
- self.clean[fn] = ""
+ self.checked.add(fn)
# File isn't in depends_cache
if not fn in self.depends_cache:
- bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn)
- self.remove(fn)
+ logger.debug(2, "Cache: %s is not cached", fn)
return False
mtime = bb.parse.cached_mtime_noerror(fn)
# Check file still exists
if mtime == 0:
- bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s no longer exists" % fn)
+ logger.debug(2, "Cache: %s no longer exists", fn)
self.remove(fn)
return False
+ info = self.depends_cache[fn]
# Check the file's timestamp
- if mtime != self.getVar("CACHETIMESTAMP", fn, True):
- bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
+ if mtime != info.timestamp:
+ logger.debug(2, "Cache: %s changed", fn)
self.remove(fn)
return False
# Check dependencies are still valid
- depends = self.getVar("__depends", fn, True)
+ depends = info.file_depends
if depends:
for f, old_mtime in depends:
fmtime = bb.parse.cached_mtime_noerror(f)
# Check if file still exists
if old_mtime != 0 and fmtime == 0:
+ logger.debug(2, "Cache: %s's dependency %s was removed",
+ fn, f)
self.remove(fn)
return False
if (fmtime != old_mtime):
- bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))
+ logger.debug(2, "Cache: %s's dependency %s changed",
+ fn, f)
self.remove(fn)
return False
- #bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
- if not fn in self.clean:
- self.clean[fn] = ""
-
invalid = False
- # Mark extended class data as clean too
- multi = self.getVar('__VARIANTS', fn, True)
- for cls in (multi or "").split():
+ for cls in info.variants:
virtualfn = self.realfn2virtual(fn, cls)
- self.clean[virtualfn] = ""
- if not virtualfn in self.depends_cache:
- bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % virtualfn)
+ self.clean.add(virtualfn)
+ if virtualfn not in self.depends_cache:
+ logger.debug(2, "Cache: %s is not cached", virtualfn)
invalid = True
- # If any one of the varients is not present, mark cache as invalid for all
+ # If any one of the variants is not present, mark as invalid for all
if invalid:
- for cls in (multi or "").split():
+ for cls in info.variants:
virtualfn = self.realfn2virtual(fn, cls)
- bb.msg.debug(2, bb.msg.domain.Cache, "Cache: Removing %s from cache" % virtualfn)
- del self.clean[virtualfn]
- bb.msg.debug(2, bb.msg.domain.Cache, "Cache: Removing %s from cache" % fn)
- del self.clean[fn]
+ if virtualfn in self.clean:
+ logger.debug(2, "Cache: Removing %s from cache", virtualfn)
+ self.clean.remove(virtualfn)
+ if fn in self.clean:
+ logger.debug(2, "Cache: Marking %s as not clean", fn)
+ self.clean.remove(fn)
return False
+ self.clean.add(fn)
return True
def remove(self, fn):
@@ -312,154 +432,61 @@ class Cache:
Remove a fn from the cache
Called from the parser in error cases
"""
- bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn)
if fn in self.depends_cache:
+ logger.debug(1, "Removing %s from cache", fn)
del self.depends_cache[fn]
if fn in self.clean:
- del self.clean[fn]
+ logger.debug(1, "Marking %s as unclean", fn)
+ self.clean.remove(fn)
def sync(self):
"""
Save the cache
Called from the parser when complete (or exiting)
"""
- import copy
if not self.has_cache:
return
if self.cacheclean:
- bb.msg.note(1, bb.msg.domain.Cache, "Cache is clean, not saving.")
+ logger.debug(2, "Cache is clean, not saving.")
return
- version_data = {}
- version_data['CACHE_VER'] = __cache_version__
- version_data['BITBAKE_VER'] = bb.__version__
-
- cache_data = copy.copy(self.depends_cache)
- for fn in self.depends_cache:
- if '__BB_DONT_CACHE' in self.depends_cache[fn] and self.depends_cache[fn]['__BB_DONT_CACHE']:
- bb.msg.debug(2, bb.msg.domain.Cache, "Not caching %s, marked as not cacheable" % fn)
- del cache_data[fn]
- elif 'PV' in self.depends_cache[fn] and 'SRCREVINACTION' in self.depends_cache[fn]['PV']:
- bb.msg.error(bb.msg.domain.Cache, "Not caching %s as it had SRCREVINACTION in PV. Please report this bug" % fn)
- del cache_data[fn]
+ with open(self.cachefile, "wb") as cachefile:
+ pickler = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+ pickler.dump(__cache_version__)
+ pickler.dump(bb.__version__)
+ for key, value in self.depends_cache.iteritems():
+ pickler.dump(key)
+ pickler.dump(value)
- p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
- p.dump([cache_data, version_data])
+ del self.depends_cache
- def mtime(self, cachefile):
+ @staticmethod
+ def mtime(cachefile):
return bb.parse.cached_mtime_noerror(cachefile)
- def handle_data(self, file_name, cacheData):
+ def add_info(self, filename, info, cacheData, parsed=None):
+ cacheData.add_from_recipeinfo(filename, info)
+ if not self.has_cache:
+ return
+
+ if 'SRCREVINACTION' not in info.pv and not info.nocache:
+ if parsed:
+ self.cacheclean = False
+ self.depends_cache[filename] = info
+
+ def add(self, file_name, data, cacheData, parsed=None):
"""
Save data we need into the cache
"""
- pn = self.getVar('PN', file_name, True)
- pe = self.getVar('PE', file_name, True) or "0"
- pv = self.getVar('PV', file_name, True)
- if 'SRCREVINACTION' in pv:
- bb.msg.note(1, bb.msg.domain.Cache, "Found SRCREVINACTION in PV (%s) or %s. Please report this bug." % (pv, file_name))
- pr = self.getVar('PR', file_name, True)
- dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
- depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
- packages = (self.getVar('PACKAGES', file_name, True) or "").split()
- packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
- rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()
-
- cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name)
-
- # build PackageName to FileName lookup table
- if pn not in cacheData.pkg_pn:
- cacheData.pkg_pn[pn] = []
- cacheData.pkg_pn[pn].append(file_name)
-
- cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True)
-
- cacheData.tasks[file_name] = self.getVar('__BBTASKS', file_name, True)
- for t in cacheData.tasks[file_name]:
- cacheData.basetaskhash[file_name + "." + t] = self.getVar("BB_BASEHASH_task-%s" % t, file_name, True)
-
- # build FileName to PackageName lookup table
- cacheData.pkg_fn[file_name] = pn
- cacheData.pkg_pepvpr[file_name] = (pe, pv, pr)
- cacheData.pkg_dp[file_name] = dp
-
- provides = [pn]
- for provide in (self.getVar("PROVIDES", file_name, True) or "").split():
- if provide not in provides:
- provides.append(provide)
-
- # Build forward and reverse provider hashes
- # Forward: virtual -> [filenames]
- # Reverse: PN -> [virtuals]
- if pn not in cacheData.pn_provides:
- cacheData.pn_provides[pn] = []
-
- cacheData.fn_provides[file_name] = provides
- for provide in provides:
- if provide not in cacheData.providers:
- cacheData.providers[provide] = []
- cacheData.providers[provide].append(file_name)
- if not provide in cacheData.pn_provides[pn]:
- cacheData.pn_provides[pn].append(provide)
-
- cacheData.deps[file_name] = []
- for dep in depends:
- if not dep in cacheData.deps[file_name]:
- cacheData.deps[file_name].append(dep)
- if not dep in cacheData.all_depends:
- cacheData.all_depends.append(dep)
-
- # Build reverse hash for PACKAGES, so runtime dependencies
- # can be be resolved (RDEPENDS, RRECOMMENDS etc.)
- for package in packages:
- if not package in cacheData.packages:
- cacheData.packages[package] = []
- cacheData.packages[package].append(file_name)
- rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
-
- for package in packages_dynamic:
- if not package in cacheData.packages_dynamic:
- cacheData.packages_dynamic[package] = []
- cacheData.packages_dynamic[package].append(file_name)
-
- for rprovide in rprovides:
- if not rprovide in cacheData.rproviders:
- cacheData.rproviders[rprovide] = []
- cacheData.rproviders[rprovide].append(file_name)
-
- # Build hash of runtime depends and rececommends
-
- if not file_name in cacheData.rundeps:
- cacheData.rundeps[file_name] = {}
- if not file_name in cacheData.runrecs:
- cacheData.runrecs[file_name] = {}
-
- rdepends = self.getVar('RDEPENDS', file_name, True) or ""
- rrecommends = self.getVar('RRECOMMENDS', file_name, True) or ""
- for package in packages + [pn]:
- if not package in cacheData.rundeps[file_name]:
- cacheData.rundeps[file_name][package] = []
- if not package in cacheData.runrecs[file_name]:
- cacheData.runrecs[file_name][package] = []
-
- cacheData.rundeps[file_name][package] = rdepends + " " + (self.getVar("RDEPENDS_%s" % package, file_name, True) or "")
- cacheData.runrecs[file_name][package] = rrecommends + " " + (self.getVar("RRECOMMENDS_%s" % package, file_name, True) or "")
-
- # Collect files we may need for possible world-dep
- # calculations
- if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True):
- cacheData.possible_world.append(file_name)
+ realfn = self.virtualfn2realfn(file_name)[0]
+ info = RecipeInfo.from_metadata(realfn, data)
+ self.add_info(file_name, info, cacheData, parsed)
- cacheData.hashfn[file_name] = self.getVar('BB_HASHFILENAME', file_name, True)
-
- # Touch this to make sure its in the cache
- self.getVar('__BB_DONT_CACHE', file_name, True)
- self.getVar('__VARIANTS', file_name, True)
-
- def load_bbfile(self, bbfile, appends, config):
+ @staticmethod
+ def load_bbfile(bbfile, appends, config):
"""
Load and parse one .bb build file
Return the data and whether parsing resulted in the file being skipped
@@ -485,13 +512,16 @@ class Cache:
try:
if appends:
data.setVar('__BBAPPEND', " ".join(appends), bb_data)
- bb_data = parse.handle(bbfile, bb_data) # read .bb data
- if chdir_back: os.chdir(oldpath)
+ bb_data = parse.handle(bbfile, bb_data)
+ if chdir_back:
+ os.chdir(oldpath)
return bb_data
except:
- if chdir_back: os.chdir(oldpath)
+ if chdir_back:
+ os.chdir(oldpath)
raise
+
def init(cooker):
"""
The Objective: Cache the minimum amount of data possible yet get to the
@@ -512,11 +542,7 @@ def init(cooker):
return Cache(cooker.configuration.data)
-
-#============================================================================#
-# CacheData
-#============================================================================#
-class CacheData:
+class CacheData(object):
"""
The data structures we compile from the cached data
"""
@@ -524,26 +550,26 @@ class CacheData:
def __init__(self):
"""
Direct cache variables
- (from Cache.handle_data)
"""
- self.providers = {}
- self.rproviders = {}
- self.packages = {}
- self.packages_dynamic = {}
+ self.providers = defaultdict(list)
+ self.rproviders = defaultdict(list)
+ self.packages = defaultdict(list)
+ self.packages_dynamic = defaultdict(list)
self.possible_world = []
- self.pkg_pn = {}
+ self.pkg_pn = defaultdict(list)
self.pkg_fn = {}
self.pkg_pepvpr = {}
self.pkg_dp = {}
- self.pn_provides = {}
+ self.pn_provides = defaultdict(list)
self.fn_provides = {}
self.all_depends = []
- self.deps = {}
- self.rundeps = {}
- self.runrecs = {}
+ self.deps = defaultdict(list)
+ self.rundeps = defaultdict(lambda: defaultdict(list))
+ self.runrecs = defaultdict(lambda: defaultdict(list))
self.task_queues = {}
self.task_deps = {}
self.stamp = {}
+ self.stamp_extrainfo = {}
self.preferred = {}
self.tasks = {}
self.basetaskhash = {}
@@ -557,3 +583,55 @@ class CacheData:
self.world_target = set()
self.bbfile_priority = {}
self.bbfile_config_priorities = []
+
+ def add_from_recipeinfo(self, fn, info):
+ self.task_deps[fn] = info.task_deps
+ self.pkg_fn[fn] = info.pn
+ self.pkg_pn[info.pn].append(fn)
+ self.pkg_pepvpr[fn] = (info.pe, info.pv, info.pr)
+ self.pkg_dp[fn] = info.defaultpref
+ self.stamp[fn] = info.stamp
+ self.stamp_extrainfo[fn] = info.stamp_extrainfo
+
+ provides = [info.pn]
+ for provide in info.provides:
+ if provide not in provides:
+ provides.append(provide)
+ self.fn_provides[fn] = provides
+
+ for provide in provides:
+ self.providers[provide].append(fn)
+ if provide not in self.pn_provides[info.pn]:
+ self.pn_provides[info.pn].append(provide)
+
+ for dep in info.depends:
+ if dep not in self.deps[fn]:
+ self.deps[fn].append(dep)
+ if dep not in self.all_depends:
+ self.all_depends.append(dep)
+
+ rprovides = info.rprovides
+ for package in info.packages:
+ self.packages[package].append(fn)
+ rprovides += info.rprovides_pkg[package]
+
+ for rprovide in rprovides:
+ self.rproviders[rprovide].append(fn)
+
+ for package in info.packages_dynamic:
+ self.packages_dynamic[package].append(fn)
+
+ # Build hash of runtime depends and recommends
+ for package in info.packages + [info.pn]:
+ self.rundeps[fn][package] = list(info.rdepends) + info.rdepends_pkg[package]
+ self.runrecs[fn][package] = list(info.rrecommends) + info.rrecommends_pkg[package]
+
+ # Collect files we may need for possible world-dep
+ # calculations
+ if not info.broken and not info.not_world:
+ self.possible_world.append(fn)
+
+ self.hashfn[fn] = info.hashfilename
+ for task, taskhash in info.basetaskhashes.iteritems():
+ identifier = '%s.%s' % (fn, task)
+ self.basetaskhash[identifier] = taskhash
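
Note on the cache.py changes above: the cache file is now a pickle stream, two version markers followed by one key/value pair per recipe, and load_cachefile() simply reads pairs until the stream ends, so a truncated file yields fewer entries instead of an error. A minimal standalone sketch of the same read/write pattern, with stand-in constants rather than the real __cache_version__ and bb.__version__:

import pickle

CACHE_VERSION = "132"        # stand-in for __cache_version__
BITBAKE_VERSION = "1.11.0"   # stand-in for bb.__version__

def write_cache(path, entries):
    # Stream: version markers first, then one (key, value) pair per entry,
    # mirroring Cache.sync() above.
    with open(path, "wb") as f:
        pickler = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
        pickler.dump(CACHE_VERSION)
        pickler.dump(BITBAKE_VERSION)
        for key, value in entries.items():
            pickler.dump(key)
            pickler.dump(value)

def read_cache(path):
    # Mirror of load_cachefile(): bail out on a bad header, otherwise read
    # pairs until the stream is exhausted (a truncated file just stops early).
    entries = {}
    with open(path, "rb") as f:
        unpickler = pickle.Unpickler(f)
        try:
            if unpickler.load() != CACHE_VERSION:
                return {}
            if unpickler.load() != BITBAKE_VERSION:
                return {}
        except Exception:
            return {}
        while True:
            try:
                key = unpickler.load()
                value = unpickler.load()
            except Exception:
                break
            entries[key] = value
    return entries

# Example:
#   write_cache("bb_cache.dat", {"foo_1.0.bb": {"PN": "foo"}})
#   read_cache("bb_cache.dat")  # -> {'foo_1.0.bb': {'PN': 'foo'}}
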
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index ba3009212..bfffcacc3 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -1,16 +1,21 @@
-from pysh import pyshyacc, pyshlex
-from itertools import chain
-from bb import msg, utils
import ast
import codegen
+import logging
+import os.path
+import bb.utils, bb.data
+from itertools import chain
+from pysh import pyshyacc, pyshlex
+
+logger = logging.getLogger('BitBake.CodeParser')
PARSERCACHE_VERSION = 2
try:
import cPickle as pickle
except ImportError:
import pickle
- bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
+ logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
+
def check_indent(codestr):
"""If the code is indented, add a top level piece of code to 'remove' the indentation"""
@@ -23,7 +28,7 @@ def check_indent(codestr):
return codestr
if codestr[i-1] is " " or codestr[i-1] is " ":
- return "if 1:\n" + codestr
+ return "if 1:\n" + codestr
return codestr
@@ -31,15 +36,18 @@ pythonparsecache = {}
shellparsecache = {}
def parser_cachefile(d):
- cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True)
+ cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
+ bb.data.getVar("CACHE", d, True))
if cachedir in [None, '']:
return None
bb.utils.mkdirhier(cachedir)
cachefile = os.path.join(cachedir, "bb_codeparser.dat")
- bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s' for codeparser cache" % cachefile)
+ logger.debug(1, "Using cache in '%s' for codeparser cache", cachefile)
return cachefile
def parser_cache_init(d):
+ global pythonparsecache
+ global shellparsecache
cachefile = parser_cachefile(d)
if not cachefile:
@@ -54,17 +62,16 @@ def parser_cache_init(d):
if version != PARSERCACHE_VERSION:
return
- bb.codeparser.pythonparsecache = data[0]
- bb.codeparser.shellparsecache = data[1]
+ pythonparsecache = data[0]
+ shellparsecache = data[1]
def parser_cache_save(d):
-
cachefile = parser_cachefile(d)
if not cachefile:
return
p = pickle.Pickler(file(cachefile, "wb"), -1)
- p.dump([[bb.codeparser.pythonparsecache, bb.codeparser.shellparsecache], PARSERCACHE_VERSION])
+ p.dump([[pythonparsecache, shellparsecache], PARSERCACHE_VERSION])
class PythonParser():
class ValueVisitor():
@@ -129,10 +136,10 @@ class PythonParser():
funcstr = codegen.to_source(func)
argstr = codegen.to_source(arg)
except TypeError:
- msg.debug(2, None, "Failed to convert function and argument to source form")
+ logger.debug(2, 'Failed to convert function and argument to source form')
else:
- msg.debug(1, None, "Warning: in call to '%s', argument '%s' is not a literal" %
- (funcstr, argstr))
+ logger.debug(1, "Warning: in call to '%s', argument '%s' is "
+ "not a literal", funcstr, argstr)
def visit_Call(self, node):
if self.compare_name(self.getvars, node.func):
@@ -184,7 +191,7 @@ class PythonParser():
self.execs = pythonparsecache[h]["execs"]
return
- code = compile(check_indent(str(node)), "<string>", "exec",
+ code = compile(check_indent(str(node)), "<string>", "exec",
ast.PyCF_ONLY_AST)
visitor = self.ValueVisitor(code)
@@ -319,11 +326,11 @@ class ShellParser():
cmd = word[1]
if cmd.startswith("$"):
- msg.debug(1, None, "Warning: execution of non-literal command '%s'" % cmd)
+ logger.debug(1, "Warning: execution of non-literal "
+ "command '%s'", cmd)
elif cmd == "eval":
command = " ".join(word for _, word in words[1:])
self.parse_shell(command)
else:
self.allexecs.add(cmd)
break
-
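
Note on the codeparser.py changes above: parser_cache_init() now rebinds the module-level parse caches with a global statement instead of assigning through bb.codeparser, and the on-disk cache stays a version-tagged pickle. A small self-contained sketch of that pattern (hypothetical module-level names, simplified file layout, not the real bb API):

import pickle

PARSERCACHE_VERSION = 2   # same constant name as the patch

parsecache = {}           # stands in for pythonparsecache/shellparsecache

def parser_cache_load(path):
    # The 'global' statement is the point: without it the assignment below
    # would only create a function-local name and the module-level dict
    # would stay empty.
    global parsecache
    try:
        with open(path, "rb") as f:
            data, version = pickle.load(f)
    except (OSError, EOFError, pickle.UnpicklingError):
        return
    if version != PARSERCACHE_VERSION:
        return
    parsecache = data

def parser_cache_save(path):
    with open(path, "wb") as f:
        pickle.dump((parsecache, PARSERCACHE_VERSION), f, -1)
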
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index 9a8d689e2..b88089298 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -35,12 +35,25 @@ import bb.data
async_cmds = {}
sync_cmds = {}
+
+class CommandCompleted(bb.event.Event):
+ pass
+
+class CommandExit(bb.event.Event):
+ def __init__(self, exitcode):
+ bb.event.Event.__init__(self)
+ self.exitcode = int(exitcode)
+
+class CommandFailed(CommandExit):
+ def __init__(self, message):
+ self.error = message
+ CommandExit.__init__(self, 1)
+
class Command:
"""
A queue of asynchronous commands for bitbake
"""
def __init__(self, cooker):
-
self.cooker = cooker
self.cmds_sync = CommandsSync()
self.cmds_async = CommandsAsync()
@@ -81,7 +94,8 @@ class Command:
(command, options) = self.currentAsyncCommand
commandmethod = getattr(CommandsAsync, command)
needcache = getattr( commandmethod, "needcache" )
- if needcache and self.cooker.cookerState != bb.cooker.cookerParsed:
+ if (needcache and self.cooker.state in
+ (bb.cooker.state.initial, bb.cooker.state.parsing)):
self.cooker.updateCache()
return True
else:
@@ -104,11 +118,13 @@ class Command:
self.finishAsyncCommand(traceback.format_exc())
return False
- def finishAsyncCommand(self, error = None):
- if error:
- bb.event.fire(CookerCommandFailed(error), self.cooker.configuration.event_data)
+ def finishAsyncCommand(self, msg=None, code=None):
+ if msg:
+ bb.event.fire(CommandFailed(msg), self.cooker.configuration.event_data)
+ elif code:
+ bb.event.fire(CommandExit(code), self.cooker.configuration.event_data)
else:
- bb.event.fire(CookerCommandCompleted(), self.cooker.configuration.event_data)
+ bb.event.fire(CommandCompleted(), self.cooker.configuration.event_data)
self.currentAsyncCommand = None
@@ -123,13 +139,13 @@ class CommandsSync:
"""
Trigger cooker 'shutdown' mode
"""
- command.cooker.cookerAction = bb.cooker.cookerShutdown
+ command.cooker.shutdown()
def stateStop(self, command, params):
"""
Stop the cooker
"""
- command.cooker.cookerAction = bb.cooker.cookerStop
+ command.cooker.stop()
def getCmdLineAction(self, command, params):
"""
@@ -248,33 +264,8 @@ class CommandsAsync:
"""
Parse the .bb files
"""
- command.cooker.compareRevisions()
- command.finishAsyncCommand()
+ if bb.fetch.fetcher_compare_revisions(command.cooker.configuration.data):
+ command.finishAsyncCommand(code=1)
+ else:
+ command.finishAsyncCommand()
compareRevisions.needcache = True
-
-#
-# Events
-#
-class CookerCommandCompleted(bb.event.Event):
- """
- Cooker command completed
- """
- def __init__(self):
- bb.event.Event.__init__(self)
-
-
-class CookerCommandFailed(bb.event.Event):
- """
- Cooker command completed
- """
- def __init__(self, error):
- bb.event.Event.__init__(self)
- self.error = error
-
-class CookerCommandSetExitCode(bb.event.Event):
- """
- Set the exit code for a cooker command
- """
- def __init__(self, exitcode):
- bb.event.Event.__init__(self)
- self.exitcode = int(exitcode)
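
Note on the command.py changes above: the old Cooker* events are replaced by CommandCompleted, CommandExit(exitcode) and CommandFailed(message), where a failure is an exit with code 1 that also carries the error text, and finishAsyncCommand() picks the event from its msg/code arguments. A standalone sketch of that dispatch, using a plain callable in place of bb.event.fire:

class Event(object):
    # Simplified stand-in for bb.event.Event
    pass

class CommandCompleted(Event):
    pass

class CommandExit(Event):
    def __init__(self, exitcode):
        self.exitcode = int(exitcode)

class CommandFailed(CommandExit):
    def __init__(self, message):
        self.error = message
        CommandExit.__init__(self, 1)

def finish_async_command(fire, msg=None, code=None):
    # Same dispatch as Command.finishAsyncCommand: a message means failure
    # (exit code 1), a bare code means a plain exit, otherwise completion.
    if msg:
        fire(CommandFailed(msg))
    elif code:
        fire(CommandExit(code))
    else:
        fire(CommandCompleted())

# Example:
#   finish_async_command(print, msg="no recipe files to build")
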
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 95f38f623..e524db749 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
@@ -23,37 +24,36 @@
from __future__ import print_function
import sys, os, glob, os.path, re, time
+import atexit
+import itertools
+import logging
+import multiprocessing
+import signal
import sre_constants
+import threading
from cStringIO import StringIO
from contextlib import closing
import bb
from bb import utils, data, parse, event, cache, providers, taskdata, command, runqueue
+logger = logging.getLogger("BitBake")
+collectlog = logging.getLogger("BitBake.Collection")
+buildlog = logging.getLogger("BitBake.Build")
+parselog = logging.getLogger("BitBake.Parsing")
+providerlog = logging.getLogger("BitBake.Provider")
+
class MultipleMatches(Exception):
"""
Exception raised when multiple file matches are found
"""
-class ParsingErrorsFound(Exception):
- """
- Exception raised when parsing errors are found
- """
-
class NothingToBuild(Exception):
"""
Exception raised when there is nothing to build
"""
-
-# Different states cooker can be in
-cookerClean = 1
-cookerParsing = 2
-cookerParsed = 3
-
-# Different action states the cooker can be in
-cookerRun = 1 # Cooker is running normally
-cookerShutdown = 2 # Active tasks should be brought to a controlled stop
-cookerStop = 3 # Stop, now!
+class state:
+ initial, parsing, running, shutdown, stop = range(5)
#============================================================================#
# BBCooker
@@ -65,9 +65,7 @@ class BBCooker:
def __init__(self, configuration, server):
self.status = None
-
- self.cache = None
- self.bb_cache = None
+ self.appendlist = {}
if server:
self.server = server.BitBakeServer(self)
@@ -102,13 +100,12 @@ class BBCooker:
import termios
tcattr = termios.tcgetattr(fd)
if tcattr[3] & termios.TOSTOP:
- bb.msg.note(1, bb.msg.domain.Build, "The terminal had the TOSTOP bit set, clearing...")
+ buildlog.info("The terminal had the TOSTOP bit set, clearing...")
tcattr[3] = tcattr[3] & ~termios.TOSTOP
termios.tcsetattr(fd, termios.TCSANOW, tcattr)
self.command = bb.command.Command(self)
- self.cookerState = cookerClean
- self.cookerAction = cookerRun
+ self.state = state.initial
def parseConfiguration(self):
@@ -118,7 +115,7 @@ class BBCooker:
if nice:
curnice = os.nice(0)
nice = int(nice) - curnice
- bb.msg.note(2, bb.msg.domain.Build, "Renice to %s " % os.nice(nice))
+ buildlog.verbose("Renice to %s " % os.nice(nice))
def parseCommandLine(self):
# Parse any commandline into actions
@@ -126,11 +123,11 @@ class BBCooker:
self.commandlineAction = None
if 'world' in self.configuration.pkgs_to_build:
- bb.msg.error(bb.msg.domain.Build, "'world' is not a valid target for --environment.")
+ buildlog.error("'world' is not a valid target for --environment.")
elif len(self.configuration.pkgs_to_build) > 1:
- bb.msg.error(bb.msg.domain.Build, "Only one target can be used with the --environment option.")
+ buildlog.error("Only one target can be used with the --environment option.")
elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0:
- bb.msg.error(bb.msg.domain.Build, "No target should be used with the --environment and --buildfile options.")
+ buildlog.error("No target should be used with the --environment and --buildfile options.")
elif len(self.configuration.pkgs_to_build) > 0:
self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build]
else:
@@ -148,13 +145,13 @@ class BBCooker:
self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd]
else:
self.commandlineAction = None
- bb.msg.error(bb.msg.domain.Build, "Please specify a package name for dependency graph generation.")
+ buildlog.error("Please specify a package name for dependency graph generation.")
else:
if self.configuration.pkgs_to_build:
self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd]
else:
self.commandlineAction = None
- bb.msg.error(bb.msg.domain.Build, "Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
+ buildlog.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
def runCommands(self, server, data, abort):
"""
@@ -180,8 +177,8 @@ class BBCooker:
preferred_versions[pn] = (pref_ver, pref_file)
latest_versions[pn] = (last_ver, last_file)
- bb.msg.plain("%-35s %25s %25s" % ("Package Name", "Latest Version", "Preferred Version"))
- bb.msg.plain("%-35s %25s %25s\n" % ("============", "==============", "================="))
+ logger.plain("%-35s %25s %25s", "Package Name", "Latest Version", "Preferred Version")
+ logger.plain("%-35s %25s %25s\n", "============", "==============", "=================")
for p in sorted(pkg_pn):
pref = preferred_versions[p]
@@ -193,11 +190,7 @@ class BBCooker:
if pref == latest:
prefstr = ""
- bb.msg.plain("%-35s %25s %25s" % (p, lateststr, prefstr))
-
- def compareRevisions(self):
- ret = bb.fetch.fetcher_compare_revisons(self.configuration.data)
- bb.event.fire(bb.command.CookerCommandSetExitCode(ret), self.configuration.event_data)
+ logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
def showEnvironment(self, buildfile = None, pkgs_to_build = []):
"""
@@ -207,8 +200,6 @@ class BBCooker:
envdata = None
if buildfile:
- self.cb = None
- self.bb_cache = bb.cache.init(self)
fn = self.matchFile(buildfile)
elif len(pkgs_to_build) == 1:
self.updateCache()
@@ -229,28 +220,22 @@ class BBCooker:
if fn:
try:
- envdata = self.bb_cache.loadDataFull(fn, self.get_file_appends(fn), self.configuration.data)
- except IOError as e:
- bb.msg.error(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e))
- raise
- except Exception as e:
- bb.msg.error(bb.msg.domain.Parsing, "%s" % e)
+ envdata = bb.cache.Cache.loadDataFull(fn, self.get_file_appends(fn), self.configuration.data)
+ except Exception, e:
+ parselog.exception("Unable to read %s", fn)
raise
# emit variables and shell functions
- try:
- data.update_data(envdata)
- with closing(StringIO()) as env:
- data.emit_env(env, envdata, True)
- bb.msg.plain(env.getvalue())
- except Exception as e:
- bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
+ data.update_data(envdata)
+ with closing(StringIO()) as env:
+ data.emit_env(env, envdata, True)
+ logger.plain(env.getvalue())
# emit the metadata which isnt valid shell
data.expandKeys(envdata)
for e in envdata.keys():
if data.getVarFlag( e, 'python', envdata ):
- bb.msg.plain("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1)))
+ logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))
def generateDepTreeData(self, pkgs_to_build, task):
"""
@@ -290,7 +275,7 @@ class BBCooker:
depend_tree["rdepends-pkg"] = {}
depend_tree["rrecs-pkg"] = {}
- for task in range(len(rq.rqdata.runq_fnid)):
+ for task in xrange(len(rq.rqdata.runq_fnid)):
taskname = rq.rqdata.runq_task[task]
fnid = rq.rqdata.runq_fnid[task]
fn = taskdata.fn_index[fnid]
@@ -374,7 +359,7 @@ class BBCooker:
for rdepend in depgraph["rdepends-pn"][pn]:
print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
print("}", file=depends_file)
- bb.msg.plain("PN dependencies saved to 'pn-depends.dot'")
+ logger.info("PN dependencies saved to 'pn-depends.dot'")
depends_file = file('package-depends.dot', 'w' )
print("digraph depends {", file=depends_file)
@@ -395,7 +380,7 @@ class BBCooker:
for rdepend in depgraph["rrecs-pkg"][package]:
print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
print("}", file=depends_file)
- bb.msg.plain("Package dependencies saved to 'package-depends.dot'")
+ logger.info("Package dependencies saved to 'package-depends.dot'")
tdepends_file = file('task-depends.dot', 'w' )
print("digraph depends {", file=tdepends_file)
@@ -407,7 +392,7 @@ class BBCooker:
for dep in depgraph["tdepends"][task]:
print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
print("}", file=tdepends_file)
- bb.msg.plain("Task dependencies saved to 'task-depends.dot'")
+ logger.info("Task dependencies saved to 'task-depends.dot'")
def buildDepgraph( self ):
all_depends = self.status.all_depends
@@ -431,10 +416,10 @@ class BBCooker:
try:
(providee, provider) = p.split(':')
except:
- bb.msg.fatal(bb.msg.domain.Provider, "Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
+ providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
continue
if providee in self.status.preferred and self.status.preferred[providee] != provider:
- bb.msg.error(bb.msg.domain.Provider, "conflicting preferences for %s: both %s and %s specified" % (providee, provider, self.status.preferred[providee]))
+ providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.status.preferred[providee])
self.status.preferred[providee] = provider
# Calculate priorities for each file
@@ -443,8 +428,7 @@ class BBCooker:
for collection, pattern, regex, _ in self.status.bbfile_config_priorities:
if not regex in matched:
- bb.msg.warn(bb.msg.domain.Provider, "No bb files matched BBFILE_PATTERN_%s '%s'" %
- (collection, pattern))
+ collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
def buildWorldTargetList(self):
"""
@@ -452,19 +436,19 @@ class BBCooker:
"""
all_depends = self.status.all_depends
pn_provides = self.status.pn_provides
- bb.msg.debug(1, bb.msg.domain.Parsing, "collating packages for \"world\"")
+ parselog.debug(1, "collating packages for \"world\"")
for f in self.status.possible_world:
terminal = True
pn = self.status.pkg_fn[f]
for p in pn_provides[pn]:
if p.startswith('virtual/'):
- bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to %s provider starting with virtual/" % (f, p))
+ parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
terminal = False
break
for pf in self.status.providers[p]:
if self.status.pkg_fn[pf] != pn:
- bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to both us and %s providing %s" % (f, pf, p))
+ parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
terminal = False
break
if terminal:
@@ -478,8 +462,9 @@ class BBCooker:
"""Drop off into a shell"""
try:
from bb import shell
- except ImportError as details:
- bb.msg.fatal(bb.msg.domain.Parsing, "Sorry, shell not available (%s)" % details )
+ except ImportError:
+ parselog.exception("Interactive mode not available")
+ sys.exit(1)
else:
shell.start( self )
@@ -493,70 +478,56 @@ class BBCooker:
path, _ = os.path.split(path)
def parseConfigurationFiles(self, files):
- try:
- data = self.configuration.data
-
- bb.parse.init_parser(data, self.configuration.dump_signatures)
- for f in files:
- data = bb.parse.handle(f, data)
-
- layerconf = self._findLayerConf()
- if layerconf:
- bb.msg.debug(2, bb.msg.domain.Parsing, "Found bblayers.conf (%s)" % layerconf)
- data = bb.parse.handle(layerconf, data)
-
- layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()
-
- data = bb.data.createCopy(data)
- for layer in layers:
- bb.msg.debug(2, bb.msg.domain.Parsing, "Adding layer %s" % layer)
- bb.data.setVar('LAYERDIR', layer, data)
- data = bb.parse.handle(os.path.join(layer, "conf", "layer.conf"), data)
+ def _parse(f, data, include=False):
+ try:
+ return bb.parse.handle(f, data, include)
+ except (IOError, bb.parse.ParseError) as exc:
+ parselog.critical("Unable to parse %s: %s" % (f, exc))
+ sys.exit(1)
- # XXX: Hack, relies on the local keys of the datasmart
- # instance being stored in the 'dict' attribute and makes
- # assumptions about how variable expansion works, but
- # there's no better way to force an expansion of a single
- # variable across the datastore today, and this at least
- # lets us reference LAYERDIR without having to immediately
- # eval all our variables that use it.
- for key in data.dict:
- if key != "_data":
- value = data.getVar(key, False)
- if value and "${LAYERDIR}" in value:
- data.setVar(key, value.replace("${LAYERDIR}", layer))
+ data = self.configuration.data
+ bb.parse.init_parser(data)
+ for f in files:
+ data = _parse(f, data)
- bb.data.delVar('LAYERDIR', data)
+ layerconf = self._findLayerConf()
+ if layerconf:
+ parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
+ data = _parse(layerconf, data)
- if not data.getVar("BBPATH", True):
- bb.fatal("The BBPATH variable is not set")
+ layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()
- data = bb.parse.handle(os.path.join("conf", "bitbake.conf"), data)
+ data = bb.data.createCopy(data)
+ for layer in layers:
+ parselog.debug(2, "Adding layer %s", layer)
+ bb.data.setVar('LAYERDIR', layer, data)
+ data = _parse(os.path.join(layer, "conf", "layer.conf"), data)
+ data.expandVarref('LAYERDIR')
- self.configuration.data = data
+ bb.data.delVar('LAYERDIR', data)
- # Handle any INHERITs and inherit the base class
- inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True ) or "").split()
- for inherit in inherits:
- self.configuration.data = bb.parse.handle(os.path.join('classes', '%s.bbclass' % inherit), self.configuration.data, True )
+ if not data.getVar("BBPATH", True):
+ raise SystemExit("The BBPATH variable is not set")
- # Nomally we only register event handlers at the end of parsing .bb files
- # We register any handlers we've found so far here...
- for var in bb.data.getVar('__BBHANDLERS', self.configuration.data) or []:
- bb.event.register(var, bb.data.getVar(var, self.configuration.data))
+ data = _parse(os.path.join("conf", "bitbake.conf"), data)
- if bb.data.getVar("BB_WORKERCONTEXT", self.configuration.data) is None:
- bb.fetch.fetcher_init(self.configuration.data)
- bb.codeparser.parser_cache_init(self.configuration.data)
+ self.configuration.data = data
- bb.parse.init_parser(data, self.configuration.dump_signatures)
+ # Handle any INHERITs and inherit the base class
+ inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True ) or "").split()
+ for inherit in inherits:
+ self.configuration.data = _parse(os.path.join('classes', '%s.bbclass' % inherit), self.configuration.data, True )
- bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)
+ # Normally we only register event handlers at the end of parsing .bb files
+ # We register any handlers we've found so far here...
+ for var in bb.data.getVar('__BBHANDLERS', self.configuration.data) or []:
+ bb.event.register(var, bb.data.getVar(var, self.configuration.data))
- except IOError as e:
- bb.msg.fatal(bb.msg.domain.Parsing, "Error when parsing %s: %s" % (files, str(e)))
- except bb.parse.ParseError as details:
- bb.msg.fatal(bb.msg.domain.Parsing, "Unable to parse %s (%s)" % (files, details) )
+ if bb.data.getVar("BB_WORKERCONTEXT", self.configuration.data) is None:
+ bb.fetch.fetcher_init(self.configuration.data)
+ bb.codeparser.parser_cache_init(self.configuration.data)
+ bb.parse.init_parser(data)
+ bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)
def handleCollections( self, collections ):
"""Handle collections"""
@@ -565,22 +536,22 @@ class BBCooker:
for c in collection_list:
regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
if regex == None:
- bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s not defined" % c)
+ parselog.error("BBFILE_PATTERN_%s not defined" % c)
continue
priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
if priority == None:
- bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PRIORITY_%s not defined" % c)
+ parselog.error("BBFILE_PRIORITY_%s not defined" % c)
continue
try:
cre = re.compile(regex)
except re.error:
- bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s \"%s\" is not a valid regular expression" % (c, regex))
+ parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
continue
try:
pri = int(priority)
self.status.bbfile_config_priorities.append((c, regex, cre, pri))
except ValueError:
- bb.msg.error(bb.msg.domain.Parsing, "invalid value for BBFILE_PRIORITY_%s: \"%s\"" % (c, priority))
+ parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
def buildSetVars(self):
"""
@@ -596,7 +567,7 @@ class BBCooker:
"""
bf = os.path.abspath(buildfile)
- (filelist, masked) = self.collect_bbfiles()
+ filelist, masked = self.collect_bbfiles()
try:
os.stat(bf)
return [bf]
@@ -616,9 +587,9 @@ class BBCooker:
"""
matches = self.matchFiles(buildfile)
if len(matches) != 1:
- bb.msg.error(bb.msg.domain.Parsing, "Unable to match %s (%s matches found):" % (buildfile, len(matches)))
+ parselog.error("Unable to match %s (%s matches found):" % (buildfile, len(matches)))
for f in matches:
- bb.msg.error(bb.msg.domain.Parsing, " %s" % f)
+ parselog.error(" %s" % f)
raise MultipleMatches
return matches[0]
@@ -635,22 +606,23 @@ class BBCooker:
if (task == None):
task = self.configuration.cmd
- self.bb_cache = bb.cache.init(self)
- self.status = bb.cache.CacheData()
-
- (fn, cls) = self.bb_cache.virtualfn2realfn(buildfile)
+ (fn, cls) = bb.cache.Cache.virtualfn2realfn(buildfile)
buildfile = self.matchFile(fn)
- fn = self.bb_cache.realfn2virtual(buildfile, cls)
+ fn = bb.cache.Cache.realfn2virtual(buildfile, cls)
self.buildSetVars()
- # Load data into the cache for fn and parse the loaded cache data
- the_data = self.bb_cache.loadDataFull(fn, self.get_file_appends(fn), self.configuration.data)
- self.bb_cache.setData(fn, buildfile, the_data)
- self.bb_cache.handle_data(fn, self.status)
+ self.status = bb.cache.CacheData()
+ infos = bb.cache.Cache.parse(fn, self.get_file_appends(fn), \
+ self.configuration.data)
+ maininfo = None
+ for vfn, info in infos:
+ self.status.add_from_recipeinfo(vfn, info)
+ if vfn == fn:
+ maininfo = info
# Tweak some variables
- item = self.bb_cache.getVar('PN', fn, True)
+ item = maininfo.pn
self.status.ignored_dependencies = set()
self.status.bbfile_priority[fn] = 1
@@ -662,7 +634,7 @@ class BBCooker:
# Remove stamp for target if force mode active
if self.configuration.force:
- bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (task, fn))
+ logger.verbose("Remove stamp %s, %s", task, fn)
bb.build.del_stamp('do_%s' % task, self.status, fn)
# Setup taskdata structure
@@ -682,17 +654,17 @@ class BBCooker:
def buildFileIdle(server, rq, abort):
- if abort or self.cookerAction == cookerStop:
+ if abort or self.state == state.stop:
rq.finish_runqueue(True)
- elif self.cookerAction == cookerShutdown:
+ elif self.state == state.shutdown:
rq.finish_runqueue(False)
failures = 0
try:
retval = rq.execute_runqueue()
except runqueue.TaskFailure as exc:
for fnid in exc.args:
- bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid])
- failures = failures + 1
+ buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
+ failures += len(exc.args)
retval = False
if not retval:
bb.event.fire(bb.event.BuildCompleted(buildname, item, failures), self.configuration.event_data)
@@ -719,17 +691,17 @@ class BBCooker:
targets = self.checkPackages(targets)
def buildTargetsIdle(server, rq, abort):
- if abort or self.cookerAction == cookerStop:
+ if abort or self.state == state.stop:
rq.finish_runqueue(True)
- elif self.cookerAction == cookerShutdown:
+ elif self.state == state.shutdown:
rq.finish_runqueue(False)
failures = 0
try:
retval = rq.execute_runqueue()
except runqueue.TaskFailure as exc:
for fnid in exc.args:
- bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid])
- failures = failures + 1
+ buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
+ failures += len(exc.args)
retval = False
if not retval:
bb.event.fire(bb.event.BuildCompleted(buildname, targets, failures), self.configuration.event_data)
@@ -764,12 +736,10 @@ class BBCooker:
self.server.register_idle_function(buildTargetsIdle, rq)
def updateCache(self):
-
- if self.cookerState == cookerParsed:
+ if self.state == state.running:
return
- if self.cookerState != cookerParsing:
-
+ if self.state != state.parsing:
self.parseConfiguration ()
# Import Psyco if available and not disabled
@@ -779,11 +749,11 @@ class BBCooker:
try:
import psyco
except ImportError:
- bb.msg.note(1, bb.msg.domain.Collection, "Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
+ collectlog.info("Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
else:
psyco.bind( CookerParser.parse_next )
else:
- bb.msg.note(1, bb.msg.domain.Collection, "You have disabled Psyco. This decreases performance.")
+ collectlog.info("You have disabled Psyco. This decreases performance.")
self.status = bb.cache.CacheData()
@@ -799,12 +769,12 @@ class BBCooker:
bb.data.renameVar("__depends", "__base_depends", self.configuration.data)
self.parser = CookerParser(self, filelist, masked)
- self.cookerState = cookerParsing
+ self.state = state.parsing
if not self.parser.parse_next():
- bb.msg.debug(1, bb.msg.domain.Collection, "parsing complete")
+ collectlog.debug(1, "parsing complete")
self.buildDepgraph()
- self.cookerState = cookerParsed
+ self.state = state.running
return None
return True
@@ -848,9 +818,8 @@ class BBCooker:
def collect_bbfiles( self ):
"""Collect all available .bb build files"""
parsed, cached, skipped, masked = 0, 0, 0, 0
- self.bb_cache = bb.cache.init(self)
- bb.msg.debug(1, bb.msg.domain.Collection, "collecting .bb files")
+ collectlog.debug(1, "collecting .bb files")
files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split()
data.setVar("BBFILES", " ".join(files), self.configuration.data)
@@ -859,7 +828,7 @@ class BBCooker:
files = self.get_bbfiles()
if not len(files):
- bb.msg.error(bb.msg.domain.Collection, "no recipe files to build, check your BBPATH and BBFILES?")
+ collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
bb.event.fire(CookerExit(), self.configuration.event_data)
newfiles = set()
@@ -879,13 +848,14 @@ class BBCooker:
try:
bbmask_compiled = re.compile(bbmask)
except sre_constants.error:
- bb.msg.fatal(bb.msg.domain.Collection, "BBMASK is not a valid regular expression.")
+ collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
+ return list(newfiles), 0
bbfiles = []
bbappend = []
for f in newfiles:
if bbmask and bbmask_compiled.search(f):
- bb.msg.debug(1, bb.msg.domain.Collection, "skipping masked file %s" % f)
+ collectlog.debug(1, "skipping masked file %s", f)
masked += 1
continue
if f.endswith('.bb'):
@@ -893,26 +863,25 @@ class BBCooker:
elif f.endswith('.bbappend'):
bbappend.append(f)
else:
- bb.msg.note(1, bb.msg.domain.Collection, "File %s of unknown filetype in BBFILES? Ignorning..." % f)
+ collectlog.debug(1, "skipping %s: unknown file extension", f)
# Build a list of .bbappend files for each .bb file
- self.appendlist = {}
for f in bbappend:
base = os.path.basename(f).replace('.bbappend', '.bb')
if not base in self.appendlist:
self.appendlist[base] = []
self.appendlist[base].append(f)
-
+
return (bbfiles, masked)
def get_file_appends(self, fn):
"""
Returns a list of .bbappend files to apply to fn
- NB: collect_files() must have been called prior to this
+ NB: collect_bbfiles() must have been called prior to this
"""
f = os.path.basename(fn)
if f in self.appendlist:
- return self.appendlist[f]
+ return self.appendlist[f]
return []
def pre_serve(self):
@@ -924,6 +893,11 @@ class BBCooker:
def post_serve(self):
bb.event.fire(CookerExit(), self.configuration.event_data)
+ def shutdown(self):
+ self.state = state.shutdown
+
+ def stop(self):
+ self.state = state.stop
def server_main(cooker, func, *args):
cooker.pre_serve()
@@ -974,65 +948,119 @@ class CookerExit(bb.event.Event):
def __init__(self):
bb.event.Event.__init__(self)
-class CookerParser:
+def parse_file(task):
+ filename, appends = task
+ try:
+ return True, bb.cache.Cache.parse(filename, appends, parse_file.cfg)
+ except Exception, exc:
+ exc.recipe = filename
+ raise exc
+
+class CookerParser(object):
def __init__(self, cooker, filelist, masked):
- # Internal data
self.filelist = filelist
self.cooker = cooker
+ self.cfgdata = cooker.configuration.data
# Accounting statistics
self.parsed = 0
self.cached = 0
self.error = 0
self.masked = masked
- self.total = len(filelist)
self.skipped = 0
self.virtuals = 0
+ self.total = len(filelist)
- # Pointer to the next file to parse
- self.pointer = 0
+ self.current = 0
+ self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
+ multiprocessing.cpu_count())
+
+ self.bb_cache = bb.cache.Cache(self.cfgdata)
+ self.fromcache = []
+ self.willparse = []
+ for filename in self.filelist:
+ appends = self.cooker.get_file_appends(filename)
+ if not self.bb_cache.cacheValid(filename):
+ self.willparse.append((filename, appends))
+ else:
+ self.fromcache.append((filename, appends))
+ self.toparse = self.total - len(self.fromcache)
+ self.progress_chunk = max(self.toparse / 100, 1)
- def parse_next(self):
- cooker = self.cooker
- if self.pointer < len(self.filelist):
- f = self.filelist[self.pointer]
+ self.start()
- try:
- fromCache, skipped, virtuals = cooker.bb_cache.loadData(f, cooker.get_file_appends(f), cooker.configuration.data, cooker.status)
- if fromCache:
- self.cached += 1
- else:
- self.parsed += 1
-
- self.skipped += skipped
- self.virtuals += virtuals
-
- except IOError as e:
- self.error += 1
- cooker.bb_cache.remove(f)
- bb.msg.error(bb.msg.domain.Collection, "opening %s: %s" % (f, e))
- pass
- except KeyboardInterrupt:
- cooker.bb_cache.remove(f)
- cooker.bb_cache.sync()
- raise
- except Exception as e:
- self.error += 1
- cooker.bb_cache.remove(f)
- bb.msg.error(bb.msg.domain.Collection, "%s while parsing %s" % (e, f))
- except:
- cooker.bb_cache.remove(f)
- raise
- finally:
- bb.event.fire(bb.event.ParseProgress(self.cached, self.parsed, self.skipped, self.masked, self.virtuals, self.error, self.total), cooker.configuration.event_data)
+ def start(self):
+ def init(cfg):
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+ parse_file.cfg = cfg
+
+ bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
- self.pointer += 1
+ self.pool = multiprocessing.Pool(self.num_processes, init, [self.cfgdata])
+ parsed = self.pool.imap(parse_file, self.willparse)
+ self.pool.close()
- if self.pointer >= self.total:
- cooker.bb_cache.sync()
- bb.codeparser.parser_cache_save(cooker.configuration.data)
- if self.error > 0:
- raise ParsingErrorsFound
+ self.results = itertools.chain(self.load_cached(), parsed)
+
+ def shutdown(self, clean=True):
+ if clean:
+ event = bb.event.ParseCompleted(self.cached, self.parsed,
+ self.skipped, self.masked,
+ self.virtuals, self.error,
+ self.total)
+ bb.event.fire(event, self.cfgdata)
+ else:
+ self.pool.terminate()
+ self.pool.join()
+
+ sync = threading.Thread(target=self.bb_cache.sync)
+ sync.start()
+ atexit.register(lambda: sync.join())
+
+ codesync = threading.Thread(target=bb.codeparser.parser_cache_save, args=(self.cooker.configuration.data,))
+ codesync.start()
+ atexit.register(lambda: codesync.join())
+
+ def load_cached(self):
+ for filename, appends in self.fromcache:
+ cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
+ yield not cached, infos
+
+ def parse_next(self):
+ try:
+ parsed, result = self.results.next()
+ except StopIteration:
+ self.shutdown()
return False
+ except KeyboardInterrupt:
+ self.shutdown(clean=False)
+ raise
+ except Exception as exc:
+ self.shutdown(clean=False)
+ bb.fatal('Error parsing %s: %s' % (exc.recipe, exc))
+
+ self.current += 1
+ self.virtuals += len(result)
+ if parsed:
+ self.parsed += 1
+ if self.parsed % self.progress_chunk == 0:
+ bb.event.fire(bb.event.ParseProgress(self.parsed),
+ self.cfgdata)
+ else:
+ self.cached += 1
+
+ for virtualfn, info in result:
+ if info.skipped:
+ self.skipped += 1
+ else:
+ self.bb_cache.add_info(virtualfn, info, self.cooker.status,
+ parsed=parsed)
return True
+
+ def reparse(self, filename):
+ infos = self.bb_cache.parse(filename,
+ self.cooker.get_file_appends(filename),
+ self.cfgdata)
+ for vfn, info in infos:
+ self.cooker.status.add_from_recipeinfo(vfn, info)
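
Note on the new CookerParser above: recipes that miss the cache are parsed in a multiprocessing.Pool whose workers ignore SIGINT and receive the configuration through an initializer, and the results are chained behind the cache hits so parse_next() drains a single iterator. A stripped-down sketch of that structure, assuming a trivial stand-in for Cache.parse and omitting progress events and cache writes:

import itertools
import multiprocessing
import signal

def parse_one(filename):
    # Stand-in for bb.cache.Cache.parse(); must live at module level so the
    # pool can pickle it.
    return True, [(filename, {"skipped": False})]

def init_worker():
    # Workers ignore SIGINT so Ctrl-C is handled once, in the parent.
    signal.signal(signal.SIGINT, signal.SIG_IGN)

def parse_all(to_parse, from_cache, processes=None):
    pool = multiprocessing.Pool(processes, init_worker)
    parsed = pool.imap(parse_one, to_parse)
    pool.close()
    # Cache hits are served first, then freshly parsed results, so the
    # caller consumes a single iterator (as CookerParser.parse_next() does).
    cached = ((False, [(fn, {"skipped": False})]) for fn in from_cache)
    for was_parsed, infos in itertools.chain(cached, parsed):
        yield was_parsed, infos
    pool.join()

if __name__ == "__main__":
    for was_parsed, infos in parse_all(["a.bb", "b.bb"], ["c.bb"], processes=2):
        print(was_parsed, infos)
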
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 0aa8b404c..50f2218a7 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -161,7 +161,7 @@ def expandKeys(alterdata, readdata = None):
def inheritFromOS(d):
"""Inherit variables from the environment."""
- exportlist = bb.utils.preserved_envvars_export_list()
+ exportlist = bb.utils.preserved_envvars_exported()
for s in os.environ.keys():
try:
setVar(s, os.environ[s], d)
@@ -192,7 +192,8 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
return 0
if all:
- o.write('# %s=%s\n' % (var, oval))
+ commentVal = re.sub('\n', '\n#', str(oval))
+ o.write('# %s=%s\n' % (var, commentVal))
if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
return 0
@@ -201,7 +202,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
if unexport:
o.write('unset %s\n' % varExpanded)
- return 1
+ return 0
if not val:
return 0
@@ -219,8 +220,9 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
# if we're going to output this within doublequotes,
# to a shell, we need to escape the quotes in the var
alter = re.sub('"', '\\"', val.strip())
+ alter = re.sub('\n', ' \\\n', alter)
o.write('%s="%s"\n' % (varExpanded, alter))
- return 1
+ return 0
def emit_env(o=sys.__stdout__, d = init(), all=False):
"""Emits all items in the data store in a format such that it can be sourced by a shell."""
@@ -259,7 +261,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
for key in keys:
emit_var(key, o, d, False) and o.write('\n')
- emit_var(func, o, d, False) and o.write('\n')
+ emit_var(func, o, d, False) and o.write('\n')
newdeps = bb.codeparser.ShellParser().parse_shell(d.getVar(func, True))
seen = set()
while newdeps:
@@ -299,7 +301,7 @@ def build_dependencies(key, keys, shelldeps, d):
deps |= set((d.getVarFlag(key, "vardeps", True) or "").split())
deps -= set((d.getVarFlag(key, "vardepsexclude", True) or "").split())
except:
- bb.note("Error expanding variable %s" % key)
+ bb.note("Error expanding variable %s" % key)
raise
return deps
#bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
@@ -311,12 +313,10 @@ def generate_dependencies(d):
shelldeps = set(key for key in keys if d.getVarFlag(key, "export") and not d.getVarFlag(key, "unexport"))
deps = {}
- taskdeps = {}
tasklist = bb.data.getVar('__BBTASKS', d) or []
for task in tasklist:
deps[task] = build_dependencies(task, keys, shelldeps, d)
-
newdeps = deps[task]
seen = set()
while newdeps:
@@ -328,9 +328,8 @@ def generate_dependencies(d):
deps[dep] = build_dependencies(dep, keys, shelldeps, d)
newdeps |= deps[dep]
newdeps -= seen
- taskdeps[task] = seen | newdeps
#print "For %s: %s" % (task, str(taskdeps[task]))
- return taskdeps, deps
+ return tasklist, deps
def inherits_class(klass, d):
val = getVar('__inherit_cache', d) or []
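
Note on the data.py changes above: emit_var() now escapes embedded newlines with backslash continuations when emitting a shell assignment (and, in the commented 'all' output, prefixes each continuation line with '#'). A small sketch of the shell-emission path, with illustrative names only:

import re
import sys

def emit_shell_var(name, value, out=sys.stdout):
    # As in the patched emit_var(): escape double quotes, then turn embedded
    # newlines into backslash continuations so the assignment can still be
    # sourced by a shell as a single statement.
    escaped = re.sub('"', '\\"', value.strip())
    escaped = re.sub('\n', ' \\\n', escaped)
    out.write('%s="%s"\n' % (name, escaped))

emit_shell_var("DESCRIPTION", "line one\nline two")
# Emits:
#   DESCRIPTION="line one \
#   line two"
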
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index 30f9cbc2d..83e6f70cd 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -28,17 +28,21 @@ BitBake build tools.
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-import copy, re, sys
-import bb
+import copy, re
+from collections import MutableMapping
+import logging
+import bb, bb.codeparser
from bb import utils
from bb.COW import COWDictBase
+logger = logging.getLogger("BitBake.Data")
__setvar_keyword__ = ["_append", "_prepend"]
__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?')
__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")
+
class VariableParse:
def __init__(self, varname, d, val = None):
self.varname = varname
@@ -69,11 +73,24 @@ class VariableParse:
self.references |= parser.references
self.execs |= parser.execs
- value = utils.better_eval(codeobj, {"d": self.d})
+ value = utils.better_eval(codeobj, DataContext(self.d))
return str(value)
-class DataSmart:
+class DataContext(dict):
+ def __init__(self, metadata, **kwargs):
+ self.metadata = metadata
+ dict.__init__(self, **kwargs)
+ self['d'] = metadata
+
+ def __missing__(self, key):
+ value = self.metadata.getVar(key, True)
+ if value is None or self.metadata.getVarFlag(key, 'func'):
+ raise KeyError(key)
+ else:
+ return value
+
+class DataSmart(MutableMapping):
def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
self.dict = {}
@@ -100,10 +117,8 @@ class DataSmart:
s = __expand_python_regexp__.sub(varparse.python_sub, s)
if s == olds:
break
- except KeyboardInterrupt:
- raise
- except:
- bb.msg.note(1, bb.msg.domain.Data, "%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s))
+ except Exception:
+ logger.exception("Error evaluating '%s'", s)
raise
varparse.value = s
@@ -115,7 +130,7 @@ class DataSmart:
def expand(self, s, varname):
return self.expandWithRefs(s, varname).value
-
+
def finalize(self):
"""Performs final steps upon the datastore, including application of overrides"""
@@ -149,9 +164,9 @@ class DataSmart:
for var in vars:
name = var[:-l]
try:
- self[name] = self[var]
+ self.setVar(name, self.getVar(var, False))
except Exception:
- bb.msg.note(1, bb.msg.domain.Data, "Untracked delVar")
+ logger.info("Untracked delVar")
# now on to the appends and prepends
for op in __setvar_keyword__:
@@ -277,13 +292,13 @@ class DataSmart:
self._makeShadowCopy(var)
self.dict[var][flag] = flagvalue
- def getVarFlag(self, var, flag, exp = False):
+ def getVarFlag(self, var, flag, expand=False):
local_var = self._findVar(var)
value = None
if local_var:
if flag in local_var:
value = copy.copy(local_var[flag])
- if exp and value:
+ if expand and value:
value = self.expand(value, None)
return value
@@ -347,23 +362,53 @@ class DataSmart:
return data
- # Dictionary Methods
- def keys(self):
- def _keys(d, mykey):
+ def expandVarref(self, variable, parents=False):
+ """Find all references to variable in the data and expand it
+ in place, optionally descending to parent datastores."""
+
+ if parents:
+ keys = iter(self)
+ else:
+ keys = self.localkeys()
+
+ ref = '${%s}' % variable
+ value = self.getVar(variable, False)
+ for key in keys:
+ referrervalue = self.getVar(key, False)
+ if referrervalue and ref in referrervalue:
+ self.setVar(key, referrervalue.replace(ref, value))
+
+ def localkeys(self):
+ for key in self.dict:
+ if key != '_data':
+ yield key
+
+ def __iter__(self):
+ seen = set()
+ def _keys(d):
if "_data" in d:
- _keys(d["_data"], mykey)
+ for key in _keys(d["_data"]):
+ yield key
- for key in d.keys():
+ for key in d:
if key != "_data":
- mykey[key] = None
- keytab = {}
- _keys(self.dict, keytab)
- return keytab.keys()
+ if not key in seen:
+ seen.add(key)
+ yield key
+ return _keys(self.dict)
+
+ def __len__(self):
+ return len(frozenset(self))
def __getitem__(self, item):
- #print "Warning deprecated"
- return self.getVar(item, False)
+ value = self.getVar(item, False)
+ if value is None:
+ raise KeyError(item)
+ else:
+ return value
+
+ def __setitem__(self, var, value):
+ self.setVar(var, value)
- def __setitem__(self, var, data):
- #print "Warning deprecated"
- self.setVar(var, data)
+ def __delitem__(self, var):
+ self.delVar(var)
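
Two of the data_smart.py changes above are worth reading together: DataSmart now implements the full MutableMapping protocol (iteration, __len__, __delitem__), and inline ${@...} expansion evaluates against a DataContext whose __missing__ hook resolves bare names from the datastore. A standalone sketch of that lookup pattern; FakeMetadata is a hypothetical stand-in for the real datastore, used only so the example runs:

    class FakeMetadata:
        """Hypothetical stand-in: just enough of the datastore API for this sketch."""
        def __init__(self, values):
            self.values = values
        def getVar(self, key, expand=True):
            return self.values.get(key)
        def getVarFlag(self, key, flag):
            return None

    class DataContext(dict):
        def __init__(self, metadata, **kwargs):
            self.metadata = metadata
            dict.__init__(self, **kwargs)
            self['d'] = metadata
        def __missing__(self, key):
            value = self.metadata.getVar(key, True)
            if value is None or self.metadata.getVarFlag(key, 'func'):
                raise KeyError(key)
            return value

    md = FakeMetadata({"PN": "example"})
    ctx = DataContext(md)
    print(ctx["PN"])                          # resolved through __missing__
    print(eval("PN + '-native'", {}, ctx))    # how a ${@...} snippet sees variables
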
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index 45458c2d6..3467ddd61 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -24,16 +24,20 @@ BitBake build tools.
import os, sys
import warnings
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+import logging
+import atexit
import bb.utils
-import pickle
# This is the pid for which we should generate the event. This is set when
# the runqueue forks off.
worker_pid = 0
worker_pipe = None
-useStdout = True
-class Event:
+class Event(object):
"""Base class for events"""
def __init__(self):
@@ -55,8 +59,7 @@ bb.utils._context["NotHandled"] = NotHandled
bb.utils._context["Handled"] = Handled
def fire_class_handlers(event, d):
- import bb.msg
- if isinstance(event, bb.msg.MsgBase):
+ if isinstance(event, logging.LogRecord):
return
for handler in _handlers:
@@ -73,7 +76,28 @@ def fire_class_handlers(event, d):
h(event)
del event.data
+ui_queue = []
+@atexit.register
+def print_ui_queue():
+ """If we're exiting before a UI has been spawned, display any queued
+ LogRecords to the console."""
+ logger = logging.getLogger("BitBake")
+ if not _ui_handlers:
+ from bb.msg import BBLogFormatter
+ console = logging.StreamHandler(sys.stdout)
+ console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
+ logger.handlers = [console]
+ while ui_queue:
+ event = ui_queue.pop()
+ if isinstance(event, logging.LogRecord):
+ logger.handle(event)
+
def fire_ui_handlers(event, d):
+ if not _ui_handlers:
+ # No UI handlers registered yet, queue up the messages
+ ui_queue.append(event)
+ return
+
errors = []
for h in _ui_handlers:
#print "Sending event %s" % event
@@ -104,13 +128,11 @@ def fire(event, d):
def worker_fire(event, d):
data = "<event>" + pickle.dumps(event) + "</event>"
worker_pipe.write(data)
- worker_pipe.flush()
def fire_from_worker(event, d):
if not event.startswith("<event>") or not event.endswith("</event>"):
print("Error, not an event %s" % event)
return
- #print "Got event %s" % event
event = pickle.loads(event[7:-8])
fire_ui_handlers(event, d)
@@ -123,7 +145,7 @@ def register(name, handler):
if handler is not None:
# handle string containing python code
- if type(handler).__name__ == "str":
+ if isinstance(handler, basestring):
tmp = "def tmpHandler(e):\n%s" % handler
comp = bb.utils.better_compile(tmp, "tmpHandler(e)", "bb.event._registerCode")
_handlers[name] = comp
@@ -139,7 +161,6 @@ def remove(name, handler):
def register_UIHhandler(handler):
bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
_ui_handlers[_ui_handler_seq] = handler
- bb.event.useStdout = False
return _ui_handler_seq
def unregister_UIHhandler(handlerNum):
@@ -274,10 +295,14 @@ class MultipleProviders(Event):
"""
return self._candidates
-class ParseProgress(Event):
- """
- Parsing Progress Event
- """
+class ParseStarted(Event):
+ """Recipe parsing for the runqueue has begun"""
+ def __init__(self, total):
+ Event.__init__(self)
+ self.total = total
+
+class ParseCompleted(Event):
+ """Recipe parsing for the runqueue has completed"""
def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total):
Event.__init__(self)
@@ -290,6 +315,32 @@ class ParseProgress(Event):
self.sofar = cached + parsed
self.total = total
+class ParseProgress(Event):
+ """Recipe parsing progress"""
+
+ def __init__(self, current):
+ self.current = current
+
+class CacheLoadStarted(Event):
+ """Loading of the dependency cache has begun"""
+ def __init__(self, total):
+ Event.__init__(self)
+ self.total = total
+
+class CacheLoadProgress(Event):
+ """Cache loading progress"""
+ def __init__(self, current):
+ Event.__init__(self)
+ self.current = current
+
+class CacheLoadCompleted(Event):
+ """Cache loading is complete"""
+ def __init__(self, total, num_entries):
+ Event.__init__(self)
+ self.total = total
+ self.num_entries = num_entries
+
+
class DepTreeGenerated(Event):
"""
Event when a dependency tree has been generated
@@ -298,3 +349,38 @@ class DepTreeGenerated(Event):
def __init__(self, depgraph):
Event.__init__(self)
self._depgraph = depgraph
+
+class MsgBase(Event):
+ """Base class for messages"""
+
+ def __init__(self, msg):
+ self._message = msg
+ Event.__init__(self)
+
+class MsgDebug(MsgBase):
+ """Debug Message"""
+
+class MsgNote(MsgBase):
+ """Note Message"""
+
+class MsgWarn(MsgBase):
+ """Warning Message"""
+
+class MsgError(MsgBase):
+ """Error Message"""
+
+class MsgFatal(MsgBase):
+ """Fatal Message"""
+
+class MsgPlain(MsgBase):
+ """General output"""
+
+class LogHandler(logging.Handler):
+ """Dispatch logging messages as bitbake events"""
+
+ def emit(self, record):
+ fire(record, None)
+
+ def filter(self, record):
+ record.taskpid = worker_pid
+ return True
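
The event.py changes route ordinary logging records through the event system: LogHandler turns every record into a fired event, and fire_ui_handlers() queues events until a UI registers (print_ui_queue() flushes anything left at exit). A small runnable sketch of the record-to-event forwarding; the handler and callback names here are illustrative, not BitBake API:

    import logging

    class ForwardingHandler(logging.Handler):
        """Illustrative handler: hand each LogRecord to a callback, the way
        bb.event.LogHandler hands records to fire()."""
        def __init__(self, fire):
            logging.Handler.__init__(self)
            self.fire = fire
        def emit(self, record):
            self.fire(record)

    queued = []                                   # plays the role of ui_queue
    log = logging.getLogger("demo")
    log.setLevel(logging.DEBUG)
    log.addHandler(ForwardingHandler(queued.append))
    log.info("hello from the worker")
    print(queued[0].levelname, queued[0].getMessage())
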
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 88212ffa3..b452751c7 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -27,9 +27,15 @@ BitBake build tools.
from __future__ import absolute_import
from __future__ import print_function
import os, re
+import logging
import bb
from bb import data
from bb import persist_data
+from bb import utils
+
+__version__ = "1"
+
+logger = logging.getLogger("BitBake.Fetch")
class MalformedUrl(Exception):
"""Exception raised when encountering an invalid url"""
@@ -117,9 +123,8 @@ def encodeurl(decoded):
return url
def uri_replace(uri, uri_find, uri_replace, d):
-# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
if not uri or not uri_find or not uri_replace:
- bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
+ logger.debug(1, "uri_replace: passed an undefined value, not replacing")
uri_decoded = list(decodeurl(uri))
uri_find_decoded = list(decodeurl(uri_find))
uri_replace_decoded = list(decodeurl(uri_replace))
@@ -134,38 +139,32 @@ def uri_replace(uri, uri_find, uri_replace, d):
if d:
localfn = bb.fetch.localpath(uri, d)
if localfn:
- result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d))
-# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
+ result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(bb.fetch.localpath(uri, d)))
else:
-# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match")
return uri
-# else:
-# for j in i:
-# FIXME: apply replacements against options
return encodeurl(result_decoded)
methods = []
urldata_cache = {}
saved_headrevs = {}
-persistent_database_connection = {}
def fetcher_init(d):
"""
Called to initialize the fetchers once the configuration data is known.
Calls before this must not hit the cache.
"""
- pd = persist_data.PersistData(d, persistent_database_connection)
+ pd = persist_data.persist(d)
# When to drop SCM head revisions controlled by user policy
srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
if srcrev_policy == "cache":
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
+ logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
elif srcrev_policy == "clear":
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy)
+ logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
try:
- bb.fetch.saved_headrevs = pd.getKeyValues("BB_URI_HEADREVS")
+ bb.fetch.saved_headrevs = pd['BB_URI_HEADREVS'].items()
except:
pass
- pd.delDomain("BB_URI_HEADREVS")
+ del pd['BB_URI_HEADREVS']
else:
raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
@@ -173,28 +172,24 @@ def fetcher_init(d):
if hasattr(m, "init"):
m.init(d)
- # Make sure our domains exist
- pd.addDomain("BB_URI_HEADREVS")
- pd.addDomain("BB_URI_LOCALCOUNT")
-
-def fetcher_compare_revisons(d):
+def fetcher_compare_revisions(d):
"""
    Compare the revisions in the persistent cache with current values and
return true/false on whether they've changed.
"""
- pd = persist_data.PersistData(d, persistent_database_connection)
- data = pd.getKeyValues("BB_URI_HEADREVS")
+ pd = persist_data.persist(d)
+ data = pd['BB_URI_HEADREVS'].items()
data2 = bb.fetch.saved_headrevs
changed = False
for key in data:
if key not in data2 or data2[key] != data[key]:
- bb.msg.debug(1, bb.msg.domain.Fetcher, "%s changed" % key)
+ logger.debug(1, "%s changed", key)
changed = True
return True
else:
- bb.msg.debug(2, bb.msg.domain.Fetcher, "%s did not change" % key)
+ logger.debug(2, "%s did not change", key)
return False
# Function call order is usually:
@@ -225,12 +220,6 @@ def init(urls, d, setup = True):
def mirror_from_string(data):
return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
-def removefile(f):
- try:
- os.remove(f)
- except:
- pass
-
def verify_checksum(u, ud, d):
"""
verify the MD5 and SHA256 checksum for downloaded src
@@ -251,17 +240,20 @@ def verify_checksum(u, ud, d):
sha256data = bb.utils.sha256_file(ud.localpath)
if (ud.md5_expected == None or ud.sha256_expected == None):
- bb.warn("Missing SRC_URI checksum for %s, consider to add\n" \
- "SRC_URI[%s] = \"%s\"\nSRC_URI[%s] = \"%s\"" \
- % (ud.localpath, ud.md5_name, md5data, ud.sha256_name, sha256data))
+ logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
+ 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
+ ud.localpath, ud.md5_name, md5data,
+ ud.sha256_name, sha256data)
if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
raise FetchError("No checksum specified for %s." % u)
return
if (ud.md5_expected != md5data or ud.sha256_expected != sha256data):
- bb.error("The checksums for '%s' did not match." % ud.localpath)
- bb.error("Expected MD5: '%s' and Got: '%s'" % (ud.md5_expected, md5data))
- bb.error("Expected SHA256: '%s' and Got: '%s'" % (ud.sha256_expected, sha256data))
+ logger.error('The checksums for "%s" did not match.\n'
+ ' MD5: expected "%s", got "%s"\n'
+ ' SHA256: expected "%s", got "%s"\n',
+ ud.localpath, ud.md5_expected, md5data,
+ ud.sha256_expected, sha256data)
raise FetchError("%s checksum mismatch." % u)
def go(d, urls = None):
@@ -298,7 +290,7 @@ def go(d, urls = None):
localpath = ud.localpath
except FetchError:
# Remove any incomplete file
- removefile(ud.localpath)
+ bb.utils.remove(ud.localpath)
# Finally, try fetching uri, u, from MIRRORS
mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
localpath = try_mirrors (d, u, mirrors)
@@ -307,8 +299,6 @@ def go(d, urls = None):
ud.localpath = localpath
- verify_checksum(u, ud, d)
-
if os.path.exists(ud.md5):
# Touch the md5 file to show active use of the download
try:
@@ -317,6 +307,8 @@ def go(d, urls = None):
# Errors aren't fatal here
pass
else:
+ # Only check the checksums if we've not seen this item before
+ verify_checksum(u, ud, d)
Fetch.write_md5sum(u, ud, d)
bb.utils.unlockfile(lf)
@@ -334,7 +326,7 @@ def checkstatus(d, urls = None):
for u in urls:
ud = urldata[u]
m = ud.method
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Testing URL %s" % u)
+ logger.debug(1, "Testing URL %s", u)
# First try checking uri, u, from PREMIRRORS
mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
ret = try_mirrors(d, u, mirrors, True)
@@ -365,6 +357,9 @@ def localpaths(d):
srcrev_internal_call = False
+def get_autorev(d):
+ return get_srcrev(d)
+
def get_srcrev(d):
"""
Return the version string for the current package
@@ -388,17 +383,17 @@ def get_srcrev(d):
scms = []
- # Only call setup_localpath on URIs which suppports_srcrev()
+ # Only call setup_localpath on URIs which supports_srcrev()
urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
for u in urldata:
ud = urldata[u]
- if ud.method.suppports_srcrev():
+ if ud.method.supports_srcrev():
if not ud.setup:
ud.setup_localpath(d)
scms.append(u)
if len(scms) == 0:
- bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
+ logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
raise ParameterError
if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
@@ -412,7 +407,7 @@ def get_srcrev(d):
#
format = bb.data.getVar('SRCREV_FORMAT', d, 1)
if not format:
- bb.msg.error(bb.msg.domain.Fetcher, "The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
+ logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
raise ParameterError
for scm in scms:
@@ -454,7 +449,7 @@ def runfetchcmd(cmd, d, quiet = False):
if val:
cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
+ logger.debug(1, "Running %s", cmd)
# redirect stderr to stdout
stdout_handle = os.popen(cmd + " 2>&1", "r")
@@ -490,7 +485,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False):
"""
fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
if not check and os.access(fpath, os.R_OK) and not force:
- bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % fpath)
+ logger.debug(1, "%s already exists, skipping checkout.", fpath)
return fpath
ld = d.createCopy()
@@ -500,7 +495,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False):
try:
ud = FetchData(newuri, ld)
except bb.fetch.NoMethodError:
- bb.msg.debug(1, bb.msg.domain.Fetcher, "No method for %s" % uri)
+ logger.debug(1, "No method for %s", uri)
continue
ud.setup_localpath(ld)
@@ -518,8 +513,8 @@ def try_mirrors(d, uri, mirrors, check = False, force = False):
bb.fetch.MD5SumError):
import sys
(type, value, traceback) = sys.exc_info()
- bb.msg.debug(2, bb.msg.domain.Fetcher, "Mirror fetch failure: %s" % value)
- removefile(ud.localpath)
+ logger.debug(2, "Mirror fetch failure: %s", value)
+ bb.utils.remove(ud.localpath)
continue
return None
@@ -610,6 +605,13 @@ class Fetch(object):
and duplicate code execution)
"""
return url
+ def _strip_leading_slashes(self, relpath):
+ """
+ Remove leading slash as os.path.join can't cope
+ """
+ while os.path.isabs(relpath):
+ relpath = relpath[1:]
+ return relpath
def setUrls(self, urls):
self.__urls = urls
@@ -625,7 +627,7 @@ class Fetch(object):
"""
return False
- def suppports_srcrev(self):
+ def supports_srcrev(self):
"""
The fetcher supports auto source revisions (SRCREV)
"""
@@ -654,7 +656,7 @@ class Fetch(object):
Check the status of a URL
Assumes localpath was called first
"""
- bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s could not be checked for status since no method exists." % url)
+ logger.info("URL %s could not be checked for status since no method exists.", url)
return True
def getSRCDate(urldata, d):
@@ -695,7 +697,7 @@ class Fetch(object):
if not rev:
rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1)
if not rev:
- rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
+ rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
if not rev:
rev = data.getVar("SRCREV", d, 1)
if rev == "INVALID":
@@ -729,9 +731,7 @@ class Fetch(object):
"""
Verify the md5sum we wanted with the one we got
"""
- wanted_sum = None
- if 'md5sum' in ud.parm:
- wanted_sum = ud.parm['md5sum']
+ wanted_sum = ud.parm.get('md5sum')
if not wanted_sum:
return True
@@ -756,14 +756,14 @@ class Fetch(object):
if not hasattr(self, "_latest_revision"):
raise ParameterError
- pd = persist_data.PersistData(d, persistent_database_connection)
+ pd = persist_data.persist(d)
+ revs = pd['BB_URI_HEADREVS']
key = self.generate_revision_key(url, ud, d)
- rev = pd.getValue("BB_URI_HEADREVS", key)
+ rev = revs[key]
if rev != None:
return str(rev)
- rev = self._latest_revision(url, ud, d)
- pd.setValue("BB_URI_HEADREVS", key, rev)
+ revs[key] = rev = self._latest_revision(url, ud, d)
return rev
def sortable_revision(self, url, ud, d):
@@ -773,17 +773,18 @@ class Fetch(object):
if hasattr(self, "_sortable_revision"):
return self._sortable_revision(url, ud, d)
- pd = persist_data.PersistData(d, persistent_database_connection)
+ pd = persist_data.persist(d)
+ localcounts = pd['BB_URI_LOCALCOUNT']
key = self.generate_revision_key(url, ud, d)
latest_rev = self._build_revision(url, ud, d)
- last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev")
+ last_rev = localcounts[key + '_rev']
uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
count = None
if uselocalcount:
count = Fetch.localcount_internal_helper(ud, d)
if count is None:
- count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count")
+ count = localcounts[key + '_count']
if last_rev == latest_rev:
return str(count + "+" + latest_rev)
@@ -799,8 +800,8 @@ class Fetch(object):
else:
count = str(int(count) + 1)
- pd.setValue("BB_URI_LOCALCOUNT", key + "_rev", latest_rev)
- pd.setValue("BB_URI_LOCALCOUNT", key + "_count", count)
+ localcounts[key + '_rev'] = latest_rev
+ localcounts[key + '_count'] = count
return str(count + "+" + latest_rev)
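
Throughout the fetcher the old PersistData.getValue()/setValue()/addDomain() calls become index operations on persist_data.persist(d), a mapping of named domains to key/value stores. The real object is database-backed; the sketch below substitutes an in-memory mapping purely to show the access pattern the patch switches to:

    from collections import defaultdict

    pd = defaultdict(dict)        # in-memory stand-in (assumption) for persist_data.persist(d)

    revs = pd['BB_URI_HEADREVS']
    key = "git:git.example.com.myrepo-mypackage"      # hypothetical revision key
    if revs.get(key) is None:
        revs[key] = "deadbeef"                        # would come from _latest_revision()

    localcounts = pd['BB_URI_LOCALCOUNT']
    localcounts[key + '_rev'] = revs[key]
    localcounts[key + '_count'] = "1"
    print(localcounts[key + '_count'] + "+" + localcounts[key + '_rev'])
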
diff --git a/bitbake/lib/bb/fetch/bzr.py b/bitbake/lib/bb/fetch/bzr.py
index 813d7d8c8..afaf79990 100644
--- a/bitbake/lib/bb/fetch/bzr.py
+++ b/bitbake/lib/bb/fetch/bzr.py
@@ -25,11 +25,10 @@ BitBake 'Fetch' implementation for bzr.
import os
import sys
+import logging
import bb
from bb import data
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-from bb.fetch import runfetchcmd
+from bb.fetch import Fetch, FetchError, runfetchcmd, logger
class Bzr(Fetch):
def supports(self, url, ud, d):
@@ -38,10 +37,7 @@ class Bzr(Fetch):
def localpath (self, url, ud, d):
# Create paths to bzr checkouts
- relpath = ud.path
- if relpath.startswith('/'):
- # Remove leading slash as os.path.join can't cope
- relpath = relpath[1:]
+ relpath = self._strip_leading_slashes(ud.path)
ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
revision = Fetch.srcrev_internal_helper(ud, d)
@@ -65,9 +61,7 @@ class Bzr(Fetch):
basecmd = data.expand('${FETCHCMD_bzr}', d)
- proto = "http"
- if "proto" in ud.parm:
- proto = ud.parm["proto"]
+ proto = ud.parm.get('proto', 'http')
bzrroot = ud.host + ud.path
@@ -93,22 +87,29 @@ class Bzr(Fetch):
if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
bzrcmd = self._buildbzrcommand(ud, d, "update")
- bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Update %s" % loc)
+ logger.debug(1, "BZR Update %s", loc)
os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
runfetchcmd(bzrcmd, d)
else:
- os.system("rm -rf %s" % os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)))
+ bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
bzrcmd = self._buildbzrcommand(ud, d, "fetch")
- bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Checkout %s" % loc)
+ logger.debug(1, "BZR Checkout %s", loc)
bb.mkdirhier(ud.pkgdir)
os.chdir(ud.pkgdir)
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % bzrcmd)
+ logger.debug(1, "Running %s", bzrcmd)
runfetchcmd(bzrcmd, d)
os.chdir(ud.pkgdir)
+
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
+
# tar them up to a defined filename
try:
- runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.pkgdir)), d)
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d)
except:
t, v, tb = sys.exc_info()
try:
@@ -117,7 +118,7 @@ class Bzr(Fetch):
pass
raise t, v, tb
- def suppports_srcrev(self):
+ def supports_srcrev(self):
return True
def _revision_key(self, url, ud, d):
@@ -130,7 +131,7 @@ class Bzr(Fetch):
"""
Return the latest upstream revision number
"""
- bb.msg.debug(2, bb.msg.domain.Fetcher, "BZR fetcher hitting network for %s" % url)
+ logger.debug(2, "BZR fetcher hitting network for %s", url)
output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
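
The bzr hunk above introduces a pattern this patch repeats for cvs, hg, svn and repo: SCM bookkeeping directories are excluded from the cached tarball unless SRC_URI carries scmdata=keep. The selection logic, reduced to a standalone helper (the helper name is illustrative, not part of the patch):

    def scm_tar_flags(parm, exclude_dirs):
        """Return tar exclude flags unless the url asked to keep SCM metadata."""
        if parm.get("scmdata", "") == "keep":
            return ""
        return " ".join("--exclude '%s'" % d for d in exclude_dirs)

    print(scm_tar_flags({}, [".bzr", ".bzrtags"]))
    print(scm_tar_flags({"scmdata": "keep"}, [".bzr", ".bzrtags"]))
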
diff --git a/bitbake/lib/bb/fetch/cvs.py b/bitbake/lib/bb/fetch/cvs.py
index 61976f7ef..0edb794b0 100644
--- a/bitbake/lib/bb/fetch/cvs.py
+++ b/bitbake/lib/bb/fetch/cvs.py
@@ -27,11 +27,10 @@ BitBake build tools.
#
import os
+import logging
import bb
from bb import data
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-from bb.fetch import MissingParameterError
+from bb.fetch import Fetch, FetchError, MissingParameterError, logger
class Cvs(Fetch):
"""
@@ -48,9 +47,7 @@ class Cvs(Fetch):
raise MissingParameterError("cvs method needs a 'module' parameter")
ud.module = ud.parm["module"]
- ud.tag = ""
- if 'tag' in ud.parm:
- ud.tag = ud.parm['tag']
+ ud.tag = ud.parm.get('tag', "")
# Override the default date in certain cases
if 'date' in ud.parm:
@@ -77,17 +74,9 @@ class Cvs(Fetch):
def go(self, loc, ud, d):
- method = "pserver"
- if "method" in ud.parm:
- method = ud.parm["method"]
-
- localdir = ud.module
- if "localdir" in ud.parm:
- localdir = ud.parm["localdir"]
-
- cvs_port = ""
- if "port" in ud.parm:
- cvs_port = ud.parm["port"]
+ method = ud.parm.get('method', 'pserver')
+ localdir = ud.parm.get('localdir', ud.module)
+ cvs_port = ud.parm.get('port', '')
cvs_rsh = None
if method == "ext":
@@ -136,21 +125,21 @@ class Cvs(Fetch):
cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
# create module directory
- bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory")
+ logger.debug(2, "Fetch: checking for module directory")
pkg = data.expand('${PN}', d)
pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
moddir = os.path.join(pkgdir, localdir)
if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
- bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
+ logger.info("Update " + loc)
# update sources there
os.chdir(moddir)
myret = os.system(cvsupdatecmd)
else:
- bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+ logger.info("Fetch " + loc)
# check out sources there
bb.mkdirhier(pkgdir)
os.chdir(pkgdir)
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd)
+ logger.debug(1, "Running %s", cvscmd)
myret = os.system(cvscmd)
if myret != 0 or not os.access(moddir, os.R_OK):
@@ -160,14 +149,20 @@ class Cvs(Fetch):
pass
raise FetchError(ud.module)
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude 'CVS'"
+
# tar them up to a defined filename
if 'fullpath' in ud.parm:
os.chdir(pkgdir)
- myret = os.system("tar -czf %s %s" % (ud.localpath, localdir))
+ myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir))
else:
os.chdir(moddir)
os.chdir('..')
- myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(moddir)))
+ myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir)))
if myret != 0:
try:
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
index 9bd447ff8..b37a09743 100644
--- a/bitbake/lib/bb/fetch/git.py
+++ b/bitbake/lib/bb/fetch/git.py
@@ -22,9 +22,11 @@ BitBake 'Fetch' git implementation
import os
import bb
+import bb.persist_data
from bb import data
from bb.fetch import Fetch
from bb.fetch import runfetchcmd
+from bb.fetch import logger
class Git(Fetch):
"""Class to fetch a module or modules from git repositories"""
@@ -116,6 +118,7 @@ class Git(Fetch):
repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)
+
coname = '%s' % (ud.tag)
codir = os.path.join(ud.clonedir, coname)
@@ -153,7 +156,7 @@ class Git(Fetch):
os.chdir(ud.clonedir)
mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
if mirror_tarballs != "0" or 'fullclone' in ud.parm:
- bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
+ logger.info("Creating tarball of git repository")
runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
if 'fullclone' in ud.parm:
@@ -179,19 +182,25 @@ class Git(Fetch):
readpathspec = ""
coprefix = os.path.join(codir, "git", "")
- bb.mkdirhier(codir)
- os.chdir(ud.clonedir)
- runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
- runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ runfetchcmd("%s clone -n %s %s" % (ud.basecmd, ud.clonedir, coprefix), d)
+ os.chdir(coprefix)
+ runfetchcmd("%s checkout -q -f %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
+ else:
+ bb.mkdirhier(codir)
+ os.chdir(ud.clonedir)
+ runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
+ runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)
os.chdir(codir)
- bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
+ logger.info("Creating tarball of git checkout")
runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
os.chdir(ud.clonedir)
bb.utils.prunedir(codir)
- def suppports_srcrev(self):
+ def supports_srcrev(self):
return True
def _contains_ref(self, tag, d):
@@ -199,11 +208,19 @@ class Git(Fetch):
output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True)
return output.split()[0] != "0"
- def _revision_key(self, url, ud, d):
+ def _revision_key(self, url, ud, d, branch=False):
"""
Return a unique key for the url
"""
- return "git:" + ud.host + ud.path.replace('/', '.') + ud.branch
+ key = 'git:' + ud.host + ud.path.replace('/', '.')
+ if branch:
+ return key + ud.branch
+ else:
+ return key
+
+ def generate_revision_key(self, url, ud, d, branch=False):
+ key = self._revision_key(url, ud, d, branch)
+ return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
def _latest_revision(self, url, ud, d):
"""
@@ -221,6 +238,74 @@ class Git(Fetch):
raise bb.fetch.FetchError("Fetch command %s gave empty output\n" % (cmd))
return output.split()[0]
+ def latest_revision(self, url, ud, d):
+ """
+ Look in the cache for the latest revision, if not present ask the SCM.
+ """
+ persisted = bb.persist_data.persist(d)
+ revs = persisted['BB_URI_HEADREVS']
+
+ key = self.generate_revision_key(url, ud, d, branch=True)
+ rev = revs[key]
+ if rev is None:
+ # Compatibility with old key format, no branch included
+ oldkey = self.generate_revision_key(url, ud, d, branch=False)
+ rev = revs[oldkey]
+ if rev is not None:
+ del revs[oldkey]
+ else:
+ rev = self._latest_revision(url, ud, d)
+ revs[key] = rev
+
+ return str(rev)
+
+ def sortable_revision(self, url, ud, d):
+ """Return a sortable revision of the form <count>+<rev>, migrating any old-format cache keys."""
+ pd = bb.persist_data.persist(d)
+ localcounts = pd['BB_URI_LOCALCOUNT']
+ key = self.generate_revision_key(url, ud, d, branch=True)
+ oldkey = self.generate_revision_key(url, ud, d, branch=False)
+
+ latest_rev = self._build_revision(url, ud, d)
+ last_rev = localcounts[key + '_rev']
+ if last_rev is None:
+ last_rev = localcounts[oldkey + '_rev']
+ if last_rev is not None:
+ del localcounts[oldkey + '_rev']
+ localcounts[key + '_rev'] = last_rev
+
+ uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
+ count = None
+ if uselocalcount:
+ count = Fetch.localcount_internal_helper(ud, d)
+ if count is None:
+ count = localcounts[key + '_count']
+ if count is None:
+ count = localcounts[oldkey + '_count']
+ if count is not None:
+ del localcounts[oldkey + '_count']
+ localcounts[key + '_count'] = count
+
+ if last_rev == latest_rev:
+ return str(count + "+" + latest_rev)
+
+ buildindex_provided = hasattr(self, "_sortable_buildindex")
+ if buildindex_provided:
+ count = self._sortable_buildindex(url, ud, d, latest_rev)
+ if count is None:
+ count = "0"
+ elif uselocalcount or buildindex_provided:
+ count = str(count)
+ else:
+ count = str(int(count) + 1)
+
+ localcounts[key + '_rev'] = latest_rev
+ localcounts[key + '_count'] = count
+
+ return str(count + "+" + latest_rev)
+
def _build_revision(self, url, ud, d):
return ud.tag
@@ -238,7 +323,7 @@ class Git(Fetch):
print("no repo")
self.go(None, ud, d)
if not os.path.exists(ud.clonedir):
- bb.msg.error(bb.msg.domain.Fetcher, "GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value" % (url, ud.clonedir))
+ logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
return None
@@ -250,5 +335,5 @@ class Git(Fetch):
os.chdir(cwd)
buildindex = "%s" % output.split()[0]
- bb.msg.debug(1, bb.msg.domain.Fetcher, "GIT repository for %s in %s is returning %s revisions in rev-list before %s" % (url, ud.clonedir, buildindex, rev))
+ logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
return buildindex
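
Git's new latest_revision()/sortable_revision() overrides make the persistent-cache keys branch-aware while still honouring, and retiring, entries written under the old branch-less keys. The migration step, condensed into a sketch that works against any mutable mapping; migrate_rev() and compute() are illustrative stand-ins rather than the patch's API:

    def migrate_rev(revs, newkey, oldkey, compute):
        """Prefer the branch-aware key, fall back to (and delete) the old key,
        otherwise ask the SCM via compute()."""
        rev = revs.get(newkey)
        if rev is None:
            rev = revs.get(oldkey)
            if rev is not None:
                del revs[oldkey]
            else:
                rev = compute()
            revs[newkey] = rev
        return str(rev)

    cache = {"git:git.example.com.repo": "1234abcd"}     # old-format entry
    print(migrate_rev(cache, "git:git.example.com.repomaster",
                      "git:git.example.com.repo", lambda: "fresh"))
    print(cache)     # old key removed, branch-aware key stored
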
diff --git a/bitbake/lib/bb/fetch/hg.py b/bitbake/lib/bb/fetch/hg.py
index efb3b5c76..3c649a6ad 100644
--- a/bitbake/lib/bb/fetch/hg.py
+++ b/bitbake/lib/bb/fetch/hg.py
@@ -26,21 +26,27 @@ BitBake 'Fetch' implementation for mercurial DRCS (hg).
import os
import sys
+import logging
import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
from bb.fetch import MissingParameterError
from bb.fetch import runfetchcmd
+from bb.fetch import logger
class Hg(Fetch):
- """Class to fetch a from mercurial repositories"""
+ """Class to fetch from mercurial repositories"""
def supports(self, url, ud, d):
"""
Check to see if a given url can be fetched with mercurial.
"""
return ud.type in ['hg']
+ def forcefetch(self, url, ud, d):
+ revTag = ud.parm.get('rev', 'tip')
+ return revTag == "tip"
+
def localpath(self, url, ud, d):
if not "module" in ud.parm:
raise MissingParameterError("hg method needs a 'module' parameter")
@@ -48,10 +54,7 @@ class Hg(Fetch):
ud.module = ud.parm["module"]
# Create paths to mercurial checkouts
- relpath = ud.path
- if relpath.startswith('/'):
- # Remove leading slash as os.path.join can't cope
- relpath = relpath[1:]
+ relpath = self._strip_leading_slashes(ud.path)
ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
ud.moddir = os.path.join(ud.pkgdir, ud.module)
@@ -78,9 +81,7 @@ class Hg(Fetch):
basecmd = data.expand('${FETCHCMD_hg}', d)
- proto = "http"
- if "proto" in ud.parm:
- proto = ud.parm["proto"]
+ proto = ud.parm.get('proto', 'http')
host = ud.host
if proto == "file":
@@ -116,34 +117,41 @@ class Hg(Fetch):
def go(self, loc, ud, d):
"""Fetch url"""
- bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
+ logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
updatecmd = self._buildhgcommand(ud, d, "pull")
- bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
+ logger.info("Update " + loc)
# update sources there
os.chdir(ud.moddir)
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd)
+ logger.debug(1, "Running %s", updatecmd)
runfetchcmd(updatecmd, d)
else:
fetchcmd = self._buildhgcommand(ud, d, "fetch")
- bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+ logger.info("Fetch " + loc)
# check out sources there
bb.mkdirhier(ud.pkgdir)
os.chdir(ud.pkgdir)
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % fetchcmd)
+ logger.debug(1, "Running %s", fetchcmd)
runfetchcmd(fetchcmd, d)
# Even when we clone (fetch), we still need to update as hg's clone
# won't checkout the specified revision if its on a branch
updatecmd = self._buildhgcommand(ud, d, "update")
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd)
+ os.chdir(ud.moddir)
+ logger.debug(1, "Running %s", updatecmd)
runfetchcmd(updatecmd, d)
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude '.hg' --exclude '.hgtags'"
+
os.chdir(ud.pkgdir)
try:
- runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
except:
t, v, tb = sys.exc_info()
try:
@@ -152,7 +160,7 @@ class Hg(Fetch):
pass
raise t, v, tb
- def suppports_srcrev(self):
+ def supports_srcrev(self):
return True
def _latest_revision(self, url, ud, d):
diff --git a/bitbake/lib/bb/fetch/local.py b/bitbake/lib/bb/fetch/local.py
index 882a2c460..6aa9e4576 100644
--- a/bitbake/lib/bb/fetch/local.py
+++ b/bitbake/lib/bb/fetch/local.py
@@ -66,7 +66,7 @@ class Local(Fetch):
Check the status of the url
"""
if urldata.localpath.find("*") != -1:
- bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s looks like a glob and was therefore not checked." % url)
+ logger.info("URL %s looks like a glob and was therefore not checked.", url)
return True
if os.path.exists(urldata.localpath):
return True
diff --git a/bitbake/lib/bb/fetch/osc.py b/bitbake/lib/bb/fetch/osc.py
index ed773939b..8e0423d76 100644
--- a/bitbake/lib/bb/fetch/osc.py
+++ b/bitbake/lib/bb/fetch/osc.py
@@ -8,8 +8,10 @@ Based on the svn "Fetch" implementation.
import os
import sys
+import logging
import bb
from bb import data
+from bb import utils
from bb.fetch import Fetch
from bb.fetch import FetchError
from bb.fetch import MissingParameterError
@@ -32,10 +34,7 @@ class Osc(Fetch):
ud.module = ud.parm["module"]
# Create paths to osc checkouts
- relpath = ud.path
- if relpath.startswith('/'):
- # Remove leading slash as os.path.join can't cope
- relpath = relpath[1:]
+ relpath = self._strip_leading_slashes(ud.path)
ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
@@ -61,9 +60,7 @@ class Osc(Fetch):
basecmd = data.expand('${FETCHCMD_osc}', d)
- proto = "ocs"
- if "proto" in ud.parm:
- proto = ud.parm["proto"]
+ proto = ud.parm.get('proto', 'ocs')
options = []
@@ -72,10 +69,7 @@ class Osc(Fetch):
if ud.revision:
options.append("-r %s" % ud.revision)
- coroot = ud.path
- if coroot.startswith('/'):
- # Remove leading slash as os.path.join can't cope
- coroot= coroot[1:]
+ coroot = self._strip_leading_slashes(ud.path)
if command is "fetch":
osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
@@ -91,22 +85,22 @@ class Osc(Fetch):
Fetch url
"""
- bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
+ logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
oscupdatecmd = self._buildosccommand(ud, d, "update")
- bb.msg.note(1, bb.msg.domain.Fetcher, "Update "+ loc)
+ logger.info("Update "+ loc)
# update sources there
os.chdir(ud.moddir)
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscupdatecmd)
+ logger.debug(1, "Running %s", oscupdatecmd)
runfetchcmd(oscupdatecmd, d)
else:
oscfetchcmd = self._buildosccommand(ud, d, "fetch")
- bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+ logger.info("Fetch " + loc)
# check out sources there
bb.mkdirhier(ud.pkgdir)
os.chdir(ud.pkgdir)
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscfetchcmd)
+ logger.debug(1, "Running %s", oscfetchcmd)
runfetchcmd(oscfetchcmd, d)
os.chdir(os.path.join(ud.pkgdir + ud.path))
@@ -129,9 +123,8 @@ class Osc(Fetch):
Generate a .oscrc to be used for this run.
"""
- config_path = "%s/oscrc" % data.expand('${OSCDIR}', d)
- if (os.path.exists(config_path)):
- os.remove(config_path)
+ config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
+ bb.utils.remove(config_path)
f = open(config_path, 'w')
f.write("[general]\n")
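
Hunks like the oscrc handling above replace explicit os.path.exists()/os.remove() pairs and rm -rf shell calls with bb.utils.remove() and bb.utils.prunedir(). The behaviour those calls are relied on for is tolerant deletion; the sketch below is a stand-in illustrating that behaviour, not the bb.utils implementation:

    import errno
    import os

    def remove_quiet(path):
        """Illustrative stand-in: delete a file, ignoring its absence."""
        try:
            os.remove(path)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise

    remove_quiet("/tmp/oscrc-that-may-not-exist")   # hypothetical path; no error if missing
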
diff --git a/bitbake/lib/bb/fetch/perforce.py b/bitbake/lib/bb/fetch/perforce.py
index 1c74cff34..222ed7eaa 100644
--- a/bitbake/lib/bb/fetch/perforce.py
+++ b/bitbake/lib/bb/fetch/perforce.py
@@ -27,10 +27,12 @@ BitBake build tools.
from future_builtins import zip
import os
+import logging
import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
+from bb.fetch import logger
class Perforce(Fetch):
def supports(self, url, ud, d):
@@ -86,10 +88,10 @@ class Perforce(Fetch):
depot += "@%s" % (p4date)
p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
+ logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
cset = p4file.readline().strip()
- bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset))
+ logger.debug(1, "READ %s", cset)
if not cset:
return -1
@@ -111,8 +113,7 @@ class Perforce(Fetch):
if which != -1:
base = path[:which]
- if base[0] == "/":
- base = base[1:]
+ base = self._strip_leading_slashes(base)
cset = Perforce.getcset(d, path, host, user, pswd, parm)
@@ -132,10 +133,7 @@ class Perforce(Fetch):
else:
path = depot
- if "module" in parm:
- module = parm["module"]
- else:
- module = os.path.basename(path)
+ module = parm.get('module', os.path.basename(path))
localdata = data.createCopy(d)
data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
@@ -155,13 +153,13 @@ class Perforce(Fetch):
p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
# create temp directory
- bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
+ logger.debug(2, "Fetch: creating temporary directory")
bb.mkdirhier(data.expand('${WORKDIR}', localdata))
data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
tmpfile = tmppipe.readline().strip()
if not tmpfile:
- bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
+ logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
raise FetchError(module)
if "label" in parm:
@@ -171,12 +169,12 @@ class Perforce(Fetch):
depot = "%s@%s" % (depot, cset)
os.chdir(tmpfile)
- bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
- bb.msg.note(1, bb.msg.domain.Fetcher, "%s%s files %s" % (p4cmd, p4opt, depot))
+ logger.info("Fetch " + loc)
+ logger.info("%s%s files %s", p4cmd, p4opt, depot)
p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
if not p4file:
- bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to get the P4 files from %s" % (depot))
+ logger.error("Fetch: unable to get the P4 files from %s", depot)
raise FetchError(module)
count = 0
@@ -194,7 +192,7 @@ class Perforce(Fetch):
count = count + 1
if count == 0:
- bb.msg.error(bb.msg.domain.Fetcher, "Fetch: No files gathered from the P4 fetch")
+ logger.error("Fetch: No files gathered from the P4 fetch")
raise FetchError(module)
myret = os.system("tar -czf %s %s" % (ud.localpath, module))
@@ -205,4 +203,4 @@ class Perforce(Fetch):
pass
raise FetchError(module)
# cleanup
- os.system('rm -rf %s' % tmpfile)
+ bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch/repo.py b/bitbake/lib/bb/fetch/repo.py
index 883310b01..03642e7a0 100644
--- a/bitbake/lib/bb/fetch/repo.py
+++ b/bitbake/lib/bb/fetch/repo.py
@@ -45,24 +45,11 @@ class Repo(Fetch):
"master".
"""
- if "protocol" in ud.parm:
- ud.proto = ud.parm["protocol"]
- else:
- ud.proto = "git"
-
- if "branch" in ud.parm:
- ud.branch = ud.parm["branch"]
- else:
- ud.branch = "master"
-
- if "manifest" in ud.parm:
- manifest = ud.parm["manifest"]
- if manifest.endswith(".xml"):
- ud.manifest = manifest
- else:
- ud.manifest = manifest + ".xml"
- else:
- ud.manifest = "default.xml"
+ ud.proto = ud.parm.get('protocol', 'git')
+ ud.branch = ud.parm.get('branch', 'master')
+ ud.manifest = ud.parm.get('manifest', 'default.xml')
+ if not ud.manifest.endswith('.xml'):
+ ud.manifest += '.xml'
ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
@@ -72,7 +59,7 @@ class Repo(Fetch):
"""Fetch url"""
if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
- bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping repo init / sync." % ud.localpath)
+ logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
return
gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
@@ -92,10 +79,16 @@ class Repo(Fetch):
runfetchcmd("repo sync", d)
os.chdir(codir)
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude '.repo' --exclude '.git'"
+
# Create a cache
- runfetchcmd("tar --exclude=.repo --exclude=.git -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
- def suppports_srcrev(self):
+ def supports_srcrev(self):
return False
def _build_revision(self, url, ud, d):
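
In the repo fetcher the protocol/branch/manifest if/else ladders collapse into ud.parm.get() lookups with defaults, plus a normalisation of the manifest name. The same reduction, run standalone with a sample parameter dict:

    parm = {"manifest": "poky", "branch": "bernard"}    # sample SRC_URI parameters

    proto = parm.get('protocol', 'git')
    branch = parm.get('branch', 'master')
    manifest = parm.get('manifest', 'default.xml')
    if not manifest.endswith('.xml'):
        manifest += '.xml'

    print(proto, branch, manifest)    # git bernard poky.xml
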
diff --git a/bitbake/lib/bb/fetch/svk.py b/bitbake/lib/bb/fetch/svk.py
index a17ac04d2..595a9da25 100644
--- a/bitbake/lib/bb/fetch/svk.py
+++ b/bitbake/lib/bb/fetch/svk.py
@@ -26,11 +26,13 @@ This implementation is for svk. It is based on the svn implementation
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import os
+import logging
import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
from bb.fetch import MissingParameterError
+from bb.fetch import logger
class Svk(Fetch):
"""Class to fetch a module or modules from svk repositories"""
@@ -46,18 +48,14 @@ class Svk(Fetch):
else:
ud.module = ud.parm["module"]
- ud.revision = ""
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
+ ud.revision = ud.parm.get('rev', "")
ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def forcefetch(self, url, ud, d):
- if (ud.date == "now"):
- return True
- return False
+ return ud.date == "now"
def go(self, loc, ud, d):
"""Fetch urls"""
@@ -72,19 +70,19 @@ class Svk(Fetch):
# create temp directory
localdata = data.createCopy(d)
data.update_data(localdata)
- bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
+ logger.debug(2, "Fetch: creating temporary directory")
bb.mkdirhier(data.expand('${WORKDIR}', localdata))
data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
tmpfile = tmppipe.readline().strip()
if not tmpfile:
- bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
+ logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
raise FetchError(ud.module)
# check out sources there
os.chdir(tmpfile)
- bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd)
+ logger.info("Fetch " + loc)
+ logger.debug(1, "Running %s", svkcmd)
myret = os.system(svkcmd)
if myret != 0:
try:
@@ -103,4 +101,4 @@ class Svk(Fetch):
pass
raise FetchError(ud.module)
# cleanup
- os.system('rm -rf %s' % tmpfile)
+ bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py
index 375e8df05..8f053abf7 100644
--- a/bitbake/lib/bb/fetch/svn.py
+++ b/bitbake/lib/bb/fetch/svn.py
@@ -25,12 +25,14 @@ BitBake 'Fetch' implementation for svn.
import os
import sys
+import logging
import bb
from bb import data
from bb.fetch import Fetch
from bb.fetch import FetchError
from bb.fetch import MissingParameterError
from bb.fetch import runfetchcmd
+from bb.fetch import logger
class Svn(Fetch):
"""Class to fetch a module or modules from svn repositories"""
@@ -47,10 +49,7 @@ class Svn(Fetch):
ud.module = ud.parm["module"]
# Create paths to svn checkouts
- relpath = ud.path
- if relpath.startswith('/'):
- # Remove leading slash as os.path.join can't cope
- relpath = relpath[1:]
+ relpath = self._strip_leading_slashes(ud.path)
ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
ud.moddir = os.path.join(ud.pkgdir, ud.module)
@@ -92,9 +91,7 @@ class Svn(Fetch):
basecmd = data.expand('${FETCHCMD_svn}', d)
- proto = "svn"
- if "proto" in ud.parm:
- proto = ud.parm["proto"]
+ proto = ud.parm.get('proto', 'svn')
svn_rsh = None
if proto == "svn+ssh" and "rsh" in ud.parm:
@@ -136,28 +133,34 @@ class Svn(Fetch):
def go(self, loc, ud, d):
"""Fetch url"""
- bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
+ logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
svnupdatecmd = self._buildsvncommand(ud, d, "update")
- bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
+ logger.info("Update " + loc)
# update sources there
os.chdir(ud.moddir)
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupdatecmd)
+ logger.debug(1, "Running %s", svnupdatecmd)
runfetchcmd(svnupdatecmd, d)
else:
svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
- bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+ logger.info("Fetch " + loc)
# check out sources there
bb.mkdirhier(ud.pkgdir)
os.chdir(ud.pkgdir)
- bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnfetchcmd)
+ logger.debug(1, "Running %s", svnfetchcmd)
runfetchcmd(svnfetchcmd, d)
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude '.svn'"
+
os.chdir(ud.pkgdir)
# tar them up to a defined filename
try:
- runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
except:
t, v, tb = sys.exc_info()
try:
@@ -166,7 +169,7 @@ class Svn(Fetch):
pass
raise t, v, tb
- def suppports_srcrev(self):
+ def supports_srcrev(self):
return True
def _revision_key(self, url, ud, d):
@@ -179,7 +182,7 @@ class Svn(Fetch):
"""
Return the latest upstream revision number
"""
- bb.msg.debug(2, bb.msg.domain.Fetcher, "SVN fetcher hitting network for %s" % url)
+ logger.debug(2, "SVN fetcher hitting network for %s", url)
output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
diff --git a/bitbake/lib/bb/fetch/wget.py b/bitbake/lib/bb/fetch/wget.py
index dcc58c75e..4d4bdfd49 100644
--- a/bitbake/lib/bb/fetch/wget.py
+++ b/bitbake/lib/bb/fetch/wget.py
@@ -26,12 +26,11 @@ BitBake build tools.
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import os
+import logging
import bb
+import urllib
from bb import data
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-from bb.fetch import encodeurl, decodeurl
-from bb.fetch import runfetchcmd
+from bb.fetch import Fetch, FetchError, encodeurl, decodeurl, logger, runfetchcmd
class Wget(Fetch):
"""Class to fetch urls via 'wget'"""
@@ -45,7 +44,7 @@ class Wget(Fetch):
url = encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
ud.basename = os.path.basename(ud.path)
- ud.localfile = data.expand(os.path.basename(url), d)
+ ud.localfile = data.expand(urllib.unquote(ud.basename), d)
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
@@ -68,15 +67,14 @@ class Wget(Fetch):
fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
-
- bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
- bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
+ logger.info("fetch " + uri)
+ logger.debug(2, "executing " + fetchcmd)
runfetchcmd(fetchcmd, d)
# Sanity check since wget can pretend it succeed when it didn't
# Also, this used to happen if sourceforge sent us to the mirror page
if not os.path.exists(ud.localpath) and not checkonly:
- bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath))
+ logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
return False
return True
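
The wget change names the downloaded file after the unquoted basename of the URL path, so percent-encoded characters do not leak into DL_DIR file names. The patch itself uses Python 2's urllib.unquote; the sketch below uses the Python 3 spelling purely so it runs standalone:

    import os
    from urllib.parse import unquote, urlparse

    url = "http://example.com/dist/foo%2Bbar-1.0.tar.gz"
    basename = os.path.basename(urlparse(url).path)
    print(basename)             # foo%2Bbar-1.0.tar.gz
    print(unquote(basename))    # foo+bar-1.0.tar.gz -> used as the local file name
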
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 000000000..ef9d75f3f
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,985 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+from __future__ import absolute_import
+from __future__ import print_function
+import os, re
+import logging
+import bb
+from bb import data
+from bb import persist_data
+from bb import utils
+
+__version__ = "2"
+
+logger = logging.getLogger("BitBake.Fetcher")
+
+class BBFetchException(Exception):
+ """Class all fetch exceptions inherit from"""
+ def __init__(self, message):
+ self.msg = message
+ Exception.__init__(self, message)
+
+ def __str__(self):
+ return self.msg
+
+class MalformedUrl(BBFetchException):
+ """Exception raised when encountering an invalid url"""
+ def __init__(self, url):
+ msg = "The URL: '%s' is invalid and cannot be interpreted" % url
+ self.url = url
+ BBFetchException.__init__(self, msg)
+ self.args = url
+
+class FetchError(BBFetchException):
+ """General fetcher exception when something happens incorrectly"""
+ def __init__(self, message, url = None):
+ msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
+ self.url = url
+ BBFetchException.__init__(self, msg)
+ self.args = (message, url)
+
+class UnpackError(BBFetchException):
+ """General fetcher exception when something happens incorrectly when unpacking"""
+ def __init__(self, message, url):
+ msg = "Unpack failure for URL: '%s'. %s" % (url, message)
+ self.url = url
+ BBFetchException.__init__(self, msg)
+ self.args = (message, url)
+
+class NoMethodError(BBFetchException):
+ """Exception raised when there is no method to obtain a supplied url or set of urls"""
+ def __init__(self, url):
+ msg = "Could not find a fetcher which supports the URL: '%s'" % url
+ self.url = url
+ BBFetchException.__init__(self, msg)
+ self.args = url
+
+class MissingParameterError(BBFetchException):
+ """Exception raised when a fetch method is missing a critical parameter in the url"""
+ def __init__(self, missing, url):
+ msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
+ self.url = url
+ self.missing = missing
+ BBFetchException.__init__(self, msg)
+ self.args = (missing, url)
+
+class ParameterError(BBFetchException):
+ """Exception raised when a url cannot be processed due to invalid parameters."""
+ def __init__(self, message, url):
+ msg = "URL: '%s' has invalid parameters. %s" % (url, message)
+ self.url = url
+ BBFetchException.__init__(self, msg)
+ self.args = (message, url)
+
+class MD5SumError(BBFetchException):
+ """Exception raised when the MD5 checksum of a downloaded file does not match the expected value"""
+ def __init__(self, path, wanted, got, url):
+ msg = "File: '%s' has md5 sum %s when %s was expected (from URL: '%s')" % (path, got, wanted, url)
+ self.url = url
+ self.path = path
+ self.wanted = wanted
+ self.got = got
+ BBFetchException.__init__(self, msg)
+ self.args = (path, wanted, got, url)
+
+class SHA256SumError(MD5SumError):
+ """Exception raised when the SHA256 checksum of a downloaded file does not match the expected value"""
+
+def decodeurl(url):
+ """Decodes a URL into the tokens (scheme, network location, path,
+ user, password, parameters).
+ """
+
+ m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
+ if not m:
+ raise MalformedUrl(url)
+
+ type = m.group('type')
+ location = m.group('location')
+ if not location:
+ raise MalformedUrl(url)
+ user = m.group('user')
+ parm = m.group('parm')
+
+ locidx = location.find('/')
+ if locidx != -1 and type.lower() != 'file':
+ host = location[:locidx]
+ path = location[locidx:]
+ else:
+ host = ""
+ path = location
+ if user:
+ m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
+ if m:
+ user = m.group('user')
+ pswd = m.group('pswd')
+ else:
+ user = ''
+ pswd = ''
+
+ p = {}
+ if parm:
+ for s in parm.split(';'):
+ s1, s2 = s.split('=')
+ p[s1] = s2
+
+ return (type, host, path, user, pswd, p)
+
+def encodeurl(decoded):
+ """Encodes a URL from tokens (scheme, network location, path,
+ user, password, parameters).
+ """
+
+ (type, host, path, user, pswd, p) = decoded
+
+ if not path:
+ raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
+ if not type:
+ raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
+ url = '%s://' % type
+ if user and type != "file":
+ url += "%s" % user
+ if pswd:
+ url += ":%s" % pswd
+ url += "@"
+ if host and type != "file":
+ url += "%s" % host
+ url += "%s" % path
+ if p:
+ for parm in p:
+ url += ";%s=%s" % (parm, p[parm])
+
+ return url
+
+def uri_replace(ud, uri_find, uri_replace, d):
+ if not ud.url or not uri_find or not uri_replace:
+ logger.debug(1, "uri_replace: passed an undefined value, not replacing")
+ uri_decoded = list(decodeurl(ud.url))
+ uri_find_decoded = list(decodeurl(uri_find))
+ uri_replace_decoded = list(decodeurl(uri_replace))
+ result_decoded = ['', '', '', '', '', {}]
+ for i in uri_find_decoded:
+ loc = uri_find_decoded.index(i)
+ result_decoded[loc] = uri_decoded[loc]
+ if isinstance(i, basestring):
+ if (re.match(i, uri_decoded[loc])):
+ result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
+ if uri_find_decoded.index(i) == 2:
+ if ud.mirrortarball:
+ result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(ud.mirrortarball))
+ elif ud.localpath:
+ result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(ud.localpath))
+ else:
+ return ud.url
+ return encodeurl(result_decoded)
+
+methods = []
+urldata_cache = {}
+saved_headrevs = {}
+
+def fetcher_init(d):
+ """
+ Called to initialize the fetchers once the configuration data is known.
+ Calls before this must not hit the cache.
+ """
+ pd = persist_data.persist(d)
+ # When to drop SCM head revisions controlled by user policy
+ srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
+ if srcrev_policy == "cache":
+ logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+ elif srcrev_policy == "clear":
+ logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+ try:
+ bb.fetch2.saved_headrevs = pd['BB_URI_HEADREVS'].items()
+ except:
+ pass
+ del pd['BB_URI_HEADREVS']
+ else:
+ raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
+
+ for m in methods:
+ if hasattr(m, "init"):
+ m.init(d)
+
+def fetcher_compare_revisions(d):
+ """
+ Compare the revisions in the persistent cache with the current values and
+ return true/false on whether they've changed.
+ """
+
+ pd = persist_data.persist(d)
+ data = pd['BB_URI_HEADREVS'].items()
+ data2 = bb.fetch2.saved_headrevs
+
+ changed = False
+ for key in data:
+ if key not in data2 or data2[key] != data[key]:
+ logger.debug(1, "%s changed", key)
+ changed = True
+ return True
+ else:
+ logger.debug(2, "%s did not change", key)
+ return False
+
+def mirror_from_string(data):
+ return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
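+
+# Example (illustrative): "http://.*/.* http://mirror.example.com/ \n git://.*/.* http://mirror.example.com/"
+# becomes [['http://.*/.*', 'http://mirror.example.com/'], ['git://.*/.*', 'http://mirror.example.com/']],
+# i.e. a list of (find, replace) pairs as consumed by try_mirrors() below.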
+
+def verify_checksum(u, ud, d):
+ """
+ verify the MD5 and SHA256 checksum for downloaded src
+
+ return value:
+ - True: checksum matched
+ - False: checksum unmatched
+
+ if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decide the return value.
+ if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as
+ matched
+ """
+
+ if not ud.type in ["http", "https", "ftp", "ftps"]:
+ return
+
+ md5data = bb.utils.md5_file(ud.localpath)
+ sha256data = bb.utils.sha256_file(ud.localpath)
+
+ if (ud.md5_expected == None or ud.sha256_expected == None):
+ logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
+ 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
+ ud.localpath, ud.md5_name, md5data,
+ ud.sha256_name, sha256data)
+ if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
+ raise FetchError("No checksum specified for %s." % u, u)
+ return
+
+ if ud.md5_expected != md5data:
+ raise MD5SumError(ud.localpath, ud.md5_expected, md5data, u)
+
+ if ud.sha256_expected != sha256data:
+ raise SHA256SumError(ud.localpath, ud.sha256_expected, sha256data, u)
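+
+# The expected checksums come from SRC_URI varflags in the recipe, e.g. (illustrative):
+#   SRC_URI[md5sum] = "..."
+#   SRC_URI[sha256sum] = "..."
+# or SRC_URI[<name>.md5sum] / SRC_URI[<name>.sha256sum] when a name= parameter is used.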
+
+def subprocess_setup():
+ import signal
+ # Python installs a SIGPIPE handler by default. This is usually not what
+ # non-Python subprocesses expect.
+ # SIGPIPE errors are known issues with gzip/bash
+ signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+
+def get_autorev(d):
+ # do not cache the src rev in the autorev case (unless the SRCREV policy is "cache")
+ if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
+ bb.data.setVar('__BB_DONT_CACHE', '1', d)
+ return "AUTOINC"
+
+def get_srcrev(d):
+ """
+ Return the version string for the current package
+ (usually to be used as PV)
+ Most packages only have one SCM so we just pass on the call.
+ In the multi SCM case, we build a value based on SRCREV_FORMAT which must
+ have been set.
+ """
+
+ scms = []
+ fetcher = Fetch(bb.data.getVar('SRC_URI', d, True).split(), d)
+ urldata = fetcher.ud
+ for u in urldata:
+ if urldata[u].method.supports_srcrev():
+ scms.append(u)
+
+ if len(scms) == 0:
+ raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
+
+ if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
+ return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d, urldata[scms[0]].names[0])
+
+ #
+ # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
+ #
+ format = bb.data.getVar('SRCREV_FORMAT', d, True)
+ if not format:
+ raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
+
+ for scm in scms:
+ ud = urldata[scm]
+ for name in ud.names:
+ rev = ud.method.sortable_revision(scm, ud, d, name)
+ format = format.replace(name, rev)
+
+ return format
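+
+# Illustrative example: with two named SCMs in SRC_URI (name=machine and name=meta),
+# setting SRCREV_FORMAT = "machine_meta" makes get_srcrev() replace each name with
+# its (sortable) revision, giving something like "3+abcd1234_7+ef567890".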
+
+def localpath(url, d):
+ fetcher = bb.fetch2.Fetch([url], d)
+ return fetcher.localpath(url)
+
+def runfetchcmd(cmd, d, quiet = False, cleanup = []):
+ """
+ Run cmd returning the command output
+ Raise an error if interrupted or cmd fails
+ Optionally echo command output to stdout
+ Optionally remove the files/directories listed in cleanup upon failure
+ """
+
+ # Need to export PATH as binary could be in metadata paths
+ # rather than host provided
+ # Also include some other variables.
+ # FIXME: Should really include all exported variables?
+ exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST',
+ 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy',
+ 'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy',
+ 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']
+
+ for var in exportvars:
+ val = data.getVar(var, d, True)
+ if val:
+ cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
+
+ logger.debug(1, "Running %s", cmd)
+
+ # redirect stderr to stdout
+ stdout_handle = os.popen(cmd + " 2>&1", "r")
+ output = ""
+
+ while True:
+ line = stdout_handle.readline()
+ if not line:
+ break
+ if not quiet:
+ print(line, end=' ')
+ output += line
+
+ status = stdout_handle.close() or 0
+ signal = status >> 8
+ exitstatus = status & 0xff
+
+ if (signal or status != 0):
+ for f in cleanup:
+ try:
+ bb.utils.remove(f, True)
+ except OSError:
+ pass
+
+ if signal:
+ raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
+ elif status != 0:
+ raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output))
+
+ return output
+
+def check_network_access(d, info = ""):
+ """
+ log remote network access, and error if BB_NO_NETWORK is set
+ """
+ if bb.data.getVar("BB_NO_NETWORK", d, True) == "1":
+ raise FetchError("BB_NO_NETWORK is set, but the fetcher code attempted network access with the command %s" % info)
+ else:
+ logger.debug(1, "Fetcher accessed the network with the command %s" % info)
+
+def try_mirrors(d, origud, mirrors, check = False):
+ """
+ Try to use a mirrored version of the sources.
+ This method will be automatically called before the fetchers go.
+
+ d is a bb.data instance
+ origud is the FetchData of the original uri we're trying to download
+ mirrors is the list of (find, replace) mirror pairs we're going to try
+ """
+ ld = d.createCopy()
+ for (find, replace) in mirrors:
+ newuri = uri_replace(origud, find, replace, ld)
+ if newuri == origud.url:
+ continue
+ try:
+ ud = FetchData(newuri, ld)
+ ud.setup_localpath(ld)
+
+ if check:
+ found = ud.method.checkstatus(newuri, ud, ld)
+ if found:
+ return found
+ continue
+
+ if ud.method.need_update(newuri, ud, ld):
+ ud.method.download(newuri, ud, ld)
+ if hasattr(ud.method,"build_mirror_data"):
+ ud.method.build_mirror_data(newuri, ud, ld)
+
+ if not ud.localpath or not os.path.exists(ud.localpath):
+ continue
+
+ if ud.localpath == origud.localpath:
+ return ud.localpath
+
+ # We may be obtaining a mirror tarball which needs further processing by the real fetcher
+ # If that tarball is a local file:// we need to provide a symlink to it
+ dldir = ld.getVar("DL_DIR", True)
+ if not ud.localpath.startswith(dldir):
+ if os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
+ os.symlink(ud.localpath, os.path.join(dldir, os.path.basename(ud.localpath)))
+ return None
+ # Otherwise the result is a local file:// and we symlink to it
+ if not os.path.exists(origud.localpath):
+ os.symlink(ud.localpath, origud.localpath)
+ return ud.localpath
+
+ except bb.fetch2.BBFetchException:
+ logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
+ try:
+ if os.path.isfile(ud.localpath):
+ bb.utils.remove(ud.localpath)
+ except UnboundLocalError:
+ pass
+ continue
+ return None
+
+def srcrev_internal_helper(ud, d, name):
+ """
+ Return:
+ a) a source revision if specified
+ b) latest revision if SRCREV="AUTOINC"
+ c) None if not specified
+ """
+
+ if 'rev' in ud.parm:
+ return ud.parm['rev']
+
+ if 'tag' in ud.parm:
+ return ud.parm['tag']
+
+ rev = None
+ if name != '':
+ pn = data.getVar("PN", d, True)
+ rev = data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, True)
+ if not rev:
+ rev = data.getVar("SRCREV_%s" % name, d, True)
+ if not rev:
+ rev = data.getVar("SRCREV", d, True)
+ if rev == "INVALID":
+ raise FetchError("Please set SRCREV to a valid value", ud.url)
+ if rev == "AUTOINC":
+ rev = ud.method.latest_revision(ud.url, ud, d, name)
+
+ return rev
+
+class FetchData(object):
+ """
+ A class which represents the fetcher state for a given URI.
+ """
+ def __init__(self, url, d):
+ # localpath is the location of a downloaded result. If not set, the file is local.
+ self.localfile = ""
+ self.localpath = None
+ self.lockfile = None
+ self.mirrortarball = None
+ (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
+ self.date = self.getSRCDate(d)
+ self.url = url
+ if not self.user and "user" in self.parm:
+ self.user = self.parm["user"]
+ if not self.pswd and "pswd" in self.parm:
+ self.pswd = self.parm["pswd"]
+ self.setup = False
+
+ if "name" in self.parm:
+ self.md5_name = "%s.md5sum" % self.parm["name"]
+ self.sha256_name = "%s.sha256sum" % self.parm["name"]
+ else:
+ self.md5_name = "md5sum"
+ self.sha256_name = "sha256sum"
+ self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
+ self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
+
+ self.names = self.parm.get("name",'default').split(',')
+
+ self.method = None
+ for m in methods:
+ if m.supports(url, self, d):
+ self.method = m
+ break
+
+ if not self.method:
+ raise NoMethodError(url)
+
+ if self.method.supports_srcrev():
+ self.revisions = {}
+ for name in self.names:
+ self.revisions[name] = srcrev_internal_helper(self, d, name)
+
+ # compatibility for the case where no name parameter is specified
+ if len(self.names) == 1:
+ self.revision = self.revisions[self.names[0]]
+
+ if hasattr(self.method, "urldata_init"):
+ self.method.urldata_init(self, d)
+
+ if "localpath" in self.parm:
+ # if user sets localpath for file, use it instead.
+ self.localpath = self.parm["localpath"]
+ self.basename = os.path.basename(self.localpath)
+ elif self.localfile:
+ self.localpath = self.method.localpath(self.url, self, d)
+
+ if self.localfile and self.localpath:
+ # Note: These files should always be in DL_DIR whereas localpath may not be.
+ basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d)
+ self.donestamp = basepath + '.done'
+ self.lockfile = basepath + '.lock'
+
+ def setup_localpath(self, d):
+ if not self.localpath:
+ self.localpath = self.method.localpath(self.url, self, d)
+
+ def getSRCDate(self, d):
+ """
+ Return the SRC Date for the component
+
+ d the bb.data module
+ """
+ if "srcdate" in self.parm:
+ return self.parm['srcdate']
+
+ pn = data.getVar("PN", d, True)
+
+ if pn:
+ return data.getVar("SRCDATE_%s" % pn, d, True) or data.getVar("SRCDATE", d, True) or data.getVar("DATE", d, True)
+
+ return data.getVar("SRCDATE", d, True) or data.getVar("DATE", d, True)
+
+class FetchMethod(object):
+ """Base class for 'fetch'ing data"""
+
+ def __init__(self, urls = []):
+ self.urls = []
+
+ def supports(self, url, urldata, d):
+ """
+ Check to see if this fetch class supports a given url.
+ """
+ return 0
+
+ def localpath(self, url, urldata, d):
+ """
+ Return the local filename of a given url assuming a successful fetch.
+ Can also set up variables in urldata for use in download() (saving code
+ duplication and duplicate code execution)
+ """
+ return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)
+
+ def _strip_leading_slashes(self, relpath):
+ """
+ Remove leading slash as os.path.join can't cope
+ """
+ while os.path.isabs(relpath):
+ relpath = relpath[1:]
+ return relpath
+
+ def setUrls(self, urls):
+ self.__urls = urls
+
+ def getUrls(self):
+ return self.__urls
+
+ urls = property(getUrls, setUrls, None, "Urls property")
+
+ def need_update(self, url, ud, d):
+ """
+ Force a fetch, even if localpath exists?
+ """
+ if os.path.exists(ud.localpath):
+ return False
+ return True
+
+ def supports_srcrev(self):
+ """
+ The fetcher supports auto source revisions (SRCREV)
+ """
+ return False
+
+ def download(self, url, urldata, d):
+ """
+ Fetch urls
+ Assumes localpath was called first
+ """
+ raise NoMethodError(url)
+
+ def unpack(self, urldata, rootdir, data):
+ import subprocess
+ iterate = False
+ file = urldata.localpath
+ dots = file.split(".")
+ if dots[-1] in ['gz', 'bz2', 'Z']:
+ efile = os.path.join(bb.data.getVar('WORKDIR', data, True),os.path.basename('.'.join(dots[0:-1])))
+ else:
+ efile = file
+ cmd = None
+
+ if file.endswith('.tar'):
+ cmd = 'tar x --no-same-owner -f %s' % file
+ elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
+ cmd = 'tar xz --no-same-owner -f %s' % file
+ elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
+ cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
+ elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
+ cmd = 'gzip -dc %s > %s' % (file, efile)
+ elif file.endswith('.bz2'):
+ cmd = 'bzip2 -dc %s > %s' % (file, efile)
+ elif file.endswith('.tar.xz'):
+ cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
+ elif file.endswith('.xz'):
+ cmd = 'xz -dc %s > %s' % (file, efile)
+ elif file.endswith('.zip') or file.endswith('.jar'):
+ cmd = 'unzip -q -o'
+ if 'dos' in urldata.parm:
+ cmd = '%s -a' % cmd
+ cmd = "%s '%s'" % (cmd, file)
+ elif file.endswith('.src.rpm') or file.endswith('.srpm'):
+ if 'unpack' in urldata.parm:
+ unpack_file = ("%s" % urldata.parm['unpack'])
+ cmd = 'rpm2cpio.sh %s | cpio -i %s' % (file, unpack_file)
+ iterate = True
+ iterate_file = unpack_file
+ else:
+ cmd = 'rpm2cpio.sh %s | cpio -i' % (file)
+ else:
+ # If file == dest, then avoid any copies, as we already put the file into dest!
+ dest = os.path.join(rootdir, os.path.basename(file))
+ if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
+ if os.path.isdir(file):
+ filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, True))
+ destdir = "."
+ if file[0:len(filesdir)] == filesdir:
+ destdir = file[len(filesdir):file.rfind('/')]
+ destdir = destdir.strip('/')
+ if len(destdir) < 1:
+ destdir = "."
+ elif not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
+ os.makedirs("%s/%s" % (rootdir, destdir))
+ cmd = 'cp -pPR %s %s/%s/' % (file, rootdir, destdir)
+ else:
+ if not 'patch' in urldata.parm:
+ # The "destdir" handling was specifically done for FILESPATH
+ # items. So, only do so for file:// entries.
+ if urldata.type == "file" and urldata.path.find("/") != -1:
+ destdir = urldata.path.rsplit("/", 1)[0]
+ else:
+ destdir = "."
+ bb.mkdirhier("%s/%s" % (rootdir, destdir))
+ cmd = 'cp %s %s/%s/' % (file, rootdir, destdir)
+
+ if not cmd:
+ return
+
+ # Change to subdir before executing command
+ save_cwd = os.getcwd()
+ os.chdir(rootdir)
+ if 'subdir' in urldata.parm:
+ newdir = ("%s/%s" % (rootdir, urldata.parm['subdir']))
+ bb.mkdirhier(newdir)
+ os.chdir(newdir)
+
+ cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, True), cmd)
+ bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
+ ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
+
+ os.chdir(save_cwd)
+
+ if ret != 0:
+ raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
+
+ if iterate is True:
+ iterate_urldata = urldata
+ iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
+ self.unpack(urldata, rootdir, data)
+
+ return
+
+ def try_premirror(self, url, urldata, d):
+ """
+ Should premirrors be used?
+ """
+ return True
+
+ def checkstatus(self, url, urldata, d):
+ """
+ Check the status of a URL
+ Assumes localpath was called first
+ """
+ logger.info("URL %s could not be checked for status since no method exists.", url)
+ return True
+
+ def localcount_internal_helper(ud, d, name):
+ """
+ Return:
+ a) a locked localcount if specified
+ b) None otherwise
+ """
+
+ localcount = None
+ if name != '':
+ pn = data.getVar("PN", d, True)
+ localcount = data.getVar("LOCALCOUNT_" + name, d, True)
+ if not localcount:
+ localcount = data.getVar("LOCALCOUNT", d, True)
+ return localcount
+
+ localcount_internal_helper = staticmethod(localcount_internal_helper)
+
+ def latest_revision(self, url, ud, d, name):
+ """
+ Look in the cache for the latest revision, if not present ask the SCM.
+ """
+ if not hasattr(self, "_latest_revision"):
+ raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
+
+ pd = persist_data.persist(d)
+ revs = pd['BB_URI_HEADREVS']
+ key = self.generate_revision_key(url, ud, d, name)
+ rev = revs[key]
+ if rev != None:
+ return str(rev)
+
+ revs[key] = rev = self._latest_revision(url, ud, d, name)
+ return rev
+
+ def sortable_revision(self, url, ud, d, name):
+ """
+
+ """
+ if hasattr(self, "_sortable_revision"):
+ return self._sortable_revision(url, ud, d)
+
+ pd = persist_data.persist(d)
+ localcounts = pd['BB_URI_LOCALCOUNT']
+ key = self.generate_revision_key(url, ud, d, name)
+
+ latest_rev = self._build_revision(url, ud, d, name)
+ last_rev = localcounts[key + '_rev']
+ uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
+ count = None
+ if uselocalcount:
+ count = FetchMethod.localcount_internal_helper(ud, d, name)
+ if count is None:
+ count = localcounts[key + '_count'] or "0"
+
+ if last_rev == latest_rev:
+ return str(count + "+" + latest_rev)
+
+ buildindex_provided = hasattr(self, "_sortable_buildindex")
+ if buildindex_provided:
+ count = self._sortable_buildindex(url, ud, d, latest_rev)
+
+ if count is None:
+ count = "0"
+ elif uselocalcount or buildindex_provided:
+ count = str(count)
+ else:
+ count = str(int(count) + 1)
+
+ localcounts[key + '_rev'] = latest_rev
+ localcounts[key + '_count'] = count
+
+ return str(count + "+" + latest_rev)
+
+ def generate_revision_key(self, url, ud, d, name):
+ key = self._revision_key(url, ud, d, name)
+ return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
+
+class Fetch(object):
+ def __init__(self, urls, d, cache = True):
+ if len(urls) == 0:
+ urls = d.getVar("SRC_URI", True).split()
+ self.urls = urls
+ self.d = d
+ self.ud = {}
+
+ fn = bb.data.getVar('FILE', d, True)
+ if cache and fn in urldata_cache:
+ self.ud = urldata_cache[fn]
+
+ for url in urls:
+ if url not in self.ud:
+ self.ud[url] = FetchData(url, d)
+
+ if cache:
+ urldata_cache[fn] = self.ud
+
+ def localpath(self, url):
+ if url not in self.urls:
+ self.ud[url] = FetchData(url, self.d)
+
+ self.ud[url].setup_localpath(self.d)
+ return bb.data.expand(self.ud[url].localpath, self.d)
+
+ def localpaths(self):
+ """
+ Return a list of the local filenames, assuming successful fetch
+ """
+ local = []
+
+ for u in self.urls:
+ ud = self.ud[u]
+ ud.setup_localpath(self.d)
+ local.append(ud.localpath)
+
+ return local
+
+ def download(self, urls = []):
+ """
+ Fetch all urls
+ """
+ if len(urls) == 0:
+ urls = self.urls
+
+ for u in urls:
+ ud = self.ud[u]
+ ud.setup_localpath(self.d)
+ m = ud.method
+ localpath = ""
+
+ if not ud.localfile:
+ continue
+
+ lf = bb.utils.lockfile(ud.lockfile)
+
+ try:
+ if not m.need_update(u, ud, self.d):
+ localpath = ud.localpath
+ elif m.try_premirror(u, ud, self.d):
+ mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
+ localpath = try_mirrors(self.d, ud, mirrors, False)
+
+ if bb.data.getVar("BB_FETCH_PREMIRRORONLY", self.d, True) is not None:
+ bb.data.setVar("BB_NO_NETWORK", "1", self.d)
+
+ if not localpath and m.need_update(u, ud, self.d):
+ try:
+ m.download(u, ud, self.d)
+ if hasattr(m, "build_mirror_data"):
+ m.build_mirror_data(u, ud, self.d)
+ localpath = ud.localpath
+
+ except BBFetchException:
+ # Remove any incomplete fetch
+ if os.path.isfile(ud.localpath):
+ bb.utils.remove(ud.localpath)
+ mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
+ localpath = try_mirrors (self.d, ud, mirrors)
+
+ if not localpath or not os.path.exists(localpath):
+ raise FetchError("Unable to fetch URL %s from any source." % u, u)
+
+ if os.path.exists(ud.donestamp):
+ # Touch the done stamp file to show active use of the download
+ try:
+ os.utime(ud.donestamp, None)
+ except:
+ # Errors aren't fatal here
+ pass
+ else:
+ # Only check the checksums if we've not seen this item before, then create the stamp
+ verify_checksum(u, ud, self.d)
+ open(ud.donestamp, 'w').close()
+
+ finally:
+ bb.utils.unlockfile(lf)
+
+ def checkstatus(self, urls = []):
+ """
+ Check all urls exist upstream
+ """
+
+ if len(urls) == 0:
+ urls = self.urls
+
+ for u in urls:
+ ud = self.ud[u]
+ ud.setup_localpath(self.d)
+ m = ud.method
+ logger.debug(1, "Testing URL %s", u)
+ # First try checking uri, u, from PREMIRRORS
+ mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
+ ret = try_mirrors(self.d, ud, mirrors, True)
+ if not ret:
+ # Next try checking from the original uri, u
+ try:
+ ret = m.checkstatus(u, ud, self.d)
+ except:
+ # Finally, try checking uri, u, from MIRRORS
+ mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
+ ret = try_mirrors (self.d, ud, mirrors, True)
+
+ if not ret:
+ raise FetchError("URL %s doesn't work" % u, u)
+
+ def unpack(self, root, urls = []):
+ """
+ Check all urls exist upstream
+ """
+
+ if len(urls) == 0:
+ urls = self.urls
+
+ for u in urls:
+ ud = self.ud[u]
+ ud.setup_localpath(self.d)
+
+ if bb.data.expand(ud.localpath, self.d) is None:
+ continue
+
+ if ud.lockfile:
+ lf = bb.utils.lockfile(ud.lockfile)
+
+ ud.method.unpack(ud, root, self.d)
+
+ if ud.lockfile:
+ bb.utils.unlockfile(lf)
+
+from . import cvs
+from . import git
+from . import local
+from . import svn
+from . import wget
+from . import svk
+from . import ssh
+from . import perforce
+from . import bzr
+from . import hg
+from . import osc
+from . import repo
+
+methods.append(local.Local())
+methods.append(wget.Wget())
+methods.append(svn.Svn())
+methods.append(git.Git())
+methods.append(cvs.Cvs())
+methods.append(svk.Svk())
+methods.append(ssh.SSH())
+methods.append(perforce.Perforce())
+methods.append(bzr.Bzr())
+methods.append(hg.Hg())
+methods.append(osc.Osc())
+methods.append(repo.Repo())
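+
+# Typical usage (illustrative sketch): download and unpack everything in SRC_URI:
+#   fetcher = Fetch(d.getVar("SRC_URI", True).split(), d)
+#   fetcher.download()
+#   fetcher.unpack(workdir)
+# where d is the datastore and workdir is the directory to unpack into.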
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
new file mode 100644
index 000000000..bb175662b
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/bzr.py
@@ -0,0 +1,141 @@
+"""
+BitBake 'Fetch' implementation for bzr.
+
+"""
+
+# Copyright (C) 2007 Ross Burton
+# Copyright (C) 2007 Richard Purdie
+#
+# Classes for obtaining upstream sources for the
+# BitBake build tools.
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import logging
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Bzr(FetchMethod):
+ def supports(self, url, ud, d):
+ return ud.type in ['bzr']
+
+ def urldata_init(self, ud, d):
+ """
+ init bzr-specific variables within the url data
+ """
+ # Create paths to bzr checkouts
+ relpath = self._strip_leading_slashes(ud.path)
+ ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
+
+ if not ud.revision:
+ ud.revision = self.latest_revision(ud.url, ud, d)
+
+ ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
+
+ def _buildbzrcommand(self, ud, d, command):
+ """
+ Build up a bzr command line based on ud
+ command is "fetch", "update", "revno"
+ """
+
+ basecmd = data.expand('${FETCHCMD_bzr}', d)
+
+ proto = ud.parm.get('proto', 'http')
+
+ bzrroot = ud.host + ud.path
+
+ options = []
+
+ if command is "revno":
+ bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
+ else:
+ if ud.revision:
+ options.append("-r %s" % ud.revision)
+
+ if command is "fetch":
+ bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
+ elif command is "update":
+ bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
+ else:
+ raise FetchError("Invalid bzr command %s" % command, ud.url)
+
+ return bzrcmd
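+
+ # Illustrative example: assuming FETCHCMD_bzr expands to "bzr", a "fetch" of
+ # revision 1234 from http://host/path yields the command
+ #   bzr co -r 1234 http://host/path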
+
+ def download(self, loc, ud, d):
+ """Fetch url"""
+
+ if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
+ bzrcmd = self._buildbzrcommand(ud, d, "update")
+ logger.debug(1, "BZR Update %s", loc)
+ bb.fetch2.check_network_access(d, bzrcmd)
+ os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
+ runfetchcmd(bzrcmd, d)
+ else:
+ bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
+ bzrcmd = self._buildbzrcommand(ud, d, "fetch")
+ bb.fetch2.check_network_access(d, bzrcmd)
+ logger.debug(1, "BZR Checkout %s", loc)
+ bb.mkdirhier(ud.pkgdir)
+ os.chdir(ud.pkgdir)
+ logger.debug(1, "Running %s", bzrcmd)
+ runfetchcmd(bzrcmd, d)
+
+ os.chdir(ud.pkgdir)
+
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
+
+ # tar them up to a defined filename
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])
+
+ def supports_srcrev(self):
+ return True
+
+ def _revision_key(self, url, ud, d, name):
+ """
+ Return a unique key for the url
+ """
+ return "bzr:" + ud.pkgdir
+
+ def _latest_revision(self, url, ud, d, name):
+ """
+ Return the latest upstream revision number
+ """
+ logger.debug(2, "BZR fetcher hitting network for %s", url)
+
+ bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"))
+
+ output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
+
+ return output.strip()
+
+ def _sortable_revision(self, url, ud, d):
+ """
+ Return a sortable revision number which in our case is the revision number
+ """
+
+ return self._build_revision(url, ud, d)
+
+ def _build_revision(self, url, ud, d):
+ return ud.revision
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
new file mode 100644
index 000000000..3cd28b1fd
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -0,0 +1,169 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+#Based on functions from the base bb module, Copyright 2003 Holger Schurig
+#
+
+import os
+import logging
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
+from bb.fetch2 import runfetchcmd
+
+class Cvs(FetchMethod):
+ """
+ Class to fetch a module or modules from cvs repositories
+ """
+ def supports(self, url, ud, d):
+ """
+ Check to see if a given url can be fetched with cvs.
+ """
+ return ud.type in ['cvs']
+
+ def urldata_init(self, ud, d):
+ if not "module" in ud.parm:
+ raise MissingParameterError("module", ud.url)
+ ud.module = ud.parm["module"]
+
+ ud.tag = ud.parm.get('tag', "")
+
+ # Override the default date in certain cases
+ if 'date' in ud.parm:
+ ud.date = ud.parm['date']
+ elif ud.tag:
+ ud.date = ""
+
+ norecurse = ''
+ if 'norecurse' in ud.parm:
+ norecurse = '_norecurse'
+
+ fullpath = ''
+ if 'fullpath' in ud.parm:
+ fullpath = '_fullpath'
+
+ ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
+
+ def need_update(self, url, ud, d):
+ if (ud.date == "now"):
+ return True
+ if not os.path.exists(ud.localpath):
+ return True
+ return False
+
+ def download(self, loc, ud, d):
+
+ method = ud.parm.get('method', 'pserver')
+ localdir = ud.parm.get('localdir', ud.module)
+ cvs_port = ud.parm.get('port', '')
+
+ cvs_rsh = None
+ if method == "ext":
+ if "rsh" in ud.parm:
+ cvs_rsh = ud.parm["rsh"]
+
+ if method == "dir":
+ cvsroot = ud.path
+ else:
+ cvsroot = ":" + method
+ cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
+ if cvsproxyhost:
+ cvsroot += ";proxy=" + cvsproxyhost
+ cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
+ if cvsproxyport:
+ cvsroot += ";proxyport=" + cvsproxyport
+ cvsroot += ":" + ud.user
+ if ud.pswd:
+ cvsroot += ":" + ud.pswd
+ cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
+
+ options = []
+ if 'norecurse' in ud.parm:
+ options.append("-l")
+ if ud.date:
+ # treat YYYYMMDDHHMM specially for CVS
+ if len(ud.date) == 12:
+ options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
+ else:
+ options.append("-D \"%s UTC\"" % ud.date)
+ if ud.tag:
+ options.append("-r %s" % ud.tag)
+
+ localdata = data.createCopy(d)
+ data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
+ data.update_data(localdata)
+
+ data.setVar('CVSROOT', cvsroot, localdata)
+ data.setVar('CVSCOOPTS', " ".join(options), localdata)
+ data.setVar('CVSMODULE', ud.module, localdata)
+ cvscmd = data.getVar('FETCHCOMMAND', localdata, True)
+ cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, True)
+
+ if cvs_rsh:
+ cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
+ cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
+
+ # create module directory
+ logger.debug(2, "Fetch: checking for module directory")
+ pkg = data.expand('${PN}', d)
+ pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
+ moddir = os.path.join(pkgdir, localdir)
+ if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
+ logger.info("Update " + loc)
+ bb.fetch2.check_network_access(d, cvsupdatecmd)
+ # update sources there
+ os.chdir(moddir)
+ cmd = cvsupdatecmd
+ else:
+ logger.info("Fetch " + loc)
+ # check out sources there
+ bb.mkdirhier(pkgdir)
+ os.chdir(pkgdir)
+ logger.debug(1, "Running %s", cvscmd)
+ bb.fetch2.check_network_access(d, cvscmd)
+ cmd = cvscmd
+
+ runfetchcmd(cmd, d, cleanup = [moddir])
+
+ if not os.access(moddir, os.R_OK):
+ raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)
+
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude 'CVS'"
+
+ # tar them up to a defined filename
+ if 'fullpath' in ud.parm:
+ os.chdir(pkgdir)
+ cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
+ else:
+ os.chdir(moddir)
+ os.chdir('..')
+ cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))
+
+ runfetchcmd(cmd, d, cleanup = [ud.localpath])
+
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
new file mode 100644
index 000000000..4cb669350
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -0,0 +1,236 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' git implementation
+
+"""
+
+#Copyright (C) 2005 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Git(FetchMethod):
+ """Class to fetch a module or modules from git repositories"""
+ def init(self, d):
+ #
+ # Only enable _sortable revision if the key is set
+ #
+ if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
+ self._sortable_buildindex = self._sortable_buildindex_disabled
+ def supports(self, url, ud, d):
+ """
+ Check to see if a given url can be fetched with git.
+ """
+ return ud.type in ['git']
+
+ def urldata_init(self, ud, d):
+ """
+ init git-specific variables within the url data
+ so that git methods like latest_revision() can work
+ """
+ if 'protocol' in ud.parm:
+ ud.proto = ud.parm['protocol']
+ elif not ud.host:
+ ud.proto = 'file'
+ else:
+ ud.proto = "rsync"
+
+ ud.nocheckout = False
+ if 'nocheckout' in ud.parm:
+ ud.nocheckout = True
+
+ branches = ud.parm.get("branch", "master").split(',')
+ if len(branches) != len(ud.names):
+ raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
+ ud.branches = {}
+ for name in ud.names:
+ branch = branches[ud.names.index(name)]
+ ud.branches[name] = branch
+
+ gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
+ ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
+ ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
+
+ ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
+
+ for name in ud.names:
+ # Ensure anything that doesn't look like a full sha1 revision (40 hex characters) is translated into one
+ if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
+ ud.revisions[name] = self.latest_revision(ud.url, ud, d, name)
+
+ ud.localfile = ud.clonedir
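+
+ # Illustrative example: for git://git.example.com/foo/bar.git, gitsrcname becomes
+ # "git.example.com.foo.bar.git", the bare clone lives in ${GITDIR}/git.example.com.foo.bar.git
+ # and the mirror tarball is named git2_git.example.com.foo.bar.git.tar.gz.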
+
+ def localpath(self, url, ud, d):
+ return ud.clonedir
+
+ def need_update(self, u, ud, d):
+ if not os.path.exists(ud.clonedir):
+ return True
+ os.chdir(ud.clonedir)
+ for name in ud.names:
+ if not self._contains_ref(ud.revisions[name], d):
+ return True
+ return False
+
+ def try_premirror(self, u, ud, d):
+ # If we don't do this, updating an existing checkout with only premirrors
+ # is not possible
+ if bb.data.getVar("BB_FETCH_PREMIRRORONLY", d, True) is not None:
+ return True
+ if os.path.exists(ud.clonedir):
+ return False
+ return True
+
+ def download(self, loc, ud, d):
+ """Fetch url"""
+
+ if ud.user:
+ username = ud.user + '@'
+ else:
+ username = ""
+
+ repofile = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)
+
+ ud.repochanged = not os.path.exists(repofile)
+
+ # If the checkout doesn't exist and the mirror tarball does, extract it
+ if not os.path.exists(ud.clonedir) and os.path.exists(repofile):
+ bb.mkdirhier(ud.clonedir)
+ os.chdir(ud.clonedir)
+ runfetchcmd("tar -xzf %s" % (repofile), d)
+
+ # If the repo still doesn't exist, fall back to cloning it
+ if not os.path.exists(ud.clonedir):
+ bb.fetch2.check_network_access(d, "git clone --bare %s%s" % (ud.host, ud.path))
+ runfetchcmd("%s clone --bare %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)
+
+ os.chdir(ud.clonedir)
+ # Update the checkout if needed
+ needupdate = False
+ for name in ud.names:
+ if not self._contains_ref(ud.revisions[name], d):
+ needupdate = True
+ if needupdate:
+ bb.fetch2.check_network_access(d, "git fetch %s%s" % (ud.host, ud.path))
+ try:
+ runfetchcmd("%s remote prune origin" % ud.basecmd, d)
+ runfetchcmd("%s remote rm origin" % ud.basecmd, d)
+ except bb.fetch2.FetchError:
+ logger.debug(1, "No Origin")
+
+ runfetchcmd("%s remote add origin %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
+ runfetchcmd("%s fetch --all -t" % ud.basecmd, d)
+ runfetchcmd("%s prune-packed" % ud.basecmd, d)
+ runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
+ ud.repochanged = True
+
+ def build_mirror_data(self, url, ud, d):
+ # Generate a mirror tarball if needed
+ repofile = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)
+
+ os.chdir(ud.clonedir)
+ mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
+ if mirror_tarballs != "0" and ud.repochanged:
+ logger.info("Creating tarball of git repository")
+ runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".") ), d)
+
+ def unpack(self, ud, destdir, d):
+ """ unpack the downloaded src to destdir"""
+
+ subdir = ud.parm.get("subpath", "")
+ if subdir != "":
+ readpathspec = ":%s" % (subdir)
+ else:
+ readpathspec = ""
+
+ destdir = os.path.join(destdir, "git/")
+ if os.path.exists(destdir):
+ bb.utils.prunedir(destdir)
+
+ runfetchcmd("git clone -s -n %s %s" % (ud.clonedir, destdir), d)
+ if not ud.nocheckout:
+ os.chdir(destdir)
+ runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
+ runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
+ return True
+
+ def supports_srcrev(self):
+ return True
+
+ def _contains_ref(self, tag, d):
+ basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
+ output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True)
+ return output.split()[0] != "0"
+
+ def _revision_key(self, url, ud, d, name):
+ """
+ Return a unique key for the url
+ """
+ return "git:" + ud.host + ud.path.replace('/', '.') + ud.branches[name]
+
+ def _latest_revision(self, url, ud, d, name):
+ """
+ Compute the HEAD revision for the url
+ """
+ if ud.user:
+ username = ud.user + '@'
+ else:
+ username = ""
+
+ bb.fetch2.check_network_access(d, "git ls-remote %s%s %s" % (ud.host, ud.path, ud.branches[name]))
+ basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
+ cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branches[name])
+ output = runfetchcmd(cmd, d, True)
+ if not output:
+ raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, url)
+ return output.split()[0]
+
+ def _build_revision(self, url, ud, d, name):
+ return ud.revisions[name]
+
+ def _sortable_buildindex_disabled(self, url, ud, d, rev):
+ """
+ Return a suitable buildindex for the revision specified. This is done by counting revisions
+ using "git rev-list" which may or may not work in different circumstances.
+ """
+
+ cwd = os.getcwd()
+
+ # Check if we have the rev already
+
+ if not os.path.exists(ud.clonedir):
+ print("no repo")
+ self.download(None, ud, d)
+ if not os.path.exists(ud.clonedir):
+ logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
+ return None
+
+
+ os.chdir(ud.clonedir)
+ if not self._contains_ref(rev, d):
+ self.download(None, ud, d)
+
+ output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
+ os.chdir(cwd)
+
+ buildindex = "%s" % output.split()[0]
+ logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
+ return buildindex
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
new file mode 100644
index 000000000..ac5825baa
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -0,0 +1,174 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for mercurial DRCS (hg).
+
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+# Copyright (C) 2004 Marcin Juszkiewicz
+# Copyright (C) 2007 Robert Schuster
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import sys
+import logging
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Hg(FetchMethod):
+ """Class to fetch from mercurial repositories"""
+ def supports(self, url, ud, d):
+ """
+ Check to see if a given url can be fetched with mercurial.
+ """
+ return ud.type in ['hg']
+
+ def urldata_init(self, ud, d):
+ """
+ init hg-specific variables within the url data
+ """
+ if not "module" in ud.parm:
+ raise MissingParameterError('module', ud.url)
+
+ ud.module = ud.parm["module"]
+
+ # Create paths to mercurial checkouts
+ relpath = self._strip_leading_slashes(ud.path)
+ ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
+ ud.moddir = os.path.join(ud.pkgdir, ud.module)
+
+ if 'rev' in ud.parm:
+ ud.revision = ud.parm['rev']
+ elif not ud.revision:
+ ud.revision = self.latest_revision(ud.url, ud, d)
+
+ ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
+
+ def need_update(self, url, ud, d):
+ revTag = ud.parm.get('rev', 'tip')
+ if revTag == "tip":
+ return True
+ if not os.path.exists(ud.localpath):
+ return True
+ return False
+
+ def _buildhgcommand(self, ud, d, command):
+ """
+ Build up an hg commandline based on ud
+ command is "fetch", "update", "info"
+ """
+
+ basecmd = data.expand('${FETCHCMD_hg}', d)
+
+ proto = ud.parm.get('proto', 'http')
+
+ host = ud.host
+ if proto == "file":
+ host = "/"
+ ud.host = "localhost"
+
+ if not ud.user:
+ hgroot = host + ud.path
+ else:
+ hgroot = ud.user + "@" + host + ud.path
+
+ if command is "info":
+ return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)
+
+ options = []
+ if ud.revision:
+ options.append("-r %s" % ud.revision)
+
+ if command is "fetch":
+ cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
+ elif command is "pull":
+ # do not pass options list; limiting pull to rev causes the local
+ # repo not to contain it and immediately following "update" command
+ # will crash
+ cmd = "%s pull" % (basecmd)
+ elif command is "update":
+ cmd = "%s update -C %s" % (basecmd, " ".join(options))
+ else:
+ raise FetchError("Invalid hg command %s" % command, ud.url)
+
+ return cmd
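+
+ # Illustrative example: assuming FETCHCMD_hg expands to "hg", a "fetch" of
+ # revision abc123 of module "foo" from http://host/path yields
+ #   hg clone -r abc123 http://host/path/foo foo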
+
+ def download(self, loc, ud, d):
+ """Fetch url"""
+
+ logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+ if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
+ updatecmd = self._buildhgcommand(ud, d, "pull")
+ logger.info("Update " + loc)
+ # update sources there
+ os.chdir(ud.moddir)
+ logger.debug(1, "Running %s", updatecmd)
+ bb.fetch2.check_network_access(d, updatecmd)
+ runfetchcmd(updatecmd, d)
+
+ else:
+ fetchcmd = self._buildhgcommand(ud, d, "fetch")
+ logger.info("Fetch " + loc)
+ # check out sources there
+ bb.mkdirhier(ud.pkgdir)
+ os.chdir(ud.pkgdir)
+ logger.debug(1, "Running %s", fetchcmd)
+ bb.fetch2.check_network_access(d, fetchcmd)
+ runfetchcmd(fetchcmd, d)
+
+ # Even when we clone (fetch), we still need to update as hg's clone
+ # won't check out the specified revision if it's on a branch
+ updatecmd = self._buildhgcommand(ud, d, "update")
+ os.chdir(ud.moddir)
+ logger.debug(1, "Running %s", updatecmd)
+ runfetchcmd(updatecmd, d)
+
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude '.hg' --exclude '.hgrags'"
+
+ os.chdir(ud.pkgdir)
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
+
+ def supports_srcrev(self):
+ return True
+
+ def _latest_revision(self, url, ud, d, name):
+ """
+ Compute tip revision for the url
+ """
+ bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"))
+ output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
+ return output.strip()
+
+ def _build_revision(self, url, ud, d):
+ return ud.revision
+
+ def _revision_key(self, url, ud, d, name):
+ """
+ Return a unique key for the url
+ """
+ return "hg:" + ud.moddir
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
new file mode 100644
index 000000000..d77d39375
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -0,0 +1,76 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import bb
+import bb.utils
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import logger
+
+class Local(FetchMethod):
+ def supports(self, url, urldata, d):
+ """
+ Check to see if a given url represents a local fetch.
+ """
+ return urldata.type in ['file']
+
+ def urldata_init(self, ud, d):
+ # We don't set localfile as for this fetcher the file is already local!
+ return
+
+ def localpath(self, url, urldata, d):
+ """
+ Return the local filename of a given url assuming a successful fetch.
+ """
+ path = url.split("://")[1]
+ path = path.split(";")[0]
+ newpath = path
+ if path[0] != "/":
+ filespath = data.getVar('FILESPATH', d, True)
+ if filespath:
+ newpath = bb.utils.which(filespath, path)
+ if not newpath:
+ filesdir = data.getVar('FILESDIR', d, True)
+ if filesdir:
+ newpath = os.path.join(filesdir, path)
+ return newpath
+
+ def download(self, url, urldata, d):
+ """Fetch urls (no-op for Local method)"""
+ # no need to fetch local files, we'll deal with them in place.
+ return 1
+
+ def checkstatus(self, url, urldata, d):
+ """
+ Check the status of the url
+ """
+ if urldata.localpath.find("*") != -1:
+ logger.info("URL %s looks like a glob and was therefore not checked.", url)
+ return True
+ if os.path.exists(urldata.localpath):
+ return True
+ return False
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
new file mode 100644
index 000000000..f252b5e4a
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -0,0 +1,135 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+Bitbake "Fetch" implementation for osc (Opensuse build service client).
+Based on the svn "Fetch" implementation.
+
+"""
+
+import os
+import sys
+import logging
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Osc(FetchMethod):
+ """Class to fetch a module or modules from Opensuse build server
+ repositories."""
+
+ def supports(self, url, ud, d):
+ """
+ Check to see if a given url can be fetched with osc.
+ """
+ return ud.type in ['osc']
+
+ def urldata_init(self, ud, d):
+ if not "module" in ud.parm:
+ raise MissingParameterError('module', ud.url)
+
+ ud.module = ud.parm["module"]
+
+ # Create paths to osc checkouts
+ relpath = self._strip_leading_slashes(ud.path)
+ ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
+ ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
+
+ if 'rev' in ud.parm:
+ ud.revision = ud.parm['rev']
+ else:
+ pv = data.getVar("PV", d, 0)
+ rev = bb.fetch2.srcrev_internal_helper(ud, d)
+ if rev and rev != True:
+ ud.revision = rev
+ else:
+ ud.revision = ""
+
+ ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
+
+ def _buildosccommand(self, ud, d, command):
+ """
+ Build up an osc command line based on ud
+ command is "fetch", "update", "info"
+ """
+
+ basecmd = data.expand('${FETCHCMD_osc}', d)
+
+ proto = ud.parm.get('proto', 'ocs')
+
+ options = []
+
+ config = "-c %s" % self.generate_config(ud, d)
+
+ if ud.revision:
+ options.append("-r %s" % ud.revision)
+
+ coroot = self._strip_leading_slashes(ud.path)
+
+ if command is "fetch":
+ osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
+ elif command is "update":
+ osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
+ else:
+ raise FetchError("Invalid osc command %s" % command, ud.url)
+
+ return osccmd
+
+ def download(self, loc, ud, d):
+ """
+ Fetch url
+ """
+
+ logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+ if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
+ oscupdatecmd = self._buildosccommand(ud, d, "update")
+ logger.info("Update "+ loc)
+ # update sources there
+ os.chdir(ud.moddir)
+ logger.debug(1, "Running %s", oscupdatecmd)
+ bb.fetch2.check_network_access(d, oscupdatecmd)
+ runfetchcmd(oscupdatecmd, d)
+ else:
+ oscfetchcmd = self._buildosccommand(ud, d, "fetch")
+ logger.info("Fetch " + loc)
+ # check out sources there
+ bb.mkdirhier(ud.pkgdir)
+ os.chdir(ud.pkgdir)
+ logger.debug(1, "Running %s", oscfetchcmd)
+ bb.fetch2.check_network_access(d, oscfetchcmd)
+ runfetchcmd(oscfetchcmd, d)
+
+ os.chdir(os.path.join(ud.pkgdir + ud.path))
+ # tar them up to a defined filename
+ runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])
+
+ def supports_srcrev(self):
+ return False
+
+ def generate_config(self, ud, d):
+ """
+ Generate a .oscrc to be used for this run.
+ """
+
+ config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
+ if (os.path.exists(config_path)):
+ os.remove(config_path)
+
+ f = open(config_path, 'w')
+ f.write("[general]\n")
+ f.write("apisrv = %s\n" % ud.host)
+ f.write("scheme = http\n")
+ f.write("su-wrapper = su -c\n")
+ f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
+ f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
+ f.write("extra-pkgs = gzip\n")
+ f.write("\n")
+ f.write("[%s]\n" % ud.host)
+ f.write("user = %s\n" % ud.parm["user"])
+ f.write("pass = %s\n" % ud.parm["pswd"])
+ f.close()
+
+ return config_path
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
new file mode 100644
index 000000000..6347834c7
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -0,0 +1,196 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+from future_builtins import zip
+import os
+import logging
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import logger
+from bb.fetch2 import runfetchcmd
+
+class Perforce(FetchMethod):
+ def supports(self, url, ud, d):
+ return ud.type in ['p4']
+
+ def doparse(url, d):
+ parm = {}
+ path = url.split("://")[1]
+ delim = path.find("@");
+ if delim != -1:
+ (user, pswd, host, port) = path.split('@')[0].split(":")
+ path = path.split('@')[1]
+ else:
+ (host, port) = data.getVar('P4PORT', d).split(':')
+ user = ""
+ pswd = ""
+
+ if path.find(";") != -1:
+ keys=[]
+ values=[]
+ plist = path.split(';')
+ for item in plist:
+ if item.count('='):
+ (key, value) = item.split('=')
+ keys.append(key)
+ values.append(value)
+
+ parm = dict(zip(keys, values))
+ path = "//" + path.split(';')[0]
+ host += ":%s" % (port)
+ parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
+
+ return host, path, user, pswd, parm
+ doparse = staticmethod(doparse)
+
+ def getcset(d, depot, host, user, pswd, parm):
+ p4opt = ""
+ if "cset" in parm:
+ return parm["cset"];
+ if user:
+ p4opt += " -u %s" % (user)
+ if pswd:
+ p4opt += " -P %s" % (pswd)
+ if host:
+ p4opt += " -p %s" % (host)
+
+ p4date = data.getVar("P4DATE", d, True)
+ if "revision" in parm:
+ depot += "#%s" % (parm["revision"])
+ elif "label" in parm:
+ depot += "@%s" % (parm["label"])
+ elif p4date:
+ depot += "@%s" % (p4date)
+
+ p4cmd = data.getVar('FETCHCOMMAND_p4', d, True)
+ logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
+ p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
+ cset = p4file.readline().strip()
+ logger.debug(1, "READ %s", cset)
+ if not cset:
+ return -1
+
+ return cset.split(' ')[1]
+ getcset = staticmethod(getcset)
+
+ def urldata_init(self, ud, d):
+ (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
+
+ # If a label is specified, we use that as our filename
+
+ if "label" in parm:
+ ud.localfile = "%s.tar.gz" % (parm["label"])
+ return
+
+ base = path
+ which = path.find('/...')
+ if which != -1:
+ base = path[:which]
+
+ base = self._strip_leading_slashes(base)
+
+ cset = Perforce.getcset(d, path, host, user, pswd, parm)
+
+ ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
+
+ def download(self, loc, ud, d):
+ """
+ Fetch urls
+ """
+
+ (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)
+
+ if depot.find('/...') != -1:
+ path = depot[:depot.find('/...')]
+ else:
+ path = depot
+
+ module = parm.get('module', os.path.basename(path))
+
+ localdata = data.createCopy(d)
+ data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
+ data.update_data(localdata)
+
+ # Get the p4 command
+ p4opt = ""
+ if user:
+ p4opt += " -u %s" % (user)
+
+ if pswd:
+ p4opt += " -P %s" % (pswd)
+
+ if host:
+ p4opt += " -p %s" % (host)
+
+ p4cmd = data.getVar('FETCHCOMMAND', localdata, True)
+
+ # create temp directory
+ logger.debug(2, "Fetch: creating temporary directory")
+ bb.mkdirhier(data.expand('${WORKDIR}', localdata))
+ data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
+ tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
+ tmpfile = tmppipe.readline().strip()
+ if not tmpfile:
+ raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)
+
+ if "label" in parm:
+ depot = "%s@%s" % (depot, parm["label"])
+ else:
+ cset = Perforce.getcset(d, depot, host, user, pswd, parm)
+ depot = "%s@%s" % (depot, cset)
+
+ os.chdir(tmpfile)
+ logger.info("Fetch " + loc)
+ logger.info("%s%s files %s", p4cmd, p4opt, depot)
+ p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
+
+ if not p4file:
+ raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc)
+
+ count = 0
+
+ for file in p4file:
+ list = file.split()
+
+ if list[2] == "delete":
+ continue
+
+ dest = list[0][len(path)+1:]
+ where = dest.find("#")
+
+ os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
+ count = count + 1
+
+ if count == 0:
+ logger.error("Fetch: No files gathered from the P4 fetch")
+ raise FetchError("Fetch: No files gathered from the P4 fetch", loc)
+
+ runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
+ # cleanup
+ bb.utils.prunedir(tmpfile)
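For reference, the string handling in Perforce.doparse above can be sketched standalone with made-up values (the real method additionally resolves a changeset via 'p4 changes'):

    # Illustrative sketch of doparse's split logic; URL and credentials are invented.
    url = "p4://user:secret:perforce.example.com:1666@depot/project/...;module=project"
    path = url.split("://")[1]
    user, pswd, host, port = path.split('@')[0].split(":")
    path = path.split('@')[1]
    parm = dict(item.split('=') for item in path.split(';') if '=' in item)
    path = "//" + path.split(';')[0]
    host += ":%s" % port
    print([host, path, user, pswd, parm])
    # ['perforce.example.com:1666', '//depot/project/...', 'user', 'secret', {'module': 'project'}]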
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
new file mode 100644
index 000000000..3b16fc014
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -0,0 +1,98 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake "Fetch" repo (git) implementation
+
+"""
+
+# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
+#
+# Based on git.py which is:
+#Copyright (C) 2005 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import logger
+from bb.fetch2 import runfetchcmd
+
+class Repo(FetchMethod):
+ """Class to fetch a module or modules from repo (git) repositories"""
+ def supports(self, url, ud, d):
+ """
+ Check to see if a given url can be fetched with repo.
+ """
+ return ud.type in ["repo"]
+
+ def urldata_init(self, ud, d):
+ """
+ We don"t care about the git rev of the manifests repository, but
+ we do care about the manifest to use. The default is "default".
+ We also care about the branch or tag to be used. The default is
+ "master".
+ """
+
+ ud.proto = ud.parm.get('protocol', 'git')
+ ud.branch = ud.parm.get('branch', 'master')
+ ud.manifest = ud.parm.get('manifest', 'default.xml')
+ if not ud.manifest.endswith('.xml'):
+ ud.manifest += '.xml'
+
+ ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
+
+ def download(self, loc, ud, d):
+ """Fetch url"""
+
+ if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
+ logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
+ return
+
+ gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
+ repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
+ codir = os.path.join(repodir, gitsrcname, ud.manifest)
+
+ if ud.user:
+ username = ud.user + "@"
+ else:
+ username = ""
+
+ bb.mkdirhier(os.path.join(codir, "repo"))
+ os.chdir(os.path.join(codir, "repo"))
+ if not os.path.exists(os.path.join(codir, "repo", ".repo")):
+ bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path))
+ runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
+
+ bb.fetch2.check_network_access(d, "repo sync %s" % ud.url)
+ runfetchcmd("repo sync", d)
+ os.chdir(codir)
+
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude '.repo' --exclude '.git'"
+
+ # Create a cache
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
+
+ def supports_srcrev(self):
+ return False
+
+ def _build_revision(self, url, ud, d):
+ return ud.manifest
+
+ def _want_sortable_revision(self, url, ud, d):
+ return False
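A small sketch (made-up host and path) of how Repo.urldata_init above derives the tarball name that later serves as the download cache key:

    host = "android.example.com"
    path = "/platform/manifest"
    manifest, branch = "default.xml", "master"
    print("repo_%s%s_%s_%s.tar.gz" % (host, path.replace("/", "."), manifest, branch))
    # repo_android.example.com.platform.manifest_default.xml_master.tar.gz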
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
new file mode 100644
index 000000000..2ee9ab093
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -0,0 +1,120 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+'''
+BitBake 'Fetch' implementations
+
+This implementation is for Secure Shell (SSH), and attempts to comply with the
+IETF secsh internet draft:
+ http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
+
+ Currently does not support the sftp parameters, as this uses scp.
+ It also does not support the 'fingerprint' connection parameter.
+
+'''
+
+# Copyright (C) 2006 OpenedHand Ltd.
+#
+#
+# Based in part on svk.py:
+# Copyright (C) 2006 Holger Hans Peter Freyther
+# Based on svn.py:
+# Copyright (C) 2003, 2004 Chris Larson
+# Based on functions from the base bb module:
+# Copyright 2003 Holger Schurig
+#
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import re, os
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import logger
+from bb.fetch2 import runfetchcmd
+
+
+__pattern__ = re.compile(r'''
+ \s* # Skip leading whitespace
+ ssh:// # scheme
+ ( # Optional username/password block
+ (?P<user>\S+) # username
+ (:(?P<pass>\S+))? # colon followed by the password (optional)
+ )?
+ (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
+ @
+ (?P<host>\S+?) # non-greedy match of the host
+ (:(?P<port>[0-9]+))? # colon followed by the port (optional)
+ /
+ (?P<path>[^;]+) # path on the remote system, may be absolute or relative,
+ # and may include the use of '~' to reference the remote home
+ # directory
+ (?P<sparam>(;[^;]+)*)? # parameters block (optional)
+ $
+''', re.VERBOSE)
+
+class SSH(FetchMethod):
+ '''Class to fetch a module or modules via Secure Shell'''
+
+ def supports(self, url, urldata, d):
+ return __pattern__.match(url) != None
+
+ def localpath(self, url, urldata, d):
+ m = __pattern__.match(urldata.url)
+ path = m.group('path')
+ host = m.group('host')
+ lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
+ return lpath
+
+ def download(self, url, urldata, d):
+ dldir = data.getVar('DL_DIR', d, True)
+
+ m = __pattern__.match(url)
+ path = m.group('path')
+ host = m.group('host')
+ port = m.group('port')
+ user = m.group('user')
+ password = m.group('pass')
+
+ ldir = os.path.join(dldir, host)
+ lpath = os.path.join(ldir, os.path.basename(path))
+
+ if not os.path.exists(ldir):
+ os.makedirs(ldir)
+
+ if port:
+ port = '-P %s' % port
+ else:
+ port = ''
+
+ if user:
+ fr = user
+ if password:
+ fr += ':%s' % password
+ fr += '@%s' % host
+ else:
+ fr = host
+ fr += ':%s' % path
+
+
+ import commands
+ cmd = 'scp -B -r %s %s %s/' % (
+ port,
+ commands.mkarg(fr),
+ commands.mkarg(ldir)
+ )
+
+ bb.fetch2.check_network_access(d, cmd)
+
+ runfetchcmd(cmd, d)
+
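Assuming it runs inside this module, where __pattern__ is defined, a quick sketch of how the verbose regex above decomposes a hypothetical URL:

    m = __pattern__.match("ssh://builder@git.example.com:2222/srv/repo.tar.gz")
    print(m.group('user'))   # builder
    print(m.group('host'))   # git.example.com
    print(m.group('port'))   # 2222
    print(m.group('path'))   # srv/repo.tar.gz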
diff --git a/bitbake/lib/bb/fetch2/svk.py b/bitbake/lib/bb/fetch2/svk.py
new file mode 100644
index 000000000..6211cac8d
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/svk.py
@@ -0,0 +1,97 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+This implementation is for svk. It is based on the svn implementation
+
+"""
+
+# Copyright (C) 2006 Holger Hans Peter Freyther
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import logging
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import logger
+from bb.fetch2 import runfetchcmd
+
+class Svk(FetchMethod):
+ """Class to fetch a module or modules from svk repositories"""
+ def supports(self, url, ud, d):
+ """
+ Check to see if a given url can be fetched with svk.
+ """
+ return ud.type in ['svk']
+
+ def urldata_init(self, ud, d):
+
+ if not "module" in ud.parm:
+ raise MissingParameterError('module', ud.url)
+ else:
+ ud.module = ud.parm["module"]
+
+ ud.revision = ud.parm.get('rev', "")
+
+ ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
+
+ def need_update(self, url, ud, d):
+ if ud.date == "now":
+ return True
+ if not os.path.exists(ud.localpath):
+ return True
+ return False
+
+ def download(self, loc, ud, d):
+ """Fetch urls"""
+
+ svkroot = ud.host + ud.path
+
+ svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
+
+ if ud.revision:
+ svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)
+
+ # create temp directory
+ localdata = data.createCopy(d)
+ data.update_data(localdata)
+ logger.debug(2, "Fetch: creating temporary directory")
+ bb.mkdirhier(data.expand('${WORKDIR}', localdata))
+ data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
+ tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
+ tmpfile = tmppipe.readline().strip()
+ if not tmpfile:
+ logger.error("Fetch: unable to create temporary directory")
+ raise FetchError("Fetch: unable to create temporary directory; make sure 'mktemp' is in the PATH.", loc)
+
+ # check out sources there
+ os.chdir(tmpfile)
+ logger.info("Fetch " + loc)
+ logger.debug(1, "Running %s", svkcmd)
+ runfetchcmd(svkcmd, d, cleanup = [tmpfile])
+
+ os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
+ # tar them up to a defined filename
+ runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath])
+
+ # cleanup
+ bb.utils.prunedir(tmpfile)
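The two checkout command forms built by Svk.download above, sketched with made-up values:

    svkroot, module = "svk.example.com/depot", "trunk"
    print("svk co -r {%s} %s/%s" % ("now", svkroot, module))    # date-based checkout
    print("svk co -r %s %s/%s" % ("1234", svkroot, module))     # explicit revision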
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
new file mode 100644
index 000000000..4ab643bcf
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -0,0 +1,173 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for svn.
+
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+# Copyright (C) 2004 Marcin Juszkiewicz
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import sys
+import logging
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Svn(FetchMethod):
+ """Class to fetch a module or modules from svn repositories"""
+ def supports(self, url, ud, d):
+ """
+ Check to see if a given url can be fetched with svn.
+ """
+ return ud.type in ['svn']
+
+ def urldata_init(self, ud, d):
+ """
+ Initialize svn-specific variables within url data
+ """
+ if not "module" in ud.parm:
+ raise MissingParameterError('module', ud.url)
+
+ ud.module = ud.parm["module"]
+
+ # Create paths to svn checkouts
+ relpath = self._strip_leading_slashes(ud.path)
+ ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
+ ud.moddir = os.path.join(ud.pkgdir, ud.module)
+
+ if 'rev' in ud.parm:
+ ud.revision = ud.parm['rev']
+
+ ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
+
+ def _buildsvncommand(self, ud, d, command):
+ """
+ Build up an svn commandline based on ud
+ command is "fetch", "update", "info"
+ """
+
+ basecmd = data.expand('${FETCHCMD_svn}', d)
+
+ proto = ud.parm.get('proto', 'svn')
+
+ svn_rsh = None
+ if proto == "svn+ssh" and "rsh" in ud.parm:
+ svn_rsh = ud.parm["rsh"]
+
+ svnroot = ud.host + ud.path
+
+ options = []
+
+ if ud.user:
+ options.append("--username %s" % ud.user)
+
+ if ud.pswd:
+ options.append("--password %s" % ud.pswd)
+
+ if command is "info":
+ svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
+ else:
+ suffix = ""
+ if ud.revision:
+ options.append("-r %s" % ud.revision)
+ suffix = "@%s" % (ud.revision)
+
+ if command is "fetch":
+ svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
+ elif command is "update":
+ svncmd = "%s update %s" % (basecmd, " ".join(options))
+ else:
+ raise FetchError("Invalid svn command %s" % command, ud.url)
+
+ if svn_rsh:
+ svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
+
+ return svncmd
+
+ def download(self, loc, ud, d):
+ """Fetch url"""
+
+ logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+ if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
+ svnupdatecmd = self._buildsvncommand(ud, d, "update")
+ logger.info("Update " + loc)
+ # update sources there
+ os.chdir(ud.moddir)
+ logger.debug(1, "Running %s", svnupdatecmd)
+ bb.fetch2.check_network_access(d, svnupdatecmd)
+ runfetchcmd(svnupdatecmd, d)
+ else:
+ svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
+ logger.info("Fetch " + loc)
+ # check out sources there
+ bb.mkdirhier(ud.pkgdir)
+ os.chdir(ud.pkgdir)
+ logger.debug(1, "Running %s", svnfetchcmd)
+ bb.fetch2.check_network_access(d, svnfetchcmd)
+ runfetchcmd(svnfetchcmd, d)
+
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude '.svn'"
+
+ os.chdir(ud.pkgdir)
+ # tar them up to a defined filename
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
+
+ def supports_srcrev(self):
+ return True
+
+ def _revision_key(self, url, ud, d, name):
+ """
+ Return a unique key for the url
+ """
+ return "svn:" + ud.moddir
+
+ def _latest_revision(self, url, ud, d, name):
+ """
+ Return the latest upstream revision number
+ """
+ bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "info"))
+
+ output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
+
+ revision = None
+ for line in output.splitlines():
+ if "Last Changed Rev" in line:
+ revision = line.split(":")[1].strip()
+
+ return revision
+
+ def _sortable_revision(self, url, ud, d):
+ """
+ Return a sortable revision number which in our case is the revision number
+ """
+
+ return self._build_revision(url, ud, d)
+
+ def _build_revision(self, url, ud, d):
+ return ud.revision
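A sketch of the checkout command _buildsvncommand above assembles, assuming FETCHCMD_svn expands to plain 'svn' and using made-up host, module and revision:

    basecmd, proto = "svn", "svn"
    svnroot, module, rev = "svn.example.com/repos", "trunk", "1234"
    options = ["--username builder", "-r %s" % rev]
    print("%s co %s %s://%s/%s@%s %s" % (basecmd, " ".join(options), proto, svnroot, module, rev, module))
    # svn co --username builder -r 1234 svn://svn.example.com/repos/trunk@1234 trunk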
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
new file mode 100644
index 000000000..cf6d5bf2a
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -0,0 +1,91 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import logging
+import bb
+import urllib
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import encodeurl
+from bb.fetch2 import decodeurl
+from bb.fetch2 import logger
+from bb.fetch2 import runfetchcmd
+
+class Wget(FetchMethod):
+ """Class to fetch urls via 'wget'"""
+ def supports(self, url, ud, d):
+ """
+ Check to see if a given url can be fetched with wget.
+ """
+ return ud.type in ['http', 'https', 'ftp']
+
+ def urldata_init(self, ud, d):
+
+ ud.basename = os.path.basename(ud.path)
+ ud.localfile = data.expand(urllib.unquote(ud.basename), d)
+
+ def download(self, uri, ud, d, checkonly = False):
+ """Fetch urls"""
+
+ def fetch_uri(uri, ud, d):
+ if checkonly:
+ fetchcmd = data.getVar("CHECKCOMMAND", d, True)
+ elif os.path.exists(ud.localpath):
+ # file exists, but we didn't complete it; trying again
+ fetchcmd = data.getVar("RESUMECOMMAND", d, True)
+ else:
+ fetchcmd = data.getVar("FETCHCOMMAND", d, True)
+
+ uri = uri.split(";")[0]
+ uri_decoded = list(decodeurl(uri))
+ uri_type = uri_decoded[0]
+ uri_host = uri_decoded[1]
+
+ fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
+ fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
+ logger.info("fetch " + uri)
+ logger.debug(2, "executing " + fetchcmd)
+ bb.fetch2.check_network_access(d, fetchcmd)
+ runfetchcmd(fetchcmd, d)
+
+ # Sanity check since wget can pretend it succeeded when it didn't
+ # Also, this used to happen if sourceforge sent us to the mirror page
+ if not os.path.exists(ud.localpath) and not checkonly:
+ raise FetchError("The fetch command returned success but %s doesn't exist?!" % (uri, ud.localpath), uri)
+
+ localdata = data.createCopy(d)
+ data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
+ data.update_data(localdata)
+
+ fetch_uri(uri, ud, localdata)
+
+ return True
+
+ def checkstatus(self, uri, ud, d):
+ return self.download(uri, ud, d, True)
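fetch_uri above fills in a command template taken from the metadata; the exact FETCHCOMMAND value is configuration-dependent, so the template below is only an assumed example:

    fetchcmd = "wget -t 2 -T 30 -nv -O ${FILE} ${URI}"      # assumed template
    uri = "http://downloads.example.com/src/foo-1.0.tar.gz"
    basename = "foo-1.0.tar.gz"
    print(fetchcmd.replace("${URI}", uri).replace("${FILE}", basename))
    # wget -t 2 -T 30 -nv -O foo-1.0.tar.gz http://downloads.example.com/src/foo-1.0.tar.gz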
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index 21692d930..1f9ff904a 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -23,12 +23,66 @@ Message handling infrastructure for bitbake
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
+import logging
import collections
+from itertools import groupby
+import warnings
import bb
import bb.event
-debug_level = collections.defaultdict(lambda: 0)
-verbose = False
+class BBLogFormatter(logging.Formatter):
+ """Formatter which ensures that our 'plain' messages (logging.INFO + 1) are used as is"""
+
+ DEBUG3 = logging.DEBUG - 2
+ DEBUG2 = logging.DEBUG - 1
+ DEBUG = logging.DEBUG
+ VERBOSE = logging.INFO - 1
+ NOTE = logging.INFO
+ PLAIN = logging.INFO + 1
+ ERROR = logging.ERROR
+ WARNING = logging.WARNING
+ CRITICAL = logging.CRITICAL
+
+ levelnames = {
+ DEBUG3 : 'DEBUG',
+ DEBUG2 : 'DEBUG',
+ DEBUG : 'DEBUG',
+ VERBOSE: 'NOTE',
+ NOTE : 'NOTE',
+ PLAIN : '',
+ WARNING : 'WARNING',
+ ERROR : 'ERROR',
+ CRITICAL: 'ERROR',
+ }
+
+ def getLevelName(self, levelno):
+ try:
+ return self.levelnames[levelno]
+ except KeyError:
+ self.levelnames[levelno] = value = 'Level %d' % levelno
+ return value
+
+ def format(self, record):
+ record.levelname = self.getLevelName(record.levelno)
+ if record.levelno == self.PLAIN:
+ return record.getMessage()
+ else:
+ return logging.Formatter.format(self, record)
+
+class Loggers(dict):
+ def __getitem__(self, key):
+ if key in self:
+ return dict.__getitem__(self, key)
+ else:
+ log = logging.getLogger("BitBake.%s" % domain._fields[key])
+ dict.__setitem__(self, key, log)
+ return log
+
+class DebugLevel(dict):
+ def __getitem__(self, key):
+ if key == "default":
+ key = domain.Default
+ return get_debug_level(key)
def _NamedTuple(name, fields):
Tuple = collections.namedtuple(name, " ".join(fields))
@@ -48,97 +102,99 @@ domain = _NamedTuple("Domain", (
"RunQueue",
"TaskData",
"Util"))
+logger = logging.getLogger("BitBake")
+loggers = Loggers()
+debug_level = DebugLevel()
-
-class MsgBase(bb.event.Event):
- """Base class for messages"""
-
- def __init__(self, msg):
- self._message = msg
- bb.event.Event.__init__(self)
-
-class MsgDebug(MsgBase):
- """Debug Message"""
-
-class MsgNote(MsgBase):
- """Note Message"""
-
-class MsgWarn(MsgBase):
- """Warning Message"""
-
-class MsgError(MsgBase):
- """Error Message"""
-
-class MsgFatal(MsgBase):
- """Fatal Message"""
-
-class MsgPlain(MsgBase):
- """General output"""
-
-#
# Message control functions
#
def set_debug_level(level):
- for d in domain:
- debug_level[d] = level
- debug_level[domain.Default] = level
+ for log in loggers.itervalues():
+ log.setLevel(logging.NOTSET)
+
+ if level:
+ logger.setLevel(logging.DEBUG - level + 1)
+ else:
+ logger.setLevel(logging.INFO)
def get_debug_level(msgdomain = domain.Default):
- return debug_level[msgdomain]
+ if not msgdomain:
+ level = logger.getEffectiveLevel()
+ else:
+ level = loggers[msgdomain].getEffectiveLevel()
+ return max(0, logging.DEBUG - level + 1)
def set_verbose(level):
- verbose = level
-
-def set_debug_domains(strdomains):
- for domainstr in strdomains:
- for d in domain:
- if domain._fields[d] == domainstr:
- debug_level[d] += 1
+ if level:
+ logger.setLevel(BBLogFormatter.VERBOSE)
+ else:
+ logger.setLevel(BBLogFormatter.NOTE)
+
+def set_debug_domains(domainargs):
+ for (domainarg, iterator) in groupby(domainargs):
+ for index, msgdomain in enumerate(domain._fields):
+ if msgdomain == domainarg:
+ level = len(tuple(iterator))
+ if level:
+ loggers[index].setLevel(logging.DEBUG - level + 1)
break
else:
- warn(None, "Logging domain %s is not valid, ignoring" % domainstr)
+ warn(None, "Logging domain %s is not valid, ignoring" % domainarg)
#
# Message handling functions
#
-def debug(level, msgdomain, msg, fn = None):
+def debug(level, msgdomain, msg):
+ warnings.warn("bb.msg.debug will soon be deprecated in favor of the python 'logging' module",
+ PendingDeprecationWarning, stacklevel=2)
+ level = logging.DEBUG - (level - 1)
if not msgdomain:
- msgdomain = domain.Default
-
- if debug_level[msgdomain] >= level:
- bb.event.fire(MsgDebug(msg), None)
- if bb.event.useStdout:
- print('DEBUG: %s' % (msg))
+ logger.debug(level, msg)
+ else:
+ loggers[msgdomain].debug(level, msg)
+
+def plain(msg):
+ warnings.warn("bb.msg.plain will soon be deprecated in favor of the python 'logging' module",
+ PendingDeprecationWarning, stacklevel=2)
+ logger.plain(msg)
+
+def note(level, msgdomain, msg):
+ warnings.warn("bb.msg.note will soon be deprecated in favor of the python 'logging' module",
+ PendingDeprecationWarning, stacklevel=2)
+ if level > 1:
+ if not msgdomain:
+ logger.verbose(msg)
+ else:
+ loggers[msgdomain].verbose(msg)
+ else:
+ if not msgdomain:
+ logger.info(msg)
+ else:
+ loggers[msgdomain].info(msg)
-def note(level, msgdomain, msg, fn = None):
+def warn(msgdomain, msg):
+ warnings.warn("bb.msg.warn will soon be deprecated in favor of the python 'logging' module",
+ PendingDeprecationWarning, stacklevel=2)
if not msgdomain:
- msgdomain = domain.Default
+ logger.warn(msg)
+ else:
+ loggers[msgdomain].warn(msg)
- if level == 1 or verbose or debug_level[msgdomain] >= 1:
- bb.event.fire(MsgNote(msg), None)
- if bb.event.useStdout:
- print('NOTE: %s' % (msg))
-
-def warn(msgdomain, msg, fn = None):
- bb.event.fire(MsgWarn(msg), None)
- if bb.event.useStdout:
- print('WARNING: %s' % (msg))
-
-def error(msgdomain, msg, fn = None):
- bb.event.fire(MsgError(msg), None)
- if bb.event.useStdout:
- print('ERROR: %s' % (msg))
-
-def fatal(msgdomain, msg, fn = None):
- bb.event.fire(MsgFatal(msg), None)
+def error(msgdomain, msg):
+ warnings.warn("bb.msg.error will soon be deprecated in favor of the python 'logging' module",
+ PendingDeprecationWarning, stacklevel=2)
+ if not msgdomain:
+ logger.error(msg)
+ else:
+ loggers[msgdomain].error(msg)
- if bb.event.useStdout:
- print('FATAL: %s' % (msg))
+def fatal(msgdomain, msg):
+ warnings.warn("bb.msg.fatal will soon be deprecated in favor of raising appropriate exceptions",
+ PendingDeprecationWarning, stacklevel=2)
+ if not msgdomain:
+ logger.critical(msg)
+ else:
+ loggers[msgdomain].critical(msg)
sys.exit(1)
-
-def plain(msg, fn = None):
- bb.event.fire(MsgPlain(msg), None)
- if bb.event.useStdout:
- print(msg)
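The formatter and level constants introduced above plug into the stock logging machinery; a minimal sketch (not part of this patch) of wiring them to a console handler:

    import logging
    import bb.msg

    console = logging.StreamHandler()
    console.setFormatter(bb.msg.BBLogFormatter("%(levelname)s: %(message)s"))
    logger = logging.getLogger("BitBake")
    logger.addHandler(console)
    logger.setLevel(bb.msg.BBLogFormatter.VERBOSE)
    # PLAIN records are emitted verbatim, without the "NOTE: " style prefix
    logger.log(bb.msg.BBLogFormatter.PLAIN, "plain output, printed as-is")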
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index da160ceb2..eee8d9cdd 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -26,10 +26,15 @@ File parsers for the BitBake build tools.
handlers = []
-import bb, os
+import os
+import stat
+import logging
+import bb
import bb.utils
import bb.siggen
+logger = logging.getLogger("BitBake.Parsing")
+
class ParseError(Exception):
"""Exception raised when parsing fails"""
@@ -39,19 +44,19 @@ class SkipPackage(Exception):
__mtime_cache = {}
def cached_mtime(f):
if f not in __mtime_cache:
- __mtime_cache[f] = os.stat(f)[8]
+ __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
return __mtime_cache[f]
def cached_mtime_noerror(f):
if f not in __mtime_cache:
try:
- __mtime_cache[f] = os.stat(f)[8]
+ __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
except OSError:
return 0
return __mtime_cache[f]
def update_mtime(f):
- __mtime_cache[f] = os.stat(f)[8]
+ __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
return __mtime_cache[f]
def mark_dependency(d, f):
@@ -80,18 +85,18 @@ def init(fn, data):
if h['supports'](fn):
return h['init'](data)
-def init_parser(d, dumpsigs):
- bb.parse.siggen = bb.siggen.init(d, dumpsigs)
+def init_parser(d):
+ bb.parse.siggen = bb.siggen.init(d)
def resolve_file(fn, d):
if not os.path.isabs(fn):
bbpath = bb.data.getVar("BBPATH", d, True)
- newfn = bb.which(bbpath, fn)
+ newfn = bb.utils.which(bbpath, fn)
if not newfn:
raise IOError("file %s not found in %s" % (fn, bbpath))
fn = newfn
- bb.msg.debug(2, bb.msg.domain.Parsing, "LOAD %s" % fn)
+ logger.debug(2, "LOAD %s", fn)
return fn
# Used by OpenEmbedded metadata
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 870ae65b0..8fffe1e8f 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -23,11 +23,14 @@
from __future__ import absolute_import
from future_builtins import filter
-import bb, re, string
-from bb import methodpool
+import re
+import string
+import logging
+import bb
import itertools
+from bb import methodpool
+from bb.parse import logger
-__word__ = re.compile(r"\S+")
__parsed_methods__ = bb.methodpool.get_parsed_dict()
_bbversions_re = re.compile(r"\[(?P<from>[0-9]+)-(?P<to>[0-9]+)\]")
@@ -37,13 +40,14 @@ class StatementGroup(list):
statement.eval(data)
class AstNode(object):
- pass
+ def __init__(self, filename, lineno):
+ self.filename = filename
+ self.lineno = lineno
class IncludeNode(AstNode):
- def __init__(self, what_file, fn, lineno, force):
+ def __init__(self, filename, lineno, what_file, force):
+ AstNode.__init__(self, filename, lineno)
self.what_file = what_file
- self.from_fn = fn
- self.from_lineno = lineno
self.force = force
def eval(self, data):
@@ -51,16 +55,17 @@ class IncludeNode(AstNode):
Include the file and evaluate the statements
"""
s = bb.data.expand(self.what_file, data)
- bb.msg.debug(3, bb.msg.domain.Parsing, "CONF %s:%d: including %s" % (self.from_fn, self.from_lineno, s))
+ logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s)
# TODO: Cache those includes... maybe not here though
if self.force:
- bb.parse.ConfHandler.include(self.from_fn, s, data, "include required")
+ bb.parse.ConfHandler.include(self.filename, s, data, "include required")
else:
- bb.parse.ConfHandler.include(self.from_fn, s, data, False)
+ bb.parse.ConfHandler.include(self.filename, s, data, False)
class ExportNode(AstNode):
- def __init__(self, var):
+ def __init__(self, filename, lineno, var):
+ AstNode.__init__(self, filename, lineno)
self.var = var
def eval(self, data):
@@ -73,7 +78,8 @@ class DataNode(AstNode):
this need to be re-evaluated... we might be able to do
that faster with multiple classes.
"""
- def __init__(self, groupd):
+ def __init__(self, filename, lineno, groupd):
+ AstNode.__init__(self, filename, lineno)
self.groupd = groupd
def getFunc(self, key, data):
@@ -116,19 +122,18 @@ class DataNode(AstNode):
else:
bb.data.setVar(key, val, data)
-class MethodNode:
- def __init__(self, func_name, body, lineno, fn):
+class MethodNode(AstNode):
+ def __init__(self, filename, lineno, func_name, body):
+ AstNode.__init__(self, filename, lineno)
self.func_name = func_name
self.body = body
- self.fn = fn
- self.lineno = lineno
def eval(self, data):
if self.func_name == "__anonymous":
- funcname = ("__anon_%s_%s" % (self.lineno, self.fn.translate(string.maketrans('/.+-', '____'))))
+ funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(string.maketrans('/.+-', '____'))))
if not funcname in bb.methodpool._parsed_fns:
text = "def %s(d):\n" % (funcname) + '\n'.join(self.body)
- bb.methodpool.insert_method(funcname, text, self.fn)
+ bb.methodpool.insert_method(funcname, text, self.filename)
anonfuncs = bb.data.getVar('__BBANONFUNCS', data) or []
anonfuncs.append(funcname)
bb.data.setVar('__BBANONFUNCS', anonfuncs, data)
@@ -137,25 +142,26 @@ class MethodNode:
bb.data.setVar(self.func_name, '\n'.join(self.body), data)
class PythonMethodNode(AstNode):
- def __init__(self, funcname, root, body, fn):
- self.func_name = funcname
- self.root = root
+ def __init__(self, filename, lineno, function, define, body):
+ AstNode.__init__(self, filename, lineno)
+ self.function = function
+ self.define = define
self.body = body
- self.fn = fn
def eval(self, data):
# Note we will add root to parsedmethods after having parse
# 'this' file. This means we will not parse methods from
# bb classes twice
text = '\n'.join(self.body)
- if not bb.methodpool.parsed_module(self.root):
- bb.methodpool.insert_method(self.root, text, self.fn)
- bb.data.setVarFlag(self.func_name, "func", 1, data)
- bb.data.setVarFlag(self.func_name, "python", 1, data)
- bb.data.setVar(self.func_name, text, data)
+ if not bb.methodpool.parsed_module(self.define):
+ bb.methodpool.insert_method(self.define, text, self.filename)
+ bb.data.setVarFlag(self.function, "func", 1, data)
+ bb.data.setVarFlag(self.function, "python", 1, data)
+ bb.data.setVar(self.function, text, data)
class MethodFlagsNode(AstNode):
- def __init__(self, key, m):
+ def __init__(self, filename, lineno, key, m):
+ AstNode.__init__(self, filename, lineno)
self.key = key
self.m = m
@@ -175,8 +181,9 @@ class MethodFlagsNode(AstNode):
bb.data.delVarFlag(self.key, "fakeroot", data)
class ExportFuncsNode(AstNode):
- def __init__(self, fns, classes):
- self.n = __word__.findall(fns)
+ def __init__(self, filename, lineno, fns, classes):
+ AstNode.__init__(self, filename, lineno)
+ self.n = fns.split()
self.classes = classes
def eval(self, data):
@@ -214,7 +221,8 @@ class ExportFuncsNode(AstNode):
bb.data.setVarFlag(var, 'export_func', '1', data)
class AddTaskNode(AstNode):
- def __init__(self, func, before, after):
+ def __init__(self, filename, lineno, func, before, after):
+ AstNode.__init__(self, filename, lineno)
self.func = func
self.before = before
self.after = after
@@ -245,8 +253,9 @@ class AddTaskNode(AstNode):
bb.data.setVarFlag(entry, "deps", [var] + existing, data)
class BBHandlerNode(AstNode):
- def __init__(self, fns):
- self.hs = __word__.findall(fns)
+ def __init__(self, filename, lineno, fns):
+ AstNode.__init__(self, filename, lineno)
+ self.hs = fns.split()
def eval(self, data):
bbhands = bb.data.getVar('__BBHANDLERS', data) or []
@@ -256,49 +265,49 @@ class BBHandlerNode(AstNode):
bb.data.setVar('__BBHANDLERS', bbhands, data)
class InheritNode(AstNode):
- def __init__(self, files):
- self.n = __word__.findall(files)
+ def __init__(self, filename, lineno, classes):
+ AstNode.__init__(self, filename, lineno)
+ self.classes = classes
def eval(self, data):
- bb.parse.BBHandler.inherit(self.n, data)
+ bb.parse.BBHandler.inherit(self.classes, data)
-def handleInclude(statements, m, fn, lineno, force):
- statements.append(IncludeNode(m.group(1), fn, lineno, force))
+def handleInclude(statements, filename, lineno, m, force):
+ statements.append(IncludeNode(filename, lineno, m.group(1), force))
-def handleExport(statements, m):
- statements.append(ExportNode(m.group(1)))
+def handleExport(statements, filename, lineno, m):
+ statements.append(ExportNode(filename, lineno, m.group(1)))
-def handleData(statements, groupd):
- statements.append(DataNode(groupd))
+def handleData(statements, filename, lineno, groupd):
+ statements.append(DataNode(filename, lineno, groupd))
-def handleMethod(statements, func_name, lineno, fn, body):
- statements.append(MethodNode(func_name, body, lineno, fn))
+def handleMethod(statements, filename, lineno, func_name, body):
+ statements.append(MethodNode(filename, lineno, func_name, body))
-def handlePythonMethod(statements, funcname, root, body, fn):
- statements.append(PythonMethodNode(funcname, root, body, fn))
+def handlePythonMethod(statements, filename, lineno, funcname, root, body):
+ statements.append(PythonMethodNode(filename, lineno, funcname, root, body))
-def handleMethodFlags(statements, key, m):
- statements.append(MethodFlagsNode(key, m))
+def handleMethodFlags(statements, filename, lineno, key, m):
+ statements.append(MethodFlagsNode(filename, lineno, key, m))
-def handleExportFuncs(statements, m, classes):
- statements.append(ExportFuncsNode(m.group(1), classes))
+def handleExportFuncs(statements, filename, lineno, m, classes):
+ statements.append(ExportFuncsNode(filename, lineno, m.group(1), classes))
-def handleAddTask(statements, m):
+def handleAddTask(statements, filename, lineno, m):
func = m.group("func")
before = m.group("before")
after = m.group("after")
if func is None:
return
- statements.append(AddTaskNode(func, before, after))
+ statements.append(AddTaskNode(filename, lineno, func, before, after))
-def handleBBHandlers(statements, m):
- statements.append(BBHandlerNode(m.group(1)))
+def handleBBHandlers(statements, filename, lineno, m):
+ statements.append(BBHandlerNode(filename, lineno, m.group(1)))
-def handleInherit(statements, m):
- files = m.group(1)
- n = __word__.findall(files)
- statements.append(InheritNode(m.group(1)))
+def handleInherit(statements, filename, lineno, m):
+ classes = m.group(1)
+ statements.append(InheritNode(filename, lineno, classes.split()))
def finalize(fn, d, variant = None):
for lazykey in bb.data.getVar("__lazy_assigned", d) or ():
@@ -365,7 +374,7 @@ def _expand_versions(versions):
def multi_finalize(fn, d):
appends = (d.getVar("__BBAPPEND", True) or "").split()
for append in appends:
- bb.msg.debug(2, bb.msg.domain.Parsing, "Appending .bbappend file " + append + " to " + fn)
+ logger.debug(2, "Appending .bbappend file %s to %s", append, fn)
bb.parse.BBHandler.handle(append, d, True)
safe_d = d
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 51ad10fb9..31d1e21c6 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -27,11 +27,12 @@
from __future__ import absolute_import
import re, bb, os
-import bb.fetch, bb.build, bb.utils
+import logging
+import bb.build, bb.utils
from bb import data
from . import ConfHandler
-from .. import resolve_file, ast
+from .. import resolve_file, ast, logger
from .ConfHandler import include, init
# For compatibility
@@ -64,7 +65,8 @@ IN_PYTHON_EOF = -9999999999999
def supports(fn, d):
- return fn[-3:] == ".bb" or fn[-8:] == ".bbclass" or fn[-4:] == ".inc"
+ """Return True if fn has a supported extension"""
+ return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
def inherit(files, d):
__inherit_cache = data.getVar('__inherit_cache', d) or []
@@ -72,11 +74,11 @@ def inherit(files, d):
lineno = 0
for file in files:
file = data.expand(file, d)
- if file[0] != "/" and file[-8:] != ".bbclass":
+ if not os.path.isabs(file) and not file.endswith(".bbclass"):
file = os.path.join('classes', '%s.bbclass' % file)
if not file in __inherit_cache:
- bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s:%d: inheriting %s" % (fn, lineno, file))
+ logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file)
__inherit_cache.append( file )
data.setVar('__inherit_cache', __inherit_cache, d)
include(fn, file, d, "inherit")
@@ -115,12 +117,12 @@ def handle(fn, d, include):
if include == 0:
- bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data)")
+ logger.debug(2, "BB %s: handle(data)", fn)
else:
- bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data, include)")
+ logger.debug(2, "BB %s: handle(data, include)", fn)
- (root, ext) = os.path.splitext(os.path.basename(fn))
- base_name = "%s%s" % (root, ext)
+ base_name = os.path.basename(fn)
+ (root, ext) = os.path.splitext(base_name)
init(d)
if ext == ".bbclass":
@@ -170,7 +172,7 @@ def feeder(lineno, s, fn, root, statements):
if __infunc__:
if s == '}':
__body__.append('')
- ast.handleMethod(statements, __infunc__, lineno, fn, __body__)
+ ast.handleMethod(statements, fn, lineno, __infunc__, __body__)
__infunc__ = ""
__body__ = []
else:
@@ -183,7 +185,8 @@ def feeder(lineno, s, fn, root, statements):
__body__.append(s)
return
else:
- ast.handlePythonMethod(statements, __inpython__, root, __body__, fn)
+ ast.handlePythonMethod(statements, fn, lineno, __inpython__,
+ root, __body__)
__body__ = []
__inpython__ = False
@@ -204,7 +207,7 @@ def feeder(lineno, s, fn, root, statements):
m = __func_start_regexp__.match(s)
if m:
__infunc__ = m.group("func") or "__anonymous"
- ast.handleMethodFlags(statements, __infunc__, m)
+ ast.handleMethodFlags(statements, fn, lineno, __infunc__, m)
return
m = __def_regexp__.match(s)
@@ -216,22 +219,22 @@ def feeder(lineno, s, fn, root, statements):
m = __export_func_regexp__.match(s)
if m:
- ast.handleExportFuncs(statements, m, classes)
+ ast.handleExportFuncs(statements, fn, lineno, m, classes)
return
m = __addtask_regexp__.match(s)
if m:
- ast.handleAddTask(statements, m)
+ ast.handleAddTask(statements, fn, lineno, m)
return
m = __addhandler_regexp__.match(s)
if m:
- ast.handleBBHandlers(statements, m)
+ ast.handleBBHandlers(statements, fn, lineno, m)
return
m = __inherit_regexp__.match(s)
if m:
- ast.handleInherit(statements, m)
+ ast.handleInherit(statements, fn, lineno, m)
return
return ConfHandler.feeder(lineno, s, fn, statements)
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index 9128a2ef8..fc239a354 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -25,8 +25,9 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import re, bb.data, os
+import logging
import bb.utils
-from bb.parse import ParseError, resolve_file, ast
+from bb.parse import ParseError, resolve_file, ast, logger
#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
@@ -45,10 +46,10 @@ def supports(fn, d):
def include(oldfn, fn, data, error_out):
"""
-
- error_out If True a ParseError will be reaised if the to be included
+ error_out If True a ParseError will be raised if the to be included
+ config-files could not be included.
"""
- if oldfn == fn: # prevent infinate recursion
+ if oldfn == fn: # prevent infinite recursion
return None
import bb
@@ -68,7 +69,7 @@ def include(oldfn, fn, data, error_out):
except IOError:
if error_out:
raise ParseError("Could not %(error_out)s file %(fn)s" % vars() )
- bb.msg.debug(2, bb.msg.domain.Parsing, "CONF file '%s' not found" % fn)
+ logger.debug(2, "CONF file '%s' not found", fn)
def handle(fn, data, include):
init(data)
@@ -112,22 +113,22 @@ def feeder(lineno, s, fn, statements):
m = __config_regexp__.match(s)
if m:
groupd = m.groupdict()
- ast.handleData(statements, groupd)
+ ast.handleData(statements, fn, lineno, groupd)
return
m = __include_regexp__.match(s)
if m:
- ast.handleInclude(statements, m, fn, lineno, False)
+ ast.handleInclude(statements, fn, lineno, m, False)
return
m = __require_regexp__.match(s)
if m:
- ast.handleInclude(statements, m, fn, lineno, True)
+ ast.handleInclude(statements, fn, lineno, m, True)
return
m = __export_regexp__.match(s)
if m:
- ast.handleExport(statements, m)
+ ast.handleExport(statements, fn, lineno, m)
return
raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s));
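Run inside this module, where __config_regexp__ is defined, a small sketch of the group dictionary that feeder() now hands to ast.handleData for a typical assignment:

    m = __config_regexp__.match('PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"')
    groupd = m.groupdict()
    print(groupd['var'])     # PREFERRED_PROVIDER_virtual/kernel
    print(groupd['ques'])    # ?=  (default assignment, applied only if the variable is unset)
    print(groupd['value'])   # linux-yocto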
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
index 00f492994..b8c239294 100644
--- a/bitbake/lib/bb/persist_data.py
+++ b/bitbake/lib/bb/persist_data.py
@@ -1,6 +1,12 @@
-# BitBake Persistent Data Store
-#
+"""BitBake Persistent Data Store
+
+Used to store data in a central location so that other threads/tasks can
+access it at some future date. Currently acts as a convenience wrapper around
+sqlite, providing a key/value store accessed by 'domain'.
+"""
+
# Copyright (C) 2007 Richard Purdie
+# Copyright (C) 2010 Chris Larson <chris_larson@mentor.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -15,119 +21,175 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-import bb, os
-import bb.utils
+import collections
+import logging
+import os.path
+import sys
+import warnings
+import bb.msg, bb.data, bb.utils
try:
import sqlite3
except ImportError:
- try:
- from pysqlite2 import dbapi2 as sqlite3
- except ImportError:
- bb.msg.fatal(bb.msg.domain.PersistData, "Importing sqlite3 and pysqlite2 failed, please install one of them. Python 2.5 or a 'python-pysqlite2' like package is likely to be what you need.")
+ from pysqlite2 import dbapi2 as sqlite3
sqlversion = sqlite3.sqlite_version_info
if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
- bb.msg.fatal(bb.msg.domain.PersistData, "sqlite3 version 3.3.0 or later is required.")
-
-class PersistData:
- """
- BitBake Persistent Data Store
-
- Used to store data in a central location such that other threads/tasks can
- access them at some future date.
-
- The "domain" is used as a key to isolate each data pool and in this
- implementation corresponds to an SQL table. The SQL table consists of a
- simple key and value pair.
-
- Why sqlite? It handles all the locking issues for us.
- """
- def __init__(self, d, persistent_database_connection):
- if "connection" in persistent_database_connection:
- self.cursor = persistent_database_connection["connection"].cursor()
- return
- self.cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True)
- if self.cachedir in [None, '']:
- bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'PERSISTENT_DIR' or 'CACHE' variable.")
- try:
- os.stat(self.cachedir)
- except OSError:
- bb.utils.mkdirhier(self.cachedir)
-
- self.cachefile = os.path.join(self.cachedir, "bb_persist_data.sqlite3")
- bb.msg.debug(1, bb.msg.domain.PersistData, "Using '%s' as the persistent data cache" % self.cachefile)
-
- connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
- persistent_database_connection["connection"] = connection
- self.cursor = persistent_database_connection["connection"].cursor()
+ raise Exception("sqlite3 version 3.3.0 or later is required.")
+
+
+logger = logging.getLogger("BitBake.PersistData")
+
+
+class SQLTable(collections.MutableMapping):
+ """Object representing a table/domain in the database"""
+ def __init__(self, cursor, table):
+ self.cursor = cursor
+ self.table = table
+
+ cursor.execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);"
+ % table)
+
+ def _execute(self, *query):
+ """Execute a query, waiting to acquire a lock if necessary"""
+ count = 0
+ while True:
+ try:
+ return self.cursor.execute(*query)
+ except sqlite3.OperationalError as exc:
+ if 'database is locked' in str(exc) and count < 500:
+ count = count + 1
+ continue
+ raise
+
+ def __getitem__(self, key):
+ data = self._execute("SELECT * from %s where key=?;" %
+ self.table, [key])
+ for row in data:
+ return row[1]
+
+ def __delitem__(self, key):
+ self._execute("DELETE from %s where key=?;" % self.table, [key])
+
+ def __setitem__(self, key, value):
+ data = self._execute("SELECT * from %s where key=?;" %
+ self.table, [key])
+ exists = len(list(data))
+ if exists:
+ self._execute("UPDATE %s SET value=? WHERE key=?;" % self.table,
+ [value, key])
+ else:
+ self._execute("INSERT into %s(key, value) values (?, ?);" %
+ self.table, [key, value])
+
+ def __contains__(self, key):
+ return key in set(self)
+
+ def __len__(self):
+ data = self._execute("SELECT COUNT(key) FROM %s;" % self.table)
+ for row in data:
+ return row[0]
+
+ def __iter__(self):
+ data = self._execute("SELECT key FROM %s;" % self.table)
+ for row in data:
+ yield row[0]
+
+ def iteritems(self):
+ data = self._execute("SELECT * FROM %s;" % self.table)
+ for row in data:
+ yield row[0], row[1]
+
+ def itervalues(self):
+ data = self._execute("SELECT value FROM %s;" % self.table)
+ for row in data:
+ yield row[0]
+
+
+class SQLData(object):
+ """Object representing the persistent data"""
+ def __init__(self, filename):
+ bb.utils.mkdirhier(os.path.dirname(filename))
+
+ self.filename = filename
+ self.connection = sqlite3.connect(filename, timeout=5,
+ isolation_level=None)
+ self.cursor = self.connection.cursor()
+ self._tables = {}
+
+ def __getitem__(self, table):
+ if not isinstance(table, basestring):
+ raise TypeError("table argument must be a string, not '%s'" %
+ type(table))
+
+ if table in self._tables:
+ return self._tables[table]
+ else:
+ tableobj = self._tables[table] = SQLTable(self.cursor, table)
+ return tableobj
+
+ def __delitem__(self, table):
+ if table in self._tables:
+ del self._tables[table]
+ self.cursor.execute("DROP TABLE IF EXISTS %s;" % table)
+
+
+class PersistData(object):
+ """Deprecated representation of the bitbake persistent data store"""
+ def __init__(self, d):
+ warnings.warn("Use of PersistData will be deprecated in the future",
+ category=PendingDeprecationWarning,
+ stacklevel=2)
+
+ self.data = persist(d)
+ logger.debug(1, "Using '%s' as the persistent data cache",
+ self.data.filename)
def addDomain(self, domain):
"""
- Should be called before any domain is used
- Creates it if it doesn't exist.
+ Add a domain (pending deprecation)
"""
- self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" % domain)
+ return self.data[domain]
def delDomain(self, domain):
"""
Removes a domain and all the data it contains
"""
- self._execute("DROP TABLE IF EXISTS %s;" % domain)
+ del self.data[domain]
def getKeyValues(self, domain):
"""
Return a list of key + value pairs for a domain
"""
- ret = {}
- data = self._execute("SELECT key, value from %s;" % domain)
- for row in data:
- ret[str(row[0])] = str(row[1])
-
- return ret
+ return self.data[domain].items()
def getValue(self, domain, key):
"""
Return the value of a key for a domain
"""
- data = self._execute("SELECT * from %s where key=?;" % domain, [key])
- for row in data:
- return row[1]
+ return self.data[domain][key]
def setValue(self, domain, key, value):
"""
Sets the value of a key for a domain
"""
- data = self._execute("SELECT * from %s where key=?;" % domain, [key])
- rows = 0
- for row in data:
- rows = rows + 1
- if rows:
- self._execute("UPDATE %s SET value=? WHERE key=?;" % domain, [value, key])
- else:
- self._execute("INSERT into %s(key, value) values (?, ?);" % domain, [key, value])
+ self.data[domain][key] = value
def delValue(self, domain, key):
"""
Deletes a key/value pair
"""
- self._execute("DELETE from %s where key=?;" % domain, [key])
-
- #
- # We wrap the sqlite execute calls as on contended machines or single threaded
- # systems we can have multiple processes trying to access the DB at once and it seems
- # sqlite sometimes doesn't wait for the timeout. We therefore loop but put in an
- # emergency brake too
- #
- def _execute(self, *query):
- count = 0
- while True:
- try:
- ret = self.cursor.execute(*query)
- #print "Had to retry %s times" % count
- return ret
- except sqlite3.OperationalError as e:
- if 'database is locked' in str(e) and count < 500:
- count = count + 1
- continue
- raise
+ del self.data[domain][key]
+
+
+def persist(d):
+ """Convenience factory for construction of SQLData based upon metadata"""
+ cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
+ bb.data.getVar("CACHE", d, True))
+ if not cachedir:
+ logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
+ sys.exit(1)
+
+ cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
+ return SQLData(cachefile)
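A usage sketch (not part of the patch) of the new persist() factory; here 'd' stands for the usual metadata object with PERSISTENT_DIR or CACHE set:

    import bb.persist_data

    pd = bb.persist_data.persist(d)      # returns an SQLData instance
    revs = pd["SRCREV"]                  # SQLTable, created on first access
    revs["some-recipe"] = "abc123"
    print(revs["some-recipe"])           # abc123
    print("some-recipe" in revs)         # True
    print(len(revs))                     # number of keys stored in this domain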
diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py
new file mode 100644
index 000000000..808cd60f9
--- /dev/null
+++ b/bitbake/lib/bb/process.py
@@ -0,0 +1,109 @@
+import logging
+import signal
+import subprocess
+
+logger = logging.getLogger('BitBake.Process')
+
+def subprocess_setup():
+ # Python installs a SIGPIPE handler by default. This is usually not what
+ # non-Python subprocesses expect.
+ signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+
+class CmdError(RuntimeError):
+ def __init__(self, command, message=None):
+ self.command = command
+ self.message = message
+
+ def __str__(self):
+ if not isinstance(self.command, basestring):
+ cmd = subprocess.list2cmdline(self.command)
+ else:
+ cmd = self.command
+
+ msg = "Execution of '%s' failed" % cmd
+ if self.message:
+ msg += ': %s' % self.message
+ return msg
+
+class NotFoundError(CmdError):
+ def __str__(self):
+ return CmdError.__str__(self) + ": command not found"
+
+class ExecutionError(CmdError):
+ def __init__(self, command, exitcode, stdout = None, stderr = None):
+ CmdError.__init__(self, command)
+ self.exitcode = exitcode
+ self.stdout = stdout
+ self.stderr = stderr
+
+ def __str__(self):
+ message = ""
+ if self.stderr:
+ message += self.stderr
+ if self.stdout:
+ message += self.stdout
+ if message:
+ message = ":\n" + message
+ return (CmdError.__str__(self) +
+ " with exit code %s" % self.exitcode + message)
+
+class Popen(subprocess.Popen):
+ defaults = {
+ "close_fds": True,
+ "preexec_fn": subprocess_setup,
+ "stdout": subprocess.PIPE,
+ "stderr": subprocess.STDOUT,
+ "stdin": subprocess.PIPE,
+ "shell": False,
+ }
+
+ def __init__(self, *args, **kwargs):
+ options = dict(self.defaults)
+ options.update(kwargs)
+ subprocess.Popen.__init__(self, *args, **options)
+
+def _logged_communicate(pipe, log, input):
+ if pipe.stdin:
+ if input is not None:
+ pipe.stdin.write(input)
+ pipe.stdin.close()
+
+ bufsize = 512
+ outdata, errdata = [], []
+ while pipe.poll() is None:
+ if pipe.stdout is not None:
+ data = pipe.stdout.read(bufsize)
+ if data is not None:
+ outdata.append(data)
+ log.write(data)
+
+ if pipe.stderr is not None:
+ data = pipe.stderr.read(bufsize)
+ if data is not None:
+ errdata.append(data)
+ log.write(data)
+ return ''.join(outdata), ''.join(errdata)
+
+def run(cmd, input=None, log=None, **options):
+ """Convenience function to run a command and return its output, raising an
+ exception when the command fails"""
+
+ if isinstance(cmd, basestring) and not "shell" in options:
+ options["shell"] = True
+
+ try:
+ pipe = Popen(cmd, **options)
+ except OSError, exc:
+ if exc.errno == 2:
+ raise NotFoundError(cmd)
+ else:
+ raise CmdError(cmd, exc)
+
+ if log:
+ stdout, stderr = _logged_communicate(pipe, log, input)
+ else:
+ stdout, stderr = pipe.communicate(input)
+
+ if pipe.returncode != 0:
+ raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
+ return stdout, stderr
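A usage sketch of the run() helper above and its exception hierarchy (the commands are arbitrary examples):

    import bb.process

    stdout, stderr = bb.process.run("echo hello")   # strings go through a shell
    print(stdout)                                   # hello

    try:
        bb.process.run(["false"])                   # sequences are executed directly
    except bb.process.ExecutionError as exc:
        print(exc)                                  # message includes the exit code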
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py
index 58326f039..dcba9ae25 100644
--- a/bitbake/lib/bb/providers.py
+++ b/bitbake/lib/bb/providers.py
@@ -22,9 +22,12 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import re
+import logging
from bb import data, utils
import bb
+logger = logging.getLogger("BitBake.Provider")
+
class NoProvider(Exception):
"""Exception raised when no provider of a build dependency can be found"""
@@ -120,9 +123,9 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
if item:
itemstr = " (for item %s)" % item
if preferred_file is None:
- bb.msg.note(1, bb.msg.domain.Provider, "preferred version %s of %s not available%s" % (pv_str, pn, itemstr))
+ logger.info("preferred version %s of %s not available%s", pv_str, pn, itemstr)
else:
- bb.msg.debug(1, bb.msg.domain.Provider, "selecting %s as PREFERRED_VERSION %s of package %s%s" % (preferred_file, pv_str, pn, itemstr))
+ logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
return (preferred_ver, preferred_file)
@@ -189,7 +192,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
pkg_pn[pn] = []
pkg_pn[pn].append(p)
- bb.msg.debug(1, bb.msg.domain.Provider, "providers for %s are: %s" % (item, pkg_pn.keys()))
+ logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys())
# First add PREFERRED_VERSIONS
for pn in pkg_pn:
@@ -206,7 +209,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
eligible.append(preferred_versions[pn][1])
if len(eligible) == 0:
- bb.msg.error(bb.msg.domain.Provider, "no eligible providers for %s" % item)
+ logger.error("no eligible providers for %s", item)
return 0
# If pn == item, give it a slight default preference
@@ -242,13 +245,13 @@ def filterProviders(providers, item, cfgData, dataCache):
for p in eligible:
pn = dataCache.pkg_fn[p]
if dataCache.preferred[item] == pn:
- bb.msg.note(2, bb.msg.domain.Provider, "selecting %s to satisfy %s due to PREFERRED_PROVIDERS" % (pn, item))
+ logger.verbose("selecting %s to satisfy %s due to PREFERRED_PROVIDERS", pn, item)
eligible.remove(p)
eligible = [p] + eligible
foundUnique = True
break
- bb.msg.debug(1, bb.msg.domain.Provider, "sorted providers for %s are: %s" % (item, eligible))
+ logger.debug(1, "sorted providers for %s are: %s", item, eligible)
return eligible, foundUnique
@@ -264,27 +267,31 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
# Should use dataCache.preferred here?
preferred = []
preferred_vars = []
+ pns = {}
+ for p in eligible:
+ pns[dataCache.pkg_fn[p]] = p
for p in eligible:
pn = dataCache.pkg_fn[p]
provides = dataCache.pn_provides[pn]
for provide in provides:
- bb.msg.note(2, bb.msg.domain.Provider, "checking PREFERRED_PROVIDER_%s" % (provide))
prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1)
- if prefervar == pn:
+ logger.verbose("checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
+ if prefervar in pns and pns[prefervar] not in preferred:
var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
- bb.msg.note(2, bb.msg.domain.Provider, "selecting %s to satisfy runtime %s due to %s" % (pn, item, var))
+ logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var)
preferred_vars.append(var)
- eligible.remove(p)
- eligible = [p] + eligible
- preferred.append(p)
+ pref = pns[prefervar]
+ eligible.remove(pref)
+ eligible = [pref] + eligible
+ preferred.append(pref)
break
numberPreferred = len(preferred)
if numberPreferred > 1:
- bb.msg.error(bb.msg.domain.Provider, "Conflicting PREFERRED_PROVIDER entries were found which resulted in an attempt to select multiple providers (%s) for runtime dependecy %s\nThe entries resulting in this conflict were: %s" % (preferred, item, preferred_vars))
+ logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s", item, preferred, preferred_vars)
- bb.msg.debug(1, bb.msg.domain.Provider, "sorted providers for %s are: %s" % (item, eligible))
+ logger.debug(1, "sorted providers for %s are: %s", item, eligible)
return eligible, numberPreferred
@@ -314,7 +321,7 @@ def getRuntimeProviders(dataCache, rdepend):
try:
regexp = re.compile(pattern)
except:
- bb.msg.error(bb.msg.domain.Provider, "Error parsing re expression: %s" % pattern)
+ logger.error("Error parsing regular expression '%s'", pattern)
raise
regexp_cache[pattern] = regexp
if regexp.match(rdepend):
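
The reworked runtime block above now maps each eligible provider file back to its recipe name (pn), so that PREFERRED_PROVIDER_<provide>, which names a pn, can promote the correct file. A standalone sketch of just that reordering (file names and pns are made up):

    def promote_preferred(eligible, pkg_fn, prefervar):
        # eligible: list of provider files; pkg_fn: file -> pn; prefervar: preferred pn
        pns = dict((pkg_fn[p], p) for p in eligible)
        if prefervar in pns:
            pref = pns[prefervar]
            eligible.remove(pref)
            eligible.insert(0, pref)
        return eligible

    # promote_preferred(["a.bb", "b.bb"], {"a.bb": "busybox", "b.bb": "toybox"}, "toybox")
    # -> ["b.bb", "a.bb"]
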
diff --git a/bitbake/lib/pysh/__init__.py b/bitbake/lib/bb/pysh/__init__.py
index e69de29bb..e69de29bb 100644
--- a/bitbake/lib/pysh/__init__.py
+++ b/bitbake/lib/bb/pysh/__init__.py
diff --git a/bitbake/lib/pysh/builtin.py b/bitbake/lib/bb/pysh/builtin.py
index 25ad22eb7..25ad22eb7 100644
--- a/bitbake/lib/pysh/builtin.py
+++ b/bitbake/lib/bb/pysh/builtin.py
diff --git a/bitbake/lib/pysh/interp.py b/bitbake/lib/bb/pysh/interp.py
index efe5181e1..efe5181e1 100644
--- a/bitbake/lib/pysh/interp.py
+++ b/bitbake/lib/bb/pysh/interp.py
diff --git a/bitbake/lib/pysh/lsprof.py b/bitbake/lib/bb/pysh/lsprof.py
index b1831c22a..b1831c22a 100644
--- a/bitbake/lib/pysh/lsprof.py
+++ b/bitbake/lib/bb/pysh/lsprof.py
diff --git a/bitbake/lib/pysh/pysh.py b/bitbake/lib/bb/pysh/pysh.py
index b4e6145b5..b4e6145b5 100644
--- a/bitbake/lib/pysh/pysh.py
+++ b/bitbake/lib/bb/pysh/pysh.py
diff --git a/bitbake/lib/pysh/pyshlex.py b/bitbake/lib/bb/pysh/pyshlex.py
index b977b5e86..b977b5e86 100644
--- a/bitbake/lib/pysh/pyshlex.py
+++ b/bitbake/lib/bb/pysh/pyshlex.py
diff --git a/bitbake/lib/pysh/pyshyacc.py b/bitbake/lib/bb/pysh/pyshyacc.py
index 3d9510c0c..e8e80aac4 100644
--- a/bitbake/lib/pysh/pyshyacc.py
+++ b/bitbake/lib/bb/pysh/pyshyacc.py
@@ -7,6 +7,7 @@
"""PLY grammar file.
"""
+import os.path
import sys
import pyshlex
@@ -648,7 +649,10 @@ def p_error(p):
try:
import pyshtables
except ImportError:
- yacc.yacc(tabmodule = 'pyshtables')
+ outputdir = os.path.dirname(__file__)
+ if not os.access(outputdir, os.W_OK):
+ outputdir = ''
+ yacc.yacc(tabmodule = 'pyshtables', outputdir = outputdir, debug = 0)
else:
yacc.yacc(tabmodule = 'pysh.pyshtables', write_tables = 0, debug = 0)
@@ -704,6 +708,9 @@ def format_commands(v):
if v.reverse_status:
name = '!' + name
return [name, format_commands(v.commands)]
+ elif isinstance(v, Case):
+ name = ['Case']
+ name += [v.name, format_commands(v.items)]
elif isinstance(v, SimpleCommand):
name = ['SimpleCommand']
if v.words:
diff --git a/bitbake/lib/pysh/sherrors.py b/bitbake/lib/bb/pysh/sherrors.py
index 1d5bd53b3..1d5bd53b3 100644
--- a/bitbake/lib/pysh/sherrors.py
+++ b/bitbake/lib/bb/pysh/sherrors.py
diff --git a/bitbake/lib/pysh/subprocess_fix.py b/bitbake/lib/bb/pysh/subprocess_fix.py
index 46eca2280..46eca2280 100644
--- a/bitbake/lib/pysh/subprocess_fix.py
+++ b/bitbake/lib/bb/pysh/subprocess_fix.py
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index b4134f826..a3f444c2a 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -22,19 +22,18 @@ Handles preparation and execution of a queue of tasks
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-import bb, os, sys
-import subprocess
-from bb import msg, data, event
+import copy
+import os
+import sys
import signal
import stat
import fcntl
-import copy
+import logging
+import bb
+from bb import msg, data, event
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
+bblogger = logging.getLogger("BitBake")
+logger = logging.getLogger("BitBake.RunQueue")
class RunQueueStats:
"""
@@ -87,21 +86,28 @@ class RunQueueScheduler(object):
"""
self.rq = runqueue
self.rqdata = rqdata
- numTasks = len(self.rq.runq_fnid)
+ numTasks = len(self.rqdata.runq_fnid)
self.prio_map = []
self.prio_map.extend(range(numTasks))
- def next(self):
+ def next_buildable_task(self):
"""
Return the id of the first task we find that is buildable
"""
- for task1 in range(len(self.rqdata.runq_fnid)):
- task = self.prio_map[task1]
- if self.rq.runq_running[task] == 1:
+ for tasknum in xrange(len(self.rqdata.runq_fnid)):
+ taskid = self.prio_map[tasknum]
+ if self.rq.runq_running[taskid] == 1:
continue
- if self.rq.runq_buildable[task] == 1:
- return task
+ if self.rq.runq_buildable[taskid] == 1:
+ return taskid
+
+ def next(self):
+ """
+ Return the id of the task we should build next
+ """
+ if self.rq.stats.active < self.rq.number_tasks:
+ return self.next_buildable_task()
class RunQueueSchedulerSpeed(RunQueueScheduler):
"""
@@ -114,13 +120,12 @@ class RunQueueSchedulerSpeed(RunQueueScheduler):
"""
The priority map is sorted by task weight.
"""
- from copy import deepcopy
self.rq = runqueue
self.rqdata = rqdata
- sortweight = sorted(deepcopy(self.rqdata.runq_weight))
- copyweight = deepcopy(self.rqdata.runq_weight)
+ sortweight = sorted(copy.deepcopy(self.rqdata.runq_weight))
+ copyweight = copy.deepcopy(self.rqdata.runq_weight)
self.prio_map = []
for weight in sortweight:
@@ -142,12 +147,11 @@ class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
def __init__(self, runqueue, rqdata):
RunQueueSchedulerSpeed.__init__(self, runqueue, rqdata)
- from copy import deepcopy
#FIXME - whilst this groups all fnids together it does not reorder the
#fnid groups optimally.
- basemap = deepcopy(self.prio_map)
+ basemap = copy.deepcopy(self.prio_map)
self.prio_map = []
while (len(basemap) > 0):
entry = basemap.pop(0)
@@ -201,7 +205,7 @@ class RunQueueData:
return "%s, %s" % (fn, taskname)
def get_task_id(self, fnid, taskname):
- for listid in range(len(self.runq_fnid)):
+ for listid in xrange(len(self.runq_fnid)):
if self.runq_fnid[listid] == fnid and self.runq_task[listid] == taskname:
return listid
return None
@@ -223,7 +227,7 @@ class RunQueueData:
"""
lowest = 0
new_chain = []
- for entry in range(len(chain)):
+ for entry in xrange(len(chain)):
if chain[entry] < chain[lowest]:
lowest = entry
new_chain.extend(chain[lowest:])
@@ -236,7 +240,7 @@ class RunQueueData:
"""
if len(chain1) != len(chain2):
return False
- for index in range(len(chain1)):
+ for index in xrange(len(chain1)):
if chain1[index] != chain2[index]:
return False
return True
@@ -281,7 +285,7 @@ class RunQueueData:
if dep in explored_deps[revdep]:
scan = True
if scan:
- find_chains(revdep, deepcopy(prev_chain))
+ find_chains(revdep, copy.deepcopy(prev_chain))
for dep in explored_deps[revdep]:
if dep not in total_deps:
total_deps.append(dep)
@@ -298,7 +302,7 @@ class RunQueueData:
Calculate a number representing the "weight" of each task. Heavier weighted tasks
have more dependencies and hence should be executed sooner for maximum speed.
- This function also sanity checks the task list finding tasks that its not
+ This function also sanity checks the task list finding tasks that are not
possible to execute due to circular dependencies.
"""
@@ -307,7 +311,7 @@ class RunQueueData:
deps_left = []
task_done = []
- for listid in range(numTasks):
+ for listid in xrange(numTasks):
task_done.append(False)
weight.append(0)
deps_left.append(len(self.runq_revdeps[listid]))
@@ -331,17 +335,17 @@ class RunQueueData:
# Circular dependency sanity check
problem_tasks = []
- for task in range(numTasks):
+ for task in xrange(numTasks):
if task_done[task] is False or deps_left[task] != 0:
problem_tasks.append(task)
- bb.msg.debug(2, bb.msg.domain.RunQueue, "Task %s (%s) is not buildable\n" % (task, self.get_user_idstring(task)))
- bb.msg.debug(2, bb.msg.domain.RunQueue, "(Complete marker was %s and the remaining dependency count was %s)\n\n" % (task_done[task], deps_left[task]))
+ logger.debug(2, "Task %s (%s) is not buildable", task, self.get_user_idstring(task))
+ logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n", task_done[task], deps_left[task])
if problem_tasks:
message = "Unbuildable tasks were found.\n"
message = message + "These are usually caused by circular dependencies and any circular dependency chains found will be printed below. Increase the debug level to see a list of unbuildable tasks.\n\n"
message = message + "Identifying dependency loops (this may take a short while)...\n"
- bb.msg.error(bb.msg.domain.RunQueue, message)
+ logger.error(message)
msgs = self.circular_depchains_handler(problem_tasks)
@@ -369,7 +373,7 @@ class RunQueueData:
# Nothing to do
return 0
- bb.msg.note(1, bb.msg.domain.RunQueue, "Preparing runqueue")
+ logger.info("Preparing runqueue")
# Step A - Work out a list of tasks to run
#
@@ -409,14 +413,14 @@ class RunQueueData:
if taskid is not None:
depends.append(taskid)
- for task in range(len(taskData.tasks_name)):
+ for task in xrange(len(taskData.tasks_name)):
depends = []
recrdepends = []
fnid = taskData.tasks_fnid[task]
fn = taskData.fn_index[fnid]
task_deps = self.dataCache.task_deps[fn]
- bb.msg.debug(2, bb.msg.domain.RunQueue, "Processing %s:%s" %(fn, taskData.tasks_name[task]))
+ logger.debug(2, "Processing %s:%s", fn, taskData.tasks_name[task])
if fnid not in taskData.failed_fnids:
@@ -454,7 +458,9 @@ class RunQueueData:
depdata = taskData.build_targets[depid][0]
if depdata is not None:
dep = taskData.fn_index[depdata]
- taskid = taskData.gettask_id(dep, idependtask)
+ taskid = taskData.gettask_id(dep, idependtask, False)
+ if taskid is None:
+ bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s in %s depends upon nonexistent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, dep))
depends.append(taskid)
if depdata != fnid:
tdepends_fnid[fnid].add(taskid)
@@ -474,7 +480,7 @@ class RunQueueData:
# Remove all self references
if task in depends:
newdep = []
- bb.msg.debug(2, bb.msg.domain.RunQueue, "Task %s (%s %s) contains self reference! %s" % (task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends))
+ logger.debug(2, "Task %s (%s %s) contains self reference! %s", task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends)
for dep in depends:
if task != dep:
newdep.append(dep)
@@ -498,7 +504,7 @@ class RunQueueData:
# Algorithm is O(tasks) + O(tasks)*O(fnids)
#
reccumdepends = {}
- for task in range(len(self.runq_fnid)):
+ for task in xrange(len(self.runq_fnid)):
fnid = self.runq_fnid[task]
if fnid not in reccumdepends:
if fnid in tdepends_fnid:
@@ -506,7 +512,7 @@ class RunQueueData:
else:
reccumdepends[fnid] = set()
reccumdepends[fnid].update(self.runq_depends[task])
- for task in range(len(self.runq_fnid)):
+ for task in xrange(len(self.runq_fnid)):
taskfnid = self.runq_fnid[task]
for fnid in reccumdepends:
if task in reccumdepends[fnid]:
@@ -519,7 +525,7 @@ class RunQueueData:
#
# e.g. do_sometask[recrdeptask] = "do_someothertask"
# (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
- for task in range(len(self.runq_fnid)):
+ for task in xrange(len(self.runq_fnid)):
if len(runq_recrdepends[task]) > 0:
taskfnid = self.runq_fnid[task]
for dep in reccumdepends[taskfnid]:
@@ -536,7 +542,7 @@ class RunQueueData:
# as active too. If the task is to be 'forced', clear its stamp. Once
# all active tasks are marked, prune the ones we don't need.
- bb.msg.note(2, bb.msg.domain.RunQueue, "Marking Active Tasks")
+ logger.verbose("Marking Active Tasks")
def mark_active(listid, depth):
"""
@@ -567,11 +573,6 @@ class RunQueueData:
fn = taskData.fn_index[fnid]
self.target_pairs.append((fn, target[1]))
- # Remove stamps for targets if force mode active
- if self.cooker.configuration.force:
- bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (target[1], fn))
- bb.build.del_stamp(target[1], self.dataCache, fn)
-
if fnid in taskData.failed_fnids:
continue
@@ -588,7 +589,7 @@ class RunQueueData:
maps = []
delcount = 0
- for listid in range(len(self.runq_fnid)):
+ for listid in xrange(len(self.runq_fnid)):
if runq_build[listid-delcount] == 1:
maps.append(listid-delcount)
else:
@@ -612,11 +613,11 @@ class RunQueueData:
else:
bb.msg.fatal(bb.msg.domain.RunQueue, "No active tasks and not in --continue mode?! Please report this bug.")
- bb.msg.note(2, bb.msg.domain.RunQueue, "Pruned %s inactive tasks, %s left" % (delcount, len(self.runq_fnid)))
+ logger.verbose("Pruned %s inactive tasks, %s left", delcount, len(self.runq_fnid))
# Remap the dependencies to account for the deleted tasks
# Check we didn't delete a task we depend on
- for listid in range(len(self.runq_fnid)):
+ for listid in xrange(len(self.runq_fnid)):
newdeps = []
origdeps = self.runq_depends[listid]
for origdep in origdeps:
@@ -625,17 +626,17 @@ class RunQueueData:
newdeps.append(maps[origdep])
self.runq_depends[listid] = set(newdeps)
- bb.msg.note(2, bb.msg.domain.RunQueue, "Assign Weightings")
+ logger.verbose("Assign Weightings")
# Generate a list of reverse dependencies to ease future calculations
- for listid in range(len(self.runq_fnid)):
+ for listid in xrange(len(self.runq_fnid)):
for dep in self.runq_depends[listid]:
self.runq_revdeps[dep].add(listid)
# Identify tasks at the end of dependency chains
# Error on circular dependency loops (length two)
endpoints = []
- for listid in range(len(self.runq_fnid)):
+ for listid in xrange(len(self.runq_fnid)):
revdeps = self.runq_revdeps[listid]
if len(revdeps) == 0:
endpoints.append(listid)
@@ -644,7 +645,7 @@ class RunQueueData:
#self.dump_data(taskData)
bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep], taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid]))
- bb.msg.note(2, bb.msg.domain.RunQueue, "Compute totals (have %s endpoint(s))" % len(endpoints))
+ logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
# Calculate task weights
# Check of higher length circular dependencies
@@ -653,7 +654,7 @@ class RunQueueData:
# Sanity Check - Check for multiple tasks building the same provider
prov_list = {}
seen_fn = []
- for task in range(len(self.runq_fnid)):
+ for task in xrange(len(self.runq_fnid)):
fn = taskData.fn_index[self.runq_fnid[task]]
if fn in seen_fn:
continue
@@ -667,9 +668,7 @@ class RunQueueData:
for prov in prov_list:
if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist:
error = True
- bb.msg.error(bb.msg.domain.RunQueue, "Multiple .bb files are due to be built which each provide %s (%s).\n This usually means one provides something the other doesn't and should." % (prov, " ".join(prov_list[prov])))
- #if error:
- # bb.msg.fatal(bb.msg.domain.RunQueue, "Corrupted metadata configuration detected, aborting...")
+ logger.error("Multiple .bb files are due to be built which each provide %s (%s).\n This usually means one provides something the other doesn't and should.", prov, " ".join(prov_list[prov]))
# Create a whitelist usable by the stamp checks
@@ -683,20 +682,15 @@ class RunQueueData:
stampfnwhitelist.append(fn)
self.stampfnwhitelist = stampfnwhitelist
- #self.dump_data(taskData)
-
# Iterate over the task list looking for tasks with a 'setscene' function
-
self.runq_setscene = []
for task in range(len(self.runq_fnid)):
setscene = taskData.gettask_id(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task] + "_setscene", False)
if not setscene:
continue
- #bb.note("Found setscene for %s %s" % (self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task]))
self.runq_setscene.append(task)
# Iterate over the task list and call into the siggen code
-
dealtwith = set()
todeal = set(range(len(self.runq_fnid)))
while len(todeal) > 0:
@@ -709,21 +703,24 @@ class RunQueueData:
procdep.append(self.taskData.fn_index[self.runq_fnid[dep]] + "." + self.runq_task[dep])
self.runq_hash[task] = bb.parse.siggen.get_taskhash(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task], procdep, self.dataCache)
- hashdata = {}
- hashdata["hashes"] = {}
- hashdata["deps"] = {}
- for task in range(len(self.runq_fnid)):
- hashdata["hashes"][self.taskData.fn_index[self.runq_fnid[task]] + "." + self.runq_task[task]] = self.runq_hash[task]
+ self.hashes = {}
+ self.hash_deps = {}
+ for task in xrange(len(self.runq_fnid)):
+ identifier = '%s.%s' % (self.taskData.fn_index[self.runq_fnid[task]],
+ self.runq_task[task])
+ self.hashes[identifier] = self.runq_hash[task]
deps = []
for dep in self.runq_depends[task]:
- deps.append(self.taskData.fn_index[self.runq_fnid[dep]] + "." + self.runq_task[dep])
- hashdata["deps"][self.taskData.fn_index[self.runq_fnid[task]] + "." + self.runq_task[task]] = deps
+ depidentifier = '%s.%s' % (self.taskData.fn_index[self.runq_fnid[dep]],
+ self.runq_task[dep])
+ deps.append(depidentifier)
+ self.hash_deps[identifier] = deps
- hashdata["msg-debug"] = self.cooker.configuration.debug
- hashdata["msg-debug-domains"] = self.cooker.configuration.debug_domains
- hashdata["verbose"] = self.cooker.configuration.verbose
-
- self.hashdata = hashdata
+ # Remove stamps for targets if force mode active
+ if self.cooker.configuration.force:
+ for (fn, target) in self.target_pairs:
+ logger.verbose("Remove stamp %s, %s", target, fn)
+ bb.build.del_stamp(target, self.dataCache, fn)
return len(self.runq_fnid)
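
The hashdata dictionary is replaced above by two plain tables keyed on a "<recipe file>.<task>" identifier. A small sketch of that convention with made-up values (the worker later re-exports these as BBHASH_<id> and BBHASHDEPS_<id> datastore variables, as shown further down in this patch):

    hashes = {}
    hash_deps = {}
    # (fn, taskname, taskhash, dependency identifiers) - illustrative values only
    runq = [("/meta/recipes/foo_1.0.bb", "do_compile", "deadbeef",
             ["/meta/recipes/foo_1.0.bb.do_configure"])]
    for fn, taskname, taskhash, deps in runq:
        identifier = '%s.%s' % (fn, taskname)
        hashes[identifier] = taskhash
        hash_deps[identifier] = deps
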
@@ -731,25 +728,25 @@ class RunQueueData:
"""
Dump some debug information on the internal data structures
"""
- bb.msg.debug(3, bb.msg.domain.RunQueue, "run_tasks:")
- for task in range(len(self.rqdata.runq_task)):
- bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
- taskQueue.fn_index[self.rqdata.runq_fnid[task]],
- self.rqdata.runq_task[task],
- self.rqdata.runq_weight[task],
- self.rqdata.runq_depends[task],
- self.rqdata.runq_revdeps[task]))
-
- bb.msg.debug(3, bb.msg.domain.RunQueue, "sorted_tasks:")
- for task1 in range(len(self.rqdata.runq_task)):
+ logger.debug(3, "run_tasks:")
+ for task in xrange(len(self.rqdata.runq_task)):
+ logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
+ taskQueue.fn_index[self.rqdata.runq_fnid[task]],
+ self.rqdata.runq_task[task],
+ self.rqdata.runq_weight[task],
+ self.rqdata.runq_depends[task],
+ self.rqdata.runq_revdeps[task])
+
+ logger.debug(3, "sorted_tasks:")
+ for task1 in xrange(len(self.rqdata.runq_task)):
if task1 in self.prio_map:
task = self.prio_map[task1]
- bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
- taskQueue.fn_index[self.rqdata.runq_fnid[task]],
- self.rqdata.runq_task[task],
- self.rqdata.runq_weight[task],
- self.rqdata.runq_depends[task],
- self.rqdata.runq_revdeps[task]))
+ logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
+ taskQueue.fn_index[self.rqdata.runq_fnid[task]],
+ self.rqdata.runq_task[task],
+ self.rqdata.runq_weight[task],
+ self.rqdata.runq_depends[task],
+ self.rqdata.runq_revdeps[task])
class RunQueue:
def __init__(self, cooker, cfgData, dataCache, taskData, targets):
@@ -777,7 +774,7 @@ class RunQueue:
if self.stamppolicy == "whitelist":
stampwhitelist = self.rqdata.stampfnwhitelist
- for task in range(len(self.rqdata.runq_fnid)):
+ for task in xrange(len(self.rqdata.runq_fnid)):
unchecked[task] = ""
if len(self.rqdata.runq_depends[task]) == 0:
buildable.append(task)
@@ -792,12 +789,12 @@ class RunQueue:
if revdep in unchecked:
buildable.append(revdep)
- for task in range(len(self.rqdata.runq_fnid)):
+ for task in xrange(len(self.rqdata.runq_fnid)):
if task not in unchecked:
continue
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
taskname = self.rqdata.runq_task[task]
- stampfile = "%s.%s" % (self.rqdata.dataCache.stamp[fn], taskname)
+ stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
# If the stamp is missing its not current
if not os.access(stampfile, os.F_OK):
del unchecked[task]
@@ -818,7 +815,7 @@ class RunQueue:
if task in unchecked:
fn = self.taskData.fn_index[self.rqdata.runq_fnid[task]]
taskname = self.rqdata.runq_task[task]
- stampfile = "%s.%s" % (self.rqdata.dataCache.stamp[fn], taskname)
+ stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
iscurrent = True
t1 = os.stat(stampfile)[stat.ST_MTIME]
@@ -826,7 +823,7 @@ class RunQueue:
if iscurrent:
fn2 = self.taskData.fn_index[self.rqdata.runq_fnid[dep]]
taskname2 = self.rqdata.runq_task[dep]
- stampfile2 = "%s.%s" % (self.rqdata.dataCache.stamp[fn2], taskname2)
+ stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
if dep in notcurrent:
iscurrent = False
@@ -877,20 +874,20 @@ class RunQueue:
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
if taskname is None:
taskname = self.rqdata.runq_task[task]
-
- stampfile = bb.parse.siggen.stampfile(self.rqdata.dataCache.stamp[fn], taskname, self.rqdata.runq_hash[task])
+
+ stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
# If the stamp is missing its not current
if not os.access(stampfile, os.F_OK):
- bb.msg.debug(2, bb.msg.domain.RunQueue, "Stampfile %s not available\n" % stampfile)
+ logger.debug(2, "Stampfile %s not available", stampfile)
return False
# If its a 'nostamp' task, it's not current
taskdep = self.rqdata.dataCache.task_deps[fn]
if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
- bb.msg.debug(2, bb.msg.domain.RunQueue, "%s.%s is nostamp\n" % (fn, taskname))
+ logger.debug(2, "%s.%s is nostamp\n", fn, taskname)
return False
- if taskname.endswith("_setscene"):
+ if taskname != "do_setscene" and taskname.endswith("_setscene"):
return True
iscurrent = True
@@ -899,18 +896,18 @@ class RunQueue:
if iscurrent:
fn2 = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[dep]]
taskname2 = self.rqdata.runq_task[dep]
- stampfile2 = bb.parse.siggen.stampfile(self.rqdata.dataCache.stamp[fn2], taskname2, self.rqdata.runq_hash[dep])
- stampfile3 = bb.parse.siggen.stampfile(self.rqdata.dataCache.stamp[fn2], taskname2 + "_setscene", self.rqdata.runq_hash[dep])
+ stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
+ stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCache, fn2)
t2 = get_timestamp(stampfile2)
t3 = get_timestamp(stampfile3)
if t3 and t3 > t2:
continue
if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
if not t2:
- bb.msg.debug(2, bb.msg.domain.RunQueue, "Stampfile %s does not exist" % (stampfile2))
+ logger.debug(2, 'Stampfile %s does not exist', stampfile2)
iscurrent = False
if t1 < t2:
- bb.msg.debug(2, bb.msg.domain.RunQueue, "Stampfile %s < %s" % (stampfile, stampfile2))
+ logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
iscurrent = False
return iscurrent
@@ -941,7 +938,7 @@ class RunQueue:
retval = self.rqexe.execute()
if self.state is runQueueRunInit:
- bb.msg.note(1, bb.msg.domain.RunQueue, "Executing RunQueue Tasks")
+ logger.info("Executing RunQueue Tasks")
self.rqexe = RunQueueExecuteTasks(self)
self.state = runQueueRunning
@@ -960,7 +957,7 @@ class RunQueue:
if self.state is runQueueComplete:
# All done
- bb.msg.note(1, bb.msg.domain.RunQueue, "Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed." % (self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed))
+ logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
return False
if self.state is runQueueChildProcess:
@@ -982,8 +979,8 @@ class RunQueue:
bb.note("Reparsing files to collect dependency data")
for task in range(len(self.rqdata.runq_fnid)):
if self.rqdata.runq_fnid[task] not in done:
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- the_data = self.cooker.bb_cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data)
+ fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
+ the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data)
done.add(self.rqdata.runq_fnid[task])
bb.parse.siggen.dump_sigs(self.rqdata.dataCache)
@@ -1022,16 +1019,16 @@ class RunQueueExecute:
self.build_pipes[result[0]].close()
del self.build_pipes[result[0]]
if result[1] != 0:
- self.task_fail(task, result[1])
+ self.task_fail(task, result[1]>>8)
else:
self.task_complete(task)
def finish_now(self):
if self.stats.active:
- bb.msg.note(1, bb.msg.domain.RunQueue, "Sending SIGINT to remaining %s tasks" % self.stats.active)
+ logger.info("Sending SIGTERM to remaining %s tasks", self.stats.active)
for k, v in self.build_pids.iteritems():
try:
- os.kill(-k, signal.SIGINT)
+ os.kill(-k, signal.SIGTERM)
except:
pass
for pipe in self.build_pipes:
@@ -1055,8 +1052,8 @@ class RunQueueExecute:
self.rq.state = runQueueComplete
return
- def fork_off_task(self, fn, task, taskname):
- the_data = self.cooker.bb_cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data)
+ def fork_off_task(self, fn, task, taskname, quieterrors=False):
+ the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data)
env = bb.data.export_vars(the_data)
env = bb.data.export_envvars(env, the_data)
@@ -1070,55 +1067,59 @@ class RunQueueExecute:
fakedirs = (the_data.getVar("FAKEROOTDIRS", True) or "").split()
for p in fakedirs:
bb.mkdirhier(p)
- bb.msg.debug(2, bb.msg.domain.RunQueue, "Running %s:%s under fakeroot, state dir is %s" % (fn, taskname, fakedirs))
+ logger.debug(2, "Running %s:%s under fakeroot, state dir is %s" % (fn, taskname, fakedirs))
- env['BB_TASKHASH'] = self.rqdata.runq_hash[task]
env['PATH'] = self.cooker.configuration.initial_path
envbackup = os.environ.copy()
- os.environ = env
+ for e in envbackup:
+ os.unsetenv(e)
+ for e in env:
+ os.putenv(e, env[e])
sys.stdout.flush()
sys.stderr.flush()
-
try:
- pipeinfd, pipeoutfd = os.pipe()
- pipein = os.fdopen(pipeinfd, 'rb', 4096)
- pipeout = os.fdopen(pipeoutfd, 'wb', 4096)
-
+ pipein, pipeout = os.pipe()
+ pipein = os.fdopen(pipein, 'rb', 4096)
+ pipeout = os.fdopen(pipeout, 'wb', 0)
pid = os.fork()
except OSError as e:
bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror))
if pid == 0:
pipein.close()
+
# Save out the PID so that the event can include it the
# events
bb.event.worker_pid = os.getpid()
bb.event.worker_pipe = pipeout
bb.event.useStdout = False
+ # Child processes should send their messages to the UI
+ # process via the server process, not print them
+ # themselves
+ bblogger.handlers = [bb.event.LogHandler()]
+
self.rq.state = runQueueChildProcess
# Make the child the process group leader
os.setpgid(0, 0)
# No stdin
- newsi = os.open('/dev/null', os.O_RDWR)
+ newsi = os.open(os.devnull, os.O_RDWR)
os.dup2(newsi, sys.stdin.fileno())
- # Stdout to a logfile
- #logout = data.expand("${TMPDIR}/log/stdout.%s" % os.getpid(), self.cfgData, True)
- #mkdirhier(os.path.dirname(logout))
- #newso = open(logout, 'w')
- #os.dup2(newso.fileno(), sys.stdout.fileno())
- #os.dup2(newso.fileno(), sys.stderr.fileno())
- if taskname.endswith("_setscene"):
+ if quieterrors:
the_data.setVarFlag(taskname, "quieterrors", "1")
+ bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", self, self.cooker.configuration.data)
+ bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", fn, self.cooker.configuration.data)
bb.data.setVar("BB_WORKERCONTEXT", "1", the_data)
- bb.parse.siggen.set_taskdata(self.rqdata.hashdata["hashes"], self.rqdata.hashdata["deps"])
+ bb.parse.siggen.set_taskdata(self.rqdata.hashes, self.rqdata.hash_deps)
- for h in self.rqdata.hashdata["hashes"]:
- bb.data.setVar("BBHASH_%s" % h, self.rqdata.hashdata["hashes"][h], the_data)
- for h in self.rqdata.hashdata["deps"]:
- bb.data.setVar("BBHASHDEPS_%s" % h, self.rqdata.hashdata["deps"][h], the_data)
+ for h in self.rqdata.hashes:
+ bb.data.setVar("BBHASH_%s" % h, self.rqdata.hashes[h], the_data)
+ for h in self.rqdata.hash_deps:
+ bb.data.setVar("BBHASHDEPS_%s" % h, self.rqdata.hash_deps[h], the_data)
+
+ bb.data.setVar("BB_TASKHASH", self.rqdata.runq_hash[task], the_data)
ret = 0
try:
@@ -1128,7 +1129,10 @@ class RunQueueExecute:
except:
os._exit(1)
- os.environ = envbackup
+ for e in env:
+ os.unsetenv(e)
+ for e in envbackup:
+ os.putenv(e, envbackup[e])
return pid, pipein, pipeout
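
fork_off_task() above no longer rebinds os.environ to the exported dictionary; it unsets and re-sets each variable with os.unsetenv()/os.putenv(), presumably because rebinding the Python mapping does not change the underlying C environment that forked children inherit. A sketch of the same swap-and-restore pattern as a context manager (the name and usage are hypothetical):

    import os
    from contextlib import contextmanager

    @contextmanager
    def swapped_environment(env):
        # Save the current environment, push 'env' via unsetenv/putenv so the
        # change is visible to child processes, and restore it afterwards.
        envbackup = os.environ.copy()
        for key in envbackup:
            os.unsetenv(key)
        for key, value in env.items():
            os.putenv(key, value)
        try:
            yield
        finally:
            for key in env:
                os.unsetenv(key)
            for key, value in envbackup.items():
                os.putenv(key, value)
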
@@ -1136,9 +1140,10 @@ class RunQueueExecuteDummy(RunQueueExecute):
def __init__(self, rq):
self.rq = rq
self.stats = RunQueueStats(0)
+
def finish(self):
self.rq.state = runQueueComplete
- return
+ return
class RunQueueExecuteTasks(RunQueueExecute):
def __init__(self, rq):
@@ -1147,7 +1152,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.stats = RunQueueStats(len(self.rqdata.runq_fnid))
# Mark initial buildable tasks
- for task in range(self.stats.total):
+ for task in xrange(self.stats.total):
self.runq_running.append(0)
self.runq_complete.append(0)
if len(self.rqdata.runq_depends[task]) == 0:
@@ -1160,31 +1165,52 @@ class RunQueueExecuteTasks(RunQueueExecute):
found = True
while found:
found = False
- for task in range(self.stats.total):
+ for task in xrange(self.stats.total):
if task in self.rq.scenequeue_covered:
continue
if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
self.rq.scenequeue_covered.add(task)
found = True
- bb.debug("Full skip list %s" % self.rq.scenequeue_covered)
+ logger.debug(1, 'Full skip list %s', self.rq.scenequeue_covered)
for task in self.rq.scenequeue_covered:
self.task_skip(task)
event.fire(bb.event.StampUpdate(self.rqdata.target_pairs, self.rqdata.dataCache.stamp), self.cfgData)
- schedulers = [obj for obj in globals().itervalues()
- if type(obj) is type and issubclass(obj, RunQueueScheduler)]
+ schedulers = self.get_schedulers()
for scheduler in schedulers:
if self.scheduler == scheduler.name:
self.sched = scheduler(self, self.rqdata)
+ logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name)
break
else:
- bb.error("Invalid scheduler '%s', using default 'speed' scheduler" % self.scheduler)
- bb.error("Available schedulers: %s" % ", ".join(obj.name for obj in schedulers))
- self.sched = RunQueueSchedulerSpeed(self, self.rqdata)
+ bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
+ (self.scheduler, ", ".join(obj.name for obj in schedulers)))
+
+
+ def get_schedulers(self):
+ schedulers = set(obj for obj in globals().values()
+ if type(obj) is type and
+ issubclass(obj, RunQueueScheduler))
+ user_schedulers = bb.data.getVar("BB_SCHEDULERS", self.cfgData, True)
+ if user_schedulers:
+ for sched in user_schedulers.split():
+ if not "." in sched:
+ bb.note("Ignoring scheduler '%s' from BB_SCHEDULERS: not an import" % sched)
+ continue
+
+ modname, name = sched.rsplit(".", 1)
+ try:
+ module = __import__(modname, fromlist=(name,))
+ except ImportError, exc:
+ logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
+ raise SystemExit(1)
+ else:
+ schedulers.add(getattr(module, name))
+ return schedulers
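
get_schedulers() above lets BB_SCHEDULERS name additional "module.Class" entries to import alongside the built-in schedulers; the one whose name attribute matches the configured scheduler is instantiated. A sketch of what such a user module might contain (the module name "myschedulers" and the reverse ordering are hypothetical; it relies only on the RunQueueScheduler interface shown above):

    # myschedulers.py, reachable on the Python path and listed in
    # BB_SCHEDULERS = "myschedulers.ReverseScheduler"
    from bb.runqueue import RunQueueScheduler

    class ReverseScheduler(RunQueueScheduler):
        name = "reverse"

        def __init__(self, runqueue, rqdata):
            RunQueueScheduler.__init__(self, runqueue, rqdata)
            # Invert the default priority order: higher task ids are offered first.
            self.prio_map.reverse()
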
def task_completeoutright(self, task):
"""
@@ -1206,7 +1232,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.runq_buildable[revdep] = 1
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
taskname = self.rqdata.runq_task[revdep]
- bb.msg.debug(1, bb.msg.domain.RunQueue, "Marking task %s (%s, %s) as buildable" % (revdep, fn, taskname))
+ logger.debug(1, "Marking task %s (%s, %s) as buildable", revdep, fn, taskname)
def task_complete(self, task):
self.stats.taskCompleted()
@@ -1218,11 +1244,10 @@ class RunQueueExecuteTasks(RunQueueExecute):
Called when a task has failed
Updates the state engine with the failure
"""
- bb.msg.error(bb.msg.domain.RunQueue, "Task %s (%s) failed with %s" % (task, self.rqdata.get_user_idstring(task), exitcode))
self.stats.taskFailed()
fnid = self.rqdata.runq_fnid[task]
self.failed_fnids.append(fnid)
- bb.event.fire(runQueueTaskFailed(task, self.stats, self.rq), self.cfgData)
+ bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq), self.cfgData)
if self.rqdata.taskData.abort:
self.rq.state = runQueueCleanUp
@@ -1242,38 +1267,30 @@ class RunQueueExecuteTasks(RunQueueExecute):
# nothing to do
self.rq.state = runQueueCleanUp
- task = None
- if self.stats.active < self.number_tasks:
- task = self.sched.next()
+ task = self.sched.next()
if task is not None:
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
taskname = self.rqdata.runq_task[task]
if self.rq.check_stamp_task(task, taskname):
- bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp current task %s (%s)" % (task, self.rqdata.get_user_idstring(task)))
+ logger.debug(2, "Stamp current task %s (%s)", task,
+ self.rqdata.get_user_idstring(task))
self.task_skip(task)
return True
- bb.event.fire(runQueueTaskStarted(task, self.stats, self.rq), self.cfgData)
-
taskdep = self.rqdata.dataCache.task_deps[fn]
if 'noexec' in taskdep and taskname in taskdep['noexec']:
- bb.msg.note(1, bb.msg.domain.RunQueue,
- "Noexec task %d of %d (ID: %s, %s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
- self.stats.total,
- task,
- self.rqdata.get_user_idstring(task)))
+ startevent = runQueueTaskStarted(task, self.stats, self.rq,
+ noexec=True)
+ bb.event.fire(startevent, self.cfgData)
self.runq_running[task] = 1
self.stats.taskActive()
bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
self.task_complete(task)
return True
-
- bb.msg.note(1, bb.msg.domain.RunQueue,
- "Running task %d of %d (ID: %s, %s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
- self.stats.total,
- task,
- self.rqdata.get_user_idstring(task)))
+ else:
+ startevent = runQueueTaskStarted(task, self.stats, self.rq)
+ bb.event.fire(startevent, self.cfgData)
pid, pipein, pipeout = self.fork_off_task(fn, task, taskname)
@@ -1281,8 +1298,6 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.build_pipes[pid] = runQueuePipe(pipein, pipeout, self.cfgData)
self.runq_running[task] = 1
self.stats.taskActive()
- if self.stats.active < self.number_tasks:
- return True
for pipe in self.build_pipes:
self.build_pipes[pipe].read()
@@ -1297,13 +1312,13 @@ class RunQueueExecuteTasks(RunQueueExecute):
return True
# Sanity Checks
- for task in range(self.stats.total):
+ for task in xrange(self.stats.total):
if self.runq_buildable[task] == 0:
- bb.msg.error(bb.msg.domain.RunQueue, "Task %s never buildable!" % task)
+ logger.error("Task %s never buildable!", task)
if self.runq_running[task] == 0:
- bb.msg.error(bb.msg.domain.RunQueue, "Task %s never ran!" % task)
+ logger.error("Task %s never ran!", task)
if self.runq_complete[task] == 0:
- bb.msg.error(bb.msg.domain.RunQueue, "Task %s never completed!" % task)
+ logger.error("Task %s never completed!", task)
self.rq.state = runQueueComplete
return True
@@ -1332,12 +1347,12 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
# therefore aims to collapse the huge runqueue dependency tree into a smaller one
# only containing the setscene functions.
- for task in range(self.stats.total):
+ for task in xrange(self.stats.total):
self.runq_running.append(0)
self.runq_complete.append(0)
self.runq_buildable.append(0)
- for task in range(len(self.rqdata.runq_fnid)):
+ for task in xrange(len(self.rqdata.runq_fnid)):
sq_revdeps.append(copy.copy(self.rqdata.runq_revdeps[task]))
sq_revdeps_new.append(set())
if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene:
@@ -1368,7 +1383,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
process_endpoints(endpoints)
- for task in range(len(self.rqdata.runq_fnid)):
+ for task in xrange(len(self.rqdata.runq_fnid)):
if task in self.rqdata.runq_setscene:
deps = set()
for dep in sq_revdeps_new[task]:
@@ -1377,20 +1392,20 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
elif len(sq_revdeps_new[task]) != 0:
bb.msg.fatal(bb.msg.domain.RunQueue, "Something went badly wrong during scenequeue generation, aborting. Please report this problem.")
- #for task in range(len(sq_revdeps_squash)):
+ #for task in xrange(len(sq_revdeps_squash)):
# print "Task %s: %s.%s is %s " % (task, self.taskData.fn_index[self.runq_fnid[self.runq_setscene[task]]], self.runq_task[self.runq_setscene[task]] + "_setscene", sq_revdeps_squash[task])
self.sq_deps = []
self.sq_revdeps = sq_revdeps_squash
self.sq_revdeps2 = copy.deepcopy(self.sq_revdeps)
- for task in range(len(self.sq_revdeps)):
+ for task in xrange(len(self.sq_revdeps)):
self.sq_deps.append(set())
- for task in range(len(self.sq_revdeps)):
+ for task in xrange(len(self.sq_revdeps)):
for dep in self.sq_revdeps[task]:
self.sq_deps[dep].add(task)
- for task in range(len(self.sq_revdeps)):
+ for task in xrange(len(self.sq_revdeps)):
if len(self.sq_revdeps[task]) == 0:
self.runq_buildable[task] = 1
@@ -1401,7 +1416,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
sq_taskname = []
sq_task = []
noexec = []
- for task in range(len(self.sq_revdeps)):
+ for task in xrange(len(self.sq_revdeps)):
realtask = self.rqdata.runq_setscene[task]
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
taskname = self.rqdata.runq_task[realtask]
@@ -1424,14 +1439,13 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
for v in valid:
valid_new.append(sq_task[v])
- for task in range(len(self.sq_revdeps)):
+ for task in xrange(len(self.sq_revdeps)):
if task not in valid_new and task not in noexec:
- bb.msg.debug(2, bb.msg.domain.RunQueue, "No package found so skipping setscene task %s" % (self.rqdata.get_user_idstring(self.rqdata.runq_setscene[task])))
+ logger.debug(2, 'No package found, so skipping setscene task %s',
+ self.rqdata.get_user_idstring(task))
self.task_failoutright(task)
- #print(str(valid))
-
- bb.msg.note(1, bb.msg.domain.RunQueue, "Executing SetScene Tasks")
+ logger.info('Executing SetScene Tasks')
self.rq.state = runQueueSceneRun
@@ -1449,7 +1463,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
"""
index = self.rqdata.runq_setscene[task]
- bb.msg.debug(1, bb.msg.domain.RunQueue, "Found task %s could be accelerated" % self.rqdata.get_user_idstring(index))
+ logger.debug(1, 'Found task %s which could be accelerated',
+ self.rqdata.get_user_idstring(index))
self.scenequeue_covered.add(task)
self.scenequeue_updatecounters(task)
@@ -1461,7 +1476,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
def task_fail(self, task, result):
self.stats.taskFailed()
index = self.rqdata.runq_setscene[task]
- bb.event.fire(runQueueTaskFailed(task, self.stats, self), self.cfgData)
+ bb.event.fire(runQueueTaskFailed(task, self.stats, result, self), self.cfgData)
self.scenequeue_notcovered.add(task)
self.scenequeue_updatecounters(task)
@@ -1489,13 +1504,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
task = None
if self.stats.active < self.number_tasks:
# Find the next setscene to run
- for nexttask in range(self.stats.total):
+ for nexttask in xrange(self.stats.total):
if self.runq_buildable[nexttask] == 1 and self.runq_running[nexttask] != 1:
- #bb.note("Comparing %s to %s" % (self.sq_revdeps[nexttask], self.scenequeue_covered))
- #if len(self.sq_revdeps[nexttask]) > 0 and self.sq_revdeps[nexttask].issubset(self.scenequeue_covered):
- # bb.note("Skipping task %s" % nexttask)
- # self.scenequeue_skip(nexttask)
- # return True
task = nexttask
break
if task is not None:
@@ -1504,7 +1514,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
taskname = self.rqdata.runq_task[realtask] + "_setscene"
if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask]):
- bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp for underlying task %s (%s) is current so skipping setscene varient" % (task, self.rqdata.get_user_idstring(task)))
+ logger.debug(2, 'Stamp for underlying task %s(%s) is current, so skipping setscene variant',
+ task, self.rqdata.get_user_idstring(task))
self.task_failoutright(task)
return True
@@ -1515,12 +1526,12 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
return True
if self.rq.check_stamp_task(realtask, taskname):
- bb.msg.debug(2, bb.msg.domain.RunQueue, "Setscene stamp current task %s (%s) so skip it and its dependencies" % (task, self.rqdata.get_user_idstring(realtask)))
+ logger.debug(2, 'Setscene stamp current task %s(%s), so skip it and its dependencies',
+ task, self.rqdata.get_user_idstring(realtask))
self.task_skip(task)
return True
- bb.msg.note(1, bb.msg.domain.RunQueue,
- "Running setscene task %d of %d (%s:%s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
+ logger.info("Running setscene task %d of %d (%s:%s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
self.stats.total, fn, taskname))
pid, pipein, pipeout = self.fork_off_task(fn, realtask, taskname)
@@ -1546,11 +1557,14 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
for task in oldcovered:
self.rq.scenequeue_covered.add(self.rqdata.runq_setscene[task])
- bb.debug("We can skip tasks %s" % self.rq.scenequeue_covered)
+ logger.debug(1, 'We can skip tasks %s', self.rq.scenequeue_covered)
self.rq.state = runQueueRunInit
return True
+ def fork_off_task(self, fn, task, taskname):
+ return RunQueueExecute.fork_off_task(self, fn, task, taskname, quieterrors=True)
+
class TaskFailure(Exception):
"""
Exception raised when a task in a runqueue fails
@@ -1583,51 +1597,48 @@ class runQueueTaskStarted(runQueueEvent):
"""
Event notifying a task was started
"""
- def __init__(self, task, stats, rq):
+ def __init__(self, task, stats, rq, noexec=False):
runQueueEvent.__init__(self, task, stats, rq)
- self.message = "Running task %s (%d of %d) (%s)" % (task, stats.completed + stats.active + 1, self.stats.total, self.taskstring)
+ self.noexec = noexec
class runQueueTaskFailed(runQueueEvent):
"""
Event notifying a task failed
"""
- def __init__(self, task, stats, rq):
+ def __init__(self, task, stats, exitcode, rq):
runQueueEvent.__init__(self, task, stats, rq)
- self.message = "Task %s failed (%s)" % (task, self.taskstring)
+ self.exitcode = exitcode
class runQueueTaskCompleted(runQueueEvent):
"""
Event notifying a task completed
"""
- def __init__(self, task, stats, rq):
- runQueueEvent.__init__(self, task, stats, rq)
- self.message = "Task %s completed (%s)" % (task, self.taskstring)
-#def check_stamp_fn(fn, taskname, d):
-# rq = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d)
-# fn = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", d)
-# fnid = rq.rqdata.taskData.getfn_id(fn)
-# taskid = rq.get_task_id(fnid, taskname)
-# if taskid is not None:
-# return rq.check_stamp_task(taskid)
-# return None
+def check_stamp_fn(fn, taskname, d):
+ rqexe = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d)
+ fn = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", d)
+ fnid = rqexe.rqdata.taskData.getfn_id(fn)
+ taskid = rqexe.rqdata.get_task_id(fnid, taskname)
+ if taskid is not None:
+ return rqexe.rq.check_stamp_task(taskid)
+ return None
class runQueuePipe():
"""
Abstraction for a pipe between a worker thread and the server
"""
def __init__(self, pipein, pipeout, d):
- self.fd = pipein
+ self.input = pipein
pipeout.close()
- fcntl.fcntl(self.fd, fcntl.F_SETFL, fcntl.fcntl(self.fd, fcntl.F_GETFL) | os.O_NONBLOCK)
+ fcntl.fcntl(self.input, fcntl.F_SETFL, fcntl.fcntl(self.input, fcntl.F_GETFL) | os.O_NONBLOCK)
self.queue = ""
self.d = d
def read(self):
start = len(self.queue)
try:
- self.queue = self.queue + self.fd.read(1024)
- except IOError:
+ self.queue = self.queue + self.input.read(102400)
+ except (OSError, IOError):
pass
end = len(self.queue)
index = self.queue.find("</event>")
@@ -1642,4 +1653,4 @@ class runQueuePipe():
continue
if len(self.queue) > 0:
print("Warning, worker left partial message: %s" % self.queue)
- self.fd.close()
+ self.input.close()
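
runQueuePipe.read() above accumulates non-blocking reads in a string buffer and slices out complete worker events on the "</event>" terminator. The framing logic in isolation (buffer contents are illustrative):

    def split_events(queue, data):
        # Returns (remaining buffer, list of complete "...</event>" frames).
        queue += data
        frames = []
        index = queue.find("</event>")
        while index != -1:
            frames.append(queue[:index + len("</event>")])
            queue = queue[index + len("</event>"):]
            index = queue.find("</event>")
        return queue, frames
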
diff --git a/bitbake/lib/bb/server/none.py b/bitbake/lib/bb/server/none.py
index dafb2feba..2708807df 100644
--- a/bitbake/lib/bb/server/none.py
+++ b/bitbake/lib/bb/server/none.py
@@ -174,6 +174,8 @@ class BitBakeServerConnection():
self.server = serverinfo.server
self.connection = serverinfo.commands
self.events = bb.server.none.BBUIEventQueue(self.server)
+ for event in bb.event.ui_queue:
+ self.events.queue_event(event)
def terminate(self):
try:
diff --git a/bitbake/lib/bb/server/xmlrpc.py b/bitbake/lib/bb/server/xmlrpc.py
index c2bfe1217..0d03e308d 100644
--- a/bitbake/lib/bb/server/xmlrpc.py
+++ b/bitbake/lib/bb/server/xmlrpc.py
@@ -243,6 +243,8 @@ class BitBakeServerConnection():
t = BBTransport()
self.connection = xmlrpclib.Server("http://%s:%s" % (serverinfo.host, serverinfo.port), transport=t, allow_none=True)
self.events = uievent.BBUIEventQueue(self.connection)
+ for event in bb.event.ui_queue:
+ self.events.queue_event(event)
def terminate(self):
# Don't wait for server indefinitely
diff --git a/bitbake/lib/bb/shell.py b/bitbake/lib/bb/shell.py
index f9ca9d5bd..3319e2d1c 100644
--- a/bitbake/lib/bb/shell.py
+++ b/bitbake/lib/bb/shell.py
@@ -180,11 +180,9 @@ class BitBakeShellCommands:
last_exception = Providers.NoProvider
except runqueue.TaskFailure as fnids:
- for fnid in fnids:
- print("ERROR: '%s' failed" % td.fn_index[fnid])
last_exception = runqueue.TaskFailure
- except build.EventException as e:
+ except build.FuncFailed as e:
print("ERROR: Couldn't build '%s'" % names)
last_exception = e
@@ -247,7 +245,7 @@ class BitBakeShellCommands:
cooker.buildFile(bf, cmd)
except parse.ParseError:
print("ERROR: Unable to open or parse '%s'" % bf)
- except build.EventException as e:
+ except build.FuncFailed as e:
print("ERROR: Couldn't build '%s'" % name)
last_exception = e
@@ -274,9 +272,7 @@ class BitBakeShellCommands:
bbfile = params[0]
print("SHELL: Parsing '%s'" % bbfile)
parse.update_mtime( bbfile )
- cooker.bb_cache.cacheValidUpdate(bbfile)
- fromCache = cooker.bb_cache.loadData(bbfile, cooker.configuration.data, cooker.status)
- cooker.bb_cache.sync()
+ cooker.parser.reparse(bbfile)
if False: #fromCache:
print("SHELL: File has not been updated, not reparsing")
else:
@@ -445,7 +441,7 @@ SRC_URI = ""
name, var = params
bbfile = self._findProvider( name )
if bbfile is not None:
- the_data = cooker.bb_cache.loadDataFull(bbfile, cooker.configuration.data)
+ the_data = cache.Cache.loadDataFull(bbfile, cooker.configuration.data)
value = the_data.getVar( var, 1 )
print(value)
else:
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 48f600a21..a101ce8bb 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -1,53 +1,63 @@
import hashlib
+import logging
import re
+logger = logging.getLogger('BitBake.SigGen')
+
try:
import cPickle as pickle
except ImportError:
import pickle
- bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
+ logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
-def init(d, dumpsigs):
+def init(d):
siggens = [obj for obj in globals().itervalues()
if type(obj) is type and issubclass(obj, SignatureGenerator)]
desired = bb.data.getVar("BB_SIGNATURE_HANDLER", d, True) or "noop"
for sg in siggens:
if desired == sg.name:
- return sg(d, dumpsigs)
+ return sg(d)
break
else:
- bb.error("Invalid signature generator '%s', using default 'noop' generator" % desired)
- bb.error("Available generators: %s" % ", ".join(obj.name for obj in siggens))
- return SignatureGenerator(d, dumpsigs)
+ logger.error("Invalid signature generator '%s', using default 'noop'\n"
+ "Available generators: %s",
+ desired, ', '.join(obj.name for obj in siggens))
+ return SignatureGenerator(d)
class SignatureGenerator(object):
"""
"""
name = "noop"
- def __init__(self, data, dumpsigs):
+ def __init__(self, data):
return
def finalise(self, fn, d, varient):
return
- def stampfile(self, stampbase, taskname, taskhash):
- return "%s.%s" % (stampbase, taskname)
+ def get_taskhash(self, fn, task, deps, dataCache):
+ return 0
+
+ def set_taskdata(self, hashes, deps):
+ return
+
+ def stampfile(self, stampbase, file_name, taskname, extrainfo):
+ return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
class SignatureGeneratorBasic(SignatureGenerator):
"""
"""
name = "basic"
- def __init__(self, data, dumpsigs):
+ def __init__(self, data):
self.basehash = {}
self.taskhash = {}
self.taskdeps = {}
self.runtaskdeps = {}
self.gendeps = {}
self.lookupcache = {}
- self.basewhitelist = (data.getVar("BB_HASHBASE_WHITELIST", True) or "").split()
+ self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split())
self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None
if self.taskwhitelist:
@@ -57,17 +67,31 @@ class SignatureGeneratorBasic(SignatureGenerator):
def _build_data(self, fn, d):
- taskdeps, gendeps = bb.data.generate_dependencies(d)
+ tasklist, gendeps = bb.data.generate_dependencies(d)
+ taskdeps = {}
basehash = {}
lookupcache = {}
- for task in taskdeps:
+ for task in tasklist:
data = d.getVar(task, False)
lookupcache[task] = data
- for dep in sorted(taskdeps[task]):
- if dep in self.basewhitelist:
- continue
+
+ newdeps = gendeps[task]
+ seen = set()
+ while newdeps:
+ nextdeps = newdeps
+ seen |= nextdeps
+ newdeps = set()
+ for dep in nextdeps:
+ if dep in self.basewhitelist:
+ continue
+ newdeps |= gendeps[dep]
+ newdeps -= seen
+
+ alldeps = seen - self.basewhitelist
+
+ for dep in sorted(alldeps):
if dep in lookupcache:
var = lookupcache[dep]
else:
@@ -78,7 +102,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
if data is None:
bb.error("Task %s from %s seems to be empty?!" % (task, fn))
self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
- #bb.note("Hash for %s is %s" % (task, tashhash[task]))
+ taskdeps[task] = sorted(alldeps)
self.taskdeps[fn] = taskdeps
self.gendeps[fn] = gendeps
@@ -110,7 +134,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
# then process the actual dependencies
dep_fn = re.search("(?P<fn>.*)\..*", dep).group('fn')
if self.twl.search(dataCache.pkg_fn[dep_fn]):
- #bb.note("Skipping %s" % dep)
continue
if dep not in self.taskhash:
bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?", dep)
@@ -170,6 +193,17 @@ class SignatureGeneratorBasic(SignatureGenerator):
bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k]))
self.dump_sigtask(fn, task, dataCache.stamp[fn], True)
+class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
+ name = "basichash"
+
+ def stampfile(self, stampbase, fn, taskname, extrainfo):
+ if taskname != "do_setscene" and taskname.endswith("_setscene"):
+ k = fn + "." + taskname[:-9]
+ else:
+ k = fn + "." + taskname
+ h = self.taskhash[k]
+ return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
+
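
The noop generator's stampfile() above produces "<stampbase>.<task>[.<extrainfo>]", while the new basichash generator inserts the task hash and keys "<task>_setscene" stamps on the underlying task's hash. A quick illustration with made-up values:

    def noop_stamp(stampbase, taskname, extrainfo):
        return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')

    print(noop_stamp("tmp/stamps/foo-1.0-r0", "do_compile", ""))
    # -> tmp/stamps/foo-1.0-r0.do_compile
    # basichash additionally embeds the task hash, e.g. (hash is illustrative):
    # tmp/stamps/foo-1.0-r0.do_compile.3f786850e387550fdab836ed7e6dc881
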
def dump_this_task(outfile, d):
fn = d.getVar("BB_FILENAME", True)
task = "do_" + d.getVar("BB_CURRENTTASK", True)
@@ -181,10 +215,6 @@ def compare_sigfiles(a, b):
p2 = pickle.Unpickler(file(b, "rb"))
b_data = p2.load()
- #print "Checking"
- #print str(a_data)
- #print str(b_data)
-
def dict_diff(a, b):
sa = set(a.keys())
sb = set(b.keys())
@@ -195,7 +225,7 @@ def compare_sigfiles(a, b):
changed.add(i)
added = sa - sb
removed = sb - sa
- return changed, added, removed
+ return changed, added, removed
if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
print "basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist'])
@@ -225,18 +255,20 @@ def compare_sigfiles(a, b):
if changed:
for dep in changed:
print "Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep])
- #if added:
- # print "Dependency on variable %s was added (value %s)" % (dep, b_data['gendeps'][dep])
- #if removed:
- # print "Dependency on Variable %s was removed (value %s)" % (dep, a_data['gendeps'][dep])
- if 'runtaskdeps' in a_data and 'runtaskdeps' in b_data and sorted(a_data['runtaskdeps']) != sorted(b_data['runtaskdeps']):
- print "Tasks this task depends on changed from %s to %s" % (sorted(a_data['runtaskdeps']), sorted(b_data['runtaskdeps']))
-
- if 'runtaskhashes' in a_data:
- for dep in a_data['runtaskhashes']:
- if a_data['runtaskhashes'][dep] != b_data['runtaskhashes'][dep]:
+ if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
+ changed, added, removed = dict_diff(a_data['runtaskhashes'], b_data['runtaskhashes'])
+ if added:
+ for dep in added:
+ print "Dependency on task %s was added" % (dep)
+ if removed:
+ for dep in removed:
+ print "Dependency on task %s was removed" % (dep)
+ if changed:
+ for dep in changed:
print "Hash for dependent task %s changed from %s to %s" % (dep, a_data['runtaskhashes'][dep], b_data['runtaskhashes'][dep])
+ elif 'runtaskdeps' in a_data and 'runtaskdeps' in b_data and sorted(a_data['runtaskdeps']) != sorted(b_data['runtaskdeps']):
+ print "Tasks this task depends on changed from %s to %s" % (sorted(a_data['runtaskdeps']), sorted(b_data['runtaskdeps']))
def dump_sigfile(a):
p1 = pickle.Unpickler(file(a, "rb"))
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
index e31f96785..81a42b7b5 100644
--- a/bitbake/lib/bb/taskdata.py
+++ b/bitbake/lib/bb/taskdata.py
@@ -23,20 +23,19 @@ Task data collection and handling
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+import logging
+import re
import bb
+logger = logging.getLogger("BitBake.TaskData")
+
def re_match_strings(target, strings):
"""
Whether or not the string 'target' matches
any one string of the strings which can be regular expression string
"""
- import re
-
- for name in strings:
- if (name==target or
- re.search(name, target)!=None):
- return True
- return False
+ return any(name == target or re.match(name, target)
+ for name in strings)
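
The rewritten re_match_strings() above treats each entry either as an exact name or as a regular expression matched from the start of the target. For example:

    import re

    def re_match_strings(target, strings):
        return any(name == target or re.match(name, target)
                   for name in strings)

    print(re_match_strings("linux-yocto", ["linux-.*"]))    # True
    print(re_match_strings("glibc", ["linux-.*", "gcc"]))   # False
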
class TaskData:
"""
@@ -182,7 +181,7 @@ class TaskData:
if not fnid in self.depids:
dependids = {}
for depend in dataCache.deps[fn]:
- bb.msg.debug(2, bb.msg.domain.TaskData, "Added dependency %s for %s" % (depend, fn))
+ logger.debug(2, "Added dependency %s for %s", depend, fn)
dependids[self.getbuild_id(depend)] = None
self.depids[fnid] = dependids.keys()
@@ -192,12 +191,12 @@ class TaskData:
rdepends = dataCache.rundeps[fn]
rrecs = dataCache.runrecs[fn]
for package in rdepends:
- for rdepend in bb.utils.explode_deps(rdepends[package]):
- bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime dependency %s for %s" % (rdepend, fn))
+ for rdepend in rdepends[package]:
+ logger.debug(2, "Added runtime dependency %s for %s", rdepend, fn)
rdependids[self.getrun_id(rdepend)] = None
for package in rrecs:
- for rdepend in bb.utils.explode_deps(rrecs[package]):
- bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime recommendation %s for %s" % (rdepend, fn))
+ for rdepend in rrecs[package]:
+ logger.debug(2, "Added runtime recommendation %s for %s", rdepend, fn)
rdependids[self.getrun_id(rdepend)] = None
self.rdepids[fnid] = rdependids.keys()
@@ -397,7 +396,7 @@ class TaskData:
fnid = self.getfn_id(fn)
if fnid in self.failed_fnids:
continue
- bb.msg.debug(2, bb.msg.domain.Provider, "adding %s to satisfy %s" % (fn, item))
+ logger.debug(2, "adding %s to satisfy %s", fn, item)
self.add_build_target(fn, item)
self.add_tasks(fn, dataCache)
@@ -450,7 +449,7 @@ class TaskData:
fnid = self.getfn_id(fn)
if fnid in self.failed_fnids:
continue
- bb.msg.debug(2, bb.msg.domain.Provider, "adding '%s' to satisfy runtime '%s'" % (fn, item))
+ logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item)
self.add_runtime_target(fn, item)
self.add_tasks(fn, dataCache)
@@ -463,7 +462,7 @@ class TaskData:
"""
if fnid in self.failed_fnids:
return
- bb.msg.debug(1, bb.msg.domain.Provider, "File '%s' is unbuildable, removing..." % self.fn_index[fnid])
+ logger.debug(1, "File '%s' is unbuildable, removing...", self.fn_index[fnid])
self.failed_fnids.append(fnid)
for target in self.build_targets:
if fnid in self.build_targets[target]:
@@ -485,12 +484,12 @@ class TaskData:
missing_list = [self.build_names_index[targetid]]
else:
missing_list = [self.build_names_index[targetid]] + missing_list
- bb.msg.note(2, bb.msg.domain.Provider, "Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s" % (self.build_names_index[targetid], missing_list))
+ logger.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.build_names_index[targetid], missing_list)
self.failed_deps.append(targetid)
dependees = self.get_dependees(targetid)
for fnid in dependees:
self.fail_fnid(fnid, missing_list)
- for taskid in range(len(self.tasks_idepends)):
+ for taskid in xrange(len(self.tasks_idepends)):
idepends = self.tasks_idepends[taskid]
for (idependid, idependtask) in idepends:
if idependid == targetid:
@@ -498,7 +497,7 @@ class TaskData:
if self.abort and targetid in self.external_targets:
target = self.build_names_index[targetid]
- bb.msg.error(bb.msg.domain.Provider, "Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s" % (target, missing_list))
+ logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
raise bb.providers.NoProvider(target)
def remove_runtarget(self, targetid, missing_list = []):
@@ -511,7 +510,7 @@ class TaskData:
else:
missing_list = [self.run_names_index[targetid]] + missing_list
- bb.msg.note(1, bb.msg.domain.Provider, "Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s" % (self.run_names_index[targetid], missing_list))
+ logger.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.run_names_index[targetid], missing_list)
self.failed_rdeps.append(targetid)
dependees = self.get_rdependees(targetid)
for fnid in dependees:
@@ -521,7 +520,7 @@ class TaskData:
"""
Resolve all unresolved build and runtime targets
"""
- bb.msg.note(1, bb.msg.domain.TaskData, "Resolving any missing task queue dependencies")
+ logger.info("Resolving any missing task queue dependencies")
while True:
added = 0
for target in self.get_unresolved_build_targets(dataCache):
@@ -539,7 +538,7 @@ class TaskData:
added = added + 1
except bb.providers.NoRProvider:
self.remove_runtarget(self.getrun_id(target))
- bb.msg.debug(1, bb.msg.domain.TaskData, "Resolved " + str(added) + " extra dependencies")
+ logger.debug(1, "Resolved " + str(added) + " extra dependencies")
if added == 0:
break
# self.dump_data()
@@ -548,40 +547,40 @@ class TaskData:
"""
Dump some debug information on the internal data structures
"""
- bb.msg.debug(3, bb.msg.domain.TaskData, "build_names:")
- bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.build_names_index))
+ logger.debug(3, "build_names:")
+ logger.debug(3, ", ".join(self.build_names_index))
- bb.msg.debug(3, bb.msg.domain.TaskData, "run_names:")
- bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.run_names_index))
+ logger.debug(3, "run_names:")
+ logger.debug(3, ", ".join(self.run_names_index))
- bb.msg.debug(3, bb.msg.domain.TaskData, "build_targets:")
- for buildid in range(len(self.build_names_index)):
+ logger.debug(3, "build_targets:")
+ for buildid in xrange(len(self.build_names_index)):
target = self.build_names_index[buildid]
targets = "None"
if buildid in self.build_targets:
targets = self.build_targets[buildid]
- bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (buildid, target, targets))
+ logger.debug(3, " (%s)%s: %s", buildid, target, targets)
- bb.msg.debug(3, bb.msg.domain.TaskData, "run_targets:")
- for runid in range(len(self.run_names_index)):
+ logger.debug(3, "run_targets:")
+ for runid in xrange(len(self.run_names_index)):
target = self.run_names_index[runid]
targets = "None"
if runid in self.run_targets:
targets = self.run_targets[runid]
- bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (runid, target, targets))
+ logger.debug(3, " (%s)%s: %s", runid, target, targets)
- bb.msg.debug(3, bb.msg.domain.TaskData, "tasks:")
- for task in range(len(self.tasks_name)):
- bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s - %s: %s" % (
- task,
- self.fn_index[self.tasks_fnid[task]],
- self.tasks_name[task],
- self.tasks_tdepends[task]))
+ logger.debug(3, "tasks:")
+ for task in xrange(len(self.tasks_name)):
+ logger.debug(3, " (%s)%s - %s: %s",
+ task,
+ self.fn_index[self.tasks_fnid[task]],
+ self.tasks_name[task],
+ self.tasks_tdepends[task])
- bb.msg.debug(3, bb.msg.domain.TaskData, "dependency ids (per fn):")
+ logger.debug(3, "dependency ids (per fn):")
for fnid in self.depids:
- bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.depids[fnid]))
+ logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.depids[fnid])
- bb.msg.debug(3, bb.msg.domain.TaskData, "runtime dependency ids (per fn):")
+ logger.debug(3, "runtime dependency ids (per fn):")
for fnid in self.rdepids:
- bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.rdepids[fnid]))
+ logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.rdepids[fnid])
diff --git a/bitbake/lib/bb/ui/crumbs/progress.py b/bitbake/lib/bb/ui/crumbs/progress.py
index 8bd87108e..36eca3829 100644
--- a/bitbake/lib/bb/ui/crumbs/progress.py
+++ b/bitbake/lib/bb/ui/crumbs/progress.py
@@ -14,4 +14,4 @@ class ProgressBar(gtk.Dialog):
def update(self, x, y):
self.progress.set_fraction(float(x)/float(y))
- self.progress.set_text("%d/%d (%2d %%)" % (x, y, x*100/y))
+ self.progress.set_text("%2d %%" % (x*100/y))
diff --git a/bitbake/lib/bb/ui/crumbs/runningbuild.py b/bitbake/lib/bb/ui/crumbs/runningbuild.py
index 9730bfd47..4703e6d84 100644
--- a/bitbake/lib/bb/ui/crumbs/runningbuild.py
+++ b/bitbake/lib/bb/ui/crumbs/runningbuild.py
@@ -1,3 +1,4 @@
+
#
# BitBake Graphical GTK User Interface
#
@@ -20,9 +21,20 @@
import gtk
import gobject
+import logging
+import time
+import urllib
+import urllib2
+
+class Colors(object):
+ OK = "#ffffff"
+ RUNNING = "#aaffaa"
+ WARNING ="#f88017"
+ ERROR = "#ffaaaa"
class RunningBuildModel (gtk.TreeStore):
- (COL_TYPE, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_ACTIVE) = (0, 1, 2, 3, 4, 5)
+ (COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = range(7)
+
def __init__ (self):
gtk.TreeStore.__init__ (self,
gobject.TYPE_STRING,
@@ -30,7 +42,8 @@ class RunningBuildModel (gtk.TreeStore):
gobject.TYPE_STRING,
gobject.TYPE_STRING,
gobject.TYPE_STRING,
- gobject.TYPE_BOOLEAN)
+ gobject.TYPE_STRING,
+ gobject.TYPE_INT)
class RunningBuild (gobject.GObject):
__gsignals__ = {
@@ -63,32 +76,42 @@ class RunningBuild (gobject.GObject):
# for the message.
if hasattr(event, 'pid'):
pid = event.pid
- if pid in self.pids_to_task:
- (package, task) = self.pids_to_task[pid]
- parent = self.tasks_to_iter[(package, task)]
+ if hasattr(event, 'process'):
+ pid = event.process
+
+ if pid and pid in self.pids_to_task:
+ (package, task) = self.pids_to_task[pid]
+ parent = self.tasks_to_iter[(package, task)]
- if isinstance(event, bb.msg.MsgBase):
- # Ignore the "Running task i of n .."
- if (event._message.startswith ("Running task")):
+ if(isinstance(event, logging.LogRecord)):
+ if (event.msg.startswith ("Running task")):
return # don't add these to the list
- # Set a pretty icon for the message based on it's type.
- if isinstance(event, bb.msg.MsgWarn):
- icon = "dialog-warning"
- elif isinstance(event, bb.msg.MsgError):
+ if event.levelno >= logging.ERROR:
icon = "dialog-error"
+ color = Colors.ERROR
+ elif event.levelno >= logging.WARNING:
+ icon = "dialog-warning"
+ color = Colors.WARNING
else:
icon = None
+ color = Colors.OK
+
+ # if we know which package we belong to, we'll append onto its list.
+ # otherwise, we'll jump to the top of the master list
+ if parent:
+ tree_add = self.model.append
+ else:
+ tree_add = self.model.prepend
+ tree_add(parent,
+ (None,
+ package,
+ task,
+ event.getMessage(),
+ icon,
+ color,
+ 0))
- # Add the message to the tree either at the top level if parent is
- # None otherwise as a descendent of a task.
- self.model.append (parent,
- (event.__class__.__name__.split()[-1], # e.g. MsgWarn, MsgError
- package,
- task,
- event._message,
- icon,
- False))
elif isinstance(event, bb.build.TaskStarted):
(package, task) = (event._package, event._task)
@@ -101,76 +124,142 @@ class RunningBuild (gobject.GObject):
if ((package, None) in self.tasks_to_iter):
parent = self.tasks_to_iter[(package, None)]
else:
- parent = self.model.append (None, (None,
+ parent = self.model.prepend(None, (None,
package,
None,
"Package: %s" % (package),
None,
- False))
+ Colors.OK,
+ 0))
self.tasks_to_iter[(package, None)] = parent
# Because this parent package now has an active child mark it as
# such.
- self.model.set(parent, self.model.COL_ICON, "gtk-execute")
+ # @todo if parent is already in error, don't mark it green
+ self.model.set(parent, self.model.COL_ICON, "gtk-execute",
+ self.model.COL_COLOR, Colors.RUNNING)
# Add an entry in the model for this task
i = self.model.append (parent, (None,
package,
task,
"Task: %s" % (task),
- None,
- False))
+ "gtk-execute",
+ Colors.RUNNING,
+ 0))
+
+ # update the parent's active task count
+ num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] + 1
+ self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
# Save out the iter so that we can find it when we have a message
# that we need to attach to a task.
self.tasks_to_iter[(package, task)] = i
- # Mark this task as active.
- self.model.set(i, self.model.COL_ICON, "gtk-execute")
-
elif isinstance(event, bb.build.TaskBase):
+ current = self.tasks_to_iter[(package, task)]
+ parent = self.tasks_to_iter[(package, None)]
+
+ # remove this task from the parent's active count
+ num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] - 1
+ self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
if isinstance(event, bb.build.TaskFailed):
- # Mark the task as failed
- i = self.tasks_to_iter[(package, task)]
- self.model.set(i, self.model.COL_ICON, "dialog-error")
+ # Mark the task and parent as failed
+ icon = "dialog-error"
+ color = Colors.ERROR
- # Mark the parent package as failed
- i = self.tasks_to_iter[(package, None)]
- self.model.set(i, self.model.COL_ICON, "dialog-error")
+ logfile = event.logfile
+ if logfile and os.path.exists(logfile):
+ with open(logfile) as f:
+ logdata = f.read()
+ self.model.append(current, ('pastebin', None, None, logdata, 'gtk-error', Colors.OK, 0))
+
+ for i in (current, parent):
+ self.model.set(i, self.model.COL_ICON, icon,
+ self.model.COL_COLOR, color)
else:
+ icon = None
+ color = Colors.OK
+
# Mark the task as inactive
- i = self.tasks_to_iter[(package, task)]
- self.model.set(i, self.model.COL_ICON, None)
+ self.model.set(current, self.model.COL_ICON, icon,
+ self.model.COL_COLOR, color)
- # Mark the parent package as inactive
+ # Mark the parent package as inactive, but make sure to
+ # preserve error and active states
i = self.tasks_to_iter[(package, None)]
- self.model.set(i, self.model.COL_ICON, None)
-
+ if self.model.get(parent, self.model.COL_ICON) != 'dialog-error':
+ self.model.set(parent, self.model.COL_ICON, icon)
+ if num_active == 0:
+ self.model.set(parent, self.model.COL_COLOR, Colors.OK)
# Clear the iters and the pids since when the task goes away the
# pid will no longer be used for messages
del self.tasks_to_iter[(package, task)]
del self.pids_to_task[pid]
+ elif isinstance(event, bb.event.BuildStarted):
+
+ self.model.prepend(None, (None,
+ None,
+ None,
+ "Build Started (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
+ None,
+ Colors.OK,
+ 0))
elif isinstance(event, bb.event.BuildCompleted):
failures = int (event._failures)
+ self.model.prepend(None, (None,
+ None,
+ None,
+ "Build Completed (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
+ None,
+ Colors.OK,
+ 0))
# Emit the appropriate signal depending on the number of failures
- if (failures > 1):
+ if (failures >= 1):
self.emit ("build-failed")
else:
self.emit ("build-succeeded")
+ elif isinstance(event, bb.event.CacheLoadStarted) and pbar:
+ pbar.set_title("Loading cache")
+ self.progress_total = event.total
+ pbar.update(0, self.progress_total)
+ elif isinstance(event, bb.event.CacheLoadProgress) and pbar:
+ pbar.update(event.current, self.progress_total)
+ elif isinstance(event, bb.event.CacheLoadCompleted) and pbar:
+ pbar.update(self.progress_total, self.progress_total)
+
+ elif isinstance(event, bb.event.ParseStarted) and pbar:
+ pbar.set_title("Processing recipes")
+ self.progress_total = event.total
+ pbar.update(0, self.progress_total)
elif isinstance(event, bb.event.ParseProgress) and pbar:
- x = event.sofar
- y = event.total
- if x == y:
- pbar.hide()
- return
- pbar.update(x, y)
+ pbar.update(event.current, self.progress_total)
+ elif isinstance(event, bb.event.ParseCompleted) and pbar:
+ pbar.hide()
+
+ return
+
+
+def do_pastebin(text):
+ url = 'http://pastebin.com/api_public.php'
+ params = {'paste_code': text, 'paste_format': 'text'}
+
+ req = urllib2.Request(url, urllib.urlencode(params))
+ response = urllib2.urlopen(req)
+ paste_url = response.read()
+
+ return paste_url
+
class RunningBuildTreeView (gtk.TreeView):
+ __gsignals__ = {
+ "button_press_event" : "override"
+ }
def __init__ (self):
gtk.TreeView.__init__ (self)
@@ -181,6 +270,42 @@ class RunningBuildTreeView (gtk.TreeView):
self.append_column (col)
# The message of the build.
- renderer = gtk.CellRendererText ()
- col = gtk.TreeViewColumn ("Message", renderer, text=3)
- self.append_column (col)
+ self.message_renderer = gtk.CellRendererText ()
+ self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=3)
+ self.message_column.add_attribute(self.message_renderer, 'background', 5)
+ self.message_renderer.set_property('editable', 5)
+ self.append_column (self.message_column)
+
+ def do_button_press_event(self, event):
+ gtk.TreeView.do_button_press_event(self, event)
+
+ if event.button == 3:
+ selection = super(RunningBuildTreeView, self).get_selection()
+ (model, iter) = selection.get_selected()
+ if iter is not None:
+ can_paste = model.get(iter, model.COL_LOG)[0]
+ if can_paste == 'pastebin':
+ # build a simple menu with a pastebin option
+ menu = gtk.Menu()
+ menuitem = gtk.MenuItem("Send log to pastebin")
+ menu.append(menuitem)
+ menuitem.connect("activate", self.pastebin_handler, (model, iter))
+ menuitem.show()
+ menu.show()
+ menu.popup(None, None, None, event.button, event.time)
+
+ def pastebin_handler(self, widget, data):
+ """
+ Send the log data to pastebin, then add the new paste url to the
+ clipboard.
+ """
+ (model, iter) = data
+ paste_url = do_pastebin(model.get(iter, model.COL_MESSAGE)[0])
+
+ # @todo Provide visual feedback to the user that it is done and that
+ # it worked.
+ print paste_url
+
+ clipboard = gtk.clipboard_get()
+ clipboard.set_text(paste_url)
+ clipboard.store() \ No newline at end of file
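The LogRecord branch in the runningbuild changes above picks an icon and a row colour from the record's levelno. A small standalone sketch of that mapping, assuming the same Colors values; classify_record is a hypothetical helper name, not part of the patch:

import logging

class Colors(object):
    OK      = "#ffffff"
    WARNING = "#f88017"
    ERROR   = "#ffaaaa"

def classify_record(record):
    # Map a logging.LogRecord severity to (icon-name, background colour).
    if record.levelno >= logging.ERROR:
        return "dialog-error", Colors.ERROR
    elif record.levelno >= logging.WARNING:
        return "dialog-warning", Colors.WARNING
    return None, Colors.OK

record = logging.LogRecord("BitBake", logging.WARNING, __file__, 0,
                           "QA issue in %s", ("zlib-native",), None)
print classify_record(record)   # -> ('dialog-warning', '#f88017')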
diff --git a/bitbake/lib/bb/ui/depexp.py b/bitbake/lib/bb/ui/depexp.py
index 6fd18d168..3dbd5e0ec 100644
--- a/bitbake/lib/bb/ui/depexp.py
+++ b/bitbake/lib/bb/ui/depexp.py
@@ -19,8 +19,11 @@
import gobject
import gtk
+import Queue
import threading
import xmlrpclib
+import bb
+import bb.event
from bb.ui.crumbs.progress import ProgressBar
# Package Model
@@ -30,6 +33,7 @@ from bb.ui.crumbs.progress import ProgressBar
(TYPE_DEP, TYPE_RDEP) = (0, 1)
(COL_DEP_TYPE, COL_DEP_PARENT, COL_DEP_PACKAGE) = (0, 1, 2)
+
class PackageDepView(gtk.TreeView):
def __init__(self, model, dep_type, label):
gtk.TreeView.__init__(self)
@@ -50,6 +54,7 @@ class PackageDepView(gtk.TreeView):
self.current = package
self.filter_model.refilter()
+
class PackageReverseDepView(gtk.TreeView):
def __init__(self, model, label):
gtk.TreeView.__init__(self)
@@ -67,6 +72,7 @@ class PackageReverseDepView(gtk.TreeView):
self.current = package
self.filter_model.refilter()
+
class DepExplorer(gtk.Window):
def __init__(self):
gtk.Window.__init__(self)
@@ -76,7 +82,9 @@ class DepExplorer(gtk.Window):
# Create the data models
self.pkg_model = gtk.ListStore(gobject.TYPE_STRING)
+ self.pkg_model.set_sort_column_id(COL_PKG_NAME, gtk.SORT_ASCENDING)
self.depends_model = gtk.ListStore(gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_STRING)
+ self.depends_model.set_sort_column_id(COL_DEP_PACKAGE, gtk.SORT_ASCENDING)
pane = gtk.HPaned()
pane.set_position(250)
@@ -86,9 +94,11 @@ class DepExplorer(gtk.Window):
scrolled = gtk.ScrolledWindow()
scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
scrolled.set_shadow_type(gtk.SHADOW_IN)
+
self.pkg_treeview = gtk.TreeView(self.pkg_model)
self.pkg_treeview.get_selection().connect("changed", self.on_cursor_changed)
- self.pkg_treeview.append_column(gtk.TreeViewColumn("Package", gtk.CellRendererText(), text=COL_PKG_NAME))
+ column = gtk.TreeViewColumn("Package", gtk.CellRendererText(), text=COL_PKG_NAME)
+ self.pkg_treeview.append_column(column)
pane.add1(scrolled)
scrolled.add(self.pkg_treeview)
@@ -154,7 +164,6 @@ class DepExplorer(gtk.Window):
def parse(depgraph, pkg_model, depends_model):
-
for package in depgraph["pn"]:
pkg_model.set(pkg_model.append(), COL_PKG_NAME, package)
@@ -172,6 +181,7 @@ def parse(depgraph, pkg_model, depends_model):
COL_DEP_PARENT, package,
COL_DEP_PACKAGE, rdepend)
+
class gtkthread(threading.Thread):
quit = threading.Event()
def __init__(self, shutdown):
@@ -185,8 +195,8 @@ class gtkthread(threading.Thread):
gtk.main()
gtkthread.quit.set()
-def init(server, eventHandler):
+def main(server, eventHandler):
try:
cmdline = server.runCommand(["getCmdLineAction"])
if not cmdline or cmdline[0] != "generateDotGraph":
@@ -208,44 +218,81 @@ def init(server, eventHandler):
gtk.gdk.threads_enter()
dep = DepExplorer()
pbar = ProgressBar(dep)
+ pbar.connect("delete-event", gtk.main_quit)
gtk.gdk.threads_leave()
+ progress_total = 0
while True:
try:
event = eventHandler.waitEvent(0.25)
if gtkthread.quit.isSet():
+ server.runCommand(["stateStop"])
break
if event is None:
continue
+
+ if isinstance(event, bb.event.CacheLoadStarted):
+ progress_total = event.total
+ gtk.gdk.threads_enter()
+ pbar.set_title("Loading Cache")
+ pbar.update(0, progress_total)
+ gtk.gdk.threads_leave()
+
+ if isinstance(event, bb.event.CacheLoadProgress):
+ x = event.current
+ gtk.gdk.threads_enter()
+ pbar.update(x, progress_total)
+ gtk.gdk.threads_leave()
+ continue
+
+ if isinstance(event, bb.event.CacheLoadCompleted):
+ gtk.gdk.threads_enter()
+ pbar.update(progress_total, progress_total)
+ gtk.gdk.threads_leave()
+ continue
+
+ if isinstance(event, bb.event.ParseStarted):
+ progress_total = event.total
+ gtk.gdk.threads_enter()
+ pbar.set_title("Processing recipes")
+ pbar.update(0, progress_total)
+ gtk.gdk.threads_leave()
+
if isinstance(event, bb.event.ParseProgress):
- x = event.sofar
- y = event.total
- if x == y:
- print(("\nParsing finished. %d cached, %d parsed, %d skipped, %d masked, %d errors."
- % ( event.cached, event.parsed, event.skipped, event.masked, event.errors)))
- pbar.hide()
- return
+ x = event.current
gtk.gdk.threads_enter()
- pbar.update(x, y)
+ pbar.update(x, progress_total)
gtk.gdk.threads_leave()
continue
+ if isinstance(event, bb.event.ParseCompleted):
+ pbar.hide()
+ continue
+
if isinstance(event, bb.event.DepTreeGenerated):
gtk.gdk.threads_enter()
parse(event._depgraph, dep.pkg_model, dep.depends_model)
gtk.gdk.threads_leave()
- if isinstance(event, bb.command.CookerCommandCompleted):
+ if isinstance(event, bb.command.CommandCompleted):
continue
- if isinstance(event, bb.command.CookerCommandFailed):
+
+ if isinstance(event, bb.command.CommandFailed):
print("Command execution failed: %s" % event.error)
- break
+ return event.exitcode
+
+ if isinstance(event, bb.command.CommandExit):
+ return event.exitcode
+
if isinstance(event, bb.cooker.CookerExit):
break
continue
-
+ except EnvironmentError as ioerror:
+ # ignore interrupted io
+ if ioerror.args[0] == 4:
+ pass
except KeyboardInterrupt:
if shutdown == 2:
print("\nThird Keyboard Interrupt, exit.\n")
diff --git a/bitbake/lib/bb/ui/goggle.py b/bitbake/lib/bb/ui/goggle.py
index 858ca14c4..ec5a38dd4 100644
--- a/bitbake/lib/bb/ui/goggle.py
+++ b/bitbake/lib/bb/ui/goggle.py
@@ -24,6 +24,9 @@ import xmlrpclib
from bb.ui.crumbs.runningbuild import RunningBuildTreeView, RunningBuild
from bb.ui.crumbs.progress import ProgressBar
+import Queue
+
+
def event_handle_idle_func (eventHandler, build, pbar):
# Consume as many messages as we can in the time available to us
@@ -37,6 +40,16 @@ def event_handle_idle_func (eventHandler, build, pbar):
def scroll_tv_cb (model, path, iter, view):
view.scroll_to_cell (path)
+
+# @todo hook these into the GUI so the user has feedback...
+def running_build_failed_cb (running_build):
+ pass
+
+
+def running_build_succeeded_cb (running_build):
+ pass
+
+
class MainWindow (gtk.Window):
def __init__ (self):
gtk.Window.__init__ (self, gtk.WINDOW_TOPLEVEL)
@@ -49,21 +62,25 @@ class MainWindow (gtk.Window):
self.set_default_size(640, 480)
scrolled_window.add (self.cur_build_tv)
-def init (server, eventHandler):
+
+def main (server, eventHandler):
gobject.threads_init()
gtk.gdk.threads_init()
window = MainWindow ()
window.show_all ()
pbar = ProgressBar(window)
+ pbar.connect("delete-event", gtk.main_quit)
# Create the object for the current build
running_build = RunningBuild ()
window.cur_build_tv.set_model (running_build.model)
running_build.model.connect("row-inserted", scroll_tv_cb, window.cur_build_tv)
+ running_build.connect ("build-succeeded", running_build_succeeded_cb)
+ running_build.connect ("build-failed", running_build_failed_cb)
+
try:
cmdline = server.runCommand(["getCmdLineAction"])
- print(cmdline)
if not cmdline:
return 1
ret = server.runCommand(cmdline)
@@ -76,10 +93,18 @@ def init (server, eventHandler):
# Use a timeout function for probing the event queue to find out if we
# have a message waiting for us.
- gobject.timeout_add (200,
+ gobject.timeout_add (100,
event_handle_idle_func,
eventHandler,
running_build,
pbar)
- gtk.main()
+ try:
+ gtk.main()
+ except EnvironmentError as ioerror:
+ # ignore interrupted io
+ if ioerror.args[0] == 4:
+ pass
+ finally:
+ server.runCommand(["stateStop"])
+
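goggle drives event delivery by polling the UI event queue on a 100 ms gobject timeout rather than blocking. A rough sketch of that polling callback, assuming the getEvent()/handle_event() interfaces used elsewhere in this patch; poll_events is an illustrative name:

import gobject

def poll_events(eventHandler, build, pbar):
    # Drain whatever events are queued right now, then come back later.
    event = eventHandler.getEvent()
    while event:
        build.handle_event(event, pbar)
        event = eventHandler.getEvent()
    return True   # returning True keeps the timeout scheduled

# gobject.timeout_add(100, poll_events, eventHandler, running_build, pbar)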
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
index 858a00b56..042dbe902 100644
--- a/bitbake/lib/bb/ui/knotty.py
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -22,15 +22,49 @@ from __future__ import division
import os
import sys
-import itertools
import xmlrpclib
-from bb import ui
+import logging
+import progressbar
+import bb.msg
from bb.ui import uihelper
+logger = logging.getLogger("BitBake")
+interactive = sys.stdout.isatty()
-parsespin = itertools.cycle( r'|/-\\' )
+class BBProgress(progressbar.ProgressBar):
+ def __init__(self, msg, maxval):
+ self.msg = msg
+ widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ',
+ progressbar.ETA()]
-def init(server, eventHandler):
+ progressbar.ProgressBar.__init__(self, maxval, [self.msg + ": "] + widgets)
+
+class NonInteractiveProgress(object):
+ fobj = sys.stdout
+
+ def __init__(self, msg, maxval):
+ self.msg = msg
+ self.maxval = maxval
+
+ def start(self):
+ self.fobj.write("%s..." % self.msg)
+ self.fobj.flush()
+ return self
+
+ def update(self, value):
+ pass
+
+ def finish(self):
+ self.fobj.write("done.\n")
+ self.fobj.flush()
+
+def new_progress(msg, maxval):
+ if interactive:
+ return BBProgress(msg, maxval)
+ else:
+ return NonInteractiveProgress(msg, maxval)
+
+def main(server, eventHandler):
# Get values of variables which control our output
includelogs = server.runCommand(["getVariable", "BBINCLUDELOGS"])
@@ -38,9 +72,13 @@ def init(server, eventHandler):
helper = uihelper.BBUIHelper()
+ console = logging.StreamHandler(sys.stdout)
+ format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
+ console.setFormatter(format)
+ logger.addHandler(console)
+
try:
cmdline = server.runCommand(["getCmdLineAction"])
- #print cmdline
if not cmdline:
return 1
ret = server.runCommand(cmdline)
@@ -51,6 +89,9 @@ def init(server, eventHandler):
print("XMLRPC Fault getting commandline:\n %s" % x)
return 1
+
+ parseprogress = None
+ cacheprogress = None
shutdown = 0
return_value = 0
while True:
@@ -58,7 +99,6 @@ def init(server, eventHandler):
event = eventHandler.waitEvent(0.25)
if event is None:
continue
- #print event
helper.eventHandler(event)
if isinstance(event, bb.runqueue.runQueueExitWait):
if not shutdown:
@@ -67,31 +107,21 @@ def init(server, eventHandler):
activetasks, failedtasks = helper.getTasks()
if activetasks:
print("Waiting for %s active tasks to finish:" % len(activetasks))
- tasknum = 1
- for task in activetasks:
+ for tasknum, task in enumerate(activetasks):
print("%s: %s (pid %s)" % (tasknum, activetasks[task]["title"], task))
- tasknum = tasknum + 1
- if isinstance(event, bb.msg.MsgPlain):
- print(event._message)
- continue
- if isinstance(event, bb.msg.MsgDebug):
- print('DEBUG: ' + event._message)
- continue
- if isinstance(event, bb.msg.MsgNote):
- print('NOTE: ' + event._message)
- continue
- if isinstance(event, bb.msg.MsgWarn):
- print('WARNING: ' + event._message)
- continue
- if isinstance(event, bb.msg.MsgError):
- return_value = 1
- print('ERROR: ' + event._message)
- continue
- if isinstance(event, bb.msg.MsgFatal):
- return_value = 1
- print('FATAL: ' + event._message)
+ if isinstance(event, logging.LogRecord):
+ if event.levelno >= format.ERROR:
+ return_value = 1
+ # For "normal" logging conditions, don't show note logs from tasks
+ # but do show them if the user has changed the default log level to
+ # include verbose/debug messages
+ if logger.getEffectiveLevel() > format.VERBOSE:
+ if event.taskpid != 0 and event.levelno <= format.NOTE:
+ continue
+ logger.handle(event)
continue
+
if isinstance(event, bb.build.TaskFailed):
return_value = 1
logfile = event.logfile
@@ -117,42 +147,47 @@ def init(server, eventHandler):
for line in lines:
print(line)
if isinstance(event, bb.build.TaskBase):
- print("NOTE: %s" % event._message)
+ logger.info(event._message)
+ continue
+ if isinstance(event, bb.event.ParseStarted):
+ parseprogress = new_progress("Parsing recipes", event.total).start()
continue
if isinstance(event, bb.event.ParseProgress):
- x = event.sofar
- y = event.total
- if os.isatty(sys.stdout.fileno()):
- sys.stdout.write("\rNOTE: Handling BitBake files: %s (%04d/%04d) [%2d %%]" % ( next(parsespin), x, y, x*100//y ) )
- sys.stdout.flush()
- else:
- if x == 1:
- sys.stdout.write("Parsing .bb files, please wait...")
- sys.stdout.flush()
- if x == y:
- sys.stdout.write("done.")
- sys.stdout.flush()
- if x == y:
- print(("\nParsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors."
- % ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)))
+ parseprogress.update(event.current)
+ continue
+ if isinstance(event, bb.event.ParseCompleted):
+ parseprogress.finish()
+ print(("Parsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors."
+ % ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)))
+ continue
+
+ if isinstance(event, bb.event.CacheLoadStarted):
+ cacheprogress = new_progress("Loading cache", event.total).start()
+ continue
+ if isinstance(event, bb.event.CacheLoadProgress):
+ cacheprogress.update(event.current)
+ continue
+ if isinstance(event, bb.event.CacheLoadCompleted):
+ cacheprogress.finish()
+ print("Loaded %d entries from dependency cache." % event.num_entries)
continue
- if isinstance(event, bb.command.CookerCommandCompleted):
+ if isinstance(event, bb.command.CommandCompleted):
break
- if isinstance(event, bb.command.CookerCommandSetExitCode):
+ if isinstance(event, bb.command.CommandFailed):
return_value = event.exitcode
- continue
- if isinstance(event, bb.command.CookerCommandFailed):
- return_value = 1
- print("Command execution failed: %s" % event.error)
+ logger.error("Command execution failed: %s", event.error)
break
+ if isinstance(event, bb.command.CommandExit):
+ return_value = event.exitcode
+ continue
if isinstance(event, bb.cooker.CookerExit):
break
if isinstance(event, bb.event.MultipleProviders):
- print("NOTE: multiple providers are available for %s%s (%s)" % (event._is_runtime and "runtime " or "",
- event._item,
- ", ".join(event._candidates)))
- print("NOTE: consider defining a PREFERRED_PROVIDER entry to match %s" % event._item)
+ logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
+ event._item,
+ ", ".join(event._candidates))
+ logger.info("consider defining a PREFERRED_PROVIDER entry to match %s", event._item)
continue
if isinstance(event, bb.event.NoProvider):
if event._runtime:
@@ -161,9 +196,26 @@ def init(server, eventHandler):
r = ""
if event._dependees:
- print("ERROR: Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)" % (r, event._item, ", ".join(event._dependees), r))
+ logger.error("Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)", r, event._item, ", ".join(event._dependees), r)
+ else:
+ logger.error("Nothing %sPROVIDES '%s'", r, event._item)
+ continue
+
+ if isinstance(event, bb.runqueue.runQueueTaskStarted):
+ if event.noexec:
+ tasktype = 'noexec task'
else:
- print("ERROR: Nothing %sPROVIDES '%s'" % (r, event._item))
+ tasktype = 'task'
+ logger.info("Running %s %s of %s (ID: %s, %s)",
+ tasktype,
+ event.stats.completed + event.stats.active +
+ event.stats.failed + 1,
+ event.stats.total, event.taskid, event.taskstring)
+ continue
+
+ if isinstance(event, bb.runqueue.runQueueTaskFailed):
+ logger.error("Task %s (%s) failed with exit code '%s'",
+ event.taskid, event.taskstring, event.exitcode)
continue
# ignore
@@ -175,8 +227,12 @@ def init(server, eventHandler):
bb.runqueue.runQueueExitWait)):
continue
- print("Unknown Event: %s" % event)
+ logger.error("Unknown event: %s", event)
+ except EnvironmentError as ioerror:
+ # ignore interrupted io
+ if ioerror.args[0] == 4:
+ pass
except KeyboardInterrupt:
if shutdown == 2:
print("\nThird Keyboard Interrupt, exit.\n")
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py
index 3fed4c58a..469f1b730 100644
--- a/bitbake/lib/bb/ui/ncurses.py
+++ b/bitbake/lib/bb/ui/ncurses.py
@@ -44,8 +44,9 @@
"""
-from __future__ import division
+from __future__ import division
+import logging
import os, sys, curses, itertools, time
import bb
import xmlrpclib
@@ -246,29 +247,35 @@ class NCursesUI:
event = eventHandler.waitEvent(0.25)
if not event:
continue
+
helper.eventHandler(event)
- #mw.appendText("%s\n" % event[0])
if isinstance(event, bb.build.TaskBase):
mw.appendText("NOTE: %s\n" % event._message)
- if isinstance(event, bb.msg.MsgDebug):
- mw.appendText('DEBUG: ' + event._message + '\n')
- if isinstance(event, bb.msg.MsgNote):
- mw.appendText('NOTE: ' + event._message + '\n')
- if isinstance(event, bb.msg.MsgWarn):
- mw.appendText('WARNING: ' + event._message + '\n')
- if isinstance(event, bb.msg.MsgError):
- mw.appendText('ERROR: ' + event._message + '\n')
- if isinstance(event, bb.msg.MsgFatal):
- mw.appendText('FATAL: ' + event._message + '\n')
+ if isinstance(event, logging.LogRecord):
+ mw.appendText(logging.getLevelName(event.levelno) + ': ' + event.getMessage() + '\n')
+
+ if isinstance(event, bb.event.CacheLoadStarted):
+ self.parse_total = event.total
+ if isinstance(event, bb.event.CacheLoadProgress):
+ x = event.current
+ y = self.parse_total
+ mw.setStatus("Loading Cache: %s [%2d %%]" % ( next(parsespin), x*100/y ) )
+ if isinstance(event, bb.event.CacheLoadCompleted):
+ mw.setStatus("Idle")
+ mw.appendText("Loaded %d entries from dependency cache.\n"
+ % ( event.num_entries))
+
+ if isinstance(event, bb.event.ParseStarted):
+ self.parse_total = event.total
if isinstance(event, bb.event.ParseProgress):
- x = event.sofar
- y = event.total
- if x == y:
- mw.setStatus("Idle")
- mw.appendText("Parsing finished. %d cached, %d parsed, %d skipped, %d masked."
+ x = event.current
+ y = self.parse_total
+ mw.setStatus("Parsing Recipes: %s [%2d %%]" % ( next(parsespin), x*100/y ) )
+ if isinstance(event, bb.event.ParseCompleted):
+ mw.setStatus("Idle")
+ mw.appendText("Parsing finished. %d cached, %d parsed, %d skipped, %d masked.\n"
% ( event.cached, event.parsed, event.skipped, event.masked ))
- else:
- mw.setStatus("Parsing: %s (%04d/%04d) [%2d %%]" % ( next(parsespin), x, y, x*100//y ) )
+
# if isinstance(event, bb.build.TaskFailed):
# if event.logfile:
# if data.getVar("BBINCLUDELOGS", d):
@@ -288,12 +295,16 @@ class NCursesUI:
# else:
# bb.msg.error(bb.msg.domain.Build, "see log in %s" % logfile)
- if isinstance(event, bb.command.CookerCommandCompleted):
- exitflag = True
- if isinstance(event, bb.command.CookerCommandFailed):
+ if isinstance(event, bb.command.CommandCompleted):
+ # stop so the user can see the result of the build, but
+ # also allow them to now exit with a single ^C
+ shutdown = 2
+ if isinstance(event, bb.command.CommandFailed):
mw.appendText("Command execution failed: %s" % event.error)
time.sleep(2)
exitflag = True
+ if isinstance(event, bb.command.CommandExit):
+ exitflag = True
if isinstance(event, bb.cooker.CookerExit):
exitflag = True
@@ -304,13 +315,18 @@ class NCursesUI:
if activetasks:
taw.appendText("Active Tasks:\n")
for task in activetasks.itervalues():
- taw.appendText(task["title"])
+ taw.appendText(task["title"] + '\n')
if failedtasks:
taw.appendText("Failed Tasks:\n")
for task in failedtasks:
- taw.appendText(task["title"])
+ taw.appendText(task["title"] + '\n')
curses.doupdate()
+ except EnvironmentError as ioerror:
+ # ignore interrupted io
+ if ioerror.args[0] == 4:
+ pass
+
except KeyboardInterrupt:
if shutdown == 2:
mw.appendText("Third Keyboard Interrupt, exit.\n")
@@ -324,7 +340,7 @@ class NCursesUI:
shutdown = shutdown + 1
pass
-def init(server, eventHandler):
+def main(server, eventHandler):
if not os.isatty(sys.stdout.fileno()):
print("FATAL: Unable to run 'ncurses' UI without a TTY.")
return
diff --git a/bitbake/lib/bb/ui/puccho.py b/bitbake/lib/bb/ui/puccho.py
index a627fc803..3ce4590c1 100644
--- a/bitbake/lib/bb/ui/puccho.py
+++ b/bitbake/lib/bb/ui/puccho.py
@@ -390,7 +390,7 @@ def running_build_failed_cb (running_build, manager):
print("build failed")
manager.notify_build_failed ()
-def init (server, eventHandler):
+def main (server, eventHandler):
# Initialise threading...
gobject.threads_init()
gtk.gdk.threads_init()
diff --git a/bitbake/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py
index f1e4d791e..2fef4e465 100644
--- a/bitbake/lib/bb/ui/uievent.py
+++ b/bitbake/lib/bb/ui/uievent.py
@@ -37,8 +37,8 @@ class BBUIEventQueue:
self.BBServer = BBServer
self.t = threading.Thread()
- self.t.setDaemon(True)
- self.t.run = self.startCallbackHandler
+ self.t.setDaemon(True)
+ self.t.run = self.startCallbackHandler
self.t.start()
def getEvent(self):
@@ -63,17 +63,20 @@ class BBUIEventQueue:
def queue_event(self, event):
self.eventQueueLock.acquire()
- self.eventQueue.append(pickle.loads(event))
+ self.eventQueue.append(event)
self.eventQueueNotify.set()
self.eventQueueLock.release()
+ def send_event(self, event):
+ self.queue_event(pickle.loads(event))
+
def startCallbackHandler(self):
server = UIXMLRPCServer()
- self.host, self.port = server.socket.getsockname()
+ self.host, self.port = server.socket.getsockname()
server.register_function( self.system_quit, "event.quit" )
- server.register_function( self.queue_event, "event.send" )
+ server.register_function( self.send_event, "event.send" )
server.socket.settimeout(1)
self.EventHandle = self.BBServer.registerEventHandler(self.host, self.port)
@@ -83,7 +86,7 @@ class BBUIEventQueue:
server.handle_request()
server.server_close()
- def system_quit( self ):
+ def system_quit( self ):
"""
Shut down the callback thread
"""
@@ -95,11 +98,11 @@ class BBUIEventQueue:
class UIXMLRPCServer (SimpleXMLRPCServer):
- def __init__( self, interface = ("localhost", 0) ):
+ def __init__( self, interface = ("localhost", 0) ):
self.quit = False
- SimpleXMLRPCServer.__init__( self,
- interface,
- requestHandler=SimpleXMLRPCRequestHandler,
+ SimpleXMLRPCServer.__init__( self,
+ interface,
+ requestHandler=SimpleXMLRPCRequestHandler,
logRequests=False, allow_none=True)
def get_request(self):
@@ -121,4 +124,4 @@ class UIXMLRPCServer (SimpleXMLRPCServer):
if request is None:
return
SimpleXMLRPCServer.process_request(self, request, client_address)
-
+
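The send_event/queue_event split above reflects that events arrive over XML-RPC as pickled strings and are unpickled before queueing. A minimal round-trip sketch; the event class is an illustrative stand-in, not a real bb.event type:

import pickle

class FakeParseProgress(object):
    # Stand-in for a bb.event object crossing the XML-RPC link.
    def __init__(self, current, total):
        self.current = current
        self.total = total

wire_data = pickle.dumps(FakeParseProgress(3, 10))   # what the server sends
event = pickle.loads(wire_data)                       # what send_event() does
print event.current, event.total                      # -> 3 10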
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index f468fafc1..6373912d8 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -21,10 +21,14 @@ BitBake Utility Functions
import re, fcntl, os, string, stat, shutil, time
import sys
-import bb
import errno
+import logging
+import bb
import bb.msg
from commands import getstatusoutput
+from contextlib import contextmanager
+
+logger = logging.getLogger("BitBake.Util")
# Version comparison
separators = ".-"
@@ -90,7 +94,7 @@ def vercmp(ta, tb):
(ea, va, ra) = ta
(eb, vb, rb) = tb
- r = int(ea)-int(eb)
+ r = int(ea or 0) - int(eb or 0)
if (r == 0):
r = vercmp_part(va, vb)
if (r == 0):
@@ -191,10 +195,10 @@ def vercmp_string(val1, val2):
val2 = val2[0].split('.')
# add back decimal point so that .03 does not become "3" !
- for x in range(1, len(val1)):
+ for x in xrange(1, len(val1)):
if val1[x][0] == '0' :
val1[x] = '.' + val1[x]
- for x in range(1, len(val2)):
+ for x in xrange(1, len(val2)):
if val2[x][0] == '0' :
val2[x] = '.' + val2[x]
@@ -211,10 +215,10 @@ def vercmp_string(val1, val2):
val2[-1] += '_' + val2_prepart
# The above code will extend version numbers out so they
# have the same number of digits.
- for x in range(0, len(val1)):
+ for x in xrange(0, len(val1)):
cmp1 = relparse(val1[x])
cmp2 = relparse(val2[x])
- for y in range(0, 3):
+ for y in xrange(0, 3):
myret = cmp1[y] - cmp2[y]
if myret != 0:
__vercmp_cache__[valkey] = myret
@@ -287,17 +291,6 @@ def join_deps(deps):
result.append(dep)
return ", ".join(result)
-def extend_deps(dest, src):
- """
- Extend the results from explode_dep_versions by appending all of the items
- in the second list, avoiding duplicates.
- """
- for dep in src:
- if dep not in dest:
- dest[dep] = src[dep]
- elif dest[dep] != src[dep]:
- dest[dep] = src[dep]
-
def _print_trace(body, line):
"""
Print the Environment of a Text Body
@@ -305,12 +298,11 @@ def _print_trace(body, line):
# print the environment of the method
min_line = max(1, line-4)
max_line = min(line + 4, len(body))
- for i in range(min_line, max_line + 1):
+ for i in xrange(min_line, max_line + 1):
if line == i:
- bb.msg.error(bb.msg.domain.Util, " *** %.4d:%s" % (i, body[i-1]) )
+ logger.error(' *** %.4d:%s', i, body[i-1])
else:
- bb.msg.error(bb.msg.domain.Util, " %.4d:%s" % (i, body[i-1]) )
-
+ logger.error(' %.4d:%s', i, body[i-1])
def better_compile(text, file, realfile, mode = "exec"):
"""
@@ -322,62 +314,65 @@ def better_compile(text, file, realfile, mode = "exec"):
except Exception as e:
# split the text into lines again
body = text.split('\n')
- bb.msg.error(bb.msg.domain.Util, "Error in compiling python function in: %s" % (realfile))
- bb.msg.error(bb.msg.domain.Util, str(e))
+ logger.error("Error in compiling python function in %s", realfile)
+ logger.error(str(e))
if e.lineno:
- bb.msg.error(bb.msg.domain.Util, "The lines leading to this error were:")
- bb.msg.error(bb.msg.domain.Util, "\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1]))
+ logger.error("The lines leading to this error were:")
+ logger.error("\t%d:%s:'%s'", e.lineno, e.__class__.__name__, body[e.lineno-1])
_print_trace(body, e.lineno)
else:
- bb.msg.error(bb.msg.domain.Util, "The function causing this error was:")
+ logger.error("The function causing this error was:")
for line in body:
- bb.msg.error(bb.msg.domain.Util, line)
+ logger.error(line)
+
raise
-def better_exec(code, context, text, realfile):
+def better_exec(code, context, text, realfile = "<code>"):
"""
    Similar to better_compile, better_exec will
print the lines that are responsible for the
error.
"""
import bb.parse
+ if not hasattr(code, "co_filename"):
+ code = better_compile(code, realfile, realfile)
try:
exec(code, _context, context)
- except:
+ except Exception:
(t, value, tb) = sys.exc_info()
if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
raise
- # print the Header of the Error Message
- bb.msg.error(bb.msg.domain.Util, "There was an error when executing a python function in: %s" % realfile)
- bb.msg.error(bb.msg.domain.Util, "Exception:%s Message:%s" % (t, value))
+ import traceback
+ exception = traceback.format_exception_only(t, value)
+ logger.error('Error executing a python function in %s:\n%s',
+ realfile, ''.join(exception))
# Strip 'us' from the stack (better_exec call)
tb = tb.tb_next
- import traceback
textarray = text.split('\n')
linefailed = traceback.tb_lineno(tb)
tbextract = traceback.extract_tb(tb)
tbformat = "\n".join(traceback.format_list(tbextract))
- bb.msg.error(bb.msg.domain.Util, "The stack trace of python calls that resulted in thie exception/failure was:")
+ logger.error("The stack trace of python calls that resulted in this exception/failure was:")
for line in tbformat.split('\n'):
- bb.msg.error(bb.msg.domain.Util, line)
+ logger.error(line)
- bb.msg.error(bb.msg.domain.Util, "The code that was being executed was:")
+ logger.error("The code that was being executed was:")
_print_trace(textarray, linefailed)
- bb.msg.error(bb.msg.domain.Util, "(file: '%s', lineno: %s, function: %s)" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
+ logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[0][0], tbextract[0][1], tbextract[0][2])
- # See if this is a function we constructed and has calls back into other functions in
+ # See if this is a function we constructed and has calls back into other functions in
# "text". If so, try and improve the context of the error by diving down the trace
level = 0
nexttb = tb.tb_next
while nexttb is not None:
if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
_print_trace(textarray, tbextract[level+1][1])
- bb.msg.error(bb.msg.domain.Util, "(file: '%s', lineno: %s, function: %s)" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
+ logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2])
else:
break
nexttb = tb.tb_next
@@ -391,16 +386,37 @@ def simple_exec(code, context):
def better_eval(source, locals):
return eval(source, _context, locals)
-def lockfile(name):
+@contextmanager
+def fileslocked(files):
+ """Context manager for locking and unlocking file locks."""
+ locks = []
+ if files:
+ for lockfile in files:
+ locks.append(bb.utils.lockfile(lockfile))
+
+ yield
+
+ for lock in locks:
+ bb.utils.unlockfile(lock)
+
+def lockfile(name, shared=False):
"""
Use the file fn as a lock file, return when the lock has been acquired.
Returns a variable to pass to unlockfile().
"""
path = os.path.dirname(name)
if not os.path.isdir(path):
- bb.msg.error(bb.msg.domain.Util, "Error, lockfile path does not exist!: %s" % path)
+ logger.error("Lockfile destination directory '%s' does not exist", path)
+ sys.exit(1)
+
+ if not os.access(path, os.W_OK):
+ logger.error("Error, lockfile path is not writable!: %s" % path)
sys.exit(1)
+ op = fcntl.LOCK_EX
+ if shared:
+ op = fcntl.LOCK_SH
+
while True:
# If we leave the lockfiles lying around there is no problem
# but we should clean up after ourselves. This gives potential
@@ -413,25 +429,31 @@ def lockfile(name):
# lock is the most likely to win it.
try:
- lf = open(name, "a + ")
- fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
- statinfo = os.fstat(lf.fileno())
+ lf = open(name, 'a+')
+ fileno = lf.fileno()
+ fcntl.flock(fileno, op)
+ statinfo = os.fstat(fileno)
if os.path.exists(lf.name):
statinfo2 = os.stat(lf.name)
if statinfo.st_ino == statinfo2.st_ino:
return lf
- # File no longer exists or changed, retry
- lf.close
- except Exception as e:
+ lf.close()
+ except Exception:
continue
def unlockfile(lf):
"""
Unlock a file locked using lockfile()
"""
- os.unlink(lf.name)
+ try:
+ # If we had a shared lock, we need to promote to exclusive before
+ # removing the lockfile. Attempt this, ignore failures.
+ fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
+ os.unlink(lf.name)
+ except (IOError, OSError):
+ pass
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
- lf.close
+ lf.close()
def md5_file(filename):
"""
@@ -465,9 +487,9 @@ def sha256_file(filename):
s.update(line)
return s.hexdigest()
-# Variables which are preserved from the original environment *and* exported
-# into our worker context
-def preserved_envvars_export_list():
+def preserved_envvars_exported():
+ """Variables which are taken from the environment and placed in and exported
+ from the metadata"""
return [
'BB_TASKHASH',
'HOME',
@@ -480,9 +502,9 @@ def preserved_envvars_export_list():
'USERNAME',
]
-# Variables which are preserved from the original environment *and* exported
-# into our worker context for interactive tasks (e.g. requiring X)
-def preserved_envvars_export_interactive_list():
+def preserved_envvars_exported_interactive():
+ """Variables which are taken from the environment and placed in and exported
+ from the metadata, for interactive tasks"""
return [
'COLORTERM',
'DBUS_SESSION_BUS_ADDRESS',
@@ -500,8 +522,8 @@ def preserved_envvars_export_interactive_list():
'XDG_SESSION_COOKIE',
]
-# Variables which are preserved from the original environment into the datastore
-def preserved_envvars_list():
+def preserved_envvars():
+ """Variables which are taken from the environment and placed in the metadata"""
v = [
'BBPATH',
'BB_PRESERVE_ENV',
@@ -510,7 +532,7 @@ def preserved_envvars_list():
'LANG',
'_',
]
- return v + preserved_envvars_export_list() + preserved_envvars_export_interactive_list()
+ return v + preserved_envvars_exported() + preserved_envvars_exported_interactive()
def filter_environment(good_vars):
"""
@@ -528,12 +550,12 @@ def filter_environment(good_vars):
del os.environ[key]
if len(removed_vars):
- bb.msg.debug(1, bb.msg.domain.Util, "Removed the following variables from the environment: %s" % (", ".join(removed_vars)))
+ logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars))
return removed_vars
-def create_intereactive_env(d):
- for k in preserved_envvars_export_interactive_list():
+def create_interactive_env(d):
+ for k in preserved_envvars_exported_interactive():
os.setenv(k, bb.data.getVar(k, d, True))
def clean_environment():
@@ -545,7 +567,7 @@ def clean_environment():
if 'BB_ENV_WHITELIST' in os.environ:
good_vars = os.environ['BB_ENV_WHITELIST'].split()
else:
- good_vars = preserved_envvars_list()
+ good_vars = preserved_envvars()
if 'BB_ENV_EXTRAWHITE' in os.environ:
good_vars.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
filter_environment(good_vars)
@@ -568,6 +590,20 @@ def build_environment(d):
if export:
os.environ[var] = bb.data.getVar(var, d, True) or ""
+def remove(path, recurse=False):
+ """Equivalent to rm -f or rm -rf"""
+ if not path:
+ return
+ import os, errno, shutil, glob
+ for name in glob.glob(path):
+ try:
+ os.unlink(name)
+ except OSError as exc:
+ if recurse and exc.errno == errno.EISDIR:
+ shutil.rmtree(name)
+ elif exc.errno != errno.ENOENT:
+ raise
+
def prunedir(topdir):
# Delete everything reachable from the directory named in 'topdir'.
# CAUTION: This is dangerous!
@@ -593,15 +629,13 @@ def prune_suffix(var, suffixes, d):
return var.replace(suffix, "")
return var
-def mkdirhier(dir):
+def mkdirhier(directory):
"""Create a directory like 'mkdir -p', but does not complain if
directory already exists like os.makedirs
"""
- bb.msg.debug(3, bb.msg.domain.Util, "mkdirhier(%s)" % dir)
try:
- os.makedirs(dir)
- bb.msg.debug(2, bb.msg.domain.Util, "created " + dir)
+ os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise e
@@ -787,13 +821,12 @@ def init_logger(logger, verbose, debug, debug_domains):
Set verbosity and debug levels in the logger
"""
- if verbose:
- logger.set_verbose(True)
-
if debug:
- logger.set_debug_level(debug)
+ bb.msg.set_debug_level(debug)
+ elif verbose:
+ bb.msg.set_verbose(True)
else:
- logger.set_debug_level(0)
+ bb.msg.set_debug_level(0)
if debug_domains:
- logger.set_debug_domains(debug_domains)
+ bb.msg.set_debug_domains(debug_domains)
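The utils.py changes above give lockfile() a shared flag and make unlockfile() promote a shared lock to exclusive before unlinking. A stripped-down sketch of the underlying fcntl calls, with illustrative helper names and paths:

import fcntl
import os

def take_lock(name, shared=False):
    # LOCK_SH lets several readers hold the lock; LOCK_EX is a writer lock.
    lf = open(name, 'a+')
    fcntl.flock(lf.fileno(), fcntl.LOCK_SH if shared else fcntl.LOCK_EX)
    return lf

def release_lock(lf):
    try:
        # Try to upgrade to exclusive so it is safe to remove the lock file.
        fcntl.flock(lf.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
    lf.close()

lock = take_lock('/tmp/bitbake.lock', shared=True)
release_lock(lock)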
diff --git a/bitbake/lib/progressbar.py b/bitbake/lib/progressbar.py
new file mode 100644
index 000000000..b668647a3
--- /dev/null
+++ b/bitbake/lib/progressbar.py
@@ -0,0 +1,384 @@
+#!/usr/bin/python
+# -*- coding: iso-8859-1 -*-
+#
+# progressbar - Text progressbar library for python.
+# Copyright (c) 2005 Nilton Volpato
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+
+"""Text progressbar library for python.
+
+This library provides a text mode progressbar. This is typically used
+to display the progress of a long running operation, providing a
+visual clue that processing is underway.
+
+The ProgressBar class manages the progress, and the format of the line
+is given by a number of widgets. A widget is an object that may
+display differently depending on the state of the progress. There are
+three types of widget:
+- a string, which always shows itself;
+- a ProgressBarWidget, which may return a different value every time
+its update method is called; and
+- a ProgressBarWidgetHFill, which is like ProgressBarWidget, except it
+expands to fill the remaining width of the line.
+
+The progressbar module is very easy to use, yet very powerful, and it
+automatically supports features like auto-resizing when available.
+"""
+
+from __future__ import division
+
+__author__ = "Nilton Volpato"
+__author_email__ = "first-name dot last-name @ gmail.com"
+__date__ = "2006-05-07"
+__version__ = "2.3-dev"
+
+import sys, time, os
+from array import array
+try:
+ from fcntl import ioctl
+ import termios
+except ImportError:
+ pass
+import signal
+try:
+ basestring
+except NameError:
+ basestring = (str,)
+
+class ProgressBarWidget(object):
+ """This is an element of ProgressBar formatting.
+
+ The ProgressBar object will call its update method when an update
+ is needed. Its size may change between calls, but the results will
+ not be good if the size changes drastically and repeatedly.
+ """
+ def update(self, pbar):
+ """Returns the string representing the widget.
+
+ The parameter pbar is a reference to the calling ProgressBar,
+ where one can access attributes of the class for knowing how
+ the update must be made.
+
+ At least this function must be overridden."""
+ pass
+
+class ProgressBarWidgetHFill(object):
+ """This is a variable width element of ProgressBar formatting.
+
+ The ProgressBar object will call its update method, informing the
+ width this object must be made. This is like TeX \\hfill: it will
+ expand to fill the line. You can use more than one in the same
+ line, and they will all have the same width, and together will
+ fill the line.
+ """
+ def update(self, pbar, width):
+ """Returns the string representing the widget.
+
+ The parameter pbar is a reference to the calling ProgressBar,
+ where one can access attributes of the class for knowing how
+ the update must be made. The parameter width is the total
+ horizontal width the widget must have.
+
+ At least this function must be overridden."""
+ pass
+
+
+class ETA(ProgressBarWidget):
+ "Widget for the Estimated Time of Arrival"
+ def format_time(self, seconds):
+ return time.strftime('%H:%M:%S', time.gmtime(seconds))
+ def update(self, pbar):
+ if pbar.currval == 0:
+ return 'ETA: --:--:--'
+ elif pbar.finished:
+ return 'Time: %s' % self.format_time(pbar.seconds_elapsed)
+ else:
+ elapsed = pbar.seconds_elapsed
+ eta = elapsed * pbar.maxval / pbar.currval - elapsed
+ return 'ETA: %s' % self.format_time(eta)
+
+class FileTransferSpeed(ProgressBarWidget):
+ "Widget for showing the transfer speed (useful for file transfers)."
+ def __init__(self, unit='B'):
+ self.unit = unit
+ self.fmt = '%6.2f %s'
+ self.prefixes = ['', 'K', 'M', 'G', 'T', 'P']
+ def update(self, pbar):
+ if pbar.seconds_elapsed < 2e-6:#== 0:
+ bps = 0.0
+ else:
+ bps = pbar.currval / pbar.seconds_elapsed
+ spd = bps
+ for u in self.prefixes:
+ if spd < 1000:
+ break
+ spd /= 1000
+ return self.fmt % (spd, u + self.unit + '/s')
+
+class RotatingMarker(ProgressBarWidget):
+ "A rotating marker for filling the bar of progress."
+ def __init__(self, markers='|/-\\'):
+ self.markers = markers
+ self.curmark = -1
+ def update(self, pbar):
+ if pbar.finished:
+ return self.markers[0]
+ self.curmark = (self.curmark + 1) % len(self.markers)
+ return self.markers[self.curmark]
+
+class Percentage(ProgressBarWidget):
+ "Just the percentage done."
+ def update(self, pbar):
+ return '%3d%%' % pbar.percentage()
+
+class SimpleProgress(ProgressBarWidget):
+ "Returns what is already done and the total, e.g.: '5 of 47'"
+ def __init__(self, sep=' of '):
+ self.sep = sep
+ def update(self, pbar):
+ return '%d%s%d' % (pbar.currval, self.sep, pbar.maxval)
+
+class Bar(ProgressBarWidgetHFill):
+ "The bar of progress. It will stretch to fill the line."
+ def __init__(self, marker='#', left='|', right='|'):
+ self.marker = marker
+ self.left = left
+ self.right = right
+ def _format_marker(self, pbar):
+ if isinstance(self.marker, basestring):
+ return self.marker
+ else:
+ return self.marker.update(pbar)
+ def update(self, pbar, width):
+ percent = pbar.percentage()
+ cwidth = width - len(self.left) - len(self.right)
+ marked_width = int(percent * cwidth // 100)
+ m = self._format_marker(pbar)
+ bar = (self.left + (m * marked_width).ljust(cwidth) + self.right)
+ return bar
+
+class ReverseBar(Bar):
+ "The reverse bar of progress, or bar of regress. :)"
+ def update(self, pbar, width):
+ percent = pbar.percentage()
+ cwidth = width - len(self.left) - len(self.right)
+ marked_width = int(percent * cwidth // 100)
+ m = self._format_marker(pbar)
+ bar = (self.left + (m*marked_width).rjust(cwidth) + self.right)
+ return bar
+
+default_widgets = [Percentage(), ' ', Bar()]
+class ProgressBar(object):
+ """This is the ProgressBar class, it updates and prints the bar.
+
+ A common way of using it is like:
+ >>> pbar = ProgressBar().start()
+ >>> for i in xrange(100):
+ ... # do something
+ ... pbar.update(i+1)
+ ...
+ >>> pbar.finish()
+
+ You can also use a progressbar as an iterator:
+ >>> progress = ProgressBar()
+ >>> for i in progress(some_iterable):
+ ... # do something
+ ...
+
+ But anything you want to do is possible (well, almost anything).
+ You can supply different widgets of any type in any order. And you
+ can even write your own widgets! There are many widgets already
+ shipped and you should experiment with them.
+
+ The term_width parameter must be an integer or None. In the latter case
+ it will try to guess it; if that fails, it will default to 80 columns.
+
+ When implementing a widget update method you may access any
+ attribute or function of the ProgressBar object calling the
+ widget's update method. The most important attributes you would
+ like to access are:
+ - currval: current value of the progress, 0 <= currval <= maxval
+ - maxval: maximum (and final) value of the progress
+ - finished: True if the bar has finished (reached 100%), False o/w
+ - start_time: the time when start() method of ProgressBar was called
+ - seconds_elapsed: seconds elapsed since start_time
+ - percentage(): percentage of the progress [0..100]. This is a method.
+
+ The attributes above are unlikely to change between different versions,
+ the other ones may change or cease to exist without notice, so try to rely
+ only on the ones documented above if you are extending the progress bar.
+ """
+
+ __slots__ = ('currval', 'fd', 'finished', 'last_update_time', 'maxval',
+ 'next_update', 'num_intervals', 'seconds_elapsed',
+ 'signal_set', 'start_time', 'term_width', 'update_interval',
+ 'widgets', '_iterable')
+
+ _DEFAULT_MAXVAL = 100
+
+ def __init__(self, maxval=None, widgets=default_widgets, term_width=None,
+ fd=sys.stderr):
+ self.maxval = maxval
+ self.widgets = widgets
+ self.fd = fd
+ self.signal_set = False
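+ # Use the given width if provided; otherwise track the terminal size
+ # (handling SIGWINCH), falling back to $COLUMNS or 80 columns.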
+ if term_width is not None:
+ self.term_width = term_width
+ else:
+ try:
+ self._handle_resize(None, None)
+ signal.signal(signal.SIGWINCH, self._handle_resize)
+ self.signal_set = True
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except:
+ self.term_width = int(os.environ.get('COLUMNS', 80)) - 1
+
+ self.currval = 0
+ self.finished = False
+ self.start_time = None
+ self.last_update_time = None
+ self.seconds_elapsed = 0
+ self._iterable = None
+
+ def __call__(self, iterable):
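+ # Wrap an iterable so that iterating over the ProgressBar advances
+ # the bar once per item (see the class docstring for an example).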
+ try:
+ self.maxval = len(iterable)
+ except TypeError:
+ # If the iterable has no length, rely on the maxval provided by
+ # the user; otherwise fail.
+ if not (isinstance(self.maxval, (int, long)) and self.maxval > 0):
+ raise RuntimeError('Could not determine maxval from iterable. '
+ 'You must explicitly provide a maxval.')
+ self._iterable = iter(iterable)
+ self.start()
+ return self
+
+ def __iter__(self):
+ return self
+
+ def next(self):
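+ # Python 2 iterator protocol: fetch the next item, advance the bar,
+ # and finish the bar when the wrapped iterable is exhausted.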
+ try:
+ value = self._iterable.next()
+ self.update(self.currval + 1)
+ return value
+ except StopIteration:
+ self.finish()
+ raise
+
+ def _handle_resize(self, signum, frame):
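+ # Query the terminal size with the TIOCGWINSZ ioctl (rows, columns)
+ # whenever the window changes size.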
+ h, w = array('h', ioctl(self.fd, termios.TIOCGWINSZ, '\0' * 8))[:2]
+ self.term_width = w
+
+ def percentage(self):
+ "Returns the percentage of the progress."
+ return self.currval * 100.0 / self.maxval
+
+ def _format_widgets(self):
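+ # First render all fixed-width widgets and plain strings, then split
+ # the remaining terminal width evenly among the horizontally filling
+ # widgets (ProgressBarWidgetHFill, e.g. Bar).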
+ r = []
+ hfill_inds = []
+ num_hfill = 0
+ currwidth = 0
+ for i, w in enumerate(self.widgets):
+ if isinstance(w, ProgressBarWidgetHFill):
+ r.append(w)
+ hfill_inds.append(i)
+ num_hfill += 1
+ elif isinstance(w, basestring):
+ r.append(w)
+ currwidth += len(w)
+ else:
+ weval = w.update(self)
+ currwidth += len(weval)
+ r.append(weval)
+ for iw in hfill_inds:
+ widget_width = int((self.term_width - currwidth) // num_hfill)
+ r[iw] = r[iw].update(self, widget_width)
+ return r
+
+ def _format_line(self):
+ return ''.join(self._format_widgets()).ljust(self.term_width)
+
+ def _next_update(self):
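+ # The currval threshold at which the next redraw should happen,
+ # based on dividing the run into num_intervals update intervals.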
+ return int((int(self.num_intervals *
+ (self.currval / self.maxval)) + 1) *
+ self.update_interval)
+
+ def _need_update(self):
+ """Returns true when the progressbar should print an updated line.
+
+ You can override this method if you want finer grained control over
+ updates.
+
+ The current implementation is optimized to be as fast as possible and
+ as economical as possible in the number of updates. However, depending
+ on your usage you may want to do more updates. For instance, if your
+ progressbar stays in the same percentage for a long time, and you want
+ to update other widgets, like ETA, then you could return True after
+ some time has passed with no updates.
+
+ Ideally you could call self._format_line() and see whether it differs
+ from the previous _format_line() call, but calling _format_line() takes
+ around 20 times longer than this implementation of _need_update().
+ """
+ return self.currval >= self.next_update
+
+ def update(self, value):
+ "Updates the progress bar to a new value."
+ assert 0 <= value <= self.maxval, '0 <= %d <= %d' % (value, self.maxval)
+ self.currval = value
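+ # Skip the redraw unless currval has crossed the next_update threshold.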
+ if not self._need_update():
+ return
+ if self.start_time is None:
+ raise RuntimeError('You must call start() before calling update()')
+ now = time.time()
+ self.seconds_elapsed = now - self.start_time
+ self.next_update = self._next_update()
+ self.fd.write(self._format_line() + '\r')
+ self.last_update_time = now
+
+ def start(self):
+ """Starts measuring time, and prints the bar at 0%.
+
+ It returns self so you can use it like this:
+ >>> pbar = ProgressBar().start()
+ >>> for i in xrange(100):
+ ... # do something
+ ... pbar.update(i+1)
+ ...
+ >>> pbar.finish()
+ """
+ if self.maxval is None:
+ self.maxval = self._DEFAULT_MAXVAL
+ assert self.maxval > 0
+
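+ # Throttle redraws: aim for at most ~num_intervals updates over the
+ # whole run (at least 100, or one per terminal column if wider).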
+ self.num_intervals = max(100, self.term_width)
+ self.update_interval = self.maxval / self.num_intervals
+ self.next_update = 0
+
+ self.start_time = self.last_update_time = time.time()
+ self.update(0)
+ return self
+
+ def finish(self):
+ """Used to tell the progress is finished."""
+ self.finished = True
+ self.update(self.maxval)
+ self.fd.write('\n')
+ if self.signal_set:
+ signal.signal(signal.SIGWINCH, signal.SIG_DFL)
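+
+# Illustrative sketch (not part of the original module): one way to compose
+# the widgets defined above, shown here as a comment only.
+#
+#     widgets = [SimpleProgress(), ' ', Percentage(), ' ', Bar(marker='=')]
+#     pbar = ProgressBar(widgets=widgets, maxval=50).start()
+#     for i in xrange(50):
+#         ...                 # do some work
+#         pbar.update(i + 1)
+#     pbar.finish()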