author    Richard Purdie <richard@openedhand.com>   2006-08-27 16:01:33 +0000
committer Richard Purdie <richard@openedhand.com>   2006-08-27 16:01:33 +0000
commit    93a8d0662ed361c76defe75a5cd84f203fdb1af1 (patch)
tree      3481256915c4474980290e53c086af8f8389a10e
parent    37d03b1b00610716b27259a42c8718c8628b2ad3 (diff)
classes: Sync with OE
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@651 311d38ba-8fff-0310-9ca6-ca027cbcb966
-rw-r--r--  meta/classes/base.bbclass          |  20
-rw-r--r--  meta/classes/cpan.bbclass          |   3
-rw-r--r--  meta/classes/efl.bbclass           |   2
-rw-r--r--  meta/classes/icecc.bbclass         | 120
-rw-r--r--  meta/classes/kernel-arch.bbclass   |   1
-rw-r--r--  meta/classes/kernel.bbclass        |  21
-rw-r--r--  meta/classes/module.bbclass        |   2
-rw-r--r--  meta/classes/opie.bbclass          |   2
-rw-r--r--  meta/classes/pkgconfig.bbclass     |   2
-rw-r--r--  meta/classes/sanity.bbclass        |   9
-rw-r--r--  meta/classes/sdl.bbclass           |   2
-rw-r--r--  meta/classes/tinderclient.bbclass  |  48
12 files changed, 155 insertions(+), 77 deletions(-)
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index 0081a52d5..546992e71 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -64,7 +64,7 @@ def base_set_filespath(path, d):
overrides = overrides + ":"
for o in overrides.split(":"):
filespath.append(os.path.join(p, o))
- bb.data.setVar("FILESPATH", ":".join(filespath), d)
+ return ":".join(filespath)
FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
@@ -187,7 +187,7 @@ oe_libinstall() {
dir=`pwd`
fi
dotlai=$libname.lai
- dir=$dir`(cd $dir; find -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
+ dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
olddir=`pwd`
__runcmd cd $dir
@@ -413,10 +413,10 @@ def oe_unpack_file(file, data, url = None):
destdir = "."
bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
+
if not cmd:
return True
-
dest = os.path.join(os.getcwd(), os.path.basename(file))
if os.path.exists(dest):
if os.path.samefile(file, dest):
@@ -478,7 +478,8 @@ python base_eventhandler() {
msg += messages.get(name[5:]) or name[5:]
elif name == "UnsatisfiedDep":
msg += "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
- note(msg)
+ if msg:
+ note(msg)
if name.startswith("BuildStarted"):
bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
@@ -486,7 +487,7 @@ python base_eventhandler() {
path_to_packages = path_to_bbfiles[:path_to_bbfiles.rindex( "packages" )]
monotone_revision = "<unknown>"
try:
- monotone_revision = file( "%s/MT/revision" % path_to_packages ).read().strip()
+ monotone_revision = file( "%s/_MTN/revision" % path_to_packages ).read().strip()
except IOError:
pass
bb.data.setVar( 'OE_REVISION', monotone_revision, e.data )
@@ -519,6 +520,7 @@ python base_eventhandler() {
addtask configure after do_unpack do_patch
do_configure[dirs] = "${S} ${B}"
do_configure[bbdepcmd] = "do_populate_staging"
+do_configure[deptask] = "do_populate_staging"
base_do_configure() {
:
}
@@ -645,7 +647,7 @@ python __anonymous () {
if need_machine:
import re
this_machine = bb.data.getVar('MACHINE', d, 1)
- if not re.match(need_machine, this_machine):
+ if this_machine and not re.match(need_machine, this_machine):
raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
pn = bb.data.getVar('PN', d, 1)
@@ -719,12 +721,18 @@ ftp://ftp.kernel.org/pub ftp://ftp.jp.kernel.org/pub
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
+ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
+ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
+ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
+
+
ftp://.*/.*/ http://www.oesources.org/source/current/
http://.*/.*/ http://www.oesources.org/source/current/
}
+
diff --git a/meta/classes/cpan.bbclass b/meta/classes/cpan.bbclass
index 853abfd1b..78b902f85 100644
--- a/meta/classes/cpan.bbclass
+++ b/meta/classes/cpan.bbclass
@@ -1,7 +1,8 @@
FILES_${PN} += '${libdir}/perl5'
+EXTRA_CPANFLAGS = ""
cpan_do_configure () {
- perl Makefile.PL
+ perl Makefile.PL ${EXTRA_CPANFLAGS}
if [ "${BUILD_SYS}" != "${HOST_SYS}" ]; then
. ${STAGING_DIR}/${TARGET_SYS}/perl/config.sh
sed -e "s:\(SITELIBEXP = \).*:\1${sitelibexp}:; s:\(SITEARCHEXP = \).*:\1${sitearchexp}:; s:\(INSTALLVENDORLIB = \).*:\1${D}${libdir}/perl5:; s:\(INSTALLVENDORARCH = \).*:\1${D}${libdir}/perl5:" < Makefile > Makefile.new
diff --git a/meta/classes/efl.bbclass b/meta/classes/efl.bbclass
index 9c490284c..c258758d3 100644
--- a/meta/classes/efl.bbclass
+++ b/meta/classes/efl.bbclass
@@ -44,6 +44,6 @@ do_stage_append () {
}
PACKAGES = "${PN} ${PN}-themes ${PN}-dev ${PN}-examples"
-FILES_${PN}-dev = "${bindir}/${PN}-config ${libdir}/pkgconfig ${libdir}/lib*.?a ${libdir}/lib*.a"
+FILES_${PN}-dev = "${bindir}/${PN}-config ${libdir}/pkgconfig ${libdir}/lib*.?a ${libdir}/lib*.a ${includedir}"
FILES_${PN}-examples = "${bindir} ${datadir}"
diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass
index 7dfcfc29a..66a5bf79e 100644
--- a/meta/classes/icecc.bbclass
+++ b/meta/classes/icecc.bbclass
@@ -1,9 +1,17 @@
# IceCream distributed compiling support
-#
+#
# We need to create a tar.bz2 of our toolchain and set
# ICECC_VERSION, ICECC_CXX and ICEC_CC
#
+def icc_determine_gcc_version(gcc):
+ """
+ Hack to determine the version of GCC
+
+ 'i686-apple-darwin8-gcc-4.0.1 (GCC) 4.0.1 (Apple Computer, Inc. build 5363)'
+ """
+ return os.popen("%s --version" % gcc ).readline()[2]
+
def create_env(bb,d):
"""
Create a tar.bz of the current toolchain
@@ -13,7 +21,7 @@ def create_env(bb,d):
# host prefix is empty (let us duplicate the query for ease)
prefix = bb.data.expand('${HOST_PREFIX}', d)
if len(prefix) == 0:
- return ""
+ return ""
import tarfile
import socket
@@ -23,51 +31,66 @@ def create_env(bb,d):
prefix = bb.data.expand('${HOST_PREFIX}' , d)
distro = bb.data.expand('${DISTRO}', d)
target_sys = bb.data.expand('${TARGET_SYS}', d)
- #float = bb.data.getVar('${TARGET_FPU}', d)
- float = "anyfloat"
+ float = bb.data.getVar('${TARGET_FPU}', d) or "hard"
name = socket.gethostname()
+ # Stupid check to determine if we have built a libc and a cross
+ # compiler.
try:
- os.stat(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2')
- os.stat(ice_dir + '/' + target_sys + '/bin/g++')
+ os.stat(os.path.join(ice_dir, target_sys, 'lib', 'ld-linux.so.2'))
+ os.stat(os.path.join(ice_dir, target_sys, 'bin', 'g++'))
except:
- return ""
+ return ""
- VERSION = '3.4.3'
+ VERSION = icc_determine_gcc_version( os.path.join(ice_dir,target_sys,"bin","g++") )
cross_name = prefix + distro + target_sys + float +VERSION+ name
- tar_file = ice_dir + '/ice/' + cross_name + '.tar.bz2'
+ tar_file = os.path.join(ice_dir, 'ice', cross_name + '.tar.bz2')
try:
os.stat(tar_file)
return tar_file
except:
- try:
- os.makedirs(ice_dir+'/ice')
- except:
- pass
+ try:
+ os.makedirs(os.path.join(ice_dir,'ice'))
+ except:
+ pass
# FIXME find out the version of the compiler
+ # Consider using -print-prog-name={cc1,cc1plus}
+ # and -print-file-name=specs
+
+ # We will use the GCC to tell us which tools to use
+ # What we need is:
+ # -gcc
+ # -g++
+ # -as
+ # -cc1
+ # -cc1plus
+ # and we add them to /usr/bin
+
tar = tarfile.open(tar_file, 'w:bz2')
- tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
- target_sys + 'cross/lib/ld-linux.so.2')
- tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
- target_sys + 'cross/lib/ld-2.3.3.so')
- tar.add(ice_dir + '/' + target_sys + '/lib/libc-2.3.3.so',
- target_sys + 'cross/lib/libc-2.3.3.so')
- tar.add(ice_dir + '/' + target_sys + '/lib/libc.so.6',
- target_sys + 'cross/lib/libc.so.6')
- tar.add(ice_dir + '/' + target_sys + '/bin/gcc',
- target_sys + 'cross/usr/bin/gcc')
- tar.add(ice_dir + '/' + target_sys + '/bin/g++',
- target_sys + 'cross/usr/bin/g++')
- tar.add(ice_dir + '/' + target_sys + '/bin/as',
- target_sys + 'cross/usr/bin/as')
- tar.add(ice_dir + '/lib/gcc/' + target_sys +'/'+ VERSION + '/specs',
- target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/specs')
- tar.add(ice_dir + '/libexec/gcc/'+target_sys+'/' + VERSION + '/cc1',
- target_sys + 'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1')
- tar.add(ice_dir + '/libexec/gcc/arm-linux/' + VERSION + '/cc1plus',
- target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1plus')
+
+ # Now add the required files
+ tar.add(os.path.join(ice_dir,target_sys,'bin','gcc'),
+ os.path.join("usr","bin","gcc") )
+ tar.add(os.path.join(ice_dir,target_sys,'bin','g++'),
+ os.path.join("usr","bin","g++") )
+ tar.add(os.path.join(ice_dir,target_sys,'bin','as'),
+ os.path.join("usr","bin","as") )
+
+ # Now let us find cc1 and cc1plus
+ cc1 = os.popen("%s -print-prog-name=cc1" % data.getVar('CC', d, True)).read()[:-1]
+ cc1plus = os.popen("%s -print-prog-name=cc1plus" % data.getVar('CC', d, True)).read()[:-1]
+ spec = os.popen("%s -print-file-name=specs" % data.getVar('CC', d, True)).read()[:-1]
+
+ # CC1 and CC1PLUS should be there...
+ tar.add(cc1, os.path.join('usr', 'bin', 'cc1'))
+ tar.add(cc1plus, os.path.join('usr', 'bin', 'cc1plus'))
+
+ # spec - if it exists
+ if os.path.exists(spec):
+ tar.add(spec)
+
tar.close()
return tar_file
@@ -78,7 +101,7 @@ def create_path(compilers, type, bb, d):
"""
import os
- staging = bb.data.expand('${STAGING_DIR}', d) + "/ice/" + type
+ staging = os.path.join(bb.data.expand('${STAGING_DIR}', d), "ice", type)
icecc = bb.data.getVar('ICECC_PATH', d)
# Create the dir if necessary
@@ -89,7 +112,7 @@ def create_path(compilers, type, bb, d):
for compiler in compilers:
- gcc_path = staging + "/" + compiler
+ gcc_path = os.path.join(staging, compiler)
try:
os.stat(gcc_path)
except:
@@ -102,15 +125,14 @@ def use_icc_version(bb,d):
# Constin native native
prefix = bb.data.expand('${HOST_PREFIX}', d)
if len(prefix) == 0:
- return "no"
-
-
- native = bb.data.expand('${PN}', d)
- blacklist = [ "-cross", "-native" ]
+ return "no"
+
+
+ blacklist = [ "cross", "native" ]
for black in blacklist:
- if black in native:
- return "no"
+ if bb.data.inherits_class(black, d):
+ return "no"
return "yes"
@@ -118,13 +140,13 @@ def icc_path(bb,d,compile):
native = bb.data.expand('${PN}', d)
blacklist = [ "ulibc", "glibc", "ncurses" ]
for black in blacklist:
- if black in native:
- return ""
+ if black in native:
+ return ""
- if "-native" in native:
- compile = False
- if "-cross" in native:
- compile = False
+ blacklist = [ "cross", "native" ]
+ for black in blacklist:
+ if bb.data.inherits_class(black, d):
+ compile = False
prefix = bb.data.expand('${HOST_PREFIX}', d)
if compile and len(prefix) != 0:
@@ -151,6 +173,6 @@ do_compile_prepend() {
export ICECC_CXX="${HOST_PREFIX}g++"
if [ "${@use_icc_version(bb,d)}" = "yes" ]; then
- export ICECC_VERSION="${@icc_version(bb,d)}"
+ export ICECC_VERSION="${@icc_version(bb,d)}"
fi
}
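
For context on the icc_determine_gcc_version() helper added above: the approach is to parse the first line of `<gcc> --version`, as in the docstring's sample output. A rough sketch of that idea in plain Python (assumed field layout for common GCC builds; not the class's exact code):

import subprocess

def gcc_version(gcc="gcc"):
    # The first line typically reads "<name> (GCC) <version> ...", e.g.
    # "i686-apple-darwin8-gcc-4.0.1 (GCC) 4.0.1 (Apple Computer, Inc. build 5363)",
    # so the whitespace-split third field carries the version number.
    first_line = subprocess.run([gcc, "--version"], capture_output=True,
                                text=True, check=True).stdout.splitlines()[0]
    return first_line.split()[2]

# gcc_version("arm-linux-g++") would yield e.g. "4.0.1"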
diff --git a/meta/classes/kernel-arch.bbclass b/meta/classes/kernel-arch.bbclass
index 92a6c982f..b331d2561 100644
--- a/meta/classes/kernel-arch.bbclass
+++ b/meta/classes/kernel-arch.bbclass
@@ -19,6 +19,7 @@ def map_kernel_arch(a, d):
elif re.match('armeb$', a): return 'arm'
elif re.match('powerpc$', a): return 'ppc'
elif re.match('mipsel$', a): return 'mips'
+ elif re.match('sh(3|4)$', a): return 'sh'
elif a in valid_archs: return a
else:
bb.error("cannot map '%s' to a linux kernel architecture" % a)
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass
index ad51c4e03..c81112ede 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes/kernel.bbclass
@@ -109,6 +109,21 @@ kernel_do_stage() {
mkdir -p ${STAGING_KERNEL_DIR}/include/pcmcia
cp -fR include/pcmcia/* ${STAGING_KERNEL_DIR}/include/pcmcia/
+ if [ -d drivers/crypto ]; then
+ mkdir -p ${STAGING_KERNEL_DIR}/drivers/crypto
+ cp -fR drivers/crypto/* ${STAGING_KERNEL_DIR}/drivers/crypto/
+ fi
+
+ if [ -d include/media ]; then
+ mkdir -p ${STAGING_KERNEL_DIR}/include/media
+ cp -fR include/media/* ${STAGING_KERNEL_DIR}/include/media/
+ fi
+
+ if [ -d include/acpi ]; then
+ mkdir -p ${STAGING_KERNEL_DIR}/include/acpi
+ cp -fR include/acpi/* ${STAGING_KERNEL_DIR}/include/acpi/
+ fi
+
if [ -d include/sound ]; then
mkdir -p ${STAGING_KERNEL_DIR}/include/sound
cp -fR include/sound/* ${STAGING_KERNEL_DIR}/include/sound/
@@ -133,7 +148,7 @@ kernel_do_stage() {
# Check if arch/${ARCH}/Makefile exists and install it
if [ -e arch/${ARCH}/Makefile ]; then
install -d ${STAGING_KERNEL_DIR}/arch/${ARCH}
- install -m 0644 arch/${ARCH}/Makefile ${STAGING_KERNEL_DIR}/arch/${ARCH}
+ install -m 0644 arch/${ARCH}/Makefile* ${STAGING_KERNEL_DIR}/arch/${ARCH}
fi
cp -fR include/config* ${STAGING_KERNEL_DIR}/include/
install -m 0644 ${KERNEL_OUTPUT} ${STAGING_KERNEL_DIR}/${KERNEL_IMAGETYPE}
@@ -199,7 +214,7 @@ fi
if [ -n "$D" ]; then
${HOST_PREFIX}depmod-${KERNEL_MAJOR_VERSION} -A -b $D -F ${STAGING_KERNEL_DIR}/System.map-${KERNEL_RELEASE} ${KERNEL_VERSION}
else
- depmod -A
+ depmod -a
fi
}
@@ -207,7 +222,7 @@ pkg_postinst_modules () {
if [ -n "$D" ]; then
${HOST_PREFIX}depmod-${KERNEL_MAJOR_VERSION} -A -b $D -F ${STAGING_KERNEL_DIR}/System.map-${KERNEL_RELEASE} ${KERNEL_VERSION}
else
- depmod -A
+ depmod -a
update-modules || true
fi
}
diff --git a/meta/classes/module.bbclass b/meta/classes/module.bbclass
index 8a13f1f85..6089f9046 100644
--- a/meta/classes/module.bbclass
+++ b/meta/classes/module.bbclass
@@ -38,7 +38,7 @@ pkg_postinst_append () {
if [ -n "$D" ]; then
exit 1
fi
- depmod -A
+ depmod -a
update-modules || true
}
diff --git a/meta/classes/opie.bbclass b/meta/classes/opie.bbclass
index 47f364a64..6430d46d6 100644
--- a/meta/classes/opie.bbclass
+++ b/meta/classes/opie.bbclass
@@ -18,7 +18,7 @@ inherit palmtop
# Note that when CVS changes to 1.2.2, the dash
# should be removed from OPIE_CVS_PV to convert
# to the standardised version format
-OPIE_CVS_PV = "1.2.1+cvs-${SRCDATE}"
+OPIE_CVS_PV = "1.2.2+cvs-${SRCDATE}"
DEPENDS_prepend = "${@["libopie2 ", ""][(bb.data.getVar('PN', d, 1) == 'libopie2')]}"
diff --git a/meta/classes/pkgconfig.bbclass b/meta/classes/pkgconfig.bbclass
index 62f15f312..f2054b0b0 100644
--- a/meta/classes/pkgconfig.bbclass
+++ b/meta/classes/pkgconfig.bbclass
@@ -20,7 +20,7 @@ def get_pkgconfig_mangle(d):
return s
do_stage_append () {
- for pc in `find ${S} -name '*.pc' | grep -v -- '-uninstalled.pc$'`; do
+ for pc in `find ${S} -name '*.pc' -type f | grep -v -- '-uninstalled.pc$'`; do
pcname=`basename $pc`
install -d ${PKG_CONFIG_PATH}
cat $pc | sed ${@get_pkgconfig_mangle(d)} > ${PKG_CONFIG_PATH}/$pcname
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass
index a626162ff..91ca9865f 100644
--- a/meta/classes/sanity.bbclass
+++ b/meta/classes/sanity.bbclass
@@ -64,13 +64,14 @@ def check_sanity(e):
if "diffstat-native" not in data.getVar('ASSUME_PROVIDED', e.data, True).split():
raise_sanity_error('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf')
- # Check the MACHINE is valid
+ # Check that the MACHINE is valid
if not check_conf_exists("conf/machine/${MACHINE}.conf", e.data):
raise_sanity_error('Please set a valid MACHINE in your local.conf')
- # Check the distro is valid
- if not check_conf_exists("conf/distro/${DISTRO}.conf", e.data):
- raise_sanity_error('Please set a valid DISTRO in your local.conf')
+ # Check that the DISTRO is valid
+ # need to take into account DISTRO renaming DISTRO
+ if not ( check_conf_exists("conf/distro/${DISTRO}.conf", e.data) or check_conf_exists("conf/distro/include/${DISTRO}.inc", e.data) ):
+ raise_sanity_error("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf" % data.getVar("DISTRO", e.data, True ))
if not check_app_exists("${MAKE}", e.data):
raise_sanity_error('GNU make missing. Please install GNU make')
diff --git a/meta/classes/sdl.bbclass b/meta/classes/sdl.bbclass
index c0b21427a..d478d97f1 100644
--- a/meta/classes/sdl.bbclass
+++ b/meta/classes/sdl.bbclass
@@ -26,7 +26,7 @@ sdl_do_sdl_install() {
Note=Auto Generated... this may be not what you want
Comment=${DESCRIPTION}
Exec=${APPNAME}
-Icon=${APPIMAGE}
+Icon=${PN}.png
Type=Application
Name=${PN}
EOF
diff --git a/meta/classes/tinderclient.bbclass b/meta/classes/tinderclient.bbclass
index f544c203f..d36ef0b34 100644
--- a/meta/classes/tinderclient.bbclass
+++ b/meta/classes/tinderclient.bbclass
@@ -240,8 +240,8 @@ def tinder_tinder_start(d, event):
output.append( "---> TINDERBOX BUILDING '%(packages)s'" )
output.append( "<--- TINDERBOX STARTING BUILD NOW" )
- output.append( "" )
-
+ output.append( "" )
+
return "\n".join(output) % vars()
def tinder_do_tinder_report(event):
@@ -255,6 +255,14 @@ def tinder_do_tinder_report(event):
information immediately. The caching/queuing needs to be
implemented. Also sending more or less information is not
implemented yet.
+
+ We have two temporary files stored in the TMP directory. One file
+ contains the assigned machine id for the tinderclient. This id gets
+ assigned when we connect the box and start the build process the second
+ file is used to workaround an EventHandler limitation. If BitBake is ran
+ with the continue option we want the Build to fail even if we get the
+ BuildCompleted Event. In this case we have to look up the status and
+ send it instead of 100/success.
"""
from bb.event import getName
from bb import data, mkdirhier, build
@@ -264,7 +272,6 @@ def tinder_do_tinder_report(event):
name = getName(event)
log = ""
status = 1
- #print asd
# Check what we need to do Build* shows we start or are done
if name == "BuildStarted":
tinder_build_start(event.data)
@@ -272,9 +279,18 @@ def tinder_do_tinder_report(event):
try:
# truncate the tinder log file
- f = file(data.getVar('TINDER_LOG', event.data, True), 'rw+')
- f.truncate(0)
+ f = file(data.getVar('TINDER_LOG', event.data, True), 'w')
+ f.write("")
f.close()
+ except:
+ pass
+
+ try:
+ # write a status to the file. This is needed for the -k option
+ # of BitBake
+ g = file(data.getVar('TMPDIR', event.data, True)+"/tinder-status", 'w')
+ g.write("")
+ g.close()
except IOError:
pass
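
The docstring added earlier in this file describes the second temporary file, TMPDIR/tinder-status, as a workaround for builds run with -k: a failing package writes a failure code there, and the BuildCompleted handler reads it back instead of unconditionally reporting 100/success. A minimal sketch of that handoff pattern, using hypothetical helper names rather than the event-handler code:

import os

def write_status(tmpdir, code):
    # Record a failure (e.g. 200) so a later BuildCompleted event does not
    # overwrite it with 100/success when BitBake was run with -k.
    with open(os.path.join(tmpdir, "tinder-status"), "w") as f:
        f.write(str(code))

def read_status(tmpdir, default=100):
    # Fall back to the default when no failure was ever recorded.
    try:
        with open(os.path.join(tmpdir, "tinder-status")) as f:
            return int(f.read())
    except (IOError, ValueError):
        return default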
@@ -295,16 +311,27 @@ def tinder_do_tinder_report(event):
elif name == "TaskFailed":
log += "<--- TINDERBOX Task %s failed (FAILURE)\n" % event.task
elif name == "PkgStarted":
- log += "---> TINDERBOX Package %s started\n" % data.getVar('P', event.data, True)
+ log += "---> TINDERBOX Package %s started\n" % data.getVar('PF', event.data, True)
elif name == "PkgSucceeded":
- log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % data.getVar('P', event.data, True)
+ log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % data.getVar('PF', event.data, True)
elif name == "PkgFailed":
- build.exec_task('do_clean', event.data)
- log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % data.getVar('P', event.data, True)
+ if not data.getVar('TINDER_AUTOBUILD', event.data, True) == "0":
+ build.exec_task('do_clean', event.data)
+ log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % data.getVar('PF', event.data, True)
status = 200
+ # remember the failure for the -k case
+ h = file(data.getVar('TMPDIR', event.data, True)+"/tinder-status", 'w')
+ h.write("200")
elif name == "BuildCompleted":
log += "Build Completed\n"
status = 100
+ # Check if we have a old status...
+ try:
+ h = file(data.getVar('TMPDIR',event.data,True)+'/tinder-status', 'r')
+ status = int(h.read())
+ except:
+ pass
+
elif name == "MultipleProviders":
log += "---> TINDERBOX Multiple Providers\n"
log += "multiple providers are available (%s);\n" % ", ".join(event.getCandidates())
@@ -315,6 +342,9 @@ def tinder_do_tinder_report(event):
log += "Error: No Provider for: %s\n" % event.getItem()
log += "Error:Was Runtime: %d\n" % event.isRuntime()
status = 200
+ # remember the failure for the -k case
+ h = file(data.getVar('TMPDIR', event.data, True)+"/tinder-status", 'w')
+ h.write("200")
# now post the log
if len(log) == 0: