author     Richard Purdie <rpurdie@linux.intel.com>    2009-11-08 22:32:43 +0000
committer  Richard Purdie <rpurdie@linux.intel.com>    2009-11-13 12:15:28 +0000
commit     c54117458a19d05d404ec00907a8f3e9c73a416b
tree       6cc5695742a1198668b022b35c8b2a456c4f3f4f
parent     80d55bbd6ea2ff93510f3b87ea97322b0b02eaa8
classes: Remove and sanitise import statements
Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
31 files changed, 34 insertions, 147 deletions
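Every hunk below follows the same pattern: `bb` and `os` are already available in the environment BitBake executes these python functions in (which is why they can be removed wholesale here), so the explicit imports are dropped, and modules that are genuinely needed (such as `re`) keep a trimmed import. A minimal before/after sketch of the pattern (illustrative only, not taken from any single file below):

    # before: redundant imports of modules BitBake already provides
    def example_get_tmpdir(d):
        import bb, os, re
        return re.sub('/$', '', bb.data.getVar('TMPDIR', d, 1))

    # after: keep only the import that is actually required
    def example_get_tmpdir(d):
        import re
        return re.sub('/$', '', bb.data.getVar('TMPDIR', d, 1))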
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass
index 4946222f7..57a8b0ed1 100644
--- a/meta/classes/autotools.bbclass
+++ b/meta/classes/autotools.bbclass
@@ -2,8 +2,6 @@ AUTOTOOLS_NATIVE_STAGE_INSTALL = "1"
 
 def autotools_dep_prepend(d):
-    import bb;
-
     if bb.data.getVar('INHIBIT_AUTOTOOLS_DEPS', d, 1):
         return ''
 
@@ -29,7 +27,6 @@ acpaths = "default"
 EXTRA_AUTORECONF = "--exclude=autopoint"
 
 def autotools_set_crosscompiling(d):
-    import bb
     if not bb.data.inherits_class('native', d):
         return " cross_compiling=yes"
     return ""
diff --git a/meta/classes/binconfig.bbclass b/meta/classes/binconfig.bbclass
index 31e5cc517..73ca4d621 100644
--- a/meta/classes/binconfig.bbclass
+++ b/meta/classes/binconfig.bbclass
@@ -1,6 +1,5 @@
 # The namespaces can clash here hence the two step replace
 def get_binconfig_mangle(d):
-    import bb.data
     s = "-e ''"
     if not bb.data.inherits_class('native', d):
         optional_quote = r"\(\"\?\)"
diff --git a/meta/classes/cpan-base.bbclass b/meta/classes/cpan-base.bbclass
index cc0d11e51..919a9b06a 100644
--- a/meta/classes/cpan-base.bbclass
+++ b/meta/classes/cpan-base.bbclass
@@ -9,7 +9,7 @@ RDEPENDS += "${@["perl", ""][(bb.data.inherits_class('native', d))]}"
 
 # Determine the staged version of perl from the perl configuration file
 def get_perl_version(d):
-    import os, bb, re
+    import re
     cfg = bb.data.expand('${STAGING_DIR}/${HOST_SYS}/perl/config.sh', d)
     try:
         f = open(cfg, 'r')
@@ -33,7 +33,6 @@ def is_new_perl(d):
 
 # Determine where the library directories are
 def perl_get_libdirs(d):
-    import bb
     libdir = bb.data.getVar('libdir', d, 1)
     if is_new_perl(d) == "yes":
         libdirs = libdir + '/perl5'
@@ -42,7 +41,6 @@ def perl_get_libdirs(d):
     return libdirs
 
 def is_target(d):
-    import bb
     if not bb.data.inherits_class('native', d):
         return "yes"
     return "no"
diff --git a/meta/classes/cpan_build.bbclass b/meta/classes/cpan_build.bbclass
index 4aff7c804..9cfe72bcb 100644
--- a/meta/classes/cpan_build.bbclass
+++ b/meta/classes/cpan_build.bbclass
@@ -10,7 +10,6 @@ inherit cpan-base
 # libmodule-build-perl)
 #
 def cpan_build_dep_prepend(d):
-    import bb;
     if bb.data.getVar('CPAN_BUILD_DEPS', d, 1):
         return ''
     pn = bb.data.getVar('PN', d, 1)
diff --git a/meta/classes/distutils-base.bbclass b/meta/classes/distutils-base.bbclass
index 5150be76b..5d6d9981e 100644
--- a/meta/classes/distutils-base.bbclass
+++ b/meta/classes/distutils-base.bbclass
@@ -3,7 +3,6 @@ DEPENDS += "${@["python-native python", ""][(bb.data.getVar('PACKAGES', d, 1) =
 RDEPENDS += "python-core"
 
 def python_dir(d):
-    import os, bb
     staging_incdir = bb.data.getVar( "STAGING_INCDIR", d, 1 )
     if os.path.exists( "%s/python2.5" % staging_incdir ): return "python2.5"
     if os.path.exists( "%s/python2.4" % staging_incdir ): return "python2.4"
diff --git a/meta/classes/gettext.bbclass b/meta/classes/gettext.bbclass
index a1e00e72c..0b69fa939 100644
--- a/meta/classes/gettext.bbclass
+++ b/meta/classes/gettext.bbclass
@@ -1,5 +1,4 @@
 def gettext_after_parse(d):
-    import bb
     # Remove the NLS bits if USE_NLS is no.
     if bb.data.getVar('USE_NLS', d, 1) == 'no':
         cfg = oe_filter_out('^--(dis|en)able-nls$', bb.data.getVar('EXTRA_OECONF', d, 1) or "", d)
diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass
index 56cbd6444..724074231 100644
--- a/meta/classes/icecc.bbclass
+++ b/meta/classes/icecc.bbclass
@@ -33,7 +33,6 @@ def icc_determine_gcc_version(gcc):
     'i686-apple-darwin8-gcc-4.0.1 (GCC) 4.0.1 (Apple Computer, Inc. build 5363)'
     """
-    import os
     return os.popen("%s --version" % gcc ).readline().split()[2]
 
 def create_cross_env(bb,d):
@@ -47,7 +46,7 @@ def create_cross_env(bb,d):
     if len(prefix) == 0:
         return ""
 
-    import tarfile, socket, time, os
+    import tarfile, socket, time
     ice_dir = bb.data.expand('${CROSS_DIR}', d)
     prefix = bb.data.expand('${HOST_PREFIX}' , d)
     distro = bb.data.expand('${DISTRO}', d)
@@ -96,7 +95,7 @@ def create_cross_env(bb,d):
 
 def create_native_env(bb,d):
 
-    import tarfile, socket, time, os
+    import tarfile, socket, time
     ice_dir = bb.data.expand('${CROSS_DIR}', d)
     prefix = bb.data.expand('${HOST_PREFIX}' , d)
     distro = bb.data.expand('${DISTRO}', d)
@@ -137,7 +136,7 @@ def create_native_env(bb,d):
 
 def create_cross_kernel_env(bb,d):
 
-    import tarfile, socket, time, os
+    import tarfile, socket, time
     ice_dir = bb.data.expand('${CROSS_DIR}', d)
     prefix = bb.data.expand('${HOST_PREFIX}' , d)
     distro = bb.data.expand('${DISTRO}', d)
@@ -204,8 +203,6 @@ def create_path(compilers, type, bb, d):
     """
     Create Symlinks for the icecc in the staging directory
     """
-    import os
-
     staging = os.path.join(bb.data.expand('${STAGING_DIR}', d), "ice", type)
 
     #check if the icecc path is set by the user
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass
index 6b0a14d9a..c5a2dd2c5 100644
--- a/meta/classes/image.bbclass
+++ b/meta/classes/image.bbclass
@@ -26,8 +26,6 @@ PACKAGE_ARCH = "${MACHINE_ARCH}"
 do_rootfs[depends] += "makedevs-native:do_populate_staging fakeroot-native:do_populate_staging ldconfig-native:do_populate_staging"
 
 python () {
-    import bb
-
     deps = bb.data.getVarFlag('do_rootfs', 'depends', d) or ""
     for type in (bb.data.getVar('IMAGE_FSTYPES', d, True) or "").split():
         for dep in ((bb.data.getVar('IMAGE_DEPENDS_%s' % type, d) or "").split() or []):
@@ -49,7 +47,6 @@ python () {
 # is searched for in the BBPATH (same as the old version.)
 #
 def get_devtable_list(d):
-    import bb
     devtable = bb.data.getVar('IMAGE_DEVICE_TABLE', d, 1)
     if devtable != None:
         return devtable
@@ -62,7 +59,6 @@ def get_devtable_list(d):
     return str
 
 def get_imagecmds(d):
-    import bb
     cmds = "\n"
     old_overrides = bb.data.getVar('OVERRIDES', d, 0)
     for type in bb.data.getVar('IMAGE_FSTYPES', d, True).split():
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index 56d6a0b88..4b993b55d 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
@@ -169,7 +169,6 @@ def package_qa_get_elf(path, bits32):
 
 def package_qa_clean_path(path,d):
     """ Remove the common prefix from the path. In this case it is the TMPDIR"""
-    import bb
     return path.replace(bb.data.getVar('TMPDIR',d,True),"")
 
 def package_qa_make_fatal_error(error_class, name, path,d):
@@ -184,7 +183,6 @@ def package_qa_write_error(error_class, name, path, d):
     """
     Log the error
     """
-    import bb, os
 
     ERROR_NAMES =[
         "non dev contains .so",
@@ -214,7 +212,6 @@ def package_qa_write_error(error_class, name, path, d):
         f.close()
 
 def package_qa_handle_error(error_class, error_msg, name, path, d):
-    import bb
     fatal = package_qa_make_fatal_error(error_class, name, path, d)
     if fatal:
         bb.error("QA Issue: %s" % error_msg)
@@ -229,7 +226,6 @@ def package_qa_check_rpath(file,name,d):
     """
     Check for dangerous RPATHs
     """
-    import bb, os
     sane = True
     scanelf = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True),'scanelf')
     bad_dir = bb.data.getVar('TMPDIR', d, True) + "/work"
@@ -255,7 +251,6 @@ def package_qa_check_devdbg(path, name,d):
     non dev packages containing
     """
-    import bb, os
     sane = True
 
     if not "-dev" in name:
@@ -283,7 +278,6 @@ def package_qa_check_arch(path,name,d):
     """
     Check if archs are compatible
     """
-    import bb, os
     sane = True
     target_os = bb.data.getVar('TARGET_OS', d, True)
     target_arch = bb.data.getVar('TARGET_ARCH', d, True)
@@ -322,7 +316,6 @@ def package_qa_check_desktop(path, name, d):
     """
     Run all desktop files through desktop-file-validate.
     """
-    import bb, os
     sane = True
     if path.endswith(".desktop"):
         desktop_file_validate = os.path.join(bb.data.getVar('STAGING_BINDIR_NATIVE',d,True),'desktop-file-validate')
@@ -337,7 +330,6 @@ def package_qa_check_buildpaths(path, name, d):
     """
     Check for build paths inside target files and error if not found in the whitelist
     """
-    import bb, os
     sane = True
 
     # Ignore .debug files, not interesting
@@ -364,7 +356,6 @@ def package_qa_check_staged(path,d):
     to find the one responsible for the errors easily even
     if we look at every .pc and .la file
     """
-    import os, bb
     sane = True
     tmpdir = bb.data.getVar('TMPDIR', d, True)
@@ -402,7 +393,6 @@ def package_qa_check_staged(path,d):
 
 # Walk over all files in a directory and call func
 def package_qa_walk(path, funcs, package,d):
-    import os
     sane = True
 
     for root, dirs, files in os.walk(path):
@@ -415,7 +405,6 @@ def package_qa_walk(path, funcs, package,d):
     return sane
 
 def package_qa_check_rdepends(pkg, workdir, d):
-    import bb
     sane = True
     if not "-dbg" in pkg and not "task-" in pkg and not "-image" in pkg:
         # Copied from package_ipk.bbclass
@@ -496,7 +485,6 @@ python do_qa_staging() {
 addtask qa_configure after do_configure before do_compile
 python do_qa_configure() {
     bb.note("Checking sanity of the config.log file")
-    import os
     for root, dirs, files in os.walk(bb.data.getVar('WORKDIR', d, True)):
         statement = "grep 'CROSS COMPILE Badness:' %s > /dev/null" % \
                     os.path.join(root,"config.log")
diff --git a/meta/classes/kernel-arch.bbclass b/meta/classes/kernel-arch.bbclass
index 2ce0f9727..35c26b89f 100644
--- a/meta/classes/kernel-arch.bbclass
+++ b/meta/classes/kernel-arch.bbclass
@@ -15,7 +15,7 @@ valid_archs = "alpha cris ia64 \
                avr32 blackfin"
 
 def map_kernel_arch(a, d):
-    import bb, re
+    import re
 
     valid_archs = bb.data.getVar('valid_archs', d, 1).split()
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass
index b2266bee5..74ec7d56b 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes/kernel.bbclass
@@ -9,9 +9,6 @@ INHIBIT_DEFAULT_DEPS = "1"
 KERNEL_IMAGETYPE ?= "zImage"
 
 python __anonymous () {
-
-    import bb
-
     kerneltype = bb.data.getVar('KERNEL_IMAGETYPE', d, 1) or ''
     if kerneltype == 'uImage':
         depends = bb.data.getVar("DEPENDS", d, 1)
@@ -271,7 +268,7 @@ module_conf_rfcomm = "alias bt-proto-3 rfcomm"
 
 python populate_packages_prepend () {
     def extract_modinfo(file):
-        import os, re
+        import re
         tmpfile = os.tmpnam()
         cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (bb.data.getVar("PATH", d, 1), bb.data.getVar("HOST_PREFIX", d, 1) or "", file, tmpfile)
         os.system(cmd)
@@ -289,7 +286,7 @@ python populate_packages_prepend () {
         return vals
 
     def parse_depmod():
-        import os, re
+        import re
 
         dvar = bb.data.getVar('D', d, 1)
         if not dvar:
@@ -343,7 +340,7 @@ python populate_packages_prepend () {
         file = file.replace(bb.data.getVar('D', d, 1) or '', '', 1)
 
         if module_deps.has_key(file):
-            import os.path, re
+            import re
             dependencies = []
             for i in module_deps[file]:
                 m = re.match(pattern, os.path.basename(i))
@@ -411,7 +408,7 @@ python populate_packages_prepend () {
     postrm = bb.data.getVar('pkg_postrm_modules', d, 1)
     do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % bb.data.getVar("KERNEL_VERSION", d, 1))
 
-    import re, os
+    import re
     metapkg = "kernel-modules"
     bb.data.setVar('ALLOW_EMPTY_' + metapkg, "1", d)
     bb.data.setVar('FILES_' + metapkg, "", d)
diff --git a/meta/classes/linux-kernel-base.bbclass b/meta/classes/linux-kernel-base.bbclass
index 4e2e2da37..b3e0fdad7 100644
--- a/meta/classes/linux-kernel-base.bbclass
+++ b/meta/classes/linux-kernel-base.bbclass
@@ -1,6 +1,6 @@
 # parse kernel ABI version out of <linux/version.h>
 def get_kernelversion(p):
-    import re, os
+    import re
     fn = p + '/include/linux/utsrelease.h'
     if not os.path.isfile(fn):
@@ -30,7 +30,6 @@ def get_kernelmajorversion(p):
     return None
 
 def linux_module_packages(s, d):
-    import bb, os.path
     suffix = ""
     return " ".join(map(lambda s: "kernel-module-%s%s" % (s.lower().replace('_', '-').replace('@', '+'), suffix), s.split()))
diff --git a/meta/classes/openmoko-base.bbclass b/meta/classes/openmoko-base.bbclass
index 8643daa7a..d7be1c293 100644
--- a/meta/classes/openmoko-base.bbclass
+++ b/meta/classes/openmoko-base.bbclass
@@ -4,7 +4,6 @@ OPENMOKO_RELEASE ?= "OM-2007"
 OPENMOKO_MIRROR ?= "svn://svn.openmoko.org/trunk"
 
 def openmoko_base_get_subdir(d):
-    import bb
     openmoko, section = bb.data.getVar('SECTION', d, 1).split("/")
     if section == 'base' or section == 'libs': return ""
     elif section in 'apps tools pim'.split(): return "applications"
diff --git a/meta/classes/openmoko2.bbclass b/meta/classes/openmoko2.bbclass
index ef734e431..233c721ff 100644
--- a/meta/classes/openmoko2.bbclass
+++ b/meta/classes/openmoko2.bbclass
@@ -5,12 +5,10 @@ OPENMOKO_RELEASE ?= "OM-2007.2"
 OPENMOKO_MIRROR ?= "svn://svn.openmoko.org/trunk"
 
 def openmoko_two_get_license(d):
-    import bb
     openmoko, section = bb.data.getVar('SECTION', d, 1).split("/")
     return "LGPL GPL".split()[section != "libs"]
 
 def openmoko_two_get_subdir(d):
-    import bb
     openmoko, section = bb.data.getVar('SECTION', d, 1).split("/")
     if section == 'base': return ""
     elif section == 'libs': return "libraries"
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index 41eeb8d3d..6d384bebc 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -29,7 +29,6 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
     Used in .bb files to split up dynamically generated subpackages of a
     given package, usually plugins or modules.
     """
-    import os, os.path, bb
 
     dvar = bb.data.getVar('PKGD', d, True)
@@ -117,7 +116,6 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
 PACKAGE_DEPENDS += "file-native"
 
 python () {
-    import bb
     if bb.data.getVar('PACKAGES', d, True) != '':
         deps = bb.data.getVarFlag('do_package', 'depends', d) or ""
         for dep in (bb.data.getVar('PACKAGE_DEPENDS', d, True) or "").split():
@@ -135,7 +133,7 @@ def runstrip(file, d):
     # A working 'file' (one which works on the target architecture)
     # is necessary for this stuff to work, hence the addition to do_package[depends]
 
-    import bb, os, commands, stat
+    import commands, stat
 
     pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True)
 
@@ -192,8 +190,6 @@ def runstrip(file, d):
 #
 
 def get_package_mapping (pkg, d):
-    import bb, os
-
     data = read_subpkgdata(pkg, d)
     key = "PKG_%s" % pkg
 
@@ -203,8 +199,6 @@ def get_package_mapping (pkg, d):
     return pkg
 
 def runtime_mapping_rename (varname, d):
-    import bb, os
-
     #bb.note("%s before: %s" % (varname, bb.data.getVar(varname, d, True)))
 
     new_depends = []
@@ -226,8 +220,6 @@ def runtime_mapping_rename (varname, d):
 #
 
 python package_do_split_locales() {
-    import os
-
     if (bb.data.getVar('PACKAGE_NO_LOCALE', d, True) == '1'):
         bb.debug(1, "package requested not splitting locales")
         return
@@ -284,8 +276,6 @@ python package_do_split_locales() {
 }
 
 python perform_packagecopy () {
-    import os
-
     dest = bb.data.getVar('D', d, True)
     dvar = bb.data.getVar('PKGD', d, True)
@@ -297,7 +287,7 @@ python perform_packagecopy () {
 }
 
 python populate_packages () {
-    import os, glob, stat, errno, re
+    import glob, stat, errno, re
 
     workdir = bb.data.getVar('WORKDIR', d, True)
     outdir = bb.data.getVar('DEPLOY_DIR', d, True)
@@ -530,7 +520,7 @@ fi
 SHLIBSDIR = "${STAGING_DIR_HOST}/shlibs"
 
 python package_do_shlibs() {
-    import os, re, os.path
+    import re
 
     exclude_shlibs = bb.data.getVar('EXCLUDE_FROM_SHLIBS', d, 0)
     if exclude_shlibs:
@@ -746,7 +736,7 @@ python package_do_shlibs() {
 }
 
 python package_do_pkgconfig () {
-    import re, os
+    import re
 
     packages = bb.data.getVar('PACKAGES', d, True)
     workdir = bb.data.getVar('WORKDIR', d, True)
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass
index d90939fdb..5c002465b 100644
--- a/meta/classes/package_deb.bbclass
+++ b/meta/classes/package_deb.bbclass
@@ -15,13 +15,11 @@ DPKG_ARCH_i686 ?= "i386"
 DPKG_ARCH_pentium ?= "i386"
 
 python package_deb_fn () {
-    from bb import data
     bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d)
 }
 
 addtask package_deb_install
 python do_package_deb_install () {
-    import os, sys
     pkg = bb.data.getVar('PKG', d, 1)
     pkgfn = bb.data.getVar('PKGFN', d, 1)
     rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
@@ -37,6 +35,7 @@ python do_package_deb_install () {
             os.makedirs(rootfs)
         os.chdir(rootfs)
     except OSError:
+        import sys
         raise bb.build.FuncFailed(str(sys.exc_value))
 
     # update packages file
@@ -67,14 +66,13 @@ python do_package_deb_install () {
 }
 
 python do_package_deb () {
-    import sys, re, copy
+    import re, copy
 
     workdir = bb.data.getVar('WORKDIR', d, 1)
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return
 
-    import os # path manipulations
     outdir = bb.data.getVar('DEPLOY_DIR_DEB', d, 1)
     if not outdir:
         bb.error("DEPLOY_DIR_DEB not defined, unable to package")
@@ -135,8 +133,7 @@ python do_package_deb () {
             except ValueError:
                 pass
         if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
-            from bb import note
-            note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
+            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
             bb.utils.unlockfile(lf)
             continue
 
@@ -186,6 +183,7 @@ python do_package_deb () {
         for (c, fs) in fields:
             ctrlfile.write(unicode(c % tuple(pullData(fs, localdata))))
     except KeyError:
+        import sys
         (type, value, traceback) = sys.exc_info()
         bb.utils.unlockfile(lf)
         ctrlfile.close()
@@ -252,7 +250,6 @@ python do_package_deb () {
 }
 
 python () {
-    import bb
     if bb.data.getVar('PACKAGES', d, True) != '':
         deps = (bb.data.getVarFlag('do_package_write_deb', 'depends', d) or "").split()
         deps.append('dpkg-native:do_populate_staging')
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass
index 68a8b5c40..2621fa8e9 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes/package_ipk.bbclass
@@ -6,12 +6,10 @@ IPKGCONF_TARGET = "${WORKDIR}/opkg.conf"
 IPKGCONF_SDK =  "${WORKDIR}/opkg-sdk.conf"
 
 python package_ipk_fn () {
-    from bb import data
     bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d)
 }
 
 python package_ipk_install () {
-    import os, sys
     pkg = bb.data.getVar('PKG', d, 1)
     pkgfn = bb.data.getVar('PKGFN', d, 1)
     rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
@@ -25,6 +23,7 @@ python package_ipk_install () {
         bb.mkdirhier(rootfs)
         os.chdir(rootfs)
     except OSError:
+        import sys
         (type, value, traceback) = sys.exc_info()
         print value
         raise bb.build.FuncFailed
@@ -126,14 +125,13 @@ package_generate_archlist () {
 }
 
 python do_package_ipk () {
-    import sys, re, copy
+    import re, copy
 
     workdir = bb.data.getVar('WORKDIR', d, 1)
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return
 
-    import os # path manipulations
     outdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1)
     if not outdir:
         bb.error("DEPLOY_DIR_IPK not defined, unable to package")
@@ -192,8 +190,7 @@ python do_package_ipk () {
             except ValueError:
                 pass
         if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
-            from bb import note
-            note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
+            bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
             bb.utils.unlockfile(lf)
             continue
 
@@ -234,6 +231,7 @@ python do_package_ipk () {
                 raise KeyError(f)
             ctrlfile.write(c % tuple(pullData(fs, localdata)))
     except KeyError:
+        import sys
         (type, value, traceback) = sys.exc_info()
         ctrlfile.close()
         bb.utils.unlockfile(lf)
@@ -302,7 +300,6 @@ python do_package_ipk () {
 }
 
 python () {
-    import bb
     if bb.data.getVar('PACKAGES', d, True) != '':
         deps = (bb.data.getVarFlag('do_package_write_ipk', 'depends', d) or "").split()
         deps.append('opkg-utils-native:do_populate_staging')
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass
index d291733dc..49e84678b 100644
--- a/meta/classes/package_rpm.bbclass
+++ b/meta/classes/package_rpm.bbclass
@@ -12,9 +12,6 @@ RPMOPTS="--rcfile=${WORKDIR}/rpmrc --target ${TARGET_SYS}"
 RPM="rpm ${RPMOPTS}"
 
 python write_specfile() {
-    from bb import data, build
-    import sys
-
     version = bb.data.getVar('PV', d, 1)
     version = version.replace('-', '+')
     bb.data.setVar('RPMPV', version, d)
@@ -55,8 +52,7 @@ python write_specfile() {
             pass
 
     if not files and bb.data.getVar('ALLOW_EMPTY', d) != "1":
-        from bb import note
-        note("Not creating empty archive for %s-%s-%s" % (bb.data.getVar('PKG',d, 1), bb.data.getVar('PV', d, 1), bb.data.getVar('PR', d, 1)))
+        bb.note("Not creating empty archive for %s-%s-%s" % (bb.data.getVar('PKG',d, 1), bb.data.getVar('PV', d, 1), bb.data.getVar('PR', d, 1)))
         return
 
     # output .spec using this metadata store
@@ -159,7 +155,6 @@ python do_package_rpm () {
         bb.error("WORKDIR not defined, unable to package")
         return
 
-    import os # path manipulations
     outdir = bb.data.getVar('DEPLOY_DIR_RPM', d, 1)
     if not outdir:
         bb.error("DEPLOY_DIR_RPM not defined, unable to package")
@@ -213,7 +208,6 @@ python do_package_rpm () {
 }
 
 python () {
-    import bb
     if bb.data.getVar('PACKAGES', d, True) != '':
         deps = (bb.data.getVarFlag('do_package_write_rpm', 'depends', d) or "").split()
         deps.append('rpm-native:do_populate_staging')
diff --git a/meta/classes/package_tar.bbclass b/meta/classes/package_tar.bbclass
index 876cec6cf..24a77be93 100644
--- a/meta/classes/package_tar.bbclass
+++ b/meta/classes/package_tar.bbclass
@@ -3,15 +3,12 @@ inherit package
 IMAGE_PKGTYPE ?= "tar"
 
 python package_tar_fn () {
-    import os
-    from bb import data
     fn = os.path.join(bb.data.getVar('DEPLOY_DIR_TAR', d), "%s-%s-%s.tar.gz" % (bb.data.getVar('PKG', d), bb.data.getVar('PV', d), bb.data.getVar('PR', d)))
     fn = bb.data.expand(fn, d)
     bb.data.setVar('PKGFN', fn, d)
 }
 
 python package_tar_install () {
-    import os, sys
     pkg = bb.data.getVar('PKG', d, 1)
     pkgfn = bb.data.getVar('PKGFN', d, 1)
     rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
@@ -23,6 +20,7 @@ python package_tar_install () {
         bb.mkdirhier(rootfs)
         os.chdir(rootfs)
     except OSError:
+        import sys
         (type, value, traceback) = sys.exc_info()
         print value
         raise bb.build.FuncFailed
@@ -42,7 +40,6 @@ python do_package_tar () {
         bb.error("WORKDIR not defined, unable to package")
         return
 
-    import os # path manipulations
     outdir = bb.data.getVar('DEPLOY_DIR_TAR', d, 1)
     if not outdir:
         bb.error("DEPLOY_DIR_TAR not defined, unable to package")
@@ -94,7 +91,6 @@ python do_package_tar () {
 }
 
 python () {
-    import bb
     if bb.data.getVar('PACKAGES', d, True) != '':
         deps = (bb.data.getVarFlag('do_package_write_tar', 'depends', d) or "").split()
         deps.append('tar-native:do_populate_staging')
diff --git a/meta/classes/packaged-staging.bbclass b/meta/classes/packaged-staging.bbclass
index b9d59bbd8..82a4450bc 100644
--- a/meta/classes/packaged-staging.bbclass
+++ b/meta/classes/packaged-staging.bbclass
@@ -27,7 +27,6 @@ PSTAGE_NATIVEDEPENDS = "\
 BB_STAMP_WHITELIST = "${PSTAGE_NATIVEDEPENDS}"
 
 python () {
-    import bb
     pstage_allowed = True
 
     # These classes encode staging paths into the binary data so can only be
@@ -81,8 +80,6 @@ PSTAGE_LIST_CMD = "${PSTAGE_PKGMANAGER} -f ${PSTAGE_MACHCONFIG} -o ${TMP
 PSTAGE_TMPDIR_STAGE = "${WORKDIR}/staging-pkg"
 
 def pstage_manualclean(srcname, destvarname, d):
-    import os, bb
-
     src = os.path.join(bb.data.getVar('PSTAGE_TMPDIR_STAGE', d, True), srcname)
     dest = bb.data.getVar(destvarname, d, True)
 
@@ -95,7 +92,6 @@ def pstage_manualclean(srcname, destvarname, d):
             os.system("rm %s 2> /dev/null" % filepath)
 
 def pstage_set_pkgmanager(d):
-    import bb
     path = bb.data.getVar("PATH", d, 1)
     pkgmanager = bb.which(path, 'opkg-cl')
     if pkgmanager == "":
@@ -105,8 +101,6 @@ def pstage_set_pkgmanager(d):
 
 
 def pstage_cleanpackage(pkgname, d):
-    import os, bb
-
     path = bb.data.getVar("PATH", d, 1)
     pstage_set_pkgmanager(d)
     list_cmd = bb.data.getVar("PSTAGE_LIST_CMD", d, True)
@@ -168,8 +162,6 @@ PSTAGE_TASKS_COVERED = "fetch unpack munge patch configure qa_configure rig_loca
 SCENEFUNCS += "packagestage_scenefunc"
 
 python packagestage_scenefunc () {
-    import os
-
     if bb.data.getVar("PSTAGING_ACTIVE", d, 1) == "0":
         return
 
@@ -249,10 +241,7 @@ packagestage_scenefunc[dirs] = "${STAGING_DIR}"
 
 addhandler packagedstage_stampfixing_eventhandler
 python packagedstage_stampfixing_eventhandler() {
-    from bb.event import getName
-    import os
-
-    if getName(e) == "StampUpdate":
+    if bb.event.getName(e) == "StampUpdate":
         taskscovered = bb.data.getVar("PSTAGE_TASKS_COVERED", e.data, 1).split()
         for (fn, task) in e.targets:
             # strip off 'do_'
diff --git a/meta/classes/packagedata.bbclass b/meta/classes/packagedata.bbclass
index c9d64d6da..86f18a9e9 100644
--- a/meta/classes/packagedata.bbclass
+++ b/meta/classes/packagedata.bbclass
@@ -1,5 +1,4 @@
 def packaged(pkg, d):
-    import os, bb
     return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)
 
 def read_pkgdatafile(fn):
@@ -10,7 +9,6 @@ def read_pkgdatafile(fn):
         c = codecs.getdecoder("string_escape")
         return c(str)[0]
 
-    import os
     if os.access(fn, os.R_OK):
         import re
         f = file(fn, 'r')
@@ -25,7 +23,6 @@ def read_pkgdatafile(fn):
     return pkgdata
 
 def get_subpkgedata_fn(pkg, d):
-    import bb, os
     archs = bb.data.expand("${PACKAGE_ARCHS}", d).split(" ")
     archs.reverse()
     pkgdata = bb.data.expand('${TMPDIR}/pkgdata/', d)
@@ -37,25 +34,20 @@ def get_subpkgedata_fn(pkg, d):
     return bb.data.expand('${PKGDATA_DIR}/runtime/%s' % pkg, d)
 
 def has_subpkgdata(pkg, d):
-    import bb, os
     return os.access(get_subpkgedata_fn(pkg, d), os.R_OK)
 
 def read_subpkgdata(pkg, d):
-    import bb
     return read_pkgdatafile(get_subpkgedata_fn(pkg, d))
 
 def has_pkgdata(pn, d):
-    import bb, os
     fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
     return os.access(fn, os.R_OK)
 
 def read_pkgdata(pn, d):
-    import bb
     fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
     return read_pkgdatafile(fn)
 
 python read_subpackage_metadata () {
-    import bb
     data = read_pkgdata(bb.data.getVar('PN', d, 1), d)
 
     for key in data.keys():
@@ -72,7 +64,6 @@ python read_subpackage_metadata () {
 #
 # Collapse FOO_pkg variables into FOO
 #
 def read_subpkgdata_dict(pkg, d):
-    import bb
     ret = {}
     subd = read_pkgdatafile(get_subpkgedata_fn(pkg, d))
     for var in subd:
diff --git a/meta/classes/packagehistory.bbclass b/meta/classes/packagehistory.bbclass
index 185ab545f..492bbac21 100644
--- a/meta/classes/packagehistory.bbclass
+++ b/meta/classes/packagehistory.bbclass
@@ -61,8 +61,6 @@ python emit_pkghistory() {
 
 
 def check_pkghistory(pkg, pe, pv, pr, lastversion):
-    import bb
-
     (last_pe, last_pv, last_pr) = lastversion
 
     bb.debug(2, "Checking package history")
@@ -72,7 +70,6 @@ def check_pkghistory(pkg, pe, pv, pr, lastversion):
 
 
 def write_pkghistory(pkg, pe, pv, pr, d):
-    import bb, os
     bb.debug(2, "Writing package history")
 
     pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True)
@@ -82,8 +79,6 @@ def write_pkghistory(pkg, pe, pv, pr, d):
         os.makedirs(verpath)
 
 def write_latestlink(pkg, pe, pv, pr, d):
-    import bb, os
-
     pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True)
 
     def rm_link(path):
diff --git a/meta/classes/patch.bbclass b/meta/classes/patch.bbclass
index ba0f19215..0706a02bc 100644
--- a/meta/classes/patch.bbclass
+++ b/meta/classes/patch.bbclass
@@ -4,8 +4,6 @@ QUILTRCFILE ?= "${STAGING_BINDIR_NATIVE}/quiltrc"
 
 def patch_init(d):
-    import os, sys
-
     class NotFoundError(Exception):
         def __init__(self, path):
             self.path = path
@@ -13,8 +11,6 @@ def patch_init(d):
             return "Error: %s not found." % self.path
 
     def md5sum(fname):
-        import sys
-
         # when we move to Python 2.5 as minimal supported
         # we can kill that try/except as hashlib is 2.5+
         try:
@@ -76,8 +72,6 @@ def patch_init(d):
         def __str__(self):
             return "Patch Error: %s" % self.msg
 
-    import bb, bb.data, bb.fetch
-
     class PatchSet(object):
         defaults = {
             "strippath": 1
@@ -251,6 +245,7 @@ def patch_init(d):
             try:
                 output = runcmd(["quilt", "applied"], self.dir)
             except CmdError:
+                import sys
                 if sys.exc_value.output.strip() == "No patches applied":
                     return
                 else:
@@ -364,6 +359,7 @@ def patch_init(d):
             try:
                 self.patchset.Push()
             except Exception:
+                import sys
                 os.chdir(olddir)
                 raise sys.exc_value
 
@@ -458,9 +454,6 @@ PATCHDEPENDENCY = "${PATCHTOOL}-native:do_populate_staging"
 do_patch[depends] = "${PATCHDEPENDENCY}"
 
 python patch_do_patch() {
-    import re
-    import bb.fetch
-
     patch_init(d)
 
     src_uri = (bb.data.getVar('SRC_URI', d, 1) or '').split()
diff --git a/meta/classes/rootfs_ipk.bbclass b/meta/classes/rootfs_ipk.bbclass
index aa28cd63f..065b78b81 100644
--- a/meta/classes/rootfs_ipk.bbclass
+++ b/meta/classes/rootfs_ipk.bbclass
@@ -159,7 +159,7 @@ ipk_insert_feed_uris () {
 }
 
 python () {
-    import bb
+
     if bb.data.getVar('BUILD_IMAGES_FROM_FEEDS', d, True):
         flags = bb.data.getVarFlag('do_rootfs', 'recrdeptask', d)
         flags = flags.replace("do_package_write_ipk", "")
diff --git a/meta/classes/rootfs_rpm.bbclass b/meta/classes/rootfs_rpm.bbclass
index 1e8ad6d9e..da5243ddf 100644
--- a/meta/classes/rootfs_rpm.bbclass
+++ b/meta/classes/rootfs_rpm.bbclass
@@ -234,7 +234,6 @@ install_all_locales() {
 }
 
 python () {
-    import bb
     if bb.data.getVar('BUILD_IMAGES_FROM_FEEDS', d, True):
         flags = bb.data.getVarFlag('do_rootfs', 'recrdeptask', d)
         flags = flags.replace("do_package_write_rpm", "")
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass
index e11bdd221..39f1e2200 100644
--- a/meta/classes/sanity.bbclass
+++ b/meta/classes/sanity.bbclass
@@ -3,7 +3,6 @@
 #
 
 def raise_sanity_error(msg):
-    import bb
     bb.fatal(""" Poky's config sanity checker detected a potential misconfiguration.
     Either fix the cause of this error or at your own risk disable the checker (see sanity.conf).
     Following is the list of potential problems / advisories:
@@ -11,8 +10,6 @@ def raise_sanity_error(msg):
     %s""" % msg)
 
 def check_conf_exists(fn, data):
-    import bb, os
-
     bbpath = []
     fn = bb.data.expand(fn, data)
     vbbpath = bb.data.getVar("BBPATH", data)
@@ -26,12 +23,12 @@ def check_conf_exists(fn, data):
 
 def check_sanity(e):
     from bb import note, error, data, __version__
-    from bb.event import Handled, NotHandled, getName
+
     try:
         from distutils.version import LooseVersion
     except ImportError:
         def LooseVersion(v): print "WARNING: sanity.bbclass can't compare versions without python-distutils"; return 1
-    import os, commands
+    import commands
 
     # Check the bitbake version meets minimum requirements
     minversion = data.getVar('BB_MIN_VERSION', e.data , True)
@@ -163,10 +160,8 @@ def check_sanity(e):
 
 addhandler check_sanity_eventhandler
 python check_sanity_eventhandler() {
-    from bb import note, error, data, __version__
-    from bb.event import getName
-
-    if getName(e) == "ConfigParsed":
+    from bb.event import Handled, NotHandled
+    if bb.event.getName(e) == "ConfigParsed":
         check_sanity(e)
 
     return NotHandled
diff --git a/meta/classes/siteinfo.bbclass b/meta/classes/siteinfo.bbclass
index 431b81ce2..7b012b7ba 100644
--- a/meta/classes/siteinfo.bbclass
+++ b/meta/classes/siteinfo.bbclass
@@ -16,8 +16,6 @@
 # If 'what' doesn't exist then an empty value is returned
 #
 def get_siteinfo_list(d):
-    import bb
-
     target = bb.data.getVar('HOST_ARCH', d, 1) + "-" + bb.data.getVar('HOST_OS', d, 1)
 
     targetinfo = {\
@@ -74,8 +72,6 @@ def get_siteinfo_list(d):
 # 2) ${FILE_DIRNAME}/site-${PV} - app version specific
 #
 def siteinfo_get_files(d):
-    import bb, os
-
     sitefiles = ""
 
     # Determine which site files to look for
diff --git a/meta/classes/sourcepkg.bbclass b/meta/classes/sourcepkg.bbclass
index 390d3684d..f73855303 100644
--- a/meta/classes/sourcepkg.bbclass
+++ b/meta/classes/sourcepkg.bbclass
@@ -5,8 +5,6 @@ EXCLUDE_FROM ?= ".pc autom4te.cache"
 DISTRO ?= "openembedded"
 
 def get_src_tree(d):
-    import bb
-    import os, os.path
 
     workdir = bb.data.getVar('WORKDIR', d, 1)
     if not workdir:
@@ -56,8 +54,6 @@ sourcepkg_do_archive_bb() {
 }
 
 python sourcepkg_do_dumpdata() {
-    import os
-    import os.path
 
     workdir = bb.data.getVar('WORKDIR', d, 1)
     distro = bb.data.getVar('DISTRO', d, 1)
diff --git a/meta/classes/tinderclient.bbclass b/meta/classes/tinderclient.bbclass
index bc004efb2..28df0f950 100644
--- a/meta/classes/tinderclient.bbclass
+++ b/meta/classes/tinderclient.bbclass
@@ -51,8 +51,7 @@ def tinder_format_http_post(d,status,log):
     for the tinderbox to be happy.
     """
-    from bb import data, build
-    import os,random
+    import random
 
     # the variables we will need to send on this form post
     variables =  {
@@ -125,7 +124,6 @@ def tinder_build_start(d):
     report = report[report.find(search)+len(search):]
     report = report[0:report.find("'")]
 
-    import bb
     bb.note("Machine ID assigned by tinderbox: %s" % report )
 
     # now we will need to save the machine number
@@ -165,7 +163,6 @@ def tinder_print_info(d):
     """
 
     from bb import data
-    import os
     # get the local vars
 
     time    = tinder_time_string()
@@ -216,7 +213,6 @@ def tinder_print_env():
     Print the environment variables of this build
     """
     from bb import data
-    import os
 
     time_start = tinder_time_string()
     time_end   = tinder_time_string()
@@ -278,7 +274,7 @@ def tinder_do_tinder_report(event):
     """
     from bb.event import getName
     from bb import data, mkdirhier, build
-    import os, glob
+    import glob
 
     # variables
     name = getName(event)
diff --git a/meta/classes/update-alternatives.bbclass b/meta/classes/update-alternatives.bbclass
index c63581c5d..ddbf4c194 100644
--- a/meta/classes/update-alternatives.bbclass
+++ b/meta/classes/update-alternatives.bbclass
@@ -11,7 +11,6 @@ update-alternatives --remove ${ALTERNATIVE_NAME} ${ALTERNATIVE_PATH}
 }
 
 def update_alternatives_after_parse(d):
-    import bb
     if bb.data.getVar('ALTERNATIVE_NAME', d) == None:
         raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % bb.data.getVar('FILE', d)
     if bb.data.getVar('ALTERNATIVE_PATH', d) == None:
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass
index 74053edb8..5a8062cc7 100644
--- a/meta/classes/update-rc.d.bbclass
+++ b/meta/classes/update-rc.d.bbclass
@@ -26,7 +26,6 @@ update-rc.d $D ${INITSCRIPT_NAME} remove
 
 def update_rc_after_parse(d):
-    import bb
     if bb.data.getVar('INITSCRIPT_PACKAGES', d) == None:
         if bb.data.getVar('INITSCRIPT_NAME', d) == None:
             raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_NAME" % bb.data.getVar('FILE', d)