# BitBake metadata: the task run when none is named on the command line.
1 BB_DEFAULT_TASK ?= "build"
3 # like os.path.join but doesn't treat absolute RHS specially
# NOTE(review): the embedded source line numbers jump (4 -> 7), so most of
# this function's body is elided from this listing; only one condition of
# the join loop is visible below.
4 def base_path_join(a, *p):
7 if path == '' or path.endswith('/'):
# Compute a relative path from `src` to `dest` (doctest examples partially
# elided from this listing). Both paths are normalized and split on the
# platform separator; `pardir` ("..") entries climb out of the non-common
# part of src, then the remainder of dest is appended.
13 def base_path_relative(src, dest):
14 """ Return a relative path from src to dest.
16 >>> base_path_relative("/usr/bin", "/tmp/foo/bar")
19 >>> base_path_relative("/usr/bin", "/usr/lib")
22 >>> base_path_relative("/tmp", "/tmp/foo/bar")
25 from os.path import sep, pardir, normpath, commonprefix
27 destlist = normpath(dest).split(sep)
28 srclist = normpath(src).split(sep)
30 # Find common section of the path
# commonprefix() on two lists compares element-wise, so `common` is the
# shared leading path components, not a string prefix.
31 common = commonprefix([destlist, srclist])
32 commonlen = len(common)
34 # Climb back to the point where they differentiate
35 relpath = [ pardir ] * (len(srclist) - commonlen)
36 if commonlen < len(destlist):
37 # Add remaining portion
38 relpath += destlist[commonlen:]
40 return sep.join(relpath)
# Shorten a path for user-facing messages by making it relative to TOPDIR
# when that is actually shorter. The return statements (lines 46-49 of the
# original) are elided from this listing.
42 def base_path_out(path, d):
43 """ Prepare a path for display to the user. """
44 rel = base_path_relative(d.getVar("TOPDIR", 1), path)
45 if len(rel) > len(path):
50 # for MD5/SHA handling
# Load the checksums .ini files into a ConfigParser. Raises ValueError when
# none of the candidate paths could be read. The `return parser` line is
# elided from this listing (source jumps 55 -> 59).
51 def base_chk_load_parser(config_paths):
53 parser = ConfigParser.ConfigParser()
# ConfigParser.read() returns the list of files successfully parsed.
54 if len(parser.read(config_paths)) < 1:
55 raise ValueError("no ini files could be found")
# Verify a fetched file against per-URI checksum varflags on SRC_URI
# (<name>.md5sum / <name>.sha256sum). Raises on mismatch or missing file.
# NOTE(review): several interior lines are elided here (e.g. where `name`
# is derived from `params`, and the early-return when no checksums exist).
59 def base_chk_file_vars(parser, localpath, params, data):
64 flagName = "%s.md5sum" % name
65 want_md5sum = bb.data.getVarFlag("SRC_URI", flagName, data)
66 flagName = "%s.sha256sum" % name
67 want_sha256sum = bb.data.getVarFlag("SRC_URI", flagName, data)
69 if (want_sha256sum == None and want_md5sum == None):
70 # no checksums to check, nothing to do
73 if not os.path.exists(localpath):
74 localpath = base_path_out(localpath, data)
75 bb.note("The localpath does not exist '%s'" % localpath)
76 raise Exception("The path does not exist '%s'" % localpath)
# Run md5sum with the recipe's PATH so the staged native tool is found.
80 md5pipe = os.popen('PATH=%s md5sum %s' % (bb.data.getVar('PATH', data, True), localpath))
# First whitespace-separated token of the first line is the digest;
# the `or [ "" ]` guards against empty output.
81 md5data = (md5pipe.readline().split() or [ "" ])[0]
84 raise Exception("Executing md5sum failed")
85 if want_md5sum != md5data:
86 bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (want_md5sum, md5data))
87 raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (want_md5sum, md5data))
# Same pattern for SHA256, using the oe_sha256sum wrapper script.
91 shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
92 sha256data = (shapipe.readline().split() or [ "" ])[0]
95 raise Exception("Executing shasum failed")
96 if want_sha256sum != sha256data:
97 bb.note("The SHA256Sums did not match. Wanted: '%s' and Got: '%s'" % (want_sha256sum, sha256data))
98 raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (want_sha256sum, sha256data))
# Verify a fetched file against entries in conf/checksums.ini, looked up by
# decreasing specificity: [PN-PV-SRC_URI], then [PN-SRC_URI], then [SRC_URI].
# On a missing entry it can append a template to TMPDIR/checksums.ini and,
# unless OE_STRICT_CHECKSUMS is set, continue with just a note.
# NOTE(review): interior lines are elided (e.g. where `no_checksum` is set
# and the final return); comments below describe only the visible lines.
103 def base_chk_file(parser, pn, pv, src_uri, localpath, data):
105 # Try PN-PV-SRC_URI first and then try PN-SRC_URI
106 # we rely on the get method to create errors
107 pn_pv_src = "%s-%s-%s" % (pn,pv,src_uri)
108 pn_src = "%s-%s" % (pn,src_uri)
109 if parser.has_section(pn_pv_src):
110 md5 = parser.get(pn_pv_src, "md5")
111 sha256 = parser.get(pn_pv_src, "sha256")
112 elif parser.has_section(pn_src):
113 md5 = parser.get(pn_src, "md5")
114 sha256 = parser.get(pn_src, "sha256")
115 elif parser.has_section(src_uri):
116 md5 = parser.get(src_uri, "md5")
117 sha256 = parser.get(src_uri, "sha256")
121 # md5 and sha256 should be valid now
122 if not os.path.exists(localpath):
123 localpath = base_path_out(localpath, data)
124 bb.note("The localpath does not exist '%s'" % localpath)
125 raise Exception("The path does not exist '%s'" % localpath)
128 # call md5(sum) and shasum
130 md5pipe = os.popen('PATH=%s md5sum %s' % (bb.data.getVar('PATH', data, True), localpath))
131 md5data = (md5pipe.readline().split() or [ "" ])[0]
134 raise Exception("Executing md5sum failed")
137 shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
138 shadata = (shapipe.readline().split() or [ "" ])[0]
141 raise Exception("Executing shasum failed")
143 if no_checksum == True: # we do not have conf/checksums.ini entry
# Best-effort: record the computed sums so the developer can copy them in.
145 file = open("%s/checksums.ini" % bb.data.getVar("TMPDIR", data, 1), "a")
150 raise Exception("Creating checksums.ini failed")
152 file.write("[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5data, shadata))
154 if not bb.data.getVar("OE_STRICT_CHECKSUMS",data, True):
155 bb.note("This package has no entry in checksums.ini, please add one")
156 bb.note("\n[%s]\nmd5=%s\nsha256=%s" % (src_uri, md5data, shadata))
159 bb.note("Missing checksum")
162 if not md5 == md5data:
163 bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (md5,md5data))
164 raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (md5, md5data))
166 if not sha256 == shadata:
167 bb.note("The SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256,shadata))
168 raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256, shadata))
# Compute the implicit DEPENDS every recipe gets (checksum tooling, and the
# cross toolchain when building for a different system). The bootstrap
# special-cases below break the circular dependency for the checksum tools
# themselves. The final `return deps` is elided from this listing.
173 def base_dep_prepend(d):
175 # Ideally this will check a flag so we will operate properly in
176 # the case where host == build == target, for now we don't work in
179 deps = "shasum-native coreutils-native"
180 if bb.data.getVar('PN', d, True) == "shasum-native" or bb.data.getVar('PN', d, True) == "stagemanager-native":
182 if bb.data.getVar('PN', d, True) == "coreutils-native":
183 deps = "shasum-native"
185 # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
186 # we need that built is the responsibility of the patch function / class, not
188 if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
189 if (bb.data.getVar('HOST_SYS', d, 1) !=
190 bb.data.getVar('BUILD_SYS', d, 1)):
# Cross build: need the target compiler and libc in addition.
191 deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
# Read a file and return its stripped contents, or "" if it cannot be
# opened (the enclosing try:/else: lines are elided from this listing).
194 def base_read_file(filename):
196 f = file( filename, "r" )
197 except IOError, reason:
198 return "" # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M:
200 return f.read().strip()
# Small conditional helpers used from inline ${@...} expressions in recipes.
# The return statements of most of them are elided from this listing; only
# the condition lines are visible.
203 def base_ifelse(condition, iftrue = True, iffalse = False):
# Return truevalue/falsevalue depending on an exact variable comparison.
209 def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
210 if bb.data.getVar(variable,d,1) == checkvalue:
# Numeric <= comparison of a variable against checkvalue.
215 def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
216 if float(bb.data.getVar(variable,d,1)) <= float(checkvalue):
# Version-string comparison via bb.vercmp.
221 def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
222 result = bb.vercmp(bb.data.getVar(variable,d,True), checkvalue)
# True-branch only when ALL checkvalues occur as substrings of the variable.
228 def base_contains(variable, checkvalues, truevalue, falsevalue, d):
230 if type(checkvalues).__name__ == "str":
231 checkvalues = [checkvalues]
232 for value in checkvalues:
# Substring search, not word-boundary aware.
233 if bb.data.getVar(variable,d,1).find(value) != -1:
234 matches = matches + 1
235 if matches == len(checkvalues):
# True when checkvalue occurs in both variables.
239 def base_both_contain(variable1, variable2, checkvalue, d):
240 if bb.data.getVar(variable1,d,1).find(checkvalue) != -1 and bb.data.getVar(variable2,d,1).find(checkvalue) != -1:
# Prepend the implicit base dependencies for target, native and nativesdk
# variants of every recipe.
245 DEPENDS_prepend="${@base_dep_prepend(d)} "
246 DEPENDS_virtclass-native_prepend="${@base_dep_prepend(d)} "
247 DEPENDS_virtclass-nativesdk_prepend="${@base_dep_prepend(d)} "
# Strip the first matching suffix from `var` (note: str.replace removes the
# first occurrence anywhere, not only at the end). The fall-through return
# is elided from this listing.
249 def base_prune_suffix(var, suffixes, d):
250 # See if var ends with any of the suffixes listed and
252 for suffix in suffixes:
253 if var.endswith(suffix):
254 return var.replace(suffix, "")
# Deprecated: build a FILESPATH-style search path of <p>/<override> entries,
# one per OVERRIDES component. Lines initialising `filespath` and `p` are
# elided from this listing.
257 def base_set_filespath(path, d):
258 bb.note("base_set_filespath usage is deprecated, %s should be fixed" % d.getVar("P", 1))
260 # The ":" ensures we have an 'empty' override
261 overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":"
263 for o in overrides.split(":"):
264 filespath.append(os.path.join(p, o))
265 return ":".join(filespath)
# Keep only the whitespace-separated words of `str` matching regex `f`
# (the `from re import match` line is elided).
267 def oe_filter(f, str, d):
269 return " ".join(filter(lambda x: match(f, x, 0), str.split()))
# Inverse of oe_filter: drop the words matching `f`.
271 def oe_filter_out(f, str, d):
273 return " ".join(filter(lambda x: not match(f, x, 0), str.split()))
# Shell helpers. The function headers for oedebug and oe_runmake are elided
# from this listing; only fragments of their bodies are visible.
294 echo "Usage: oedebug level \"message\""
# Emit the message only when OEDEBUG is at least the requested level.
298 test ${OEDEBUG:-0} -ge $1 && {
305 if [ x"$MAKE" = x ]; then MAKE=make; fi
306 oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
307 ${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
311 # Purpose: Install shared library file and
312 # create the necessary links
317 #oenote installing shared library $1 to $2
319 libname=`basename $1`
320 install -m 755 $1 $2/$libname
# Extract the SONAME from the ELF dynamic section to create the runtime link.
321 sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
322 solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
323 ln -sf $libname $2/$sonamelink
324 ln -sf $libname $2/$solink
328 # Purpose: Install a library, in all its forms
331 #	oe_libinstall libltdl ${STAGING_LIBDIR}/
332 #	oe_libinstall -C src/libblah libblah ${D}/${libdir}/
# NOTE(review): large parts of this function (option parsing cases, variable
# initialisation) are elided from this listing; comments below cover only
# the visible lines.
339 while [ "$#" -gt 0 ]; do
355 oefatal "oe_libinstall: unknown option: $1"
367 if [ -z "$destpath" ]; then
368 oefatal "oe_libinstall: no destination path specified"
# Installing directly into the staging libdir implies staging_install mode.
370 if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
376 if [ -z "$silent" ]; then
377 echo >&2 "oe_libinstall: $*"
382 if [ -z "$dir" ]; then
388 # Sanity check that the libname.lai is unique
389 number_of_files=`(cd $dir; find . -name "$dotlai") | wc -l`
390 if [ $number_of_files -gt 1 ]; then
391 oefatal "oe_libinstall: $dotlai is not unique in $dir"
# Narrow $dir down to the directory actually containing the .lai file.
395 dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
401 # If such file doesn't exist, try to cut version suffix
402 if [ ! -f "$lafile" ]; then
403 libname1=`echo "$libname" | sed 's/-[0-9.]*$//'`
405 if [ -f "$lafile1" ]; then
411 if [ -f "$lafile" ]; then
# Let libtool's own metadata tell us which library files exist.
413 eval `cat $lafile|grep "^library_names="`
416 library_names="$libname.so* $libname.dll.a"
419 __runcmd install -d $destpath/
421 if [ -f "$dota" -o -n "$require_static" ]; then
422 __runcmd install -m 0644 $dota $destpath/
424 if [ -f "$dotlai" -a -n "$libtool" ]; then
425 if test -n "$staging_install"
427 # stop libtool using the final directory name for libraries
429 __runcmd rm -f $destpath/$libname.la
# Rewrite the .la: mark as not-installed and retarget dependency_libs
# from WORKDIR/libdir paths to STAGING_LIBDIR.
430 __runcmd sed -e 's/^installed=yes$/installed=no/' \
431 -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
432 -e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
433 $dotlai >$destpath/$libname.la
435 __runcmd install -m 0644 $dotlai $destpath/$libname.la
439 for name in $library_names; do
440 files=`eval echo $name`
442 if [ ! -e "$f" ]; then
443 if [ -n "$libtool" ]; then
444 oefatal "oe_libinstall: $dir/$f not found."
# Preserve symlinks as links; install real files with exec permission.
446 elif [ -L "$f" ]; then
447 __runcmd cp -P "$f" $destpath/
448 elif [ ! -L "$f" ]; then
450 __runcmd install -m 0755 $libfile $destpath/
455 if [ -z "$libfile" ]; then
456 if [ -n "$require_shared" ]; then
457 oefatal "oe_libinstall: unable to locate shared library"
459 elif [ -z "$libtool" ]; then
460 # special case hack for non-libtool .so.#.#.# links
461 baselibfile=`basename "$libfile"`
462 if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
463 sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
464 solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
465 if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
466 __runcmd ln -sf $baselibfile $destpath/$sonamelink
468 __runcmd ln -sf $baselibfile $destpath/$solink
472 __runcmd cd "$olddir"
# Copy a staged file into the packaged-staging tree (PSTAGE_TMPDIR_STAGE)
# when packaged staging is active; no-op otherwise.
475 def package_stagefile(file, d):
477 if bb.data.getVar('PSTAGING_ACTIVE', d, True) == "1":
# Mirror the file's TMPDIR-relative location under the pstage tree.
478 destfile = file.replace(bb.data.getVar("TMPDIR", d, 1), bb.data.getVar("PSTAGE_TMPDIR_STAGE", d, 1))
479 bb.mkdirhier(os.path.dirname(destfile))
480 #print "%s to %s" % (file, destfile)
481 bb.copyfile(file, destfile)
# Shell counterpart of package_stagefile (the `srcfile=` assignment is
# elided from this listing).
483 package_stagefile_shell() {
484 if [ "$PSTAGING_ACTIVE" = "1" ]; then
486 destfile=`echo $srcfile | sed s#${TMPDIR}#${PSTAGE_TMPDIR_STAGE}#`
487 destdir=`dirname $destfile`
# -dp preserves symlinks and timestamps/permissions.
489 cp -dp $srcfile $destfile
494 # Purpose: Install machine dependent files, if available
495 #          If not available, check if there is a default
496 #          If no default, just touch the destination
499 #        oe_machinstall -m 0644 fstab ${D}/etc/fstab
501 # TODO: Check argument number?
503 filename=`basename $3`
# Try each OVERRIDES component in order, installing the first match found.
506 for o in `echo ${OVERRIDES} | tr ':' ' '`; do
507 if [ -e $dirname/$o/$filename ]; then
508 oenote $dirname/$o/$filename present, installing to $4
509 install $1 $2 $dirname/$o/$filename $4
513 # oenote overrides specific file NOT present, trying default=$3...
515 oenote $3 present, installing to $4
518 oenote $3 NOT present, touching empty $4
# do_listtasks: print every variable flagged as a task; never stamped.
524 do_listtasks[nostamp] = "1"
525 python do_listtasks() {
527 # emit variables and shell functions
528 #bb.data.emit_env(sys.__stdout__, d)
529 # emit the metadata which isnt valid shell
531 if bb.data.getVarFlag(e, 'task', d):
# Write straight to the real stdout, bypassing task log redirection.
532 sys.__stdout__.write("%s\n" % e)
# do_clean: remove WORKDIR and this recipe's stamp files; never stamped.
536 do_clean[dirs] = "${TOPDIR}"
537 do_clean[nostamp] = "1"
538 python base_do_clean() {
539 """clear the build and temp directories"""
540 dir = bb.data.expand("${WORKDIR}", d)
# Guard against an unexpanded/empty WORKDIR before rm -rf.
541 if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
542 bb.note("removing " + base_path_out(dir, d))
543 os.system('rm -rf ' + dir)
545 dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
546 bb.note("removing " + base_path_out(dir, d))
547 os.system('rm -f '+ dir)
# do_cleanall: run do_clean across the whole recursive dependency tree.
550 python do_cleanall() {
553 do_cleanall[recrdeptask] = "do_clean"
554 addtask cleanall after do_clean
# do_rebuild: force a rebuild after the default task; never stamped.
556 addtask rebuild after do_${BB_DEFAULT_TASK}
557 do_rebuild[dirs] = "${TOPDIR}"
558 do_rebuild[nostamp] = "1"
559 python base_do_rebuild() {
560 """rebuild a package"""
# do_mrproper: wipe DL_DIR then run do_clean; never stamped.
564 do_mrproper[dirs] = "${TOPDIR}"
565 do_mrproper[nostamp] = "1"
566 python base_do_mrproper() {
567 """clear downloaded sources, build and temp directories"""
568 dir = bb.data.expand("${DL_DIR}", d)
# NOTE(review): the FuncFailed here is constructed but not raised — it is
# preserved byte-identically; the original apparently never aborts on '/'.
569 if dir == '/': bb.build.FuncFailed("wrong DATADIR")
570 bb.debug(2, "removing " + dir)
571 os.system('rm -rf ' + dir)
572 bb.build.exec_func('do_clean', d)
# do_distclean: do_clean plus removal of this recipe's downloaded files
# (file:// URIs are skipped; remote downloads and their .md5 are removed).
576 do_distclean[dirs] = "${TOPDIR}"
577 do_distclean[nostamp] = "1"
578 python base_do_distclean() {
579 """clear downloaded sources, build and temp directories"""
581 bb.build.exec_func('do_clean', d)
583 src_uri = bb.data.getVar('SRC_URI', d, 1)
587 for uri in src_uri.split():
588 if bb.decodeurl(uri)[0] == "file":
592 local = bb.data.expand(bb.fetch.localpath(uri, d), d)
593 except bb.MalformedUrl, e:
594 bb.debug(1, 'Unable to generate local path for malformed uri: %s' % e)
596 bb.note("removing %s" % base_path_out(local, d))
598 if os.path.exists(local + ".md5"):
599 os.remove(local + ".md5")
600 if os.path.exists(local):
603 bb.note("Error in removal: %s" % e)
# Setscene machinery: run all SCENEFUNCS, then self-stamp do_setscene.
606 SCENEFUNCS += "base_scenefunction"
608 python base_do_setscene () {
609 for f in (bb.data.getVar('SCENEFUNCS', d, 1) or '').split():
610 bb.build.exec_func(f, d)
# Only stamp if nothing else stamped it during the scene functions.
611 if not os.path.exists(bb.data.getVar('STAMP', d, 1) + ".do_setscene"):
612 bb.build.make_stamp("do_setscene", d)
614 do_setscene[selfstamp] = "1"
615 addtask setscene before do_fetch
# If a ".needclean" marker stamp exists (left by the rebuild handler),
# run do_clean before anything else.
617 python base_scenefunction () {
618 stamp = bb.data.getVar('STAMP', d, 1) + ".needclean"
619 if os.path.exists(stamp):
620 bb.build.exec_func("do_clean", d)
# do_fetch: download all SRC_URI entries, then verify checksums for
# remote archives via checksums.ini and/or SRC_URI varflags.
625 do_fetch[dirs] = "${DL_DIR}"
626 do_fetch[depends] = "shasum-native:do_populate_staging"
627 python base_do_fetch() {
630 localdata = bb.data.createCopy(d)
631 bb.data.update_data(localdata)
633 src_uri = bb.data.getVar('SRC_URI', localdata, 1)
# Initialise the fetchers; translate fetcher exceptions into task failures
# with readable messages (the enclosing try: lines are elided here).
638 bb.fetch.init(src_uri.split(),d)
639 except bb.fetch.NoMethodError:
640 (type, value, traceback) = sys.exc_info()
641 raise bb.build.FuncFailed("No method: %s" % value)
642 except bb.MalformedUrl:
643 (type, value, traceback) = sys.exc_info()
644 raise bb.build.FuncFailed("Malformed URL: %s" % value)
647 bb.fetch.go(localdata)
648 except bb.fetch.MissingParameterError:
649 (type, value, traceback) = sys.exc_info()
650 raise bb.build.FuncFailed("Missing parameters: %s" % value)
651 except bb.fetch.FetchError:
652 (type, value, traceback) = sys.exc_info()
653 raise bb.build.FuncFailed("Fetch failed: %s" % value)
654 except bb.fetch.MD5SumError:
655 (type, value, traceback) = sys.exc_info()
656 raise bb.build.FuncFailed("MD5 failed: %s" % value)
658 (type, value, traceback) = sys.exc_info()
659 raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
662 # Verify the SHA and MD5 sums we have in OE and check what do
664 checksum_paths = bb.data.getVar('BBPATH', d, True).split(":")
666 # reverse the list to give precedence to directories that
667 # appear first in BBPATH
668 checksum_paths.reverse()
670 checksum_files = ["%s/conf/checksums.ini" % path for path in checksum_paths]
672 parser = base_chk_load_parser(checksum_files)
674 bb.note("No conf/checksums.ini found, not checking checksums")
677 bb.note("Creating the CheckSum parser failed: %s:%s" % (sys.exc_info()[0], sys.exc_info()[1]))
680 pv = bb.data.getVar('PV', d, True)
681 pn = bb.data.getVar('PN', d, True)
684 for url in src_uri.split():
685 localpath = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
686 (type,host,path,_,_,params) = bb.decodeurl(url)
687 uri = "%s://%s%s" % (type,host,path)
# Only remote-download schemes are checksummed; SCM fetches are not.
689 if type in [ "http", "https", "ftp", "ftps" ]:
690 if not (base_chk_file_vars(parser, localpath, params, d) or base_chk_file(parser, pn, pv,uri, localpath, d)):
691 if not bb.data.getVar("OE_ALLOW_INSECURE_DOWNLOADS", d, True):
692 bb.fatal("%s-%s: %s has no checksum defined, cannot check archive integrity" % (pn,pv,uri))
694 bb.note("%s-%s: %s has no checksum defined, archive integrity not checked" % (pn,pv,uri))
696 raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
# do_fetchall: fetch recursively across the dependency tree.
699 addtask fetchall after do_fetch
700 do_fetchall[recrdeptask] = "do_fetch"
# do_checkuri: verify that every SRC_URI is reachable without downloading;
# never stamped. Mirrors do_fetch's exception-to-FuncFailed translation.
706 do_checkuri[nostamp] = "1"
707 python do_checkuri() {
710 localdata = bb.data.createCopy(d)
711 bb.data.update_data(localdata)
713 src_uri = bb.data.getVar('SRC_URI', localdata, 1)
716 bb.fetch.init(src_uri.split(),d)
717 except bb.fetch.NoMethodError:
718 (type, value, traceback) = sys.exc_info()
719 raise bb.build.FuncFailed("No method: %s" % value)
# checkstatus probes the URLs instead of fetching them.
722 bb.fetch.checkstatus(localdata)
723 except bb.fetch.MissingParameterError:
724 (type, value, traceback) = sys.exc_info()
725 raise bb.build.FuncFailed("Missing parameters: %s" % value)
726 except bb.fetch.FetchError:
727 (type, value, traceback) = sys.exc_info()
728 raise bb.build.FuncFailed("Fetch failed: %s" % value)
729 except bb.fetch.MD5SumError:
730 (type, value, traceback) = sys.exc_info()
731 raise bb.build.FuncFailed("MD5 failed: %s" % value)
733 (type, value, traceback) = sys.exc_info()
734 raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
# Recursive variants of checkuri and build.
737 addtask checkuriall after do_checkuri
738 do_checkuriall[recrdeptask] = "do_checkuri"
739 do_checkuriall[nostamp] = "1"
740 base_do_checkuriall() {
744 addtask buildall after do_build
745 do_buildall[recrdeptask] = "do_build"
# preexec_fn for subprocess calls: restore default SIGPIPE handling so
# child pipelines (tar | gzip, etc.) terminate normally on broken pipes.
750 def subprocess_setup():
752 # Python installs a SIGPIPE handler by default. This is usually not what
753 # non-Python subprocesses expect.
754 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
# Unpack one fetched file into the current directory, choosing the command
# by file extension (tar/gzip/bzip2/zip/...), copying plain files and
# directories, and honouring the url's subdir= parameter. Returns the
# subprocess exit status via the elided tail of the function.
756 def oe_unpack_file(file, data, url = None):
759 url = "file://%s" % file
760 dots = file.split(".")
761 if dots[-1] in ['gz', 'bz2', 'Z']:
# Decompression target: same basename without the compression suffix,
# placed in WORKDIR.
762 efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
766 if file.endswith('.tar'):
767 cmd = 'tar x --no-same-owner -f %s' % file
768 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
769 cmd = 'tar xz --no-same-owner -f %s' % file
770 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
771 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
772 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
773 cmd = 'gzip -dc %s > %s' % (file, efile)
774 elif file.endswith('.bz2'):
775 cmd = 'bzip2 -dc %s > %s' % (file, efile)
776 elif file.endswith('.zip') or file.endswith('.jar'):
778 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
781 cmd = '%s %s' % (cmd, file)
782 elif os.path.isdir(file):
# Directory from a FILESPATH location: recreate its FILESPATH-relative
# subdirectory under the cwd before copying.
784 filespath = bb.data.getVar("FILESPATH", data, 1).split(":")
786 if file[0:len(fp)] == fp:
787 destdir = file[len(fp):file.rfind('/')]
788 destdir = destdir.strip('/')
791 elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
792 os.makedirs("%s/%s" % (os.getcwd(), destdir))
795 cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
797 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
798 if not 'patch' in parm:
799 # The "destdir" handling was specifically done for FILESPATH
800 # items. So, only do so for file:// entries.
802 destdir = bb.decodeurl(url)[1] or "."
805 bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
806 cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
# Avoid copying a file onto itself when source and destination coincide.
811 dest = os.path.join(os.getcwd(), os.path.basename(file))
812 if os.path.exists(dest):
813 if os.path.samefile(file, dest):
816 # Change to subdir before executing command
817 save_cwd = os.getcwd();
818 parm = bb.decodeurl(url)[5]
820 newdir = ("%s/%s" % (os.getcwd(), parm['subdir']))
# Run the unpack command with the recipe's PATH; subprocess_setup restores
# SIGPIPE for the child shell pipeline.
824 cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
825 bb.note("Unpacking %s to %s/" % (base_path_out(file, data), base_path_out(os.getcwd(), data)))
826 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
# do_unpack: unpack every SRC_URI entry into WORKDIR via oe_unpack_file.
832 addtask unpack after do_fetch
833 do_unpack[dirs] = "${WORKDIR}"
834 python base_do_unpack() {
837 localdata = bb.data.createCopy(d)
838 bb.data.update_data(localdata)
# Unexpanded SRC_URI is fetched first, then expanded against localdata.
840 src_uri = bb.data.getVar('SRC_URI', localdata)
843 src_uri = bb.data.expand(src_uri, localdata)
844 for url in src_uri.split():
846 local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
847 except bb.MalformedUrl, e:
848 raise bb.build.FuncFailed('Unable to generate local path for malformed uri: %s' % e)
850 raise bb.build.FuncFailed('Unable to locate local file for %s' % url)
# Resolve symlinks so the unpack command sees the real file.
851 local = os.path.realpath(local)
852 ret = oe_unpack_file(local, localdata, url)
854 raise bb.build.FuncFailed()
# Record which SCM manages the metadata tree and its revision/branch.
857 METADATA_SCM = "${@base_get_scm(d)}"
858 METADATA_REVISION = "${@base_get_scm_revision(d)}"
859 METADATA_BRANCH = "${@base_get_scm_branch(d)}"
# NOTE(review): the `def base_get_scm(d):` line itself is elided from this
# listing; the body below locates the repo root from BBPATH and probes for
# SCM control directories.
863 baserepo = os.path.dirname(os.path.dirname(which(d.getVar("BBPATH", 1), "classes/base.bbclass")))
864 for (scm, scmpath) in {"svn": ".svn",
866 "monotone": "_MTN"}.iteritems():
867 if os.path.exists(os.path.join(baserepo, scmpath)):
868 return "%s %s" % (scm, baserepo)
869 return "<unknown> %s" % baserepo
# Dispatch to base_get_metadata_<scm>_revision based on METADATA_SCM.
871 def base_get_scm_revision(d):
872 (scm, path) = d.getVar("METADATA_SCM", 1).split()
874 if scm != "<unknown>":
875 return globals()["base_get_metadata_%s_revision" % scm](path, d)
# Dispatch to base_get_metadata_<scm>_branch based on METADATA_SCM.
881 def base_get_scm_branch(d):
882 (scm, path) = d.getVar("METADATA_SCM", 1).split()
884 if scm != "<unknown>":
885 return globals()["base_get_metadata_%s_branch" % scm](path, d)
# Parse the monotone branch name out of _MTN/options.
891 def base_get_metadata_monotone_branch(path, d):
892 monotone_branch = "<unknown>"
894 monotone_branch = file( "%s/_MTN/options" % path ).read().strip()
895 if monotone_branch.startswith( "database" ):
896 monotone_branch_words = monotone_branch.split()
# [1:-1] strips the surrounding quotes from the branch token.
897 monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
900 return monotone_branch
# Parse the monotone old_revision id out of _MTN/revision.
902 def base_get_metadata_monotone_revision(path, d):
903 monotone_revision = "<unknown>"
905 monotone_revision = file( "%s/_MTN/revision" % path ).read().strip()
906 if monotone_revision.startswith( "format_version" ):
907 monotone_revision_words = monotone_revision.split()
908 monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
911 return monotone_revision
# Read the revision from the pre-1.4-format .svn/entries file (line 4).
913 def base_get_metadata_svn_revision(path, d):
914 revision = "<unknown>"
916 revision = file( "%s/.svn/entries" % path ).readlines()[3].strip()
# Current git branch via symbolic-ref, stripped of the refs/heads/ prefix.
921 def base_get_metadata_git_branch(path, d):
922 branch = os.popen('cd %s; PATH=%s git symbolic-ref HEAD 2>/dev/null' % (path, d.getVar("PATH", 1))).read().rstrip()
925 return branch.replace("refs/heads/", "")
# Current git HEAD commit hash via show-ref.
928 def base_get_metadata_git_revision(path, d):
929 rev = os.popen("cd %s; PATH=%s git show-ref HEAD 2>/dev/null" % (path, d.getVar("PATH", 1))).read().split(" ")[0].rstrip()
# Global event handler: formats progress messages, prints the build
# configuration banner, validates required variables at BuildStarted,
# clears stamps for do_rebuild, and appends messages to EVENTLOG.
935 addhandler base_eventhandler
936 python base_eventhandler() {
937 from bb import note, error, data
938 from bb.event import Handled, NotHandled, getName
942 if name == "TaskCompleted":
943 msg = "package %s: task %s is complete." % (data.getVar("PF", e.data, 1), e.task)
944 elif name == "UnsatisfiedDep":
945 msg = "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
949 # Only need to output when using 1.8 or lower, the UI code handles it
# NOTE(review): this version test also matches e.g. 0.9; preserved as-is.
951 if (int(bb.__version__.split(".")[0]) <= 1 and int(bb.__version__.split(".")[1]) <= 8):
955 if name.startswith("BuildStarted"):
956 bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
# Build the configuration banner from BUILDCFG_VARS.
957 statusvars = bb.data.getVar("BUILDCFG_VARS", e.data, 1).split()
958 statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
959 statusmsg = "\n%s\n%s\n" % (bb.data.getVar("BUILDCFG_HEADER", e.data, 1), "\n".join(statuslines))
# Abort early if any required configuration variable is unset/INVALID.
962 needed_vars = bb.data.getVar("BUILDCFG_NEEDEDVARS", e.data, 1).split()
964 for v in needed_vars:
965 val = bb.data.getVar(v, e.data, 1)
966 if not val or val == 'INVALID':
969 bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
972 # Handle removing stamps for 'rebuild' task
974 if name.startswith("StampUpdate"):
975 for (fn, task) in e.targets:
976 #print "%s %s" % (task, fn)
977 if task == "do_rebuild":
978 dir = "%s.*" % e.stampPrefix[fn]
979 bb.note("Removing stamps: " + dir)
980 os.system('rm -f '+ dir)
# Leave a marker so base_scenefunction triggers do_clean next run.
981 os.system('touch ' + e.stampPrefix[fn] + '.needclean')
983 if not data in e.__dict__:
# Append the formatted message to the optional EVENTLOG file.
986 log = data.getVar("EVENTLOG", e.data, 1)
988 logfile = file(log, "a")
989 logfile.write("%s\n" % msg)
# do_configure: runs in ${S}/${B} after unpack+patch, with dependencies
# staged first (deptask). Body elided in this listing.
995 addtask configure after do_unpack do_patch
996 do_configure[dirs] = "${S} ${B}"
997 do_configure[deptask] = "do_populate_staging"
998 base_do_configure() {
# do_compile: run make if a Makefile exists, otherwise just note it.
1002 addtask compile after do_configure
1003 do_compile[dirs] = "${S} ${B}"
1005 if [ -e Makefile -o -e makefile ]; then
1006 oe_runmake || die "make failed"
1008 oenote "nothing to compile"
# Copy one directory's contents into the sysroot destination, skipping
# directories that are empty (or become empty after rmdir).
1013 sysroot_stage_dir() {
1016 # This will remove empty directories so we can ignore them
1017 rmdir "$src" 2> /dev/null || true
1018 if [ -d "$src" ]; then
1020 cp -fpPR "$src"/* "$dest"
# Stage a library directory, rewriting libtool .la files so staged
# libraries are marked not-installed and reference STAGING_LIBDIR.
1024 sysroot_stage_libdir() {
1030 las=$(find . -name \*.la -type f)
1032 echo "Found la files: $las"
1035 sed -e 's/^installed=yes$/installed=no/' \
1036 -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
1037 -e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
1040 sysroot_stage_dir $src $dest
# Stage the standard directory set from $1 (usually ${D}) into $2.
1043 sysroot_stage_dirs() {
1047 sysroot_stage_dir $from${includedir} $to${STAGING_INCDIR}
# Executables are only useful in the sysroot for native builds.
1048 if [ "${BUILD_SYS}" = "${HOST_SYS}" ]; then
1049 sysroot_stage_dir $from${bindir} $to${STAGING_DIR_HOST}${bindir}
1050 sysroot_stage_dir $from${sbindir} $to${STAGING_DIR_HOST}${sbindir}
1051 sysroot_stage_dir $from${base_bindir} $to${STAGING_DIR_HOST}${base_bindir}
1052 sysroot_stage_dir $from${base_sbindir} $to${STAGING_DIR_HOST}${base_sbindir}
1053 sysroot_stage_dir $from${libexecdir} $to${STAGING_DIR_HOST}${libexecdir}
1054 if [ "${prefix}/lib" != "${libdir}" ]; then
1055 # python puts its files in here, make sure they are staged as well
# NOTE(review): calls autotools_stage_dir here while every sibling call
# uses sysroot_stage_dir — looks inconsistent; preserved as-is.
1056 autotools_stage_dir $from/${prefix}/lib $to${STAGING_DIR_HOST}${prefix}/lib
1059 if [ -d $from${libdir} ]
1061 sysroot_stage_libdir $from/${libdir} $to${STAGING_LIBDIR}
1063 if [ -d $from${base_libdir} ]
1065 sysroot_stage_libdir $from${base_libdir} $to${STAGING_DIR_HOST}${base_libdir}
1067 sysroot_stage_dir $from${datadir} $to${STAGING_DATADIR}
# Entry point used by do_populate_staging for the non-legacy path.
1070 sysroot_stage_all() {
1071 sysroot_stage_dirs ${D} ${SYSROOT_DESTDIR}
# Decide whether this recipe uses legacy staging (a hand-written do_stage)
# or the modern destdir-based flow. Return statements between the visible
# branches are elided from this listing.
1074 def is_legacy_staging(d):
1075 stagefunc = bb.data.getVar('do_stage', d, True)
1077 if stagefunc is None:
1079 elif stagefunc.strip() == "autotools_stage_all":
1081 elif stagefunc.strip() == "do_stage_native" and bb.data.getVar('AUTOTOOLS_NATIVE_STAGE_INSTALL', d, 1) == "1":
1083 elif bb.data.getVar('NATIVE_INSTALL_WORKS', d, 1) == "1":
1085 if bb.data.getVar('PSTAGE_BROKEN_DESTDIR', d, 1) == "1":
1087 if bb.data.getVar('FORCE_LEGACY_STAGING', d, 1) == "1":
# do_populate_staging: install build results into the shared staging area.
1091 do_populate_staging[dirs] = "${STAGING_DIR_TARGET}/${bindir} ${STAGING_DIR_TARGET}/${libdir} \
1092 ${STAGING_DIR_TARGET}/${includedir} \
1093 ${STAGING_BINDIR_NATIVE} ${STAGING_LIBDIR_NATIVE} \
1094 ${STAGING_INCDIR_NATIVE} \
1095 ${STAGING_DATADIR} \
1098 # Could be compile but populate_staging and do_install shouldn't run at the same time
1099 addtask populate_staging after do_install
1101 SYSROOT_PREPROCESS_FUNCS ?= ""
1102 SYSROOT_DESTDIR = "${WORKDIR}/sysroot-destdir/"
1103 SYSROOT_LOCK = "${STAGING_DIR}/staging.lock"
# Hook points around the legacy staging path; bodies elided in this listing.
1105 python populate_staging_prehook () {
1109 python populate_staging_posthook () {
1113 packagedstaging_fastpath () {
1117 python do_populate_staging () {
1119 # if do_stage exists, we're legacy. In that case run the do_stage,
1120 # modify the SYSROOT_DESTDIR variable and then run the staging preprocess
1121 # functions against staging directly.
1123 # Otherwise setup a destdir, copy the results from do_install
1124 # and run the staging preprocess against that
1126 pstageactive = (bb.data.getVar("PSTAGING_ACTIVE", d, True) == "1")
1127 lockfile = bb.data.getVar("SYSROOT_LOCK", d, True)
1128 stagefunc = bb.data.getVar('do_stage', d, True)
1129 legacy = is_legacy_staging(d)
# Legacy path: run do_stage under the staging lock, with hooks around it.
1131 bb.data.setVar("SYSROOT_DESTDIR", "", d)
1132 bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
1133 if bb.data.getVarFlags('do_stage', d) is None:
1134 bb.fatal("This recipe (%s) has a do_stage_prepend or do_stage_append and do_stage now doesn't exist. Please rename this to do_stage()" % bb.data.getVar("FILE", d, True))
1135 lock = bb.utils.lockfile(lockfile)
1136 bb.build.exec_func('populate_staging_prehook', d)
1137 bb.build.exec_func('do_stage', d)
1138 for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
1139 bb.build.exec_func(f, d)
1140 bb.build.exec_func('populate_staging_posthook', d)
1141 bb.utils.unlockfile(lock)
# Modern path: stage into a private SYSROOT_DESTDIR, run preprocess
# functions, then copy into the real staging tree under the lock.
1143 dest = bb.data.getVar('D', d, True)
1144 sysrootdest = bb.data.expand('${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}', d)
1145 bb.mkdirhier(sysrootdest)
1147 bb.build.exec_func("sysroot_stage_all", d)
1148 #os.system('cp -pPR %s/* %s/' % (dest, sysrootdest))
1149 for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
1150 bb.build.exec_func(f, d)
1151 bb.build.exec_func("packagedstaging_fastpath", d)
1153 lock = bb.utils.lockfile(lockfile)
1154 os.system(bb.data.expand('cp -pPR ${SYSROOT_DESTDIR}${TMPDIR}/* ${TMPDIR}/', d))
1155 bb.utils.unlockfile(lock)
# do_install: runs after compile; ${D} is wiped first via cleandirs.
1158 addtask install after do_compile
1159 do_install[dirs] = "${D} ${S} ${B}"
1160 # Remove and re-create ${D} so that is it guaranteed to be empty
1161 do_install[cleandirs] = "${D}"
# do_build: umbrella task marking the recipe complete.
1171 addtask build after do_populate_staging
1173 do_build[func] = "1"
1177 # Functions that update metadata based on files outputted
1178 # during the build process.
# Parse a dependency string into a list; most of the body is elided from
# this listing (source jumps 1180 -> 1192).
1180 def explode_deps(s):
1192 r[-1] += ' ' + ' '.join(j)
1197 # Make sure MACHINE isn't exported
1198 # (breaks binutils at least)
1199 MACHINE[unexport] = "1"
1201 # Make sure TARGET_ARCH isn't exported
1202 # (breaks Makefiles using implicit rules, e.g. quilt, as GNU make has this
1203 # in them, undocumented)
1204 TARGET_ARCH[unexport] = "1"
1206 # Make sure DISTRO isn't exported
1207 # (breaks sysvinit at least)
1208 DISTRO[unexport] = "1"
1211 def base_after_parse(d):
# Post-parse hook run for every recipe.  From the visible lines it:
#   * skips recipes incompatible with the current host or machine,
#   * honours per-recipe SRCDATE_<pn> / USE_NLS_<pn> overrides,
#   * makes git:// fetches depend on git-native,
#   * computes PACKAGE_ARCH / MULTIMACH_ARCH for 'multimachine' builds.
# NOTE(review): a number of original lines (guard conditionals, early returns,
# 'continue' statements, inner loop headers) are missing from this extract;
# the comments below are hedged accordingly -- confirm against the full file.
1214 source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
# Compatibility checks are only applied when NOT fetching for a source mirror,
# where sources are wanted for every recipe regardless of target.
1215 if not source_mirror_fetch:
1216 need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
1219 this_host = bb.data.getVar('HOST_SYS', d, 1)
# COMPATIBLE_HOST is treated as a regular expression matched against HOST_SYS.
1220 if not re.match(need_host, this_host):
1221 raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
1223 need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
1226 this_machine = bb.data.getVar('MACHINE', d, 1)
# COMPATIBLE_MACHINE is likewise a regex matched against MACHINE.
1227 if this_machine and not re.match(need_machine, this_machine):
1228 raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
1230 pn = bb.data.getVar('PN', d, 1)
1232 # OBSOLETE in bitbake 1.7.4
# Per-recipe SRCDATE_<pn> overrides the global SRCDATE (guard line elided).
1233 srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
1235 bb.data.setVar('SRCDATE', srcdate, d)
# Per-recipe USE_NLS_<pn> overrides the global USE_NLS (guard line elided).
1237 use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
1239 bb.data.setVar('USE_NLS', use_nls, d)
1241 # Git packages should DEPEND on git-native
1242 srcuri = bb.data.getVar('SRC_URI', d, 1)
1243 if "git://" in srcuri:
1244 depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
1245 depends = depends + " git-native:do_populate_staging"
1246 bb.data.setVarFlag('do_fetch', 'depends', depends, d)
1248 # 'multimachine' handling
1249 mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
1250 pkg_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
1252 if (pkg_arch == mach_arch):
1253 # Already machine specific - nothing further to do
1257 # We always try to scan SRC_URI for urls with machine overrides
1258 # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
1260 override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
# Collect candidate ${MACHINE} override directories next to the recipe file.
1263 for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
1264 path = bb.data.expand(os.path.join("${FILE_DIRNAME}", p, "${MACHINE}"), d)
1265 if os.path.isdir(path):
# Any file:// source resolving into one of those directories makes the
# recipe machine-specific: promote PACKAGE_ARCH to ${MACHINE_ARCH}.
1268 for s in srcuri.split():
1269 if not s.startswith("file://"):
1271 local = bb.data.expand(bb.fetch.localpath(s, d), d)
1273 if local.startswith(mp):
1274 #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
1275 bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
1276 bb.data.setVar('MULTIMACH_ARCH', mach_arch, d)
# Otherwise MULTIMACH_ARCH defaults to PACKAGE_ARCH, promoted to the machine
# arch when any individual package declares a machine-specific PACKAGE_ARCH.
1279 multiarch = pkg_arch
1281 packages = bb.data.getVar('PACKAGES', d, 1).split()
1282 for pkg in packages:
1283 pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
1285 # We could look for != PACKAGE_ARCH here but how to choose
1286 # if multiple differences are present?
1287 # Look through PACKAGE_ARCHS for the priority order?
1288 if pkgarch and pkgarch == mach_arch:
1289 multiarch = mach_arch
1292 bb.data.setVar('MULTIMACH_ARCH', multiarch, d)
# Diagnostics for recipes still using the legacy do_stage() staging mechanism.
1296 if is_legacy_staging(d):
1297 bb.debug(1, "Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
1298 if bb.data.getVarFlags('do_stage', d) is None:
1299 bb.error("This recipe (%s) has a do_stage_prepend or do_stage_append and do_stage now doesn't exist. Please rename this to do_stage()" % bb.data.getVar("FILE", d, True))
def check_app_exists(app, d):
    """Return True if the (variable-expanded) application is found on PATH."""
    from bb import which, data

    expanded = data.expand(app, d)
    search_path = data.getVar('PATH', d, 1)
    # which() yields an empty result when nothing is found.
    return bool(which(search_path, expanded))
1310 def check_gcc3(data):
# Look for a usable host gcc 3.x, trying our own-built names first.
# NOTE(review): the return statements are missing from this extract --
# presumably the first matching compiler name is returned, and a false value
# when none is found; confirm against the full source.
1311 # Primarily used by qemu to make sure we have a workable gcc-3.4.x.
1312 # Start by checking for the program name as we build it, as not
1313 # all host-provided gcc-3.4's will work.
1315 gcc3_versions = 'gcc-3.4.6 gcc-3.4.4 gcc34 gcc-3.4 gcc-3.4.7 gcc-3.3 gcc33 gcc-3.3.6 gcc-3.2 gcc32'
1317 for gcc3 in gcc3_versions.split():
1318 if check_app_exists(gcc3, data):
1326 # Configuration data from site files
1327 # Move to autotools.bbclass?
# Export this class's task implementations under base_do_* aliases so that
# inheriting classes/recipes can override a task and still call the original.
1330 EXPORT_FUNCTIONS do_setscene do_clean do_mrproper do_distclean do_fetch do_unpack do_configure do_compile do_install do_package do_populate_pkgs do_rebuild do_fetchall
1334 ${DEBIAN_MIRROR}/main http://snapshot.debian.net/archive/pool
1335 ${DEBIAN_MIRROR} ftp://ftp.de.debian.org/debian/pool
1336 ${DEBIAN_MIRROR} ftp://ftp.au.debian.org/debian/pool
1337 ${DEBIAN_MIRROR} ftp://ftp.cl.debian.org/debian/pool
1338 ${DEBIAN_MIRROR} ftp://ftp.hr.debian.org/debian/pool
1339 ${DEBIAN_MIRROR} ftp://ftp.fi.debian.org/debian/pool
1340 ${DEBIAN_MIRROR} ftp://ftp.hk.debian.org/debian/pool
1341 ${DEBIAN_MIRROR} ftp://ftp.hu.debian.org/debian/pool
1342 ${DEBIAN_MIRROR} ftp://ftp.ie.debian.org/debian/pool
1343 ${DEBIAN_MIRROR} ftp://ftp.it.debian.org/debian/pool
1344 ${DEBIAN_MIRROR} ftp://ftp.jp.debian.org/debian/pool
1345 ${DEBIAN_MIRROR} ftp://ftp.no.debian.org/debian/pool
1346 ${DEBIAN_MIRROR} ftp://ftp.pl.debian.org/debian/pool
1347 ${DEBIAN_MIRROR} ftp://ftp.ro.debian.org/debian/pool
1348 ${DEBIAN_MIRROR} ftp://ftp.si.debian.org/debian/pool
1349 ${DEBIAN_MIRROR} ftp://ftp.es.debian.org/debian/pool
1350 ${DEBIAN_MIRROR} ftp://ftp.se.debian.org/debian/pool
1351 ${DEBIAN_MIRROR} ftp://ftp.tr.debian.org/debian/pool
1352 ${GNU_MIRROR} ftp://mirrors.kernel.org/gnu
1353 ${GNU_MIRROR} ftp://ftp.cs.ubc.ca/mirror2/gnu
1354 ${GNU_MIRROR} ftp://sunsite.ust.hk/pub/gnu
1355 ${GNU_MIRROR} ftp://ftp.ayamura.org/pub/gnu
1356 ${KERNELORG_MIRROR} http://www.kernel.org/pub
1357 ${KERNELORG_MIRROR} ftp://ftp.us.kernel.org/pub
1358 ${KERNELORG_MIRROR} ftp://ftp.uk.kernel.org/pub
1359 ${KERNELORG_MIRROR} ftp://ftp.hk.kernel.org/pub
1360 ${KERNELORG_MIRROR} ftp://ftp.au.kernel.org/pub
1361 ${KERNELORG_MIRROR} ftp://ftp.jp.kernel.org/pub
1362 ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
1363 ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
1364 ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
1365 ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
1366 ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
1367 ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
1368 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
1369 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
1370 ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
1371 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
1372 ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
1373 http://ftp.info-zip.org/pub/infozip/src/ http://mirror.switch.ch/ftp/mirror/infozip/src/
1374 http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/
1375 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cerias.purdue.edu/pub/tools/unix/sysutils/lsof/
1376 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tau.ac.il/pub/unix/admin/
1377 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cert.dfn.de/pub/tools/admin/lsof/
1378 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.fu-berlin.de/pub/unix/tools/lsof/
1379 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.kaizo.org/pub/lsof/
1380 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tu-darmstadt.de/pub/sysadmin/lsof/
1381 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tux.org/pub/sites/vic.cc.purdue.edu/tools/unix/lsof/
1382 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://gd.tuwien.ac.at/utils/admin-tools/lsof/
1383 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://sunsite.ualberta.ca/pub/Mirror/lsof/
1384 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://the.wiretapped.net/pub/security/host-security/lsof/
1385 http://www.apache.org/dist http://archive.apache.org/dist
1386 ftp://.*/.* http://mirrors.openembedded.org/
1387 https?$://.*/.* http://mirrors.openembedded.org/
1388 ftp://.*/.* http://sources.openembedded.org/
1389 https?$://.*/.* http://sources.openembedded.org/