libsdl-x11: use newer sdl.m4 macro from svn
[openembedded.git] / classes / base.bbclass
blob384a67d3d6ed534fd55de12a8677c27a03bad053
# Task executed when bitbake is invoked with no explicit task argument.
BB_DEFAULT_TASK ?= "build"
# like os.path.join but doesn't treat absolute RHS specially
def base_path_join(a, *p):
    """Join path components with '/', never discarding earlier components
    even when a later one starts with '/'."""
    result = a
    for component in p:
        # Insert a separator only when one is not already present.
        if result and not result.endswith('/'):
            result += '/'
        result += component
    return result
def base_path_relative(src, dest):
    """Return a relative path leading from src to dest.

    Examples: ("/usr/bin", "/tmp/foo/bar") -> "../../tmp/foo/bar",
    ("/usr/bin", "/usr/lib") -> "../lib", ("/tmp", "/tmp/foo/bar") -> "foo/bar".
    """
    from os.path import sep, pardir, normpath, commonprefix

    dest_parts = normpath(dest).split(sep)
    src_parts = normpath(src).split(sep)

    # Number of leading path components the two paths share.
    shared = len(commonprefix([dest_parts, src_parts]))

    # Climb out of the unshared tail of src, then descend into dest.
    parts = [pardir] * (len(src_parts) - shared)
    parts.extend(dest_parts[shared:])

    return sep.join(parts)
def base_path_out(path, d):
    """ Prepare a path for display to the user. """
    # Prefer the TOPDIR-relative form, but only when it is actually shorter.
    rel = base_path_relative(d.getVar("TOPDIR", 1), path)
    return rel if len(rel) <= len(path) else path
# for MD5/SHA handling
def base_chk_load_parser(config_paths):
    """Load the checksums.ini files in config_paths into a ConfigParser.

    Raises ValueError when none of the paths could be read.
    """
    import ConfigParser
    parser = ConfigParser.ConfigParser()
    parsed = parser.read(config_paths)
    if not parsed:
        raise ValueError("no ini files could be found")
    return parser
def base_chk_file_vars(parser, localpath, params, data):
    """Check localpath against checksums attached to its SRC_URI entry.

    The checksums come from the SRC_URI flags "<name>.md5sum" and
    "<name>.sha256sum", where <name> is the name= parameter of the URI.
    Returns False when there is nothing to check (no name= parameter or no
    checksum flags set); returns True when all present checksums match.
    Raises Exception when the file is missing or a checksum differs.
    """
    try:
        name = params["name"]
    except KeyError:
        # URI carries no name= parameter, so no per-name flags can apply.
        return False
    flagName = "%s.md5sum" % name
    want_md5sum = bb.data.getVarFlag("SRC_URI", flagName, data)
    flagName = "%s.sha256sum" % name
    want_sha256sum = bb.data.getVarFlag("SRC_URI", flagName, data)

    if (want_sha256sum == None and want_md5sum == None):
        # no checksums to check, nothing to do
        return False

    if not os.path.exists(localpath):
        localpath = base_path_out(localpath, data)
        bb.note("The localpath does not exist '%s'" % localpath)
        raise Exception("The path does not exist '%s'" % localpath)

    if want_md5sum:
        try:
            # PATH is forced so the build's own md5sum binary is used.
            md5pipe = os.popen('PATH=%s md5sum %s' % (bb.data.getVar('PATH', data, True), localpath))
            md5data = (md5pipe.readline().split() or [ "" ])[0]
            md5pipe.close()
        except OSError, e:
            raise Exception("Executing md5sum failed")
        if want_md5sum != md5data:
            bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (want_md5sum, md5data))
            raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (want_md5sum, md5data))

    if want_sha256sum:
        try:
            # oe_sha256sum is expected to be on PATH (staged by shasum-native).
            shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
            sha256data = (shapipe.readline().split() or [ "" ])[0]
            shapipe.close()
        except OSError, e:
            raise Exception("Executing shasum failed")
        if want_sha256sum != sha256data:
            bb.note("The SHA256Sums did not match. Wanted: '%s' and Got: '%s'" % (want_sha256sum, sha256data))
            raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (want_sha256sum, sha256data))

    return True
def base_chk_file(parser, pn, pv, src_uri, localpath, data):
    """Verify localpath against md5/sha256 entries from checksums.ini.

    Section lookup order: "<pn>-<pv>-<uri>", then "<pn>-<uri>", then the
    bare uri.  When no section exists, the computed sums are appended to
    ${TMPDIR}/checksums.ini and the return value depends on
    OE_STRICT_CHECKSUMS (True when non-strict, False when strict).
    Raises Exception when the file is missing or a checksum differs.
    """
    no_checksum = False
    # Try PN-PV-SRC_URI first and then try PN-SRC_URI
    # we rely on the get method to create errors
    pn_pv_src = "%s-%s-%s" % (pn,pv,src_uri)
    pn_src    = "%s-%s" % (pn,src_uri)
    if parser.has_section(pn_pv_src):
        md5    = parser.get(pn_pv_src, "md5")
        sha256 = parser.get(pn_pv_src, "sha256")
    elif parser.has_section(pn_src):
        md5    = parser.get(pn_src, "md5")
        sha256 = parser.get(pn_src, "sha256")
    elif parser.has_section(src_uri):
        md5    = parser.get(src_uri, "md5")
        sha256 = parser.get(src_uri, "sha256")
    else:
        no_checksum = True

    # md5 and sha256 should be valid now
    if not os.path.exists(localpath):
        localpath = base_path_out(localpath, data)
        bb.note("The localpath does not exist '%s'" % localpath)
        raise Exception("The path does not exist '%s'" % localpath)

    # call md5(sum) and shasum
    try:
        md5pipe = os.popen('PATH=%s md5sum %s' % (bb.data.getVar('PATH', data, True), localpath))
        md5data = (md5pipe.readline().split() or [ "" ])[0]
        md5pipe.close()
    except OSError:
        raise Exception("Executing md5sum failed")

    try:
        shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
        shadata = (shapipe.readline().split() or [ "" ])[0]
        shapipe.close()
    except OSError:
        raise Exception("Executing shasum failed")

    if no_checksum == True:     # we do not have conf/checksums.ini entry
        try:
            # record the freshly computed sums so they can be added upstream
            file = open("%s/checksums.ini" % bb.data.getVar("TMPDIR", data, 1), "a")
        except:
            return False

        if not file:
            raise Exception("Creating checksums.ini failed")
        
        file.write("[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5data, shadata))
        file.close()
        if not bb.data.getVar("OE_STRICT_CHECKSUMS",data, True):
            bb.note("This package has no entry in checksums.ini, please add one")
            bb.note("\n[%s]\nmd5=%s\nsha256=%s" % (src_uri, md5data, shadata))
            return True
        else:
            bb.note("Missing checksum")
            return False

    if not md5 == md5data:
        bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (md5,md5data))
        raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (md5, md5data))

    if not sha256 == shadata:
        bb.note("The SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256,shadata))
        raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256, shadata))

    return True
def base_dep_prepend(d):
        """Return the default build dependencies to prepend to DEPENDS.

        Ideally this would check a flag so we operate properly when
        host == build == target; for now that case is not handled.
        """
        pn = bb.data.getVar('PN', d, True)
        if pn == "shasum-native" or pn == "stagemanager-native":
                deps = ""
        elif pn == "coreutils-native":
                deps = "shasum-native"
        else:
                deps = "shasum-native coreutils-native"

        # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command.  Whether or  not
        # we need that built is the responsibility of the patch function / class, not
        # the application.
        if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
                if bb.data.getVar('HOST_SYS', d, 1) != bb.data.getVar('BUILD_SYS', d, 1):
                        deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
        return deps
def base_read_file(filename):
        """Return the stripped contents of filename, or "" when it is unreadable.

        Read errors are swallowed on purpose:
        WARNING: can't raise an error now because of the new RDEPENDS handling.
        This is a bit ugly. :M:
        """
        try:
                f = open(filename, "r")
        except IOError:
                return ""
        # Close the handle even if read() raises (the original leaked it).
        try:
                return f.read().strip()
        finally:
                f.close()
def base_ifelse(condition, iftrue = True, iffalse = False):
    """Return iftrue when condition is truthy, otherwise iffalse."""
    return iftrue if condition else iffalse
def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
        """Return truevalue when the expanded variable equals checkvalue."""
        return truevalue if bb.data.getVar(variable, d, 1) == checkvalue else falsevalue
def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
        """Numeric test: truevalue when float(variable) <= float(checkvalue)."""
        current = float(bb.data.getVar(variable, d, 1))
        return truevalue if current <= float(checkvalue) else falsevalue
def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    """Version test via bb.vercmp: truevalue when variable <= checkvalue."""
    if bb.vercmp(bb.data.getVar(variable, d, True), checkvalue) <= 0:
        return truevalue
    return falsevalue
def base_contains(variable, checkvalues, truevalue, falsevalue, d):
        """Return truevalue when every item of checkvalues occurs as a
        substring of the expanded variable, otherwise falsevalue.

        checkvalues may be a single string or a list of strings.
        """
        val = bb.data.getVar(variable, d, 1)
        if isinstance(checkvalues, str):
                checkvalues = [checkvalues]
        for needle in checkvalues:
                if needle not in val:
                        return falsevalue
        return truevalue
def base_both_contain(variable1, variable2, checkvalue, d):
       """Return checkvalue when it occurs in both expanded variables, else ""."""
       first = bb.data.getVar(variable1, d, 1)
       second = bb.data.getVar(variable2, d, 1)
       if checkvalue in first and checkvalue in second:
               return checkvalue
       return ""
# Prepend the default build dependencies (see base_dep_prepend) to every
# recipe, including its native and nativesdk variants.
DEPENDS_prepend="${@base_dep_prepend(d)} "
DEPENDS_virtclass-native_prepend="${@base_dep_prepend(d)} "
DEPENDS_virtclass-nativesdk_prepend="${@base_dep_prepend(d)} "
def base_prune_suffix(var, suffixes, d):
    """Return var with the first matching trailing suffix removed.

    Only the trailing occurrence is stripped; the original implementation
    used str.replace, which also deleted identical substrings earlier in
    var (e.g. "foo-native-bar-native" lost both "-native"s).
    """
    for suffix in suffixes:
        # Guard against an empty suffix: var[:-0] would return "".
        if suffix and var.endswith(suffix):
            return var[:-len(suffix)]
    return var
def base_set_filespath(path, d):
        """Build a ":"-separated FILESPATH value: every entry of path crossed
        with every OVERRIDES element (plus an empty, no-override entry)."""
        bb.note("base_set_filespath usage is deprecated, %s should be fixed" % d.getVar("P", 1))
        # The ":" ensures we have an 'empty' override
        overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":"
        filespath = [os.path.join(p, o) for p in path for o in overrides.split(":")]
        return ":".join(filespath)
def oe_filter(f, str, d):
        """Return the whitespace-separated words of str whose start matches
        the regular expression f, joined by single spaces."""
        from re import match
        return " ".join([word for word in str.split() if match(f, word, 0)])
def oe_filter_out(f, str, d):
        """Return the whitespace-separated words of str whose start does NOT
        match the regular expression f, joined by single spaces."""
        from re import match
        return " ".join([word for word in str.split() if not match(f, word, 0)])
# die MESSAGE...: report a fatal error via oefatal (which exits the shell).
die() {
	oefatal "$*"

# oenote MESSAGE...: informational message.
oenote() {
	echo "NOTE:" "$*"

# oewarn MESSAGE...: warning message.
oewarn() {
	echo "WARNING:" "$*"

# oefatal MESSAGE...: error message, then abort with exit status 1.
oefatal() {
	echo "FATAL:" "$*"
	exit 1

# oedebug LEVEL MESSAGE...: print MESSAGE when ${OEDEBUG} (default 0) >= LEVEL.
oedebug() {
	test $# -ge 2 || {
		echo "Usage: oedebug level \"message\""
		exit 1
	}

	test ${OEDEBUG:-0} -ge $1 && {
		shift
		echo "DEBUG:" $*
	}

# oe_runmake ARGS...: run ${MAKE} (default "make") with EXTRA_OEMAKE and the
# given arguments, dying when make fails.
oe_runmake() {
	if [ x"$MAKE" = x ]; then MAKE=make; fi
	oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
oe_soinstall() {
	# Purpose: Install shared library file and
	#          create the necessary links
	# Example:
	#
	# oe_soinstall libfoo.so.1.2.3 ${D}${libdir}
	#
	#oenote installing shared library $1 to $2
	#
	libname=`basename $1`
	install -m 755 $1 $2/$libname
	# Pull the DT_SONAME out of the ELF dynamic section to name the link.
	sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
	# Unversioned .so link for the linker.
	solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
	ln -sf $libname $2/$sonamelink
	ln -sf $libname $2/$solink
327 oe_libinstall() {
328         # Purpose: Install a library, in all its forms
329         # Example
330         #
331         # oe_libinstall libltdl ${STAGING_LIBDIR}/
332         # oe_libinstall -C src/libblah libblah ${D}/${libdir}/
333         dir=""
334         libtool=""
335         silent=""
336         require_static=""
337         require_shared=""
338         staging_install=""
339         while [ "$#" -gt 0 ]; do
340                 case "$1" in
341                 -C)
342                         shift
343                         dir="$1"
344                         ;;
345                 -s)
346                         silent=1
347                         ;;
348                 -a)
349                         require_static=1
350                         ;;
351                 -so)
352                         require_shared=1
353                         ;;
354                 -*)
355                         oefatal "oe_libinstall: unknown option: $1"
356                         ;;
357                 *)
358                         break;
359                         ;;
360                 esac
361                 shift
362         done
364         libname="$1"
365         shift
366         destpath="$1"
367         if [ -z "$destpath" ]; then
368                 oefatal "oe_libinstall: no destination path specified"
369         fi
370         if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
371         then
372                 staging_install=1
373         fi
375         __runcmd () {
376                 if [ -z "$silent" ]; then
377                         echo >&2 "oe_libinstall: $*"
378                 fi
379                 $*
380         }
382         if [ -z "$dir" ]; then
383                 dir=`pwd`
384         fi
386         dotlai=$libname.lai
388         # Sanity check that the libname.lai is unique
389         number_of_files=`(cd $dir; find . -name "$dotlai") | wc -l`
390         if [ $number_of_files -gt 1 ]; then
391                 oefatal "oe_libinstall: $dotlai is not unique in $dir"
392         fi
395         dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
396         olddir=`pwd`
397         __runcmd cd $dir
399         lafile=$libname.la
401         # If such file doesn't exist, try to cut version suffix
402         if [ ! -f "$lafile" ]; then
403                 libname1=`echo "$libname" | sed 's/-[0-9.]*$//'`
404                 lafile1=$libname.la
405                 if [ -f "$lafile1" ]; then
406                         libname=$libname1
407                         lafile=$lafile1
408                 fi
409         fi
411         if [ -f "$lafile" ]; then
412                 # libtool archive
413                 eval `cat $lafile|grep "^library_names="`
414                 libtool=1
415         else
416                 library_names="$libname.so* $libname.dll.a"
417         fi
419         __runcmd install -d $destpath/
420         dota=$libname.a
421         if [ -f "$dota" -o -n "$require_static" ]; then
422                 __runcmd install -m 0644 $dota $destpath/
423         fi
424         if [ -f "$dotlai" -a -n "$libtool" ]; then
425                 if test -n "$staging_install"
426                 then
427                         # stop libtool using the final directory name for libraries
428                         # in staging:
429                         __runcmd rm -f $destpath/$libname.la
430                         __runcmd sed -e 's/^installed=yes$/installed=no/' \
431                                      -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
432                                      -e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
433                                      $dotlai >$destpath/$libname.la
434                 else
435                         __runcmd install -m 0644 $dotlai $destpath/$libname.la
436                 fi
437         fi
439         for name in $library_names; do
440                 files=`eval echo $name`
441                 for f in $files; do
442                         if [ ! -e "$f" ]; then
443                                 if [ -n "$libtool" ]; then
444                                         oefatal "oe_libinstall: $dir/$f not found."
445                                 fi
446                         elif [ -L "$f" ]; then
447                                 __runcmd cp -P "$f" $destpath/
448                         elif [ ! -L "$f" ]; then
449                                 libfile="$f"
450                                 __runcmd install -m 0755 $libfile $destpath/
451                         fi
452                 done
453         done
455         if [ -z "$libfile" ]; then
456                 if  [ -n "$require_shared" ]; then
457                         oefatal "oe_libinstall: unable to locate shared library"
458                 fi
459         elif [ -z "$libtool" ]; then
460                 # special case hack for non-libtool .so.#.#.# links
461                 baselibfile=`basename "$libfile"`
462                 if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
463                         sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
464                         solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
465                         if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
466                                 __runcmd ln -sf $baselibfile $destpath/$sonamelink
467                         fi
468                         __runcmd ln -sf $baselibfile $destpath/$solink
469                 fi
470         fi
472         __runcmd cd "$olddir"
def package_stagefile(file, d):
    """Mirror file into the packaged-staging tree when PSTAGING_ACTIVE is "1"."""
    if bb.data.getVar('PSTAGING_ACTIVE', d, True) == "1":
        # Map the TMPDIR-relative location into PSTAGE_TMPDIR_STAGE.
        destfile = file.replace(bb.data.getVar("TMPDIR", d, 1), bb.data.getVar("PSTAGE_TMPDIR_STAGE", d, 1))
        bb.mkdirhier(os.path.dirname(destfile))
        #print "%s to %s" % (file, destfile)
        bb.copyfile(file, destfile)
# Shell counterpart of package_stagefile: copy $1 into the packaged-staging
# tree when PSTAGING_ACTIVE is "1".
package_stagefile_shell() {
	if [ "$PSTAGING_ACTIVE" = "1" ]; then
		srcfile=$1
		# Map the TMPDIR-relative location into PSTAGE_TMPDIR_STAGE.
		destfile=`echo $srcfile | sed s#${TMPDIR}#${PSTAGE_TMPDIR_STAGE}#`
		destdir=`dirname $destfile`
		mkdir -p $destdir
		cp -dp $srcfile $destfile
	fi
oe_machinstall() {
	# Purpose: Install machine dependent files, if available
	#          If not available, check if there is a default
	#          If no default, just touch the destination
	# Example:
	#                $1  $2   $3         $4
	# oe_machinstall -m 0644 fstab ${D}/etc/fstab
	#
	# TODO: Check argument number?
	#
	filename=`basename $3`
	dirname=`dirname $3`

	# Try each OVERRIDES element as a subdirectory holding a variant;
	# first match wins.
	for o in `echo ${OVERRIDES} | tr ':' ' '`; do
		if [ -e $dirname/$o/$filename ]; then
			oenote $dirname/$o/$filename present, installing to $4
			install $1 $2 $dirname/$o/$filename $4
			return
		fi
	done
#	oenote overrides specific file NOT present, trying default=$3...
	if [ -e $3 ]; then
		oenote $3 present, installing to $4
		install $1 $2 $3 $4
	else
		oenote $3 NOT present, touching empty $4
		touch $4
	fi
addtask listtasks
do_listtasks[nostamp] = "1"
# Print the name of every variable flagged as a task (i.e. all do_* tasks).
python do_listtasks() {
	import sys
	# emit variables and shell functions
	#bb.data.emit_env(sys.__stdout__, d)
	# emit the metadata which isnt valid shell
	for e in d.keys():
		if bb.data.getVarFlag(e, 'task', d):
			sys.__stdout__.write("%s\n" % e)
addtask clean
do_clean[dirs] = "${TOPDIR}"
do_clean[nostamp] = "1"
python base_do_clean() {
	"""clear the build and temp directories"""
	dir = bb.data.expand("${WORKDIR}", d)
	# "//" means WORKDIR expanded to nothing; refuse to rm -rf that.
	if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
	bb.note("removing " + base_path_out(dir, d))
	os.system('rm -rf ' + dir)

	# Also remove all stamp files for this recipe.
	dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
	bb.note("removing " + base_path_out(dir, d))
	os.system('rm -f '+ dir)
# cleanall: body is a no-op; the recrdeptask flag below makes do_clean run
# recursively over everything this recipe depends on.
python do_cleanall() {
    pass

do_cleanall[recrdeptask] = "do_clean"
addtask cleanall after do_clean
addtask rebuild after do_${BB_DEFAULT_TASK}
do_rebuild[dirs] = "${TOPDIR}"
do_rebuild[nostamp] = "1"
python base_do_rebuild() {
	"""rebuild a package"""
	# NOTE(review): body appears intentionally empty; the "after
	# do_${BB_DEFAULT_TASK}" ordering above drives the rebuild.
addtask mrproper
do_mrproper[dirs] = "${TOPDIR}"
do_mrproper[nostamp] = "1"
python base_do_mrproper() {
	"""clear downloaded sources, build and temp directories"""
	dir = bb.data.expand("${DL_DIR}", d)
	# BUGFIX: the exception was constructed but never raised, so this
	# guard against DL_DIR expanding to "/" (filesystem root!) was a
	# no-op before the rm -rf below.  Message also said DATADIR although
	# the value checked is DL_DIR.
	if dir == '/': raise bb.build.FuncFailed("wrong DL_DIR")
	bb.debug(2, "removing " + dir)
	os.system('rm -rf ' + dir)
	bb.build.exec_func('do_clean', d)
addtask distclean
do_distclean[dirs] = "${TOPDIR}"
do_distclean[nostamp] = "1"
python base_do_distclean() {
	"""clear downloaded sources, build and temp directories"""

	bb.build.exec_func('do_clean', d)

	src_uri = bb.data.getVar('SRC_URI', d, 1)
	if not src_uri:
		return

	for uri in src_uri.split():
		# file:// sources live in the metadata tree; keep them.
		if bb.decodeurl(uri)[0] == "file":
			continue

		try:
			local = bb.data.expand(bb.fetch.localpath(uri, d), d)
		except bb.MalformedUrl, e:
			bb.debug(1, 'Unable to generate local path for malformed uri: %s' % e)
		else:
			bb.note("removing %s" % base_path_out(local, d))
			try:
				# Remove the download and its stored md5 stamp.
				if os.path.exists(local + ".md5"):
					os.remove(local + ".md5")
				if os.path.exists(local):
					os.remove(local)
			except OSError, e:
				bb.note("Error in removal: %s" % e)
SCENEFUNCS += "base_scenefunction"

# Run every function listed in SCENEFUNCS, then stamp do_setscene itself.
python base_do_setscene () {
	for f in (bb.data.getVar('SCENEFUNCS', d, 1) or '').split():
		bb.build.exec_func(f, d)
	if not os.path.exists(bb.data.getVar('STAMP', d, 1) + ".do_setscene"):
		bb.build.make_stamp("do_setscene", d)

do_setscene[selfstamp] = "1"
addtask setscene before do_fetch
python base_scenefunction () {
	# A ".needclean" stamp forces a do_clean before anything else runs.
	stamp = bb.data.getVar('STAMP', d, 1) + ".needclean"
	if os.path.exists(stamp):
		bb.build.exec_func("do_clean", d)
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[depends] = "shasum-native:do_populate_staging"
# Fetch every SRC_URI entry, then verify checksums for remote archives
# against the conf/checksums.ini databases found along BBPATH.
python base_do_fetch() {
	import sys

	localdata = bb.data.createCopy(d)
	bb.data.update_data(localdata)

	src_uri = bb.data.getVar('SRC_URI', localdata, 1)
	if not src_uri:
		return 1

	try:
		bb.fetch.init(src_uri.split(),d)
	except bb.fetch.NoMethodError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("No method: %s" % value)
	except bb.MalformedUrl:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Malformed URL: %s" % value)

	try:
		bb.fetch.go(localdata)
	except bb.fetch.MissingParameterError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Missing parameters: %s" % value)
	except bb.fetch.FetchError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Fetch failed: %s" % value)
	except bb.fetch.MD5SumError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("MD5  failed: %s" % value)
	except:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)

	# Verify the SHA and MD5 sums we have in OE and check what do
	# in
	checksum_paths = bb.data.getVar('BBPATH', d, True).split(":")

	# reverse the list to give precedence to directories that
	# appear first in BBPATH
	checksum_paths.reverse()

	checksum_files = ["%s/conf/checksums.ini" % path for path in checksum_paths]
	try:
		parser = base_chk_load_parser(checksum_files)
	except ValueError:
		bb.note("No conf/checksums.ini found, not checking checksums")
		return
	except:
		bb.note("Creating the CheckSum parser failed: %s:%s" % (sys.exc_info()[0], sys.exc_info()[1]))
		return

	pv = bb.data.getVar('PV', d, True)
	pn = bb.data.getVar('PN', d, True)

	# Check each URI
	for url in src_uri.split():
		localpath = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
		(type,host,path,_,_,params) = bb.decodeurl(url)
		uri = "%s://%s%s" % (type,host,path)
		try:
			# Per-URI SRC_URI flags take precedence; fall back to
			# the checksums.ini database for remote archives.
			if type in [ "http", "https", "ftp", "ftps" ]:
				if not (base_chk_file_vars(parser, localpath, params, d) or base_chk_file(parser, pn, pv,uri, localpath, d)):
					if not bb.data.getVar("OE_ALLOW_INSECURE_DOWNLOADS", d, True):
						bb.fatal("%s-%s: %s has no checksum defined, cannot check archive integrity" % (pn,pv,uri))
					else:
						bb.note("%s-%s: %s has no checksum defined, archive integrity not checked" % (pn,pv,uri))
		except Exception:
			raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
addtask fetchall after do_fetch
do_fetchall[recrdeptask] = "do_fetch"
# No-op body; the recrdeptask flag above makes do_fetch run recursively.
base_do_fetchall() {
	:
addtask checkuri
do_checkuri[nostamp] = "1"
# Verify every SRC_URI is reachable without actually downloading anything.
python do_checkuri() {
	import sys

	localdata = bb.data.createCopy(d)
	bb.data.update_data(localdata)

	src_uri = bb.data.getVar('SRC_URI', localdata, 1)

	try:
		bb.fetch.init(src_uri.split(),d)
	except bb.fetch.NoMethodError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("No method: %s" % value)

	try:
		bb.fetch.checkstatus(localdata)
	except bb.fetch.MissingParameterError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Missing parameters: %s" % value)
	except bb.fetch.FetchError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Fetch failed: %s" % value)
	except bb.fetch.MD5SumError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("MD5  failed: %s" % value)
	except:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
addtask checkuriall after do_checkuri
do_checkuriall[recrdeptask] = "do_checkuri"
do_checkuriall[nostamp] = "1"
# No-op body; the recrdeptask flag above makes do_checkuri run recursively.
base_do_checkuriall() {
	:
addtask buildall after do_build
do_buildall[recrdeptask] = "do_build"
# No-op body; the recrdeptask flag above makes do_build run recursively.
base_do_buildall() {
	:
def subprocess_setup():
        """preexec_fn for subprocess: restore default SIGPIPE handling.

        Python installs a SIGPIPE handler by default. This is usually not
        what non-Python subprocesses expect.
        """
        import signal
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def oe_unpack_file(file, data, url = None):
        """Unpack one fetched file into the current directory.

        The unpack command is chosen from the file extension (tar / gzip /
        bzip2 / zip); plain files and FILESPATH directories are copied.
        Honours the subdir= and dos= URL parameters.  Returns True on
        success or when nothing needed doing, False when the command failed.
        """
        import subprocess
        if not url:
                url = "file://%s" % file
        dots = file.split(".")
        if dots[-1] in ['gz', 'bz2', 'Z']:
                # Decompressed output name: WORKDIR + basename minus suffix.
                efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
        else:
                efile = file
        cmd = None
        if file.endswith('.tar'):
                cmd = 'tar x --no-same-owner -f %s' % file
        elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
                cmd = 'tar xz --no-same-owner -f %s' % file
        elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
        elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                cmd = 'gzip -dc %s > %s' % (file, efile)
        elif file.endswith('.bz2'):
                cmd = 'bzip2 -dc %s > %s' % (file, efile)
        elif file.endswith('.zip') or file.endswith('.jar'):
                cmd = 'unzip -q -o'
                (type, host, path, user, pswd, parm) = bb.decodeurl(url)
                if 'dos' in parm:
                        # dos= parameter: auto-convert text line endings.
                        cmd = '%s -a' % cmd
                cmd = '%s %s' % (cmd, file)
        elif os.path.isdir(file):
                # A directory out of FILESPATH: preserve its relative layout.
                destdir = "."
                filespath = bb.data.getVar("FILESPATH", data, 1).split(":")
                for fp in filespath:
                        if file[0:len(fp)] == fp:
                                destdir = file[len(fp):file.rfind('/')]
                                destdir = destdir.strip('/')
                                if len(destdir) < 1:
                                        destdir = "."
                                elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
                                        os.makedirs("%s/%s" % (os.getcwd(), destdir))
                                break

                cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
        else:
                (type, host, path, user, pswd, parm) = bb.decodeurl(url)
                if not 'patch' in parm:
                        # The "destdir" handling was specifically done for FILESPATH
                        # items.  So, only do so for file:// entries.
                        if type == "file":
                                destdir = bb.decodeurl(url)[1] or "."
                        else:
                                destdir = "."
                        bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
                        cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)

        if not cmd:
                return True

        dest = os.path.join(os.getcwd(), os.path.basename(file))
        if os.path.exists(dest):
                if os.path.samefile(file, dest):
                        # Source and destination are the same file: nothing to do.
                        return True

        # Change to subdir before executing command
        save_cwd = os.getcwd();
        parm = bb.decodeurl(url)[5]
        if 'subdir' in parm:
                newdir = ("%s/%s" % (os.getcwd(), parm['subdir']))
                bb.mkdirhier(newdir)
                os.chdir(newdir)

        cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
        bb.note("Unpacking %s to %s/" % (base_path_out(file, data), base_path_out(os.getcwd(), data)))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)

        os.chdir(save_cwd)

        return ret == 0
832 addtask unpack after do_fetch
833 do_unpack[dirs] = "${WORKDIR}"
# Task: unpack every SRC_URI entry into WORKDIR via oe_unpack_file().
python base_do_unpack() {
        import re
        # NOTE(review): 're' appears unused in this function; kept as-is.
        # Work on a copy of the datastore with overrides applied.
        localdata = bb.data.createCopy(d)
        bb.data.update_data(localdata)
        src_uri = bb.data.getVar('SRC_URI', localdata)
        if not src_uri:
                # Nothing to unpack for this recipe.
                return
        src_uri = bb.data.expand(src_uri, localdata)
        for url in src_uri.split():
                try:
                        # Map the URI to the file the fetcher downloaded.
                        local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
                except bb.MalformedUrl, e:
                        raise bb.build.FuncFailed('Unable to generate local path for malformed uri: %s' % e)
                if not local:
                        raise bb.build.FuncFailed('Unable to locate local file for %s' % url)
                local = os.path.realpath(local)
                # oe_unpack_file returns True on success / skip, False on failure.
                ret = oe_unpack_file(local, localdata, url)
                if not ret:
                        raise bb.build.FuncFailed()
857 METADATA_SCM = "${@base_get_scm(d)}"
858 METADATA_REVISION = "${@base_get_scm_revision(d)}"
859 METADATA_BRANCH = "${@base_get_scm_branch(d)}"
def base_get_scm(d):
        """Identify the SCM holding the metadata checkout that provides base.bbclass.

        Returns a string "<scm> <path>", or "<unknown> <path>" when no known
        SCM control directory is present at the repository root.
        """
        from bb import which
        # classes/base.bbclass lives two levels below the repository root.
        bbclass_path = which(d.getVar("BBPATH", 1), "classes/base.bbclass")
        baserepo = os.path.dirname(os.path.dirname(bbclass_path))
        markers = (("svn", ".svn"), ("git", ".git"), ("monotone", "_MTN"))
        for scm, control_dir in markers:
                if os.path.exists(os.path.join(baserepo, control_dir)):
                        return "%s %s" % (scm, baserepo)
        return "<unknown> %s" % baserepo
def base_get_scm_revision(d):
        """Return the revision of the metadata checkout.

        Dispatches to base_get_metadata_<scm>_revision based on METADATA_SCM;
        yields "<unknown>" when the SCM is unknown or no helper exists.
        """
        scm, path = d.getVar("METADATA_SCM", 1).split()
        if scm == "<unknown>":
                return scm
        try:
                # A KeyError from the globals() lookup (missing helper) or from
                # the helper itself both fall back to "<unknown>", as before.
                return globals()["base_get_metadata_%s_revision" % scm](path, d)
        except KeyError:
                return "<unknown>"
def base_get_scm_branch(d):
        """Return the branch of the metadata checkout.

        Dispatches to base_get_metadata_<scm>_branch based on METADATA_SCM;
        yields "<unknown>" when the SCM is unknown or no helper exists.
        """
        scm, path = d.getVar("METADATA_SCM", 1).split()
        if scm == "<unknown>":
                return scm
        try:
                # A KeyError from the globals() lookup (missing helper) or from
                # the helper itself both fall back to "<unknown>", as before.
                return globals()["base_get_metadata_%s_branch" % scm](path, d)
        except KeyError:
                return "<unknown>"
def base_get_metadata_monotone_branch(path, d):
        """Return the monotone branch recorded in <path>/_MTN/options.

        path: root of the monotone workspace (contains _MTN/).
        d:    datastore (unused; kept for helper-signature symmetry).

        Returns the branch name, or "<unknown>" when the options file is
        missing.  If the file exists but cannot be parsed, its stripped raw
        contents are returned, matching the historical behaviour.
        """
        monotone_branch = "<unknown>"
        try:
                # 'open' replaces the py2-only 'file' builtin (identical here).
                monotone_branch = open("%s/_MTN/options" % path).read().strip()
                if monotone_branch.startswith("database"):
                        # _MTN/options looks like:
                        #   database "/path/to/db.mtn"
                        #   branch "com.example.branch"
                        # Take the word after "branch" and strip its quotes.
                        words = monotone_branch.split()
                        monotone_branch = words[words.index("branch") + 1][1:-1]
        except (IOError, ValueError, IndexError):
                # Missing/unreadable file or unexpected layout.  Previously a
                # bare 'except:', which also hid unrelated errors.
                pass
        return monotone_branch
def base_get_metadata_monotone_revision(path, d):
        """Return the monotone revision recorded in <path>/_MTN/revision.

        path: root of the monotone workspace (contains _MTN/).
        d:    datastore (unused; kept for helper-signature symmetry).

        Returns "<unknown>" when the file is missing/unreadable; if the file
        exists but does not start with "format_version", its stripped raw
        contents are returned, matching the historical behaviour.
        """
        monotone_revision = "<unknown>"
        try:
                # 'open' replaces the py2-only 'file' builtin (identical here).
                monotone_revision = open("%s/_MTN/revision" % path).read().strip()
                if monotone_revision.startswith("format_version"):
                        # Take the word after "old_revision" and strip the [ ] brackets.
                        words = monotone_revision.split()
                        monotone_revision = words[words.index("old_revision") + 1][1:-1]
        except IOError:
                pass
        return monotone_revision
def base_get_metadata_svn_revision(path, d):
        """Return the subversion revision of the checkout at 'path'.

        Reads the fourth line of .svn/entries (the revision field of the
        pre-1.7 entries format).  Returns "<unknown>" when the file is
        missing or shorter than expected.
        """
        revision = "<unknown>"
        try:
                # 'open' replaces the py2-only 'file' builtin (identical here).
                revision = open("%s/.svn/entries" % path).readlines()[3].strip()
        except (IOError, IndexError):
                # IndexError: truncated entries file — treat as unknown rather
                # than crashing (previously only IOError was handled).
                pass
        return revision
def base_get_metadata_git_branch(path, d):
        """Return the current git branch of the checkout at 'path',
        or "<unknown>" when it cannot be determined."""
        cmd = 'cd %s; PATH=%s git symbolic-ref HEAD 2>/dev/null' % (path, d.getVar("PATH", 1))
        output = os.popen(cmd).read().rstrip()
        if not output:
                return "<unknown>"
        # "refs/heads/master" -> "master"
        return output.replace("refs/heads/", "")
def base_get_metadata_git_revision(path, d):
        """Return the git revision (sha1 of HEAD) of the checkout at 'path',
        or "<unknown>" when it cannot be determined."""
        cmd = "cd %s; PATH=%s git show-ref HEAD 2>/dev/null" % (path, d.getVar("PATH", 1))
        # show-ref prints "<sha1> <ref>"; the first field is the revision.
        rev = os.popen(cmd).read().split(" ")[0].rstrip()
        if rev:
                return rev
        return "<unknown>"
935 addhandler base_eventhandler
# Global event handler: reports task completion / unsatisfied dependencies,
# prints the build configuration banner, clears stamps for 'rebuild' requests
# and optionally appends messages to an EVENTLOG file.
python base_eventhandler() {
        from bb import note, error, data
        from bb.event import Handled, NotHandled, getName
        # 'e' is the event object injected by bitbake for event handlers.
        name = getName(e)
        if name == "TaskCompleted":
                msg = "package %s: task %s is complete." % (data.getVar("PF", e.data, 1), e.task)
        elif name == "UnsatisfiedDep":
                msg = "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
        else:
                return NotHandled
        # Only need to output when using 1.8 or lower, the UI code handles it
        # otherwise
        if (int(bb.__version__.split(".")[0]) <= 1 and int(bb.__version__.split(".")[1]) <= 8):
                if msg:
                        note(msg)
        # NOTE(review): only TaskCompleted/UnsatisfiedDep events reach this
        # point (everything else returned NotHandled above), so the
        # BuildStarted and StampUpdate branches below look unreachable —
        # confirm whether the early return is intended.
        if name.startswith("BuildStarted"):
                bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
                # Print the standard "Build Configuration" banner.
                statusvars = bb.data.getVar("BUILDCFG_VARS", e.data, 1).split()
                statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
                statusmsg = "\n%s\n%s\n" % (bb.data.getVar("BUILDCFG_HEADER", e.data, 1), "\n".join(statuslines))
                print statusmsg
                # Abort the build if any required configuration variable is unset.
                needed_vars = bb.data.getVar("BUILDCFG_NEEDEDVARS", e.data, 1).split()
                pesteruser = []
                for v in needed_vars:
                        val = bb.data.getVar(v, e.data, 1)
                        if not val or val == 'INVALID':
                                pesteruser.append(v)
                if pesteruser:
                        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
        #
        # Handle removing stamps for 'rebuild' task
        #
        if name.startswith("StampUpdate"):
                for (fn, task) in e.targets:
                        #print "%s %s" % (task, fn)
                        if task == "do_rebuild":
                                # Remove all stamps for this recipe and flag it for cleaning.
                                dir = "%s.*" % e.stampPrefix[fn]
                                bb.note("Removing stamps: " + dir)
                                os.system('rm -f '+ dir)
                                os.system('touch ' + e.stampPrefix[fn] + '.needclean')
        # NOTE(review): 'data' here is the bb.data module, and e.__dict__ keys
        # are attribute-name strings, so this test never finds it — the
        # EVENTLOG block below appears unreachable; possibly meant 'data' as a
        # string.  Confirm before relying on EVENTLOG.
        if not data in e.__dict__:
                return NotHandled
        log = data.getVar("EVENTLOG", e.data, 1)
        if log:
                logfile = file(log, "a")
                logfile.write("%s\n" % msg)
                logfile.close()
        return NotHandled
995 addtask configure after do_unpack do_patch
996 do_configure[dirs] = "${S} ${B}"
997 do_configure[deptask] = "do_populate_staging"
# Default do_configure implementation: a no-op (':' is the shell null command).
base_do_configure() {
        :
1002 addtask compile after do_configure
1003 do_compile[dirs] = "${S} ${B}"
# Default do_compile: run make when a Makefile is present, otherwise just
# note that there is nothing to build.
base_do_compile() {
        if [ -e Makefile -o -e makefile ]; then
                oe_runmake || die "make failed"
        else
                oenote "nothing to compile"
        fi
# Copy directory $1 into $2 (creating $2 first).  Empty source directories
# are removed up front and thus silently skipped.
sysroot_stage_dir() {
        src="$1"
        dest="$2"
        # This will remove empty directories so we can ignore them
        rmdir "$src" 2> /dev/null || true
        if [ -d "$src" ]; then
                mkdir -p "$dest"
                cp -fpPR "$src"/* "$dest"
        fi
# Stage a library directory $1 into $2: first rewrite any libtool .la files
# (mark them not-installed and point their dependency_libs at the staging
# libdir), then copy the whole tree with sysroot_stage_dir.
sysroot_stage_libdir() {
        src="$1"
        dest="$2"
        olddir=`pwd`
        cd $src
        las=$(find . -name \*.la -type f)
        cd $olddir
        echo "Found la files: $las"
        for i in $las
        do
                # installed=yes -> installed=no, and remap WORKDIR / ${libdir}
                # paths inside dependency_libs to ${STAGING_LIBDIR}, in place.
                sed -e 's/^installed=yes$/installed=no/' \
                    -e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
                    -e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
                    -i $src/$i
        done
        sysroot_stage_dir $src $dest
1043 sysroot_stage_dirs() {
1044         from="$1"
1045         to="$2"
1047         sysroot_stage_dir $from${includedir} $to${STAGING_INCDIR}
1048         if [ "${BUILD_SYS}" = "${HOST_SYS}" ]; then
1049                 sysroot_stage_dir $from${bindir} $to${STAGING_DIR_HOST}${bindir}
1050                 sysroot_stage_dir $from${sbindir} $to${STAGING_DIR_HOST}${sbindir}
1051                 sysroot_stage_dir $from${base_bindir} $to${STAGING_DIR_HOST}${base_bindir}
1052                 sysroot_stage_dir $from${base_sbindir} $to${STAGING_DIR_HOST}${base_sbindir}
1053                 sysroot_stage_dir $from${libexecdir} $to${STAGING_DIR_HOST}${libexecdir}
1054                 if [ "${prefix}/lib" != "${libdir}" ]; then
1055                         # python puts its files in here, make sure they are staged as well
1056                         autotools_stage_dir $from/${prefix}/lib $to${STAGING_DIR_HOST}${prefix}/lib
1057                 fi
1058         fi
1059         if [ -d $from${libdir} ]
1060         then
1061                 sysroot_stage_libdir $from/${libdir} $to${STAGING_LIBDIR}
1062         fi
1063         if [ -d $from${base_libdir} ]
1064         then
1065                 sysroot_stage_libdir $from${base_libdir} $to${STAGING_DIR_HOST}${base_libdir}
1066         fi
1067         sysroot_stage_dir $from${datadir} $to${STAGING_DATADIR}
# Stage everything installed into ${D} across to the sysroot destdir.
sysroot_stage_all() {
        sysroot_stage_dirs ${D} ${SYSROOT_DESTDIR}
def is_legacy_staging(d):
    """Decide whether this recipe still uses the old do_stage mechanism.

    Staging is considered non-legacy when there is no do_stage at all, when
    do_stage is one of the known install-based implementations, or when the
    recipe declares NATIVE_INSTALL_WORKS.  PSTAGE_BROKEN_DESTDIR and
    FORCE_LEGACY_STAGING each force legacy mode regardless.
    """
    stagefunc = bb.data.getVar('do_stage', d, True)
    if stagefunc is None:
        legacy = False
    elif stagefunc.strip() == "autotools_stage_all":
        legacy = False
    elif (stagefunc.strip() == "do_stage_native"
          and bb.data.getVar('AUTOTOOLS_NATIVE_STAGE_INSTALL', d, 1) == "1"):
        legacy = False
    elif bb.data.getVar('NATIVE_INSTALL_WORKS', d, 1) == "1":
        legacy = False
    else:
        legacy = True
    # These two override everything above.
    if bb.data.getVar('PSTAGE_BROKEN_DESTDIR', d, 1) == "1":
        legacy = True
    if bb.data.getVar('FORCE_LEGACY_STAGING', d, 1) == "1":
        legacy = True
    return legacy
1091 do_populate_staging[dirs] = "${STAGING_DIR_TARGET}/${bindir} ${STAGING_DIR_TARGET}/${libdir} \
1092                              ${STAGING_DIR_TARGET}/${includedir} \
1093                              ${STAGING_BINDIR_NATIVE} ${STAGING_LIBDIR_NATIVE} \
1094                              ${STAGING_INCDIR_NATIVE} \
1095                              ${STAGING_DATADIR} \
1096                              ${S} ${B}"
# Could run right after do_compile, but populate_staging and do_install shouldn't run at the same time
1099 addtask populate_staging after do_install
1101 SYSROOT_PREPROCESS_FUNCS ?= ""
1102 SYSROOT_DESTDIR = "${WORKDIR}/sysroot-destdir/"
1103 SYSROOT_LOCK = "${STAGING_DIR}/staging.lock"
# Hook run (under the staging lock) before legacy do_stage; default no-op.
python populate_staging_prehook () {
        return
# Hook run after the SYSROOT_PREPROCESS_FUNCS in legacy staging; default no-op.
python populate_staging_posthook () {
        return
# Hook executed by do_populate_staging; default no-op (presumably overridden
# by packaged-staging support elsewhere — confirm).
packagedstaging_fastpath () {
        :
python do_populate_staging () {
    #
    # if do_stage exists, we're legacy. In that case run the do_stage,
    # modify the SYSROOT_DESTDIR variable and then run the staging preprocess
    # functions against staging directly.
    #
    # Otherwise setup a destdir, copy the results from do_install
    # and run the staging preprocess against that
    #
    # NOTE(review): 'pstageactive' and 'stagefunc' are computed but never used
    # below (is_legacy_staging() re-reads do_stage itself); kept as-is.
    pstageactive = (bb.data.getVar("PSTAGING_ACTIVE", d, True) == "1")
    lockfile = bb.data.getVar("SYSROOT_LOCK", d, True)
    stagefunc = bb.data.getVar('do_stage', d, True)
    legacy = is_legacy_staging(d)
    if legacy:
        # Legacy path: do_stage writes straight into staging, so clear
        # SYSROOT_DESTDIR and serialise the whole sequence with the lock.
        bb.data.setVar("SYSROOT_DESTDIR", "", d)
        bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
        if bb.data.getVarFlags('do_stage', d) is None:
            bb.fatal("This recipe (%s) has a do_stage_prepend or do_stage_append and do_stage now doesn't exist. Please rename this to do_stage()" % bb.data.getVar("FILE", d, True))
        lock = bb.utils.lockfile(lockfile)
        bb.build.exec_func('populate_staging_prehook', d)
        bb.build.exec_func('do_stage', d)
        for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
            bb.build.exec_func(f, d)
        bb.build.exec_func('populate_staging_posthook', d)
        bb.utils.unlockfile(lock)
    else:
        # Modern path: stage the do_install output into a private destdir,
        # run the preprocess functions there, then copy into staging under
        # the lock.
        # NOTE(review): 'dest' is unused (see the commented-out cp below).
        dest = bb.data.getVar('D', d, True)
        sysrootdest = bb.data.expand('${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}', d)
        bb.mkdirhier(sysrootdest)
        bb.build.exec_func("sysroot_stage_all", d)
        #os.system('cp -pPR %s/* %s/' % (dest, sysrootdest))
        for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
            bb.build.exec_func(f, d)
        bb.build.exec_func("packagedstaging_fastpath", d)
        lock = bb.utils.lockfile(lockfile)
        os.system(bb.data.expand('cp -pPR ${SYSROOT_DESTDIR}${TMPDIR}/* ${TMPDIR}/', d))
        bb.utils.unlockfile(lock)
1158 addtask install after do_compile
1159 do_install[dirs] = "${D} ${S} ${B}"
# Remove and re-create ${D} so that it is guaranteed to be empty
1161 do_install[cleandirs] = "${D}"
# Default do_install implementation: a no-op.
base_do_install() {
        :
# Default do_package implementation: a no-op.
base_do_package() {
        :
1171 addtask build after do_populate_staging
1172 do_build = ""
1173 do_build[func] = "1"
1175 inherit packagedata
1177 # Functions that update metadata based on files outputted
1178 # during the build process.
def explode_deps(s):
        """Split a dependency string into a list, keeping parenthesised
        version constraints attached to the package they follow.

        e.g. "foo (>= 1.0) bar" -> ["foo (>= 1.0)", "bar"]
        """
        deps = []
        group = None    # tokens of an open "(...)" constraint, or None
        for token in s.split():
                if token.startswith('('):
                        group = []
                if group is not None:
                        group.append(token)
                        if token.endswith(')'):
                                # Constraint complete: glue it onto the
                                # preceding package name.
                                deps[-1] += ' ' + ' '.join(group)
                                group = None
                else:
                        deps.append(token)
        return deps
1197 # Make sure MACHINE isn't exported
1198 # (breaks binutils at least)
1199 MACHINE[unexport] = "1"
1201 # Make sure TARGET_ARCH isn't exported
# (breaks Makefiles that use implicit rules, e.g. quilt's, as GNU make has
# this variable built in, undocumented)
1204 TARGET_ARCH[unexport] = "1"
1206 # Make sure DISTRO isn't exported
1207 # (breaks sysvinit at least)
1208 DISTRO[unexport] = "1"
def base_after_parse(d):
    """Post-parse fixups: compatibility checks, per-recipe variable
    overrides, git-native fetch dependency, and 'multimachine'
    PACKAGE_ARCH/MULTIMACH_ARCH handling."""
    import exceptions
    # NOTE(review): 'exceptions' appears unused here; kept as-is.
    # Skip incompatible recipes unless we are only fetching for a mirror.
    source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
    if not source_mirror_fetch:
        need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
        if need_host:
            import re
            this_host = bb.data.getVar('HOST_SYS', d, 1)
            if not re.match(need_host, this_host):
                raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
        need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
        if need_machine:
            import re
            this_machine = bb.data.getVar('MACHINE', d, 1)
            if this_machine and not re.match(need_machine, this_machine):
                raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
    pn = bb.data.getVar('PN', d, 1)
    # OBSOLETE in bitbake 1.7.4
    srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
    if srcdate != None:
        bb.data.setVar('SRCDATE', srcdate, d)
    # Allow USE_NLS to be overridden per recipe via USE_NLS_<pn>.
    use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
    if use_nls != None:
        bb.data.setVar('USE_NLS', use_nls, d)
    # Git packages should DEPEND on git-native
    srcuri = bb.data.getVar('SRC_URI', d, 1)
    if "git://" in srcuri:
        depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
        depends = depends + " git-native:do_populate_staging"
        bb.data.setVarFlag('do_fetch', 'depends', depends, d)
    # 'multimachine' handling
    mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
    pkg_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return
    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
    if override != '0':
        # Candidate machine-specific file directories next to the recipe.
        paths = []
        for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
            path = bb.data.expand(os.path.join("${FILE_DIRNAME}", p, "${MACHINE}"), d)
            if os.path.isdir(path):
                paths.append(path)
        if len(paths) != 0:
            # Any file:// source coming from one of those directories makes
            # the whole recipe machine-specific.
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                local = bb.data.expand(bb.fetch.localpath(s, d), d)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
                        bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
                        bb.data.setVar('MULTIMACH_ARCH', mach_arch, d)
                        return
    # Otherwise MULTIMACH_ARCH follows any machine-specific sub-package.
    multiarch = pkg_arch
    packages = bb.data.getVar('PACKAGES', d, 1).split()
    for pkg in packages:
        pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            multiarch = mach_arch
            break
    bb.data.setVar('MULTIMACH_ARCH', multiarch, d)
# Anonymous function, executed by bitbake when the recipe is parsed.
python () {
    base_after_parse(d)
    # Flag recipes that still carry do_stage_prepend/append fragments but no
    # longer define do_stage itself.
    if is_legacy_staging(d):
        bb.debug(1, "Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
        if bb.data.getVarFlags('do_stage', d) is None:
            bb.error("This recipe (%s) has a do_stage_prepend or do_stage_append and do_stage now doesn't exist. Please rename this to do_stage()" % bb.data.getVar("FILE", d, True))
def check_app_exists(app, d):
        """Return True when the (variable-expanded) program 'app' can be
        found on the configured PATH."""
        from bb import which, data
        expanded = data.expand(app, d)
        search_path = data.getVar('PATH', d, 1)
        # which() yields an empty string when nothing is found.
        return bool(which(search_path, expanded))
def check_gcc3(data):
        """Find a usable gcc 3.x on the build host.

        Primarily used by qemu, which needs a working gcc-3.4.x.  Probes the
        candidate names in order of preference, since not every host-provided
        gcc-3.x will work, and returns the first one found on PATH, or False.
        """
        gcc3_versions = 'gcc-3.4.6 gcc-3.4.4 gcc34 gcc-3.4 gcc-3.4.7 gcc-3.3 gcc33 gcc-3.3.6 gcc-3.2 gcc32'
        for candidate in gcc3_versions.split():
                if check_app_exists(candidate, data):
                        return candidate
        return False
1323 # Patch handling
1324 inherit patch
1326 # Configuration data from site files
1327 # Move to autotools.bbclass?
1328 inherit siteinfo
1330 EXPORT_FUNCTIONS do_setscene do_clean do_mrproper do_distclean do_fetch do_unpack do_configure do_compile do_install do_package do_populate_pkgs do_rebuild do_fetchall
1332 MIRRORS[func] = "0"
1333 MIRRORS () {
1334 ${DEBIAN_MIRROR}/main   http://snapshot.debian.net/archive/pool
1335 ${DEBIAN_MIRROR}        ftp://ftp.de.debian.org/debian/pool
1336 ${DEBIAN_MIRROR}        ftp://ftp.au.debian.org/debian/pool
1337 ${DEBIAN_MIRROR}        ftp://ftp.cl.debian.org/debian/pool
1338 ${DEBIAN_MIRROR}        ftp://ftp.hr.debian.org/debian/pool
1339 ${DEBIAN_MIRROR}        ftp://ftp.fi.debian.org/debian/pool
1340 ${DEBIAN_MIRROR}        ftp://ftp.hk.debian.org/debian/pool
1341 ${DEBIAN_MIRROR}        ftp://ftp.hu.debian.org/debian/pool
1342 ${DEBIAN_MIRROR}        ftp://ftp.ie.debian.org/debian/pool
1343 ${DEBIAN_MIRROR}        ftp://ftp.it.debian.org/debian/pool
1344 ${DEBIAN_MIRROR}        ftp://ftp.jp.debian.org/debian/pool
1345 ${DEBIAN_MIRROR}        ftp://ftp.no.debian.org/debian/pool
1346 ${DEBIAN_MIRROR}        ftp://ftp.pl.debian.org/debian/pool
1347 ${DEBIAN_MIRROR}        ftp://ftp.ro.debian.org/debian/pool
1348 ${DEBIAN_MIRROR}        ftp://ftp.si.debian.org/debian/pool
1349 ${DEBIAN_MIRROR}        ftp://ftp.es.debian.org/debian/pool
1350 ${DEBIAN_MIRROR}        ftp://ftp.se.debian.org/debian/pool
1351 ${DEBIAN_MIRROR}        ftp://ftp.tr.debian.org/debian/pool
1352 ${GNU_MIRROR}   ftp://mirrors.kernel.org/gnu
1353 ${GNU_MIRROR}   ftp://ftp.cs.ubc.ca/mirror2/gnu
1354 ${GNU_MIRROR}   ftp://sunsite.ust.hk/pub/gnu
1355 ${GNU_MIRROR}   ftp://ftp.ayamura.org/pub/gnu
1356 ${KERNELORG_MIRROR}     http://www.kernel.org/pub
1357 ${KERNELORG_MIRROR}     ftp://ftp.us.kernel.org/pub
1358 ${KERNELORG_MIRROR}     ftp://ftp.uk.kernel.org/pub
1359 ${KERNELORG_MIRROR}     ftp://ftp.hk.kernel.org/pub
1360 ${KERNELORG_MIRROR}     ftp://ftp.au.kernel.org/pub
1361 ${KERNELORG_MIRROR}     ftp://ftp.jp.kernel.org/pub
1362 ftp://ftp.gnupg.org/gcrypt/     ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
1363 ftp://ftp.gnupg.org/gcrypt/     ftp://ftp.surfnet.nl/pub/security/gnupg/
1364 ftp://ftp.gnupg.org/gcrypt/     http://gulus.USherbrooke.ca/pub/appl/GnuPG/
1365 ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
1366 ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
1367 ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
1368 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
1369 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
1370 ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
1371 ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
1372 ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
1373 http://ftp.info-zip.org/pub/infozip/src/ http://mirror.switch.ch/ftp/mirror/infozip/src/
1374 http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/
1375 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/  ftp://ftp.cerias.purdue.edu/pub/tools/unix/sysutils/lsof/
1376 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/  ftp://ftp.tau.ac.il/pub/unix/admin/
1377 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/  ftp://ftp.cert.dfn.de/pub/tools/admin/lsof/
1378 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/  ftp://ftp.fu-berlin.de/pub/unix/tools/lsof/
1379 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/  ftp://ftp.kaizo.org/pub/lsof/
1380 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/  ftp://ftp.tu-darmstadt.de/pub/sysadmin/lsof/
1381 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/  ftp://ftp.tux.org/pub/sites/vic.cc.purdue.edu/tools/unix/lsof/
1382 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/  ftp://gd.tuwien.ac.at/utils/admin-tools/lsof/
1383 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/  ftp://sunsite.ualberta.ca/pub/Mirror/lsof/
1384 ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/  ftp://the.wiretapped.net/pub/security/host-security/lsof/
1385 http://www.apache.org/dist  http://archive.apache.org/dist
1386 ftp://.*/.*     http://mirrors.openembedded.org/
1387 https?$://.*/.* http://mirrors.openembedded.org/
1388 ftp://.*/.*     http://sources.openembedded.org/
1389 https?$://.*/.* http://sources.openembedded.org/