# Default task BitBake runs for a recipe when none is named on the command line.
1 BB_DEFAULT_TASK ?= "build"
# Event handler: when the configuration has been parsed, prepend each
# BBPATH entry's lib/ subdirectory to sys.path so metadata python code can
# import layer-provided modules.
# NOTE(review): original lines 14-17 and the closing brace are missing from
# this excerpt — presumably they import os/sys; confirm against the full file.
12 python sys_path_eh () {
13 if isinstance(e, bb.event.ConfigParsed):
18 bbpath = e.data.getVar("BBPATH", True).split(":")
19 sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]
# Expose `value` under `name` to all metadata python code: use bitbake's
# _context dict when available, otherwise fall back to injecting a builtin.
# NOTE(review): original line 25 (presumably `else:`) is missing from this
# excerpt.
21 def inject(name, value):
22 """Make a python object accessible from everywhere for the metadata"""
23 if hasattr(bb.utils, "_context"):
24 bb.utils._context[name] = value
26 __builtins__[name] = value
# Register the sys.path-extending event handler defined above.
36 addhandler sys_path_eh
# Fragment of the oe_runmake shell helper: log the make invocation, then run
# ${MAKE} with EXTRA_OEMAKE plus caller-supplied args, dying on failure.
# NOTE(review): the enclosing function header is missing from this excerpt.
56 oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
57 ${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
# Fragment of base_deps(d): compute the default build-time DEPENDS for a
# recipe (used by the DEPENDS_prepend assignments below).
# NOTE(review): the `def base_deps(d):` header and several interior lines
# (including the rest of the PN tuple on line 67) are missing from this
# excerpt.
62 # Ideally this will check a flag so we will operate properly in
63 # the case where host == build == target, for now we don't work in
66 deps = "coreutils-native"
67 if bb.data.getVar('PN', d, True) in ("shasum-native", "stagemanager-native",
71 # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
72 # we need that built is the responsibility of the patch function / class, not
74 if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
# Cross builds (HOST_SYS != BUILD_SYS) additionally need a cross toolchain
# and libc.
75 if (bb.data.getVar('HOST_SYS', d, 1) !=
76 bb.data.getVar('BUILD_SYS', d, 1)):
77 deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
# Native recipes need kernel headers, except the bootstrap set that must
# build before linux-libc-headers-native itself exists.
78 elif bb.data.inherits_class('native', d) and \
79 bb.data.getVar('PN', d, True) not in \
80 ("linux-libc-headers-native", "quilt-native",
81 "unifdef-native", "shasum-native",
82 "stagemanager-native", "coreutils-native"):
83 deps += " linux-libc-headers-native"
# Prepend the computed base dependencies for target, native and nativesdk
# variants of every recipe.
86 DEPENDS_prepend="${@base_deps(d)} "
87 DEPENDS_virtclass-native_prepend="${@base_deps(d)} "
88 DEPENDS_virtclass-nativesdk_prepend="${@base_deps(d)} "
# Functions executed by base_do_setscene below.
91 SCENEFUNCS += "base_scenefunction"
# Setscene helper: if a "${STAMP}.needclean" marker exists (touched by the
# StampUpdate branch of base_eventhandler further down), run do_clean first.
# NOTE(review): the closing brace is missing from this excerpt.
93 python base_scenefunction () {
94 stamp = bb.data.getVar('STAMP', d, 1) + ".needclean"
95 if os.path.exists(stamp):
96 bb.build.exec_func("do_clean", d)
# Run every function listed in SCENEFUNCS (empty SCENEFUNCS is tolerated via
# the `or ''`), then write the do_setscene stamp itself if absent.
# NOTE(review): the closing brace is missing from this excerpt.
99 python base_do_setscene () {
100 for f in (bb.data.getVar('SCENEFUNCS', d, 1) or '').split():
101 bb.build.exec_func(f, d)
102 if not os.path.exists(bb.data.getVar('STAMP', d, 1) + ".do_setscene"):
103 bb.build.make_stamp("do_setscene", d)
# do_setscene manages its own stamp (see base_do_setscene above); schedule it
# before fetching.
105 do_setscene[selfstamp] = "1"
106 addtask setscene before do_fetch
# Fetch runs inside the download directory.
109 do_fetch[dirs] = "${DL_DIR}"
# Fetch every SRC_URI entry via bb.fetch, translating each fetcher exception
# into bb.build.FuncFailed with a specific message, then verify archive
# checksums for remote (http/https/ftp/ftps) URLs via base_chk_file.
# NOTE(review): several interior lines — including both `try:` statements and
# the function's closing brace — are missing from this excerpt.
110 python base_do_fetch() {
113 localdata = bb.data.createCopy(d)
114 bb.data.update_data(localdata)
116 src_uri = bb.data.getVar('SRC_URI', localdata, 1)
121 bb.fetch.init(src_uri.split(),d)
122 except bb.fetch.NoMethodError:
123 (type, value, traceback) = sys.exc_info()
124 raise bb.build.FuncFailed("No method: %s" % value)
125 except bb.MalformedUrl:
126 (type, value, traceback) = sys.exc_info()
127 raise bb.build.FuncFailed("Malformed URL: %s" % value)
130 bb.fetch.go(localdata)
131 except bb.fetch.MissingParameterError:
132 (type, value, traceback) = sys.exc_info()
133 raise bb.build.FuncFailed("Missing parameters: %s" % value)
134 except bb.fetch.FetchError:
135 (type, value, traceback) = sys.exc_info()
136 raise bb.build.FuncFailed("Fetch failed: %s" % value)
137 except bb.fetch.MD5SumError:
138 (type, value, traceback) = sys.exc_info()
139 raise bb.build.FuncFailed("MD5 failed: %s" % value)
141 (type, value, traceback) = sys.exc_info()
142 raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
# Checksum validation, one pass per fetched URL.
145 pv = bb.data.getVar('PV', d, True)
146 pn = bb.data.getVar('PN', d, True)
150 for url in src_uri.split():
151 localpath = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
152 (type,host,path,_,_,params) = bb.decodeurl(url)
153 uri = "%s://%s%s" % (type,host,path)
155 if type in [ "http", "https", "ftp", "ftps" ]:
156 # We provide a default shortcut of plain [] for the first fetch uri
157 # Explicit names in any uri overrides this default.
158 if not "name" in params and first_uri:
# A URL with no checksum entry is fatal unless insecure downloads are
# explicitly allowed; a present-but-wrong checksum always fails.
161 if not base_chk_file(pn, pv, uri, localpath, params, d):
162 if not bb.data.getVar("OE_ALLOW_INSECURE_DOWNLOADS", d, True):
163 bb.fatal("%s-%s: %s cannot check archive integrity" % (pn,pv,uri))
165 bb.note("%s-%s: %s cannot check archive integrity" % (pn,pv,uri))
167 raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
# Unpack a single fetched file into the current working directory, picking an
# extraction/copy command from the file extension, then running it through a
# shell with the metadata PATH.
# NOTE(review): many interior lines — including the zip branch body, the
# `else` copy branches and the subdir chdir/cleanup — are missing from this
# excerpt; the visible lines are a partial listing.
170 def oe_unpack_file(file, data, url = None):
173 url = "file://%s" % file
# efile: the decompressed output name for plain .gz/.bz2/.Z files (the input
# name with its final extension stripped), placed in WORKDIR.
174 dots = file.split(".")
175 if dots[-1] in ['gz', 'bz2', 'Z']:
176 efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
# Select a shell command by extension; tar variants preserve no ownership.
180 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
181 if file.endswith('.tar'):
182 cmd = 'tar x --no-same-owner -f %s' % file
183 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
184 cmd = 'tar xz --no-same-owner -f %s' % file
185 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
186 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
187 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
188 cmd = 'gzip -dc %s > %s' % (file, efile)
189 elif file.endswith('.bz2'):
190 cmd = 'bzip2 -dc %s > %s' % (file, efile)
191 elif file.endswith('.tar.xz'):
192 cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
193 elif file.endswith('.xz'):
194 cmd = 'xz -dc %s > %s' % (file, efile)
195 elif file.endswith('.zip') or file.endswith('.jar'):
199 cmd = "%s '%s'" % (cmd, file)
200 elif (type == "file" and file.endswith('.patch') or file.endswith('.diff')) and parm.get('apply') != 'no':
201 # patch and diff files are special and need not be copied to workdir
# Directories: recreate the FILESPATH-relative destdir and copy recursively.
203 elif os.path.isdir(file):
205 filespath = bb.data.getVar("FILESPATH", data, 1).split(":")
207 if file[0:len(fp)] == fp:
208 destdir = file[len(fp):file.rfind('/')]
209 destdir = destdir.strip('/')
212 elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
213 os.makedirs("%s/%s" % (os.getcwd(), destdir))
216 cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
# Plain files (not patches being applied): copy into the destination dir.
218 if not 'patch' in parm and parm.get('apply') != 'yes':
219 # The "destdir" handling was specifically done for FILESPATH
220 # items. So, only do so for file:// entries.
223 dest = os.path.dirname(path) or "."
225 # this case is for backward compatiblity with older version
226 # of bitbake which do not have the fix
227 # http://cgit.openembedded.org/cgit.cgi/bitbake/commit/?id=ca257adc587bb0937ea76d8b32b654fdbf4192b8
228 # this should not be needed once all releases of bitbake has this fix
230 dest = host + os.path.dirname(path) or "."
233 bb.mkdirhier("%s" % os.path.join(os.getcwd(),dest))
234 cmd = 'cp %s %s' % (file, os.path.join(os.getcwd(), dest))
238 dest = os.path.join(os.getcwd(), path)
240 dest = os.path.join(os.getcwd(), os.path.join(host, path))
# Avoid copying a file onto itself.
241 if os.path.exists(dest):
242 if os.path.samefile(file, dest):
244 # Change to subdir before executing command
245 save_cwd = os.getcwd();
247 newdir = ("%s/%s" % (os.getcwd(), parm['subdir']))
# Run the chosen command under the metadata PATH; return code is `ret`.
251 cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
252 bb.note("Unpacking %s to %s/" % (base_path_out(file, data), base_path_out(os.getcwd(), data)))
253 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
# Unpack runs after fetch, inside the work directory.
259 addtask unpack after do_fetch
260 do_unpack[dirs] = "${WORKDIR}"
# Resolve each SRC_URI entry to its local path and unpack it via
# oe_unpack_file, failing the task on malformed URIs, missing local files,
# or a non-zero unpack result.
# NOTE(review): interior lines (including the `try:` and the conditions
# guarding lines 276/280) and the closing brace are missing from this
# excerpt.
261 python base_do_unpack() {
264 localdata = bb.data.createCopy(d)
265 bb.data.update_data(localdata)
267 src_uri = bb.data.getVar('SRC_URI', localdata, True)
270 for url in src_uri.split():
272 local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
273 except bb.MalformedUrl, e:
274 raise bb.build.FuncFailed('Unable to generate local path for malformed uri: %s' % e)
276 raise bb.build.FuncFailed('Unable to locate local file for %s' % url)
277 local = os.path.realpath(local)
278 ret = oe_unpack_file(local, localdata, url)
280 raise bb.build.FuncFailed()
# Catch-all build event handler: formats progress messages, prints the build
# configuration banner on BuildStarted, validates required variables, removes
# stamps for do_rebuild, and appends messages to EVENTLOG if set.
283 addhandler base_eventhandler
# NOTE(review): many interior lines (event-name dispatch, several message
# branches, and the closing brace) are missing from this excerpt.
284 python base_eventhandler() {
285 from bb import note, error, data
286 from bb.event import getName
290 if name == "TaskCompleted":
291 msg = "package %s: task %s is complete." % (data.getVar("PF", e.data, 1), e.task)
292 elif name == "UnsatisfiedDep":
293 msg = "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
297 # Only need to output when using 1.8 or lower, the UI code handles it
299 if (int(bb.__version__.split(".")[0]) <= 1 and int(bb.__version__.split(".")[1]) <= 8):
# On build start: record the bitbake version and print the configuration
# banner assembled from BUILDCFG_VARS.
303 if name.startswith("BuildStarted"):
304 bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
305 statusvars = bb.data.getVar("BUILDCFG_VARS", e.data, 1).split()
306 statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
307 statusmsg = "\n%s\n%s\n" % (bb.data.getVar("BUILDCFG_HEADER", e.data, 1), "\n".join(statuslines))
# Abort early if any BUILDCFG_NEEDEDVARS variable is unset or 'INVALID'.
310 needed_vars = bb.data.getVar("BUILDCFG_NEEDEDVARS", e.data, 1).split()
312 for v in needed_vars:
313 val = bb.data.getVar(v, e.data, 1)
314 if not val or val == 'INVALID':
317 bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
320 # Handle removing stamps for 'rebuild' task
322 if name.startswith("StampUpdate"):
323 for (fn, task) in e.targets:
324 #print "%s %s" % (task, fn)
325 if task == "do_rebuild":
326 dir = "%s.*" % e.stampPrefix[fn]
327 bb.note("Removing stamps: " + dir)
# Also drop the ".needclean" marker consumed by base_scenefunction above.
328 os.system('rm -f '+ dir)
329 os.system('touch ' + e.stampPrefix[fn] + '.needclean')
# Append the formatted message to the EVENTLOG file, if configured.
331 if not data in e.__dict__:
334 log = data.getVar("EVENTLOG", e.data, 1)
336 logfile = file(log, "a")
337 logfile.write("%s\n" % msg)
# Configure runs after unpack/patch in ${S}/${B}, once all DEPENDS have
# populated the sysroot.
341 addtask configure after do_unpack do_patch
342 do_configure[dirs] = "${S} ${B}"
343 do_configure[deptask] = "do_populate_sysroot"
# Default do_configure implementation; its body is not shown in this excerpt.
344 base_do_configure() {
# Compile runs after configure, inside ${S}/${B}.
348 addtask compile after do_configure
349 do_compile[dirs] = "${S} ${B}"
# Fragment of the default do_compile: run oe_runmake only when a Makefile
# (or makefile) exists; otherwise just note there is nothing to build.
# NOTE(review): the enclosing function header and the else/fi lines are
# missing from this excerpt.
351 if [ -e Makefile -o -e makefile ]; then
352 oe_runmake || die "make failed"
354 oenote "nothing to compile"
# Install runs after compile in ${D}/${S}/${B}.
358 addtask install after do_compile
359 do_install[dirs] = "${D} ${S} ${B}"
360 # Remove and re-create ${D} so that is it guaranteed to be empty
361 do_install[cleandirs] = "${D}"
# Fragment of the after-parse (anonymous python) logic: skip recipes that are
# incompatible with the current host/machine/target, apply per-recipe
# SRCDATE/USE_NLS overrides, add git-native/unzip-native tool dependencies to
# do_fetch/do_unpack, and promote PACKAGE_ARCH to MACHINE_ARCH for recipes
# with machine-specific files or sub-packages.
# NOTE(review): the enclosing function header and many interior lines
# (try/except bodies, some else branches, loop headers) are missing from this
# excerpt.
# Compatibility checks are skipped entirely when fetching from a source
# mirror.
378 source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
379 if not source_mirror_fetch:
380 need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
383 this_host = bb.data.getVar('HOST_SYS', d, 1)
384 if not re.match(need_host, this_host):
385 raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
387 need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
390 this_machine = bb.data.getVar('MACHINE', d, 1)
391 if this_machine and not re.match(need_machine, this_machine):
392 raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
394 need_target = bb.data.getVar('COMPATIBLE_TARGET_SYS', d, 1)
397 this_target = bb.data.getVar('TARGET_SYS', d, 1)
398 if this_target and not re.match(need_target, this_target):
399 raise bb.parse.SkipPackage("incompatible with target system %s" % this_target)
# Per-recipe overrides: SRCDATE_<pn> and USE_NLS_<pn>.
401 pn = bb.data.getVar('PN', d, 1)
403 # OBSOLETE in bitbake 1.7.4
404 srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
406 bb.data.setVar('SRCDATE', srcdate, d)
408 use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
410 bb.data.setVar('USE_NLS', use_nls, d)
412 setup_checksum_deps(d)
414 # Git packages should DEPEND on git-native
415 srcuri = bb.data.getVar('SRC_URI', d, 1)
416 if "git://" in srcuri:
417 depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
418 depends = depends + " git-native:do_populate_sysroot"
419 bb.data.setVarFlag('do_fetch', 'depends', depends, d)
421 # unzip-native should already be staged before unpacking ZIP recipes
422 need_unzip = bb.data.getVar('NEED_UNZIP_FOR_UNPACK', d, 1)
423 src_uri = bb.data.getVar('SRC_URI', d, 1)
425 if ".zip" in src_uri or need_unzip == "1":
426 depends = bb.data.getVarFlag('do_unpack', 'depends', d) or ""
427 depends = depends + " unzip-native:do_populate_sysroot"
428 bb.data.setVarFlag('do_unpack', 'depends', depends, d)
430 # 'multimachine' handling
431 mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
432 pkg_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
434 if (pkg_arch == mach_arch):
435 # Already machine specific - nothing further to do
439 # We always try to scan SRC_URI for urls with machine overrides
440 # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
442 override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
# A recipe-local files/<MACHINE> style directory marks the recipe machine
# specific.
445 for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
446 path = bb.data.expand(os.path.join("${FILE_DIRNAME}", p, "${MACHINE}"), d)
447 if os.path.isdir(path):
# Any file:// source resolved from a machine path promotes PACKAGE_ARCH.
450 for s in srcuri.split():
451 if not s.startswith("file://"):
453 local = bb.data.expand(bb.fetch.localpath(s, d), d)
455 if local.startswith(mp):
456 #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
457 bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
458 bb.data.setVar('MULTIMACH_ARCH', mach_arch, d)
# A machine-specific PACKAGE_ARCH_<pkg> on any sub-package also sets
# MULTIMACH_ARCH.
463 packages = bb.data.getVar('PACKAGES', d, 1).split()
465 pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
467 # We could look for != PACKAGE_ARCH here but how to choose
468 # if multiple differences are present?
469 # Look through PACKAGE_ARCHS for the priority order?
470 if pkgarch and pkgarch == mach_arch:
471 multiarch = mach_arch
474 bb.data.setVar('MULTIMACH_ARCH', multiarch, d)
# Make the base_* implementations above the default do_* task functions for
# inheriting recipes.
477 EXPORT_FUNCTIONS do_setscene do_fetch do_unpack do_configure do_compile do_install do_package