# NOTE(review): this chunk is an elided, line-numbered listing of a BitBake
# class; the leading integer on each line is the original file's line number,
# and gaps in that numbering mean lines are missing from this view.
# Task bitbake runs when no task is given on the command line.
1 BB_DEFAULT_TASK ?= "build"
# Pull in packaged (prebuilt) staging support.
5 inherit packaged-staging
# Python modules the ConfigParsed handler below imports and injects into the
# metadata's python namespace.
13 OE_IMPORTS += "oe.path oe.utils oe.packagegroup sys os time"
# Event-handler fragment: fires once configuration parsing completes.
# NOTE(review): the enclosing `def`/`addhandler` lines are elided from this
# listing — confirm against the full file.
16 if isinstance(e, bb.event.ConfigParsed):
# Prepend each BBPATH entry's lib/ directory to sys.path so the OE_IMPORTS
# modules (oe.*) below can be found.
18 bbpath = e.data.getVar("BBPATH", True).split(":")
19 sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]
21 def inject(name, value):
22 """Make a python object accessible from the metadata"""
23 if hasattr(bb.utils, "_context"):
24 bb.utils._context[name] = value
# Fallback for bitbake versions without bb.utils._context: publish the
# object via __builtins__ instead (elided `else:` presumed between 24 and 26).
26 __builtins__[name] = value
# Import every module listed in OE_IMPORTS and expose its top-level package
# name (e.g. "oe" for "oe.path") to metadata python code.
28 for toimport in e.data.getVar("OE_IMPORTS", True).split():
29 imported = __import__(toimport)
30 inject(toimport.split(".", 1)[0], imported)
# Fragment of the oe_runmake shell helper (function header elided from this
# listing): log the exact make invocation, then run ${MAKE} with the recipe's
# EXTRA_OEMAKE flags plus caller-supplied args, aborting the task on failure.
53 oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
54 ${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
# Fragment of base_deps(d): computes the default build-time DEPENDS string
# prepended to every recipe. NOTE(review): the `def` line and the final
# `return deps` are elided from this listing.
59 # Ideally this will check a flag so we will operate properly in
60 # the case where host == build == target, for now we don't work in
# Baseline dependency for all recipes.
63 deps = "coreutils-native"
# Bootstrap recipes must not depend on coreutils-native (circularity);
# the elided lines presumably reset `deps` for the names matched here.
64 if bb.data.getVar('PN', d, True) in ("shasum-native", "stagemanager-native",
68 # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
69 # we need that built is the responsibility of the patch function / class, not
71 if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
# Cross build (host != build system): need the target toolchain and libc.
72 if (bb.data.getVar('HOST_SYS', d, 1) !=
73 bb.data.getVar('BUILD_SYS', d, 1)):
74 deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
# Native recipes (other than the bootstrap set listed) additionally need
# the native kernel headers.
75 elif bb.data.inherits_class('native', d) and \
76 bb.data.getVar('PN', d, True) not in \
77 ("linux-libc-headers-native", "quilt-native",
78 "unifdef-native", "shasum-native",
79 "stagemanager-native", "coreutils-native"):
80 deps += " linux-libc-headers-native"
# Prepend the computed base dependencies to DEPENDS for the target build and
# for the native / nativesdk virtual-class variants.
83 DEPENDS_prepend="${@base_deps(d)} "
84 DEPENDS_virtclass-native_prepend="${@base_deps(d)} "
85 DEPENDS_virtclass-nativesdk_prepend="${@base_deps(d)} "
# Register base_scenefunction to run from base_do_setscene below.
88 SCENEFUNCS += "base_scenefunction"
# Setscene hook: if a ".needclean" marker exists next to this recipe's stamp,
# run do_clean to wipe stale build output before proceeding.
# NOTE(review): the closing `}` of this function is elided from this listing.
90 python base_scenefunction () {
91 stamp = bb.data.getVar('STAMP', d, 1) + ".needclean"
92 if os.path.exists(stamp):
93 bb.build.exec_func("do_clean", d)
# do_setscene: run every function registered in SCENEFUNCS, then write the
# setscene stamp if it is not already present.
# NOTE(review): the closing `}` of this function is elided from this listing.
96 python base_do_setscene () {
97 for f in (bb.data.getVar('SCENEFUNCS', d, 1) or '').split():
98 bb.build.exec_func(f, d)
99 if not os.path.exists(bb.data.getVar('STAMP', d, 1) + ".do_setscene"):
100 bb.build.make_stamp("do_setscene", d)
# selfstamp: the function above manages its own stamp file.
102 do_setscene[selfstamp] = "1"
103 addtask setscene before do_fetch
# Fetching happens inside the shared download directory.
106 do_fetch[dirs] = "${DL_DIR}"
# do_fetch: download all SRC_URI entries via bb.fetch, mapping each fetcher
# exception to a task failure, then verify archive checksums for remote
# http/https/ftp/ftps downloads.
# NOTE(review): several original lines (including the `try:` statements that
# the `except` clauses below belong to, and parts of the checksum branch) are
# elided from this listing — read comments as reconstruction, to be confirmed.
107 python base_do_fetch() {
# Work on an updated copy of the datastore so OVERRIDES are applied.
110 localdata = bb.data.createCopy(d)
111 bb.data.update_data(localdata)
113 src_uri = bb.data.getVar('SRC_URI', localdata, 1)
# Initialise the fetcher for all URIs; bad URIs fail the task.
# NOTE(review): init() is passed `d` while go() below gets `localdata` —
# possibly intentional in this bitbake era, but worth confirming.
118 bb.fetch.init(src_uri.split(),d)
119 except bb.fetch.NoMethodError:
120 (type, value, traceback) = sys.exc_info()
121 raise bb.build.FuncFailed("No method: %s" % value)
122 except bb.MalformedUrl:
123 (type, value, traceback) = sys.exc_info()
124 raise bb.build.FuncFailed("Malformed URL: %s" % value)
# Perform the actual downloads; translate each fetcher error into a
# FuncFailed with a human-readable cause.
127 bb.fetch.go(localdata)
128 except bb.fetch.MissingParameterError:
129 (type, value, traceback) = sys.exc_info()
130 raise bb.build.FuncFailed("Missing parameters: %s" % value)
131 except bb.fetch.FetchError:
132 (type, value, traceback) = sys.exc_info()
133 raise bb.build.FuncFailed("Fetch failed: %s" % value)
134 except bb.fetch.MD5SumError:
135 (type, value, traceback) = sys.exc_info()
136 raise bb.build.FuncFailed("MD5 failed: %s" % value)
138 (type, value, traceback) = sys.exc_info()
139 raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
# Post-download integrity check per URI.
142 pv = bb.data.getVar('PV', d, True)
143 pn = bb.data.getVar('PN', d, True)
147 for url in src_uri.split():
148 localpath = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
149 (type,host,path,_,_,params) = bb.decodeurl(url)
150 uri = "%s://%s%s" % (type,host,path)
# Only remote archive transports are checksummed.
152 if type in [ "http", "https", "ftp", "ftps" ]:
153 # We provide a default shortcut of plain [] for the first fetch uri
154 # Explicit names in any uri overrides this default.
155 if not "name" in params and first_uri:
# If no checksum entry exists for this archive: fatal unless the user has
# explicitly opted into insecure downloads, in which case just note it.
158 if not base_chk_file(pn, pv, uri, localpath, params, d):
159 if not bb.data.getVar("OE_ALLOW_INSECURE_DOWNLOADS", d, True):
160 bb.fatal("%s-%s: %s cannot check archive integrity" % (pn,pv,uri))
162 bb.note("%s-%s: %s cannot check archive integrity" % (pn,pv,uri))
# A checksum mismatch always fails the task.
164 raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
# Unpack one fetched file into the recipe WORKDIR (honouring the "subdir"
# URL parameter and, for relative file:// URLs, the URL's directory),
# delegating the actual extraction to oe.unpack.unpack_file.
# Patches are recognised and skipped here — applying them is do_patch's job.
# NOTE(review): several lines (the body of the is_patch branch, an elided
# `else:`, the `try:` before unpack_file, and the UnpackError handler body)
# are missing from this listing.
167 def oe_unpack(d, local, urldata):
168 from oe.unpack import unpack_file, is_patch, UnpackError
169 if is_patch(local, urldata.parm):
# Optional destination subdirectory requested in the URL ("subdir=...").
173 if "subdir" in urldata.parm:
174 subdirs.append(urldata.parm["subdir"])
# For file:// URLs the path is the URL path itself; for remote URLs host
# and path are joined (elided `else:` presumed between 178 and 180).
176 if urldata.type == "file":
178 urlpath = urldata.path
180 urlpath = "%s%s" % (urldata.host, urldata.path)
# Relative local paths are unpacked into a matching subdirectory.
182 if not os.path.isabs(urlpath):
183 subdirs.append(os.path.dirname(urlpath))
185 workdir = d.getVar("WORKDIR", True)
187 destdir = oe.path.join(workdir, *subdirs)
188 bb.mkdirhier(destdir)
# "dos" URL parameter: presumably requests DOS line-ending handling in
# unpack_file — confirm against oe.unpack.
191 dos = urldata.parm.get("dos")
193 bb.note("Unpacking %s to %s/" % (base_path_out(local, d),
194 base_path_out(destdir, d)))
# Run the unpack with the task's PATH so native helper tools are found.
196 unpack_file(local, destdir, env={"PATH": d.getVar("PATH", True)}, dos=dos)
197 except UnpackError, exc:
200 addtask unpack after do_fetch
201 do_unpack[dirs] = "${WORKDIR}"
# do_unpack: unpack every SRC_URI entry via oe_unpack(). file:// URLs
# containing a glob ("*") are resolved against FILESPATH here, because the
# fetcher itself cannot expand globs.
# NOTE(review): several lines (the loop's glob-match `break`/`else` handling,
# the non-glob branch header, and the closing `}`) are elided from this
# listing — the control flow is partially reconstructed in comments.
202 python base_do_unpack() {
203 from glob import glob
205 src_uri = d.getVar("SRC_URI", True)
# Resolve all URLs up front (cache=True) to get per-URL FetchData objects.
208 srcurldata = bb.fetch.init(src_uri.split(), d, True)
209 filespath = d.getVar("FILESPATH", True).split(":")
211 for url in src_uri.split():
212 urldata = srcurldata[url]
213 if urldata.type == "file" and "*" in urldata.path:
214 # The fetch code doesn't know how to handle globs, so
215 # we need to handle the local bits ourselves
# Search each FILESPATH entry for a directory containing the globbed file.
216 for path in filespath:
217 srcdir = oe.path.join(path, urldata.host,
218 os.path.dirname(urldata.path))
219 if os.path.exists(srcdir):
# No FILESPATH entry matched (elided loop-else presumed before 222).
222 bb.fatal("Unable to locate files for %s" % url)
# Unpack every file matching the glob from the directory found above.
224 for filename in glob(oe.path.join(srcdir,
225 os.path.basename(urldata.path))):
226 oe_unpack(d, filename, urldata)
# Non-glob case: unpack the fetcher-resolved local path.
228 local = urldata.localpath
230 raise bb.build.FuncFailed('Unable to locate local file for %s' % url)
232 oe_unpack(d, local, urldata)
# Event handler: at BuildStarted, record the bitbake version, print the build
# configuration banner (BUILDCFG_HEADER plus every BUILDCFG_VARS variable),
# and abort with a helpful message if any BUILDCFG_NEEDEDVARS variable is
# unset or still 'INVALID'.
# NOTE(review): the lines that emit `statusmsg`, collect `pesteruser`, and
# guard the bb.fatal call are elided from this listing.
235 python build_summary() {
236 from bb import note, error, data
237 from bb.event import getName
239 if isinstance(e, bb.event.BuildStarted):
240 bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
# Banner: one `NAME = "value"` line per configured status variable.
241 statusvars = bb.data.getVar("BUILDCFG_VARS", e.data, 1).split()
242 statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
243 statusmsg = "\n%s\n%s\n" % (bb.data.getVar("BUILDCFG_HEADER", e.data, 1), "\n".join(statuslines))
# Sanity check: these variables must be set to something meaningful.
246 needed_vars = bb.data.getVar("BUILDCFG_NEEDEDVARS", e.data, 1).split()
248 for v in needed_vars:
249 val = bb.data.getVar(v, e.data, 1)
250 if not val or val == 'INVALID':
253 bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
255 addhandler build_summary
# Task graph: configure runs after unpack/patch, then compile, then install.
257 addtask configure after do_unpack do_patch
258 do_configure[dirs] = "${S} ${B}"
# All DEPENDS must have populated the sysroot before configuring.
259 do_configure[deptask] = "do_populate_sysroot"
# Default do_configure is effectively a no-op (body elided from this listing).
260 base_do_configure() {
264 addtask compile after do_configure
265 do_compile[dirs] = "${S} ${B}"
# Fragment of base_do_compile (function header elided): run make if any
# makefile variant is present, otherwise just note there is nothing to build.
267 if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
268 oe_runmake || die "make failed"
270 oenote "nothing to compile"
274 addtask install after do_compile
275 do_install[dirs] = "${D} ${S} ${B}"
276 # Remove and re-create ${D} so that is it guaranteed to be empty
277 do_install[cleandirs] = "${D}"
# Fragment of the class's anonymous python function, run at recipe parse time.
# NOTE(review): the enclosing `python () {` header, closing `}`, and numerous
# interior lines (else-branches, continue/return statements, the per-package
# loop header) are elided from this listing; comments partially reconstruct
# the control flow — confirm against the full file.
#
# 1) Compatibility checks: skip the recipe when COMPATIBLE_HOST /
#    COMPATIBLE_MACHINE / COMPATIBLE_TARGET_SYS don't match the current
#    configuration. Skipped entirely when fetching for a source mirror.
294 source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
295 if not source_mirror_fetch:
296 need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
299 this_host = bb.data.getVar('HOST_SYS', d, 1)
300 if not re.match(need_host, this_host):
301 raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
303 need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
306 this_machine = bb.data.getVar('MACHINE', d, 1)
307 if this_machine and not re.match(need_machine, this_machine):
# SOC_FAMILY acts as a fallback match before declaring incompatibility.
308 this_soc_family = bb.data.getVar('SOC_FAMILY', d, 1)
309 if (this_soc_family and not re.match(need_machine, this_soc_family)) or not this_soc_family:
310 raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
312 need_target = bb.data.getVar('COMPATIBLE_TARGET_SYS', d, 1)
315 this_target = bb.data.getVar('TARGET_SYS', d, 1)
316 if this_target and not re.match(need_target, this_target):
317 raise bb.parse.SkipPackage("incompatible with target system %s" % this_target)
# 2) Per-recipe overrides: SRCDATE_<pn> and USE_NLS_<pn> override the
#    global SRCDATE / USE_NLS values (guard lines elided).
319 pn = bb.data.getVar('PN', d, 1)
321 # OBSOLETE in bitbake 1.7.4
322 srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
324 bb.data.setVar('SRCDATE', srcdate, d)
326 use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
328 bb.data.setVar('USE_NLS', use_nls, d)
330 setup_checksum_deps(d)
# 3) Inject tool dependencies implied by SRC_URI contents.
332 # Git packages should DEPEND on git-native
333 srcuri = bb.data.getVar('SRC_URI', d, 1)
334 if "git://" in srcuri:
335 depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
336 depends = depends + " git-native:do_populate_sysroot"
337 bb.data.setVarFlag('do_fetch', 'depends', depends, d)
# Mercurial sources likewise need mercurial-native at fetch time.
339 if "hg://" in srcuri:
340 depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
341 depends = depends + " mercurial-native:do_populate_sysroot"
342 bb.data.setVarFlag('do_fetch', 'depends', depends, d)
344 # unzip-native should already be staged before unpacking ZIP recipes
345 need_unzip = bb.data.getVar('NEED_UNZIP_FOR_UNPACK', d, 1)
346 src_uri = bb.data.getVar('SRC_URI', d, 1)
348 if ".zip" in src_uri or need_unzip == "1":
349 depends = bb.data.getVarFlag('do_unpack', 'depends', d) or ""
350 depends = depends + " unzip-native:do_populate_sysroot"
351 bb.data.setVarFlag('do_unpack', 'depends', depends, d)
# lzip-native dependency for unpacking (the condition line for this branch,
# presumably a ".lz" check, is elided).
354 depends = bb.data.getVarFlag('do_unpack', 'depends', d) or ""
355 depends = depends + " lzip-native:do_populate_sysroot"
356 bb.data.setVarFlag('do_unpack', 'depends', depends, d)
# 4) Multimachine: promote PACKAGE_ARCH to MACHINE_ARCH when the recipe is
#    machine-specific, and derive MULTIMACH_ARCH for stamp/deploy paths.
358 # 'multimachine' handling
359 mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
360 pkg_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
362 if (pkg_arch == mach_arch):
363 # Already machine specific - nothing further to do
367 # We always try to scan SRC_URI for urls with machine overrides
368 # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
370 override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
371 if override != '0' and is_machine_specific(d):
372 bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
373 bb.data.setVar('MULTIMACH_ARCH', mach_arch, d)
# Scan per-package PACKAGE_ARCH_<pkg> overrides: if any package is
# machine-specific, the whole recipe's MULTIMACH_ARCH becomes MACHINE_ARCH
# (loop header and `multiarch` initialisation elided).
378 packages = bb.data.getVar('PACKAGES', d, 1).split()
380 pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
382 # We could look for != PACKAGE_ARCH here but how to choose
383 # if multiple differences are present?
384 # Look through PACKAGE_ARCHS for the priority order?
385 if pkgarch and pkgarch == mach_arch:
386 multiarch = mach_arch
389 bb.data.setVar('MULTIMACH_ARCH', multiarch, d)
# Publish the base_* implementations above as the default do_* task functions,
# overridable by recipes and other classes.
392 EXPORT_FUNCTIONS do_setscene do_fetch do_unpack do_configure do_compile do_install do_package