# Default task run when a recipe is built without an explicit task
# (weak assignment, so distro/recipe config may override it).
1 BB_DEFAULT_TASK ?= "build"
# Fragment of the oe_runmake shell helper: log the exact make command
# line, then run make with any recipe-provided EXTRA_OEMAKE flags and
# the caller's arguments, aborting the task via die on failure.
# NOTE(review): the enclosing function header and closing brace are not
# visible in this chunk.
30 oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
31 ${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
# Build the default dependency string prepended to DEPENDS for every
# recipe: coreutils-native, plus (for cross builds only) the virtual
# toolchain and libc providers. Recipes can opt out via
# INHIBIT_DEFAULT_DEPS.
# NOTE(review): several interior lines of this function are missing
# from this chunk (the exception tuple at line 41 is cut mid-list and
# the return statement is not visible).
34 def base_dep_prepend(d):
36 # Ideally this will check a flag so we will operate properly in
37 # the case where host == build == target, for now we don't work in
40 deps = "coreutils-native"
# Bootstrap recipes that must build before coreutils-native exists are
# exempted from the default dependency (list truncated in this view).
41 if bb.data.getVar('PN', d, True) in ("shasum-native", "stagemanager-native",
45 # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
46 # we need that built is the responsibility of the patch function / class, not
# Cross build detected by HOST_SYS differing from BUILD_SYS: pull in
# the cross compiler and C library providers.
48 if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
49 if (bb.data.getVar('HOST_SYS', d, 1) !=
50 bb.data.getVar('BUILD_SYS', d, 1)):
51 deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
# Prepend the computed default dependencies to DEPENDS for the target,
# native, and nativesdk variants of every recipe.
54 DEPENDS_prepend="${@base_dep_prepend(d)} "
55 DEPENDS_virtclass-native_prepend="${@base_dep_prepend(d)} "
56 DEPENDS_virtclass-nativesdk_prepend="${@base_dep_prepend(d)} "
# Register base_scenefunction to run from base_do_setscene below.
59 SCENEFUNCS += "base_scenefunction"
# Setscene helper: if a "<STAMP>.needclean" marker exists (written by
# the StampUpdate handler below for do_rebuild), run do_clean first so
# the rebuild starts from a clean work directory.
# NOTE(review): the function's closing brace is not visible in this
# chunk.
61 python base_scenefunction () {
62 stamp = bb.data.getVar('STAMP', d, 1) + ".needclean"
63 if os.path.exists(stamp):
64 bb.build.exec_func("do_clean", d)
# do_setscene: run every function registered in SCENEFUNCS, then stamp
# itself if no setscene stamp exists yet. selfstamp="1" means the task
# manages its own stamp file rather than letting bitbake write one.
67 python base_do_setscene () {
68 for f in (bb.data.getVar('SCENEFUNCS', d, 1) or '').split():
69 bb.build.exec_func(f, d)
70 if not os.path.exists(bb.data.getVar('STAMP', d, 1) + ".do_setscene"):
71 bb.build.make_stamp("do_setscene", d)
73 do_setscene[selfstamp] = "1"
# Setscene must run before anything is fetched.
74 addtask setscene before do_fetch
# do_fetch: download every SRC_URI entry into DL_DIR, then verify the
# archive checksum of each remote (http/https/ftp/ftps) download.
# NOTE(review): this chunk omits interior lines — the try: statements
# paired with the except clauses below, the first_uri initialisation,
# and some else branches are not visible, so the control flow shown
# here is incomplete.
77 do_fetch[dirs] = "${DL_DIR}"
78 python base_do_fetch() {
# Operate on a copy of the datastore so override expansion does not
# mutate the parse-time metadata.
81 localdata = bb.data.createCopy(d)
82 bb.data.update_data(localdata)
84 src_uri = bb.data.getVar('SRC_URI', localdata, 1)
# Initialise the legacy fetcher; translate its exceptions into
# bb.build.FuncFailed so the task fails with a readable message.
89 bb.fetch.init(src_uri.split(),d)
90 except bb.fetch.NoMethodError:
91 (type, value, traceback) = sys.exc_info()
92 raise bb.build.FuncFailed("No method: %s" % value)
93 except bb.MalformedUrl:
94 (type, value, traceback) = sys.exc_info()
95 raise bb.build.FuncFailed("Malformed URL: %s" % value)
# Perform the actual downloads.
98 bb.fetch.go(localdata)
99 except bb.fetch.MissingParameterError:
100 (type, value, traceback) = sys.exc_info()
101 raise bb.build.FuncFailed("Missing parameters: %s" % value)
102 except bb.fetch.FetchError:
103 (type, value, traceback) = sys.exc_info()
104 raise bb.build.FuncFailed("Fetch failed: %s" % value)
105 except bb.fetch.MD5SumError:
106 (type, value, traceback) = sys.exc_info()
107 raise bb.build.FuncFailed("MD5 failed: %s" % value)
109 (type, value, traceback) = sys.exc_info()
110 raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
# Integrity-check pass over the fetched files.
113 pv = bb.data.getVar('PV', d, True)
114 pn = bb.data.getVar('PN', d, True)
118 for url in src_uri.split():
119 localpath = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
120 (type,host,path,_,_,params) = bb.decodeurl(url)
121 uri = "%s://%s%s" % (type,host,path)
# Only remote download protocols get checksum verification.
123 if type in [ "http", "https", "ftp", "ftps" ]:
124 # We provide a default shortcut of plain [] for the first fetch uri
125 # Explicit names in any uri overrides this default.
126 if not "name" in params and first_uri:
# base_chk_file returning false means no checksum data was available:
# fatal unless OE_ALLOW_INSECURE_DOWNLOADS is set, in which case only
# a note is printed. A failed (mismatched) checksum raises below.
129 if not base_chk_file(pn, pv, uri, localpath, params, d):
130 if not bb.data.getVar("OE_ALLOW_INSECURE_DOWNLOADS", d, True):
131 bb.fatal("%s-%s: %s cannot check archive integrity" % (pn,pv,uri))
133 bb.note("%s-%s: %s cannot check archive integrity" % (pn,pv,uri))
135 raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
# Unpack one fetched file into the current working directory (WORKDIR,
# per the do_unpack[dirs] flag below), choosing the extraction command
# from the file extension; plain files are copied, with destination
# subdirectory handling for file:// URIs. Returns the shell command's
# exit status via subprocess.call at the end.
# NOTE(review): interior lines are missing from this chunk (the try
# around the for loop, several else branches, and the chdir/return
# logic at the end are not visible).
138 def oe_unpack_file(file, data, url = None):
# Synthesize a file:// url when the caller did not supply one.
141 url = "file://%s" % file
# efile: the decompressed output name for single-file gz/bz2/Z
# archives, placed in WORKDIR.
142 dots = file.split(".")
143 if dots[-1] in ['gz', 'bz2', 'Z']:
144 efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
# Select the unpack command by extension. --no-same-owner avoids
# restoring archive ownership when running as root.
148 if file.endswith('.tar'):
149 cmd = 'tar x --no-same-owner -f %s' % file
150 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
151 cmd = 'tar xz --no-same-owner -f %s' % file
152 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
153 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
154 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
155 cmd = 'gzip -dc %s > %s' % (file, efile)
156 elif file.endswith('.bz2'):
157 cmd = 'bzip2 -dc %s > %s' % (file, efile)
158 elif file.endswith('.tar.xz'):
159 cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
160 elif file.endswith('.xz'):
161 cmd = 'xz -dc %s > %s' % (file, efile)
162 elif file.endswith('.zip') or file.endswith('.jar'):
# zip/jar handling decodes the url for its parameters (the unzip
# command construction lines are not visible in this chunk).
164 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
167 cmd = "%s '%s'" % (cmd, file)
# Directories: recursive copy, preserving a destination subdirectory
# derived from the matching FILESPATH prefix.
168 elif os.path.isdir(file):
170 filespath = bb.data.getVar("FILESPATH", data, 1).split(":")
172 if file[0:len(fp)] == fp:
173 destdir = file[len(fp):file.rfind('/')]
174 destdir = destdir.strip('/')
177 elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
178 os.makedirs("%s/%s" % (os.getcwd(), destdir))
181 cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
# Plain files (non-patch file:// entries): copy into a destination
# directory taken from the url; "." when the url names none.
183 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
184 if not 'patch' in parm:
185 # The "destdir" handling was specifically done for FILESPATH
186 # items. So, only do so for file:// entries.
188 destdir = bb.decodeurl(url)[1] or "."
191 bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
192 cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
# Skip the copy when source and destination are the same file.
197 dest = os.path.join(os.getcwd(), os.path.basename(file))
198 if os.path.exists(dest):
199 if os.path.samefile(file, dest):
202 # Change to subdir before executing command
203 save_cwd = os.getcwd();
204 parm = bb.decodeurl(url)[5]
206 newdir = ("%s/%s" % (os.getcwd(), parm['subdir']))
# Run the command with the task's PATH so staged native tools (e.g.
# unzip-native, see the parse-time hook below) are found.
210 cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
211 bb.note("Unpacking %s to %s/" % (base_path_out(file, data), base_path_out(os.getcwd(), data)))
212 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
# do_unpack: resolve each SRC_URI entry to its local downloaded path
# and unpack it into WORKDIR via oe_unpack_file.
# NOTE(review): interior lines are missing from this chunk (the try
# paired with the except at line 233, and the success check between
# lines 238 and 240 are not visible).
218 addtask unpack after do_fetch
219 do_unpack[dirs] = "${WORKDIR}"
220 python base_do_unpack() {
# Copy the datastore so override expansion is local to this task.
223 localdata = bb.data.createCopy(d)
224 bb.data.update_data(localdata)
226 src_uri = bb.data.getVar('SRC_URI', localdata)
229 src_uri = bb.data.expand(src_uri, localdata)
230 for url in src_uri.split():
232 local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
233 except bb.MalformedUrl, e:
234 raise bb.build.FuncFailed('Unable to generate local path for malformed uri: %s' % e)
236 raise bb.build.FuncFailed('Unable to locate local file for %s' % url)
# Resolve symlinks so the unpack command sees the real file.
237 local = os.path.realpath(local)
238 ret = oe_unpack_file(local, localdata, url)
240 raise bb.build.FuncFailed()
# Global event handler: formats log messages for task/dependency
# events, prints the build configuration banner on BuildStarted,
# validates required configuration variables, removes stamps for the
# do_rebuild task, and appends messages to EVENTLOG when set.
# NOTE(review): interior lines are missing from this chunk (the getName
# call binding `name`, several elif branches, and the message-printing
# path for bitbake <= 1.8 are not visible).
243 addhandler base_eventhandler
244 python base_eventhandler() {
245 from bb import note, error, data
246 from bb.event import getName
250 if name == "TaskCompleted":
251 msg = "package %s: task %s is complete." % (data.getVar("PF", e.data, 1), e.task)
252 elif name == "UnsatisfiedDep":
# name[:-3] strips the trailing "Dep" from the event name.
253 msg = "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
257 # Only need to output when using 1.8 or lower, the UI code handles it
259 if (int(bb.__version__.split(".")[0]) <= 1 and int(bb.__version__.split(".")[1]) <= 8):
# On BuildStarted: record the bitbake version and print the
# configuration banner built from BUILDCFG_VARS.
263 if name.startswith("BuildStarted"):
264 bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
265 statusvars = bb.data.getVar("BUILDCFG_VARS", e.data, 1).split()
266 statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
267 statusmsg = "\n%s\n%s\n" % (bb.data.getVar("BUILDCFG_HEADER", e.data, 1), "\n".join(statuslines))
# Abort the build if any BUILDCFG_NEEDEDVARS variable is unset or
# still 'INVALID' (the pesteruser accumulation line is not visible).
270 needed_vars = bb.data.getVar("BUILDCFG_NEEDEDVARS", e.data, 1).split()
272 for v in needed_vars:
273 val = bb.data.getVar(v, e.data, 1)
274 if not val or val == 'INVALID':
277 bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
280 # Handle removing stamps for 'rebuild' task
282 if name.startswith("StampUpdate"):
283 for (fn, task) in e.targets:
284 #print "%s %s" % (task, fn)
285 if task == "do_rebuild":
# Remove all stamps for this recipe and drop a .needclean marker,
# which base_scenefunction above consumes to trigger do_clean.
286 dir = "%s.*" % e.stampPrefix[fn]
287 bb.note("Removing stamps: " + dir)
288 os.system('rm -f '+ dir)
289 os.system('touch ' + e.stampPrefix[fn] + '.needclean')
# Append the formatted message to the EVENTLOG file when configured
# and the event carries a datastore.
291 if not data in e.__dict__:
294 log = data.getVar("EVENTLOG", e.data, 1)
296 logfile = file(log, "a")
297 logfile.write("%s\n" % msg)
# do_configure: runs in S and B after unpack and patch; deptask makes
# every DEPENDS entry's do_populate_staging a prerequisite.
# NOTE(review): the body and closing brace of base_do_configure are not
# visible in this chunk.
301 addtask configure after do_unpack do_patch
302 do_configure[dirs] = "${S} ${B}"
303 do_configure[deptask] = "do_populate_staging"
304 base_do_configure() {
# do_compile: run make via oe_runmake when a Makefile/makefile exists,
# otherwise just note that there is nothing to compile.
# NOTE(review): the base_do_compile function header, else, and closing
# lines are not visible in this chunk.
308 addtask compile after do_configure
309 do_compile[dirs] = "${S} ${B}"
311 if [ -e Makefile -o -e makefile ]; then
312 oe_runmake || die "make failed"
314 oenote "nothing to compile"
# do_install task declaration and directory flags.
319 addtask install after do_compile
320 do_install[dirs] = "${D} ${S} ${B}"
321 # Remove and re-create ${D} so that it is guaranteed to be empty
322 do_install[cleandirs] = "${D}"
# Parse-time hook fragment: skips recipes incompatible with the current
# host/machine, applies per-recipe SRCDATE/USE_NLS overrides, adds
# fetch/unpack tool dependencies, and decides whether the recipe is
# machine-specific (PACKAGE_ARCH / MULTIMACH_ARCH handling).
# NOTE(review): the enclosing python function header and many interior
# lines (try/except around re.match, else branches, the loop over
# packages) are not visible in this chunk.
# COMPATIBLE_HOST / COMPATIBLE_MACHINE checks are skipped entirely for
# source-mirror fetch runs.
339 source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
340 if not source_mirror_fetch:
341 need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
344 this_host = bb.data.getVar('HOST_SYS', d, 1)
345 if not re.match(need_host, this_host):
346 raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
348 need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
351 this_machine = bb.data.getVar('MACHINE', d, 1)
352 if this_machine and not re.match(need_machine, this_machine):
353 raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
355 pn = bb.data.getVar('PN', d, 1)
# Per-recipe SRCDATE_<pn> / USE_NLS_<pn> override the generic variable.
357 # OBSOLETE in bitbake 1.7.4
358 srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
360 bb.data.setVar('SRCDATE', srcdate, d)
362 use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
364 bb.data.setVar('USE_NLS', use_nls, d)
366 setup_checksum_deps(d)
368 # Git packages should DEPEND on git-native
369 srcuri = bb.data.getVar('SRC_URI', d, 1)
370 if "git://" in srcuri:
371 depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
372 depends = depends + " git-native:do_populate_staging"
373 bb.data.setVarFlag('do_fetch', 'depends', depends, d)
375 # unzip-native should already be staged before unpacking ZIP recipes
376 need_unzip = bb.data.getVar('NEED_UNZIP_FOR_UNPACK', d, 1)
377 src_uri = bb.data.getVar('SRC_URI', d, 1)
379 if ".zip" in src_uri or need_unzip == "1":
380 depends = bb.data.getVarFlag('do_unpack', 'depends', d) or ""
381 depends = depends + " unzip-native:do_populate_staging"
382 bb.data.setVarFlag('do_unpack', 'depends', depends, d)
384 # 'multimachine' handling
385 mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
386 pkg_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
388 if (pkg_arch == mach_arch):
389 # Already machine specific - nothing further to do
393 # We always try to scan SRC_URI for urls with machine overrides
394 # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
396 override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
# A recipe becomes machine-specific when a MACHINE-named files
# directory exists next to the recipe and a file:// SRC_URI entry
# resolves inside it.
399 for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
400 path = bb.data.expand(os.path.join("${FILE_DIRNAME}", p, "${MACHINE}"), d)
401 if os.path.isdir(path):
404 for s in srcuri.split():
405 if not s.startswith("file://"):
407 local = bb.data.expand(bb.fetch.localpath(s, d), d)
409 if local.startswith(mp):
410 #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
411 bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
412 bb.data.setVar('MULTIMACH_ARCH', mach_arch, d)
# Otherwise: if any listed package has a machine-specific
# PACKAGE_ARCH_<pkg>, mark the whole recipe's MULTIMACH_ARCH.
417 packages = bb.data.getVar('PACKAGES', d, 1).split()
419 pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
421 # We could look for != PACKAGE_ARCH here but how to choose
422 # if multiple differences are present?
423 # Look through PACKAGE_ARCHS for the priority order?
424 if pkgarch and pkgarch == mach_arch:
425 multiarch = mach_arch
428 bb.data.setVar('MULTIMACH_ARCH', multiarch, d)
# Expose the base_* implementations above as the default do_* task
# functions (recipes may override them with their own versions).
431 EXPORT_FUNCTIONS do_setscene do_fetch do_unpack do_configure do_compile do_install do_package