2 # This class knows how to package up [e]glibc. It's shared since prebuilt binary toolchains
3 # may need packaging and it's pointless to duplicate this code.
5 # Caller should set GLIBC_INTERNAL_USE_BINARY_LOCALE to one of:
6 # "compile" - Use QEMU to generate the binary locale files
7 # "precompiled" - The binary locale files are pregenerated and already present
8 # "ondevice" - The device will build the locale files upon first boot through the postinst
10 GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice"
12 python __anonymous () {
13 enabled = bb.data.getVar("ENABLE_BINARY_LOCALE_GENERATION", d, 1)
15 if enabled and int(enabled):
18 target_arch = bb.data.getVar("TARGET_ARCH", d, 1)
19 binary_arches = bb.data.getVar("BINARY_LOCALE_ARCHES", d, 1) or ""
20 use_cross_localedef = bb.data.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", d, 1) or ""
22 for regexp in binary_arches.split(" "):
23 r = re.compile(regexp)
25 if r.match(target_arch):
26 depends = bb.data.getVar("DEPENDS", d, 1)
27 if use_cross_localedef == "1" :
28 depends = "%s cross-localedef-native" % depends
30 depends = "%s qemu-native" % depends
31 bb.data.setVar("DEPENDS", depends, d)
32 bb.data.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile", d)
36 def get_libc_fpu_setting(bb, d):
37 if bb.data.getVar('TARGET_FPU', d, 1) in [ 'soft' ]:
41 OVERRIDES_append = ":${TARGET_ARCH}-${TARGET_OS}"
43 do_configure_prepend() {
44 sed -e "s#@BASH@#/bin/sh#" -i ${S}/elf/ldd.bash.in
49 # indentation removed on purpose
50 locale_base_postinst() {
53 if [ "x$D" != "x" ]; then
58 mkdir -p ${TMP_LOCALE}
59 if [ -f ${libdir}/locale/locale-archive ]; then
60 cp ${libdir}/locale/locale-archive ${TMP_LOCALE}/
62 localedef --inputfile=${datadir}/i18n/locales/%s --charmap=%s --prefix=/tmp/locale %s
63 mkdir -p ${libdir}/locale/
64 mv ${TMP_LOCALE}/locale-archive ${libdir}/locale/
68 # indentation removed on purpose
69 locale_base_postrm() {
73 mkdir -p ${TMP_LOCALE}
74 if [ -f ${libdir}/locale/locale-archive ]; then
75 cp ${libdir}/locale/locale-archive ${TMP_LOCALE}/
77 localedef --delete-from-archive --inputfile=${datadir}/locales/%s --charmap=%s --prefix=/tmp/locale %s
78 mv ${TMP_LOCALE}/locale-archive ${libdir}/locale/
83 oe_runmake install_root=${D} \
84 libdir='${libdir}' slibdir='${base_libdir}' \
85 localedir='${libdir}/locale' \
87 for r in ${rpcsvc}; do
88 h=`echo $r|sed -e's,\.x$,.h,'`
89 install -m 0644 ${S}/sunrpc/rpcsvc/$h ${D}/${includedir}/rpcsvc/
91 install -m 0644 ${WORKDIR}/etc/ld.so.conf ${D}/${sysconfdir}/
92 install -d ${D}${libdir}/locale
93 make -f ${WORKDIR}/generate-supported.mk IN="${S}/localedata/SUPPORTED" OUT="${WORKDIR}/SUPPORTED"
94 # get rid of some broken files...
95 for i in ${GLIBC_BROKEN_LOCALES}; do
96 grep -v $i ${WORKDIR}/SUPPORTED > ${WORKDIR}/SUPPORTED.tmp
97 mv ${WORKDIR}/SUPPORTED.tmp ${WORKDIR}/SUPPORTED
100 rm -rf ${D}${datadir}/zoneinfo
101 rm -rf ${D}${libexecdir}/getconf
102 rm -rf ${D}${sysconfdir}/localtime
103 install -d ${D}${sysconfdir}/init.d
104 install -m 0644 ${S}/nscd/nscd.conf ${D}${sysconfdir}/
105 install ${S}/nscd/nscd.init ${D}${sysconfdir}/init.d/nscd
108 TMP_LOCALE="/tmp/locale${libdir}/locale"
110 do_prep_locale_tree() {
111 treedir=${WORKDIR}/locale-tree
113 mkdir -p $treedir/bin $treedir/lib $treedir/${datadir} $treedir/${libdir}/locale
114 cp -pPR ${PKGD}${datadir}/i18n $treedir/${datadir}/i18n
115 # unzip to avoid parsing errors
116 for i in $treedir/${datadir}/i18n/charmaps/*gz; do
119 cp -pPR ${PKGD}${base_libdir}/* $treedir/lib
120 if [ -f ${STAGING_DIR_NATIVE}${prefix_native}/lib/libgcc_s.* ]; then
121 cp -pPR ${STAGING_DIR_NATIVE}/${prefix_native}/lib/libgcc_s.* $treedir/lib
123 install -m 0755 ${PKGD}${bindir}/localedef $treedir/bin
126 do_collect_bins_from_locale_tree() {
127 treedir=${WORKDIR}/locale-tree
129 mkdir -p ${PKGD}${libdir}
130 cp -pPR $treedir/${libdir}/locale ${PKGD}${libdir}
135 python package_do_split_gconvs () {
137 if (bb.data.getVar('PACKAGE_NO_GCONV', d, 1) == '1'):
138 bb.note("package requested not splitting gconvs")
141 if not bb.data.getVar('PACKAGES', d, 1):
144 bpn = bb.data.getVar('BPN', d, 1)
145 libdir = bb.data.getVar('libdir', d, 1)
147 bb.error("libdir not defined")
149 datadir = bb.data.getVar('datadir', d, 1)
151 bb.error("datadir not defined")
154 gconv_libdir = base_path_join(libdir, "gconv")
155 charmap_dir = base_path_join(datadir, "i18n", "charmaps")
156 locales_dir = base_path_join(datadir, "i18n", "locales")
157 binary_locales_dir = base_path_join(libdir, "locale")
159 def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group):
162 c_re = re.compile('^copy "(.*)"')
163 i_re = re.compile('^include "(\w+)".*')
164 for l in f.readlines():
165 m = c_re.match(l) or i_re.match(l)
167 dp = legitimize_package_name('%s-gconv-%s' % (bpn, m.group(1)))
172 bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d)
174 bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d)
176 do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \
177 description='gconv module for character set %s', hook=calc_gconv_deps, \
178 extra_depends=bpn+'-gconv')
180 def calc_charmap_deps(fn, pkg, file_regex, output_pattern, group):
183 c_re = re.compile('^copy "(.*)"')
184 i_re = re.compile('^include "(\w+)".*')
185 for l in f.readlines():
186 m = c_re.match(l) or i_re.match(l)
188 dp = legitimize_package_name('%s-charmap-%s' % (bpn, m.group(1)))
193 bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d)
195 bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d)
197 do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \
198 description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='')
200 def calc_locale_deps(fn, pkg, file_regex, output_pattern, group):
203 c_re = re.compile('^copy "(.*)"')
204 i_re = re.compile('^include "(\w+)".*')
205 for l in f.readlines():
206 m = c_re.match(l) or i_re.match(l)
208 dp = legitimize_package_name(bpn+'-localedata-%s' % m.group(1))
213 bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d)
215 bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d)
217 do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \
218 description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
219 bb.data.setVar('PACKAGES', bb.data.getVar('PACKAGES', d) + ' ' + bpn + '-gconv', d)
221 use_bin = bb.data.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", d, 1)
223 dot_re = re.compile("(.*)\.(.*)")
225 #GLIBC_GENERATE_LOCALES var specifies which locales to be supported, empty or "all" means all locales
226 if use_bin != "precompiled":
227 supported = bb.data.getVar('GLIBC_GENERATE_LOCALES', d, 1)
228 if not supported or supported == "all":
229 f = open(base_path_join(bb.data.getVar('WORKDIR', d, 1), "SUPPORTED"), "r")
230 supported = f.readlines()
233 supported = supported.split()
234 supported = map(lambda s:s.replace(".", " ") + "\n", supported)
237 full_bin_path = bb.data.getVar('PKGD', d, True) + binary_locales_dir
238 for dir in os.listdir(full_bin_path):
239 dbase = dir.split(".")
242 d2 = "." + dbase[1].upper() + " "
243 supported.append(dbase[0] + d2)
245 # Collate the locales by base and encoding
246 utf8_only = int(bb.data.getVar('LOCALE_UTF8_ONLY', d, 1) or 0)
250 (locale, charset) = l.split(" ")
251 if utf8_only and charset != 'UTF-8':
253 m = dot_re.match(locale)
256 if not encodings.has_key(locale):
257 encodings[locale] = []
258 encodings[locale].append(charset)
260 def output_locale_source(name, pkgname, locale, encoding):
261 bb.data.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
262 (bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)), d)
263 bb.data.setVar('pkg_postinst_%s' % pkgname, bb.data.getVar('locale_base_postinst', d, 1) \
264 % (locale, encoding, locale), d)
265 bb.data.setVar('pkg_postrm_%s' % pkgname, bb.data.getVar('locale_base_postrm', d, 1) % \
266 (locale, encoding, locale), d)
268 def output_locale_binary_rdepends(name, pkgname, locale, encoding):
269 m = re.match("(.*)_(.*)", name)
271 libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-",""))
274 bb.data.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
275 % (bpn, libc_name)), d)
276 rprovides = (bb.data.getVar('RPROVIDES_%s' % pkgname, d, True) or "").split()
277 rprovides.append(legitimize_package_name('%s-binary-localedata-%s' % (bpn, libc_name)))
278 bb.data.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides), d)
280 def output_locale_binary(name, pkgname, locale, encoding):
281 treedir = base_path_join(bb.data.getVar("WORKDIR", d, 1), "locale-tree")
282 ldlibdir = "%s/lib" % treedir
283 path = bb.data.getVar("PATH", d, 1)
284 i18npath = base_path_join(treedir, datadir, "i18n")
285 gconvpath = base_path_join(treedir, "iconvdata")
287 use_cross_localedef = bb.data.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", d, 1) or "0"
288 if use_cross_localedef == "1":
289 target_arch = bb.data.getVar('TARGET_ARCH', d, True)
290 locale_arch_options = { \
291 "arm": " --uint32-align=4 --little-endian ", \
292 "armeb": " --uint32-align=4 --big-endian ", \
293 "armel": " --uint32-align=4 --little-endian ", \
294 "powerpc": " --uint32-align=4 --big-endian ", \
295 "mips": " --uint32-align=4 --big-endian ", \
296 "mipsel": " --uint32-align=4 --little-endian ", \
297 "mips64": " --uint32-align=4 --big-endian ", \
298 "i386": " --uint32-align=4 --little-endian ", \
299 "i486": " --uint32-align=4 --little-endian ", \
300 "i586": " --uint32-align=4 --little-endian ", \
301 "i686": " --uint32-align=4 --little-endian ", \
302 "x86_64": " --uint32-align=4 --little-endian " }
304 if target_arch in locale_arch_options:
305 localedef_opts = locale_arch_options[target_arch]
307 bb.error("locale_arch_options not found for target_arch=" + target_arch)
308 raise bb.build.FuncFailed("unknown arch:" + target_arch + " for locale_arch_options")
310 localedef_opts += " --force --old-style --no-archive --prefix=%s \
311 --inputfile=%s/%s/i18n/locales/%s --charmap=%s %s/usr/lib/locale/%s" \
312 % (treedir, treedir, datadir, locale, encoding, treedir, name)
314 cmd = "PATH=\"%s\" I18NPATH=\"%s\" GCONV_PATH=\"%s\" cross-localedef %s" % \
315 (path, i18npath, gconvpath, localedef_opts)
316 else: # earlier slower qemu way
317 qemu = qemu_target_binary(d)
318 localedef_opts = "--force --old-style --no-archive --prefix=%s \
319 --inputfile=%s/i18n/locales/%s --charmap=%s %s" \
320 % (treedir, datadir, locale, encoding, name)
322 qemu_options = bb.data.getVar("QEMU_OPTIONS_%s" % bb.data.getVar('PACKAGE_ARCH', d, 1), d, 1)
324 qemu_options = bb.data.getVar('QEMU_OPTIONS', d, 1)
326 cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
327 -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
328 (path, i18npath, qemu, treedir, ldlibdir, qemu_options, treedir, localedef_opts)
330 bb.note("generating locale %s (%s)" % (locale, encoding))
332 process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
333 if process.wait() != 0:
337 bb.note(process.stdout.read())
339 bb.note(process.stderr.read())
340 raise bb.build.FuncFailed("localedef returned an error")
342 def output_locale(name, locale, encoding):
343 pkgname = 'locale-base-' + legitimize_package_name(name)
344 bb.data.setVar('ALLOW_EMPTY_%s' % pkgname, '1', d)
345 bb.data.setVar('PACKAGES', '%s %s' % (pkgname, bb.data.getVar('PACKAGES', d, 1)), d)
346 rprovides = ' virtual-locale-%s' % legitimize_package_name(name)
347 m = re.match("(.*)_(.*)", name)
349 rprovides += ' virtual-locale-%s' % m.group(1)
350 bb.data.setVar('RPROVIDES_%s' % pkgname, rprovides, d)
352 if use_bin == "compile":
353 output_locale_binary_rdepends(name, pkgname, locale, encoding)
354 output_locale_binary(name, pkgname, locale, encoding)
355 elif use_bin == "precompiled":
356 output_locale_binary_rdepends(name, pkgname, locale, encoding)
358 output_locale_source(name, pkgname, locale, encoding)
360 if use_bin == "compile":
361 bb.note("preparing tree for binary locale generation")
362 bb.build.exec_func("do_prep_locale_tree", d)
364 # Reshuffle names so that UTF-8 is preferred over other encodings
366 for l in encodings.keys():
367 if len(encodings[l]) == 1:
368 output_locale(l, l, encodings[l][0])
369 if encodings[l][0] != "UTF-8":
372 if "UTF-8" in encodings[l]:
373 output_locale(l, l, "UTF-8")
374 encodings[l].remove("UTF-8")
377 for e in encodings[l]:
378 output_locale('%s.%s' % (l, e), l, e)
380 if non_utf8 != [] and use_bin != "precompiled":
381 bb.note("the following locales are supported only in legacy encodings:")
382 bb.note(" " + " ".join(non_utf8))
384 if use_bin == "compile":
385 bb.note("collecting binary locales from locale tree")
386 bb.build.exec_func("do_collect_bins_from_locale_tree", d)
387 do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
388 output_pattern=bpn+'-binary-localedata-%s', \
389 description='binary locale definition for %s', extra_depends='', allow_dirs=True)
390 elif use_bin == "precompiled":
391 do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
392 output_pattern=bpn+'-binary-localedata-%s', \
393 description='binary locale definition for %s', extra_depends='', allow_dirs=True)
395 bb.note("generation of binary locales disabled. this may break i18n!")
399 # We want to do this indirection so that we can safely 'return'
400 # from the called function even though we're prepending
401 python populate_packages_prepend () {
402 if bb.data.getVar('DEBIAN_NAMES', d, 1):
403 bpn = bb.data.getVar('BPN', d, 1)
404 bb.data.setVar('PKG_'+bpn, 'libc6', d)
405 bb.data.setVar('PKG_'+bpn+'-dev', 'libc6-dev', d)
406 bb.build.exec_func('package_do_split_gconvs', d)