build: strip -single_module when doing bundle on mac OS X
[Samba.git] / buildtools / wafsamba / samba_utils.py
blob a25393b89da0463f8a283859b33bf00ec277d708
# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc

import Build, os, sys, Options, Utils, Task, re, fnmatch, Logs
from TaskGen import feature, before
from Configure import conf
from Logs import debug
import shlex

# TODO: make this a --option
LIB_PATH="shared"

# sigh, python octal constants are a mess
MODE_644 = int('644', 8)
MODE_755 = int('755', 8)

@conf
def SET_TARGET_TYPE(ctx, target, value):
    '''set the target type of a target'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target in cache and cache[target] != 'EMPTY':
        Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.curdir, value, cache[target]))
        sys.exit(1)
    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
    debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))
    return True


def GET_TARGET_TYPE(ctx, target):
    '''get target type from cache'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if not target in cache:
        return None
    return cache[target]


######################################################
# this is used as a decorator to make functions only
# run once. Based on the idea from
# http://stackoverflow.com/questions/815110/is-there-a-decorator-to-simply-cache-function-return-values
runonce_ret = {}
def runonce(function):
    def runonce_wrapper(*args):
        if args in runonce_ret:
            return runonce_ret[args]
        else:
            ret = function(*args)
            runonce_ret[args] = ret
            return ret
    return runonce_wrapper

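
# Illustrative usage sketch (not part of the original file): a function wrapped
# with runonce executes at most once per distinct argument tuple; later calls
# with the same arguments return the cached result.
#
#   @runonce
#   def probe_flag(flag):
#       return do_expensive_check(flag)   # hypothetical helper
#
#   probe_flag('-fPIC')    # runs the check
#   probe_flag('-fPIC')    # served from runonce_ret, check not re-run
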
def ADD_LD_LIBRARY_PATH(path):
    '''add something to LD_LIBRARY_PATH'''
    if 'LD_LIBRARY_PATH' in os.environ:
        oldpath = os.environ['LD_LIBRARY_PATH']
    else:
        oldpath = ''
    newpath = oldpath.split(':')
    if not path in newpath:
        newpath.append(path)
        os.environ['LD_LIBRARY_PATH'] = ':'.join(newpath)


def install_rpath(bld):
    '''the rpath value for installation'''
    bld.env['RPATH'] = []
    if bld.env.RPATH_ON_INSTALL:
        return ['%s/lib' % bld.env.PREFIX]
    return []


def build_rpath(bld):
    '''the rpath value for build'''
    rpath = os.path.normpath('%s/%s' % (bld.env.BUILD_DIRECTORY, LIB_PATH))
    bld.env['RPATH'] = []
    if bld.env.RPATH_ON_BUILD:
        return [rpath]
    ADD_LD_LIBRARY_PATH(rpath)
    return []


@conf
def LOCAL_CACHE(ctx, name):
    '''return a named build cache dictionary, used to store
       state inside other functions'''
    if name in ctx.env:
        return ctx.env[name]
    ctx.env[name] = {}
    return ctx.env[name]


@conf
def LOCAL_CACHE_SET(ctx, cachename, key, value):
    '''set a value in a local cache'''
    cache = LOCAL_CACHE(ctx, cachename)
    cache[key] = value


@conf
def ASSERT(ctx, expression, msg):
    '''a build assert call'''
    if not expression:
        raise Utils.WafError("ERROR: %s\n" % msg)
Build.BuildContext.ASSERT = ASSERT


def SUBDIR(bld, subdir, list):
    '''create a list of files by pre-pending each with a subdir name'''
    ret = ''
    for l in TO_LIST(list):
        ret = ret + os.path.normpath(os.path.join(subdir, l)) + ' '
    return ret
Build.BuildContext.SUBDIR = SUBDIR

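
# Illustrative sketch (not from the original source; the path names below are
# made up): SUBDIR prepends the subdir to each file in a space-separated list,
# e.g.
#
#   bld.SUBDIR('lib/util', 'debug.c dlinklist.c')
#       ->  'lib/util/debug.c lib/util/dlinklist.c '     (note the trailing space)
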
def dict_concat(d1, d2):
    '''concatenate two dictionaries d1 += d2'''
    for t in d2:
        if t not in d1:
            d1[t] = d2[t]


def exec_command(self, cmd, **kw):
    '''this overrides the 'waf -v' debug output to be in a nice
    unix like format instead of a python list.
    Thanks to ita on #waf for this'''
    import Utils, Logs
    _cmd = cmd
    if isinstance(cmd, list):
        _cmd = ' '.join(cmd)
    debug('runner: %s' % _cmd)
    if self.log:
        self.log.write('%s\n' % cmd)
        kw['log'] = self.log
    try:
        if not kw.get('cwd', None):
            kw['cwd'] = self.cwd
    except AttributeError:
        self.cwd = kw['cwd'] = self.bldnode.abspath()
    return Utils.exec_command(cmd, **kw)
Build.BuildContext.exec_command = exec_command


def ADD_COMMAND(opt, name, function):
    '''add a new top level command to waf'''
    Utils.g_module.__dict__[name] = function
    opt.name = function
Options.Handler.ADD_COMMAND = ADD_COMMAND


@feature('cc', 'cshlib', 'cprogram')
@before('apply_core', 'exec_rule')
def process_depends_on(self):
    '''The new depends_on attribute for build rules
       allows us to specify a dependency on output from
       a source generation rule'''
    if getattr(self, 'depends_on', None):
        lst = self.to_list(self.depends_on)
        for x in lst:
            y = self.bld.name_to_obj(x, self.env)
            self.bld.ASSERT(y is not None, "Failed to find dependency %s of %s" % (x, self.name))
            y.post()
            if getattr(y, 'more_includes', None):
                self.includes += " " + y.more_includes


os_path_relpath = getattr(os.path, 'relpath', None)
if os_path_relpath is None:
    # Python < 2.6 does not have os.path.relpath, provide a replacement
    # (imported from Python2.6.5~rc2)
    def os_path_relpath(path, start):
        """Return a relative version of a path"""
        start_list = os.path.abspath(start).split("/")
        path_list = os.path.abspath(path).split("/")

        # Work out how much of the filepath is shared by start and path.
        i = len(os.path.commonprefix([start_list, path_list]))

        rel_list = ['..'] * (len(start_list)-i) + path_list[i:]
        if not rel_list:
            return start
        return os.path.join(*rel_list)

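
# Illustrative sketch (not from the original source; the paths are made up):
# whichever implementation is picked, os_path_relpath returns the path to
# 'path' relative to 'start', e.g.
#
#   os_path_relpath('/data/samba/source4', '/data/samba/lib')  ->  '../source4'
#   os_path_relpath('/data/samba/lib/util', '/data/samba')     ->  'lib/util'
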
def unique_list(seq):
    '''return a uniquified list in the same order as the existing list'''
    seen = {}
    result = []
    for item in seq:
        if item in seen: continue
        seen[item] = True
        result.append(item)
    return result


def TO_LIST(str, delimiter=None):
    '''Split a list, preserving quoted strings and existing lists'''
    if str is None:
        return []
    if isinstance(str, list):
        return str
    lst = str.split(delimiter)
    # the string may have had quotes in it, now we
    # check if we did have quotes, and use the slower shlex
    # if we need to
    for e in lst:
        if e[0] == '"':
            return shlex.split(str)
    return lst

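
# Illustrative sketch (not from the original source):
#
#   TO_LIST(None)                        ->  []
#   TO_LIST(['a.c', 'b.c'])              ->  ['a.c', 'b.c']            (returned as-is)
#   TO_LIST('a.c b.c c.c')               ->  ['a.c', 'b.c', 'c.c']
#   TO_LIST('a.c "name with space.c"')   ->  ['a.c', 'name with space.c']   (via shlex)
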
def subst_vars_error(string, env):
    '''substitute vars, throw an error if a variable is not defined'''
    lst = re.split('(\$\{\w+\})', string)
    out = []
    for v in lst:
        if re.match('\$\{\w+\}', v):
            vname = v[2:-1]
            if not vname in env:
                Logs.error("Failed to find variable %s in %s" % (vname, string))
                sys.exit(1)
            v = env[vname]
        out.append(v)
    return ''.join(out)

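
# Illustrative sketch (not from the original source), assuming env['PREFIX']
# has been set to '/usr/local':
#
#   subst_vars_error('${PREFIX}/lib/samba', env)  ->  '/usr/local/lib/samba'
#
# Referencing an undefined variable, e.g. '${NOSUCHVAR}', logs an error and
# exits the build rather than leaving the token unexpanded.
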
@conf
def SUBST_ENV_VAR(ctx, varname):
    '''Substitute an environment variable for any embedded variables'''
    return subst_vars_error(ctx.env[varname], ctx.env)
Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR


def ENFORCE_GROUP_ORDERING(bld):
    '''enforce group ordering for the project. This
       makes the group ordering apply only when you specify
       a target with --target'''
    if Options.options.compile_targets:
        @feature('*')
        @before('exec_rule', 'apply_core', 'collect')
        def force_previous_groups(self):
            if getattr(self.bld, 'enforced_group_ordering', False) == True:
                return
            self.bld.enforced_group_ordering = True

            def group_name(g):
                tm = self.bld.task_manager
                return [x for x in tm.groups_names if id(tm.groups_names[x]) == id(g)][0]

            my_id = id(self)
            bld = self.bld
            stop = None
            for g in bld.task_manager.groups:
                for t in g.tasks_gen:
                    if id(t) == my_id:
                        stop = id(g)
                        debug('group: Forcing up to group %s for target %s',
                              group_name(g), self.name or self.target)
                        break
                if stop != None:
                    break
            if stop is None:
                return

            for i in xrange(len(bld.task_manager.groups)):
                g = bld.task_manager.groups[i]
                bld.task_manager.current_group = i
                if id(g) == stop:
                    break
                debug('group: Forcing group %s', group_name(g))
                for t in g.tasks_gen:
                    if not getattr(t, 'forced_groups', False):
                        debug('group: Posting %s', t.name or t.target)
                        t.forced_groups = True
                        t.post()
Build.BuildContext.ENFORCE_GROUP_ORDERING = ENFORCE_GROUP_ORDERING

def recursive_dirlist(dir, relbase, pattern=None):
    '''recursive directory list'''
    ret = []
    for f in os.listdir(dir):
        f2 = dir + '/' + f
        if os.path.isdir(f2):
            # pass the pattern down so it also filters files in subdirectories
            ret.extend(recursive_dirlist(f2, relbase, pattern=pattern))
        else:
            if pattern and not fnmatch.fnmatch(f, pattern):
                continue
            ret.append(os_path_relpath(f2, relbase))
    return ret

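
# Illustrative sketch (not from the original source; the directory and file
# names are made up): list all *.xml files under docs/, relative to the
# source tree root:
#
#   recursive_dirlist('docs', '.', pattern='*.xml')
#       ->  ['docs/manpages/smb.conf.5.xml', 'docs/guide/intro.xml', ...]
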
def mkdir_p(dir):
    '''like mkdir -p'''
    if os.path.isdir(dir):
        return
    mkdir_p(os.path.dirname(dir))
    os.mkdir(dir)


def SUBST_VARS_RECURSIVE(string, env):
    '''recursively expand variables'''
    if string is None:
        return string
    limit = 100
    while (string.find('${') != -1 and limit > 0):
        string = subst_vars_error(string, env)
        limit -= 1
    return string

@conf
def EXPAND_VARIABLES(ctx, varstr, vars=None):
    '''expand variables from a user supplied dictionary

    This is most useful when you pass vars=locals() to expand
    all your local variables in strings
    '''

    if isinstance(varstr, list):
        ret = []
        for s in varstr:
            ret.append(EXPAND_VARIABLES(ctx, s, vars=vars))
        return ret

    import Environment
    env = Environment.Environment()
    ret = varstr
    # substitute on the user supplied dict if available
    if vars is not None:
        for v in vars.keys():
            env[v] = vars[v]
        ret = SUBST_VARS_RECURSIVE(ret, env)

    # if anything is left, subst on the environment as well
    if ret.find('${') != -1:
        ret = SUBST_VARS_RECURSIVE(ret, ctx.env)
    # make sure there is nothing left. Also check for the common
    # typo of $( instead of ${
    if ret.find('${') != -1 or ret.find('$(') != -1:
        Logs.error('Failed to substitute all variables in varstr=%s' % ret)
        sys.exit(1)
    return ret
Build.BuildContext.EXPAND_VARIABLES = EXPAND_VARIABLES

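
# Illustrative sketch (not from the original source): variables found in the
# user supplied dict are expanded first, anything still unexpanded falls back
# to ctx.env:
#
#   version = '4.0.0'
#   bld.EXPAND_VARIABLES('samba-${version}.tar', vars=locals())
#       ->  'samba-4.0.0.tar'
#   bld.EXPAND_VARIABLES('${PREFIX}/share/samba')
#       ->  '/usr/local/share/samba'     (assuming PREFIX=/usr/local in bld.env)
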
def RUN_COMMAND(cmd,
                env=None,
                shell=False):
    '''run an external command, return exit code or signal'''
    if env:
        cmd = SUBST_VARS_RECURSIVE(cmd, env)

    status = os.system(cmd)
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)
    if os.WIFSIGNALED(status):
        return - os.WTERMSIG(status)
    Logs.error("Unknown exit reason %d for command: %s" % (status, cmd))
    return -1

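
# Illustrative sketch (not from the original source), on a POSIX system: a
# clean exit maps to the command's exit code, a fatal signal maps to a
# negative signal number:
#
#   RUN_COMMAND('true')          ->  0
#   RUN_COMMAND('exit 3')        ->  3     (normal exit status)
#   RUN_COMMAND('kill -9 $$')    ->  -9    (process terminated by SIGKILL)
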
# make sure we have md5. some systems don't have it
try:
    from hashlib import md5
except:
    try:
        import md5
    except:
        import Constants
        Constants.SIG_NIL = hash('abcd')
        class replace_md5(object):
            def __init__(self):
                self.val = None
            def update(self, val):
                self.val = hash((self.val, val))
            def digest(self):
                return str(self.val)
            def hexdigest(self):
                return self.digest().encode('hex')
        def replace_h_file(filename):
            f = open(filename, 'rb')
            m = replace_md5()
            while (filename):
                filename = f.read(100000)
                m.update(filename)
            f.close()
            return m.digest()
        Utils.md5 = replace_md5
        Task.md5 = replace_md5
        Utils.h_file = replace_h_file

def LOAD_ENVIRONMENT():
    '''load the configuration environment, allowing access to env vars
       from new commands'''
    import Environment
    env = Environment.Environment()
    try:
        env.load('.lock-wscript')
        env.load(env.blddir + '/c4che/default.cache.py')
    except:
        pass
    return env


def IS_NEWER(bld, file1, file2):
    '''return True if file1 is newer than file2'''
    t1 = os.stat(os.path.join(bld.curdir, file1)).st_mtime
    t2 = os.stat(os.path.join(bld.curdir, file2)).st_mtime
    return t1 > t2
Build.BuildContext.IS_NEWER = IS_NEWER

@conf
def RECURSE(ctx, directory):
    '''recurse into a directory, relative to the curdir or top level'''
    try:
        visited_dirs = ctx.visited_dirs
    except:
        visited_dirs = ctx.visited_dirs = set()
    d = os.path.join(ctx.curdir, directory)
    if os.path.exists(d):
        abspath = os.path.abspath(d)
    else:
        abspath = os.path.abspath(os.path.join(Utils.g_module.srcdir, directory))
    ctxclass = ctx.__class__.__name__
    key = ctxclass + ':' + abspath
    if key in visited_dirs:
        # already done it
        return
    visited_dirs.add(key)
    relpath = os_path_relpath(abspath, ctx.curdir)
    if ctxclass == 'Handler':
        return ctx.sub_options(relpath)
    if ctxclass == 'ConfigurationContext':
        return ctx.sub_config(relpath)
    if ctxclass == 'BuildContext':
        return ctx.add_subdirs(relpath)
    Logs.error('Unknown RECURSE context class', ctxclass)
    raise
Options.Handler.RECURSE = RECURSE
Build.BuildContext.RECURSE = RECURSE

def CHECK_MAKEFLAGS(bld):
    '''check for the MAKEFLAGS environment variable in case we are being
    called from a Makefile, and try to honor a few make command line flags'''
    if not 'WAF_MAKE' in os.environ:
        return
    makeflags = os.environ.get('MAKEFLAGS')
    jobs_set = False
    # we need to use shlex.split to cope with the escaping of spaces
    # in makeflags
    for opt in shlex.split(makeflags):
        # options can come either as -x or as x
        if opt[0:2] == 'V=':
            Options.options.verbose = Logs.verbose = int(opt[2:])
            if Logs.verbose > 0:
                Logs.zones = ['runner']
            if Logs.verbose > 2:
                Logs.zones = ['*']
        elif opt[0].isupper() and opt.find('=') != -1:
            loc = opt.find('=')
            setattr(Options.options, opt[0:loc], opt[loc+1:])
        elif opt[0] != '-':
            for v in opt:
                if v == 'j':
                    jobs_set = True
                elif v == 'k':
                    Options.options.keep = True
        elif opt == '-j':
            jobs_set = True
        elif opt == '-k':
            Options.options.keep = True
    if not jobs_set:
        # default to one job
        Options.options.jobs = 1

Build.BuildContext.CHECK_MAKEFLAGS = CHECK_MAKEFLAGS

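
# Illustrative sketch (not from the original source): when the generated
# Makefile sets WAF_MAKE and the user runs 'make -k V=1', make typically
# exports MAKEFLAGS roughly as 'k V=1'; the loop above then enables waf's
# --keep behaviour and verbose runner output.  If no 'j' flag is seen, the
# job count is forced to 1 so make stays in charge of parallelism.
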
option_groups = {}

def option_group(opt, name):
    '''find or create an option group'''
    global option_groups
    if name in option_groups:
        return option_groups[name]
    gr = opt.add_option_group(name)
    option_groups[name] = gr
    return gr
Options.Handler.option_group = option_group


def save_file(filename, contents, create_dir=False):
    '''save data to a file'''
    if create_dir:
        mkdir_p(os.path.dirname(filename))
    try:
        f = open(filename, 'w')
        f.write(contents)
        f.close()
    except:
        return False
    return True


def load_file(filename):
    '''return contents of a file'''
    try:
        f = open(filename, 'r')
        r = f.read()
        f.close()
    except:
        return None
    return r

def reconfigure(ctx):
    '''rerun configure if necessary'''
    import Configure, samba_wildcard, Scripting
    if not os.path.exists(".lock-wscript"):
        raise Utils.WafError('configure has not been run')
    bld = samba_wildcard.fake_build_environment()
    Configure.autoconfig = True
    Scripting.check_configured(bld)


def map_shlib_extension(ctx, name, python=False):
    '''map a filename with a shared library extension of .so to the real shlib name'''
    if name is None:
        return None
    if name[-1:].isdigit():
        # some libraries have specified versions in the wscript rule
        return name
    (root1, ext1) = os.path.splitext(name)
    if python:
        (root2, ext2) = os.path.splitext(ctx.env.pyext_PATTERN)
    else:
        (root2, ext2) = os.path.splitext(ctx.env.shlib_PATTERN)
    return root1+ext2
Build.BuildContext.map_shlib_extension = map_shlib_extension

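
# Illustrative sketch (not from the original source), assuming a Mac OS X
# build where ctx.env.shlib_PATTERN is 'lib%s.dylib':
#
#   bld.map_shlib_extension('libtdb.so')    ->  'libtdb.dylib'
#   bld.map_shlib_extension('libtdb.so.1')  ->  'libtdb.so.1'   (explicit version kept)
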
def make_libname(ctx, name, nolibprefix=False, version=None, python=False):
    """make a library filename
         Options:
              nolibprefix: don't include the lib prefix
              version    : add a version number
              python     : if we should use python module name conventions"""

    if python:
        libname = ctx.env.pyext_PATTERN % name
    else:
        libname = ctx.env.shlib_PATTERN % name
    if nolibprefix and libname[0:3] == 'lib':
        libname = libname[3:]
    if version:
        if version[0] == '.':
            version = version[1:]
        (root, ext) = os.path.splitext(libname)
        if ext == ".dylib":
            # special case - the version goes before the .dylib suffix
            libname = "%s.%s%s" % (root, version, ext)
        else:
            libname = "%s%s.%s" % (root, ext, version)
    return libname
Build.BuildContext.make_libname = make_libname

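
# Illustrative sketch (not from the original source), assuming shlib_PATTERN
# is 'lib%s.so' on Linux and 'lib%s.dylib' on Mac OS X:
#
#   bld.make_libname('talloc')                     ->  'libtalloc.so'
#   bld.make_libname('talloc', version='2.0.1')    ->  'libtalloc.so.2.0.1'
#   bld.make_libname('talloc', version='2.0.1')    ->  'libtalloc.2.0.1.dylib'   (on Mac OS X)
#   bld.make_libname('talloc', nolibprefix=True)   ->  'talloc.so'
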
def get_tgt_list(bld):
    '''return a list of build objects for samba'''

    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    # build a list of task generators we are interested in
    tgt_list = []
    for tgt in targets:
        type = targets[tgt]
        if not type in ['SUBSYSTEM', 'MODULE', 'BINARY', 'LIBRARY', 'ASN1', 'PYTHON']:
            continue
        t = bld.name_to_obj(tgt, bld.env)
        if t is None:
            Logs.error("Target %s of type %s has no task generator" % (tgt, type))
            sys.exit(1)
        tgt_list.append(t)
    return tgt_list