param: rename szPrintcapName -> printcap_name
[Samba.git] / buildtools / wafsamba / samba_utils.py
blob540fe447942bbb2598b4a8e9da18b5cfd21a1581
1 # a waf tool to add autoconf-like macros to the configure section
2 # and for SAMBA_ macros for building libraries, binaries etc
4 import Build, os, sys, Options, Utils, Task, re, fnmatch, Logs
5 from TaskGen import feature, before
6 from Configure import conf, ConfigurationContext
7 from Logs import debug
8 import shlex
# TODO: make this a --option
LIB_PATH="shared"

# sigh, python octal constants are a mess
# (spelled via int(x, 8) so the file parses on both py2 and py3 syntaxes)
MODE_644 = int('644', 8)
MODE_755 = int('755', 8)
@conf
def SET_TARGET_TYPE(ctx, target, value):
    '''record the build type of a target, erroring on re-definition'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    existing = cache.get(target)
    # a target may be overwritten only while it is still 'EMPTY'
    if existing is not None and existing != 'EMPTY':
        Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.curdir, value, existing))
        sys.exit(1)
    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
    debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))
    return True
def GET_TARGET_TYPE(ctx, target):
    '''return the cached build type of a target, or None if unknown'''
    return LOCAL_CACHE(ctx, 'TARGET_TYPE').get(target)
38 ######################################################
39 # this is used as a decorator to make functions only
40 # run once. Based on the idea from
41 # http://stackoverflow.com/questions/815110/is-there-a-decorator-to-simply-cache-function-return-values
def runonce(function):
    '''decorator: memoize by positional-args tuple so the wrapped
    function executes at most once per distinct set of arguments'''
    memo = {}
    def runonce_wrapper(*args):
        if args not in memo:
            memo[args] = function(*args)
        return memo[args]
    return runonce_wrapper
def ADD_LD_LIBRARY_PATH(path):
    '''add a directory to LD_LIBRARY_PATH in os.environ, unless it is
    already present.

    BUGFIX: an unset or empty LD_LIBRARY_PATH now contributes no
    components. Previously ''.split(':') produced [''], so the first
    added path yielded ":<path>" - a leading empty component, which the
    dynamic linker treats as the current directory.
    '''
    oldpath = os.environ.get('LD_LIBRARY_PATH', '')
    # only split when there is something to split, to avoid ['']
    newpath = oldpath.split(':') if oldpath else []
    if path not in newpath:
        newpath.append(path)
    os.environ['LD_LIBRARY_PATH'] = ':'.join(newpath)
def needs_private_lib(bld, target):
    '''return True if any of target's final_libs resolves to a task
    generator marked as a private library'''
    for libname in getattr(target, "final_libs", []):
        obj = bld.name_to_obj(libname, bld.env)
        if obj and getattr(obj, 'private_library', False):
            return True
    return False
def install_rpath(target):
    '''return the rpath entries to embed at install time.

    Clears any build-time RPATH first; adds the library dir, and the
    private library dir when the target links against a private lib.'''
    bld = target.bld
    bld.env['RPATH'] = []
    paths = set()
    if bld.env.RPATH_ON_INSTALL:
        paths.add(bld.EXPAND_VARIABLES(bld.env.LIBDIR))
    if bld.env.RPATH_ON_INSTALL_PRIVATE and needs_private_lib(bld, target):
        paths.add(bld.EXPAND_VARIABLES(bld.env.PRIVATELIBDIR))
    return list(paths)
def build_rpath(bld):
    '''return the rpath list for the build tree.

    When rpaths are disabled for the build, falls back to extending
    LD_LIBRARY_PATH with the shared library directories instead.'''
    bld.env['RPATH'] = []
    shared_dirs = ("shared", "shared/private")
    rpaths = [os.path.normpath(os.path.join(bld.env.BUILD_DIRECTORY, d))
              for d in shared_dirs]
    if bld.env.RPATH_ON_BUILD:
        return rpaths
    for p in rpaths:
        ADD_LD_LIBRARY_PATH(p)
    return []
@conf
def LOCAL_CACHE(ctx, name):
    '''return a named build cache dictionary, creating it on first use;
    used to store state inside other functions'''
    if name not in ctx.env:
        ctx.env[name] = {}
    return ctx.env[name]
@conf
def LOCAL_CACHE_SET(ctx, cachename, key, value):
    '''store key=value in the named local cache'''
    LOCAL_CACHE(ctx, cachename)[key] = value
@conf
def ASSERT(ctx, expression, msg):
    '''a build assert call: abort the build with msg when expression
    is false'''
    if expression:
        return
    raise Utils.WafError("ERROR: %s\n" % msg)
Build.BuildContext.ASSERT = ASSERT
def SUBDIR(bld, subdir, list):
    '''return a space separated string of files, each prefixed with
    subdir (note: the result keeps a trailing space, as callers expect)'''
    out = ''
    for fname in TO_LIST(list):
        out += os.path.normpath(os.path.join(subdir, fname)) + ' '
    return out
Build.BuildContext.SUBDIR = SUBDIR
def dict_concat(d1, d2):
    '''merge entries of d2 into d1 without overwriting existing keys
    (d1 += d2, first writer wins)'''
    for key in d2:
        d1.setdefault(key, d2[key])
def exec_command(self, cmd, **kw):
    '''this overrides the 'waf -v' debug output to be in a nice
    unix like format instead of a python list.
    Thanks to ita on #waf for this'''
    import Utils, Logs
    _cmd = cmd
    if isinstance(cmd, list):
        # display the command shell-style instead of as a python list
        _cmd = ' '.join(cmd)
    debug('runner: %s' % _cmd)
    if self.log:
        # a build log is active: record the command and pass the log on
        self.log.write('%s\n' % cmd)
        kw['log'] = self.log
    try:
        if not kw.get('cwd', None):
            kw['cwd'] = self.cwd
    except AttributeError:
        # self.cwd is not set yet - default to the build directory
        self.cwd = kw['cwd'] = self.bldnode.abspath()
    return Utils.exec_command(cmd, **kw)
Build.BuildContext.exec_command = exec_command
def ADD_COMMAND(opt, name, function):
    '''add a new top level command to waf'''
    # register the function on the top-level wscript module so waf
    # recognises `name` as a command
    Utils.g_module.__dict__[name] = function
    # NOTE(review): this assigns to the literal attribute 'name' on the
    # options handler rather than the attribute named by `name` - looks
    # like it may have meant setattr(opt, name, function); preserved
    # as-is, confirm intent before changing
    opt.name = function
Options.Handler.ADD_COMMAND = ADD_COMMAND
@feature('cc', 'cshlib', 'cprogram')
@before('apply_core','exec_rule')
def process_depends_on(self):
    '''The new depends_on attribute for build rules
    allow us to specify a dependency on output from
    a source generation rule'''
    if getattr(self, 'depends_on', None):
        lst = self.to_list(self.depends_on)
        for x in lst:
            y = self.bld.name_to_obj(x, self.env)
            self.bld.ASSERT(y is not None, "Failed to find dependency %s of %s" % (x, self.name))
            # force the dependency to be processed before this target
            y.post()
            # inherit any extra include paths the dependency exports
            if getattr(y, 'more_includes', None):
                self.includes += " " + y.more_includes
os_path_relpath = getattr(os.path, 'relpath', None)
if os_path_relpath is None:
    # Python < 2.6 does not have os.path.relpath, provide a replacement
    # (imported from Python2.6.5~rc2)
    def os_path_relpath(path, start):
        """Return a relative version of a path"""
        # NOTE: splits on "/" rather than os.sep, so this fallback is
        # POSIX-only
        start_list = os.path.abspath(start).split("/")
        path_list = os.path.abspath(path).split("/")

        # Work out how much of the filepath is shared by start and path.
        i = len(os.path.commonprefix([start_list, path_list]))

        rel_list = ['..'] * (len(start_list)-i) + path_list[i:]
        if not rel_list:
            # NOTE(review): the CPython original returns os.curdir here;
            # this copy returns `start` - confirm which is intended
            return start
        return os.path.join(*rel_list)
def unique_list(seq):
    '''return a copy of seq with duplicates removed, keeping the first
    occurrence of each item in its original position'''
    seen = set()
    out = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            out.append(item)
    return out
def TO_LIST(str, delimiter=None):
    '''Split a list, preserving quoted strings and existing lists

    Returns [] for None or an empty string, a shallow copy for a list
    input, otherwise the string split on `delimiter` (whitespace by
    default). If any element starts with a double quote, the slower
    shlex splitting is used so quoted substrings stay intact.
    '''
    if str is None:
        return []
    if isinstance(str, list):
        # we need to return a new independent list...
        return list(str)
    if len(str) == 0:
        return []
    lst = str.split(delimiter)
    # the string may have had quotes in it, now we
    # check if we did have quotes, and use the slower shlex
    # if we need to
    for e in lst:
        # BUGFIX: use startswith() so an empty element (possible with an
        # explicit delimiter, e.g. "a,,b") no longer raises IndexError
        if e.startswith('"'):
            return shlex.split(str)
    return lst
def subst_vars_error(string, env):
    '''substitute ${VAR} style variables from env, raising KeyError for
    any variable that is not defined'''
    # raw strings: '\$' is an invalid escape sequence in a plain string
    # literal and warns (eventually errors) on modern pythons
    lst = re.split(r'(\$\{\w+\})', string)
    out = []
    for v in lst:
        if re.match(r'\$\{\w+\}', v):
            vname = v[2:-1]
            if not vname in env:
                raise KeyError("Failed to find variable %s in %s" % (vname, string))
            v = env[vname]
        out.append(v)
    return ''.join(out)
@conf
def SUBST_ENV_VAR(ctx, varname):
    '''Substitute an environment variable for any embedded variables'''
    # expand ${VAR} references inside the value of ctx.env[varname];
    # raises KeyError if a referenced variable is undefined
    return subst_vars_error(ctx.env[varname], ctx.env)
Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR
def ENFORCE_GROUP_ORDERING(bld):
    '''enforce group ordering for the project. This
    makes the group ordering apply only when you specify
    a target with --target'''
    if Options.options.compile_targets:
        @feature('*')
        @before('exec_rule', 'apply_core', 'collect')
        def force_previous_groups(self):
            # only do the forcing once for the whole build
            if getattr(self.bld, 'enforced_group_ordering', False):
                return
            self.bld.enforced_group_ordering = True

            def group_name(g):
                # map a group object back to its registered name
                tm = self.bld.task_manager
                return [x for x in tm.groups_names if id(tm.groups_names[x]) == id(g)][0]

            my_id = id(self)
            bld = self.bld
            stop = None
            # locate the group that contains this task generator
            for g in bld.task_manager.groups:
                for t in g.tasks_gen:
                    if id(t) == my_id:
                        stop = id(g)
                        debug('group: Forcing up to group %s for target %s',
                              group_name(g), self.name or self.target)
                        break
                if stop is not None:
                    break
            if stop is None:
                return

            # post every task generator in each group before ours,
            # advancing current_group as we go (NOTE: xrange is py2-only)
            for i in xrange(len(bld.task_manager.groups)):
                g = bld.task_manager.groups[i]
                bld.task_manager.current_group = i
                if id(g) == stop:
                    break
                debug('group: Forcing group %s', group_name(g))
                for t in g.tasks_gen:
                    if not getattr(t, 'forced_groups', False):
                        debug('group: Posting %s', t.name or t.target)
                        # mark first so a re-entrant post() cannot recurse
                        t.forced_groups = True
                        t.post()
Build.BuildContext.ENFORCE_GROUP_ORDERING = ENFORCE_GROUP_ORDERING
def recursive_dirlist(dir, relbase, pattern=None):
    '''recursive directory list

    dir     : directory to walk
    relbase : base directory the returned paths are made relative to
    pattern : optional fnmatch pattern that file names must match

    BUGFIX: the pattern is now propagated into subdirectories; previously
    it was dropped on recursion, so filtering only applied to files in
    the top-level directory.
    '''
    ret = []
    for f in os.listdir(dir):
        f2 = dir + '/' + f
        if os.path.isdir(f2):
            ret.extend(recursive_dirlist(f2, relbase, pattern=pattern))
        else:
            if pattern and not fnmatch.fnmatch(f, pattern):
                continue
            ret.append(os_path_relpath(f2, relbase))
    return ret
def mkdir_p(dir):
    '''like mkdir -p: create dir and all missing parent directories;
    a no-op for an empty path or an already-existing directory'''
    if not dir:
        return
    stripped = dir.rstrip('/')
    if stripped != dir:
        # discard trailing slashes and retry
        mkdir_p(stripped)
        return
    if os.path.isdir(dir):
        return
    # create the parents first, then the leaf
    mkdir_p(os.path.dirname(dir))
    os.mkdir(dir)
def SUBST_VARS_RECURSIVE(string, env):
    '''expand ${VAR} references in string repeatedly (at most 100
    rounds) so that variables whose values themselves contain variables
    get fully expanded; None passes through unchanged'''
    if string is None:
        return string
    remaining = 100
    while remaining > 0 and '${' in string:
        string = subst_vars_error(string, env)
        remaining -= 1
    return string
@conf
def EXPAND_VARIABLES(ctx, varstr, vars=None):
    '''expand variables from a user supplied dictionary

    This is most useful when you pass vars=locals() to expand
    all your local variables in strings
    '''
    # lists are expanded element by element
    if isinstance(varstr, list):
        ret = []
        for s in varstr:
            ret.append(EXPAND_VARIABLES(ctx, s, vars=vars))
        return ret

    # non-strings (e.g. None, ints) pass through unchanged
    if not isinstance(varstr, str):
        return varstr

    import Environment
    env = Environment.Environment()
    ret = varstr
    # substitute on user supplied dict if available
    if vars is not None:
        for v in vars.keys():
            env[v] = vars[v]
        ret = SUBST_VARS_RECURSIVE(ret, env)

    # if anything left, subst on the environment as well
    if ret.find('${') != -1:
        ret = SUBST_VARS_RECURSIVE(ret, ctx.env)
    # make sure there is nothing left. Also check for the common
    # typo of $( instead of ${
    if ret.find('${') != -1 or ret.find('$(') != -1:
        Logs.error('Failed to substitute all variables in varstr=%s' % ret)
        sys.exit(1)
    return ret
Build.BuildContext.EXPAND_VARIABLES = EXPAND_VARIABLES
def RUN_COMMAND(cmd,
                env=None,
                shell=False):
    '''run a external command, return exit code or signal

    Returns the exit status on a normal exit, the negated signal number
    if the command died on a signal, and -1 for an unknown exit reason.
    If env is given, ${VAR} references in cmd are expanded first.
    NOTE(review): the `shell` parameter is accepted but unused here
    (os.system always goes through the shell) - kept for compatibility.
    '''
    if env:
        cmd = SUBST_VARS_RECURSIVE(cmd, env)

    status = os.system(cmd)
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)
    if os.WIFSIGNALED(status):
        return - os.WTERMSIG(status)
    # BUGFIX: the original omitted the % operator ("..." (status, cmd)),
    # which raised TypeError instead of logging the message
    Logs.error("Unknown exit reason %d for command: %s" % (status, cmd))
    return -1
def RUN_PYTHON_TESTS(testfiles, pythonpath=None):
    '''run each test file under every configured python interpreter.

    Returns 0 on success, otherwise the return code of the last failing
    test; failures do not stop the remaining tests from running.'''
    env = LOAD_ENVIRONMENT()
    if pythonpath is None:
        # default to the python dir inside the build tree
        pythonpath = os.path.join(Utils.g_module.blddir, 'python')
    result = 0
    for interp in env.python_interpreters:
        for testfile in testfiles:
            cmd = "PYTHONPATH=%s %s %s" % (pythonpath, interp, testfile)
            print('Running Python test with %s: %s' % (interp, testfile))
            ret = RUN_COMMAND(cmd)
            if ret:
                print('Python test failed: %s' % cmd)
                result = ret
    return result
# make sure we have md5. some systems don't have it
try:
    from hashlib import md5
    # Even if hashlib.md5 exists, it may be unusable.
    # Try to use MD5 function. In FIPS mode this will cause an exception
    # and we'll get to the replacement code
    foo = md5('abcd')
except:
    try:
        import md5
        # repeat the same check here, mere success of import is not enough.
        # Try to use MD5 function. In FIPS mode this will cause an exception
        foo = md5.md5('abcd')
    except:
        import Constants
        # no usable MD5: replace waf's md5-based file signatures with
        # python's builtin hash() - weaker, but functional
        Constants.SIG_NIL = hash('abcd')
        class replace_md5(object):
            # minimal md5-like object built on the builtin hash()
            def __init__(self):
                self.val = None
            def update(self, val):
                # fold each chunk into the running hash value
                self.val = hash((self.val, val))
            def digest(self):
                return str(self.val)
            def hexdigest(self):
                # py2-only: str.encode('hex') does not exist on py3
                return self.digest().encode('hex')
        def replace_h_file(filename):
            # hash a file's contents in 100k chunks using replace_md5;
            # NOTE: `filename` is reused as the read buffer after open()
            f = open(filename, 'rb')
            m = replace_md5()
            while (filename):
                filename = f.read(100000)
                m.update(filename)
            f.close()
            return m.digest()
        Utils.md5 = replace_md5
        Task.md5 = replace_md5
        Utils.h_file = replace_h_file
def LOAD_ENVIRONMENT():
    '''load the configuration environment, allowing access to env vars
    from new commands'''
    import Environment
    env = Environment.Environment()
    try:
        env.load('.lock-wscript')
        env.load(env.blddir + '/c4che/default.cache.py')
    except:
        # deliberate best-effort: if configure has not been run yet,
        # return an empty environment rather than failing
        pass
    return env
def IS_NEWER(bld, file1, file2):
    '''return True if file1 has a more recent mtime than file2;
    both paths are resolved relative to bld.curdir'''
    path1 = os.path.join(bld.curdir, file1)
    path2 = os.path.join(bld.curdir, file2)
    return os.stat(path1).st_mtime > os.stat(path2).st_mtime
461 Build.BuildContext.IS_NEWER = IS_NEWER
@conf
def RECURSE(ctx, directory):
    '''recurse into a directory, relative to the curdir or top level'''
    try:
        visited_dirs = ctx.visited_dirs
    except:
        visited_dirs = ctx.visited_dirs = set()
    d = os.path.join(ctx.curdir, directory)
    if os.path.exists(d):
        abspath = os.path.abspath(d)
    else:
        # fall back to a path relative to the top-level source dir
        abspath = os.path.abspath(os.path.join(Utils.g_module.srcdir, directory))
    ctxclass = ctx.__class__.__name__
    # key on context class + path so each build phase visits a
    # directory at most once
    key = ctxclass + ':' + abspath
    if key in visited_dirs:
        # already done it
        return
    visited_dirs.add(key)
    relpath = os_path_relpath(abspath, ctx.curdir)
    # dispatch to the recursion method appropriate for this context type
    if ctxclass == 'Handler':
        return ctx.sub_options(relpath)
    if ctxclass == 'ConfigurationContext':
        return ctx.sub_config(relpath)
    if ctxclass == 'BuildContext':
        return ctx.add_subdirs(relpath)
    Logs.error('Unknown RECURSE context class', ctxclass)
    # NOTE(review): bare `raise` with no active exception raises a
    # RuntimeError ("No active exception to re-raise") rather than a
    # meaningful error - confirm whether a WafError was intended
    raise
Options.Handler.RECURSE = RECURSE
Build.BuildContext.RECURSE = RECURSE
def CHECK_MAKEFLAGS(bld):
    '''check for MAKEFLAGS environment variable in case we are being
    called from a Makefile try to honor a few make command line flags'''
    if not 'WAF_MAKE' in os.environ:
        return
    makeflags = os.environ.get('MAKEFLAGS')
    if makeflags is None:
        return
    jobs_set = False
    # we need to use shlex.split to cope with the escaping of spaces
    # in makeflags
    for opt in shlex.split(makeflags):
        # options can come either as -x or as x
        if opt[0:2] == 'V=':
            # V=<n> sets the waf/Logs verbosity level
            Options.options.verbose = Logs.verbose = int(opt[2:])
            if Logs.verbose > 0:
                Logs.zones = ['runner']
            if Logs.verbose > 2:
                Logs.zones = ['*']
        elif opt[0].isupper() and opt.find('=') != -1:
            # this allows us to set waf options on the make command line
            # for example, if you do "make FOO=blah", then we set the
            # option 'FOO' in Options.options, to blah. If you look in wafsamba/wscript
            # you will see that the command line accessible options have their dest=
            # set to uppercase, to allow for passing of options from make in this way
            # this is also how "make test TESTS=testpattern" works, and
            # "make VERBOSE=1" as well as things like "make SYMBOLCHECK=1"
            loc = opt.find('=')
            setattr(Options.options, opt[0:loc], opt[loc+1:])
        elif opt[0] != '-':
            # bundled single-letter make flags without a dash, e.g. "kj"
            for v in opt:
                if v == 'j':
                    jobs_set = True
                elif v == 'k':
                    Options.options.keep = True
        elif opt == '-j':
            jobs_set = True
        elif opt == '-k':
            Options.options.keep = True
    if not jobs_set:
        # default to one job
        Options.options.jobs = 1

Build.BuildContext.CHECK_MAKEFLAGS = CHECK_MAKEFLAGS
# module-level registry of option groups, keyed by group name
option_groups = {}

def option_group(opt, name):
    '''find or create an option group with the given name'''
    global option_groups
    gr = option_groups.get(name)
    if gr is None:
        gr = opt.add_option_group(name)
        option_groups[name] = gr
    return gr
Options.Handler.option_group = option_group
def save_file(filename, contents, create_dir=False):
    '''write contents to filename, returning True on success and
    False on any error; optionally create the parent directory first'''
    if create_dir:
        mkdir_p(os.path.dirname(filename))
    try:
        handle = open(filename, 'w')
        handle.write(contents)
        handle.close()
    except:
        return False
    return True
def load_file(filename):
    '''return the contents of filename, or None if it cannot be read'''
    try:
        handle = open(filename, 'r')
        contents = handle.read()
        handle.close()
    except:
        return None
    return contents
def reconfigure(ctx):
    '''rerun configure if necessary'''
    import Configure, samba_wildcard, Scripting
    if not os.path.exists(".lock-wscript"):
        raise Utils.WafError('configure has not been run')
    bld = samba_wildcard.fake_build_environment()
    # let waf re-run configure automatically when it detects a change
    Configure.autoconfig = True
    Scripting.check_configured(bld)
def map_shlib_extension(ctx, name, python=False):
    '''map a filename with a shared library extension of .so to the real shlib name'''
    if name is None:
        return None
    if name[-1:].isdigit():
        # some libraries have specified versions in the wscript rule
        return name
    root = os.path.splitext(name)[0]
    if python:
        return ctx.env.pyext_PATTERN % root
    # graft the platform's real shared-library extension onto the root
    real_ext = os.path.splitext(ctx.env.shlib_PATTERN)[1]
    return root + real_ext
600 Build.BuildContext.map_shlib_extension = map_shlib_extension
def apply_pattern(filename, pattern):
    '''apply a filename pattern to a filename that may have a directory component'''
    dirname, basename = os.path.split(filename)
    if not dirname:
        return pattern % filename
    return os.path.join(dirname, pattern % basename)

def make_libname(ctx, name, nolibprefix=False, version=None, python=False):
    """make a library filename
    Options:
    nolibprefix: don't include the lib prefix
    version : add a version number
    python : if we should use python module name conventions"""
    pattern = ctx.env.pyext_PATTERN if python else ctx.env.shlib_PATTERN
    libname = apply_pattern(name, pattern)
    if nolibprefix and libname.startswith('lib'):
        libname = libname[3:]
    if version:
        # tolerate a leading dot in the version string
        if version[0] == '.':
            version = version[1:]
        root, ext = os.path.splitext(libname)
        if ext == ".dylib":
            # special case - on OS X the version goes before the extension
            libname = "%s.%s%s" % (root, version, ext)
        else:
            libname = "%s%s.%s" % (root, ext, version)
    return libname
633 Build.BuildContext.make_libname = make_libname
def get_tgt_list(bld):
    '''return a list of build objects for samba'''
    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    # the target types we build task generator lists for
    wanted = ('SUBSYSTEM', 'MODULE', 'BINARY', 'LIBRARY', 'ASN1', 'PYTHON')

    tgt_list = []
    for tgt in targets:
        ttype = targets[tgt]
        if ttype not in wanted:
            continue
        t = bld.name_to_obj(tgt, bld.env)
        if t is None:
            Logs.error("Target %s of type %s has no task generator" % (tgt, ttype))
            sys.exit(1)
        tgt_list.append(t)
    return tgt_list
from Constants import WSCRIPT_FILE
def PROCESS_SEPARATE_RULE(self, rule):
    ''' cause waf to process additional script based on `rule'.
    You should have file named wscript_<stage>_rule in the current directory
    where stage is either 'configure' or 'build'
    '''
    ctxclass = self.__class__.__name__
    stage = ''
    if ctxclass == 'ConfigurationContext':
        stage = 'configure'
    elif ctxclass == 'BuildContext':
        stage = 'build'
    file_path = os.path.join(self.curdir, WSCRIPT_FILE+'_'+stage+'_'+rule)
    txt = load_file(file_path)
    if txt:
        dc = {'ctx': self}
        # let the context prepare the namespace if it supports recursion hooks
        if getattr(self.__class__, 'pre_recurse', None):
            dc = self.pre_recurse(txt, file_path, self.curdir)
        # run the extra wscript fragment in the prepared namespace
        exec(compile(txt, file_path, 'exec'), dc)
        if getattr(self.__class__, 'post_recurse', None):
            dc = self.post_recurse(txt, file_path, self.curdir)

Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
def AD_DC_BUILD_IS_ENABLED(self):
    '''return True when the AD DC build is enabled in the configuration'''
    return bool(self.CONFIG_SET('AD_DC_BUILD_IS_ENABLED'))
684 Build.BuildContext.AD_DC_BUILD_IS_ENABLED = AD_DC_BUILD_IS_ENABLED