# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc

import os, sys, re, fnmatch, shlex, inspect
from optparse import SUPPRESS_HELP
from waflib import Build, Options, Utils, Task, Logs, Configure, Errors, Context
from waflib.TaskGen import feature, before, after
from waflib.Configure import ConfigurationContext
from waflib.Logs import debug
from waflib import ConfigSet
from waflib.Build import CACHE_SUFFIX

# TODO: make this a --option
LIB_PATH="shared"

# sigh, python octal constants are a mess
MODE_644 = int('644', 8)
MODE_755 = int('755', 8)

def conf(f):
    # override in order to propagate the argument "mandatory"
    def fun(*k, **kw):
        mandatory = True
        if 'mandatory' in kw:
            mandatory = kw['mandatory']
            del kw['mandatory']

        try:
            return f(*k, **kw)
        except Errors.ConfigurationError:
            if mandatory:
                raise

    fun.__name__ = f.__name__
    if 'mandatory' in inspect.getsource(f):
        fun = f

    setattr(Configure.ConfigurationContext, f.__name__, fun)
    setattr(Build.BuildContext, f.__name__, fun)
    return f
Configure.conf = conf
Configure.conftest = conf

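# Illustrative usage (not from the original source): any @conf helper defined
# below inherits the "mandatory" handling above, so callers can make a failed
# configure check non-fatal.  CHECK_FOO is a hypothetical helper:
#
#   @conf
#   def CHECK_FOO(conf):
#       conf.check(header_name='foo.h')
#
#   # in a wscript configure():
#   conf.CHECK_FOO(mandatory=False)   # ConfigurationError is swallowed
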
@conf
def SET_TARGET_TYPE(ctx, target, value):
    '''set the target type of a target'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target in cache and cache[target] != 'EMPTY':
        Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.path.abspath(), value, cache[target]))
        sys.exit(1)
    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
    debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.path.abspath()))
    return True


def GET_TARGET_TYPE(ctx, target):
    '''get target type from cache'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target not in cache:
        return None
    return cache[target]

def ADD_LD_LIBRARY_PATH(path):
    '''add something to LD_LIBRARY_PATH'''
    if 'LD_LIBRARY_PATH' in os.environ:
        oldpath = os.environ['LD_LIBRARY_PATH']
    else:
        oldpath = ''
    newpath = oldpath.split(':')
    if path not in newpath:
        newpath.append(path)
        os.environ['LD_LIBRARY_PATH'] = ':'.join(newpath)

def needs_private_lib(bld, target):
    '''return True if a target links to a private library'''
    for lib in getattr(target, "final_libs", []):
        t = bld.get_tgen_by_name(lib)
        if t and getattr(t, 'private_library', False):
            return True
    return False


def install_rpath(target):
    '''the rpath value for installation'''
    bld = target.bld
    bld.env['RPATH'] = []
    ret = set()
    if bld.env.RPATH_ON_INSTALL:
        ret.add(bld.EXPAND_VARIABLES(bld.env.LIBDIR))
    if bld.env.RPATH_ON_INSTALL_PRIVATE and needs_private_lib(bld, target):
        ret.add(bld.EXPAND_VARIABLES(bld.env.PRIVATELIBDIR))
    return list(ret)

def build_rpath(bld):
    '''the rpath value for build'''
    rpaths = [os.path.normpath('%s/%s' % (bld.env.BUILD_DIRECTORY, d)) for d in ("shared", "shared/private")]
    bld.env['RPATH'] = []
    if bld.env.RPATH_ON_BUILD:
        return rpaths
    for rpath in rpaths:
        ADD_LD_LIBRARY_PATH(rpath)
    return []


@conf
def LOCAL_CACHE(ctx, name):
    '''return a named build cache dictionary, used to store
       state inside other functions'''
    if name in ctx.env:
        return ctx.env[name]
    ctx.env[name] = {}
    return ctx.env[name]

@conf
def LOCAL_CACHE_SET(ctx, cachename, key, value):
    '''set a value in a local cache'''
    cache = LOCAL_CACHE(ctx, cachename)
    cache[key] = value


@conf
def ASSERT(ctx, expression, msg):
    '''a build assert call'''
    if not expression:
        raise Errors.WafError("ERROR: %s\n" % msg)
Build.BuildContext.ASSERT = ASSERT


def SUBDIR(bld, subdir, list):
    '''create a list of files by pre-pending each with a subdir name'''
    ret = ''
    for l in TO_LIST(list):
        ret = ret + os.path.normpath(os.path.join(subdir, l)) + ' '
    return ret
Build.BuildContext.SUBDIR = SUBDIR

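# Illustrative example (not from the original source):
#   bld.SUBDIR('lib/util', 'a.c b.c')  ->  'lib/util/a.c lib/util/b.c '
# i.e. a single space-separated string suitable for a source= argument.
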
def dict_concat(d1, d2):
    '''concatenate two dictionaries d1 += d2'''
    for t in d2:
        if t not in d1:
            d1[t] = d2[t]


def ADD_COMMAND(opt, name, function):
    '''add a new top level command to waf'''
    Context.g_module.__dict__[name] = function
    opt.name = function
Options.OptionsContext.ADD_COMMAND = ADD_COMMAND

@feature('c', 'cc', 'cshlib', 'cprogram')
@before('apply_core', 'exec_rule')
def process_depends_on(self):
    '''The new depends_on attribute for build rules
       allows us to specify a dependency on output from
       a source generation rule'''
    if getattr(self, 'depends_on', None):
        lst = self.to_list(self.depends_on)
        for x in lst:
            y = self.bld.get_tgen_by_name(x)
            self.bld.ASSERT(y is not None, "Failed to find dependency %s of %s" % (x, self.name))
            y.post()
            if getattr(y, 'more_includes', None):
                self.includes += " " + y.more_includes

os_path_relpath = getattr(os.path, 'relpath', None)
if os_path_relpath is None:
    # Python < 2.6 does not have os.path.relpath, provide a replacement
    # (imported from Python2.6.5~rc2)
    def os_path_relpath(path, start):
        """Return a relative version of a path"""
        start_list = os.path.abspath(start).split("/")
        path_list = os.path.abspath(path).split("/")

        # Work out how much of the filepath is shared by start and path.
        i = len(os.path.commonprefix([start_list, path_list]))

        rel_list = ['..'] * (len(start_list)-i) + path_list[i:]
        if not rel_list:
            return start
        return os.path.join(*rel_list)

def unique_list(seq):
    '''return a uniquified list in the same order as the existing list'''
    seen = {}
    result = []
    for item in seq:
        if item in seen:
            continue
        seen[item] = True
        result.append(item)
    return result

def TO_LIST(str, delimiter=None):
    '''Split a list, preserving quoted strings and existing lists'''
    if str is None:
        return []
    if isinstance(str, list):
        # we need to return a new independent list...
        return list(str)
    if len(str) == 0:
        return []
    lst = str.split(delimiter)
    # the string may have had quotes in it, now we
    # check if we did have quotes, and use the slower shlex
    # if we need to
    for e in lst:
        if e[0] == '"':
            return shlex.split(str)
    return lst

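# Illustrative examples (not from the original source):
#   TO_LIST('a b c')      ->  ['a', 'b', 'c']
#   TO_LIST('a "b c" d')  ->  ['a', 'b c', 'd']   (quotes force shlex.split)
#   TO_LIST(['a', 'b'])   ->  a new, independent ['a', 'b']
#   TO_LIST(None)         ->  []
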
def subst_vars_error(string, env):
    '''substitute vars, throw an error if a variable is not defined'''
    lst = re.split(r'(\$\{\w+\})', string)
    out = []
    for v in lst:
        if re.match(r'\$\{\w+\}', v):
            vname = v[2:-1]
            if vname not in env:
                raise KeyError("Failed to find variable %s in %s in env %s <%s>" % (vname, string, env.__class__, str(env)))
            v = env[vname]
            if isinstance(v, list):
                v = ' '.join(v)
        out.append(v)
    return ''.join(out)


@conf
def SUBST_ENV_VAR(ctx, varname):
    '''Substitute an environment variable for any embedded variables'''
    return subst_vars_error(ctx.env[varname], ctx.env)
Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR

def recursive_dirlist(dir, relbase, pattern=None):
    '''recursive directory list'''
    ret = []
    for f in os.listdir(dir):
        f2 = dir + '/' + f
        if os.path.isdir(f2):
            # pass the pattern down so files in subdirectories are filtered too
            ret.extend(recursive_dirlist(f2, relbase, pattern=pattern))
        else:
            if pattern and not fnmatch.fnmatch(f, pattern):
                continue
            ret.append(os_path_relpath(f2, relbase))
    return ret

def mkdir_p(dir):
    '''like mkdir -p'''
    if not dir:
        return
    if dir.endswith("/"):
        mkdir_p(dir[:-1])
        return
    if os.path.isdir(dir):
        return
    mkdir_p(os.path.dirname(dir))
    os.mkdir(dir)


def SUBST_VARS_RECURSIVE(string, env):
    '''recursively expand variables'''
    if string is None:
        return string
    limit = 100
    while (string.find('${') != -1 and limit > 0):
        string = subst_vars_error(string, env)
        limit -= 1
    return string

@conf
def EXPAND_VARIABLES(ctx, varstr, vars=None):
    '''expand variables from a user supplied dictionary

    This is most useful when you pass vars=locals() to expand
    all your local variables in strings
    '''

    if isinstance(varstr, list):
        ret = []
        for s in varstr:
            ret.append(EXPAND_VARIABLES(ctx, s, vars=vars))
        return ret

    if not isinstance(varstr, str):
        return varstr

    env = ConfigSet.ConfigSet()
    ret = varstr
    # substitute on user supplied dict if available
    if vars is not None:
        for v in vars.keys():
            env[v] = vars[v]
        ret = SUBST_VARS_RECURSIVE(ret, env)

    # if anything left, subst on the environment as well
    if ret.find('${') != -1:
        ret = SUBST_VARS_RECURSIVE(ret, ctx.env)
    # make sure there is nothing left. Also check for the common
    # typo of $( instead of ${
    if ret.find('${') != -1 or ret.find('$(') != -1:
        Logs.error('Failed to substitute all variables in varstr=%s' % ret)
        sys.exit(1)
    return ret
Build.BuildContext.EXPAND_VARIABLES = EXPAND_VARIABLES

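# Illustrative example (not from the original source), assuming LIBBASE is a
# variable set in ctx.env; local variables are substituted first, then ctx.env:
#
#   prefix = '/usr/local'
#   bld.EXPAND_VARIABLES('${prefix}/lib/${LIBBASE}', vars=locals())
#   # -> '/usr/local/lib/' + ctx.env.LIBBASE
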
def RUN_COMMAND(cmd,
                env=None,
                shell=False):
    '''run a external command, return exit code or signal'''
    if env:
        cmd = SUBST_VARS_RECURSIVE(cmd, env)

    status = os.system(cmd)
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)
    if os.WIFSIGNALED(status):
        return -os.WTERMSIG(status)
    Logs.error("Unknown exit reason %d for command: %s" % (status, cmd))
    return -1

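# Illustrative examples (not from the original source), on a POSIX system:
#   RUN_COMMAND('false')          ->  1    (normal exit status)
#   RUN_COMMAND('kill -TERM $$')  ->  -15  (negated terminating signal)
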
def RUN_PYTHON_TESTS(testfiles, pythonpath=None, extra_env=None):
    env = LOAD_ENVIRONMENT()
    if pythonpath is None:
        pythonpath = os.path.join(Context.g_module.out, 'python')
    result = 0
    for interp in env.python_interpreters:
        if not isinstance(interp, str):
            interp = ' '.join(interp)
        for testfile in testfiles:
            cmd = "PYTHONPATH=%s %s %s" % (pythonpath, interp, testfile)
            if extra_env:
                for key, value in extra_env.items():
                    cmd = "%s=%s %s" % (key, value, cmd)
            print('Running Python test with %s: %s' % (interp, testfile))
            ret = RUN_COMMAND(cmd)
            if ret:
                print('Python test failed: %s' % cmd)
                result = ret
    return result

# make sure we have md5. some systems don't have it
try:
    from hashlib import md5
    # Even if hashlib.md5 exists, it may be unusable.
    # Try to use MD5 function. In FIPS mode this will cause an exception
    # and we'll get to the replacement code
    foo = md5(b'abcd')
except:
    try:
        import md5
        # repeat the same check here, mere success of import is not enough.
        # Try to use MD5 function. In FIPS mode this will cause an exception
        foo = md5.md5(b'abcd')
    except:
        Context.SIG_NIL = hash('abcd')
        class replace_md5(object):
            def __init__(self):
                self.val = None
            def update(self, val):
                self.val = hash((self.val, val))
            def digest(self):
                return str(self.val)
            def hexdigest(self):
                # str.encode('hex') only exists on Python 2; build the hex
                # representation explicitly so this also works on Python 3
                return ''.join('%02x' % ord(c) for c in self.digest())
        def replace_h_file(filename):
            f = open(filename, 'rb')
            m = replace_md5()
            # feed the file to the hash in 100k chunks until read() returns
            # an empty chunk
            data = True
            while data:
                data = f.read(100000)
                m.update(data)
            f.close()
            return m.digest()
        Utils.md5 = replace_md5
        Task.md5 = replace_md5
        Utils.h_file = replace_h_file

def LOAD_ENVIRONMENT():
    '''load the configuration environment, allowing access to env vars
       from new commands'''
    env = ConfigSet.ConfigSet()
    try:
        p = os.path.join(Context.g_module.out, 'c4che/default'+CACHE_SUFFIX)
        env.load(p)
    except (OSError, IOError):
        pass
    return env

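# Illustrative example (not from the original source): a custom waf command
# can inspect configure-time results without a full build context:
#
#   env = LOAD_ENVIRONMENT()
#   if env.AD_DC_BUILD_IS_ENABLED:   # any variable stored at configure time
#       print('AD DC build is enabled')
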
def IS_NEWER(bld, file1, file2):
    '''return True if file1 is newer than file2'''
    curdir = bld.path.abspath()
    t1 = os.stat(os.path.join(curdir, file1)).st_mtime
    t2 = os.stat(os.path.join(curdir, file2)).st_mtime
    return t1 > t2
Build.BuildContext.IS_NEWER = IS_NEWER

@conf
def RECURSE(ctx, directory):
    '''recurse into a directory, relative to the curdir or top level'''
    try:
        visited_dirs = ctx.visited_dirs
    except AttributeError:
        visited_dirs = ctx.visited_dirs = set()
    d = os.path.join(ctx.path.abspath(), directory)
    if os.path.exists(d):
        abspath = os.path.abspath(d)
    else:
        abspath = os.path.abspath(os.path.join(Context.g_module.top, directory))
    ctxclass = ctx.__class__.__name__
    key = ctxclass + ':' + abspath
    if key in visited_dirs:
        # already done it
        return
    visited_dirs.add(key)
    relpath = os_path_relpath(abspath, ctx.path.abspath())
    if ctxclass in ['tmp', 'OptionsContext', 'ConfigurationContext', 'BuildContext']:
        return ctx.recurse(relpath)
    if 'waflib.extras.compat15' in sys.modules:
        return ctx.recurse(relpath)
    # a bare "raise" here would fail with "no active exception", so raise a
    # proper error for unexpected context classes
    raise Errors.WafError('Unknown RECURSE context class: %s' % ctxclass)
Options.OptionsContext.RECURSE = RECURSE
Build.BuildContext.RECURSE = RECURSE

def CHECK_MAKEFLAGS(bld):
    '''check for MAKEFLAGS environment variable in case we are being
       called from a Makefile try to honor a few make command line flags'''
    if 'WAF_MAKE' not in os.environ:
        return
    makeflags = os.environ.get('MAKEFLAGS')
    if makeflags is None:
        return
    jobs_set = False
    jobs = None
    # we need to use shlex.split to cope with the escaping of spaces
    # in makeflags
    for opt in shlex.split(makeflags):
        # options can come either as -x or as x
        if opt[0:2] == 'V=':
            Options.options.verbose = Logs.verbose = int(opt[2:])
            if Logs.verbose > 0:
                Logs.zones = ['runner']
            if Logs.verbose > 2:
                Logs.zones = ['*']
        elif opt[0].isupper() and opt.find('=') != -1:
            # this allows us to set waf options on the make command line
            # for example, if you do "make FOO=blah", then we set the
            # option 'FOO' in Options.options, to blah. If you look in wafsamba/wscript
            # you will see that the command line accessible options have their dest=
            # set to uppercase, to allow for passing of options from make in this way
            # this is also how "make test TESTS=testpattern" works, and
            # "make VERBOSE=1" as well as things like "make SYMBOLCHECK=1"
            loc = opt.find('=')
            setattr(Options.options, opt[0:loc], opt[loc+1:])
        elif opt[0] != '-':
            for v in opt:
                if re.search(r'j[0-9]*$', v):
                    jobs_set = True
                    jobs = opt.strip('j')
                elif v == 'k':
                    Options.options.keep = True
        elif re.search(r'-j[0-9]*$', opt):
            jobs_set = True
            jobs = opt.strip('-j')
        elif opt == '-k':
            Options.options.keep = True
    if not jobs_set:
        # default to one job
        Options.options.jobs = 1
    elif jobs_set and jobs:
        Options.options.jobs = int(jobs)

Build.BuildContext.CHECK_MAKEFLAGS = CHECK_MAKEFLAGS

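# Illustrative examples (not from the original source), assuming waf is being
# driven by the generated Makefile (WAF_MAKE set) and MAKEFLAGS carries the
# flags shown:
#   make -j4                      ->  Options.options.jobs = 4
#   make test TESTS=samba3.smb2   ->  Options.options.TESTS = 'samba3.smb2'
#   make V=1                      ->  verbose output (Logs.zones = ['runner'])
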
option_groups = {}


def option_group(opt, name):
    '''find or create an option group'''
    global option_groups
    if name in option_groups:
        return option_groups[name]
    gr = opt.add_option_group(name)
    option_groups[name] = gr
    return gr
Options.OptionsContext.option_group = option_group

def save_file(filename, contents, create_dir=False):
    '''save data to a file'''
    if create_dir:
        mkdir_p(os.path.dirname(filename))
    try:
        f = open(filename, 'w')
        f.write(contents)
        f.close()
    except:
        return False
    return True


def load_file(filename):
    '''return contents of a file'''
    try:
        f = open(filename, 'r')
        r = f.read()
        f.close()
    except:
        return None
    return r

def reconfigure(ctx):
    '''rerun configure if necessary'''
    if not os.path.exists(".lock-wscript"):
        raise Errors.WafError('configure has not been run')
    import samba_wildcard
    # Scripting is not imported at module level, pull it in here
    from waflib import Scripting
    bld = samba_wildcard.fake_build_environment()
    Configure.autoconfig = True
    Scripting.check_configured(bld)

def map_shlib_extension(ctx, name, python=False):
    '''map a filename with a shared library extension of .so to the real shlib name'''
    if name is None:
        return None
    if name[-1:].isdigit():
        # some libraries have specified versions in the wscript rule
        return name
    (root1, ext1) = os.path.splitext(name)
    if python:
        return ctx.env.pyext_PATTERN % root1
    else:
        (root2, ext2) = os.path.splitext(ctx.env.cshlib_PATTERN)
        return root1+ext2
Build.BuildContext.map_shlib_extension = map_shlib_extension


def apply_pattern(filename, pattern):
    '''apply a filename pattern to a filename that may have a directory component'''
    dirname = os.path.dirname(filename)
    if not dirname:
        return pattern % filename
    basename = os.path.basename(filename)
    return os.path.join(dirname, pattern % basename)

def make_libname(ctx, name, nolibprefix=False, version=None, python=False):
    """make a library filename
       Options:
           nolibprefix: don't include the lib prefix
           version    : add a version number
           python     : if we should use python module name conventions"""

    if python:
        libname = apply_pattern(name, ctx.env.pyext_PATTERN)
    else:
        libname = apply_pattern(name, ctx.env.cshlib_PATTERN)
    if nolibprefix and libname[0:3] == 'lib':
        libname = libname[3:]
    if version:
        if version[0] == '.':
            version = version[1:]
        (root, ext) = os.path.splitext(libname)
        if ext == ".dylib":
            # special case - the version goes before the .dylib suffix
            libname = "%s.%s%s" % (root, version, ext)
        else:
            libname = "%s%s.%s" % (root, ext, version)
    return libname
Build.BuildContext.make_libname = make_libname

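# Illustrative examples (not from the original source), assuming a typical
# Linux cshlib_PATTERN of 'lib%s.so':
#   make_libname(ctx, 'talloc')                    ->  'libtalloc.so'
#   make_libname(ctx, 'talloc', version='2')       ->  'libtalloc.so.2'
#   make_libname(ctx, 'talloc', nolibprefix=True)  ->  'talloc.so'
# With a cshlib_PATTERN of 'lib%s.dylib' the version goes before the suffix:
#   make_libname(ctx, 'talloc', version='2')       ->  'libtalloc.2.dylib'
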
def get_tgt_list(bld):
    '''return a list of build objects for samba'''

    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    # build a list of task generators we are interested in
    tgt_list = []
    for tgt in targets:
        type = targets[tgt]
        if type not in ['SUBSYSTEM', 'MODULE', 'BINARY', 'LIBRARY', 'ASN1', 'PYTHON']:
            continue
        t = bld.get_tgen_by_name(tgt)
        if t is None:
            Logs.error("Target %s of type %s has no task generator" % (tgt, type))
            sys.exit(1)
        tgt_list.append(t)
    return tgt_list

from waflib.Context import WSCRIPT_FILE
def PROCESS_SEPARATE_RULE(self, rule):
    '''cause waf to process an additional script based on `rule'.
       You should have a file named wscript_<stage>_<rule> in the current
       directory, where stage is either 'configure' or 'build'
    '''
    stage = ''
    if isinstance(self, Configure.ConfigurationContext):
        stage = 'configure'
    elif isinstance(self, Build.BuildContext):
        stage = 'build'
    file_path = os.path.join(self.path.abspath(), WSCRIPT_FILE+'_'+stage+'_'+rule)
    node = self.root.find_node(file_path)
    if node:
        try:
            cache = self.recurse_cache
        except AttributeError:
            cache = self.recurse_cache = {}
        if node not in cache:
            cache[node] = True
            self.pre_recurse(node)
            try:
                function_code = node.read('rU', None)
                exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
            finally:
                self.post_recurse(node)

Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE

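# Illustrative example (not from the original source): calling
#   bld.PROCESS_SEPARATE_RULE('foo')
# from a wscript_build makes waf execute 'wscript_build_foo' from the same
# directory, while conf.PROCESS_SEPARATE_RULE('foo') executes
# 'wscript_configure_foo' ('foo' is a hypothetical rule name).
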
def AD_DC_BUILD_IS_ENABLED(self):
    if self.CONFIG_SET('AD_DC_BUILD_IS_ENABLED'):
        return True
    return False

Build.BuildContext.AD_DC_BUILD_IS_ENABLED = AD_DC_BUILD_IS_ENABLED

@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_lib_vars')
@before('apply_obj_vars')
def samba_before_apply_obj_vars(self):
    """before apply_obj_vars for uselib, this removes the standard paths"""

    def is_standard_libpath(env, path):
        for _path in env.STANDARD_LIBPATH:
            if _path == os.path.normpath(path):
                return True
        return False

    v = self.env

    # iterate over copies of the lists so that removing entries does not
    # skip elements of the list being mutated
    for i in list(v['RPATH']):
        if is_standard_libpath(v, i):
            v['RPATH'].remove(i)

    for i in list(v['LIBPATH']):
        if is_standard_libpath(v, i):
            v['LIBPATH'].remove(i)

def samba_add_onoff_option(opt, option, help=(), dest=None, default=True,
                           with_name="with", without_name="without"):
    if default is None:
        default_str = "auto"
    elif default is True:
        default_str = "yes"
    elif default is False:
        default_str = "no"
    else:
        default_str = str(default)

    if help == ():
        help = ("Build with %s support (default=%s)" % (option, default_str))
    if dest is None:
        dest = "with_%s" % option.replace('-', '_')

    with_val = "--%s-%s" % (with_name, option)
    without_val = "--%s-%s" % (without_name, option)

    opt.add_option(with_val, help=help, action="store_true", dest=dest,
                   default=default)
    opt.add_option(without_val, help=SUPPRESS_HELP, action="store_false",
                   dest=dest)
Options.OptionsContext.samba_add_onoff_option = samba_add_onoff_option
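
# Illustrative example (not from the original source):
#   opt.samba_add_onoff_option('gpgme', default=True)
# adds a visible '--with-gpgme' option plus a hidden '--without-gpgme'
# option, both stored as Options.options.with_gpgme (default True).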