# buildtools/wafsamba/samba_utils.py
# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc

import errno
import os, sys, re, fnmatch, shlex, inspect
from optparse import SUPPRESS_HELP
from waflib import Build, Options, Utils, Task, Logs, Configure, Errors, Context
from waflib import Scripting
from waflib.TaskGen import feature, before, after
from waflib.Configure import ConfigurationContext
from waflib.Logs import debug
from waflib import ConfigSet
from waflib.Build import CACHE_SUFFIX

# TODO: make this a --option
LIB_PATH="shared"

# Py3 transition helper function to get a string from a variable that
# may be 'str' or 'bytes'. If 'bytes' then it is decoded using 'utf8'.
# If 'str' is passed it is returned unchanged.
def get_string(bytesorstring):
    tmp = bytesorstring
    if isinstance(bytesorstring, bytes):
        tmp = bytesorstring.decode('utf8')
    elif not isinstance(bytesorstring, str):
        raise ValueError('Expected bytes or string for %s:%s' % (type(bytesorstring), bytesorstring))
    return tmp
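
# Illustrative behaviour, assuming UTF-8 encoded input:
#   get_string(b'ldb')  -> 'ldb'
#   get_string('ldb')   -> 'ldb'
#   get_string(42)      -> raises ValueError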

# sigh, python octal constants are a mess
MODE_644 = int('644', 8)
MODE_744 = int('744', 8)
MODE_755 = int('755', 8)
MODE_777 = int('777', 8)
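
# For example, MODE_755 == 0o755, so os.chmod(path, MODE_755) gives rwxr-xr-x.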


def conf(f):
    # override in order to propagate the argument "mandatory"
    def fun(*k, **kw):
        mandatory = True
        if 'mandatory' in kw:
            mandatory = kw['mandatory']
            del kw['mandatory']

        try:
            return f(*k, **kw)
        except Errors.ConfigurationError:
            if mandatory:
                raise

    fun.__name__ = f.__name__
    if 'mandatory' in inspect.getsource(f):
        fun = f

    setattr(Configure.ConfigurationContext, f.__name__, fun)
    setattr(Build.BuildContext, f.__name__, fun)
    return f
Configure.conf = conf
Configure.conftest = conf
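
# Sketch of the effect: any @conf-wrapped check can be made non-fatal by
# passing mandatory=False (the check name below is hypothetical):
#   ctx.CHECK_SOMETHING(..., mandatory=False)   # returns None instead of
#                                               # raising ConfigurationError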


@conf
def SET_TARGET_TYPE(ctx, target, value):
    '''set the target type of a target'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target in cache and cache[target] != 'EMPTY':
        Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.path.abspath(), value, cache[target]))
        sys.exit(1)
    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
    debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.path.abspath()))
    return True


def GET_TARGET_TYPE(ctx, target):
    '''get target type from cache'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target not in cache:
        return None
    return cache[target]
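
# Rough usage sketch from a wscript (target name is made up):
#   bld.SET_TARGET_TYPE('mylib', 'LIBRARY')
#   GET_TARGET_TYPE(bld, 'mylib')    -> 'LIBRARY'
#   GET_TARGET_TYPE(bld, 'unknown')  -> None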


def ADD_LD_LIBRARY_PATH(path):
    '''add something to LD_LIBRARY_PATH'''
    if 'LD_LIBRARY_PATH' in os.environ:
        oldpath = os.environ['LD_LIBRARY_PATH']
    else:
        oldpath = ''
    newpath = oldpath.split(':')
    if path not in newpath:
        newpath.append(path)
        os.environ['LD_LIBRARY_PATH'] = ':'.join(newpath)
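
# Sketch of the effect (paths are examples only):
#   os.environ['LD_LIBRARY_PATH'] = '/usr/local/lib'
#   ADD_LD_LIBRARY_PATH('/opt/samba/lib')
#   os.environ['LD_LIBRARY_PATH']  -> '/usr/local/lib:/opt/samba/lib'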


def needs_private_lib(bld, target):
    '''return True if a target links to a private library'''
    for lib in getattr(target, "final_libs", []):
        t = bld.get_tgen_by_name(lib)
        if t and getattr(t, 'private_library', False):
            return True
    return False


def install_rpath(target):
    '''the rpath value for installation'''
    bld = target.bld
    bld.env['RPATH'] = []
    ret = set()
    if bld.env.RPATH_ON_INSTALL:
        ret.add(bld.EXPAND_VARIABLES(bld.env.LIBDIR))
    if bld.env.RPATH_ON_INSTALL_PRIVATE and needs_private_lib(bld, target):
        ret.add(bld.EXPAND_VARIABLES(bld.env.PRIVATELIBDIR))
    return list(ret)


def build_rpath(bld):
    '''the rpath value for build'''
    rpaths = [os.path.normpath('%s/%s' % (bld.env.BUILD_DIRECTORY, d)) for d in ("shared", "shared/private")]
    bld.env['RPATH'] = []
    if bld.env.RPATH_ON_BUILD:
        return rpaths
    for rpath in rpaths:
        ADD_LD_LIBRARY_PATH(rpath)
    return []


@conf
def LOCAL_CACHE(ctx, name):
    '''return a named build cache dictionary, used to store
       state inside other functions'''
    if name in ctx.env:
        return ctx.env[name]
    ctx.env[name] = {}
    return ctx.env[name]


@conf
def LOCAL_CACHE_SET(ctx, cachename, key, value):
    '''set a value in a local cache'''
    cache = LOCAL_CACHE(ctx, cachename)
    cache[key] = value


@conf
def ASSERT(ctx, expression, msg):
    '''a build assert call'''
    if not expression:
        raise Errors.WafError("ERROR: %s\n" % msg)
Build.BuildContext.ASSERT = ASSERT


def SUBDIR(bld, subdir, list):
    '''create a list of files by prepending each with a subdir name'''
    ret = ''
    for l in TO_LIST(list):
        ret = ret + os.path.normpath(os.path.join(subdir, l)) + ' '
    return ret
Build.BuildContext.SUBDIR = SUBDIR
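
# Note that SUBDIR returns a space-separated string, not a list, e.g.:
#   bld.SUBDIR('lib/util', 'debug.c util.c')
#       -> 'lib/util/debug.c lib/util/util.c '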


def dict_concat(d1, d2):
    '''concatenate two dictionaries d1 += d2'''
    for t in d2:
        if t not in d1:
            d1[t] = d2[t]
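
# Existing keys in d1 win, e.g.:
#   d1 = {'a': 1}; d2 = {'a': 2, 'b': 3}
#   dict_concat(d1, d2)   # d1 is now {'a': 1, 'b': 3}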


def ADD_COMMAND(opt, name, function):
    '''add a new top level command to waf'''
    Context.g_module.__dict__[name] = function
    opt.name = function
Options.OptionsContext.ADD_COMMAND = ADD_COMMAND


@feature('c', 'cc', 'cshlib', 'cprogram')
@before('apply_core', 'exec_rule')
def process_depends_on(self):
    '''The new depends_on attribute for build rules
       allows us to specify a dependency on output from
       a source generation rule'''
    if getattr(self, 'depends_on', None):
        lst = self.to_list(self.depends_on)
        for x in lst:
            y = self.bld.get_tgen_by_name(x)
            self.bld.ASSERT(y is not None, "Failed to find dependency %s of %s" % (x, self.name))
            y.post()
            if getattr(y, 'more_includes', None):
                self.includes += " " + y.more_includes


def unique_list(seq):
    '''return a uniquified list in the same order as the existing list'''
    seen = {}
    result = []
    for item in seq:
        if item in seen:
            continue
        seen[item] = True
        result.append(item)
    return result
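
# For example: unique_list(['b', 'a', 'b', 'c', 'a']) -> ['b', 'a', 'c']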


def TO_LIST(str, delimiter=None):
    '''Split a list, preserving quoted strings and existing lists'''
    if str is None:
        return []
    if isinstance(str, list):
        # we need to return a new independent list...
        return list(str)
    if len(str) == 0:
        return []
    lst = str.split(delimiter)
    # the string may have had quotes in it, now we
    # check if we did have quotes, and use the slower shlex
    # if we need to
    for e in lst:
        if e[0] == '"':
            return shlex.split(str)
    return lst
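
# Illustrative splitting behaviour:
#   TO_LIST('a b "c d"')  -> ['a', 'b', 'c d']    (quotes trigger shlex.split)
#   TO_LIST(['x', 'y'])   -> ['x', 'y']           (an independent copy)
#   TO_LIST(None)         -> []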


def subst_vars_error(string, env):
    '''substitute vars, throw an error if a variable is not defined'''
    lst = re.split(r'(\$\{\w+\})', string)
    out = []
    for v in lst:
        if re.match(r'\$\{\w+\}', v):
            vname = v[2:-1]
            if vname not in env:
                raise KeyError("Failed to find variable %s in %s in env %s <%s>" % (vname, string, env.__class__, str(env)))
            v = env[vname]
            if isinstance(v, list):
                v = ' '.join(v)
        out.append(v)
    return ''.join(out)
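
# For example, with env['PREFIX'] = '/usr/local':
#   subst_vars_error('${PREFIX}/lib/samba', env) -> '/usr/local/lib/samba'
#   subst_vars_error('${MISSING}/x', env)        -> raises KeyError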


@conf
def SUBST_ENV_VAR(ctx, varname):
    '''Expand any embedded variables in the named environment variable'''
    return subst_vars_error(ctx.env[varname], ctx.env)
Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR


def recursive_dirlist(dir, relbase, pattern=None):
    '''recursive directory list'''
    ret = []
    for f in os.listdir(dir):
        f2 = dir + '/' + f
        if os.path.isdir(f2):
            ret.extend(recursive_dirlist(f2, relbase))
        else:
            if pattern and not fnmatch.fnmatch(f, pattern):
                continue
            ret.append(os.path.relpath(f2, relbase))
    return ret


def symlink(src, dst, force=True):
    """Create a symlink, replacing an existing destination if force is True"""
    try:
        os.symlink(src, dst)
    except OSError as exc:
        if exc.errno == errno.EEXIST and force:
            os.remove(dst)
            os.symlink(src, dst)
        else:
            raise


def mkdir_p(dir):
    '''like mkdir -p'''
    if not dir:
        return
    if dir.endswith("/"):
        mkdir_p(dir[:-1])
        return
    if os.path.isdir(dir):
        return
    mkdir_p(os.path.dirname(dir))
    os.mkdir(dir)


def SUBST_VARS_RECURSIVE(string, env):
    '''recursively expand variables'''
    if string is None:
        return string
    limit = 100
    while (string.find('${') != -1 and limit > 0):
        string = subst_vars_error(string, env)
        limit -= 1
    return string
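
# Nested references are expanded in repeated passes (at most 100), e.g. with
# env['PREFIX'] = '/usr' and env['BINDIR'] = '${PREFIX}/bin':
#   SUBST_VARS_RECURSIVE('${BINDIR}/smbd', env) -> '/usr/bin/smbd'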


@conf
def EXPAND_VARIABLES(ctx, varstr, vars=None):
    '''expand variables from a user supplied dictionary

    This is most useful when you pass vars=locals() to expand
    all your local variables in strings
    '''
    if isinstance(varstr, list):
        ret = []
        for s in varstr:
            ret.append(EXPAND_VARIABLES(ctx, s, vars=vars))
        return ret

    if not isinstance(varstr, str):
        return varstr

    env = ConfigSet.ConfigSet()
    ret = varstr
    # substitute on user supplied dict if available
    if vars is not None:
        for v in vars.keys():
            env[v] = vars[v]
        ret = SUBST_VARS_RECURSIVE(ret, env)

    # if anything left, subst on the environment as well
    if ret.find('${') != -1:
        ret = SUBST_VARS_RECURSIVE(ret, ctx.env)
    # make sure there is nothing left. Also check for the common
    # typo of $( instead of ${
    if ret.find('${') != -1 or ret.find('$(') != -1:
        Logs.error('Failed to substitute all variables in varstr=%s' % ret)
        sys.exit(1)
    return ret
Build.BuildContext.EXPAND_VARIABLES = EXPAND_VARIABLES
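
# Typical use in a wscript (the values here are made up):
#   bindir = '/usr/local/bin'
#   bld.EXPAND_VARIABLES('${bindir}/smbd', vars=locals())  -> '/usr/local/bin/smbd'
#   bld.EXPAND_VARIABLES('${LIBDIR}')   # falls back to bld.env for unknown names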


def RUN_COMMAND(cmd,
                env=None,
                shell=False):
    '''run an external command, return exit code or signal'''
    if env:
        cmd = SUBST_VARS_RECURSIVE(cmd, env)

    status = os.system(cmd)
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)
    if os.WIFSIGNALED(status):
        return - os.WTERMSIG(status)
    Logs.error("Unknown exit reason %d for command: %s" % (status, cmd))
    return -1
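
# Return value sketch (assuming a POSIX shell is available):
#   RUN_COMMAND('true')     -> 0
#   RUN_COMMAND('exit 3')   -> 3
#   a command killed by SIGKILL returns -9 (the negative signal number)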


def RUN_PYTHON_TESTS(testfiles, pythonpath=None, extra_env=None):
    env = LOAD_ENVIRONMENT()
    if pythonpath is None:
        pythonpath = os.path.join(Context.g_module.out, 'python')
    result = 0
    for interp in env.python_interpreters:
        if not isinstance(interp, str):
            interp = ' '.join(interp)
        for testfile in testfiles:
            cmd = "PYTHONPATH=%s %s %s" % (pythonpath, interp, testfile)
            if extra_env:
                for key, value in extra_env.items():
                    cmd = "%s=%s %s" % (key, value, cmd)
            print('Running Python test with %s: %s' % (interp, testfile))
            ret = RUN_COMMAND(cmd)
            if ret:
                print('Python test failed: %s' % cmd)
                result = ret
    return result


# make sure we have md5. some systems don't have it
try:
    from hashlib import md5
    # Even if hashlib.md5 exists, it may be unusable.
    # Try to use MD5 function. In FIPS mode this will cause an exception
    # and we'll get to the replacement code
    foo = md5(b'abcd')
except:
    try:
        import md5
        # repeat the same check here, mere success of import is not enough.
        # Try to use MD5 function. In FIPS mode this will cause an exception
        foo = md5.md5(b'abcd')
    except:
        Context.SIG_NIL = hash('abcd')
        class replace_md5(object):
            def __init__(self):
                self.val = None
            def update(self, val):
                self.val = hash((self.val, val))
            def digest(self):
                return str(self.val)
            def hexdigest(self):
                # hex-encode the digest string (works on Python 3, unlike
                # the old str.encode('hex') idiom)
                return self.digest().encode('utf8').hex()
        def replace_h_file(filename):
            f = open(filename, 'rb')
            m = replace_md5()
            while (filename):
                filename = f.read(100000)
                m.update(filename)
            f.close()
            return m.digest()
        Utils.md5 = replace_md5
        Task.md5 = replace_md5
        Utils.h_file = replace_h_file


def LOAD_ENVIRONMENT():
    '''load the configuration environment, allowing access to env vars
       from new commands'''
    env = ConfigSet.ConfigSet()
    try:
        p = os.path.join(Context.g_module.out, 'c4che/default'+CACHE_SUFFIX)
        env.load(p)
    except (OSError, IOError):
        pass
    return env


def IS_NEWER(bld, file1, file2):
    '''return True if file1 is newer than file2'''
    curdir = bld.path.abspath()
    t1 = os.stat(os.path.join(curdir, file1)).st_mtime
    t2 = os.stat(os.path.join(curdir, file2)).st_mtime
    return t1 > t2
Build.BuildContext.IS_NEWER = IS_NEWER


@conf
def RECURSE(ctx, directory):
    '''recurse into a directory, relative to the curdir or top level'''
    try:
        visited_dirs = ctx.visited_dirs
    except AttributeError:
        visited_dirs = ctx.visited_dirs = set()
    d = os.path.join(ctx.path.abspath(), directory)
    if os.path.exists(d):
        abspath = os.path.abspath(d)
    else:
        abspath = os.path.abspath(os.path.join(Context.g_module.top, directory))
    ctxclass = ctx.__class__.__name__
    key = ctxclass + ':' + abspath
    if key in visited_dirs:
        # already done it
        return
    visited_dirs.add(key)
    relpath = os.path.relpath(abspath, ctx.path.abspath())
    if ctxclass in ['OptionsContext',
                    'ConfigurationContext',
                    'BuildContext',
                    'CleanContext',
                    'InstallContext',
                    'UninstallContext',
                    'ListContext']:
        return ctx.recurse(relpath)
    if 'waflib.extras.compat15' in sys.modules:
        return ctx.recurse(relpath)
    raise Errors.WafError('Unknown RECURSE context class: {}'.format(ctxclass))
Options.OptionsContext.RECURSE = RECURSE
Build.BuildContext.RECURSE = RECURSE


def CHECK_MAKEFLAGS(options):
    '''check for the MAKEFLAGS environment variable; if we are being
       called from a Makefile, try to honor a few make command line flags'''
    if 'WAF_MAKE' not in os.environ:
        return
    makeflags = os.environ.get('MAKEFLAGS')
    if makeflags is None:
        makeflags = ""
    jobs_set = False
    jobs = None
    # we need to use shlex.split to cope with the escaping of spaces
    # in makeflags
    for opt in shlex.split(makeflags):
        # options can come either as -x or as x
        if opt[0:2] == 'V=':
            options.verbose = Logs.verbose = int(opt[2:])
            if Logs.verbose > 0:
                Logs.zones = ['runner']
            if Logs.verbose > 2:
                Logs.zones = ['*']
        elif opt[0].isupper() and opt.find('=') != -1:
            # this allows us to set waf options on the make command line
            # for example, if you do "make FOO=blah", then we set the
            # option 'FOO' in Options.options, to blah. If you look in wafsamba/wscript
            # you will see that the command line accessible options have their dest=
            # set to uppercase, to allow for passing of options from make in this way
            # this is also how "make test TESTS=testpattern" works, and
            # "make VERBOSE=1" as well as things like "make SYMBOLCHECK=1"
            loc = opt.find('=')
            setattr(options, opt[0:loc], opt[loc+1:])
        elif opt[0] != '-':
            for v in opt:
                if re.search(r'j[0-9]*$', v):
                    jobs_set = True
                    jobs = opt.strip('j')
                elif v == 'k':
                    options.keep = True
        elif re.search(r'-j[0-9]*$', opt):
            jobs_set = True
            jobs = opt.strip('-j')
        elif opt == '-k':
            options.keep = True
    if not jobs_set:
        # default to one job
        options.jobs = 1
    elif jobs_set and jobs:
        options.jobs = int(jobs)
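
# Rough sketch of the mapping (MAKEFLAGS contents vary between make versions):
#   make -j4 -k test TESTS=samba3.smb2   (with WAF_MAKE set by the Makefile)
#     -> options.jobs = 4, options.keep = True, options.TESTS = 'samba3.smb2'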


waflib_options_parse_cmd_args = Options.OptionsContext.parse_cmd_args
def wafsamba_options_parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False):
    (options, commands, envvars) = \
        waflib_options_parse_cmd_args(self,
                                      _args=_args,
                                      cwd=cwd,
                                      allow_unknown=allow_unknown)
    CHECK_MAKEFLAGS(options)
    if options.jobs == 1:
        #
        # waflib.Runner.Parallel processes jobs inline if the possible number
        # of jobs is just 1. But (at least in waf <= 2.0.12) it still creates
        # a waflib.Runner.Spawner(), which creates a single
        # waflib.Runner.Consumer() thread that tries to process jobs from the
        # queue.
        #
        # This has strange effects, which are not noticed typically,
        # but at least on AIX python has broken threading and fails
        # in random ways.
        #
        # So we just add a dummy Spawner class.
        class NoOpSpawner(object):
            def __init__(self, master):
                return
        from waflib import Runner
        Runner.Spawner = NoOpSpawner
    return options, commands, envvars
Options.OptionsContext.parse_cmd_args = wafsamba_options_parse_cmd_args


option_groups = {}

def option_group(opt, name):
    '''find or create an option group'''
    global option_groups
    if name in option_groups:
        return option_groups[name]
    gr = opt.add_option_group(name)
    option_groups[name] = gr
    return gr
Options.OptionsContext.option_group = option_group


def save_file(filename, contents, create_dir=False):
    '''save data to a file'''
    if create_dir:
        mkdir_p(os.path.dirname(filename))
    try:
        f = open(filename, 'w')
        f.write(contents)
        f.close()
    except:
        return False
    return True


def load_file(filename):
    '''return contents of a file'''
    try:
        f = open(filename, 'r')
        r = f.read()
        f.close()
    except:
        return None
    return r


def reconfigure(ctx):
    '''rerun configure if necessary'''
    if not os.path.exists(os.environ.get('WAFLOCK', '.lock-wscript')):
        raise Errors.WafError('configure has not been run')
    import samba_wildcard
    bld = samba_wildcard.fake_build_environment()
    Configure.autoconfig = True
    Scripting.check_configured(bld)


def map_shlib_extension(ctx, name, python=False):
    '''map a filename with a shared library extension of .so to the real shlib name'''
    if name is None:
        return None
    if name[-1:].isdigit():
        # some libraries have specified versions in the wscript rule
        return name
    (root1, ext1) = os.path.splitext(name)
    if python:
        return ctx.env.pyext_PATTERN % root1
    else:
        (root2, ext2) = os.path.splitext(ctx.env.cshlib_PATTERN)
        return root1+ext2
Build.BuildContext.map_shlib_extension = map_shlib_extension


def apply_pattern(filename, pattern):
    '''apply a filename pattern to a filename that may have a directory component'''
    dirname = os.path.dirname(filename)
    if not dirname:
        return pattern % filename
    basename = os.path.basename(filename)
    return os.path.join(dirname, pattern % basename)
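
# The pattern is applied to the basename only, e.g.:
#   apply_pattern('bin/shared/tdb', 'lib%s.so') -> 'bin/shared/libtdb.so'
#   apply_pattern('tdb', 'lib%s.so')            -> 'libtdb.so'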


def make_libname(ctx, name, nolibprefix=False, version=None, python=False):
    """make a library filename
       Options:
            nolibprefix: don't include the lib prefix
            version    : add a version number
            python     : if we should use python module name conventions"""

    if python:
        libname = apply_pattern(name, ctx.env.pyext_PATTERN)
    else:
        libname = apply_pattern(name, ctx.env.cshlib_PATTERN)
    if nolibprefix and libname[0:3] == 'lib':
        libname = libname[3:]
    if version:
        if version[0] == '.':
            version = version[1:]
        (root, ext) = os.path.splitext(libname)
        if ext == ".dylib":
            # special case - version goes before the .dylib extension
            libname = "%s.%s%s" % (root, version, ext)
        else:
            libname = "%s%s.%s" % (root, ext, version)
    return libname
Build.BuildContext.make_libname = make_libname
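
# Examples, assuming cshlib_PATTERN is 'lib%s.so' (Linux) or 'lib%s.dylib' (macOS):
#   make_libname(ctx, 'talloc', version='2')       -> 'libtalloc.so.2'
#   make_libname(ctx, 'talloc', nolibprefix=True)  -> 'talloc.so'
#   on macOS: make_libname(ctx, 'talloc', version='2') -> 'libtalloc.2.dylib'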


def get_tgt_list(bld):
    '''return a list of build objects for samba'''

    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    # build a list of task generators we are interested in
    tgt_list = []
    for tgt in targets:
        type = targets[tgt]
        if type not in ['SUBSYSTEM', 'BUILTIN', 'MODULE', 'BINARY', 'LIBRARY', 'PLUGIN', 'ASN1', 'PYTHON']:
            continue
        t = bld.get_tgen_by_name(tgt)
        if t is None:
            Logs.error("Target %s of type %s has no task generator" % (tgt, type))
            sys.exit(1)
        tgt_list.append(t)
    return tgt_list


from waflib.Context import WSCRIPT_FILE
def PROCESS_SEPARATE_RULE(self, rule):
    '''cause waf to process an additional script based on `rule'.
       You should have a file named wscript_<stage>_<rule> in the current
       directory, where <stage> is either 'configure' or 'build'
    '''
    stage = ''
    if isinstance(self, Configure.ConfigurationContext):
        stage = 'configure'
    elif isinstance(self, Build.BuildContext):
        stage = 'build'
    file_path = os.path.join(self.path.abspath(), WSCRIPT_FILE+'_'+stage+'_'+rule)
    node = self.root.find_node(file_path)
    if node:
        try:
            cache = self.recurse_cache
        except AttributeError:
            cache = self.recurse_cache = {}
        if node not in cache:
            cache[node] = True
            self.pre_recurse(node)
            try:
                function_code = node.read('r', None)
                exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
            finally:
                self.post_recurse(node)

Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE


def AD_DC_BUILD_IS_ENABLED(self):
    if self.CONFIG_SET('AD_DC_BUILD_IS_ENABLED'):
        return True
    return False

Build.BuildContext.AD_DC_BUILD_IS_ENABLED = AD_DC_BUILD_IS_ENABLED


@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_lib_vars')
@before('apply_obj_vars')
def samba_before_apply_obj_vars(self):
    """before apply_obj_vars for uselib, this removes the standard paths"""

    def is_standard_libpath(env, path):
        normalized_path = os.path.normpath(path)
        for _path in env.STANDARD_LIBPATH:
            if _path == normalized_path:
                return True
        return False

    v = self.env

    # iterate over copies so removing entries while looping is safe
    for i in list(v['RPATH']):
        if is_standard_libpath(v, i):
            v['RPATH'].remove(i)

    for i in list(v['LIBPATH']):
        if is_standard_libpath(v, i):
            v['LIBPATH'].remove(i)


# Samba options are mostly on by default (administrators and packagers
# specify features to remove, not add), which is why default=True
def samba_add_onoff_option(opt, option, help=(), dest=None, default=True,
                           with_name="with", without_name="without"):
    if default is None:
        default_str = "auto"
    elif default is True:
        default_str = "yes"
    elif default is False:
        default_str = "no"
    else:
        default_str = str(default)

    if help == ():
        help = ("Build with %s support (default=%s)" % (option, default_str))
    if dest is None:
        dest = "with_%s" % option.replace('-', '_')

    with_val = "--%s-%s" % (with_name, option)
    without_val = "--%s-%s" % (without_name, option)

    opt.add_option(with_val, help=help, action="store_true", dest=dest,
                   default=default)
    opt.add_option(without_val, help=SUPPRESS_HELP, action="store_false",
                   dest=dest)
Options.OptionsContext.samba_add_onoff_option = samba_add_onoff_option
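
# For example, in an options() hook:
#   opt.samba_add_onoff_option('ads')
# adds --with-ads (shown in --help) and --without-ads (hidden), both writing
# to Options.options.with_ads, which defaults to True.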