# buildtools/wafsamba/samba_waf18.py
# compatibility layer for building with more recent waf versions

import os, shlex, sys
from waflib import Build, Configure, Node, Utils, Options, Logs, TaskGen
from waflib import ConfigSet
from waflib.TaskGen import feature, after
from waflib.Configure import conf, ConfigurationContext

from waflib.Tools.flex import decide_ext

# This version of flexfun runs in tsk.get_cwd() as opposed to the
# bld.variant_dir: since input paths are adjusted against tsk.get_cwd(), we
# have to use tsk.get_cwd() for the work directory as well.
def flexfun(tsk):
    env = tsk.env
    bld = tsk.generator.bld
    def to_list(xx):
        if isinstance(xx, str):
            return [xx]
        return xx
    tsk.last_cmd = lst = []
    lst.extend(to_list(env.FLEX))
    lst.extend(to_list(env.FLEXFLAGS))
    inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs]
    if env.FLEX_MSYS:
        inputs = [x.replace(os.sep, '/') for x in inputs]
    lst.extend(inputs)
    lst = [x for x in lst if x]
    txt = bld.cmd_and_log(lst, cwd=tsk.get_cwd(), env=env.env or None, quiet=0)
    tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207

TaskGen.declare_chain(
    name = 'flex',
    rule = flexfun, # issue #854
    ext_in = '.l',
    decider = decide_ext,
)
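
# Minimal usage sketch for the chain above, assuming the flex tool has been
# configured so env.FLEX is set (file names are illustrative): listing a .l
# file among the sources is enough for flexfun to generate the matching C file.
#
#   bld(features='c cprogram', source='lexer.l main.c', target='prog')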

Build.BuildContext.variant = 'default'
Build.CleanContext.variant = 'default'
Build.InstallContext.variant = 'default'
Build.UninstallContext.variant = 'default'
Build.ListContext.variant = 'default'

def abspath(self, env=None):
    if env and hasattr(self, 'children'):
        return self.get_bld().abspath()
    return self.old_abspath()
Node.Node.old_abspath = Node.Node.abspath
Node.Node.abspath = abspath

def bldpath(self, env=None):
    return self.abspath()
    #return self.path_from(self.ctx.bldnode.parent)
Node.Node.bldpath = bldpath

def srcpath(self, env=None):
    return self.abspath()
    #return self.path_from(self.ctx.bldnode.parent)
Node.Node.srcpath = srcpath

def store_fast(self, filename):
    file = open(filename, 'wb')
    data = self.get_merged_dict()
    try:
        Build.cPickle.dump(data, file, -1)
    finally:
        file.close()
ConfigSet.ConfigSet.store_fast = store_fast

def load_fast(self, filename):
    file = open(filename, 'rb')
    try:
        data = Build.cPickle.load(file)
    finally:
        file.close()
    self.table.update(data)
ConfigSet.ConfigSet.load_fast = load_fast
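
# Sketch of the round trip these helpers provide (the cache file name is
# illustrative): store_fast() pickles the merged table, load_fast() feeds a
# pickle back into an existing ConfigSet.
#
#   cfg = ConfigSet.ConfigSet()
#   cfg.FOO = 'bar'
#   cfg.store_fast('.conf-cache')
#   cfg2 = ConfigSet.ConfigSet()
#   cfg2.load_fast('.conf-cache')   # cfg2.FOO == 'bar'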

@feature('c', 'cxx', 'd', 'asm', 'fc', 'includes')
@after('propagate_uselib_vars', 'process_source')
def apply_incpaths(self):
    lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
    self.includes_nodes = lst
    cwdx = getattr(self.bld, 'cwdx', self.bld.bldnode)
    self.env['INCPATHS'] = [x.path_from(cwdx) for x in lst]
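
# Sketch of the effect (paths illustrative): a task generator declared as
#
#   bld(features='c', source='foo.c', target='foo', includes='include')
#
# ends up with 'include' plus anything in env.INCLUDES resolved to nodes in
# includes_nodes, while env.INCPATHS holds the same paths relative to the
# build working directory.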

@conf
def define(self, key, val, quote=True, comment=None):
    assert key and isinstance(key, str)

    if val is None:
        val = ()
    elif isinstance(val, bool):
        val = int(val)

    # waf 1.5
    self.env[key] = val

    if isinstance(val, int) or isinstance(val, float):
        s = '%s=%s'
    else:
        s = quote and '%s="%s"' or '%s=%s'
    app = s % (key, str(val))

    ban = key + '='
    lst = self.env.DEFINES
    for x in lst:
        if x.startswith(ban):
            lst[lst.index(x)] = app
            break
    else:
        self.env.append_value('DEFINES', app)

    self.env.append_unique('define_key', key)

# compat15 removes this but we want to keep it
@conf
def undefine(self, key, from_env=True, comment=None):
    assert key and isinstance(key, str)

    ban = key + '='
    self.env.DEFINES = [x for x in self.env.DEFINES if not x.startswith(ban)]
    self.env.append_unique('define_key', key)
    # waf 1.5
    if from_env:
        self.env[key] = ()
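
# Sketch of how the two wrappers behave from a configure() function (the
# macro name is illustrative):
#
#   conf.define('HAVE_FOO', 1)    # DEFINES gains 'HAVE_FOO=1', conf.env['HAVE_FOO'] = 1
#   conf.undefine('HAVE_FOO')     # the entry is stripped again, conf.env['HAVE_FOO'] = ()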

class ConfigurationContext(Configure.ConfigurationContext):
    def init_dirs(self):
        self.setenv('default')
        self.env.merge_config_header = True
        return super(ConfigurationContext, self).init_dirs()

def find_program_samba(self, *k, **kw):
    # Override the waf default set in the @conf decorator in Configure.py
    if 'mandatory' not in kw:
        kw['mandatory'] = False
    ret = self.find_program_old(*k, **kw)
    return ret
Configure.ConfigurationContext.find_program_old = Configure.ConfigurationContext.find_program
Configure.ConfigurationContext.find_program = find_program_samba
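
# With the override installed, a missing program no longer aborts configure
# unless the caller opts back in (sketch; 'perl' is just an example):
#
#   conf.find_program('perl', var='PERL')                  # soft failure if absent
#   conf.find_program('perl', var='PERL', mandatory=True)  # waf's original fatal behaviour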

Build.BuildContext.ENFORCE_GROUP_ORDERING = Utils.nada
Build.BuildContext.AUTOCLEANUP_STALE_FILES = Utils.nada

@conf
def check(self, *k, **kw):
    '''Override the waf defaults to inject --with-directory options'''

    # match the configuration test with specific options, for example:
    # --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv"
    self.validate_c(kw)

    additional_dirs = []
    if 'msg' in kw:
        msg = kw['msg']
        for x in Options.OptionsContext.parser.parser.option_list:
            if getattr(x, 'match', None) and msg in x.match:
                d = getattr(Options.options, x.dest, '')
                if d:
                    additional_dirs.append(d)

    # we add the additional dirs twice: once for the test data, and again if the compilation test succeeds below
    def add_options_dir(dirs, env):
        for x in dirs:
            if not x in env.CPPPATH:
                env.CPPPATH = [os.path.join(x, 'include')] + env.CPPPATH
            if not x in env.LIBPATH:
                env.LIBPATH = [os.path.join(x, 'lib')] + env.LIBPATH

    add_options_dir(additional_dirs, kw['env'])

    self.start_msg(kw['msg'], **kw)
    ret = None
    try:
        ret = self.run_build(*k, **kw)
    except self.errors.ConfigurationError:
        self.end_msg(kw['errmsg'], 'YELLOW', **kw)
        if Logs.verbose > 1:
            raise
        else:
            self.fatal('The configuration failed')
    else:
        kw['success'] = ret
        # success! time for brandy
        add_options_dir(additional_dirs, self.env)

    ret = self.post_check(*k, **kw)
    if not ret:
        self.end_msg(kw['errmsg'], 'YELLOW', **kw)
        self.fatal('The configuration failed %r' % ret)
    else:
        self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
    return ret
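
# Sketch of the option injection this buys (path and option name are
# illustrative): configuring with
#
#   ./configure --with-libiconv=/opt/iconv
#
# makes any check whose message matches that option also search
# /opt/iconv/include and /opt/iconv/lib, first for the test itself and then,
# if the test compiles, permanently in conf.env.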

@conf
def CHECK_LIBRARY_SUPPORT(conf, rpath=False, version_script=False, msg=None):
    '''see if the platform supports building libraries'''

    if msg is None:
        if rpath:
            msg = "rpath library support"
        else:
            msg = "building library support"

    def build(bld):
        lib_node = bld.srcnode.make_node('libdir/liblc1.c')
        lib_node.parent.mkdir()
        lib_node.write('int lib_func(void) { return 42; }\n', 'w')
        main_node = bld.srcnode.make_node('main.c')
        main_node.write('int lib_func(void);\n'
                        'int main(void) {return !(lib_func() == 42);}', 'w')
        linkflags = []
        if version_script:
            script = bld.srcnode.make_node('ldscript')
            script.write('TEST_1.0A2 { global: *; };\n', 'w')
            linkflags.append('-Wl,--version-script=%s' % script.abspath())
        bld(features='c cshlib', source=lib_node, target='lib1', linkflags=linkflags, name='lib1')
        o = bld(features='c cprogram', source=main_node, target='prog1', uselib_local='lib1')
        if rpath:
            o.rpath = [lib_node.parent.abspath()]
        def run_app(self):
            args = conf.SAMBA_CROSS_ARGS(msg=msg)
            env = dict(os.environ)
            env['LD_LIBRARY_PATH'] = self.inputs[0].parent.abspath() + os.pathsep + env.get('LD_LIBRARY_PATH', '')
            self.generator.bld.cmd_and_log([self.inputs[0].abspath()] + args, env=env)
        o.post()
        bld(rule=run_app, source=o.link_task.outputs[0])

    # ok, so it builds
    try:
        conf.check(build_fun=build, msg='Checking for %s' % msg)
    except conf.errors.ConfigurationError:
        return False
    return True
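
# Sketch of how a configure stage would consume this (variable name is
# illustrative): the helper returns True only when the trial shared library
# builds, links and runs.
#
#   have_rpath = conf.CHECK_LIBRARY_SUPPORT(rpath=True)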

@conf
def CHECK_NEED_LC(conf, msg):
    '''check if we need -lc'''
    def build(bld):
        lib_node = bld.srcnode.make_node('libdir/liblc1.c')
        lib_node.parent.mkdir()
        lib_node.write('#include <stdio.h>\nint lib_func(void) { FILE *f = fopen("foo", "r");}\n', 'w')
        bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc')
    try:
        conf.check(build_fun=build, msg=msg, okmsg='-lc is unnecessary', errmsg='-lc is necessary')
    except conf.errors.ConfigurationError:
        return False
    return True
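
# Usage sketch: the function returns True when the trial shared library links
# cleanly without an explicit -lc, so a caller wanting the flag would negate
# the result (message text is illustrative):
#
#   if not conf.CHECK_NEED_LC(msg='Checking whether -lc must be specified'):
#       pass  # -lc is required on this platform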

# already implemented on "waf -v"
def order(bld, tgt_list):
    return True
Build.BuildContext.check_group_ordering = order

@conf
def CHECK_CFG(self, *k, **kw):
    if 'args' in kw:
        kw['args'] = shlex.split(kw['args'])
    if not 'mandatory' in kw:
        kw['mandatory'] = False
    kw['global_define'] = True
    return self.check_cfg(*k, **kw)
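
# Sketch of a pkg-config style probe through this wrapper (package name is
# illustrative); the args string is split for waf's check_cfg and the probe
# is non-fatal unless mandatory=True is passed:
#
#   conf.CHECK_CFG(package='glib-2.0', args='--cflags --libs',
#                  msg='Checking for glib-2.0', uselib_store='GLIB')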

def cmd_output(cmd, **kw):

    silent = False
    if 'silent' in kw:
        silent = kw['silent']
        del(kw['silent'])

    if 'e' in kw:
        tmp = kw['e']
        del(kw['e'])
        kw['env'] = tmp

    kw['shell'] = isinstance(cmd, str)
    kw['stdout'] = Utils.subprocess.PIPE
    if silent:
        kw['stderr'] = Utils.subprocess.PIPE

    try:
        p = Utils.subprocess.Popen(cmd, **kw)
        output = p.communicate()[0]
    except OSError as e:
        raise ValueError(str(e))

    if p.returncode:
        if not silent:
            msg = "command execution failed: %s -> %r" % (cmd, str(output))
            raise ValueError(msg)
        output = ''
    return output
Utils.cmd_output = cmd_output
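
# Usage sketch (command is illustrative): the helper returns the raw stdout of
# the command; with silent=True a failing command yields '' instead of raising
# ValueError.
#
#   out = Utils.cmd_output(['uname', '-m'], silent=True)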

@TaskGen.feature('c', 'cxx', 'd')
@TaskGen.before('apply_incpaths', 'propagate_uselib_vars')
@TaskGen.after('apply_link', 'process_source')
def apply_uselib_local(self):
    """
    process the uselib_local attribute
    execute after apply_link because of the execution order set on 'link_task'
    """
    env = self.env
    from waflib.Tools.ccroot import stlink_task

    # 1. the case of the libs defined in the project (visit ancestors first)
    # the ancestors external libraries (uselib) will be prepended
    self.uselib = self.to_list(getattr(self, 'uselib', []))
    self.includes = self.to_list(getattr(self, 'includes', []))
    names = self.to_list(getattr(self, 'uselib_local', []))
    get = self.bld.get_tgen_by_name
    seen = set()
    seen_uselib = set()
    tmp = Utils.deque(names) # consume a copy of the list of names
    if tmp:
        if Logs.verbose:
            Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
    while tmp:
        lib_name = tmp.popleft()
        # visit dependencies only once
        if lib_name in seen:
            continue

        y = get(lib_name)
        y.post()
        seen.add(lib_name)

        # object has ancestors to process (shared libraries): add them to the end of the list
        if getattr(y, 'uselib_local', None):
            for x in self.to_list(getattr(y, 'uselib_local', [])):
                obj = get(x)
                obj.post()
                if getattr(obj, 'link_task', None):
                    if not isinstance(obj.link_task, stlink_task):
                        tmp.append(x)

        # link task and flags
        if getattr(y, 'link_task', None):

            link_name = y.target[y.target.rfind(os.sep) + 1:]
            if isinstance(y.link_task, stlink_task):
                env.append_value('STLIB', [link_name])
            else:
                # some linkers can link against programs
                env.append_value('LIB', [link_name])

            # the order
            self.link_task.set_run_after(y.link_task)

            # for the recompilation
            self.link_task.dep_nodes += y.link_task.outputs

            # add the link path too
            tmp_path = y.link_task.outputs[0].parent.bldpath()
            if not tmp_path in env['LIBPATH']:
                env.prepend_value('LIBPATH', [tmp_path])

        # add ancestors uselib too - but only propagate those that have no staticlib defined
        for v in self.to_list(getattr(y, 'uselib', [])):
            if v not in seen_uselib:
                seen_uselib.add(v)
                if not env['STLIB_' + v]:
                    if not v in self.uselib:
                        self.uselib.insert(0, v)

        # if the library task generator provides 'export_includes', add to the include path
        # the export_includes must be a list of paths relative to the other library
        if getattr(y, 'export_includes', None):
            self.includes.extend(y.to_incnodes(y.export_includes))
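
# Sketch of the old-style declarations this method still supports (target
# names are illustrative); the modern spelling uses the 'use' attribute
# instead of 'uselib_local':
#
#   bld(features='c cshlib',   source='util.c', target='util')
#   bld(features='c cprogram', source='main.c', target='prog', uselib_local='util')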

@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib')
@TaskGen.after('apply_link')
def apply_objdeps(self):
    "add the .o files produced by some other object files in the same manner as uselib_local"
    names = getattr(self, 'add_objects', [])
    if not names:
        return
    names = self.to_list(names)

    get = self.bld.get_tgen_by_name
    seen = []
    while names:
        x = names[0]

        # visit dependencies only once
        if x in seen:
            names = names[1:]
            continue

        # object does not exist ?
        y = get(x)

        # object has ancestors to process first ? update the list of names
        if getattr(y, 'add_objects', None):
            added = 0
            lst = y.to_list(y.add_objects)
            lst.reverse()
            for u in lst:
                if u in seen:
                    continue
                added = 1
                names = [u]+names
            if added:
                continue # list of names modified, loop

        # safe to process the current object
        y.post()
        seen.append(x)

        for t in getattr(y, 'compiled_tasks', []):
            self.link_task.inputs.extend(t.outputs)

@TaskGen.after('apply_link')
def process_obj_files(self):
    if not hasattr(self, 'obj_files'):
        return
    for x in self.obj_files:
        node = self.path.find_resource(x)
        self.link_task.inputs.append(node)

@TaskGen.taskgen_method
def add_obj_file(self, file):
    """Small example on how to link object files as if they were source
    obj = bld.create_obj('cc')
    obj.add_obj_file('foo.o')"""
    if not hasattr(self, 'obj_files'):
        self.obj_files = []
    if not 'process_obj_files' in self.meths:
        self.meths.append('process_obj_files')
    self.obj_files.append(file)