3 # Thomas Nagy, 2006-2018 (ita)
6 C/C++ preprocessor for finding dependencies
8 Reasons for using the Waf preprocessor by default
10 #. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files)
11 #. Not all compilers provide .d files for obtaining the dependencies (portability)
12 #. A naive file scanner will not catch the constructs such as "#include foo()"
13 #. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything)
15 Regarding the speed concerns:
17 * the preprocessing is performed only when files must be compiled
18 * the macros are evaluated only for #if/#elif/#include
19 * system headers are not scanned by default
21 Now if you do not want the Waf preprocessor, the tool *gccdeps* uses the .d files produced
22 during the compilation to track the dependencies (useful when used with the boost libraries).
23 It only works with gcc >= 4.4 though.
25 A dumb preprocessor is also available in the tool *c_dumbpreproc*
27 # TODO: more varargs, pragma once
29 import re
, string
, traceback
30 from waflib
import Logs
, Utils
, Errors
class PreprocError(Errors.WafError):
	"""Raised when preprocessing fails: invalid tokens, unparsable includes/defines,
	missing macros in #if expressions, or the include recursion limit being exceeded."""
	pass
# Upper bounds for the two per-context LRU caches used by c_parser:
# one for header-lookup results, one for preprocessed line lists.
FILE_CACHE_SIZE = 100000
LINE_CACHE_SIZE = 100000
39 "Constant representing a special token used in :py:meth:`waflib.Tools.c_preproc.c_parser.start` iteration to switch to a header read previously"
42 "Limit on the amount of files to read in the dependency scanner"
45 "Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)"
47 standard_includes
= ['/usr/local/include', '/usr/include']
49 standard_includes
= []
52 """Apply trigraph rules (False by default)"""
54 # obsolete, do not use
70 """Operators such as and/or/xor for c++. Set an empty dict to disable."""
72 # ignore #warning and #error
# Capture (directive keyword, rest-of-line) for every preprocessor line.
# '%:' is the digraph spelling of '#'; IGNORECASE tolerates non-standard
# casing and the trailing \r* strips DOS line endings.
re_lines = re.compile(
	r'^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
	re.IGNORECASE | re.MULTILINE)
"""Match preprocessor directive lines (keyword, arguments)"""
# Raw string: "\w" in a plain string is an invalid escape sequence and
# raises SyntaxWarning on Python >= 3.12.
re_mac = re.compile(r"^[a-zA-Z_]\w*")
"""Match the identifier at the start of a macro definition"""
# A function-like macro is an identifier immediately followed by '(' (no space).
re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
"""Match macro functions"""
# Raw string: "\s" in a plain string is an invalid escape sequence and
# raises SyntaxWarning on Python >= 3.12.
re_pragma_once = re.compile(r'^\s*once\s*', re.IGNORECASE)
"""Match #pragma once statements"""
# Remove backslash-newline line continuations (optionally with a CR before the LF)
# so that a logical preprocessor line becomes a single physical line.
re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
"""Match newlines escaped by a backslash"""
# Match, in one pass: line comments, block comments, character literals and
# string literals (the literal alternatives keep quoted text from being
# mistaken for a comment opener).
re_cpp = re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"', re.DOTALL | re.MULTILINE)
"""Filter C/C++ comments"""
# Each C trigraph is '??' followed by one punctuation character; pair it with
# the single character it stands for.
trig_def = [('??' + src, dst) for src, dst in zip("=-/!'()<>", r'#~\|^[]{}')]
"""Trigraph definitions"""
# Map a C escape letter (the character after the backslash) to the ordinal
# value of the character it denotes.
chr_esc = {'0': 0, 'a': 7, 'b': 8, 't': 9, 'n': 10, 'f': 11, 'v': 12, 'r': 13, '\\': 92, "'": 39}
"""Escape characters"""
106 """Identifier token"""
112 """Character token"""
# Token-type tags, in the same order as the regexps in exp_types below; the
# two lists are zipped to build the named groups of re_clexer.
tok_types = [NUM, STR, IDENT, OP]
118 r
"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",
119 r
'L?"([^"\\]|\\.)*"',
121 r
'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',
123 """Expression types"""
# One alternation with a named group per token type: the group name that
# matched tells the tokenizer which kind of token it found.
re_clexer = re.compile('|'.join("(?P<%s>%s)" % (tag, rex) for tag, rex in zip(tok_types, exp_types)), re.M)
"""Match expressions into tokens"""
129 """Parser state is *accepted*"""
132 """Parser state is *ignored*, for example preprocessor lines in an #if 0 block"""
135 """Parser state is *undefined* at the moment"""
138 """Parser state is *skipped*, for example preprocessor lines in a #elif 0 block"""
141 """Replace function used with :py:attr:`waflib.Tools.c_preproc.re_cpp`"""
149 Operator precedence rules required for parsing expressions of the form::
153 ops
= ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
154 for x
, syms
in enumerate(ops
):
155 for u
in syms
.split():
158 def reduce_nums(val_1
, val_2
, val_op
):
160 Apply arithmetic rules to compute a result
162 :param val1: input parameter
163 :type val1: int or string
164 :param val2: input parameter
165 :type val2: int or string
166 :param val_op: C operator in *+*, */*, *-*, etc
170 #print val_1, val_2, val_op
172 # now perform the operation, make certain a and b are numeric
197 elif d
=='|' or d
== 'bitor':
199 elif d
=='||' or d
== 'or' :
201 elif d
=='&' or d
== 'bitand':
203 elif d
=='&&' or d
== 'and':
205 elif d
=='!=' or d
== 'not_eq':
207 elif d
=='^' or d
== 'xor':
227 Try to obtain a number from a list of tokens. The token types are defined in :py:attr:`waflib.Tools.ccroot.tok_types`.
229 :param lst: list of preprocessor tokens
230 :type lst: list of tuple (tokentype, value)
231 :return: a pair containing the number and the rest of the list
232 :rtype: tuple(value, list)
235 raise PreprocError('empty list for get_num')
253 raise PreprocError('rparen expected %r' % lst
)
255 (num
, _
) = get_term(lst
[1:i
])
256 return (num
, lst
[i
+1:])
259 return get_num(lst
[1:])
261 num
, lst
= get_num(lst
[1:])
262 return (reduce_nums('-1', num
, '*'), lst
)
264 num
, lst
= get_num(lst
[1:])
265 return (int(not int(num
)), lst
)
267 num
, lst
= get_num(lst
[1:])
268 return (~
int(num
), lst
)
270 raise PreprocError('Invalid op token %r for get_num' % lst
)
274 # all macros should have been replaced, remaining identifiers eval to 0
277 raise PreprocError('Invalid token %r for get_num' % lst
)
281 Evaluate an expression recursively, for example::
285 :param lst: list of tokens
286 :type lst: list of tuple(token, value)
287 :return: the value and the remaining tokens
292 raise PreprocError('empty list for get_term')
293 num
, lst
= get_num(lst
)
300 return get_term(lst
[1:])
317 raise PreprocError('rparen expected %r' % lst
)
320 return get_term(lst
[1:i
])
322 return get_term(lst
[i
+1:])
325 num2
, lst
= get_num(lst
[1:])
328 # no more tokens to process
329 num2
= reduce_nums(num
, num2
, v
)
330 return get_term([(NUM
, num2
)] + lst
)
332 # operator precedence
335 raise PreprocError('op expected %r' % lst
)
337 if prec
[v2
] >= prec
[v
]:
338 num2
= reduce_nums(num
, num2
, v
)
339 return get_term([(NUM
, num2
)] + lst
)
341 num3
, lst
= get_num(lst
[1:])
342 num3
= reduce_nums(num2
, num3
, v2
)
343 return get_term([(NUM
, num
), (p
, v
), (NUM
, num3
)] + lst
)
346 raise PreprocError('cannot reduce %r' % lst
)
348 def reduce_eval(lst
):
350 Take a list of tokens and output true or false for #if/#elif conditions.
352 :param lst: a list of tokens
353 :type lst: list of tuple(token, value)
355 :rtype: tuple(NUM, int)
357 num
, lst
= get_term(lst
)
362 Merge a list of tokens into a string
364 :param lst: a list of tokens
365 :type lst: list of tuple(token, value)
368 lst
= [str(v2
) for (p2
, v2
) in lst
]
371 def paste_tokens(t1
, t2
):
373 Token pasting works between identifiers, particular operators, and identifiers and numbers::
380 :type t1: tuple(type, value)
382 :type t2: tuple(type, value)
385 if t1
[0] == OP
and t2
[0] == OP
:
387 elif t1
[0] == IDENT
and (t2
[0] == IDENT
or t2
[0] == NUM
):
389 elif t1
[0] == NUM
and t2
[0] == NUM
:
392 raise PreprocError('tokens do not make a valid paste %r and %r' % (t1
, t2
))
393 return (p1
, t1
[1] + t2
[1])
395 def reduce_tokens(lst
, defs
, ban
=[]):
397 Replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied
399 :param lst: list of tokens
400 :type lst: list of tuple(token, value)
401 :param defs: macro definitions
403 :param ban: macros that cannot be substituted (recursion is not allowed)
404 :type ban: list of string
405 :return: the new list of tokens
413 if p
== IDENT
and v
== "defined":
422 elif p2
== OP
and v2
== '(':
425 del lst
[i
] # remove the ident, and change the ) for the value
431 raise PreprocError('Invalid define expression %r' % lst
)
433 elif p
== IDENT
and v
in defs
:
435 if isinstance(defs
[v
], str):
436 a
, b
= extract_macro(defs
[v
])
439 to_add
= macro_def
[1]
441 if isinstance(macro_def
[0], list):
442 # macro without arguments
445 reduce_tokens(accu
, defs
, ban
+[v
])
450 # collect the arguments for the funcall
456 raise PreprocError('expected ( after %r (got nothing)' % v
)
459 if p2
!= OP
or v2
!= '(':
460 raise PreprocError('expected ( after %r' % v
)
470 if p2
== OP
and count_paren
== 0:
472 one_param
.append((p2
, v2
))
476 args
.append(one_param
)
480 raise PreprocError('empty param in funcall %r' % v
)
481 args
.append(one_param
)
484 one_param
.append((p2
, v2
))
486 one_param
.append((p2
, v2
))
492 raise PreprocError('malformed macro')
494 # substitute the arguments within the define expression
496 arg_table
= macro_def
[0]
498 while j
< len(to_add
):
501 if p2
== OP
and v2
== '#':
502 # stringize is for arguments only
503 if j
+1 < len(to_add
) and to_add
[j
+1][0] == IDENT
and to_add
[j
+1][1] in arg_table
:
504 toks
= args
[arg_table
[to_add
[j
+1][1]]]
505 accu
.append((STR
, stringize(toks
)))
508 accu
.append((p2
, v2
))
509 elif p2
== OP
and v2
== '##':
510 # token pasting, how can man invent such a complicated system?
511 if accu
and j
+1 < len(to_add
):
512 # we have at least two tokens
516 if to_add
[j
+1][0] == IDENT
and to_add
[j
+1][1] in arg_table
:
517 toks
= args
[arg_table
[to_add
[j
+1][1]]]
520 accu
[-1] = paste_tokens(t1
, toks
[0]) #(IDENT, accu[-1][1] + toks[0][1])
521 accu
.extend(toks
[1:])
524 accu
.append((p2
, v2
))
526 elif to_add
[j
+1][0] == IDENT
and to_add
[j
+1][1] == '__VA_ARGS__':
527 # first collect the tokens
529 st
= len(macro_def
[0])
531 for x
in args
[pt
-st
+1:]:
533 va_toks
.append((OP
, ','))
535 va_toks
.pop() # extra comma
540 # remove the token paste
542 if v4
== ',' and pt
< st
:
547 accu
[-1] = paste_tokens(t1
, to_add
[j
+1])
551 # Invalid paste, case "##a" or "b##"
552 accu
.append((p2
, v2
))
554 elif p2
== IDENT
and v2
in arg_table
:
555 toks
= args
[arg_table
[v2
]]
556 reduce_tokens(toks
, defs
, ban
+[v
])
559 accu
.append((p2
, v2
))
564 reduce_tokens(accu
, defs
, ban
+[v
])
566 for x
in range(len(accu
)-1, -1, -1):
567 lst
.insert(i
, accu
[x
])
572 def eval_macro(lst
, defs
):
574 Reduce the tokens by :py:func:`waflib.Tools.c_preproc.reduce_tokens` and try to return a 0/1 result by :py:func:`waflib.Tools.c_preproc.reduce_eval`.
576 :param lst: list of tokens
577 :type lst: list of tuple(token, value)
578 :param defs: macro definitions
582 reduce_tokens(lst
, defs
, [])
584 raise PreprocError('missing tokens to evaluate')
588 if p
== IDENT
and v
not in defs
:
589 raise PreprocError('missing macro %r' % lst
)
591 p
, v
= reduce_eval(lst
)
594 def extract_macro(txt
):
596 Process a macro definition of the form::
597 #define f(x, y) x * y
599 into a function or a simple macro without arguments
601 :param txt: expression to exact a macro definition from
603 :return: a tuple containing the name, the list of arguments and the replacement
604 :rtype: tuple(string, [list, list])
607 if re_fun
.search(txt
):
612 raise PreprocError('expected (')
628 elif p
== OP
and v
== ')':
631 raise PreprocError('unexpected token (3)')
633 if p
== OP
and v
== ',':
635 elif p
== OP
and v
== ')':
638 raise PreprocError('comma or ... expected')
644 elif p
== OP
and v
== '...':
645 raise PreprocError('not implemented (1)')
647 raise PreprocError('comma or ... expected (2)')
649 raise PreprocError('not implemented (2)')
651 raise PreprocError('unexpected else')
653 #~ print (name, [params, t[i+1:]])
654 return (name
, [params
, t
[i
+1:]])
658 return (v
, [[], t
[1:]])
660 # empty define, assign an empty token
661 return (v
, [[], [('T','')]])
# Raw string: "\s" in a plain string is an invalid escape sequence and
# raises SyntaxWarning on Python >= 3.12. Captures the bracketed/quoted
# include target including its delimiters.
re_include = re.compile(r'^\s*(<(?:.*)>|"(?:.*)")')
664 def extract_include(txt
, defs
):
666 Process a line in the form::
670 :param txt: include line to process
672 :param defs: macro definitions
674 :return: the file name
677 m
= re_include
.search(txt
)
680 return txt
[0], txt
[1:-1]
682 # perform preprocessing and look at the result, it must match an include
684 reduce_tokens(toks
, defs
, ['waf_include'])
687 raise PreprocError('could not parse include %r' % txt
)
690 if toks
[0][0] == STR
:
691 return '"', toks
[0][1]
693 if toks
[0][1] == '<' and toks
[-1][1] == '>':
694 ret
= '<', stringize(toks
).lstrip('<').rstrip('>')
697 raise PreprocError('could not parse include %r' % txt
)
703 :param txt: character to parse
705 :return: a character literal
710 raise PreprocError('attempted to parse a null char')
715 if len(txt
) == 4 and txt
[3] in string
.hexdigits
:
716 return int(txt
[2:], 16)
717 return int(txt
[2:], 16)
719 if c
== '0' and len(txt
)==2:
722 if len(txt
) > i
and txt
[1:1+i
].isdigit():
723 return (1+i
, int(txt
[1:1+i
], 8))
728 raise PreprocError('could not parse char literal %r' % txt
)
732 Convert a string into a list of tokens (shlex.split does not apply to c/c++/d)
734 :param s: input to tokenize
736 :return: a list of tokens
737 :rtype: list of tuple(token, value)
739 return tokenize_private(s
)[:] # force a copy of the results
741 def tokenize_private(s
):
743 for match
in re_clexer
.finditer(s
):
745 for name
in tok_types
:
751 elif v
.lower() == "true":
754 elif v
.lower() == "false":
761 v
= int(m('hex'), 16)
769 v
= m('n2') or m('n4')
776 # remove the quotes around the string
778 ret
.append((name
, v
))
782 def format_defines(lst
):
788 # "-DFOO" should give "#define FOO 1"
791 # all others are assumed to be -DX=Y
792 ret
.append('%s %s' % (y
[:pos
], y
[pos
+1:]))
794 raise ValueError('Invalid define expression %r' % y
)
797 class c_parser(object):
799 Used by :py:func:`waflib.Tools.c_preproc.scan` to parse c/h files. Note that by default,
800 only project headers are parsed.
802 def __init__(self
, nodepaths
=None, defines
=None):
804 """list of lines read"""
809 self
.defs
= dict(defines
) # make a copy
813 self
.currentnode_stack
= []
815 self
.nodepaths
= nodepaths
or []
819 """List of :py:class:`waflib.Node.Node` found so far"""
822 """List of file names that could not be matched by any file"""
827 self
.ban_includes
= set()
828 """Includes that must not be read (#pragma once)"""
831 """Include nodes/names already listed to avoid duplicates in self.nodes/self.names"""
833 def cached_find_resource(self
, node
, filename
):
835 Find a file from the input directory
837 :param node: directory
838 :type node: :py:class:`waflib.Node.Node`
839 :param filename: header to find
840 :type filename: string
841 :return: the node if found, or None
842 :rtype: :py:class:`waflib.Node.Node`
845 cache
= node
.ctx
.preproc_cache_node
846 except AttributeError:
847 cache
= node
.ctx
.preproc_cache_node
= Utils
.lru_cache(FILE_CACHE_SIZE
)
849 key
= (node
, filename
)
853 ret
= node
.find_resource(filename
)
855 if getattr(ret
, 'children', None):
857 elif ret
.is_child_of(node
.ctx
.bldnode
):
858 tmp
= node
.ctx
.srcnode
.search_node(ret
.path_from(node
.ctx
.bldnode
))
859 if tmp
and getattr(tmp
, 'children', None):
864 def tryfind(self
, filename
, kind
='"', env
=None):
866 Try to obtain a node from the filename based from the include paths. Will add
867 the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to
868 :py:attr:`waflib.Tools.c_preproc.c_parser.names` if no corresponding file is found. Called by
869 :py:attr:`waflib.Tools.c_preproc.c_parser.start`.
871 :param filename: header to find
872 :type filename: string
873 :return: the node if found
874 :rtype: :py:class:`waflib.Node.Node`
876 if filename
.endswith('.moc'):
877 # we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated
878 # in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient.
879 self
.names
.append(filename
)
882 self
.curfile
= filename
887 for n
in reversed(self
.currentnode_stack
):
888 found
= self
.cached_find_resource(n
, filename
)
892 found
= self
.cached_find_resource(self
.currentnode_stack
[-1], filename
)
895 for n
in self
.nodepaths
:
896 found
= self
.cached_find_resource(n
, filename
)
901 if found
and not found
in self
.ban_includes
:
902 if found
not in listed
:
904 self
.nodes
.append(found
)
907 if filename
not in listed
:
909 self
.names
.append(filename
)
912 def filter_comments(self
, node
):
914 Filter the comments from a c/h file, and return the preprocessor lines.
915 The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally.
917 :return: the preprocessor directives as a list of (keyword, line)
918 :rtype: a list of string pairs
920 # return a list of tuples : keyword, line
923 for (a
, b
) in trig_def
:
924 code
= code
.split(a
).join(b
)
925 code
= re_nl
.sub('', code
)
926 code
= re_cpp
.sub(repl
, code
)
927 return re_lines
.findall(code
)
929 def parse_lines(self
, node
):
931 cache
= node
.ctx
.preproc_cache_lines
932 except AttributeError:
933 cache
= node
.ctx
.preproc_cache_lines
= Utils
.lru_cache(LINE_CACHE_SIZE
)
937 cache
[node
] = lines
= self
.filter_comments(node
)
938 lines
.append((POPFILE
, ''))
942 def addlines(self
, node
):
944 Add the lines from a header in the list of preprocessor lines to parse
947 :type node: :py:class:`waflib.Node.Node`
950 self
.currentnode_stack
.append(node
.parent
)
952 self
.count_files
+= 1
953 if self
.count_files
> recursion_limit
:
955 raise PreprocError('recursion limit exceeded')
958 Logs
.debug('preproc: reading file %r', node
)
960 lines
= self
.parse_lines(node
)
961 except EnvironmentError:
962 raise PreprocError('could not read the file %r' % node
)
965 Logs
.error('parsing %r failed %s', node
, traceback
.format_exc())
967 self
.lines
.extend(lines
)
969 def start(self
, node
, env
):
971 Preprocess a source file to obtain the dependencies, which are accumulated to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes`
972 and :py:attr:`waflib.Tools.c_preproc.c_parser.names`.
974 :param node: source file
975 :type node: :py:class:`waflib.Node.Node`
976 :param env: config set containing additional defines to take into account
977 :type env: :py:class:`waflib.ConfigSet.ConfigSet`
979 Logs
.debug('preproc: scanning %s (in %s)', node
.name
, node
.parent
.name
)
981 self
.current_file
= node
984 # macros may be defined on the command-line, so they must be parsed as if they were part of the file
986 lst
= format_defines(env
.DEFINES
)
988 self
.lines
.extend([('define', x
) for x
in lst
])
991 (token
, line
) = self
.lines
.pop()
993 self
.count_files
-= 1
994 self
.currentnode_stack
.pop()
1000 # make certain we define the state if we are about to enter in an if block
1001 if token
[:2] == 'if':
1002 state
.append(undefined
)
1003 elif token
== 'endif':
1006 # skip lines when in a dead 'if' branch, wait for the endif
1008 if skipped
in self
.state
or ignored
in self
.state
:
1012 ret
= eval_macro(tokenize(line
), self
.defs
)
1014 state
[-1] = accepted
1017 elif token
== 'ifdef':
1018 m
= re_mac
.match(line
)
1019 if m
and m
.group() in self
.defs
:
1020 state
[-1] = accepted
1023 elif token
== 'ifndef':
1024 m
= re_mac
.match(line
)
1025 if m
and m
.group() in self
.defs
:
1028 state
[-1] = accepted
1029 elif token
== 'include' or token
== 'import':
1030 (kind
, inc
) = extract_include(line
, self
.defs
)
1031 self
.current_file
= self
.tryfind(inc
, kind
, env
)
1032 if token
== 'import':
1033 self
.ban_includes
.add(self
.current_file
)
1034 elif token
== 'elif':
1035 if state
[-1] == accepted
:
1037 elif state
[-1] == ignored
:
1038 if eval_macro(tokenize(line
), self
.defs
):
1039 state
[-1] = accepted
1040 elif token
== 'else':
1041 if state
[-1] == accepted
:
1043 elif state
[-1] == ignored
:
1044 state
[-1] = accepted
1045 elif token
== 'define':
1047 self
.defs
[self
.define_name(line
)] = line
1048 except AttributeError:
1049 raise PreprocError('Invalid define line %r' % line
)
1050 elif token
== 'undef':
1051 m
= re_mac
.match(line
)
1052 if m
and m
.group() in self
.defs
:
1053 self
.defs
.__delitem
__(m
.group())
1054 #print "undef %s" % name
1055 elif token
== 'pragma':
1056 if re_pragma_once
.match(line
.lower()):
1057 self
.ban_includes
.add(self
.current_file
)
1058 except Exception as e
:
1060 Logs
.debug('preproc: line parsing failed (%s): %s %s', e
, line
, traceback
.format_exc())
1062 def define_name(self
, line
):
1064 :param line: define line
1067 :return: the define name
1069 return re_mac
.match(line
).group()
1073 Get the dependencies using a c/c++ preprocessor, this is required for finding dependencies of the kind::
1075 #include some_macro()
1077 This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example
1080 incn
= task
.generator
.includes_nodes
1081 except AttributeError:
1082 raise Errors
.WafError('%r is missing a feature such as "c", "cxx" or "includes": ' % task
.generator
)
1085 nodepaths
= incn
+ [task
.generator
.bld
.root
.find_dir(x
) for x
in standard_includes
]
1087 nodepaths
= [x
for x
in incn
if x
.is_child_of(x
.ctx
.srcnode
) or x
.is_child_of(x
.ctx
.bldnode
)]
1089 tmp
= c_parser(nodepaths
)
1090 tmp
.start(task
.inputs
[0], task
.env
)
1091 return (tmp
.nodes
, tmp
.names
)