10 import multiprocessing
def find_pound_include (line, use_outside, use_slash):
    """Return the quoted name from a '#include "..."' directive on LINE.

    Returns "" unless exactly one quoted include is found and it passes
    the filters:
      USE_OUTSIDE - accept the name even if the file does not exist in
                    the current directory.
      USE_SLASH   - accept names containing a '/' path component.
    """
    # r"" instead of the Python-2-only ur"" literal: the pattern is pure
    # ASCII and ur"" is a syntax error under Python 3.
    inc = re.findall (r"^\s*#\s*include\s*\"(.+?)\"", line)
    if len (inc) == 1:
        nm = inc[0]
        if use_outside or os.path.exists (nm):
            if use_slash or '/' not in nm:
                return nm
    return ""
def find_system_include (line):
    """Return the name from a '#include <...>' directive on LINE, or ""."""
    # Raw string replaces the Python-2-only ur"" literal.
    inc = re.findall (r"^\s*#\s*include\s*<(.+?)>", line)
    if len (inc) == 1:
        return inc[0]
    return ""
def find_pound_define (line):
    """Return the macro name from a '#define' on LINE, or "" if none."""
    # Raw string replaces the Python-2-only ur"" literal.
    inc = re.findall (r"^\s*#\s*define ([A-Za-z0-9_]+)", line)
    if len (inc) != 0:
        if len (inc) > 1:
            # A ^-anchored pattern can match at most once per line; this
            # is a should-never-happen sanity check.
            print ("What? more than 1 match in #define??")
            print (inc)
            sys.exit (5)
        return inc[0]
    return ""
def is_pound_if (line):
    """Return True if LINE is an #if, #ifdef or #ifndef directive."""
    # Raw strings: "\s" inside a plain literal is an invalid escape
    # sequence under modern Python.
    inc = re.findall (r"^\s*#\s*if\s", line)
    if not inc:
        inc = re.findall (r"^\s*#\s*if[n]?def\s", line)
    if inc:
        return True
    return False
def is_pound_endif (line):
    """Return True if LINE is an #endif directive."""
    # Raw string avoids invalid "\s" escapes in a plain literal.
    inc = re.findall (r"^\s*#\s*endif", line)
    if inc:
        return True
    return False
def find_pound_if (line):
    """Return the list of macro names tested by an #if/#elif/#ifdef/#ifndef
    directive on LINE.  Returns an empty list when LINE is not such a
    directive."""
    # Raw strings replace the Python-2-only ur"" literals.
    inc = re.findall (r"^\s*#\s*if\s+(.*)", line)
    if len (inc) == 0:
        inc = re.findall (r"^\s*#\s*elif\s+(.*)", line)
    if len (inc) > 0:
        # Pull out both 'defined(MACRO)' and 'defined MACRO' style tests.
        inc2 = re.findall (r"defined\s*\((.+?)\)", inc[0])
        inc3 = re.findall (r"defined\s+([a-zA-Z0-9_]+)", inc[0])
        for yy in inc3:
            inc2.append (yy)
        return inc2
    else:
        inc = re.findall (r"^\s*#\s*ifdef\s(.*)", line)
        if len (inc) == 0:
            inc = re.findall (r"^\s*#\s*ifndef\s(.*)", line)
        if len (inc) > 0:
            # #ifdef/#ifndef name just the macro itself.
            inc2 = re.findall (r"[A-Za-z_][A-Za-z_0-9]*", inc[0])
            return inc2
    if len (inc) == 0:
        return list ()
    # A ^-anchored findall cannot yield more than one match.
    print ("WTF. more than one line returned for find_pound_if")
    print (inc)
    sys.exit (5)
# IINFO - this is a vector of include information. It consists of 8 elements.
# [0] - base name of the file
# [1] - path leading to this file.
# [2] - ordered list of all headers directly included by this file.
# [3] - Ordered list of any headers included within conditionally compiled code.
#       header files are expected to have all includes one level deep due to
#       the omnipresent guards at the top of the file.
# [4] - List of all macros which are consumed (used) within this file.
# [5] - list of all macros which may be defined in this file.
# [6] - The source code for this file, if cached.
# [7] - line number info for any headers in the source file. Indexed by base
#       name, returning the line the include is on.
# An empty IINFO record, returned when a file cannot be processed.  It
# carries all 8 fields ([0]..[7]) so that accessing the line-number map
# at index [7] is safe; the original 7-element tuple raised IndexError
# for that field.
empty_iinfo = ("", "", list(), list(), list(), list(), list(), dict())
# This function will process a file and extract interesting information.
# DO_MACROS indicates whether macros defined and used should be recorded.
# KEEP_SRC indicates the source for the file should be cached.
def process_include_info (filen, do_macros, keep_src):
    """Build an IINFO tuple for FILEN:
    (base-name, dir-name, includes, conditional-includes,
     macros-consumed, macros-defined, cached-source, include-line-map).
    Returns empty_iinfo when FILEN does not exist."""
    header = False
    if not os.path.exists (filen):
        return empty_iinfo

    sfile = open (filen, "r")
    data = sfile.readlines ()
    sfile.close ()          # was previously left for the GC to close

    # Ignore the initial #ifdef HEADER_H in header files: start the
    # conditional-nesting depth at -1 so the guard itself doesn't make
    # every include look conditional.
    if filen[-2:] == ".h":
        nest = -1
        header = True
    else:
        nest = 0

    macin = list ()
    macout = list ()
    incl = list ()
    cond_incl = list ()
    src_line = { }
    guard = ""

    for count in range (len (data)):
        line = data[count]

        if is_pound_if (line):
            nest += 1
        elif is_pound_endif (line):
            nest -= 1

        nm = find_pound_include (line, True, True)
        if nm != "" and nm not in incl and nm[-2:] == ".h":
            incl.append (nm)
            src_line[nm] = count
            if nest > 0:
                # Include is inside conditionally compiled code.
                cond_incl.append (nm)

        if do_macros:
            d = find_pound_define (line)
            if d != "":
                if d not in macout:
                    macout.append (d)
                continue

            d = find_pound_if (line)
            if d:
                # The first #if in a header file should be the guard
                if header and len (d) == 1 and guard == "":
                    if d[0][-2:] == "_H":
                        guard = d[0]
                    else:
                        guard = "Guess there was no guard..."
                else:
                    for mac in d:
                        if mac != "defined" and mac not in macin:
                            macin.append (mac)

    if not keep_src:
        data = list ()

    return (os.path.basename (filen), os.path.dirname (filen), incl, cond_incl,
            macin, macout, data, src_line)
# Extract header info, but no macros or source code.
def process_ii (filen):
    """Return the IINFO for FILEN without macro data or cached source."""
    return process_include_info (filen, False, False)
# Extract header information, and collect macro information.
def process_ii_macro (filen):
    """Return the IINFO for FILEN including macro data, no cached source."""
    return process_include_info (filen, True, False)
# Extract header information, cache the source lines.
def process_ii_src (filen):
    """Return the IINFO for FILEN with cached source, no macro data."""
    return process_include_info (filen, False, True)
# Extract header information, collect macro info and cache the source lines.
def process_ii_macro_src (filen):
    """Return the IINFO for FILEN with both macro data and cached source."""
    return process_include_info (filen, True, True)
def ii_include_list (iinfo):
    """Return the ordered list of headers directly included (IINFO field [2])."""
    return iinfo[2]
def ii_include_list_cond (iinfo):
    """Return the headers included within conditional code (IINFO field [3])."""
    return iinfo[3]
def ii_include_list_non_cond (iinfo):
    """Return the headers included unconditionally: the full include
    list minus the conditionally-included ones."""
    # Copy first: removing directly from the list returned by
    # ii_include_list would mutate the list stored inside IINFO itself,
    # corrupting it for any later caller.
    l = list (ii_include_list (iinfo))
    for n in ii_include_list_cond (iinfo):
        l.remove (n)
    return l
def ii_macro_consume (iinfo):
    """Return the list of macros consumed (used) by the file (IINFO field [4])."""
    return iinfo[4]
def ii_macro_define (iinfo):
    """Return the list of macros the file may define (IINFO field [5])."""
    return iinfo[5]
def ii_src_line (iinfo):
    """Return the include-name -> line-number map (IINFO field [7])."""
    return iinfo[7]
def ii_read (fname):
    """Read an iinfo-like tuple back from pickle file FNAME as written
    by ii_write.  Only the include, consumed-macro and defined-macro
    lists are stored; the remaining fields are empty placeholders."""
    f = open (fname, 'rb')
    incl = pickle.load (f)
    consumes = pickle.load (f)
    defines = pickle.load (f)
    f.close ()              # close explicitly instead of leaking the handle
    obj = (fname, fname, incl, list (), list (), consumes, defines,
           list (), list ())
    return obj
def ii_write (fname, obj):
    """Write the include list (obj[2]), consumed macros (obj[4]) and
    defined macros (obj[5]) of iinfo OBJ to pickle file FNAME."""
    f = open (fname, 'wb')
    pickle.dump (obj[2], f)
    pickle.dump (obj[4], f)
    pickle.dump (obj[5], f)
    f.close ()              # flush and release the handle deterministically
# execute a system command which returns file names
def execute_command (command):
    """Run COMMAND in a shell and return its stdout as a list of
    whitespace-stripped lines."""
    files = list ()
    f = os.popen (command)
    for x in f:
        files.append (x.strip ())
    f.close ()              # was never closed; reap the child process
    return files
# Try to locate a build directory from PATH
def find_gcc_bld_dir (path):
    """Search under PATH for a gcc build directory, identified as a
    directory named 'gcc' that contains tm.h.  Returns the directory
    path, or "" if none is found."""
    blddir = ""
    # Look for blddir/gcc/tm.h
    command = "find " + path + " -mindepth 2 -maxdepth 3 -name tm.h"
    files = execute_command (command)
    for y in files:
        p = os.path.dirname (y)
        if os.path.basename (p) == "gcc":
            blddir = p
            break
    if blddir != "":
        return blddir

    # If not found, try looking a bit deeper
    # Dont look this deep initially because a lot of cross target builds may show
    # up in the list before a native build... but those are better than nothing.
    command = "find " + path + " -mindepth 3 -maxdepth 5 -name tm.h"
    files = execute_command (command)
    for y in files:
        p = os.path.dirname (y)
        if os.path.basename (p) == "gcc":
            blddir = p
            break
    return blddir
# Find files matching pattern NAME, return in a list.
# CURRENT is True if you want to include the current directory
# DEEPER is True if you want to search 3 levels below the current directory
# any files within testsuite directories are ignored
def find_gcc_files (name, current, deeper):
    """Run find(1) for NAME with the depth options selected by CURRENT
    and DEEPER, excluding ./testsuite, and return the matches."""
    files = list ()
    command = ""
    if current:
        if not deeper:
            command = "find -maxdepth 1 -name " + name \
                      + " -not -path \"./testsuite/*\""
        else:
            command = "find -maxdepth 4 -name " + name \
                      + " -not -path \"./testsuite/*\""
    else:
        if deeper:
            command = "find -maxdepth 4 -mindepth 2 -name " + name \
                      + " -not -path \"./testsuite/*\""

    if command != "":
        files = execute_command (command)
    return files
# find the list of unique include names found in a file.
def find_unique_include_list_src (data):
    """Return the unique quoted *.h includes found in DATA (a list of
    source lines), in order of first appearance."""
    found = list ()
    for line in data:
        d = find_pound_include (line, True, True)
        if d and d not in found and d[-2:] == ".h":
            found.append (d)
    return found
# find the list of unique include names found in a file.
def find_unique_include_list (filen):
    """Return the unique quoted *.h includes found in file FILEN."""
    sfile = open (filen)
    data = sfile.read ().splitlines ()
    sfile.close ()          # close explicitly rather than leaking the handle
    return find_unique_include_list_src (data)
# Create the macin, macout, and incl vectors for a file FILEN.
# macin are the macros that are used in #if* conditional expressions
# macout are the macros which are #defined
# incl is the list of include files encountered
# returned as a tuple of the filename followed by the triplet of lists
# (filen, macin, macout, incl)
def create_macro_in_out (filen):
    """Scan FILEN and return (filen, macin, macout, incl)."""
    sfile = open (filen, "r")
    data = sfile.readlines ()
    sfile.close ()          # was previously left for the GC to close

    macin = list ()
    macout = list ()
    incl = list ()

    for line in data:
        d = find_pound_define (line)
        if d != "":
            if d not in macout:
                macout.append (d)
            continue

        d = find_pound_if (line)
        if d:
            for mac in d:
                if mac != "defined" and mac not in macin:
                    macin.append (mac)
            continue

        nm = find_pound_include (line, True, True)
        if nm != "" and nm not in incl:
            incl.append (nm)

    return (filen, macin, macout, incl)
# create the macro information for filen, and create .macin, .macout, and .incl
# files. Return the created macro tuple.
def create_include_data_files (filen):
    """Write FILEN.macout, FILEN.macin and FILEN.incl from the macro
    information of FILEN, report a summary, and return the macro tuple."""
    macros = create_macro_in_out (filen)
    depends = macros[1]
    defines = macros[2]
    incls = macros[3]

    disp_message = filen
    if len (defines) > 0:
        disp_message = disp_message + " " + str (len (defines)) + " #defines"
    dfile = open (filen + ".macout", "w")
    for x in defines:
        dfile.write (x + "\n")
    dfile.close ()          # close each output file deterministically

    if len (depends) > 0:
        disp_message = disp_message + " " + str (len (depends)) \
                       + " #if dependencies"
    dfile = open (filen + ".macin", "w")
    for x in depends:
        dfile.write (x + "\n")
    dfile.close ()

    if len (incls) > 0:
        disp_message = disp_message + " " + str (len (incls)) + " #includes"
    dfile = open (filen + ".incl", "w")
    for x in incls:
        dfile.write (x + "\n")
    dfile.close ()

    print (disp_message)
    return macros
# extract data for include file name_h and enter it into the dictionary.
# this does not change once read in. use_requires is True if you want to
# prime the values with already created .requires and .provides files.
def get_include_data (name_h, use_requires):
    """Load macro/include info for header NAME_H from its side files.
    Returns (name_h, macin, macout, incl), or the empty tuple () when
    no side files with content are found."""
    macin = list ()
    macout = list ()
    incl = list ()

    # .requires/.provides take priority when USE_REQUIRES is set.
    if use_requires and os.path.exists (name_h + ".requires"):
        macin = open (name_h + ".requires").read ().splitlines ()
    elif os.path.exists (name_h + ".macin"):
        macin = open (name_h + ".macin").read ().splitlines ()

    if use_requires and os.path.exists (name_h + ".provides"):
        macout = open (name_h + ".provides").read ().splitlines ()
    elif os.path.exists (name_h + ".macout"):
        macout = open (name_h + ".macout").read ().splitlines ()

    if os.path.exists (name_h + ".incl"):
        incl = open (name_h + ".incl").read ().splitlines ()

    if len (macin) == 0 and len (macout) == 0 and len (incl) == 0:
        return ()

    data = (name_h, macin, macout, incl)
    return data
# find FIND in src, and replace it with the list of headers in REPLACE.
# Remove any duplicates of FIND in REPLACE, and if some of the REPLACE
# headers occur earlier in the include chain, leave them.
# Return the new SRC only if anything changed.
def find_replace_include (find, replace, src):
    """Rewrite SRC (a list of lines) replacing the '#include "FIND"'
    line with includes for each header in REPLACE, de-duplicated.
    Returns the new line list, or an empty list if nothing changed."""
    res = list ()
    seen = { }
    anything = False
    for line in src:
        inc = find_pound_include (line, True, True)
        if inc == find:
            for y in replace:
                if seen.get (y) == None:
                    res.append ("#include \"" + y + "\"\n")
                    seen[y] = True
                    if y != find:
                        anything = True
            # if find isnt in the replacement list, then we are deleting
            # FIND, so changes.
            if find not in replace:
                anything = True
        else:
            # Drop later duplicates of headers we have already emitted
            # as part of the replacement.
            if inc in replace:
                if seen.get (inc) == None:
                    res.append (line)
                    seen[inc] = True
            else:
                res.append (line)

    if anything:
        return res
    return list ()
# pass in a require and provide dictionary to be read in.
def read_require_provides (require, provide):
    """Fill REQUIRE and PROVIDE (dicts keyed by header base name) from
    require-provide.master and the per-file .requires/.provides lists
    in the current directory.  Exits if the master file is missing."""
    if not os.path.exists ("require-provide.master"):
        print ("require-provide.master file is not available. please run data collection.")
        sys.exit (1)
    incl_list = open ("require-provide.master").read ().splitlines ()
    for f in incl_list:
        if os.path.exists (f + ".requires"):
            require[os.path.basename (f)] = \
                open (f + ".requires").read ().splitlines ()
        else:
            require[os.path.basename (f)] = list ()
        if os.path.exists (f + ".provides"):
            provide[os.path.basename (f)] = \
                open (f + ".provides").read ().splitlines ()
        else:
            provide[os.path.basename (f)] = list ()
def build_include_list (filen):
    """Return the local (non-system-path, existing) *.h headers included
    by FILEN, in top-to-bottom order, duplicates removed."""
    include_files = list ()
    sfile = open (filen, "r")
    data = sfile.readlines ()
    sfile.close ()          # was previously left for the GC to close
    for line in data:
        # False, False: only names with no '/' that exist locally.
        nm = find_pound_include (line, False, False)
        if nm != "" and nm[-2:] == ".h":
            if nm not in include_files:
                include_files.append (nm)
    return include_files
def build_reverse_include_list (filen):
    """Return the local *.h headers included by FILEN, scanning the file
    bottom-to-top, duplicates removed."""
    include_files = list ()
    sfile = open (filen, "r")
    data = sfile.readlines ()
    sfile.close ()          # was previously left for the GC to close
    for line in reversed (data):
        # False, False: only names with no '/' that exist locally.
        nm = find_pound_include (line, False, False)
        if nm != "" and nm[-2:] == ".h":
            if nm not in include_files:
                include_files.append (nm)
    return include_files
# Get compilation return code, and compensate for a warning that we want to
# consider an error when it comes to inlined templates.
def get_make_rc (rc, output):
    """Return RC, except that a successful build whose OUTPUT contains
    the inline-function-never-defined warning is turned into rc 1."""
    if rc == 0:
        # This is not considered an error during compilation of an individual
        # file, but it will cause an error during link if it isn't defined.
        # If this warning is seen during compiling a file, make it a build
        # error so we don't remove the header.
        h = re.findall ("warning: inline function.*used but never defined",
                        output)
        if len (h) != 0:
            rc = 1
    return rc
def get_make_output (build_dir, make_opt):
    """Run 'make MAKE_OPT' (in BUILD_DIR when non-empty) with a -j level
    of twice the CPU count.  Returns (rc, stderr-bytes)."""
    devnull = open ('/dev/null', 'w')
    at_a_time = multiprocessing.cpu_count () * 2
    make = "make -j" + str (at_a_time) + " "
    if build_dir != "":
        command = "cd " + build_dir + "; " + make + make_opt
    else:
        command = make + make_opt
    process = subprocess.Popen (command, stdout=devnull,
                                stderr=subprocess.PIPE, shell=True)
    output = process.communicate ()
    devnull.close ()        # was never closed; don't leak the handle
    rc = get_make_rc (process.returncode, output[1])
    return (rc, output[1])
def spawn_makes (command_list):
    """Spawn every (target-name, command) pair in COMMAND_LIST in
    parallel and wait for them all.  Returns (rc, stderr, target) for
    the first failure recorded, or rc 0 when all succeed."""
    devnull = open ('/dev/null', 'w')
    rc = (0, "", "")
    proc_res = list ()
    text = " Trying target builds : "
    for command_pair in command_list:
        tname = command_pair[0]
        command = command_pair[1]
        text += tname + ", "
        c = subprocess.Popen (command, bufsize=-1, stdout=devnull,
                              stderr=subprocess.PIPE, shell=True)
        proc_res.append ((c, tname))

    print (text[:-2])

    for p in proc_res:
        output = p[0].communicate ()
        ret = (get_make_rc (p[0].returncode, output[1]), output[1], p[1])
        if ret[0] != 0:
            # Just record the first one.
            if rc[0] == 0:
                rc = ret
    devnull.close ()        # was never closed; don't leak the handle
    return rc
def get_make_output_parallel (targ_list, make_opt, at_a_time):
    """Build MAKE_OPT for each (target-name, build-dir) pair in
    TARG_LIST, running AT_A_TIME builds in parallel (0 means twice the
    CPU count).  Returns the (rc, stderr, target) of the first failing
    batch, or rc 0 when everything builds."""
    command = list ()
    if at_a_time == 0:
        at_a_time = multiprocessing.cpu_count () * 2
    proc_res = [0] * at_a_time
    for x in targ_list:
        if make_opt[-2:] == ".o":
            # Object targets are built from the gcc subdirectory.
            s = "cd " + x[1] + "/gcc/; make " + make_opt
        else:
            s = "cd " + x[1] + "; make " + make_opt
        command.append ((x[0], s))

    num = len (command)
    rc = (0, "", "")
    loops = num // at_a_time
    if loops > 0:
        for idx in range (loops):
            ret = spawn_makes (command[idx * at_a_time:(idx + 1) * at_a_time])
            if ret[0] != 0:
                rc = ret
                break

    if rc[0] == 0:
        leftover = num % at_a_time
        if leftover > 0:
            ret = spawn_makes (command[-leftover:])
            if ret[0] != 0:
                rc = ret
    return rc
549 def readwholefile (src_file
):
550 sfile
= open (src_file
, "r")
551 src_data
= sfile
.readlines()