#! /usr/bin/python2
import os.path
import sys
import shlex
import re
import subprocess
import shutil
import pickle

import multiprocessing

def find_pound_include (line, use_outside, use_slash):
  inc = re.findall (ur"^\s*#\s*include\s*\"(.+?)\"", line)
  if len(inc) == 1:
    nm = inc[0]
    if use_outside or os.path.exists (nm):
      if use_slash or '/' not in nm:
        return nm
  return ""

def find_system_include (line):
  inc = re.findall (ur"^\s*#\s*include\s*<(.+?)>", line)
  if len(inc) == 1:
    return inc[0]
  return ""

def find_pound_define (line):
  inc = re.findall (ur"^\s*#\s*define ([A-Za-z0-9_]+)", line)
  if len(inc) != 0:
    if len(inc) > 1:
      print "What? more than 1 match in #define??"
      print inc
      sys.exit(5)
    return inc[0];
  return ""

def is_pound_if (line):
  inc = re.findall ("^\s*#\s*if\s", line)
  if not inc:
    inc = re.findall ("^\s*#\s*if[n]?def\s", line)
  if inc:
    return True
  return False

def is_pound_endif (line):
  inc = re.findall ("^\s*#\s*endif", line)
  if inc:
    return True
  return False

def find_pound_if (line):
  inc = re.findall (ur"^\s*#\s*if\s+(.*)", line)
  if len(inc) == 0:
    inc = re.findall (ur"^\s*#\s*elif\s+(.*)", line)
  if len(inc) > 0:
    inc2 = re.findall (ur"defined\s*\((.+?)\)", inc[0])
    inc3 = re.findall (ur"defined\s+([a-zA-Z0-9_]+)", inc[0])
    for yy in inc3:
      inc2.append (yy)
    return inc2
  else:
    inc = re.findall (ur"^\s*#\s*ifdef\s(.*)", line)
    if len(inc) == 0:
      inc = re.findall (ur"^\s*#\s*ifndef\s(.*)", line)
    if len(inc) > 0:
      inc2 = re.findall ("[A-Za-z_][A-Za-z_0-9]*", inc[0])
      return inc2
  if len(inc) == 0:
    return list ()
  print "WTF. more than one line returned for find_pound_if"
  print inc
  sys.exit(5)

# IINFO - this is a vector of include information.  It consists of 8 elements.
# [0] - base name of the file
# [1] - path leading to this file.
# [2] - Ordered list of all headers directly included by this file.
# [3] - Ordered list of any headers included within conditionally compiled code.
#       Header files are expected to have all includes one level deep due to
#       the omnipresent guards at the top of the file.
# [4] - List of all macros which are consumed (used) within this file.
# [5] - List of all macros which may be defined in this file.
# [6] - The source code for this file, if cached.
# [7] - Line info for any headers in the source file.  Indexed by base
#       name, returning the line the include is on.

empty_iinfo = ("", "", list(), list(), list(), list(), list(), { })

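# Purely illustrative sketch (not produced verbatim by this script): an iinfo
# tuple for a hypothetical header "foo.h" found under "gcc" might look like:
#   ("foo.h", "gcc", ["bar.h", "baz.h"], ["baz.h"], ["TARGET_FOO"], ["FOO_H"],
#    [], {"bar.h": "#include \"bar.h\"\n"})
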
# This function will process a file and extract interesting information.
# DO_MACROS indicates whether macros defined and used should be recorded.
# KEEP_SRC indicates the source for the file should be cached.
def process_include_info (filen, do_macros, keep_src):
  header = False
  if not os.path.exists (filen):
    return empty_iinfo

  sfile = open (filen, "r");
  data = sfile.readlines()
  sfile.close()

  # Ignore the initial #ifdef HEADER_H in header files
  if filen[-2:] == ".h":
    nest = -1
    header = True
  else:
    nest = 0

  macout = list ()
  macin = list()
  incl = list()
  cond_incl = list()
  src_line = { }
  guard = ""

  for line in (data):
    if is_pound_if (line):
      nest += 1
    elif is_pound_endif (line):
      nest -= 1

    nm = find_pound_include (line, True, True)
    if nm != "" and nm not in incl and nm[-2:] == ".h":
      incl.append (nm)
      if nest > 0:
        cond_incl.append (nm)
      if keep_src:
        src_line[nm] = line
      continue

    if do_macros:
      d = find_pound_define (line)
      if d:
        if d not in macout:
          macout.append (d);
        continue

      d = find_pound_if (line)
      if d:
        # The first #if in a header file should be the guard
        if header and len (d) == 1 and guard == "":
          if d[0][-2:] == "_H":
            guard = d
          else:
            guard = "Guess there was no guard..."
        else:
          for mac in d:
            if mac != "defined" and mac not in macin:
              macin.append (mac);

  if not keep_src:
    data = list()

  return (os.path.basename (filen), os.path.dirname (filen), incl, cond_incl,
          macin, macout, data, src_line)

# Extract header info, but no macros or source code.
def process_ii (filen):
  return process_include_info (filen, False, False)

# Extract header information, and collect macro information.
def process_ii_macro (filen):
  return process_include_info (filen, True, False)

# Extract header information, and cache the source lines.
def process_ii_src (filen):
  return process_include_info (filen, False, True)

# Extract header information, collect macro info, and cache the source lines.
def process_ii_macro_src (filen):
  return process_include_info (filen, True, True)

def ii_base (iinfo):
  return iinfo[0]

def ii_path (iinfo):
  return iinfo[1]

def ii_include_list (iinfo):
  return iinfo[2]

def ii_include_list_cond (iinfo):
  return iinfo[3]

def ii_include_list_non_cond (iinfo):
  # Work on a copy so the include list cached in IINFO is not modified.
  l = list (ii_include_list (iinfo))
  for n in ii_include_list_cond (iinfo):
    l.remove (n)
  return l

def ii_macro_consume (iinfo):
  return iinfo[4]

def ii_macro_define (iinfo):
  return iinfo[5]

def ii_src (iinfo):
  return iinfo[6]

def ii_src_line (iinfo):
  return iinfo[7]

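# Example usage of the accessors above (an illustrative sketch only; the file
# name "gcc/tree.h" is hypothetical):
#   iinfo = process_ii_macro_src ("gcc/tree.h")
#   for inc in ii_include_list (iinfo):
#     print inc, ii_src_line (iinfo).get (inc, "")
#   print ii_macro_consume (iinfo), ii_macro_define (iinfo)
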
def ii_read (fname):
  f = open (fname, 'rb')
  incl = pickle.load (f)
  consumes = pickle.load (f)
  defines = pickle.load (f)
  f.close ()
  obj = (fname, fname, incl, list(), consumes, defines, list(), { })
  return obj

def ii_write (fname, obj):
  f = open (fname, 'wb')
  pickle.dump (obj[2], f)
  pickle.dump (obj[4], f)
  pickle.dump (obj[5], f)
  f.close ()

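# Illustrative round trip (a sketch; "tree.h.ii" is a hypothetical cache file):
#   ii_write ("tree.h.ii", process_ii_macro ("tree.h"))
#   cached = ii_read ("tree.h.ii")
#   print ii_include_list (cached), ii_macro_consume (cached)
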
# Execute a system command which returns file names.
def execute_command (command):
  files = list()
  f = os.popen (command)
  for x in f:
    if x[0:2] == "./":
      fn = x.rstrip()[2:]
    else:
      fn = x.rstrip()
    files.append(fn)
  return files

# Try to locate a build directory from PATH.
def find_gcc_bld_dir (path):
  blddir = ""
  # Look for blddir/gcc/tm.h
  command = "find " + path + " -mindepth 2 -maxdepth 3 -name tm.h"
  files = execute_command (command)
  for y in files:
    p = os.path.dirname (y)
    if os.path.basename (p) == "gcc":
      blddir = p
      break
  # If not found, try looking a bit deeper.
  # Don't look this deep initially because a lot of cross-target builds may show
  # up in the list before a native build... but those are better than nothing.
  if not blddir:
    command = "find " + path + " -mindepth 3 -maxdepth 5 -name tm.h"
    files = execute_command (command)
    for y in files:
      p = os.path.dirname (y)
      if os.path.basename (p) == "gcc":
        blddir = p
        break

  return blddir

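# Example (a sketch; the paths are hypothetical):
#   blddir = find_gcc_bld_dir ("../..")
#   # blddir would be something like "../../build/gcc" if a tm.h was found
#   # there, or "" if no build directory was located.
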
# Find files matching pattern NAME, and return them in a list.
# CURRENT is True if you want to include the current directory.
# DEEPER is True if you want to search 3 levels below the current directory.
# Any files within testsuite directories are ignored.

def find_gcc_files (name, current, deeper):
  files = list()
  command = ""
  if current:
    if not deeper:
      command = "find -maxdepth 1 -name " + name + " -not -path \"./testsuite/*\""
    else:
      command = "find -maxdepth 4 -name " + name + " -not -path \"./testsuite/*\""
  else:
    if deeper:
      command = "find -maxdepth 4 -mindepth 2 -name " + name + " -not -path \"./testsuite/*\""

  if command != "":
    files = execute_command (command)

  return files

# Find the list of unique include names found in a file.
def find_unique_include_list_src (data):
  found = list ()
  for line in data:
    d = find_pound_include (line, True, True)
    if d and d not in found and d[-2:] == ".h":
      found.append (d)
  return found

# Find the list of unique include names found in a file.
def find_unique_include_list (filen):
  data = open (filen).read().splitlines()
  return find_unique_include_list_src (data)

# Create the macin, macout, and incl vectors for a file FILEN.
# macin are the macros that are used in #if* conditional expressions.
# macout are the macros which are #defined.
# incl is the list of include files encountered.
# Returned as a tuple of the filename followed by the triplet of lists:
# (filen, macin, macout, incl)

def create_macro_in_out (filen):
  sfile = open (filen, "r");
  data = sfile.readlines()
  sfile.close()

  macout = list ()
  macin = list()
  incl = list()

  for line in (data):
    d = find_pound_define (line)
    if d != "":
      if d not in macout:
        macout.append (d);
      continue

    d = find_pound_if (line)
    if len(d) != 0:
      for mac in d:
        if mac != "defined" and mac not in macin:
          macin.append (mac);
      continue

    nm = find_pound_include (line, True, True)
    if nm != "" and nm not in incl:
      incl.append (nm)

  return (filen, macin, macout, incl)

# Create the macro information for FILEN, and write it to .macin, .macout,
# and .incl files.  Return the created macro tuple.
def create_include_data_files (filen):

  macros = create_macro_in_out (filen)
  depends = macros[1]
  defines = macros[2]
  incls = macros[3]

  disp_message = filen
  if len (defines) > 0:
    disp_message = disp_message + " " + str(len (defines)) + " #defines"
    dfile = open (filen + ".macout", "w")
    for x in defines:
      dfile.write (x + "\n")
    dfile.close ()

  if len (depends) > 0:
    disp_message = disp_message + " " + str(len (depends)) + " #if dependencies"
    dfile = open (filen + ".macin", "w")
    for x in depends:
      dfile.write (x + "\n")
    dfile.close ()

  if len (incls) > 0:
    disp_message = disp_message + " " + str(len (incls)) + " #includes"
    dfile = open (filen + ".incl", "w")
    for x in incls:
      dfile.write (x + "\n")
    dfile.close ()

  return macros

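# Example of the side effects (a sketch; "rtl.h" is just an illustrative name):
#   create_include_data_files ("rtl.h")
#   # would typically leave rtl.h.macout, rtl.h.macin, and rtl.h.incl next to
#   # rtl.h, each containing one macro or include name per line (a file is
#   # only written when its list is non-empty).
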
# Extract data for include file NAME_H and enter it into the dictionary.
# This does not change once read in.  USE_REQUIRES is True if you want to
# prime the values with already created .requires and .provides files.
def get_include_data (name_h, use_requires):
  macin = list()
  macout = list()
  incl = list ()
  if use_requires and os.path.exists (name_h + ".requires"):
    macin = open (name_h + ".requires").read().splitlines()
  elif os.path.exists (name_h + ".macin"):
    macin = open (name_h + ".macin").read().splitlines()

  if use_requires and os.path.exists (name_h + ".provides"):
    macout = open (name_h + ".provides").read().splitlines()
  elif os.path.exists (name_h + ".macout"):
    macout = open (name_h + ".macout").read().splitlines()

  if os.path.exists (name_h + ".incl"):
    incl = open (name_h + ".incl").read().splitlines()

  if len(macin) == 0 and len(macout) == 0 and len(incl) == 0:
    return ()
  data = ( name_h, macin, macout, incl )
  return data

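# Example usage (a sketch; the header name is hypothetical and the data files
# are assumed to have already been produced by create_include_data_files or
# the require/provide collection pass):
#   data = get_include_data ("tree.h", True)
#   if data:
#     name, requires, provides, includes = data
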
# Find FIND in SRC, and replace it with the list of headers in REPLACE.
# Remove any duplicates of FIND in REPLACE, and if some of the REPLACE
# headers occur earlier in the include chain, leave them.
# Return the new SRC only if anything changed.
def find_replace_include (find, replace, src):
  res = list()
  seen = { }
  anything = False
  for line in src:
    inc = find_pound_include (line, True, True)
    if inc == find:
      for y in replace:
        if seen.get(y) == None:
          res.append("#include \""+y+"\"\n")
          seen[y] = True
          if y != find:
            anything = True
      # If FIND isn't in the replacement list, then we are deleting FIND,
      # so that counts as a change.
      if find not in replace:
        anything = True
    else:
      if inc in replace:
        if seen.get(inc) == None:
          res.append (line)
          seen[inc] = True
      else:
        res.append (line)

  if (anything):
    return res
  else:
    return list()

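# Illustrative example (a sketch with made-up header names):
#   src = readwholefile ("foo.c")
#   new_src = find_replace_include ("big.h", ["small-1.h", "small-2.h"], src)
#   # new_src is the empty list if nothing needed to change, otherwise it is
#   # the rewritten list of source lines.
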
# Pass in a require and provide dictionary to be read in.
def read_require_provides (require, provide):
  if not os.path.exists ("require-provide.master"):
    print "require-provide.master file is not available. please run data collection."
    sys.exit(1)
  incl_list = open("require-provide.master").read().splitlines()
  for f in incl_list:
    if os.path.exists (f+".requires"):
      require[os.path.basename (f)] = open (f + ".requires").read().splitlines()
    else:
      require[os.path.basename (f)] = list ()
    if os.path.exists (f+".provides"):
      provide[os.path.basename (f)] = open (f + ".provides").read().splitlines()
    else:
      provide [os.path.basename (f)] = list ()

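# Example usage (a sketch; assumes require-provide.master already exists in
# the current directory):
#   require = { }
#   provide = { }
#   read_require_provides (require, provide)
#   # require["tree.h"] would then list the macros tree.h needs defined, and
#   # provide["tree.h"] the macros it defines, keyed by base name.
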
def build_include_list (filen):
  include_files = list()
  sfile = open (filen, "r")
  data = sfile.readlines()
  sfile.close()
  for line in data:
    nm = find_pound_include (line, False, False)
    if nm != "" and nm[-2:] == ".h":
      if nm not in include_files:
        include_files.append(nm)
  return include_files

def build_reverse_include_list (filen):
  include_files = list()
  sfile = open (filen, "r")
  data = sfile.readlines()
  sfile.close()
  for line in reversed(data):
    nm = find_pound_include (line, False, False)
    if nm != "":
      if nm not in include_files:
        include_files.append(nm)
  return include_files

# Get the compilation return code, and compensate for a warning that we want
# to consider an error when it comes to inlined templates.
def get_make_rc (rc, output):
  rc = rc % 1280
  if rc == 0:
    # This is not considered an error during compilation of an individual file,
    # but it will cause an error during link if it isn't defined.  If this
    # warning is seen while compiling a file, make it a build error so we
    # don't remove the header.
    h = re.findall ("warning: inline function.*used but never defined", output)
    if len(h) != 0:
      rc = 1
  return rc;

def get_make_output (build_dir, make_opt):
  devnull = open('/dev/null', 'w')
  at_a_time = multiprocessing.cpu_count() * 2
  make = "make -j"+str(at_a_time)+ " "
  if build_dir != "":
    command = "cd " + build_dir +"; " + make + make_opt
  else:
    command = make + make_opt
  process = subprocess.Popen(command, stdout=devnull, stderr=subprocess.PIPE, shell=True)
  output = process.communicate();
  rc = get_make_rc (process.returncode, output[1])
  return (rc , output[1])

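# Example (a sketch; the build directory and make target are hypothetical):
#   rc, err = get_make_output ("../build/gcc", "all-gcc")
#   if rc != 0:
#     print err
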
def spawn_makes (command_list):
  devnull = open('/dev/null', 'w')
  rc = (0,"", "")
  proc_res = list()
  text = " Trying target builds : "
  for command_pair in command_list:
    tname = command_pair[0]
    command = command_pair[1]
    text += tname + ", "
    c = subprocess.Popen(command, bufsize=-1, stdout=devnull, stderr=subprocess.PIPE, shell=True)
    proc_res.append ((c, tname))

  print text[:-2]

  for p in proc_res:
    output = p[0].communicate()
    ret = (get_make_rc (p[0].returncode, output[1]), output[1], p[1])
    if (ret[0] != 0):
      # Just record the first one.
      if rc[0] == 0:
        rc = ret;
  return rc

def get_make_output_parallel (targ_list, make_opt, at_a_time):
  command = list()
  targname = list()
  if at_a_time == 0:
    at_a_time = multiprocessing.cpu_count() * 2
  proc_res = [0] * at_a_time
  for x in targ_list:
    if make_opt[-2:] == ".o":
      s = "cd " + x[1] + "/gcc/; make " + make_opt
    else:
      s = "cd " + x[1] +"; make " + make_opt
    command.append ((x[0],s))

  num = len(command)
  rc = (0,"", "")
  loops = num // at_a_time

  if (loops > 0):
    for idx in range (loops):
      ret = spawn_makes (command[idx*at_a_time:(idx+1)*at_a_time])
      if ret[0] != 0:
        rc = ret
        break

  if (rc[0] == 0):
    leftover = num % at_a_time
    if (leftover > 0):
      ret = spawn_makes (command[-leftover:])
      if ret[0] != 0:
        rc = ret

  return rc

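# Example (a sketch; the target/build-directory pairs are hypothetical):
#   targets = [("native", "../build"), ("aarch64", "../build-aarch64")]
#   rc = get_make_output_parallel (targets, "all-gcc", 0)
#   # rc is (0, "", "") on success, otherwise (returncode, stderr, target name)
#   # for the first failing build.
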
def readwholefile (src_file):
  sfile = open (src_file, "r")
  src_data = sfile.readlines()
  sfile.close()
  return src_data