9 from headerutils import *
# Headers that must never be removed from any source file: the core
# configuration/system headers everything in GCC depends on.
14 no_remove = [ "system.h", "coretypes.h", "config.h" , "bconfig.h", "backend.h" ]
16 # These targets are the ones which provide "coverage". Typically, if any
17 # target is going to fail compilation, it's one of these. This was determined
18 # during the initial runs of reduce-headers... On a full set of target builds,
19 # every failure which occurred was triggered by one of these.
20 # This list is used during target-list construction simply to put any of these
21 # *first* in the candidate list, increasing the probability that a failure is
# NOTE(review): the two entries below belong to the target_priority list whose
# opening bracket lies outside this excerpt -- do not treat them as statements.
32 "powerpc-eabisimaltivec",
33 "rs6000-ibm-aix5.1.0",
43 target_builds = list()
# Directories searched for source headers, relative to the gcc source dir.
47 search_path = [ ".", "../include", "../libcpp/include" ]
52 # Given a header name, normalize it. i.e. cp/cp-tree.h could be in gcc, while
53 # the same header could be referenced from within the cp subdirectory as
55 # for now, just assume basenames are unique
def normalize_header (header):
    """Return the canonical form of *header*: its basename only.

    Header basenames are assumed unique across the tree (e.g. cp/cp-tree.h
    and cp-tree.h name the same file), so the directory part is dropped.
    """
    _, base = os.path.split (header)
    return base
61 # Adds a header file and its sub-includes to the global dictionary if they
62 # aren't already there. Specify s_path since different build directories may
63 # append themselves on demand to the global list.
64 # return entry for the specified header, knowing all sub entries are completed
# Add *header* (and, recursively, everything it includes) to the global
# header_dict cache; s_path is the list of source directories to search.
# Returns the include-info entry for the header.
66 def get_header_info (header, s_path):
69 process_list = list ()
72 bname_iinfo = empty_iinfo
# First look for the header along the source-directory search path.
74 if os.path.exists (path + "/" + header):
75 location = path + "/" + header
79 bname = normalize_header (location)
# Reuse the cached entry when this basename was already processed.
80 if header_dict.get (bname):
81 bname_iinfo = header_dict[bname]
82 loc2 = ii_path (bname_iinfo)+ "/" + bname
85 if location[:2] == "./":
86 location = location[2:]
88 # Don't use the cache if it isn't the right one.
89 bname_iinfo = process_ii_macro (location)
92 bname_iinfo = process_ii_macro (location)
93 header_dict[bname] = bname_iinfo
94 # now descend into the include tree
95 for i in ii_include_list (bname_iinfo):
96 get_header_info (i, s_path)
98 # if the file isn't in the source directories, look in the build and target
99 # directories. If it is here, then aggregate all the versions.
100 location = build_dir + "/gcc/" + header
101 build_inc = target_inc = False
102 if os.path.exists (location):
104 for x in target_dict:
105 location = target_dict[x] + "/gcc/" + header
106 if os.path.exists (location):
# Merge macro defines/consumes and include lists across the host build
# and every target build that provides this header.
110 if (build_inc or target_inc):
111 bname = normalize_header(header)
116 iinfo = process_ii_macro (build_dir + "/gcc/" + header)
117 defines = set (ii_macro_define (iinfo))
118 consumes = set (ii_macro_consume (iinfo))
119 incl = set (ii_include_list (iinfo))
122 for x in target_dict:
123 location = target_dict[x] + "/gcc/" + header
124 if os.path.exists (location):
125 iinfo = process_ii_macro (location)
126 defines.update (ii_macro_define (iinfo))
127 consumes.update (ii_macro_consume (iinfo))
128 incl.update (ii_include_list (iinfo))
# The aggregated entry is tagged with the pseudo-path "build".
130 bname_iinfo = (header, "build", list(incl), list(), list(consumes), list(defines), list(), list())
132 header_dict[bname] = bname_iinfo
# Recurse into the aggregated include list as well.
134 get_header_info (i, s_path)
139 # return a list of all headers brought in by this header
# Return the list of all headers transitively included by *fname*,
# walking the cached header_dict with an explicit work stack (iterative DFS).
140 def all_headers (fname):
142 headers_stack = list()
143 headers_list = list()
144 if header_dict.get (fname) == None:
# Seed the stack with the header's direct includes.
146 for y in ii_include_list (header_dict[fname]):
147 headers_stack.append (y)
150 h = headers_stack.pop ()
151 hn = normalize_header (h)
152 if hn not in headers_list:
153 headers_list.append (hn)
154 if header_dict.get(hn):
155 for y in ii_include_list (header_dict[hn]):
# Only push headers not yet recorded, so nothing is visited twice.
156 if normalize_header (y) not in headers_list:
157 headers_stack.append (y)
164 # Search bld_dir for all target tuples, confirm that they have a build path with
165 # bld_dir/target-tuple/gcc, and build a dictionary of build paths indexed by
# Scan bld_dir for target-tuple subdirectories (names containing a '-'),
# verify each has a gcc build directory, and record the build path in the
# global target_dict, keyed by target tuple.
168 def build_target_dict (bld_dir, just_these):
172 if os.path.exists (bld_dir):
176 ls = os.listdir(bld_dir)
# A '-' in the directory name marks it as a target tuple.
178 if t.find("-") != -1:
180 tpath = bld_dir + "/" + target
181 if not os.path.exists (tpath + "/gcc"):
182 print "Error: gcc build directory for target " + t + " Does not exist: " + tpath + "/gcc"
185 target_dict[target] = tpath
# Map a source file name to its object file name (.c/.cc -> .o).
190 def get_obj_name (src_file):
191 if src_file[-2:] == ".c":
# NOTE(review): str.replace substitutes every occurrence of ".c", not only
# the suffix; fine for typical gcc file names, but worth confirming.
192 return src_file.replace (".c", ".o")
193 elif src_file[-3:] == ".cc":
194 return src_file.replace (".cc", ".o")
# Report whether *target*'s build tree already contains obj_name, checking
# first with any subdirectory component, then the flat gcc directory.
197 def target_obj_exists (target, obj_name):
199 # look in a subdir if src has a subdir, then check gcc base directory.
200 if target_dict.get(target):
201 obj = target_dict[target] + "/gcc/" + obj_name
202 if not os.path.exists (obj):
# Fall back to the basename directly under <target>/gcc.
203 obj = target_dict[target] + "/gcc/" + os.path.basename(obj_name)
204 if os.path.exists (obj):
208 # Given a src file, return a list of targets which may build this file.
# Given a src file, build a list of (target, build-path) pairs whose build
# trees contain the corresponding object file.
209 def find_targets (src_file):
212 obj_name = get_obj_name (src_file)
214 print "Error: " + src_file + " - Cannot determine object name."
217 # Put the high priority targets which tend to trigger failures first
218 for target in target_priority:
219 if target_obj_exists (target, obj_name):
220 targ_list.append ((target, target_dict[target]))
# Then append the remaining known targets that can build this object.
222 for target in target_dict:
223 if target not in target_priority and target_obj_exists (target, obj_name):
224 targ_list.append ((target, target_dict[target]))
# Attempt to remove each include from src_file in turn, rebuilding the host
# compiler (and any target compilers) after every trial removal.  h_list, if
# non-empty, restricts which headers are candidates; verbose is a log stream.
# Returns a one-line summary string for the summary log.
229 def try_to_remove (src_file, h_list, verbose):
234 # build from scratch each time
240 src_info = process_ii_macro_src (src_file)
241 src_data = ii_src (src_info)
243 inclist = ii_include_list_non_cond (src_info)
244 # work is done if there are no includes to check
246 return src_file + ": No include files to attempt to remove"
248 # work on the include list in reverse.
251 # Get the target list
253 targ_list = find_targets (src_file)
# Include the source file's own directory in the header search path.
256 if os.path.dirname (src_file):
257 spath.append (os.path.dirname (src_file))
260 if src_file.find("config/") != -1:
261 # config files don't usually build on the host
263 obn = get_obj_name (os.path.basename (src_file))
264 if obn and os.path.exists (build_dir + "/gcc/" + obn):
267 summary = src_file + ": Target builds are required for config files. None found."
271 summary =src_file + ": Cannot find any targets which build this file."
276 # confirm it actually builds before we do anything
277 print "Confirming source file builds"
278 res = get_make_output (build_dir + "/gcc", "all")
280 message = "Error: " + src_file + " does not build currently."
281 summary = src_file + " does not build on host."
285 verbose.write (message + "\n")
286 verbose.write (res[1]+ "\n")
# Seed the required-macro set from the source file itself, remembering in
# 'because' why each macro is required (used in log messages).
289 src_requires = set (ii_macro_consume (src_info))
290 for macro in src_requires:
291 because[macro] = src_file
292 header_seen = list ()
# Work on a copy; the original survives as <src_file>.bak for restoration.
294 os.rename (src_file, src_file + ".bak")
295 src_orig = copy.deepcopy (src_data)
296 src_tmp = copy.deepcopy (src_data)
299 # process the includes from bottom to top. This is because we know that
300 # later includes are known to be needed, so any dependency from this
301 # header is a true dependency
302 for inc_file in inclist:
303 inc_file_norm = normalize_header (inc_file)
# Skip headers that may never be removed or are not candidates.
305 if inc_file in no_remove:
307 if len (h_list) != 0 and inc_file_norm not in h_list:
# gt-*.h / gtype-*.h are generated headers; never try to remove them.
309 if inc_file_norm[0:3] == "gt-":
311 if inc_file_norm[0:6] == "gtype-":
# Never remove a file's own corresponding header.
313 if inc_file_norm.replace(".h",".c") == os.path.basename(src_file):
# Rewrite the source without this one include and try a host build.
316 lookfor = ii_src_line(src_info)[inc_file]
317 src_tmp.remove (lookfor)
318 message = "Trying " + src_file + " without " + inc_file
321 verbose.write (message + "\n")
322 out = open(src_file, "w")
329 res = get_make_output (build_dir + "/gcc", "all")
334 message = "Passed Host build"
337 message = "Compilation failed:\n";
# Host build OK: now try the target builds in parallel.
341 objfile = get_obj_name (src_file)
343 if objfile and os.path.exists(t1[1] +"/gcc/"+objfile):
344 res = get_make_output_parallel (targ_list, objfile, 0)
346 res = get_make_output_parallel (targ_list, "all-gcc", 0)
349 message = "Compilation failed on TARGET : " + res[2]
352 message = "Passed host and target builds"
359 verbose.write (message + "\n");
360 verbose.write (res[1])
361 verbose.write ("\n");
# Record the failure in per-header and per-source log files.
362 if os.path.exists (inc_file):
363 ilog = open(inc_file+".log","a")
364 ilog.write (message + " for " + src_file + ":\n\n");
365 ilog.write ("============================================\n");
369 if os.path.exists (src_file):
370 ilog = open(src_file+".log","a")
371 ilog.write (message + " for " +inc_file + ":\n\n");
372 ilog.write ("============================================\n");
377 # Given a sequence where :
379 # #include "target.h" // includes tm.h
381 # target.h was required, and when attempting to remove tm.h we'd see that
382 # all the macro definitions are "required" since they all look like:
387 # when target.h was found to be required, tm.h will be tagged as included.
388 # so when we get this far, we know we don't have to check the macros for
389 # tm.h since we know it has already been included.
391 if inc_file_norm not in header_seen:
392 iinfo = get_header_info (inc_file, spath)
393 newlist = all_headers (inc_file_norm)
# Without any targets we cannot validate removing a build-generated header.
394 if ii_path(iinfo) == "build" and not target_dict:
396 text = message + " : Will not remove a build file without some targets."
398 ilog = open(src_file+".log","a")
399 ilog.write (text +"\n")
400 ilog.write ("============================================\n");
402 ilog = open("reduce-headers-kept.log","a")
403 ilog.write (src_file + " " + text +"\n")
407 if not keep and inc_file_norm not in header_seen:
408 # now look for any macro requirements.
410 if not h in header_seen:
411 if header_dict.get(h):
412 defined = ii_macro_define (header_dict[h])
# Keep the include if it defines a macro the source still requires.
414 if dep in src_requires and dep not in ignore_list:
416 text = message + ", but must keep " + inc_file + " because it provides " + dep
417 if because.get(dep) != None:
418 text = text + " Possibly required by " + because[dep]
420 ilog = open(inc_file+".log","a")
# NOTE(review): "[dep]" below is written literally; presumably the macro
# name in dep was meant to be interpolated -- confirm before changing.
421 ilog.write (because[dep]+": Requires [dep] in "+src_file+"\n")
422 ilog.write ("============================================\n");
424 ilog = open(src_file+".log","a")
425 ilog.write (text +"\n")
426 ilog.write ("============================================\n");
428 ilog = open("reduce-headers-kept.log","a")
429 ilog.write (src_file + " " + text +"\n")
432 verbose.write (text + "\n")
435 # add all headers 'consumes' to src_requires list, and mark as seen
437 if not h in header_seen:
438 header_seen.append (h)
439 if header_dict.get(h):
440 consume = ii_macro_consume (header_dict[h])
442 if dep not in src_requires:
443 src_requires.add (dep)
444 if because.get(dep) == None:
445 because[dep] = inc_file
# The include had to be kept: restore the unmodified working copy.
447 src_tmp = copy.deepcopy (src_data)
449 print message + " --> removing " + inc_file + "\n"
452 verbose.write (message + " --> removing " + inc_file + "\n")
# Track how often each header gets removed across all source files.
453 if remove_count.get(inc_file) == None:
454 remove_count[inc_file] = 1
456 remove_count[inc_file] += 1
457 src_data = copy.deepcopy (src_tmp)
459 print "Interuption: restoring original file"
460 out = open(src_file, "w")
461 for line in src_orig:
466 # copy current version, since it is the "right" one now.
467 out = open(src_file, "w")
468 for line in src_data:
472 # Try a final host bootstrap build to make sure everything is kosher.
474 res = get_make_output (build_dir, "all")
477 # host build failed! return to original version
478 print "Error: " + src_file + " Failed to bootstrap at end!!! restoring."
479 print " Bad version at " + src_file + ".bad"
480 os.rename (src_file, src_file + ".bad")
481 out = open(src_file, "w")
482 for line in src_orig:
485 return src_file + ": failed to build after reduction. Restored original"
487 if src_data == src_orig:
488 summary = src_file + ": No change."
490 summary = src_file + ": Reduction performed, "+str(rmcount)+" includes removed."
# ---- main script ----
# Parse command-line options; any remaining arguments are source files.
500 for x in sys.argv[1:]:
504 fn = normalize_header (x[2:])
512 ignore_list.append(x[2:])
514 only_targs.append(x[2:])
518 print "Error: Unrecognized option " + x
521 if not os.path.exists (x):
522 print "Error: specified file " + x + " does not exist."
528 build_target_dict (target_dir, only_targs)
# A build directory and/or a target directory is mandatory.
530 if build_dir == "" and target_dir == "":
531 print "Error: Must specify a build directory, and/or a target directory."
534 if build_dir and not os.path.exists (build_dir):
535 print "Error: specified build directory does not exist : " + build_dir
538 if target_dir and not os.path.exists (target_dir):
539 print "Error: specified target directory does not exist : " + target_dir
# Usage text, shown when no source files were supplied.
543 print "Attempts to remove extraneous include files from source files."
545 print "Should be run from the main gcc source directory, and works on a target"
546 print "directory, as we attempt to make the 'all' target."
548 print "By default, gcc-reorder-includes is run on each file before attempting"
549 print "to remove includes. this removes duplicates and puts some headers in a"
550 print "canonical ordering"
552 print "The build directory should be ready to compile via make. Time is saved"
553 print "if the build is already complete, so that only changes need to be built."
555 print "Usage: [options] file1.c [file2.c] ... [filen.c]"
556 print " -bdir : the root build directory to attempt buiding .o files."
557 print " -tdir : the target build directory"
558 print " -d : Ignore conditional macro dependencies."
560 print " -Dmacro : Ignore a specific macro for dependencies"
561 print " -Ttarget : Only consider target in target directory."
562 print " -fheader : Specifies a specific .h file to be considered."
564 print " -D, -T, and -f can be specified mulitple times and are aggregated."
566 print " The original file will be in filen.bak"
571 print "Attempting to remove only these files:"
# Process each source file, appending the result to the summary log.
576 logfile = open("reduce-headers.log","w")
579 msg = try_to_remove (x, only_h, logfile)
580 ilog = open("reduce-headers.sum","a")
581 ilog.write (msg + "\n")
# Finally, dump the per-header removal counts to both logs.
584 ilog = open("reduce-headers.sum","a")
585 ilog.write ("===============================================================\n")
586 for x in remove_count:
587 msg = x + ": Removed " + str(remove_count[x]) + " times."
589 logfile.write (msg + "\n")
590 ilog.write (msg + "\n")