9 from headerutils import *
# Headers which may never be removed: every GCC source file depends on these
# core configuration headers.
no_remove = [ "system.h", "coretypes.h", "config.h" , "bconfig.h", "backend.h" ]

# These targets are the ones which provide "coverage". Typically, if any
# target is going to fail compilation, it's one of these. This was determined
# during the initial runs of reduce-headers... On a full set of target builds,
# every failure which occurred was triggered by one of these.
# This list is used during target-list construction simply to put any of these
# *first* in the candidate list, increasing the probability that a failure is
                    "powerpc-eabisimaltivec",
                    "rs6000-ibm-aix5.1.0",

# Accumulates (target, build-path) pairs for the builds that get exercised.
target_builds = list()

# Directories searched, in order, when resolving a header file name.
search_path = [ ".", "../include", "../libcpp/include" ]
# Given a header name, normalize it. I.e. cp/cp-tree.h could be in gcc, while
# the same header could be referenced from within the cp subdirectory as
# for now, just assume basenames are unique
def normalize_header (header):
  # Reduce a header path to its bare file name; basenames are assumed
  # to be unique across the tree, so this is the canonical key.
  dir_part, base_name = os.path.split (header)
  return base_name
59 # Adds a header file and its sub-includes to the global dictionary if they
60 # aren't already there. Specify s_path since different build directories may
61 # append themselves on demand to the global list.
62 # return entry for the specified header, knowing all sub entries are completed
def get_header_info (header, s_path):
  # Locate HEADER along the search path S_PATH, compute its include info
  # (macros consumed/defined, sub-includes), cache it in the global
  # header_dict, and recurse into each sub-include.
  process_list = list ()
  bname_iinfo = empty_iinfo
    # Probe each search directory for the header.
    if os.path.exists (path + "/" + header):
      location = path + "/" + header
    bname = normalize_header (location)
    if header_dict.get (bname):
      # A cached entry exists; verify it refers to this same location.
      bname_iinfo = header_dict[bname]
      loc2 = ii_path (bname_iinfo)+ "/" + bname
      if location[:2] == "./":
        location = location[2:]
        # Don't use the cache if it isn't the right one.
        bname_iinfo = process_ii_macro (location)

    bname_iinfo = process_ii_macro (location)
    header_dict[bname] = bname_iinfo
    # now descend into the include tree
    for i in ii_include_list (bname_iinfo):
      get_header_info (i, s_path)

  # if the file isn't in the source directories, look in the build and target
  # directories. If it is here, then aggregate all the versions.
  location = build_dir + "/gcc/" + header
  build_inc = target_inc = False
  if os.path.exists (location):
  for x in target_dict:
    location = target_dict[x] + "/gcc/" + header
    if os.path.exists (location):

  if (build_inc or target_inc):
    bname = normalize_header(header)
      # Collect macro info from the host build-directory copy first.
      iinfo = process_ii_macro (build_dir + "/gcc/" + header)
      defines = set (ii_macro_define (iinfo))
      consumes = set (ii_macro_consume (iinfo))
      incl = set (ii_include_list (iinfo))
      # Merge in info from every target build's copy of the header.
      for x in target_dict:
        location = target_dict[x] + "/gcc/" + header
        if os.path.exists (location):
          iinfo = process_ii_macro (location)
          defines.update (ii_macro_define (iinfo))
          consumes.update (ii_macro_consume (iinfo))
          incl.update (ii_include_list (iinfo))

    # Synthesize an aggregate entry tagged as living in the "build" dir.
    bname_iinfo = (header, "build", list(incl), list(), list(consumes), list(defines), list(), list())

    header_dict[bname] = bname_iinfo
      get_header_info (i, s_path)
137 # return a list of all headers brought in by this header
def all_headers (fname):
  # Walk header_dict from FNAME with an explicit work stack and return
  # the list of all (normalized) headers it transitively includes.
  headers_stack = list()
  headers_list = list()
  if header_dict.get (fname) == None:
  # Seed the stack with FNAME's direct includes.
  for y in ii_include_list (header_dict[fname]):
    headers_stack.append (y)

    h = headers_stack.pop ()
    hn = normalize_header (h)
    if hn not in headers_list:
      headers_list.append (hn)
      # Push this header's own includes, skipping ones already seen.
      if header_dict.get(hn):
        for y in ii_include_list (header_dict[hn]):
          if normalize_header (y) not in headers_list:
            headers_stack.append (y)
162 # Search bld_dir for all target tuples, confirm that they have a build path with
163 # bld_dir/target-tuple/gcc, and build a dictionary of build paths indexed by
def build_target_dict (bld_dir, just_these):
  # Scan BLD_DIR for target-tuple build directories (limited to JUST_THESE
  # when supplied) and record each tuple -> build path in target_dict.
  if os.path.exists (bld_dir):
      ls = os.listdir(bld_dir)
    # A "-" in the name is what marks a target-tuple directory.
    if t.find("-") != -1:
      tpath = bld_dir + "/" + target
      if not os.path.exists (tpath + "/gcc"):
        print "Error: gcc build directory for target " + t + " Does not exist: " + tpath + "/gcc"
        target_dict[target] = tpath
def get_obj_name (src_file):
  # Map a source file name to the object file the build produces for it.
  # Use slicing rather than str.replace: replace() rewrites EVERY
  # occurrence of ".c"/".cc" in the name (e.g. "foo.core.c" would become
  # "foo.oore.o"), while only the suffix should change.
  # Returns None for files that are not .c or .cc sources.
  if src_file[-2:] == ".c":
    return src_file[:-2] + ".o"
  elif src_file[-3:] == ".cc":
    return src_file[:-3] + ".o"
def target_obj_exists (target, obj_name):
  # Check whether TARGET's gcc build directory already contains OBJ_NAME
  # (result flag/return elided from this view).
  # look in a subdir if src has a subdir, then check gcc base directory.
  if target_dict.get(target):
    obj = target_dict[target] + "/gcc/" + obj_name
    if not os.path.exists (obj):
      obj = target_dict[target] + "/gcc/" + os.path.basename(obj_name)
    if os.path.exists (obj):
206 # Given a src file, return a list of targets which may build this file.
def find_targets (src_file):
  # Return a list of (target, build-path) pairs whose builds contain the
  # object file for SRC_FILE.
  obj_name = get_obj_name (src_file)
    print "Error: " + src_file + " - Cannot determine object name."

  # Put the high priority targets which tend to trigger failures first
  for target in target_priority:
    if target_obj_exists (target, obj_name):
      targ_list.append ((target, target_dict[target]))
  # Then append any remaining targets that can build this object.
  for target in target_dict:
    if target not in target_priority and target_obj_exists (target, obj_name):
      targ_list.append ((target, target_dict[target]))
def try_to_remove (src_file, h_list, verbose):
  # Attempt to remove each include from SRC_FILE (restricted to H_LIST
  # when non-empty), rebuilding the host compiler and target compilers
  # after each removal to verify nothing breaks.  VERBOSE is an optional
  # log stream.  Returns a one-line summary string for the file.
  # build from scratch each time
  src_info = process_ii_macro_src (src_file)
  src_data = ii_src (src_info)
    inclist = ii_include_list_non_cond (src_info)
    # work is done if there are no includes to check
      return src_file + ": No include files to attempt to remove"

    # work on the include list in reverse.

    # Get the target list
    targ_list = find_targets (src_file)

    if os.path.dirname (src_file):
      spath.append (os.path.dirname (src_file))

    if src_file.find("config/") != -1:
      # config files don't usually build on the host
      obn = get_obj_name (os.path.basename (src_file))
      if obn and os.path.exists (build_dir + "/gcc/" + obn):
        summary = src_file + ": Target builds are required for config files. None found."
        summary =src_file + ": Cannot find any targets which build this file."

      # confirm it actually builds before we do anything
      print "Confirming source file builds"
      res = get_make_output (build_dir + "/gcc", "all")
        message = "Error: " + src_file + " does not build currently."
        summary = src_file + " does not build on host."
          verbose.write (message + "\n")
          verbose.write (res[1]+ "\n")

    # Macros the source itself consumes; remember why each is required.
    src_requires = set (ii_macro_consume (src_info))
    for macro in src_requires:
      because[macro] = src_file
    header_seen = list ()

    # Work on a copy; the original is kept as .bak for recovery.
    os.rename (src_file, src_file + ".bak")
    src_orig = copy.deepcopy (src_data)
    src_tmp = copy.deepcopy (src_data)
      # process the includes from bottom to top. This is because we know that
      # later includes are known to be needed, so any dependency from this
      # header is a true dependency
      for inc_file in inclist:
        inc_file_norm = normalize_header (inc_file)
        # Skip includes that must never be removed or were not requested.
        if inc_file in no_remove:
        if len (h_list) != 0 and inc_file_norm not in h_list:
        if inc_file_norm[0:3] == "gt-":
        if inc_file_norm[0:6] == "gtype-":
        if inc_file_norm.replace(".h",".c") == os.path.basename(src_file):

        # Write the source out without this one include, then rebuild.
        lookfor = ii_src_line(src_info)[inc_file]
        src_tmp.remove (lookfor)
        message = "Trying " + src_file + " without " + inc_file
          verbose.write (message + "\n")
        out = open(src_file, "w")

          res = get_make_output (build_dir + "/gcc", "all")

        message = "Passed Host build"
          message = "Compilation failed:\n";
            objfile = get_obj_name (src_file)
            # Prefer building just the object in parallel across targets;
            # fall back to all-gcc when the object is not present.
            if objfile and os.path.exists(t1[1] +"/gcc/"+objfile):
              res = get_make_output_parallel (targ_list, objfile, 0)
              res = get_make_output_parallel (targ_list, "all-gcc", 0)
              message = "Compilation failed on TARGET : " + res[2]
              message = "Passed host and target builds"

            verbose.write (message + "\n");
            verbose.write (res[1])
            verbose.write ("\n");
          # Log the failure against both the include's and the source's logs.
          if os.path.exists (inc_file):
            ilog = open(inc_file+".log","a")
            ilog.write (message + " for " + src_file + ":\n\n");
            ilog.write ("============================================\n");
          if os.path.exists (src_file):
            ilog = open(src_file+".log","a")
            ilog.write (message + " for " +inc_file + ":\n\n");
            ilog.write ("============================================\n");

        # Given a sequence where :
        # #include "target.h" // includes tm.h
        # target.h was required, and when attempting to remove tm.h we'd see that
        # all the macro definitions are "required" since they all look like:
        # when target.h was found to be required, tm.h will be tagged as included.
        # so when we get this far, we know we don't have to check the macros for
        # tm.h since we know it has already been included.
          if inc_file_norm not in header_seen:
            iinfo = get_header_info (inc_file, spath)
            newlist = all_headers (inc_file_norm)
            # Never remove a build-directory header without target builds.
            if ii_path(iinfo) == "build" and not target_dict:
              text = message + " : Will not remove a build file without some targets."
              ilog = open(src_file+".log","a")
              ilog.write (text +"\n")
              ilog.write ("============================================\n");
              ilog = open("reduce-headers-kept.log","a")
              ilog.write (src_file + " " + text +"\n")

        if not keep and inc_file_norm not in header_seen:
          # now look for any macro requirements.
            if not h in header_seen:
              if header_dict.get(h):
                defined = ii_macro_define (header_dict[h])
                  # A macro this header provides is consumed by the source:
                  # the include must stay.
                  if dep in src_requires and dep not in ignore_list:
                    text = message + ", but must keep " + inc_file + " because it provides " + dep
                    if because.get(dep) != None:
                      text = text + " Possibly required by " + because[dep]
                    ilog = open(inc_file+".log","a")
                    ilog.write (because[dep]+": Requires [dep] in "+src_file+"\n")
                    ilog.write ("============================================\n");
                    ilog = open(src_file+".log","a")
                    ilog.write (text +"\n")
                    ilog.write ("============================================\n");
                    ilog = open("reduce-headers-kept.log","a")
                    ilog.write (src_file + " " + text +"\n")
                      verbose.write (text + "\n")

          # add all headers 'consumes' to src_requires list, and mark as seen
            if not h in header_seen:
              header_seen.append (h)
              if header_dict.get(h):
                consume = ii_macro_consume (header_dict[h])
                  if dep not in src_requires:
                    src_requires.add (dep)
                    if because.get(dep) == None:
                      because[dep] = inc_file

          src_tmp = copy.deepcopy (src_data)
          print message + " --> removing " + inc_file + "\n"
            verbose.write (message + " --> removing " + inc_file + "\n")
          # Tally how often each header proved removable.
          if remove_count.get(inc_file) == None:
            remove_count[inc_file] = 1
            remove_count[inc_file] += 1
          src_data = copy.deepcopy (src_tmp)
      # On interrupt/exception, put the original source back before re-raising.
      print "Interuption: restoring original file"
      out = open(src_file, "w")
      for line in src_orig:

    # copy current version, since it is the "right" one now.
    out = open(src_file, "w")
    for line in src_data:

    # Try a final host bootstrap build to make sure everything is kosher.
      res = get_make_output (build_dir, "all")
        # host build failed! return to original version
        print "Error: " + src_file + " Failed to bootstrap at end!!! restoring."
        print " Bad version at " + src_file + ".bad"
        os.rename (src_file, src_file + ".bad")
        out = open(src_file, "w")
        for line in src_orig:
        return src_file + ": failed to build after reduction. Restored original"

    if src_data == src_orig:
      summary = src_file + ": No change."
      summary = src_file + ": Reduction performed, "+str(rmcount)+" includes removed."
# Command line parsing: collect options, anything else is a source file.
for x in sys.argv[1:]:
    # -f: restrict removal attempts to this specific header.
    fn = normalize_header (x[2:])
    # -D: ignore this macro when computing dependencies.
    ignore_list.append(x[2:])
    # -T: only consider this target.
    only_targs.append(x[2:])
    print "Error: Unrecognized option " + x
    # Non-option arguments must name existing source files.
    if not os.path.exists (x):
      print "Error: specified file " + x + " does not exist."

  build_target_dict (target_dir, only_targs)

if build_dir == "" and target_dir == "":
  print "Error: Must specify a build directory, and/or a target directory."

if build_dir and not os.path.exists (build_dir):
  print "Error: specified build directory does not exist : " + build_dir

if target_dir and not os.path.exists (target_dir):
  print "Error: specified target directory does not exist : " + target_dir

  # Usage / help text.
  print "Attempts to remove extraneous include files from source files."
  print "Should be run from the main gcc source directory, and works on a target"
  print "directory, as we attempt to make the 'all' target."
  print "By default, gcc-reorder-includes is run on each file before attempting"
  print "to remove includes. this removes duplicates and puts some headers in a"
  print "canonical ordering"
  print "The build directory should be ready to compile via make. Time is saved"
  print "if the build is already complete, so that only changes need to be built."
  print "Usage: [options] file1.c [file2.c] ... [filen.c]"
  print " -bdir : the root build directory to attempt buiding .o files."
  print " -tdir : the target build directory"
  print " -d : Ignore conditional macro dependencies."
  print " -Dmacro : Ignore a specific macro for dependencies"
  print " -Ttarget : Only consider target in target directory."
  print " -fheader : Specifies a specific .h file to be considered."
  print " -D, -T, and -f can be specified mulitple times and are aggregated."
  print " The original file will be in filen.bak"

  print "Attempting to remove only these files:"

# Process each file, appending a one-line summary per file to the .sum log.
logfile = open("reduce-headers.log","w")
  msg = try_to_remove (x, only_h, logfile)
  ilog = open("reduce-headers.sum","a")
  ilog.write (msg + "\n")

  # Append overall per-header removal counts to the summary log.
  ilog = open("reduce-headers.sum","a")
  ilog.write ("===============================================================\n")
  for x in remove_count:
    msg = x + ": Removed " + str(remove_count[x]) + " times."
    logfile.write (msg + "\n")
    ilog.write (msg + "\n")