9 from headerutils import *
# NOTE(review): this file is a fragmented extract -- the leading number on
# each line is the original file's line number, and many lines are missing.
# Code lines below are left exactly as found.
# Headers that must never be removed from any source file: the core
# configuration/system headers used throughout the tree.
14 no_remove = [ "system.h", "coretypes.h", "config.h" , "bconfig.h", "backend.h" ]
16 # These targets are the ones which provide "coverage". Typically, if any
17 # target is going to fail compilation, it's one of these. This was determined
18 # during the initial runs of reduce-headers... On a full set of target builds,
19 # every failure which occurred was triggered by one of these.
20 # This list is used during target-list construction simply to put any of these
21 # *first* in the candidate list, increasing the probability that a failure is
# NOTE(review): the next two lines are entries of the target_priority list;
# the list's opening assignment and remaining entries are not visible here.
33 "powerpc-eabisimaltivec",
34 "rs6000-ibm-aix5.1.0",
# Accumulates target build information discovered at startup.
44 target_builds = list()
# Directories searched, in order, when resolving an include file name.
48 search_path = [ ".", "../include", "../libcpp/include" ]
53 # Given a header name, normalize it. i.e. cp/cp-tree.h could be in gcc, while
54 # the same header could be referenced from within the cp subdirectory as
56 # for now, just assume basenames are unique
def normalize_header (header):
    """Return the canonical (normalized) name for HEADER.

    The same header can be referenced by different relative paths from
    different directories (e.g. "cp/cp-tree.h" from gcc/ versus
    "cp-tree.h" from within the cp subdirectory), so for now assume
    basenames are unique and use the basename as the canonical key.
    """
    # os.path.basename drops any directory components.
    return os.path.basename (header)
62 # Adds a header file and its sub-includes to the global dictionary if they
63 # aren't already there. Specify s_path since different build directories may
64 # append themselves on demand to the global list.
65 # return entry for the specified header, knowing all sub entries are completed
# Add HEADER (and, recursively, its sub-includes) to the global header_dict
# cache if not already present, searching the directories in S_PATH; return
# the include-info entry for HEADER.
# NOTE(review): fragmented extract -- many original lines are missing
# (else branches, loop headers), so control flow below is incomplete.
# Code lines are left exactly as found.
67 def get_header_info (header, s_path):
70     process_list = list ()
73     bname_iinfo = empty_iinfo
# Probe each directory in the search path for the header file.
75     if os.path.exists (path + "/" + header):
76       location = path + "/" + header
80   bname = normalize_header (location)
# Use the cached entry if this basename was already processed.
81   if header_dict.get (bname):
82     bname_iinfo = header_dict[bname]
83     loc2 = ii_path (bname_iinfo)+ "/" + bname
86     if location[:2] == "./":
87       location = location[2:]
89     # Don't use the cache if it isn't the right one.
90     bname_iinfo = process_ii_macro (location)
# Not cached yet: parse the header and record the result.
93     bname_iinfo = process_ii_macro (location)
94     header_dict[bname] = bname_iinfo
95     # now descend into the include tree
96     for i in ii_include_list (bname_iinfo):
97       get_header_info (i, s_path)
99   # if the file isn't in the source directories, look in the build and target
100   # directories. If it is here, then aggregate all the versions.
101   location = build_dir + "/gcc/" + header
102   build_inc = target_inc = False
103   if os.path.exists (location):
# Check every known target build directory for a copy of this header.
105   for x in target_dict:
106     location = target_dict[x] + "/gcc/" + header
107     if os.path.exists (location):
111   if (build_inc or target_inc):
112     bname = normalize_header(header)
# Merge macro defines/consumes and include lists across all copies found,
# since generated headers can differ per target.
117     iinfo = process_ii_macro (build_dir + "/gcc/" + header)
118     defines = set (ii_macro_define (iinfo))
119     consumes = set (ii_macro_consume (iinfo))
120     incl = set (ii_include_list (iinfo))
123     for x in target_dict:
124       location = target_dict[x] + "/gcc/" + header
125       if os.path.exists (location):
126         iinfo = process_ii_macro (location)
127         defines.update (ii_macro_define (iinfo))
128         consumes.update (ii_macro_consume (iinfo))
129         incl.update (ii_include_list (iinfo))
# Synthesize an aggregated entry marked as living in the "build" tree.
131     bname_iinfo = (header, "build", list(incl), list(), list(consumes), list(defines), list(), list())
133     header_dict[bname] = bname_iinfo
135       get_header_info (i, s_path)
140 # return a list of all headers brought in by this header
# Return the transitive list of all (normalized) headers brought in by
# FNAME, walked iteratively with an explicit stack.
# NOTE(review): fragmented extract -- the stack-pop loop header and the
# final return are among the missing original lines; code left as found.
141 def all_headers (fname):
143   headers_stack = list()
144   headers_list = list()
# Unknown header: nothing cached to traverse.
145   if header_dict.get (fname) == None:
# Seed the stack with FNAME's direct includes.
147   for y in ii_include_list (header_dict[fname]):
148     headers_stack.append (y)
151     h = headers_stack.pop ()
152     hn = normalize_header (h)
153     if hn not in headers_list:
154       headers_list.append (hn)
155       if header_dict.get(hn):
# Push this header's own includes, skipping ones already collected.
156         for y in ii_include_list (header_dict[hn]):
157           if normalize_header (y) not in headers_list:
158             headers_stack.append (y)
165 # Search bld_dir for all target tuples, confirm that they have a build path with
166 # bld_dir/target-tuple/gcc, and build a dictionary of build paths indexed by
# Scan BLD_DIR for target-tuple subdirectories that contain a gcc/ build
# directory and record their paths in the global target_dict, indexed by
# tuple. JUST_THESE presumably restricts which tuples are accepted --
# its handling is among the missing lines; TODO confirm.
# NOTE(review): fragmented extract -- the loop over `ls` and the error
# path are partially missing; code lines left exactly as found.
169 def build_target_dict (bld_dir, just_these):
173   if os.path.exists (bld_dir):
177     ls = os.listdir(bld_dir)
# Target tuples are recognized by containing a '-' (e.g. rs6000-ibm-aix).
179       if t.find("-") != -1:
181         tpath = bld_dir + "/" + target
182         if not os.path.exists (tpath + "/gcc"):
183           print "Error: gcc build directory for target " + t + " Does not exist: " + tpath + "/gcc"
186           target_dict[target] = tpath
# Map a source file name to its object file name (.c/.cc -> .o).
# NOTE(review): str.replace substitutes the FIRST occurrence rather than
# the suffix, so a name containing ".c" earlier in the string would be
# mangled -- presumably acceptable for gcc source file names; confirm.
# The fallback return for other extensions is among the missing lines.
191 def get_obj_name (src_file):
192   if src_file[-2:] == ".c":
193     return src_file.replace (".c", ".o")
194   elif src_file[-3:] == ".cc":
195     return src_file.replace (".cc", ".o")
# Report whether TARGET's build tree already contains OBJ_NAME: check the
# matching subdirectory path first, then fall back to the gcc/ base
# directory. NOTE(review): the return statements are among the missing
# lines of this fragmented extract; code left exactly as found.
198 def target_obj_exists (target, obj_name):
200   # look in a subdir if src has a subdir, then check gcc base directory.
201   if target_dict.get(target):
202     obj = target_dict[target] + "/gcc/" + obj_name
203     if not os.path.exists (obj):
204       obj = target_dict[target] + "/gcc/" + os.path.basename(obj_name)
205     if os.path.exists (obj):
209 # Given a src file, return a list of targets which may build this file.
# Return a list of (target, build-path) tuples which may build SRC_FILE,
# with the failure-prone high-priority targets placed first.
# NOTE(review): fragmented extract -- targ_list initialization and the
# return are among the missing lines; code left exactly as found.
210 def find_targets (src_file):
213   obj_name = get_obj_name (src_file)
215     print "Error: " + src_file + " - Cannot determine object name."
218   # Put the high priority targets which tend to trigger failures first
219   for target in target_priority:
220     if target_obj_exists (target, obj_name):
221       targ_list.append ((target, target_dict[target]))
# Then every other known target whose tree has built this object.
223   for target in target_dict:
224     if target not in target_priority and target_obj_exists (target, obj_name):
225       targ_list.append ((target, target_dict[target]))
# Attempt to remove each include from SRC_FILE one at a time, rebuilding
# the host (and any relevant target trees) after each removal; an include
# is dropped permanently only when everything still builds without it.
# H_LIST, when non-empty, restricts which headers may be removed; VERBOSE
# is an open log stream. Returns a one-line summary string.
# NOTE(review): fragmented extract -- the leading numbers are original
# line numbers and many lines (else branches, loop headers, try/except
# plumbing, file writes) are missing. Code lines left exactly as found.
230 def try_to_remove (src_file, h_list, verbose):
235 # build from scratch each time
# Parse the source file for its macro usage and include list.
241 src_info = process_ii_macro_src (src_file)
242 src_data = ii_src (src_info)
244 inclist = ii_include_list_non_cond (src_info)
245 # work is done if there are no includes to check
247 return src_file + ": No include files to attempt to remove"
249 # work on the include list in reverse.
252 # Get the target list
254 targ_list = find_targets (src_file)
# Also search the source file's own directory when resolving headers.
257 if os.path.dirname (src_file):
258 spath.append (os.path.dirname (src_file))
261 if src_file.find("config/") != -1:
262 # config files don't usually build on the host
264 obn = get_obj_name (os.path.basename (src_file))
265 if obn and os.path.exists (build_dir + "/gcc/" + obn):
268 summary = src_file + ": Target builds are required for config files. None found."
272 summary =src_file + ": Cannot find any targets which build this file."
277 # confirm it actually builds before we do anything
278 print "Confirming source file builds"
279 res = get_make_output (build_dir + "/gcc", "all")
281 message = "Error: " + src_file + " does not build currently."
282 summary = src_file + " does not build on host."
286 verbose.write (message + "\n")
287 verbose.write (res[1]+ "\n")
# Seed the required-macro set from the source file itself, remembering in
# `because` why each macro is considered required.
290 src_requires = set (ii_macro_consume (src_info))
291 for macro in src_requires:
292 because[macro] = src_file
293 header_seen = list ()
# Keep the original on disk as .bak and work on copies of the parsed src.
295 os.rename (src_file, src_file + ".bak")
296 src_orig = copy.deepcopy (src_data)
297 src_tmp = copy.deepcopy (src_data)
300 # process the includes from bottom to top. This is because we know that
301 # later includes are known to be needed, so any dependency from this
302 # header is a true dependency
303 for inc_file in inclist:
304 inc_file_norm = normalize_header (inc_file)
# Skip headers that may never be removed, headers outside the requested
# h_list, generated gt-*/gtype-* headers, and the file's own header.
306 if inc_file in no_remove:
308 if len (h_list) != 0 and inc_file_norm not in h_list:
310 if inc_file_norm[0:3] == "gt-":
312 if inc_file_norm[0:6] == "gtype-":
314 if inc_file_norm.replace(".h",".c") == os.path.basename(src_file):
# Write the file out without this one #include line and rebuild the host.
317 lookfor = ii_src_line(src_info)[inc_file]
318 src_tmp.remove (lookfor)
319 message = "Trying " + src_file + " without " + inc_file
322 verbose.write (message + "\n")
323 out = open(src_file, "w")
330 res = get_make_output (build_dir + "/gcc", "all")
335 message = "Passed Host build"
338 message = "Compilation failed:\n";
# Host build passed; now try the target builds -- just this object file
# when it exists in the first target tree, otherwise all-gcc.
342 objfile = get_obj_name (src_file)
344 if objfile and os.path.exists(t1[1] +"/gcc/"+objfile):
345 res = get_make_output_parallel (targ_list, objfile, 0)
347 res = get_make_output_parallel (targ_list, "all-gcc", 0)
350 message = "Compilation failed on TARGET : " + res[2]
353 message = "Passed host and target builds"
# On failure, log the build output against both the header and source.
360 verbose.write (message + "\n");
361 verbose.write (res[1])
362 verbose.write ("\n");
363 if os.path.exists (inc_file):
364 ilog = open(inc_file+".log","a")
365 ilog.write (message + " for " + src_file + ":\n\n");
366 ilog.write ("============================================\n");
370 if os.path.exists (src_file):
371 ilog = open(src_file+".log","a")
372 ilog.write (message + " for " +inc_file + ":\n\n");
373 ilog.write ("============================================\n");
378 # Given a sequence where :
380 # #include "target.h" // includes tm.h
382 # target.h was required, and when attempting to remove tm.h we'd see that
383 # all the macro definitions are "required" since they all look like:
388 # when target.h was found to be required, tm.h will be tagged as included.
389 # so when we get this far, we know we don't have to check the macros for
390 # tm.h since we know it has already been included.
392 if inc_file_norm not in header_seen:
393 iinfo = get_header_info (inc_file, spath)
394 newlist = all_headers (inc_file_norm)
# Never drop a build-tree header unless target builds were available to
# verify the removal.
395 if ii_path(iinfo) == "build" and not target_dict:
397 text = message + " : Will not remove a build file without some targets."
399 ilog = open(src_file+".log","a")
400 ilog.write (text +"\n")
401 ilog.write ("============================================\n");
403 ilog = open("reduce-headers-kept.log","a")
404 ilog.write (src_file + " " + text +"\n")
# The header must be kept when any header it brings in defines a macro
# the compilation consumes (unless that macro is in ignore_list).
408 if not keep and inc_file_norm not in header_seen:
409 # now look for any macro requirements.
411 if not h in header_seen:
412 if header_dict.get(h):
413 defined = ii_macro_define (header_dict[h])
415 if dep in src_requires and dep not in ignore_list:
417 text = message + ", but must keep " + inc_file + " because it provides " + dep
418 if because.get(dep) != None:
419 text = text + " Possibly required by " + because[dep]
421 ilog = open(inc_file+".log","a")
422 ilog.write (because[dep]+": Requires [dep] in "+src_file+"\n")
423 ilog.write ("============================================\n");
425 ilog = open(src_file+".log","a")
426 ilog.write (text +"\n")
427 ilog.write ("============================================\n");
429 ilog = open("reduce-headers-kept.log","a")
430 ilog.write (src_file + " " + text +"\n")
433 verbose.write (text + "\n")
436 # add all headers 'consumes' to src_requires list, and mark as seen
438 if not h in header_seen:
439 header_seen.append (h)
440 if header_dict.get(h):
441 consume = ii_macro_consume (header_dict[h])
443 if dep not in src_requires:
444 src_requires.add (dep)
445 if because.get(dep) == None:
446 because[dep] = inc_file
# Keeping the header: reset the working copy from the last good version.
448 src_tmp = copy.deepcopy (src_data)
# Removal succeeded: accept src_tmp as the new baseline and count it.
450 print message + " --> removing " + inc_file + "\n"
453 verbose.write (message + " --> removing " + inc_file + "\n")
454 if remove_count.get(inc_file) == None:
455 remove_count[inc_file] = 1
457 remove_count[inc_file] += 1
458 src_data = copy.deepcopy (src_tmp)
# On interrupt, restore the original file before exiting.
460 print "Interuption: restoring original file"
461 out = open(src_file, "w")
462 for line in src_orig:
467 # copy current version, since it is the "right" one now.
468 out = open(src_file, "w")
469 for line in src_data:
473 # Try a final host bootstrap build to make sure everything is kosher.
475 res = get_make_output (build_dir, "all")
478 # host build failed! return to original version
479 print "Error: " + src_file + " Failed to bootstrap at end!!! restoring."
480 print "  Bad version at " + src_file + ".bad"
481 os.rename (src_file, src_file + ".bad")
482 out = open(src_file, "w")
483 for line in src_orig:
486 return src_file + ": failed to build after reduction. Restored original"
488 if src_data == src_orig:
489 summary = src_file + ": No change."
491 summary = src_file + ": Reduction performed, "+str(rmcount)+" includes removed."
# Command-line driver: parse options and file arguments, build the target
# dictionary, validate directories (printing usage text on error), then run
# try_to_remove on each source file, logging per-file summaries and a final
# per-header removal count.
# NOTE(review): fragmented extract -- many option branches, exits, and loop
# bodies are missing; code lines left exactly as found.
501 for x in sys.argv[1:]:
# -f<header>: restrict removal attempts to this header (normalized).
505 fn = normalize_header (x[2:])
# -D<macro>: ignore this macro when checking dependencies.
513 ignore_list.append(x[2:])
# -T<target>: only consider this target in the target directory.
515 only_targs.append(x[2:])
519 print "Error: Unrecognized option " + x
522 if not os.path.exists (x):
523 print "Error: specified file " + x + " does not exist."
529 build_target_dict (target_dir, only_targs)
# A build directory and/or target directory must exist before running.
531 if build_dir == "" and target_dir == "":
532 print "Error: Must specify a build directory, and/or a target directory."
535 if build_dir and not os.path.exists (build_dir):
536 print "Error: specified build directory does not exist : " + build_dir
539 if target_dir and not os.path.exists (target_dir):
540 print "Error: specified target directory does not exist : " + target_dir
# Usage text, presumably printed when arguments are missing or invalid.
544 print "Attempts to remove extraneous include files from source files."
546 print "Should be run from the main gcc source directory, and works on a target"
547 print "directory, as we attempt to make the 'all' target."
549 print "By default, gcc-reorder-includes is run on each file before attempting"
550 print "to remove includes. this removes duplicates and puts some headers in a"
551 print "canonical ordering"
553 print "The build directory should be ready to compile via make. Time is saved"
554 print "if the build is already complete, so that only changes need to be built."
556 print "Usage: [options] file1.c [file2.c] ... [filen.c]"
557 print " -bdir : the root build directory to attempt buiding .o files."
558 print " -tdir : the target build directory"
559 print " -d : Ignore conditional macro dependencies."
561 print " -Dmacro : Ignore a specific macro for dependencies"
562 print " -Ttarget : Only consider target in target directory."
563 print " -fheader : Specifies a specific .h file to be considered."
565 print " -D, -T, and -f can be specified mulitple times and are aggregated."
567 print " The original file will be in filen.bak"
572 print "Attempting to remove only these files:"
# Process each source file, appending its summary to reduce-headers.sum.
577 logfile = open("reduce-headers.log","w")
580 msg = try_to_remove (x, only_h, logfile)
581 ilog = open("reduce-headers.sum","a")
582 ilog.write (msg + "\n")
# Final report: how many times each header was successfully removed.
585 ilog = open("reduce-headers.sum","a")
586 ilog.write ("===============================================================\n")
587 for x in remove_count:
588 msg = x + ": Removed " + str(remove_count[x]) + " times."
590 logfile.write (msg + "\n")
591 ilog.write (msg + "\n")