## Copyright (C) 2005, 2006, 2008 Free Software Foundation
## Written by Gary Benson <gbenson@redhat.com>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
## GNU General Public License for more details.
import classfile
import copy
import md5
import operator
import os
import sys
import zipfile
import cStringIO as StringIO
PATHS = {"make":   "@MAKE@",
         "gcj":    "@prefix@/bin/gcj@gcc_suffix@",
         "dbtool": "@prefix@/bin/gcj-dbtool@gcc_suffix@"}
MAKEFLAGS = []
GCJFLAGS = ["-fPIC", "-findirect-dispatch", "-fjni"]
LDFLAGS = ["-Wl,-Bsymbolic"]

MAX_CLASSES_PER_JAR = 1024
MAX_BYTES_PER_JAR = 1048576
MAKEFILE = "Makefile"

MAKEFILE_HEADER = '''\
GCJ = %(gcj)s
DBTOOL = %(dbtool)s
GCJFLAGS = %(gcjflags)s
LDFLAGS = %(ldflags)s

%%.o: %%.jar
	$(GCJ) -c $(GCJFLAGS) $< -o $@

TARGETS = \\
%(targets)s

all: $(TARGETS)'''

MAKEFILE_JOB = '''
%(base)s_SOURCES = \\
%(jars)s

%(base)s_OBJECTS = \\
$(%(base)s_SOURCES:.jar=.o)

%(dso)s: $(%(base)s_OBJECTS)
	$(GCJ) -shared $(GCJFLAGS) $(LDFLAGS) $^ -o $@

%(db)s: $(%(base)s_SOURCES)
	$(DBTOOL) -n $@ 64
	for jar in $^; do \\
	    $(DBTOOL) -f $@ $$jar \\
		%(libdir)s/%(dso)s; \\
	done'''
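# Illustrative only: for a hypothetical job whose basename is "foo.jar"
# (so ruleArguments() below yields base="foo_jar_so", dso="foo.jar.so",
# db="foo.jar.db" and a single temporary jar "foo.jar.1.jar"), the
# MAKEFILE_JOB template expands to roughly:
#
#   foo_jar_so_SOURCES = \
#   foo.jar.1.jar
#
#   foo_jar_so_OBJECTS = \
#   $(foo_jar_so_SOURCES:.jar=.o)
#
#   foo.jar.so: $(foo_jar_so_OBJECTS)
#           $(GCJ) -shared $(GCJFLAGS) $(LDFLAGS) $^ -o $@
#
#   foo.jar.db: $(foo_jar_so_SOURCES)
#           (gcj-dbtool populates the mapping database here)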
ZIPMAGIC, CLASSMAGIC = "PK\x03\x04", "\xca\xfe\xba\xbe"
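# ZIPMAGIC is the zip local-file-header signature ("PK\x03\x04") and
# CLASSMAGIC the Java class-file magic (0xCAFEBABE); find_jobs() below
# compares the first four bytes of each file against these rather than
# trusting file extensions.  Illustrative check (hypothetical filename):
#
#   open("Foo.class", "rb").read(4) == CLASSMAGIC   # => True for a classfile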
class Error(Exception):
    pass
class Compiler:
    def __init__(self, srcdir, libdir, prefix = None):
        self.srcdir = os.path.abspath(srcdir)
        self.libdir = os.path.abspath(libdir)
        if prefix is None:
            self.dstdir = self.libdir
        else:
            self.dstdir = os.path.join(prefix, self.libdir.lstrip(os.sep))

        # Calling code may modify these parameters
        self.gcjflags = copy.copy(GCJFLAGS)
        self.ldflags = copy.copy(LDFLAGS)
        self.makeflags = copy.copy(MAKEFLAGS)
        self.exclusions = []
    def compile(self):
        """Search srcdir for classes and jarfiles, then generate
        solibs and mappings databases for them all in libdir."""
        if not os.path.isdir(self.dstdir):
            os.makedirs(self.dstdir)
        oldcwd = os.getcwd()
        os.chdir(self.dstdir)
        try:
            jobs = self.getJobList()
            if not jobs:
                raise Error, "nothing to do"
            self.writeMakefile(MAKEFILE, jobs)
            for job in jobs:
                job.writeJars()
            system([PATHS["make"]] + self.makeflags)
            for job in jobs:
                job.clean()
            os.unlink(MAKEFILE)
        finally:
            os.chdir(oldcwd)
    def getJobList(self):
        """Return all jarfiles and class collections in srcdir."""
        jobs = weed_jobs(find_jobs(self.srcdir, self.exclusions))
        set_basenames(jobs)
        return jobs
    def writeMakefile(self, path, jobs):
        """Generate a makefile to build the solibs and mappings
        databases for the specified list of jobs."""
        fp = open(path, "w")
        print >>fp, MAKEFILE_HEADER % {
            "gcj": PATHS["gcj"],
            "dbtool": PATHS["dbtool"],
            "gcjflags": " ".join(self.gcjflags),
            "ldflags": " ".join(self.ldflags),
            "targets": " \\\n".join(reduce(operator.add, [
                (job.dsoName(), job.dbName()) for job in jobs]))}
        for job in jobs:
            values = job.ruleArguments()
            values["libdir"] = self.libdir
            print >>fp, MAKEFILE_JOB % values
        fp.close()
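# A minimal usage sketch (assumed calling convention -- the real drivers,
# aot-compile and aot-compile-rpm, wrap this class with option parsing;
# the paths below are hypothetical):
#
#   compiler = Compiler("/usr/share/java", "/usr/lib/gcj/myapp")
#   compiler.gcjflags.append("-O2")                  # flag lists may be tweaked
#   compiler.exclusions.append("/usr/share/java/skipme")
#   compiler.compile()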
def find_jobs(dir, exclusions = ()):
    """Scan a directory and find things to compile: jarfiles (zips,
    wars, ears, rars, etc: we go by magic rather than file extension)
    and directories of classes."""
    def visit((classes, zips), dir, items):
        for item in items:
            path = os.path.join(dir, item)
            if os.path.islink(path) or not os.path.isfile(path):
                continue
            magic = open(path, "r").read(4)
            if magic == ZIPMAGIC:
                zips.append(path)
            elif magic == CLASSMAGIC:
                classes.append(path)
    classes, paths = [], []
    os.path.walk(dir, visit, (classes, paths))
    # Convert the list of classes into a list of directories
    while classes:
        # XXX this requires the class to be correctly located in its hierarchy.
        path = classes[0][:-len(os.sep + classname(classes[0]) + ".class")]
        paths.append(path)
        classes = [cls for cls in classes if not cls.startswith(path)]
    # Handle exclusions.  We're really strict about them because the
    # option is temporary in aot-compile-rpm and dead options left in
    # specfiles will hinder its removal.
    for path in exclusions:
        if path in paths:
            paths.remove(path)
        else:
            raise Error, "%s: path does not exist or is not a job" % path
    # Build the list of jobs
    jobs = []
    paths.sort()
    for path in paths:
        if os.path.isfile(path):
            job = JarJob(path)
        else:
            job = DirJob(path)
        if len(job.classes):
            jobs.append(job)
    return jobs
class Job:
    """A collection of classes that will be compiled as a unit."""

    def __init__(self, path):
        self.path, self.classes, self.blocks = path, {}, None
        self.classnames = {}
    def addClass(self, bytes, name):
        """Subclasses call this from their __init__ method for
        every class they find."""
        digest = md5.new(bytes).digest()
        self.classes[digest] = bytes
        self.classnames[digest] = name
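    # Note: keying self.classes on the MD5 digest of the raw bytes means
    # byte-identical copies of a class (the same class shipped in two
    # jarfiles, say) collapse to a single entry, which is what
    # isSubsetOf() below and weed_jobs() rely on.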
    def __makeBlocks(self):
        """Split self.classes into chunks that can be compiled to
        native code by gcj.  In the majority of cases this is not
        necessary -- the job will have come from a jarfile which will
        be equivalent to the one we generate -- but this only happens
        _if_ the job was a jarfile and _if_ the jarfile isn't too big
        and _if_ the jarfile has the correct extension and _if_ all
        classes are correctly named and _if_ the jarfile has no
        embedded jarfiles.  Fitting a special case around all these
        conditions is tricky to say the least.

        Note that this could be called at the end of each subclass's
        __init__ method.  The reason this is not done is because we
        need to parse every class file.  This is slow, and unnecessary
        if the job is subsetted."""
        names = {}
        for hash, bytes in self.classes.items():
            try:
                name = classname(bytes)
            except:
                warn("job %s: class %s malformed or not a valid class file" \
                     % (self.path, self.classnames[hash]))
                raise
            if not names.has_key(name):
                names[name] = []
            names[name].append(hash)
        names = names.items()
        # We have to sort somehow, or the jars we generate will differ
        # from run to run.  We sort by name in a simplistic attempt to
        # keep related classes together so inter-class optimisation can
        # happen.
        names.sort()
        self.blocks, bytes = [[]], 0
        for name, hashes in names:
            for hash in hashes:
                if len(self.blocks[-1]) >= MAX_CLASSES_PER_JAR \
                       or bytes >= MAX_BYTES_PER_JAR:
                    self.blocks.append([])
                    bytes = 0
                self.blocks[-1].append((name, hash))
                bytes += len(self.classes[hash])
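    # Worked example of the block splitting above (illustrative numbers
    # only): with MAX_CLASSES_PER_JAR = 1024 and MAX_BYTES_PER_JAR =
    # 1048576, a job of 2500 small classes splits into three blocks of at
    # most 1024 classes each, while a job whose classes are large starts
    # a new block as soon as the byte counter crosses 1MB -- whichever
    # threshold is hit first.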
    # The implementation and the documentation don't seem to match.
    #
    #   [a, b].isSubsetOf([a]) => True
    #   [a].isSubsetOf([a, b]) => False
    #
    # Identical copies of all classes in this collection do not exist
    # in the other.  I think the method should be named isSupersetOf
    # and the documentation should swap uses of "this" and "other".
    #
    # XXX think about this when I've had more sleep...
    def isSubsetOf(self, other):
        """Returns True if identical copies of all classes in this
        collection exist in the other."""
        for item in other.classes.keys():
            if not self.classes.has_key(item):
                return False
        return True
    def __targetName(self, ext):
        return self.basename + ext

    def tempJarName(self, num):
        return self.__targetName(".%d.jar" % (num + 1))

    def tempObjName(self, num):
        return self.__targetName(".%d.o" % (num + 1))
    def dsoName(self):
        """Return the filename of the shared library that will be
        built from this job."""
        return self.__targetName(".so")

    def dbName(self):
        """Return the filename of the mapping database that will be
        built from this job."""
        return self.__targetName(".db")
    def ruleArguments(self):
        """Return a dictionary of values that when substituted
        into MAKEFILE_JOB will create the rules required to build
        the shared library and mapping database for this job."""
        if self.blocks is None:
            self.__makeBlocks()
        return {
            "base": "".join(
                [c.isalnum() and c or "_" for c in self.dsoName()]),
            "jars": " \\\n".join(
                [self.tempJarName(i) for i in xrange(len(self.blocks))]),
            "dso": self.dsoName(),
            "db": self.dbName()}
    def writeJars(self):
        """Generate jarfiles that can be native compiled by gcj."""
        if self.blocks is None:
            self.__makeBlocks()
        for block, i in zip(self.blocks, xrange(len(self.blocks))):
            jar = zipfile.ZipFile(self.tempJarName(i), "w", zipfile.ZIP_STORED)
            for name, hash in block:
                jar.writestr(
                    zipfile.ZipInfo("%s.class" % name), self.classes[hash])
            jar.close()
    def clean(self):
        """Delete all temporary files created during this job's build."""
        if self.blocks is None:
            self.__makeBlocks()
        for i in xrange(len(self.blocks)):
            os.unlink(self.tempJarName(i))
            os.unlink(self.tempObjName(i))
class JarJob(Job):
    """A Job whose origin was a jarfile."""

    def __init__(self, path):
        Job.__init__(self, path)
        self._walk(zipfile.ZipFile(path, "r"))

    def _walk(self, zf):
        for name in zf.namelist():
            bytes = zf.read(name)
            if bytes.startswith(ZIPMAGIC):
                self._walk(zipfile.ZipFile(StringIO.StringIO(bytes)))
            elif bytes.startswith(CLASSMAGIC):
                self.addClass(bytes, name)
class DirJob(Job):
    """A Job whose origin was a directory of classfiles."""

    def __init__(self, path):
        Job.__init__(self, path)
        os.path.walk(path, DirJob._visit, self)

    def _visit(self, dir, items):
        for item in items:
            path = os.path.join(dir, item)
            if os.path.islink(path) or not os.path.isfile(path):
                continue
            fp = open(path, "r")
            magic = fp.read(4)
            if magic == CLASSMAGIC:
                self.addClass(magic + fp.read(), path)
def weed_jobs(jobs):
    """Remove any jarfiles that are completely contained within
    another.  This is more common than you'd think, and we only
    need one nativified copy of each class after all."""
    jobs = copy.copy(jobs)
    while True:
        for job1 in jobs:
            for job2 in jobs:
                if job1 is job2:
                    continue
                if job1.isSubsetOf(job2):
                    msg = "subsetted %s" % job2.path
                    if job2.isSubsetOf(job1):
                        if (isinstance(job1, DirJob) and
                            isinstance(job2, JarJob)):
                            # In the braindead case where a package
                            # contains an expanded copy of a jarfile
                            # the jarfile takes precedence.
                            continue
                        msg += " (identical)"
                    warn(msg)
                    jobs.remove(job2)
                    break
            else:
                continue
            break
        else:
            break
    return jobs
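# Example of the weeding above (the jar name is illustrative): if a package
# ships foo.jar and also an unpacked copy of the very same classes, only one
# job survives; in the identical case the expanded DirJob is the one dropped,
# because the jarfile takes precedence.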
def set_basenames(jobs):
    """Ensure that each jarfile has a different basename."""
    names = {}
    for job in jobs:
        name = os.path.basename(job.path)
        if not names.has_key(name):
            names[name] = []
        names[name].append(job)
    for name, set in names.items():
        if len(set) == 1:
            set[0].basename = name
            continue
        # prefix the jar filenames to make them unique
        # XXX will not work in most cases -- needs generalising
        set = [(job.path.split(os.sep), job) for job in set]
        minlen = min([len(bits) for bits, job in set])
        set = [(bits[-minlen:], job) for bits, job in set]
        bits = apply(zip, [bits for bits, job in set])
        set = zip(
            ["_".join(name) for name in apply(zip, bits[-2:])],
            [job for bits, job in set])
        for name, job in set:
            warn("building %s as %s" % (job.path, name))
            job.basename = name
    names = {}
    for job in jobs:
        name = job.basename
        # XXX keep this check until we're properly general
        if names.has_key(name):
            raise Error, "%s: duplicate jobname" % name
        names[name] = 1
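# Illustrative example of the uniquifying step above (hypothetical paths):
# if two jobs are both named "foo.jar" but live at /usr/share/java/foo.jar
# and /usr/share/app/lib/foo.jar, each is rebuilt using its parent directory
# as a prefix, i.e. as "java_foo.jar" and "lib_foo.jar", so the generated
# .so and .db targets do not collide.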
def system(command):
    """Execute a command."""
    status = os.spawnv(os.P_WAIT, command[0], command)
    if status > 0:
        raise Error, "%s exited with code %d" % (command[0], status)
    elif status < 0:
        raise Error, "%s killed by signal %d" % (command[0], -status)
def warn(msg):
    """Print a warning message."""
    print >>sys.stderr, "%s: warning: %s" % (
        os.path.basename(sys.argv[0]), msg)
def classname(bytes):
    """Extract the class name from the bytes of a class file."""
    klass = classfile.Class(bytes)
    return klass.constants[klass.constants[klass.name][1]][1]
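# The double lookup above follows the class-file format: klass.name is the
# this_class index, klass.constants[klass.name] is a CONSTANT_Class entry
# whose second element points at a CONSTANT_Utf8 entry, and that entry's
# second element is the internal class name.  This assumes the companion
# classfile module exposes the constant pool as (tag, value) pairs, as the
# indexing here implies.  Illustrative call (hypothetical file):
#
#   classname(open("Foo.class", "rb").read())   # => "com/example/Foo"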