Open the cache in binary mode.
[gitstats.git] / gitstats
blobb9f7f6ea94c69c584adafdc18cb4edf5d3521ad7
1 #!/usr/bin/env python
2 # Copyright (c) 2007-2010 Heikki Hokkanen <hoxu@users.sf.net> & others (see doc/author.txt)
3 # GPLv2 / GPLv3
4 import datetime
5 import getopt
6 import glob
7 import os
8 import pickle
9 import platform
10 import re
11 import shutil
12 import subprocess
13 import sys
14 import time
15 import zlib
17 GNUPLOT_COMMON = 'set terminal png transparent\nset size 1.0,0.5\n'
18 ON_LINUX = (platform.system() == 'Linux')
19 WEEKDAYS = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')
21 exectime_internal = 0.0
22 exectime_external = 0.0
23 time_start = time.time()
25 # By default, gnuplot is searched from path, but can be overridden with the
26 # environment variable "GNUPLOT"
27 gnuplot_cmd = 'gnuplot'
28 if 'GNUPLOT' in os.environ:
29 gnuplot_cmd = os.environ['GNUPLOT']
31 conf = {
32 'max_domains': 10,
33 'max_ext_length': 10,
34 'style': 'gitstats.css',
35 'max_authors': 20,
def getpipeoutput(cmds, quiet = False):
	"""Run a shell pipeline and return its stdout.

	cmds is a list of shell command strings; each element is piped into
	the next (like "cmd0 | cmd1 | ...").  Unless quiet, the pipeline and
	its wall-clock duration are echoed to the terminal.  The cumulative
	time spent in child processes is added to the global
	exectime_external.  The trailing newline of the output is stripped.
	"""
	global exectime_external
	start = time.time()
	# Show the pipeline being run, but only on an interactive terminal
	# (trailing comma = no newline, so the timing line can overwrite it).
	if not quiet and ON_LINUX and os.isatty(1):
		print '>> ' + ' | '.join(cmds),
		sys.stdout.flush()
	# shell = True: each element may contain quoting/redirection.
	# NOTE(review): commands are interpolated into shell strings elsewhere;
	# repository/tag names with shell metacharacters would be unsafe.
	p0 = subprocess.Popen(cmds[0], stdout = subprocess.PIPE, shell = True)
	p = p0
	# Chain the remaining commands, each reading the previous stdout.
	for x in cmds[1:]:
		p = subprocess.Popen(x, stdin = p0.stdout, stdout = subprocess.PIPE, shell = True)
		p0 = p
	# communicate() waits for the last process and drains the pipe.
	output = p.communicate()[0]
	end = time.time()
	if not quiet:
		if ON_LINUX and os.isatty(1):
			# Return to column 0 so the timing line replaces the echo above.
			print '\r',
		print '[%.5f] >> %s' % (end - start, ' | '.join(cmds))
	exectime_external += (end - start)
	return output.rstrip('\n')
def getkeyssortedbyvalues(dict):
	"""Return the keys of *dict* as a list, sorted ascending by their values.

	Callers reverse() and index the result, so a real list is required;
	the previous map(lambda ...) form returned a lazy iterator under
	Python 3.  Ties are broken by the keys' own ordering.
	"""
	return [key for (value, key) in sorted((value, key) for (key, value) in dict.items())]
# dict['author'] = { 'commits': 512 } - ...key(dict, 'commits')
def getkeyssortedbyvaluekey(d, key):
	"""Return the keys of *d* (a dict of dicts) sorted ascending by d[k][key].

	Returns a real list (callers reverse()/slice the result); the previous
	map(lambda ...) form returned a lazy iterator under Python 3.
	"""
	return [k for (v, k) in sorted((d[k][key], k) for k in d.keys())]
# Cached short hash of the gitstats checkout itself; 0 until first queried.
VERSION = 0
def getversion():
	"""Return the abbreviated HEAD commit hash, memoizing it in VERSION."""
	global VERSION
	if VERSION != 0:
		return VERSION
	VERSION = getpipeoutput(["git rev-parse --short HEAD"]).split('\n')[0]
	return VERSION
class DataCollector:
	"""Manages data collection from a revision control repository.

	This is the backend-neutral base class: the get* methods below return
	placeholder values and are meant to be overridden by a VCS-specific
	subclass (see GitDataCollector).  It also provides a simple
	zlib-compressed pickle cache for expensive per-object lookups.
	"""
	def __init__(self):
		# Creation timestamp, reported as the generation time of the report.
		self.stamp_created = time.time()
		# Cache of expensive results; persisted via saveCache()/loadCache().
		self.cache = {}

	# This should be the main function to extract data from the repository.
	def collect(self, dir):
		self.dir = dir
		# Project name defaults to the basename of the repository directory.
		self.projectname = os.path.basename(os.path.abspath(dir))

	# Load cacheable data
	def loadCache(self, cachefile):
		# Missing cache file is not an error; start with an empty cache.
		if not os.path.exists(cachefile):
			return
		print 'Loading cache...'
		# NOTE(review): unpickling a cache file executes arbitrary code if
		# the file is untrusted; acceptable here only because the cache is
		# produced by gitstats itself.
		f = open(cachefile, 'rb')
		try:
			self.cache = pickle.loads(zlib.decompress(f.read()))
		except:
			# temporary hack to upgrade non-compressed caches
			f.seek(0)
			self.cache = pickle.load(f)
		f.close()

	# Produce any additional statistics from the extracted data.
	def refine(self):
		pass

	# : get a dictionary of author
	def getAuthorInfo(self, author):
		return None

	def getActivityByDayOfWeek(self):
		return {}

	def getActivityByHourOfDay(self):
		return {}

	# : get a dictionary of domains
	def getDomainInfo(self, domain):
		return None

	# Get a list of authors
	def getAuthors(self):
		return []

	def getFirstCommitDate(self):
		return datetime.datetime.now()

	def getLastCommitDate(self):
		return datetime.datetime.now()

	def getStampCreated(self):
		return self.stamp_created

	def getTags(self):
		return []

	# The -1 sentinels below mean "not implemented by this backend".
	def getTotalAuthors(self):
		return -1

	def getTotalCommits(self):
		return -1

	def getTotalFiles(self):
		return -1

	def getTotalLOC(self):
		return -1

	# Save cacheable data
	def saveCache(self, cachefile):
		print 'Saving cache...'
		f = open(cachefile, 'wb')
		#pickle.dump(self.cache, f)
		data = zlib.compress(pickle.dumps(self.cache))
		f.write(data)
		f.close()
class GitDataCollector(DataCollector):
	"""DataCollector backend that shells out to git to gather statistics."""

	def collect(self, dir):
		"""Extract all statistics from the git repository in *dir*.

		Populates activity, author, domain, tag, file and line-change
		tables by parsing the output of several git commands.  Assumes
		getpipeoutput() and the module-level conf dict are available.
		"""
		DataCollector.collect(self, dir)

		try:
			self.total_authors = int(getpipeoutput(['git log', 'git shortlog -s', 'wc -l']))
		except:
			self.total_authors = 0
		#self.total_lines = int(getoutput('git-ls-files -z |xargs -0 cat |wc -l'))

		self.activity_by_hour_of_day = {} # hour -> commits
		self.activity_by_day_of_week = {} # day -> commits
		self.activity_by_month_of_year = {} # month [1-12] -> commits
		self.activity_by_hour_of_week = {} # weekday -> hour -> commits
		self.activity_by_hour_of_day_busiest = 0
		self.activity_by_hour_of_week_busiest = 0
		self.activity_by_year_week = {} # yy_wNN -> commits
		self.activity_by_year_week_peak = 0

		self.authors = {} # name -> {commits, first_commit_stamp, last_commit_stamp, last_active_day, active_days, lines_added, lines_removed}

		# domains
		self.domains = {} # domain -> commits

		# author of the month
		self.author_of_month = {} # month -> author -> commits
		self.author_of_year = {} # year -> author -> commits
		self.commits_by_month = {} # month -> commits
		self.commits_by_year = {} # year -> commits
		self.first_commit_stamp = 0
		self.last_commit_stamp = 0
		self.last_active_day = None
		self.active_days = set()

		# lines
		self.total_lines = 0
		self.total_lines_added = 0
		self.total_lines_removed = 0

		# timezone
		self.commits_by_timezone = {} # timezone -> commits

		# tags
		# "git show-ref --tags" prints "<hash> refs/tags/<name>" per tag.
		self.tags = {}
		lines = getpipeoutput(['git show-ref --tags']).split('\n')
		for line in lines:
			if len(line) == 0:
				continue
			(hash, tag) = line.split(' ')

			tag = tag.replace('refs/tags/', '')
			# Resolve the tagged commit's timestamp and author.
			output = getpipeoutput(['git log "%s" --pretty=format:"%%at %%an" -n 1' % hash])
			if len(output) > 0:
				parts = output.split(' ')
				stamp = 0
				try:
					stamp = int(parts[0])
				except ValueError:
					stamp = 0
				self.tags[tag] = { 'stamp': stamp, 'hash' : hash, 'date' : datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), 'commits': 0, 'authors': {} }

		# collect info on tags, starting from latest
		tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), self.tags.items()))))
		prev = None
		for tag in reversed(tags_sorted_by_date_desc):
			# Count commits reachable from this tag but not from the
			# previously processed (older) tag: "tag ^prev".
			cmd = 'git shortlog -s "%s"' % tag
			if prev != None:
				cmd += ' "^%s"' % prev
			output = getpipeoutput([cmd])
			if len(output) == 0:
				continue
			prev = tag
			for line in output.split('\n'):
				# shortlog -s lines look like "   <count>\t<author>".
				parts = re.split('\s+', line, 2)
				commits = int(parts[1])
				author = parts[2]
				self.tags[tag]['commits'] += commits
				self.tags[tag]['authors'][author] = commits

		# Collect revision statistics
		# Outputs "<stamp> <date> <time> <timezone> <author> '<' <mail> '>'"
		lines = getpipeoutput(['git rev-list --pretty=format:"%at %ai %an <%aE>" HEAD', 'grep -v ^commit']).split('\n')
		for line in lines:
			parts = line.split(' ', 4)
			author = ''
			try:
				stamp = int(parts[0])
			except ValueError:
				stamp = 0
			timezone = parts[3]
			author, mail = parts[4].split('<', 1)
			author = author.rstrip()
			mail = mail.rstrip('>')
			domain = '?'
			if mail.find('@') != -1:
				domain = mail.rsplit('@', 1)[1]
			date = datetime.datetime.fromtimestamp(float(stamp))

			# First and last commit stamp
			# rev-list emits newest first, so the first line seen is the
			# last commit; the final assignment leaves the oldest stamp.
			if self.last_commit_stamp == 0:
				self.last_commit_stamp = stamp
			self.first_commit_stamp = stamp

			# activity
			# hour
			hour = date.hour
			self.activity_by_hour_of_day[hour] = self.activity_by_hour_of_day.get(hour, 0) + 1
			# most active hour?
			if self.activity_by_hour_of_day[hour] > self.activity_by_hour_of_day_busiest:
				self.activity_by_hour_of_day_busiest = self.activity_by_hour_of_day[hour]

			# day of week
			day = date.weekday()
			self.activity_by_day_of_week[day] = self.activity_by_day_of_week.get(day, 0) + 1

			# domain stats
			if domain not in self.domains:
				self.domains[domain] = {}
			# commits
			self.domains[domain]['commits'] = self.domains[domain].get('commits', 0) + 1

			# hour of week
			if day not in self.activity_by_hour_of_week:
				self.activity_by_hour_of_week[day] = {}
			self.activity_by_hour_of_week[day][hour] = self.activity_by_hour_of_week[day].get(hour, 0) + 1
			# most active hour?
			if self.activity_by_hour_of_week[day][hour] > self.activity_by_hour_of_week_busiest:
				self.activity_by_hour_of_week_busiest = self.activity_by_hour_of_week[day][hour]

			# month of year
			month = date.month
			self.activity_by_month_of_year[month] = self.activity_by_month_of_year.get(month, 0) + 1

			# yearly/weekly activity
			yyw = date.strftime('%Y-%W')
			self.activity_by_year_week[yyw] = self.activity_by_year_week.get(yyw, 0) + 1
			if self.activity_by_year_week_peak < self.activity_by_year_week[yyw]:
				self.activity_by_year_week_peak = self.activity_by_year_week[yyw]

			# author stats
			if author not in self.authors:
				self.authors[author] = {}
			# commits
			# Same newest-first trick: first sighting fixes last_commit_stamp,
			# repeated assignment leaves first_commit_stamp at the oldest.
			if 'last_commit_stamp' not in self.authors[author]:
				self.authors[author]['last_commit_stamp'] = stamp
			self.authors[author]['first_commit_stamp'] = stamp
			self.authors[author]['commits'] = self.authors[author].get('commits', 0) + 1

			# author of the month/year
			yymm = date.strftime('%Y-%m')
			if yymm in self.author_of_month:
				self.author_of_month[yymm][author] = self.author_of_month[yymm].get(author, 0) + 1
			else:
				self.author_of_month[yymm] = {}
				self.author_of_month[yymm][author] = 1
			self.commits_by_month[yymm] = self.commits_by_month.get(yymm, 0) + 1

			yy = date.year
			if yy in self.author_of_year:
				self.author_of_year[yy][author] = self.author_of_year[yy].get(author, 0) + 1
			else:
				self.author_of_year[yy] = {}
				self.author_of_year[yy][author] = 1
			self.commits_by_year[yy] = self.commits_by_year.get(yy, 0) + 1

			# authors: active days
			yymmdd = date.strftime('%Y-%m-%d')
			if 'last_active_day' not in self.authors[author]:
				self.authors[author]['last_active_day'] = yymmdd
				self.authors[author]['active_days'] = 1
			elif yymmdd != self.authors[author]['last_active_day']:
				self.authors[author]['last_active_day'] = yymmdd
				self.authors[author]['active_days'] += 1

			# project: active days
			if yymmdd != self.last_active_day:
				self.last_active_day = yymmdd
				self.active_days.add(yymmdd)

			# timezone
			self.commits_by_timezone[timezone] = self.commits_by_timezone.get(timezone, 0) + 1

		# TODO Optimize this, it's the worst bottleneck
		# outputs "<stamp> <files>" for each revision
		self.files_by_stamp = {} # stamp -> files
		revlines = getpipeoutput(['git rev-list --pretty=format:"%at %T" HEAD', 'grep -v ^commit']).strip().split('\n')
		lines = []
		for revline in revlines:
			# NOTE(review): "time" here shadows the imported time module
			# for the rest of this loop body.
			time, rev = revline.split(' ')
			linecount = self.getFilesInCommit(rev)
			lines.append('%d %d' % (int(time), linecount))

		self.total_commits = len(lines)
		for line in lines:
			parts = line.split(' ')
			if len(parts) != 2:
				continue
			(stamp, files) = parts[0:2]
			try:
				self.files_by_stamp[int(stamp)] = int(files)
			except ValueError:
				print 'Warning: failed to parse line "%s"' % line

		# extensions
		self.extensions = {} # extension -> files, lines
		# NUL-separated ls-tree output; each record is "<mode> <type> <sha1>\t<name>".
		lines = getpipeoutput(['git ls-tree -r -z HEAD']).split('\000')
		self.total_files = len(lines)
		for line in lines:
			if len(line) == 0:
				continue
			parts = re.split('\s+', line, 4)
			sha1 = parts[2]
			filename = parts[3]

			# Dotfiles and extension-less names are grouped under ''.
			if filename.find('.') == -1 or filename.rfind('.') == 0:
				ext = ''
			else:
				ext = filename[(filename.rfind('.') + 1):]
			if len(ext) > conf['max_ext_length']:
				ext = ''

			if ext not in self.extensions:
				self.extensions[ext] = {'files': 0, 'lines': 0}

			self.extensions[ext]['files'] += 1
			try:
				self.extensions[ext]['lines'] += self.getLinesInBlob(sha1)
			except:
				print 'Warning: Could not count lines for file "%s"' % line

		# line statistics
		# outputs:
		#  N files changed, N insertions (+), N deletions(-)
		# <stamp> <author>
		self.changes_by_date = {} # stamp -> { files, ins, del }
		lines = getpipeoutput(['git log --shortstat --pretty=format:"%at %an"']).split('\n')
		lines.reverse()
		files = 0; inserted = 0; deleted = 0; total_lines = 0
		author = None
		for line in lines:
			if len(line) == 0:
				continue

			# <stamp> <author>
			# The reversed log interleaves shortstat lines with
			# "<stamp> <author>" lines; the stat totals accumulated so far
			# belong to the next header line encountered.
			if line.find('files changed,') == -1:
				pos = line.find(' ')
				if pos != -1:
					try:
						(stamp, author) = (int(line[:pos]), line[pos+1:])
						self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted, 'lines': total_lines }
						if author not in self.authors:
							self.authors[author] = { 'lines_added' : 0, 'lines_removed' : 0 }
						self.authors[author]['lines_added'] = self.authors[author].get('lines_added', 0) + inserted
						self.authors[author]['lines_removed'] = self.authors[author].get('lines_removed', 0) + deleted
					except ValueError:
						print 'Warning: unexpected line "%s"' % line
				else:
					print 'Warning: unexpected line "%s"' % line
			else:
				numbers = re.findall('\d+', line)
				if len(numbers) == 3:
					(files, inserted, deleted) = map(lambda el : int(el), numbers)
					total_lines += inserted
					total_lines -= deleted
					self.total_lines_added += inserted
					self.total_lines_removed += deleted
				else:
					print 'Warning: failed to handle line "%s"' % line
					(files, inserted, deleted) = (0, 0, 0)
				#self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted }
		self.total_lines = total_lines

	def refine(self):
		"""Derive per-author ranking, percentages and date ranges."""
		# authors
		# name -> {place_by_commits, commits_frac, date_first, date_last, timedelta}
		authors_by_commits = getkeyssortedbyvaluekey(self.authors, 'commits')
		authors_by_commits.reverse() # most first
		for i, name in enumerate(authors_by_commits):
			self.authors[name]['place_by_commits'] = i + 1

		for name in self.authors.keys():
			a = self.authors[name]
			a['commits_frac'] = (100 * float(a['commits'])) / self.getTotalCommits()
			date_first = datetime.datetime.fromtimestamp(a['first_commit_stamp'])
			date_last = datetime.datetime.fromtimestamp(a['last_commit_stamp'])
			delta = date_last - date_first
			a['date_first'] = date_first.strftime('%Y-%m-%d')
			a['date_last'] = date_last.strftime('%Y-%m-%d')
			a['timedelta'] = delta

	def getActiveDays(self):
		return self.active_days

	def getActivityByDayOfWeek(self):
		return self.activity_by_day_of_week

	def getActivityByHourOfDay(self):
		return self.activity_by_hour_of_day

	def getAuthorInfo(self, author):
		return self.authors[author]

	def getAuthors(self, limit = None):
		# Most-committing authors first; limit=None returns all.
		res = getkeyssortedbyvaluekey(self.authors, 'commits')
		res.reverse()
		return res[:limit]

	def getCommitDeltaDays(self):
		# Whole days between first and last commit, inclusive (minimum 1).
		return (self.last_commit_stamp - self.first_commit_stamp) / 86400 + 1

	def getDomainInfo(self, domain):
		return self.domains[domain]

	def getDomains(self):
		return self.domains.keys()

	def getFilesInCommit(self, rev):
		"""Return the number of files in tree *rev*, memoized in the cache."""
		try:
			res = self.cache['files_in_tree'][rev]
		except:
			res = int(getpipeoutput(['git ls-tree -r --name-only "%s"' % rev, 'wc -l']).split('\n')[0])
			if 'files_in_tree' not in self.cache:
				self.cache['files_in_tree'] = {}
			self.cache['files_in_tree'][rev] = res

		return res

	def getFirstCommitDate(self):
		return datetime.datetime.fromtimestamp(self.first_commit_stamp)

	def getLastCommitDate(self):
		return datetime.datetime.fromtimestamp(self.last_commit_stamp)

	def getLinesInBlob(self, sha1):
		"""Return the line count of blob *sha1*, memoized in the cache."""
		try:
			res = self.cache['lines_in_blob'][sha1]
		except:
			res = int(getpipeoutput(['git cat-file blob %s' % sha1, 'wc -l']).split()[0])
			if 'lines_in_blob' not in self.cache:
				self.cache['lines_in_blob'] = {}
			self.cache['lines_in_blob'][sha1] = res
		return res

	def getTags(self):
		lines = getpipeoutput(['git show-ref --tags', 'cut -d/ -f3'])
		return lines.split('\n')

	def getTagDate(self, tag):
		return self.revToDate('tags/' + tag)

	def getTotalAuthors(self):
		return self.total_authors

	def getTotalCommits(self):
		return self.total_commits

	def getTotalFiles(self):
		return self.total_files

	def getTotalLOC(self):
		return self.total_lines

	def revToDate(self, rev):
		"""Return the commit date of *rev* as 'YYYY-MM-DD'."""
		stamp = int(getpipeoutput(['git log --pretty=format:%%at "%s" -n 1' % rev]))
		return datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d')
class ReportCreator:
	"""Base class for report generators.

	Subclasses render the collected statistics; this base merely records
	the data source and the output location.
	"""

	def __init__(self):
		pass

	def create(self, data, path):
		"""Remember the data collector and the output directory."""
		self.path = path
		self.data = data
def html_linkify(text):
	"""Turn a heading into an HTML anchor name: spaces become underscores, lowercased."""
	return text.replace(' ', '_').lower()
def html_header(level, text):
	"""Return an <hN> heading wrapping a self-referencing named anchor."""
	anchor = html_linkify(text)
	return '\n<h%d><a href="#%s" name="%s">%s</a></h%d>\n\n' % (level, anchor, anchor, text, level)
540 class HTMLReportCreator(ReportCreator):
	def create(self, data, path):
		"""Render the full HTML report for *data* into directory *path*.

		Writes index.html, activity.html, authors.html, files.html,
		lines.html and tags.html plus the gnuplot .dat data files, then
		triggers graph generation.  Relies on self.printHeader() and
		self.printNav() (defined elsewhere in this class - not visible
		here) and on the module-level conf dict and helper functions.
		"""
		ReportCreator.create(self, data, path)
		self.title = data.projectname

		# copy static files. Looks in the binary directory, ../share/gitstats and /usr/share/gitstats
		binarypath = os.path.dirname(os.path.abspath(__file__))
		secondarypath = os.path.join(binarypath, '..', 'share', 'gitstats')
		basedirs = [binarypath, secondarypath, '/usr/share/gitstats']
		for file in ('gitstats.css', 'sortable.js', 'arrow-up.gif', 'arrow-down.gif', 'arrow-none.gif'):
			for base in basedirs:
				src = base + '/' + file
				if os.path.exists(src):
					shutil.copyfile(src, path + '/' + file)
					break
			else:
				# for/else: no break hit, i.e. the file was found nowhere.
				print 'Warning: "%s" not found, so not copied (searched: %s)' % (file, basedirs)

		f = open(path + "/index.html", 'w')
		format = '%Y-%m-%d %H:%M:%S'
		self.printHeader(f)

		f.write('<h1>GitStats - %s</h1>' % data.projectname)

		self.printNav(f)

		# Project summary definition list.
		f.write('<dl>')
		f.write('<dt>Project name</dt><dd>%s</dd>' % (data.projectname))
		f.write('<dt>Generated</dt><dd>%s (in %d seconds)</dd>' % (datetime.datetime.now().strftime(format), time.time() - data.getStampCreated()))
		f.write('<dt>Generator</dt><dd><a href="http://gitstats.sourceforge.net/">GitStats</a> (version %s)</dd>' % getversion())
		f.write('<dt>Report Period</dt><dd>%s to %s</dd>' % (data.getFirstCommitDate().strftime(format), data.getLastCommitDate().strftime(format)))
		f.write('<dt>Age</dt><dd>%d days, %d active days (%3.2f%%)</dd>' % (data.getCommitDeltaDays(), len(data.getActiveDays()), (100.0 * len(data.getActiveDays()) / data.getCommitDeltaDays())))
		f.write('<dt>Total Files</dt><dd>%s</dd>' % data.getTotalFiles())
		f.write('<dt>Total Lines of Code</dt><dd>%s (%d added, %d removed)</dd>' % (data.getTotalLOC(), data.total_lines_added, data.total_lines_removed))
		f.write('<dt>Total Commits</dt><dd>%s (average %.1f commits per active day, %.1f per all days)</dd>' % (data.getTotalCommits(), float(data.getTotalCommits()) / len(data.getActiveDays()), float(data.getTotalCommits()) / data.getCommitDeltaDays()))
		f.write('<dt>Authors</dt><dd>%s</dd>' % data.getTotalAuthors())
		f.write('</dl>')

		f.write('</body>\n</html>')
		f.close()

		# Activity
		f = open(path + '/activity.html', 'w')
		self.printHeader(f)
		f.write('<h1>Activity</h1>')
		self.printNav(f)

		#f.write('<h2>Last 30 days</h2>')

		#f.write('<h2>Last 12 months</h2>')

		# Weekly activity
		WEEKS = 32
		f.write(html_header(2, 'Weekly activity'))
		f.write('<p>Last %d weeks</p>' % WEEKS)

		# generate weeks to show (previous N weeks from now)
		now = datetime.datetime.now()
		deltaweek = datetime.timedelta(7)
		weeks = []
		stampcur = now
		for i in range(0, WEEKS):
			weeks.insert(0, stampcur.strftime('%Y-%W'))
			stampcur -= deltaweek

		# top row: commits & bar
		f.write('<table class="noborders"><tr>')
		for i in range(0, WEEKS):
			commits = 0
			if weeks[i] in data.activity_by_year_week:
				commits = data.activity_by_year_week[weeks[i]]

			# Bar height is scaled against the busiest week (max 200px).
			percentage = 0
			if weeks[i] in data.activity_by_year_week:
				percentage = float(data.activity_by_year_week[weeks[i]]) / data.activity_by_year_week_peak
			height = max(1, int(200 * percentage))
			f.write('<td style="text-align: center; vertical-align: bottom">%d<div style="display: block; background-color: red; width: 20px; height: %dpx"></div></td>' % (commits, height))

		# bottom row: year/week
		f.write('</tr><tr>')
		for i in range(0, WEEKS):
			f.write('<td>%s</td>' % (WEEKS - i))
		f.write('</tr></table>')

		# Hour of Day
		f.write(html_header(2, 'Hour of Day'))
		hour_of_day = data.getActivityByHourOfDay()
		f.write('<table><tr><th>Hour</th>')
		for i in range(0, 24):
			f.write('<th>%d</th>' % i)
		f.write('</tr>\n<tr><th>Commits</th>')
		fp = open(path + '/hour_of_day.dat', 'w')
		for i in range(0, 24):
			if i in hour_of_day:
				# Cell background shades from grey to red with activity.
				r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
				f.write('<td style="background-color: rgb(%d, 0, 0)">%d</td>' % (r, hour_of_day[i]))
				fp.write('%d %d\n' % (i, hour_of_day[i]))
			else:
				f.write('<td>0</td>')
				fp.write('%d 0\n' % i)
		fp.close()
		f.write('</tr>\n<tr><th>%</th>')
		totalcommits = data.getTotalCommits()
		for i in range(0, 24):
			if i in hour_of_day:
				r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
				f.write('<td style="background-color: rgb(%d, 0, 0)">%.2f</td>' % (r, (100.0 * hour_of_day[i]) / totalcommits))
			else:
				f.write('<td>0.00</td>')
		f.write('</tr></table>')
		f.write('<img src="hour_of_day.png" alt="Hour of Day" />')
		# NOTE(review): hour_of_day.dat is rewritten here with 1-based hours;
		# this clobbers the 0-based file written just above.
		fg = open(path + '/hour_of_day.dat', 'w')
		for i in range(0, 24):
			if i in hour_of_day:
				fg.write('%d %d\n' % (i + 1, hour_of_day[i]))
			else:
				fg.write('%d 0\n' % (i + 1))
		fg.close()

		# Day of Week
		f.write(html_header(2, 'Day of Week'))
		day_of_week = data.getActivityByDayOfWeek()
		f.write('<div class="vtable"><table>')
		f.write('<tr><th>Day</th><th>Total (%)</th></tr>')
		fp = open(path + '/day_of_week.dat', 'w')
		for d in range(0, 7):
			commits = 0
			if d in day_of_week:
				commits = day_of_week[d]
			fp.write('%d %s %d\n' % (d + 1, WEEKDAYS[d], commits))
			f.write('<tr>')
			f.write('<th>%s</th>' % (WEEKDAYS[d]))
			if d in day_of_week:
				f.write('<td>%d (%.2f%%)</td>' % (day_of_week[d], (100.0 * day_of_week[d]) / totalcommits))
			else:
				f.write('<td>0</td>')
			f.write('</tr>')
		f.write('</table></div>')
		f.write('<img src="day_of_week.png" alt="Day of Week" />')
		fp.close()

		# Hour of Week
		f.write(html_header(2, 'Hour of Week'))
		f.write('<table>')

		f.write('<tr><th>Weekday</th>')
		for hour in range(0, 24):
			f.write('<th>%d</th>' % (hour))
		f.write('</tr>')

		for weekday in range(0, 7):
			f.write('<tr><th>%s</th>' % (WEEKDAYS[weekday]))
			for hour in range(0, 24):
				try:
					commits = data.activity_by_hour_of_week[weekday][hour]
				except KeyError:
					commits = 0
				if commits != 0:
					f.write('<td')
					r = 127 + int((float(commits) / data.activity_by_hour_of_week_busiest) * 128)
					f.write(' style="background-color: rgb(%d, 0, 0)"' % r)
					f.write('>%d</td>' % commits)
				else:
					f.write('<td></td>')
			f.write('</tr>')

		f.write('</table>')

		# Month of Year
		f.write(html_header(2, 'Month of Year'))
		f.write('<div class="vtable"><table>')
		f.write('<tr><th>Month</th><th>Commits (%)</th></tr>')
		fp = open (path + '/month_of_year.dat', 'w')
		for mm in range(1, 13):
			commits = 0
			if mm in data.activity_by_month_of_year:
				commits = data.activity_by_month_of_year[mm]
			f.write('<tr><td>%d</td><td>%d (%.2f %%)</td></tr>' % (mm, commits, (100.0 * commits) / data.getTotalCommits()))
			fp.write('%d %d\n' % (mm, commits))
		fp.close()
		f.write('</table></div>')
		f.write('<img src="month_of_year.png" alt="Month of Year" />')

		# Commits by year/month
		f.write(html_header(2, 'Commits by year/month'))
		f.write('<div class="vtable"><table><tr><th>Month</th><th>Commits</th></tr>')
		for yymm in reversed(sorted(data.commits_by_month.keys())):
			f.write('<tr><td>%s</td><td>%d</td></tr>' % (yymm, data.commits_by_month[yymm]))
		f.write('</table></div>')
		f.write('<img src="commits_by_year_month.png" alt="Commits by year/month" />')
		fg = open(path + '/commits_by_year_month.dat', 'w')
		for yymm in sorted(data.commits_by_month.keys()):
			fg.write('%s %s\n' % (yymm, data.commits_by_month[yymm]))
		fg.close()

		# Commits by year
		f.write(html_header(2, 'Commits by Year'))
		f.write('<div class="vtable"><table><tr><th>Year</th><th>Commits (% of all)</th></tr>')
		for yy in reversed(sorted(data.commits_by_year.keys())):
			f.write('<tr><td>%s</td><td>%d (%.2f%%)</td></tr>' % (yy, data.commits_by_year[yy], (100.0 * data.commits_by_year[yy]) / data.getTotalCommits()))
		f.write('</table></div>')
		f.write('<img src="commits_by_year.png" alt="Commits by Year" />')
		fg = open(path + '/commits_by_year.dat', 'w')
		for yy in sorted(data.commits_by_year.keys()):
			fg.write('%d %d\n' % (yy, data.commits_by_year[yy]))
		fg.close()

		# Commits by timezone
		f.write(html_header(2, 'Commits by Timezone'))
		f.write('<table><tr>')
		f.write('<th>Timezone</th><th>Commits</th>')
		max_commits_on_tz = max(data.commits_by_timezone.values())
		# Timezones are strings like "+0200"; sort numerically.
		for i in sorted(data.commits_by_timezone.keys(), key = lambda n : int(n)):
			commits = data.commits_by_timezone[i]
			r = 127 + int((float(commits) / max_commits_on_tz) * 128)
			f.write('<tr><th>%s</th><td style="background-color: rgb(%d, 0, 0)">%d</td></tr>' % (i, r, commits))
		f.write('</tr></table>')

		f.write('</body></html>')
		f.close()

		# Authors
		f = open(path + '/authors.html', 'w')
		self.printHeader(f)

		f.write('<h1>Authors</h1>')
		self.printNav(f)

		# Authors :: List of authors
		f.write(html_header(2, 'List of Authors'))

		f.write('<table class="authors sortable" id="authors">')
		f.write('<tr><th>Author</th><th>Commits (%)</th><th>+ lines</th><th>- lines</th><th>First commit</th><th>Last commit</th><th class="unsortable">Age</th><th>Active days</th><th># by commits</th></tr>')
		for author in data.getAuthors(conf['max_authors']):
			info = data.getAuthorInfo(author)
			f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d</td><td>%d</td><td>%s</td><td>%s</td><td>%s</td><td>%d</td><td>%d</td></tr>' % (author, info['commits'], info['commits_frac'], info['lines_added'], info['lines_removed'], info['date_first'], info['date_last'], info['timedelta'], info['active_days'], info['place_by_commits']))
		f.write('</table>')

		# Authors beyond the max_authors cutoff are listed in a footnote.
		allauthors = data.getAuthors()
		if len(allauthors) > conf['max_authors']:
			rest = allauthors[conf['max_authors']:]
			f.write('<p class="moreauthors">These didn\'t make it to the top: %s</p>' % ', '.join(rest))

		# Authors :: Author of Month
		f.write(html_header(2, 'Author of Month'))
		f.write('<table class="sortable" id="aom">')
		f.write('<tr><th>Month</th><th>Author</th><th>Commits (%)</th><th class="unsortable">Next top 5</th></tr>')
		for yymm in reversed(sorted(data.author_of_month.keys())):
			authordict = data.author_of_month[yymm]
			authors = getkeyssortedbyvalues(authordict)
			authors.reverse()
			commits = data.author_of_month[yymm][authors[0]]
			next = ', '.join(authors[1:5])
			f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td></tr>' % (yymm, authors[0], commits, (100.0 * commits) / data.commits_by_month[yymm], data.commits_by_month[yymm], next))

		f.write('</table>')

		f.write(html_header(2, 'Author of Year'))
		f.write('<table class="sortable" id="aoy"><tr><th>Year</th><th>Author</th><th>Commits (%)</th><th class="unsortable">Next top 5</th></tr>')
		for yy in reversed(sorted(data.author_of_year.keys())):
			authordict = data.author_of_year[yy]
			authors = getkeyssortedbyvalues(authordict)
			authors.reverse()
			commits = data.author_of_year[yy][authors[0]]
			next = ', '.join(authors[1:5])
			f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td></tr>' % (yy, authors[0], commits, (100.0 * commits) / data.commits_by_year[yy], data.commits_by_year[yy], next))
		f.write('</table>')

		# Domains
		f.write(html_header(2, 'Commits by Domains'))
		domains_by_commits = getkeyssortedbyvaluekey(data.domains, 'commits')
		domains_by_commits.reverse() # most first
		f.write('<div class="vtable"><table>')
		f.write('<tr><th>Domains</th><th>Total (%)</th></tr>')
		fp = open(path + '/domains.dat', 'w')
		n = 0
		for domain in domains_by_commits:
			if n == conf['max_domains']:
				break
			commits = 0
			n += 1
			info = data.getDomainInfo(domain)
			fp.write('%s %d %d\n' % (domain, n , info['commits']))
			f.write('<tr><th>%s</th><td>%d (%.2f%%)</td></tr>' % (domain, info['commits'], (100.0 * info['commits'] / totalcommits)))
		f.write('</table></div>')
		f.write('<img src="domains.png" alt="Commits by Domains" />')
		fp.close()

		f.write('</body></html>')
		f.close()

		# Files
		f = open(path + '/files.html', 'w')
		self.printHeader(f)
		f.write('<h1>Files</h1>')
		self.printNav(f)

		f.write('<dl>\n')
		f.write('<dt>Total files</dt><dd>%d</dd>' % data.getTotalFiles())
		f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
		# NOTE(review): labelled "bytes" but computed from line counts -
		# looks like a pre-existing mislabel; confirm before relying on it.
		f.write('<dt>Average file size</dt><dd>%.2f bytes</dd>' % ((100.0 * data.getTotalLOC()) / data.getTotalFiles()))
		f.write('</dl>\n')

		# Files :: File count by date
		f.write(html_header(2, 'File count by date'))

		# use set to get rid of duplicate/unnecessary entries
		files_by_date = set()
		for stamp in sorted(data.files_by_stamp.keys()):
			files_by_date.add('%s %d' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))

		fg = open(path + '/files_by_date.dat', 'w')
		for line in sorted(list(files_by_date)):
			fg.write('%s\n' % line)
		#for stamp in sorted(data.files_by_stamp.keys()):
		#	fg.write('%s %d\n' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))
		fg.close()

		f.write('<img src="files_by_date.png" alt="Files by Date" />')

		#f.write('<h2>Average file size by date</h2>')

		# Files :: Extensions
		f.write(html_header(2, 'Extensions'))
		f.write('<table class="sortable" id="ext"><tr><th>Extension</th><th>Files (%)</th><th>Lines (%)</th><th>Lines/file</th></tr>')
		for ext in sorted(data.extensions.keys()):
			files = data.extensions[ext]['files']
			lines = data.extensions[ext]['lines']
			f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d (%.2f%%)</td><td>%d</td></tr>' % (ext, files, (100.0 * files) / data.getTotalFiles(), lines, (100.0 * lines) / data.getTotalLOC(), lines / files))
		f.write('</table>')

		f.write('</body></html>')
		f.close()

		# Lines
		f = open(path + '/lines.html', 'w')
		self.printHeader(f)
		f.write('<h1>Lines</h1>')
		self.printNav(f)

		f.write('<dl>\n')
		f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
		f.write('</dl>\n')

		f.write(html_header(2, 'Lines of Code'))
		f.write('<img src="lines_of_code.png" />')

		fg = open(path + '/lines_of_code.dat', 'w')
		for stamp in sorted(data.changes_by_date.keys()):
			fg.write('%d %d\n' % (stamp, data.changes_by_date[stamp]['lines']))
		fg.close()

		f.write('</body></html>')
		f.close()

		# tags.html
		f = open(path + '/tags.html', 'w')
		self.printHeader(f)
		f.write('<h1>Tags</h1>')
		self.printNav(f)

		f.write('<dl>')
		f.write('<dt>Total tags</dt><dd>%d</dd>' % len(data.tags))
		if len(data.tags) > 0:
			f.write('<dt>Average commits per tag</dt><dd>%.2f</dd>' % (1.0 * data.getTotalCommits() / len(data.tags)))
		f.write('</dl>')

		f.write('<table class="tags">')
		f.write('<tr><th>Name</th><th>Date</th><th>Commits</th><th>Authors</th></tr>')
		# sort the tags by date desc
		tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), data.tags.items()))))
		for tag in tags_sorted_by_date_desc:
			authorinfo = []
			authors_by_commits = getkeyssortedbyvalues(data.tags[tag]['authors'])
			for i in reversed(authors_by_commits):
				authorinfo.append('%s (%d)' % (i, data.tags[tag]['authors'][i]))
			f.write('<tr><td>%s</td><td>%s</td><td>%d</td><td>%s</td></tr>' % (tag, data.tags[tag]['date'], data.tags[tag]['commits'], ', '.join(authorinfo)))
		f.write('</table>')

		f.write('</body></html>')
		f.close()

		self.createGraphs(path)
929 def createGraphs(self, path):
930 print 'Generating graphs...'
932 # hour of day
933 f = open(path + '/hour_of_day.plot', 'w')
934 f.write(GNUPLOT_COMMON)
935 f.write(
937 set output 'hour_of_day.png'
938 unset key
939 set xrange [0.5:24.5]
940 set xtics 4
941 set grid y
942 set ylabel "Commits"
943 plot 'hour_of_day.dat' using 1:2:(0.5) w boxes fs solid
944 """)
945 f.close()
947 # day of week
948 f = open(path + '/day_of_week.plot', 'w')
949 f.write(GNUPLOT_COMMON)
950 f.write(
952 set output 'day_of_week.png'
953 unset key
954 set xrange [0.5:7.5]
955 set xtics 1
956 set grid y
957 set ylabel "Commits"
958 plot 'day_of_week.dat' using 1:3:(0.5):xtic(2) w boxes fs solid
959 """)
960 f.close()
962 # Domains
963 f = open(path + '/domains.plot', 'w')
964 f.write(GNUPLOT_COMMON)
965 f.write(
967 set output 'domains.png'
968 unset key
969 unset xtics
970 set grid y
971 set ylabel "Commits"
972 plot 'domains.dat' using 2:3:(0.5) with boxes fs solid, '' using 2:3:1 with labels rotate by 45 offset 0,1
973 """)
974 f.close()
976 # Month of Year
977 f = open(path + '/month_of_year.plot', 'w')
978 f.write(GNUPLOT_COMMON)
979 f.write(
981 set output 'month_of_year.png'
982 unset key
983 set xrange [0.5:12.5]
984 set xtics 1
985 set grid y
986 set ylabel "Commits"
987 plot 'month_of_year.dat' using 1:2:(0.5) w boxes fs solid
988 """)
989 f.close()
991 # commits_by_year_month
992 f = open(path + '/commits_by_year_month.plot', 'w')
993 f.write(GNUPLOT_COMMON)
994 f.write(
996 set output 'commits_by_year_month.png'
997 unset key
998 set xdata time
999 set timefmt "%Y-%m"
1000 set format x "%Y-%m"
1001 set xtics rotate by 90 15768000
1002 set bmargin 5
1003 set grid y
1004 set ylabel "Commits"
1005 plot 'commits_by_year_month.dat' using 1:2:(0.5) w boxes fs solid
1006 """)
1007 f.close()
1009 # commits_by_year
1010 f = open(path + '/commits_by_year.plot', 'w')
1011 f.write(GNUPLOT_COMMON)
1012 f.write(
1014 set output 'commits_by_year.png'
1015 unset key
1016 set xtics 1 rotate by 90
1017 set grid y
1018 set ylabel "Commits"
1019 set yrange [0:]
1020 plot 'commits_by_year.dat' using 1:2:(0.5) w boxes fs solid
1021 """)
1022 f.close()
1024 # Files by date
1025 f = open(path + '/files_by_date.plot', 'w')
1026 f.write(GNUPLOT_COMMON)
1027 f.write(
1029 set output 'files_by_date.png'
1030 unset key
1031 set xdata time
1032 set timefmt "%Y-%m-%d"
1033 set format x "%Y-%m-%d"
1034 set grid y
1035 set ylabel "Files"
1036 set xtics rotate by 90
1037 set ytics autofreq
1038 set bmargin 6
1039 plot 'files_by_date.dat' using 1:2 w steps
1040 """)
1041 f.close()
1043 # Lines of Code
1044 f = open(path + '/lines_of_code.plot', 'w')
1045 f.write(GNUPLOT_COMMON)
1046 f.write(
1048 set output 'lines_of_code.png'
1049 unset key
1050 set xdata time
1051 set timefmt "%s"
1052 set format x "%Y-%m-%d"
1053 set grid y
1054 set ylabel "Lines"
1055 set xtics rotate by 90
1056 set bmargin 6
1057 plot 'lines_of_code.dat' using 1:2 w lines
1058 """)
1059 f.close()
1061 os.chdir(path)
1062 files = glob.glob(path + '/*.plot')
1063 for f in files:
1064 out = getpipeoutput([gnuplot_cmd + ' "%s"' % f])
1065 if len(out) > 0:
1066 print out
def printHeader(self, f, title = ''):
	"""Write the XHTML document head and opening <body> tag to *f*.

	NOTE(review): the *title* parameter is unused — the page title is always
	taken from self.title; kept in the signature for caller compatibility.
	"""
	head = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>GitStats - %s</title>
<link rel="stylesheet" href="%s" type="text/css" />
<meta name="generator" content="GitStats %s" />
<script type="text/javascript" src="sortable.js"></script>
</head>
<body>
""" % (self.title, conf['style'], getversion())
	f.write(head)
def printNav(self, f):
	"""Write the navigation bar linking the report's pages to *f*."""
	pages = (
		('index.html', 'General'),
		('activity.html', 'Activity'),
		('authors.html', 'Authors'),
		('files.html', 'Files'),
		('lines.html', 'Lines'),
		('tags.html', 'Tags'),
	)
	items = ''.join('<li><a href="%s">%s</a></li>\n' % (href, label) for (href, label) in pages)
	f.write('\n<div class="nav">\n<ul>\n' + items + '</ul>\n</div>\n')
1097 class GitStats:
1098 def run(self, args_orig):
1099 optlist, args = getopt.getopt(args_orig, 'c:')
1100 for o,v in optlist:
1101 if o == '-c':
1102 key, value = v.split('=', 1)
1103 if key not in conf:
1104 raise 'Error: no such key "%s" in config' % key
1105 if isinstance(conf[key], int):
1106 conf[key] = int(value)
1107 else:
1108 conf[key] = value
1110 if len(args) < 2:
1111 print """
1112 Usage: gitstats [options] <gitpath> <outputpath>
1114 Options:
1115 -c key=value Override configuration value
1117 Default config values:
1119 """ % conf
1120 sys.exit(0)
1122 gitpath = args[0]
1123 outputpath = os.path.abspath(args[1])
1124 rundir = os.getcwd()
1126 try:
1127 os.makedirs(outputpath)
1128 except OSError:
1129 pass
1130 if not os.path.isdir(outputpath):
1131 print 'FATAL: Output path is not a directory or does not exist'
1132 sys.exit(1)
1134 print 'Git path: %s' % gitpath
1135 print 'Output path: %s' % outputpath
1137 os.chdir(gitpath)
1139 cachefile = os.path.join(outputpath, 'gitstats.cache')
1141 print 'Collecting data...'
1142 data = GitDataCollector()
1143 data.loadCache(cachefile)
1144 data.collect(gitpath)
1145 print 'Refining data...'
1146 data.saveCache(cachefile)
1147 data.refine()
1149 os.chdir(rundir)
1151 print 'Generating report...'
1152 report = HTMLReportCreator()
1153 report.create(data, outputpath)
1155 time_end = time.time()
1156 exectime_internal = time_end - time_start
1157 print 'Execution time %.5f secs, %.5f secs (%.2f %%) in external commands)' % (exectime_internal, exectime_external, (100.0 * exectime_external) / exectime_internal)
# Run the report generator only when executed as a script, not on import.
if __name__ == '__main__':
	g = GitStats()
	g.run(sys.argv[1:])