#!/usr/bin/env python
# Copyright (c) 2007-2010 Heikki Hokkanen <hoxu@users.sf.net> & others (see doc/author.txt)
# GPLv2 / GPLv3
import datetime
import getopt
import glob
import os
import pickle
import platform
import re
import shutil
import subprocess
import sys
import time
import zlib

GNUPLOT_COMMON = 'set terminal png transparent size 640,240\nset size 1.0,1.0\n'
ON_LINUX = (platform.system() == 'Linux')
WEEKDAYS = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')

exectime_internal = 0.0
exectime_external = 0.0
time_start = time.time()

# By default, gnuplot is searched from path, but can be overridden with the
# environment variable "GNUPLOT"
gnuplot_cmd = 'gnuplot'
if 'GNUPLOT' in os.environ:
    gnuplot_cmd = os.environ['GNUPLOT']

conf = {
    'max_domains': 10,
    'max_ext_length': 10,
    'style': 'gitstats.css',
    'max_authors': 20,
    'authors_top': 5,
    'commit_begin': '',
    'commit_end': '',
    'linear_linestats': 1,
    'project_name': '',
}

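# getpipeoutput() below runs its arguments as a shell pipeline: the first
# command is spawned with stdout captured, each following command reads the
# previous one's stdout, and the last command's output is returned with the
# trailing newline stripped. Time spent in these external commands is
# accumulated into exectime_external.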
def getpipeoutput(cmds, quiet = False):
    global exectime_external
    start = time.time()
    if not quiet and ON_LINUX and os.isatty(1):
        print '>> ' + ' | '.join(cmds),
        sys.stdout.flush()
    p0 = subprocess.Popen(cmds[0], stdout = subprocess.PIPE, shell = True)
    p = p0
    for x in cmds[1:]:
        p = subprocess.Popen(x, stdin = p0.stdout, stdout = subprocess.PIPE, shell = True)
        p0 = p
    output = p.communicate()[0]
    end = time.time()
    if not quiet:
        if ON_LINUX and os.isatty(1):
            print '\r',
        print '[%.5f] >> %s' % (end - start, ' | '.join(cmds))
    exectime_external += (end - start)
    return output.rstrip('\n')

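# getcommitrange() builds the revision argument passed to git: with both
# commit_begin and commit_end configured it returns "begin..end", with only
# commit_end it returns that single revision, and otherwise it falls back to
# the supplied default (normally HEAD).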
def getcommitrange(defaultrange = 'HEAD', end_only = False):
    if len(conf['commit_end']) > 0:
        if end_only or len(conf['commit_begin']) == 0:
            return conf['commit_end']
        return '%s..%s' % (conf['commit_begin'], conf['commit_end'])
    return defaultrange

def getkeyssortedbyvalues(dict):
    return map(lambda el : el[1], sorted(map(lambda el : (el[1], el[0]), dict.items())))

# Sort the keys of a dict of dicts by a sub-key, e.g. for
# d['author'] = { 'commits': 512 } use getkeyssortedbyvaluekey(d, 'commits').
def getkeyssortedbyvaluekey(d, key):
    return map(lambda el : el[1], sorted(map(lambda el : (d[el][key], el), d.keys())))

VERSION = 0
def getversion():
    global VERSION
    if VERSION == 0:
        VERSION = getpipeoutput(["git rev-parse --short %s" % getcommitrange('HEAD')]).split('\n')[0]
    return VERSION

def getgitversion():
    return getpipeoutput(['git --version']).split('\n')[0]

def getgnuplotversion():
    return getpipeoutput(['gnuplot --version']).split('\n')[0]

class DataCollector:
    """Manages data collection from a revision control repository."""
    def __init__(self):
        self.stamp_created = time.time()
        self.cache = {}

    # This should be the main function to extract data from the repository.
    def collect(self, dir):
        self.dir = dir
        if len(conf['project_name']) == 0:
            self.projectname = os.path.basename(os.path.abspath(dir))
        else:
            self.projectname = conf['project_name']

    # Load cacheable data
    def loadCache(self, cachefile):
        if not os.path.exists(cachefile):
            return
        print 'Loading cache...'
        f = open(cachefile, 'rb')
        try:
            self.cache = pickle.loads(zlib.decompress(f.read()))
        except:
            # temporary hack to upgrade non-compressed caches
            f.seek(0)
            self.cache = pickle.load(f)
        f.close()

    # Produce any additional statistics from the extracted data.
    def refine(self):
        pass

    # Return a dictionary of statistics for the given author.
    def getAuthorInfo(self, author):
        return None

    def getActivityByDayOfWeek(self):
        return {}

    def getActivityByHourOfDay(self):
        return {}

    # Return a dictionary of statistics for the given domain.
    def getDomainInfo(self, domain):
        return None

    # Get a list of authors
    def getAuthors(self):
        return []

    def getFirstCommitDate(self):
        return datetime.datetime.now()

    def getLastCommitDate(self):
        return datetime.datetime.now()

    def getStampCreated(self):
        return self.stamp_created

    def getTags(self):
        return []

    def getTotalAuthors(self):
        return -1

    def getTotalCommits(self):
        return -1

    def getTotalFiles(self):
        return -1

    def getTotalLOC(self):
        return -1

    # Save cacheable data
    def saveCache(self, cachefile):
        print 'Saving cache...'
        f = open(cachefile, 'wb')
        #pickle.dump(self.cache, f)
        data = zlib.compress(pickle.dumps(self.cache))
        f.write(data)
        f.close()

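# The cache written by saveCache() and read by loadCache() is a zlib-compressed
# pickle of self.cache; loadCache() falls back to a plain pickle load so older,
# uncompressed cache files can still be read.
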
class GitDataCollector(DataCollector):
    def collect(self, dir):
        DataCollector.collect(self, dir)

        try:
            self.total_authors = int(getpipeoutput(['git shortlog -s %s' % getcommitrange(), 'wc -l']))
        except:
            self.total_authors = 0
        #self.total_lines = int(getoutput('git-ls-files -z |xargs -0 cat |wc -l'))

        self.activity_by_hour_of_day = {} # hour -> commits
        self.activity_by_day_of_week = {} # day -> commits
        self.activity_by_month_of_year = {} # month [1-12] -> commits
        self.activity_by_hour_of_week = {} # weekday -> hour -> commits
        self.activity_by_hour_of_day_busiest = 0
        self.activity_by_hour_of_week_busiest = 0
        self.activity_by_year_week = {} # yy_wNN -> commits
        self.activity_by_year_week_peak = 0

        self.authors = {} # name -> {commits, first_commit_stamp, last_commit_stamp, last_active_day, active_days, lines_added, lines_removed}

        # domains
        self.domains = {} # domain -> commits

        # author of the month
        self.author_of_month = {} # month -> author -> commits
        self.author_of_year = {} # year -> author -> commits
        self.commits_by_month = {} # month -> commits
        self.commits_by_year = {} # year -> commits
        self.first_commit_stamp = 0
        self.last_commit_stamp = 0
        self.last_active_day = None
        self.active_days = set()

        # lines
        self.total_lines = 0
        self.total_lines_added = 0
        self.total_lines_removed = 0

        # timezone
        self.commits_by_timezone = {} # timezone -> commits

        # tags
        self.tags = {}
        lines = getpipeoutput(['git show-ref --tags']).split('\n')
        for line in lines:
            if len(line) == 0:
                continue
            (hash, tag) = line.split(' ')

            tag = tag.replace('refs/tags/', '')
            output = getpipeoutput(['git log "%s" --pretty=format:"%%at %%aN" -n 1' % hash])
            if len(output) > 0:
                parts = output.split(' ')
                stamp = 0
                try:
                    stamp = int(parts[0])
                except ValueError:
                    stamp = 0
                self.tags[tag] = { 'stamp': stamp, 'hash' : hash, 'date' : datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), 'commits': 0, 'authors': {} }

        # collect info on tags, starting from latest
        tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), self.tags.items()))))
        prev = None
        for tag in reversed(tags_sorted_by_date_desc):
            cmd = 'git shortlog -s "%s"' % tag
            if prev != None:
                cmd += ' "^%s"' % prev
            output = getpipeoutput([cmd])
            if len(output) == 0:
                continue
            prev = tag
            for line in output.split('\n'):
                parts = re.split('\s+', line, 2)
                commits = int(parts[1])
                author = parts[2]
                self.tags[tag]['commits'] += commits
                self.tags[tag]['authors'][author] = commits
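        # The loop above walks tags oldest to newest and excludes the previously
        # processed tag ("^prev"), so commits between two tags are credited only
        # to the newer tag.
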
        # Collect revision statistics
        # Outputs "<stamp> <date> <time> <timezone> <author> '<' <mail> '>'"
        lines = getpipeoutput(['git rev-list --pretty=format:"%%at %%ai %%aN <%%aE>" %s' % getcommitrange('HEAD'), 'grep -v ^commit']).split('\n')
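        # rev-list prints commits newest first, so the first stamp seen becomes
        # last_commit_stamp and first_commit_stamp keeps being overwritten until
        # the oldest commit is reached.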
        for line in lines:
            parts = line.split(' ', 4)
            author = ''
            try:
                stamp = int(parts[0])
            except ValueError:
                stamp = 0
            timezone = parts[3]
            author, mail = parts[4].split('<', 1)
            author = author.rstrip()
            mail = mail.rstrip('>')
            domain = '?'
            if mail.find('@') != -1:
                domain = mail.rsplit('@', 1)[1]
            date = datetime.datetime.fromtimestamp(float(stamp))

            # First and last commit stamp
            if self.last_commit_stamp == 0:
                self.last_commit_stamp = stamp
            self.first_commit_stamp = stamp

            # activity
            # hour
            hour = date.hour
            self.activity_by_hour_of_day[hour] = self.activity_by_hour_of_day.get(hour, 0) + 1
            # most active hour?
            if self.activity_by_hour_of_day[hour] > self.activity_by_hour_of_day_busiest:
                self.activity_by_hour_of_day_busiest = self.activity_by_hour_of_day[hour]

            # day of week
            day = date.weekday()
            self.activity_by_day_of_week[day] = self.activity_by_day_of_week.get(day, 0) + 1

            # domain stats
            if domain not in self.domains:
                self.domains[domain] = {}
            # commits
            self.domains[domain]['commits'] = self.domains[domain].get('commits', 0) + 1

            # hour of week
            if day not in self.activity_by_hour_of_week:
                self.activity_by_hour_of_week[day] = {}
            self.activity_by_hour_of_week[day][hour] = self.activity_by_hour_of_week[day].get(hour, 0) + 1
            # most active hour?
            if self.activity_by_hour_of_week[day][hour] > self.activity_by_hour_of_week_busiest:
                self.activity_by_hour_of_week_busiest = self.activity_by_hour_of_week[day][hour]

            # month of year
            month = date.month
            self.activity_by_month_of_year[month] = self.activity_by_month_of_year.get(month, 0) + 1

            # yearly/weekly activity
            yyw = date.strftime('%Y-%W')
            self.activity_by_year_week[yyw] = self.activity_by_year_week.get(yyw, 0) + 1
            if self.activity_by_year_week_peak < self.activity_by_year_week[yyw]:
                self.activity_by_year_week_peak = self.activity_by_year_week[yyw]

            # author stats
            if author not in self.authors:
                self.authors[author] = {}
            # commits
            if 'last_commit_stamp' not in self.authors[author]:
                self.authors[author]['last_commit_stamp'] = stamp
            self.authors[author]['first_commit_stamp'] = stamp
            self.authors[author]['commits'] = self.authors[author].get('commits', 0) + 1

            # author of the month/year
            yymm = date.strftime('%Y-%m')
            if yymm in self.author_of_month:
                self.author_of_month[yymm][author] = self.author_of_month[yymm].get(author, 0) + 1
            else:
                self.author_of_month[yymm] = {}
                self.author_of_month[yymm][author] = 1
            self.commits_by_month[yymm] = self.commits_by_month.get(yymm, 0) + 1

            yy = date.year
            if yy in self.author_of_year:
                self.author_of_year[yy][author] = self.author_of_year[yy].get(author, 0) + 1
            else:
                self.author_of_year[yy] = {}
                self.author_of_year[yy][author] = 1
            self.commits_by_year[yy] = self.commits_by_year.get(yy, 0) + 1

            # authors: active days
            yymmdd = date.strftime('%Y-%m-%d')
            if 'last_active_day' not in self.authors[author]:
                self.authors[author]['last_active_day'] = yymmdd
                self.authors[author]['active_days'] = 1
            elif yymmdd != self.authors[author]['last_active_day']:
                self.authors[author]['last_active_day'] = yymmdd
                self.authors[author]['active_days'] += 1

            # project: active days
            if yymmdd != self.last_active_day:
                self.last_active_day = yymmdd
                self.active_days.add(yymmdd)

            # timezone
            self.commits_by_timezone[timezone] = self.commits_by_timezone.get(timezone, 0) + 1

        # TODO Optimize this, it's the worst bottleneck
        # outputs "<stamp> <files>" for each revision
        self.files_by_stamp = {} # stamp -> files
        revlines = getpipeoutput(['git rev-list --pretty=format:"%%at %%T" %s' % getcommitrange('HEAD'), 'grep -v ^commit']).strip().split('\n')
        lines = []
        for revline in revlines:
            time, rev = revline.split(' ')
            linecount = self.getFilesInCommit(rev)
            lines.append('%d %d' % (int(time), linecount))
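        # getFilesInCommit() memoizes per-tree file counts in self.cache
        # ('files_in_tree'), which saveCache() persists, so re-runs over an
        # unchanged history avoid most of the "git ls-tree | wc -l" calls.
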
        self.total_commits = len(lines)
        for line in lines:
            parts = line.split(' ')
            if len(parts) != 2:
                continue
            (stamp, files) = parts[0:2]
            try:
                self.files_by_stamp[int(stamp)] = int(files)
            except ValueError:
                print 'Warning: failed to parse line "%s"' % line

        # extensions
        self.extensions = {} # extension -> files, lines
        lines = getpipeoutput(['git ls-tree -r -z %s' % getcommitrange('HEAD', end_only = True)]).split('\000')
        self.total_files = len(lines)
        for line in lines:
            if len(line) == 0:
                continue
            parts = re.split('\s+', line, 4)
            sha1 = parts[2]
            filename = parts[3]

            if filename.find('.') == -1 or filename.rfind('.') == 0:
                ext = ''
            else:
                ext = filename[(filename.rfind('.') + 1):]
            if len(ext) > conf['max_ext_length']:
                ext = ''

            if ext not in self.extensions:
                self.extensions[ext] = {'files': 0, 'lines': 0}

            self.extensions[ext]['files'] += 1
            try:
                self.extensions[ext]['lines'] += self.getLinesInBlob(sha1)
            except:
                print 'Warning: Could not count lines for file "%s"' % line

        # line statistics
        # outputs:
        #  N files changed, N insertions (+), N deletions(-)
        # <stamp> <author>
        self.changes_by_date = {} # stamp -> { files, ins, del }
        # computation of lines of code by date is better done
        # on a linear history.
        extra = ''
        if conf['linear_linestats']:
            extra = '--first-parent -m'
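        # The log below is parsed oldest to newest (lines.reverse()): each
        # "N files changed, ..." shortstat line is buffered in files/inserted/deleted
        # and then attributed to the "<stamp> <author>" header line that follows it.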
        lines = getpipeoutput(['git log --shortstat %s --pretty=format:"%%at %%aN" %s' % (extra, getcommitrange('HEAD'))]).split('\n')
        lines.reverse()
        files = 0; inserted = 0; deleted = 0; total_lines = 0
        author = None
        for line in lines:
            if len(line) == 0:
                continue

            # <stamp> <author>
            if line.find('files changed,') == -1:
                pos = line.find(' ')
                if pos != -1:
                    try:
                        (stamp, author) = (int(line[:pos]), line[pos+1:])
                        self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted, 'lines': total_lines }
                        files, inserted, deleted = 0, 0, 0
                    except ValueError:
                        print 'Warning: unexpected line "%s"' % line
                else:
                    print 'Warning: unexpected line "%s"' % line
            else:
                numbers = re.findall('\d+', line)
                if len(numbers) == 3:
                    (files, inserted, deleted) = map(lambda el : int(el), numbers)
                    total_lines += inserted
                    total_lines -= deleted
                    self.total_lines_added += inserted
                    self.total_lines_removed += deleted
                else:
                    print 'Warning: failed to handle line "%s"' % line
                    (files, inserted, deleted) = (0, 0, 0)
                #self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted }
        self.total_lines = total_lines

        # Per-author statistics
        # Similar to the above, but never use --first-parent
        # (we need to walk through every commit to know who
        # committed what, not just the mainline).
        lines = getpipeoutput(['git log --shortstat --pretty=format:"%%at %%aN" %s' % (getcommitrange('HEAD'))]).split('\n')
        lines.reverse()
        files = 0; inserted = 0; deleted = 0
        author = None
        for line in lines:
            if len(line) == 0:
                continue

            # <stamp> <author>
            if line.find('files changed,') == -1:
                pos = line.find(' ')
                if pos != -1:
                    try:
                        (stamp, author) = (int(line[:pos]), line[pos+1:])
                        if author not in self.authors:
                            self.authors[author] = { 'lines_added' : 0, 'lines_removed' : 0 }
                        self.authors[author]['lines_added'] = self.authors[author].get('lines_added', 0) + inserted
                        self.authors[author]['lines_removed'] = self.authors[author].get('lines_removed', 0) + deleted
                        files, inserted, deleted = 0, 0, 0
                    except ValueError:
                        print 'Warning: unexpected line "%s"' % line
                else:
                    print 'Warning: unexpected line "%s"' % line
            else:
                numbers = re.findall('\d+', line)
                if len(numbers) == 3:
                    (files, inserted, deleted) = map(lambda el : int(el), numbers)
                else:
                    print 'Warning: failed to handle line "%s"' % line
                    (files, inserted, deleted) = (0, 0, 0)

    def refine(self):
        # authors
        # name -> {place_by_commits, commits_frac, date_first, date_last, timedelta}
        authors_by_commits = getkeyssortedbyvaluekey(self.authors, 'commits')
        authors_by_commits.reverse() # most first
        for i, name in enumerate(authors_by_commits):
            self.authors[name]['place_by_commits'] = i + 1

        for name in self.authors.keys():
            a = self.authors[name]
            a['commits_frac'] = (100 * float(a['commits'])) / self.getTotalCommits()
            date_first = datetime.datetime.fromtimestamp(a['first_commit_stamp'])
            date_last = datetime.datetime.fromtimestamp(a['last_commit_stamp'])
            delta = date_last - date_first
            a['date_first'] = date_first.strftime('%Y-%m-%d')
            a['date_last'] = date_last.strftime('%Y-%m-%d')
            a['timedelta'] = delta
            if 'lines_added' not in a: a['lines_added'] = 0
            if 'lines_removed' not in a: a['lines_removed'] = 0

    def getActiveDays(self):
        return self.active_days

    def getActivityByDayOfWeek(self):
        return self.activity_by_day_of_week

    def getActivityByHourOfDay(self):
        return self.activity_by_hour_of_day

    def getAuthorInfo(self, author):
        return self.authors[author]

    def getAuthors(self, limit = None):
        res = getkeyssortedbyvaluekey(self.authors, 'commits')
        res.reverse()
        return res[:limit]

    def getCommitDeltaDays(self):
        return (self.last_commit_stamp - self.first_commit_stamp) / 86400 + 1

    def getDomainInfo(self, domain):
        return self.domains[domain]

    def getDomains(self):
        return self.domains.keys()

    def getFilesInCommit(self, rev):
        try:
            res = self.cache['files_in_tree'][rev]
        except:
            res = int(getpipeoutput(['git ls-tree -r --name-only "%s"' % rev, 'wc -l']).split('\n')[0])
            if 'files_in_tree' not in self.cache:
                self.cache['files_in_tree'] = {}
            self.cache['files_in_tree'][rev] = res

        return res

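    # getFilesInCommit() and getLinesInBlob() share the same pattern: try the
    # persistent cache first and fall back to running git, storing the result
    # so the next run can reuse it.
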
    def getFirstCommitDate(self):
        return datetime.datetime.fromtimestamp(self.first_commit_stamp)

    def getLastCommitDate(self):
        return datetime.datetime.fromtimestamp(self.last_commit_stamp)

    def getLinesInBlob(self, sha1):
        try:
            res = self.cache['lines_in_blob'][sha1]
        except:
            res = int(getpipeoutput(['git cat-file blob %s' % sha1, 'wc -l']).split()[0])
            if 'lines_in_blob' not in self.cache:
                self.cache['lines_in_blob'] = {}
            self.cache['lines_in_blob'][sha1] = res
        return res

    def getTags(self):
        lines = getpipeoutput(['git show-ref --tags', 'cut -d/ -f3'])
        return lines.split('\n')

    def getTagDate(self, tag):
        return self.revToDate('tags/' + tag)

    def getTotalAuthors(self):
        return self.total_authors

    def getTotalCommits(self):
        return self.total_commits

    def getTotalFiles(self):
        return self.total_files

    def getTotalLOC(self):
        return self.total_lines

    def revToDate(self, rev):
        stamp = int(getpipeoutput(['git log --pretty=format:%%at "%s" -n 1' % rev]))
        return datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d')

class ReportCreator:
    """Creates the actual report based on given data."""
    def __init__(self):
        pass

    def create(self, data, path):
        self.data = data
        self.path = path

def html_linkify(text):
    return text.lower().replace(' ', '_')

def html_header(level, text):
    name = html_linkify(text)
    return '\n<h%d><a href="#%s" name="%s">%s</a></h%d>\n\n' % (level, name, name, text, level)

class HTMLReportCreator(ReportCreator):
    def create(self, data, path):
        ReportCreator.create(self, data, path)
        self.title = data.projectname

        # copy static files. Looks in the binary directory, ../share/gitstats and /usr/share/gitstats
        binarypath = os.path.dirname(os.path.abspath(__file__))
        secondarypath = os.path.join(binarypath, '..', 'share', 'gitstats')
        basedirs = [binarypath, secondarypath, '/usr/share/gitstats']
        for file in ('gitstats.css', 'sortable.js', 'arrow-up.gif', 'arrow-down.gif', 'arrow-none.gif'):
            for base in basedirs:
                src = base + '/' + file
                if os.path.exists(src):
                    shutil.copyfile(src, path + '/' + file)
                    break
            else:
                print 'Warning: "%s" not found, so not copied (searched: %s)' % (file, basedirs)
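        # (for/else: the warning above fires only when the inner loop completes
        # without a break, i.e. none of the base directories had the file)
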
        f = open(path + "/index.html", 'w')
        format = '%Y-%m-%d %H:%M:%S'
        self.printHeader(f)

        f.write('<h1>GitStats - %s</h1>' % data.projectname)

        self.printNav(f)

        f.write('<dl>')
        f.write('<dt>Project name</dt><dd>%s</dd>' % (data.projectname))
        f.write('<dt>Generated</dt><dd>%s (in %d seconds)</dd>' % (datetime.datetime.now().strftime(format), time.time() - data.getStampCreated()))
        f.write('<dt>Generator</dt><dd><a href="http://gitstats.sourceforge.net/">GitStats</a> (version %s), %s, %s</dd>' % (getversion(), getgitversion(), getgnuplotversion()))
        f.write('<dt>Report Period</dt><dd>%s to %s</dd>' % (data.getFirstCommitDate().strftime(format), data.getLastCommitDate().strftime(format)))
        f.write('<dt>Age</dt><dd>%d days, %d active days (%3.2f%%)</dd>' % (data.getCommitDeltaDays(), len(data.getActiveDays()), (100.0 * len(data.getActiveDays()) / data.getCommitDeltaDays())))
        f.write('<dt>Total Files</dt><dd>%s</dd>' % data.getTotalFiles())
        f.write('<dt>Total Lines of Code</dt><dd>%s (%d added, %d removed)</dd>' % (data.getTotalLOC(), data.total_lines_added, data.total_lines_removed))
        f.write('<dt>Total Commits</dt><dd>%s (average %.1f commits per active day, %.1f per all days)</dd>' % (data.getTotalCommits(), float(data.getTotalCommits()) / len(data.getActiveDays()), float(data.getTotalCommits()) / data.getCommitDeltaDays()))
        f.write('<dt>Authors</dt><dd>%s (average %.1f commits per author)</dd>' % (data.getTotalAuthors(), (1.0 * data.getTotalCommits()) / data.getTotalAuthors()))
        f.write('</dl>')

        f.write('</body>\n</html>')
        f.close()

        # Activity
        f = open(path + '/activity.html', 'w')
        self.printHeader(f)
        f.write('<h1>Activity</h1>')
        self.printNav(f)

        #f.write('<h2>Last 30 days</h2>')

        #f.write('<h2>Last 12 months</h2>')

        # Weekly activity
        WEEKS = 32
        f.write(html_header(2, 'Weekly activity'))
        f.write('<p>Last %d weeks</p>' % WEEKS)

        # generate weeks to show (previous N weeks from now)
        now = datetime.datetime.now()
        deltaweek = datetime.timedelta(7)
        weeks = []
        stampcur = now
        for i in range(0, WEEKS):
            weeks.insert(0, stampcur.strftime('%Y-%W'))
            stampcur -= deltaweek
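        # weeks[] now holds the last WEEKS year-week labels ('%Y-%W'), oldest
        # first, in the same key format used by activity_by_year_week.
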
        # top row: commits & bar
        f.write('<table class="noborders"><tr>')
        for i in range(0, WEEKS):
            commits = 0
            if weeks[i] in data.activity_by_year_week:
                commits = data.activity_by_year_week[weeks[i]]

            percentage = 0
            if weeks[i] in data.activity_by_year_week:
                percentage = float(data.activity_by_year_week[weeks[i]]) / data.activity_by_year_week_peak
            height = max(1, int(200 * percentage))
            f.write('<td style="text-align: center; vertical-align: bottom">%d<div style="display: block; background-color: red; width: 20px; height: %dpx"></div></td>' % (commits, height))

        # bottom row: year/week
        f.write('</tr><tr>')
        for i in range(0, WEEKS):
            f.write('<td>%s</td>' % (WEEKS - i))
        f.write('</tr></table>')

        # Hour of Day
        f.write(html_header(2, 'Hour of Day'))
        hour_of_day = data.getActivityByHourOfDay()
        f.write('<table><tr><th>Hour</th>')
        for i in range(0, 24):
            f.write('<th>%d</th>' % i)
        f.write('</tr>\n<tr><th>Commits</th>')
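        # cell backgrounds scale the red channel from 127 up to 255 in
        # proportion to how close each hour is to the busiest hour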
        fp = open(path + '/hour_of_day.dat', 'w')
        for i in range(0, 24):
            if i in hour_of_day:
                r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
                f.write('<td style="background-color: rgb(%d, 0, 0)">%d</td>' % (r, hour_of_day[i]))
                fp.write('%d %d\n' % (i, hour_of_day[i]))
            else:
                f.write('<td>0</td>')
                fp.write('%d 0\n' % i)
        fp.close()
        f.write('</tr>\n<tr><th>%</th>')
        totalcommits = data.getTotalCommits()
        for i in range(0, 24):
            if i in hour_of_day:
                r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
                f.write('<td style="background-color: rgb(%d, 0, 0)">%.2f</td>' % (r, (100.0 * hour_of_day[i]) / totalcommits))
            else:
                f.write('<td>0.00</td>')
        f.write('</tr></table>')
        f.write('<img src="hour_of_day.png" alt="Hour of Day" />')
        fg = open(path + '/hour_of_day.dat', 'w')
        for i in range(0, 24):
            if i in hour_of_day:
                fg.write('%d %d\n' % (i + 1, hour_of_day[i]))
            else:
                fg.write('%d 0\n' % (i + 1))
        fg.close()

        # Day of Week
        f.write(html_header(2, 'Day of Week'))
        day_of_week = data.getActivityByDayOfWeek()
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Day</th><th>Total (%)</th></tr>')
        fp = open(path + '/day_of_week.dat', 'w')
        for d in range(0, 7):
            commits = 0
            if d in day_of_week:
                commits = day_of_week[d]
            fp.write('%d %s %d\n' % (d + 1, WEEKDAYS[d], commits))
            f.write('<tr>')
            f.write('<th>%s</th>' % (WEEKDAYS[d]))
            if d in day_of_week:
                f.write('<td>%d (%.2f%%)</td>' % (day_of_week[d], (100.0 * day_of_week[d]) / totalcommits))
            else:
                f.write('<td>0</td>')
            f.write('</tr>')
        f.write('</table></div>')
        f.write('<img src="day_of_week.png" alt="Day of Week" />')
        fp.close()

        # Hour of Week
        f.write(html_header(2, 'Hour of Week'))
        f.write('<table>')

        f.write('<tr><th>Weekday</th>')
        for hour in range(0, 24):
            f.write('<th>%d</th>' % (hour))
        f.write('</tr>')

        for weekday in range(0, 7):
            f.write('<tr><th>%s</th>' % (WEEKDAYS[weekday]))
            for hour in range(0, 24):
                try:
                    commits = data.activity_by_hour_of_week[weekday][hour]
                except KeyError:
                    commits = 0
                if commits != 0:
                    f.write('<td')
                    r = 127 + int((float(commits) / data.activity_by_hour_of_week_busiest) * 128)
                    f.write(' style="background-color: rgb(%d, 0, 0)"' % r)
                    f.write('>%d</td>' % commits)
                else:
                    f.write('<td></td>')
            f.write('</tr>')

        f.write('</table>')

        # Month of Year
        f.write(html_header(2, 'Month of Year'))
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Month</th><th>Commits (%)</th></tr>')
        fp = open(path + '/month_of_year.dat', 'w')
        for mm in range(1, 13):
            commits = 0
            if mm in data.activity_by_month_of_year:
                commits = data.activity_by_month_of_year[mm]
            f.write('<tr><td>%d</td><td>%d (%.2f %%)</td></tr>' % (mm, commits, (100.0 * commits) / data.getTotalCommits()))
            fp.write('%d %d\n' % (mm, commits))
        fp.close()
        f.write('</table></div>')
        f.write('<img src="month_of_year.png" alt="Month of Year" />')

        # Commits by year/month
        f.write(html_header(2, 'Commits by year/month'))
        f.write('<div class="vtable"><table><tr><th>Month</th><th>Commits</th></tr>')
        for yymm in reversed(sorted(data.commits_by_month.keys())):
            f.write('<tr><td>%s</td><td>%d</td></tr>' % (yymm, data.commits_by_month[yymm]))
        f.write('</table></div>')
        f.write('<img src="commits_by_year_month.png" alt="Commits by year/month" />')
        fg = open(path + '/commits_by_year_month.dat', 'w')
        for yymm in sorted(data.commits_by_month.keys()):
            fg.write('%s %s\n' % (yymm, data.commits_by_month[yymm]))
        fg.close()

        # Commits by year
        f.write(html_header(2, 'Commits by Year'))
        f.write('<div class="vtable"><table><tr><th>Year</th><th>Commits (% of all)</th></tr>')
        for yy in reversed(sorted(data.commits_by_year.keys())):
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td></tr>' % (yy, data.commits_by_year[yy], (100.0 * data.commits_by_year[yy]) / data.getTotalCommits()))
        f.write('</table></div>')
        f.write('<img src="commits_by_year.png" alt="Commits by Year" />')
        fg = open(path + '/commits_by_year.dat', 'w')
        for yy in sorted(data.commits_by_year.keys()):
            fg.write('%d %d\n' % (yy, data.commits_by_year[yy]))
        fg.close()

        # Commits by timezone
        f.write(html_header(2, 'Commits by Timezone'))
        f.write('<table><tr>')
        f.write('<th>Timezone</th><th>Commits</th>')
        max_commits_on_tz = max(data.commits_by_timezone.values())
        for i in sorted(data.commits_by_timezone.keys(), key = lambda n : int(n)):
            commits = data.commits_by_timezone[i]
            r = 127 + int((float(commits) / max_commits_on_tz) * 128)
            f.write('<tr><th>%s</th><td style="background-color: rgb(%d, 0, 0)">%d</td></tr>' % (i, r, commits))
        f.write('</tr></table>')

        f.write('</body></html>')
        f.close()

        # Authors
        f = open(path + '/authors.html', 'w')
        self.printHeader(f)

        f.write('<h1>Authors</h1>')
        self.printNav(f)

        # Authors :: List of authors
        f.write(html_header(2, 'List of Authors'))

        f.write('<table class="authors sortable" id="authors">')
        f.write('<tr><th>Author</th><th>Commits (%)</th><th>+ lines</th><th>- lines</th><th>First commit</th><th>Last commit</th><th class="unsortable">Age</th><th>Active days</th><th># by commits</th></tr>')
        for author in data.getAuthors(conf['max_authors']):
            info = data.getAuthorInfo(author)
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d</td><td>%d</td><td>%s</td><td>%s</td><td>%s</td><td>%d</td><td>%d</td></tr>' % (author, info['commits'], info['commits_frac'], info['lines_added'], info['lines_removed'], info['date_first'], info['date_last'], info['timedelta'], info['active_days'], info['place_by_commits']))
        f.write('</table>')

        allauthors = data.getAuthors()
        if len(allauthors) > conf['max_authors']:
            rest = allauthors[conf['max_authors']:]
            f.write('<p class="moreauthors">These didn\'t make it to the top: %s</p>' % ', '.join(rest))

        # Authors :: Author of Month
        f.write(html_header(2, 'Author of Month'))
        f.write('<table class="sortable" id="aom">')
        f.write('<tr><th>Month</th><th>Author</th><th>Commits (%%)</th><th class="unsortable">Next top %d</th><th>Number of authors</th></tr>' % conf['authors_top'])
        for yymm in reversed(sorted(data.author_of_month.keys())):
            authordict = data.author_of_month[yymm]
            authors = getkeyssortedbyvalues(authordict)
            authors.reverse()
            commits = data.author_of_month[yymm][authors[0]]
            next = ', '.join(authors[1:conf['authors_top']+1])
            f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td><td>%d</td></tr>' % (yymm, authors[0], commits, (100.0 * commits) / data.commits_by_month[yymm], data.commits_by_month[yymm], next, len(authors)))

        f.write('</table>')

        f.write(html_header(2, 'Author of Year'))
        f.write('<table class="sortable" id="aoy"><tr><th>Year</th><th>Author</th><th>Commits (%%)</th><th class="unsortable">Next top %d</th><th>Number of authors</th></tr>' % conf['authors_top'])
        for yy in reversed(sorted(data.author_of_year.keys())):
            authordict = data.author_of_year[yy]
            authors = getkeyssortedbyvalues(authordict)
            authors.reverse()
            commits = data.author_of_year[yy][authors[0]]
            next = ', '.join(authors[1:conf['authors_top']+1])
            f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td><td>%d</td></tr>' % (yy, authors[0], commits, (100.0 * commits) / data.commits_by_year[yy], data.commits_by_year[yy], next, len(authors)))
        f.write('</table>')

        # Domains
        f.write(html_header(2, 'Commits by Domains'))
        domains_by_commits = getkeyssortedbyvaluekey(data.domains, 'commits')
        domains_by_commits.reverse() # most first
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Domains</th><th>Total (%)</th></tr>')
        fp = open(path + '/domains.dat', 'w')
        n = 0
        for domain in domains_by_commits:
            if n == conf['max_domains']:
                break
            commits = 0
            n += 1
            info = data.getDomainInfo(domain)
            fp.write('%s %d %d\n' % (domain, n, info['commits']))
            f.write('<tr><th>%s</th><td>%d (%.2f%%)</td></tr>' % (domain, info['commits'], (100.0 * info['commits'] / totalcommits)))
        f.write('</table></div>')
        f.write('<img src="domains.png" alt="Commits by Domains" />')
        fp.close()

        f.write('</body></html>')
        f.close()

        # Files
        f = open(path + '/files.html', 'w')
        self.printHeader(f)
        f.write('<h1>Files</h1>')
        self.printNav(f)

        f.write('<dl>\n')
        f.write('<dt>Total files</dt><dd>%d</dd>' % data.getTotalFiles())
        f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
        f.write('<dt>Average file size</dt><dd>%.2f bytes</dd>' % ((100.0 * data.getTotalLOC()) / data.getTotalFiles()))
        f.write('</dl>\n')

        # Files :: File count by date
        f.write(html_header(2, 'File count by date'))

        # use set to get rid of duplicate/unnecessary entries
        files_by_date = set()
        for stamp in sorted(data.files_by_stamp.keys()):
            files_by_date.add('%s %d' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))

        fg = open(path + '/files_by_date.dat', 'w')
        for line in sorted(list(files_by_date)):
            fg.write('%s\n' % line)
        #for stamp in sorted(data.files_by_stamp.keys()):
        #    fg.write('%s %d\n' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))
        fg.close()

        f.write('<img src="files_by_date.png" alt="Files by Date" />')

        #f.write('<h2>Average file size by date</h2>')

        # Files :: Extensions
        f.write(html_header(2, 'Extensions'))
        f.write('<table class="sortable" id="ext"><tr><th>Extension</th><th>Files (%)</th><th>Lines (%)</th><th>Lines/file</th></tr>')
        for ext in sorted(data.extensions.keys()):
            files = data.extensions[ext]['files']
            lines = data.extensions[ext]['lines']
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d (%.2f%%)</td><td>%d</td></tr>' % (ext, files, (100.0 * files) / data.getTotalFiles(), lines, (100.0 * lines) / data.getTotalLOC(), lines / files))
        f.write('</table>')

        f.write('</body></html>')
        f.close()

        # Lines
        f = open(path + '/lines.html', 'w')
        self.printHeader(f)
        f.write('<h1>Lines</h1>')
        self.printNav(f)

        f.write('<dl>\n')
        f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
        f.write('</dl>\n')

        f.write(html_header(2, 'Lines of Code'))
        f.write('<img src="lines_of_code.png" />')

        fg = open(path + '/lines_of_code.dat', 'w')
        for stamp in sorted(data.changes_by_date.keys()):
            fg.write('%d %d\n' % (stamp, data.changes_by_date[stamp]['lines']))
        fg.close()

        f.write('</body></html>')
        f.close()

        # tags.html
        f = open(path + '/tags.html', 'w')
        self.printHeader(f)
        f.write('<h1>Tags</h1>')
        self.printNav(f)

        f.write('<dl>')
        f.write('<dt>Total tags</dt><dd>%d</dd>' % len(data.tags))
        if len(data.tags) > 0:
            f.write('<dt>Average commits per tag</dt><dd>%.2f</dd>' % (1.0 * data.getTotalCommits() / len(data.tags)))
        f.write('</dl>')

        f.write('<table class="tags">')
        f.write('<tr><th>Name</th><th>Date</th><th>Commits</th><th>Authors</th></tr>')
        # sort the tags by date desc
        tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), data.tags.items()))))
        for tag in tags_sorted_by_date_desc:
            authorinfo = []
            authors_by_commits = getkeyssortedbyvalues(data.tags[tag]['authors'])
            for i in reversed(authors_by_commits):
                authorinfo.append('%s (%d)' % (i, data.tags[tag]['authors'][i]))
            f.write('<tr><td>%s</td><td>%s</td><td>%d</td><td>%s</td></tr>' % (tag, data.tags[tag]['date'], data.tags[tag]['commits'], ', '.join(authorinfo)))
        f.write('</table>')

        f.write('</body></html>')
        f.close()

        self.createGraphs(path)

    def createGraphs(self, path):
        print 'Generating graphs...'

        # hour of day
        f = open(path + '/hour_of_day.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'hour_of_day.png'
unset key
set xrange [0.5:24.5]
set xtics 4
set grid y
set ylabel "Commits"
plot 'hour_of_day.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # day of week
        f = open(path + '/day_of_week.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'day_of_week.png'
unset key
set xrange [0.5:7.5]
set xtics 1
set grid y
set ylabel "Commits"
plot 'day_of_week.dat' using 1:3:(0.5):xtic(2) w boxes fs solid
""")
        f.close()

        # Domains
        f = open(path + '/domains.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'domains.png'
unset key
unset xtics
set yrange [0:]
set grid y
set ylabel "Commits"
plot 'domains.dat' using 2:3:(0.5) with boxes fs solid, '' using 2:3:1 with labels rotate by 45 offset 0,1
""")
        f.close()

        # Month of Year
        f = open(path + '/month_of_year.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'month_of_year.png'
unset key
set xrange [0.5:12.5]
set xtics 1
set grid y
set ylabel "Commits"
plot 'month_of_year.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # commits_by_year_month
        f = open(path + '/commits_by_year_month.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'commits_by_year_month.png'
unset key
set xdata time
set timefmt "%Y-%m"
set format x "%Y-%m"
set xtics rotate
set bmargin 5
set grid y
set ylabel "Commits"
plot 'commits_by_year_month.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # commits_by_year
        f = open(path + '/commits_by_year.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'commits_by_year.png'
unset key
set xtics 1 rotate
set grid y
set ylabel "Commits"
set yrange [0:]
plot 'commits_by_year.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # Files by date
        f = open(path + '/files_by_date.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'files_by_date.png'
unset key
set xdata time
set timefmt "%Y-%m-%d"
set format x "%Y-%m-%d"
set grid y
set ylabel "Files"
set xtics rotate
set ytics autofreq
set bmargin 6
plot 'files_by_date.dat' using 1:2 w steps
""")
        f.close()

        # Lines of Code
        f = open(path + '/lines_of_code.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'lines_of_code.png'
unset key
set xdata time
set timefmt "%s"
set format x "%Y-%m-%d"
set grid y
set ylabel "Lines"
set xtics rotate
set bmargin 6
plot 'lines_of_code.dat' using 1:2 w lines
""")
        f.close()

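        # each *.plot script written above is rendered next; gnuplot is run from
        # inside the output directory so the relative 'set output' and data file
        # paths resolve there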
        os.chdir(path)
        files = glob.glob(path + '/*.plot')
        for f in files:
            out = getpipeoutput([gnuplot_cmd + ' "%s"' % f])
            if len(out) > 0:
                print out

    def printHeader(self, f, title = ''):
        f.write(
"""<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
    <title>GitStats - %s</title>
    <link rel="stylesheet" href="%s" type="text/css" />
    <meta name="generator" content="GitStats %s" />
    <script type="text/javascript" src="sortable.js"></script>
</head>
<body>
""" % (self.title, conf['style'], getversion()))

    def printNav(self, f):
        f.write("""
<div class="nav">
<ul>
<li><a href="index.html">General</a></li>
<li><a href="activity.html">Activity</a></li>
<li><a href="authors.html">Authors</a></li>
<li><a href="files.html">Files</a></li>
<li><a href="lines.html">Lines</a></li>
<li><a href="tags.html">Tags</a></li>
</ul>
</div>
""")

class GitStats:
    def run(self, args_orig):
        optlist, args = getopt.getopt(args_orig, 'c:')
        for o, v in optlist:
            if o == '-c':
                key, value = v.split('=', 1)
                if key not in conf:
                    raise KeyError('no such key "%s" in config' % key)
                if isinstance(conf[key], int):
                    conf[key] = int(value)
                else:
                    conf[key] = value
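        # -c values are coerced to int when the existing default is an int,
        # so e.g. -c max_authors=30 keeps its numeric type
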
        if len(args) < 2:
            print """
Usage: gitstats [options] <gitpath> <outputpath>

Options:
-c key=value     Override configuration value

Default config values:
%s
""" % conf
            sys.exit(0)

        gitpath = args[0]
        outputpath = os.path.abspath(args[1])
        rundir = os.getcwd()

        try:
            os.makedirs(outputpath)
        except OSError:
            pass
        if not os.path.isdir(outputpath):
            print 'FATAL: Output path is not a directory or does not exist'
            sys.exit(1)

        print 'Git path: %s' % gitpath
        print 'Output path: %s' % outputpath

        os.chdir(gitpath)

        cachefile = os.path.join(outputpath, 'gitstats.cache')

        print 'Collecting data...'
        data = GitDataCollector()
        data.loadCache(cachefile)
        data.collect(gitpath)
        print 'Refining data...'
        data.saveCache(cachefile)
        data.refine()

        os.chdir(rundir)

        print 'Generating report...'
        report = HTMLReportCreator()
        report.create(data, outputpath)

        time_end = time.time()
        exectime_internal = time_end - time_start
        print 'Execution time %.5f secs, %.5f secs (%.2f %%) in external commands' % (exectime_internal, exectime_external, (100.0 * exectime_external) / exectime_internal)

if __name__ == '__main__':
    g = GitStats()
    g.run(sys.argv[1:])