#!/usr/bin/env python
# Copyright (c) 2007-2010 Heikki Hokkanen <hoxu@users.sf.net> & others (see doc/author.txt)
# GPLv2 / GPLv3
import datetime
import getopt
import glob
import os
import pickle
import platform
import re
import shutil
import subprocess
import sys
import time
import zlib

GNUPLOT_COMMON = 'set terminal png transparent\nset size 1.0,0.5\n'
ON_LINUX = (platform.system() == 'Linux')

exectime_internal = 0.0
exectime_external = 0.0
time_start = time.time()

# By default, gnuplot is searched from path, but can be overridden with the
# environment variable "GNUPLOT"
gnuplot_cmd = 'gnuplot'
if 'GNUPLOT' in os.environ:
    gnuplot_cmd = os.environ['GNUPLOT']

conf = {
    'max_domains': 10,
    'max_ext_length': 10,
    'style': 'gitstats.css'
}
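
# The defaults above can be overridden at runtime with the -c option handled in
# GitStats.run() below.  A minimal illustrative invocation (paths are hypothetical):
#
#   ./gitstats -c max_domains=20 -c style=custom.css /path/to/repo /path/to/output
#
# Unknown keys are rejected by the option parser.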

def getpipeoutput(cmds, quiet = False):
    global exectime_external
    start = time.time()
    if not quiet and ON_LINUX and os.isatty(1):
        print '>> ' + ' | '.join(cmds),
        sys.stdout.flush()
    p0 = subprocess.Popen(cmds[0], stdout = subprocess.PIPE, shell = True)
    p = p0
    for x in cmds[1:]:
        p = subprocess.Popen(x, stdin = p0.stdout, stdout = subprocess.PIPE, shell = True)
        p0 = p
    output = p.communicate()[0]
    end = time.time()
    if not quiet:
        if ON_LINUX and os.isatty(1):
            print '\r',
        print '[%.5f] >> %s' % (end - start, ' | '.join(cmds))
    exectime_external += (end - start)
    return output.rstrip('\n')
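
# getpipeoutput() chains the given commands with OS pipes, much like a shell
# pipeline: the stdout of each command feeds the stdin of the next, and the
# output of the last command is returned with the trailing newline stripped.
# For example, the collector below effectively runs "git log | git shortlog -s | wc -l" as:
#
#   getpipeoutput(['git log', 'git shortlog -s', 'wc -l'])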

def getkeyssortedbyvalues(dict):
    return map(lambda el : el[1], sorted(map(lambda el : (el[1], el[0]), dict.items())))

# d['author'] = { 'commits': 512 }, ... -> getkeyssortedbyvaluekey(d, 'commits')
# returns the keys of d ordered (ascending) by the given sub-key of each value.
def getkeyssortedbyvaluekey(d, key):
    return map(lambda el : el[1], sorted(map(lambda el : (d[el][key], el), d.keys())))
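
# Both helpers return keys ordered by value, smallest first.  A small worked
# example (illustrative values):
#
#   getkeyssortedbyvalues({'a': 3, 'b': 1})                          -> ['b', 'a']
#   getkeyssortedbyvaluekey({'x': {'commits': 5},
#                            'y': {'commits': 9}}, 'commits')        -> ['x', 'y']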

VERSION = 0
def getversion():
    global VERSION
    if VERSION == 0:
        VERSION = getpipeoutput(["git rev-parse --short HEAD"]).split('\n')[0]
    return VERSION

class DataCollector:
    """Manages data collection from a revision control repository."""
    def __init__(self):
        self.stamp_created = time.time()
        self.cache = {}

    # This should be the main function to extract data from the repository.
    def collect(self, dir):
        self.dir = dir
        self.projectname = os.path.basename(os.path.abspath(dir))

    # Load cacheable data
    def loadCache(self, cachefile):
        if not os.path.exists(cachefile):
            return
        print 'Loading cache...'
        f = open(cachefile)
        try:
            self.cache = pickle.loads(zlib.decompress(f.read()))
        except:
            # temporary hack to upgrade non-compressed caches
            f.seek(0)
            self.cache = pickle.load(f)
        f.close()

    # Produce any additional statistics from the extracted data.
    def refine(self):
        pass

    # Get a dictionary of information about the given author.
    def getAuthorInfo(self, author):
        return None

    def getActivityByDayOfWeek(self):
        return {}

    def getActivityByHourOfDay(self):
        return {}

    # Get a dictionary of information about the given domain.
    def getDomainInfo(self, domain):
        return None

    # Get a list of authors
    def getAuthors(self):
        return []

    def getFirstCommitDate(self):
        return datetime.datetime.now()

    def getLastCommitDate(self):
        return datetime.datetime.now()

    def getStampCreated(self):
        return self.stamp_created

    def getTags(self):
        return []

    def getTotalAuthors(self):
        return -1

    def getTotalCommits(self):
        return -1

    def getTotalFiles(self):
        return -1

    def getTotalLOC(self):
        return -1

    # Save cacheable data
    def saveCache(self, cachefile):
        print 'Saving cache...'
        f = open(cachefile, 'w')
        #pickle.dump(self.cache, f)
        data = zlib.compress(pickle.dumps(self.cache))
        f.write(data)
        f.close()
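
# Note: the cache written above is a zlib-compressed pickle, mirroring loadCache().
# A quick, illustrative way to inspect one from an interactive session:
#
#   import pickle, zlib
#   cache = pickle.loads(zlib.decompress(open('gitstats.cache', 'rb').read()))
#   print cache.keys()    # e.g. ['files_in_tree', 'lines_in_blob']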

class GitDataCollector(DataCollector):
    def collect(self, dir):
        DataCollector.collect(self, dir)

        try:
            self.total_authors = int(getpipeoutput(['git log', 'git shortlog -s', 'wc -l']))
        except:
            self.total_authors = 0
        #self.total_lines = int(getoutput('git-ls-files -z |xargs -0 cat |wc -l'))

        self.activity_by_hour_of_day = {} # hour -> commits
        self.activity_by_day_of_week = {} # day -> commits
        self.activity_by_month_of_year = {} # month [1-12] -> commits
        self.activity_by_hour_of_week = {} # weekday -> hour -> commits
        self.activity_by_hour_of_day_busiest = 0
        self.activity_by_hour_of_week_busiest = 0
        self.activity_by_year_week = {} # yy_wNN -> commits
        self.activity_by_year_week_peak = 0

        self.authors = {} # name -> {commits, first_commit_stamp, last_commit_stamp, last_active_day, active_days, lines_added, lines_removed}

        # domains
        self.domains = {} # domain -> commits

        # author of the month
        self.author_of_month = {} # month -> author -> commits
        self.author_of_year = {} # year -> author -> commits
        self.commits_by_month = {} # month -> commits
        self.commits_by_year = {} # year -> commits
        self.first_commit_stamp = 0
        self.last_commit_stamp = 0
        self.last_active_day = None
        self.active_days = set()

        # lines
        self.total_lines = 0
        self.total_lines_added = 0
        self.total_lines_removed = 0

        # timezone
        self.commits_by_timezone = {} # timezone -> commits

        # tags
        self.tags = {}
        lines = getpipeoutput(['git show-ref --tags']).split('\n')
        for line in lines:
            if len(line) == 0:
                continue
            (hash, tag) = line.split(' ')

            tag = tag.replace('refs/tags/', '')
            output = getpipeoutput(['git log "%s" --pretty=format:"%%at %%an" -n 1' % hash])
            if len(output) > 0:
                parts = output.split(' ')
                stamp = 0
                try:
                    stamp = int(parts[0])
                except ValueError:
                    stamp = 0
                self.tags[tag] = { 'stamp': stamp, 'hash' : hash, 'date' : datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), 'commits': 0, 'authors': {} }

        # collect info on tags, starting from latest
        tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), self.tags.items()))))
        prev = None
        for tag in reversed(tags_sorted_by_date_desc):
            cmd = 'git shortlog -s "%s"' % tag
            if prev != None:
                cmd += ' "^%s"' % prev
            output = getpipeoutput([cmd])
            if len(output) == 0:
                continue
            prev = tag
            for line in output.split('\n'):
                parts = re.split('\s+', line, 2)
                commits = int(parts[1])
                author = parts[2]
                self.tags[tag]['commits'] += commits
                self.tags[tag]['authors'][author] = commits
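
        # Note: `git shortlog -s "<tag>" "^<prev>"` lists one line per author for
        # the commits reachable from <tag> but not from the previously processed
        # tag, in the form "<count>\t<author>", e.g. (illustrative):
        #
        #      5\tHeikki Hokkanen
        #
        # The re.split() above relies on that shape: parts[1] is the commit count
        # and parts[2] the author name.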

        # Collect revision statistics
        # Outputs "<stamp> <date> <time> <timezone> <author> '<' <mail> '>'"
        lines = getpipeoutput(['git rev-list --pretty=format:"%at %ai %an <%aE>" HEAD', 'grep -v ^commit']).split('\n')
        for line in lines:
            parts = line.split(' ', 4)
            author = ''
            try:
                stamp = int(parts[0])
            except ValueError:
                stamp = 0
            timezone = parts[3]
            author, mail = parts[4].split('<', 1)
            author = author.rstrip()
            mail = mail.rstrip('>')
            domain = '?'
            if mail.find('@') != -1:
                domain = mail.rsplit('@', 1)[1]
            date = datetime.datetime.fromtimestamp(float(stamp))

            # First and last commit stamp
            if self.last_commit_stamp == 0:
                self.last_commit_stamp = stamp
            self.first_commit_stamp = stamp

            # activity
            # hour
            hour = date.hour
            self.activity_by_hour_of_day[hour] = self.activity_by_hour_of_day.get(hour, 0) + 1
            # most active hour?
            if self.activity_by_hour_of_day[hour] > self.activity_by_hour_of_day_busiest:
                self.activity_by_hour_of_day_busiest = self.activity_by_hour_of_day[hour]

            # day of week
            day = date.weekday()
            self.activity_by_day_of_week[day] = self.activity_by_day_of_week.get(day, 0) + 1

            # domain stats
            if domain not in self.domains:
                self.domains[domain] = {}
            # commits
            self.domains[domain]['commits'] = self.domains[domain].get('commits', 0) + 1

            # hour of week
            if day not in self.activity_by_hour_of_week:
                self.activity_by_hour_of_week[day] = {}
            self.activity_by_hour_of_week[day][hour] = self.activity_by_hour_of_week[day].get(hour, 0) + 1
            # most active hour?
            if self.activity_by_hour_of_week[day][hour] > self.activity_by_hour_of_week_busiest:
                self.activity_by_hour_of_week_busiest = self.activity_by_hour_of_week[day][hour]

            # month of year
            month = date.month
            self.activity_by_month_of_year[month] = self.activity_by_month_of_year.get(month, 0) + 1

            # yearly/weekly activity
            yyw = date.strftime('%Y-%W')
            self.activity_by_year_week[yyw] = self.activity_by_year_week.get(yyw, 0) + 1
            if self.activity_by_year_week_peak < self.activity_by_year_week[yyw]:
                self.activity_by_year_week_peak = self.activity_by_year_week[yyw]

            # author stats
            if author not in self.authors:
                self.authors[author] = {}
            # commits
            if 'last_commit_stamp' not in self.authors[author]:
                self.authors[author]['last_commit_stamp'] = stamp
            self.authors[author]['first_commit_stamp'] = stamp
            self.authors[author]['commits'] = self.authors[author].get('commits', 0) + 1

            # author of the month/year
            yymm = date.strftime('%Y-%m')
            if yymm in self.author_of_month:
                self.author_of_month[yymm][author] = self.author_of_month[yymm].get(author, 0) + 1
            else:
                self.author_of_month[yymm] = {}
                self.author_of_month[yymm][author] = 1
            self.commits_by_month[yymm] = self.commits_by_month.get(yymm, 0) + 1

            yy = date.year
            if yy in self.author_of_year:
                self.author_of_year[yy][author] = self.author_of_year[yy].get(author, 0) + 1
            else:
                self.author_of_year[yy] = {}
                self.author_of_year[yy][author] = 1
            self.commits_by_year[yy] = self.commits_by_year.get(yy, 0) + 1

            # authors: active days
            yymmdd = date.strftime('%Y-%m-%d')
            if 'last_active_day' not in self.authors[author]:
                self.authors[author]['last_active_day'] = yymmdd
                self.authors[author]['active_days'] = 1
            elif yymmdd != self.authors[author]['last_active_day']:
                self.authors[author]['last_active_day'] = yymmdd
                self.authors[author]['active_days'] += 1

            # project: active days
            if yymmdd != self.last_active_day:
                self.last_active_day = yymmdd
                self.active_days.add(yymmdd)

            # timezone
            self.commits_by_timezone[timezone] = self.commits_by_timezone.get(timezone, 0) + 1

        # TODO Optimize this, it's the worst bottleneck
        # outputs "<stamp> <files>" for each revision
        self.files_by_stamp = {} # stamp -> files
        revlines = getpipeoutput(['git rev-list --pretty=format:"%at %T" HEAD', 'grep -v ^commit']).strip().split('\n')
        lines = []
        for revline in revlines:
            time, rev = revline.split(' ')
            linecount = self.getFilesInCommit(rev)
            lines.append('%d %d' % (int(time), linecount))

        self.total_commits = len(lines)
        for line in lines:
            parts = line.split(' ')
            if len(parts) != 2:
                continue
            (stamp, files) = parts[0:2]
            try:
                self.files_by_stamp[int(stamp)] = int(files)
            except ValueError:
                print 'Warning: failed to parse line "%s"' % line

        # extensions
        self.extensions = {} # extension -> files, lines
        lines = getpipeoutput(['git ls-tree -r -z HEAD']).split('\000')
        self.total_files = len(lines)
        for line in lines:
            if len(line) == 0:
                continue
            parts = re.split('\s+', line, 4)
            sha1 = parts[2]
            filename = parts[3]

            if filename.find('.') == -1 or filename.rfind('.') == 0:
                ext = ''
            else:
                ext = filename[(filename.rfind('.') + 1):]
            if len(ext) > conf['max_ext_length']:
                ext = ''

            if ext not in self.extensions:
                self.extensions[ext] = {'files': 0, 'lines': 0}

            self.extensions[ext]['files'] += 1
            try:
                self.extensions[ext]['lines'] += self.getLinesInBlob(sha1)
            except:
                print 'Warning: Could not count lines for file "%s"' % line

        # line statistics
        # outputs:
        #  N files changed, N insertions (+), N deletions(-)
        # <stamp> <author>
        self.changes_by_date = {} # stamp -> { files, ins, del }
        lines = getpipeoutput(['git log --shortstat --pretty=format:"%at %an"']).split('\n')
        lines.reverse()
        files = 0; inserted = 0; deleted = 0; total_lines = 0
        author = None
        for line in lines:
            if len(line) == 0:
                continue

            # <stamp> <author>
            if line.find('files changed,') == -1:
                pos = line.find(' ')
                if pos != -1:
                    try:
                        (stamp, author) = (int(line[:pos]), line[pos+1:])
                        self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted, 'lines': total_lines }
                        if author not in self.authors:
                            self.authors[author] = { 'lines_added' : 0, 'lines_removed' : 0 }
                        self.authors[author]['lines_added'] = self.authors[author].get('lines_added', 0) + inserted
                        self.authors[author]['lines_removed'] = self.authors[author].get('lines_removed', 0) + deleted
                    except ValueError:
                        print 'Warning: unexpected line "%s"' % line
                else:
                    print 'Warning: unexpected line "%s"' % line
            else:
                numbers = re.findall('\d+', line)
                if len(numbers) == 3:
                    (files, inserted, deleted) = map(lambda el : int(el), numbers)
                    total_lines += inserted
                    total_lines -= deleted
                    self.total_lines_added += inserted
                    self.total_lines_removed += deleted
                else:
                    print 'Warning: failed to handle line "%s"' % line
                    (files, inserted, deleted) = (0, 0, 0)
                    #self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted }
        self.total_lines = total_lines
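
    # Note on the --shortstat parsing above: after lines.reverse() each commit is
    # seen as a "N files changed, ..." line followed by its "<stamp> <author>"
    # line, e.g. (illustrative values):
    #
    #    3 files changed, 27 insertions(+), 5 deletions(-)
    #   1273252046 Heikki Hokkanen
    #
    # The numbers line updates the running files/inserted/deleted counters; the
    # stamp/author line then records them in changes_by_date and credits the
    # added/removed lines to that author.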

    def refine(self):
        # authors
        # name -> {place_by_commits, commits_frac, date_first, date_last, timedelta}
        authors_by_commits = getkeyssortedbyvaluekey(self.authors, 'commits')
        authors_by_commits.reverse() # most first
        for i, name in enumerate(authors_by_commits):
            self.authors[name]['place_by_commits'] = i + 1

        for name in self.authors.keys():
            a = self.authors[name]
            a['commits_frac'] = (100 * float(a['commits'])) / self.getTotalCommits()
            date_first = datetime.datetime.fromtimestamp(a['first_commit_stamp'])
            date_last = datetime.datetime.fromtimestamp(a['last_commit_stamp'])
            delta = date_last - date_first
            a['date_first'] = date_first.strftime('%Y-%m-%d')
            a['date_last'] = date_last.strftime('%Y-%m-%d')
            a['timedelta'] = delta

    def getActiveDays(self):
        return self.active_days

    def getActivityByDayOfWeek(self):
        return self.activity_by_day_of_week

    def getActivityByHourOfDay(self):
        return self.activity_by_hour_of_day

    def getAuthorInfo(self, author):
        return self.authors[author]

    def getAuthors(self):
        return self.authors.keys()

    def getCommitDeltaDays(self):
        return (self.last_commit_stamp - self.first_commit_stamp) / 86400

    def getDomainInfo(self, domain):
        return self.domains[domain]

    def getDomains(self):
        return self.domains.keys()

    def getFilesInCommit(self, rev):
        try:
            res = self.cache['files_in_tree'][rev]
        except:
            res = int(getpipeoutput(['git ls-tree -r --name-only "%s"' % rev, 'wc -l']).split('\n')[0])
            if 'files_in_tree' not in self.cache:
                self.cache['files_in_tree'] = {}
            self.cache['files_in_tree'][rev] = res

        return res

    def getFirstCommitDate(self):
        return datetime.datetime.fromtimestamp(self.first_commit_stamp)

    def getLastCommitDate(self):
        return datetime.datetime.fromtimestamp(self.last_commit_stamp)

    def getLinesInBlob(self, sha1):
        try:
            res = self.cache['lines_in_blob'][sha1]
        except:
            res = int(getpipeoutput(['git cat-file blob %s' % sha1, 'wc -l']).split()[0])
            if 'lines_in_blob' not in self.cache:
                self.cache['lines_in_blob'] = {}
            self.cache['lines_in_blob'][sha1] = res
        return res

    def getTags(self):
        lines = getpipeoutput(['git show-ref --tags', 'cut -d/ -f3'])
        return lines.split('\n')

    def getTagDate(self, tag):
        return self.revToDate('tags/' + tag)

    def getTotalAuthors(self):
        return self.total_authors

    def getTotalCommits(self):
        return self.total_commits

    def getTotalFiles(self):
        return self.total_files

    def getTotalLOC(self):
        return self.total_lines

    def revToDate(self, rev):
        stamp = int(getpipeoutput(['git log --pretty=format:%%at "%s" -n 1' % rev]))
        return datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d')

class ReportCreator:
    """Creates the actual report based on given data."""
    def __init__(self):
        pass

    def create(self, data, path):
        self.data = data
        self.path = path

def html_linkify(text):
    return text.lower().replace(' ', '_')

def html_header(level, text):
    name = html_linkify(text)
    return '\n<h%d><a href="#%s" name="%s">%s</a></h%d>\n\n' % (level, name, name, text, level)
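
# For example, html_header(2, 'Hour of Day') produces (surrounding newlines omitted):
#
#   <h2><a href="#hour_of_day" name="hour_of_day">Hour of Day</a></h2>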

class HTMLReportCreator(ReportCreator):
    def create(self, data, path):
        ReportCreator.create(self, data, path)
        self.title = data.projectname

        # copy static files. Looks in the binary directory, ../share/gitstats and /usr/share/gitstats
        binarypath = os.path.dirname(os.path.abspath(__file__))
        secondarypath = os.path.join(binarypath, '..', 'share', 'gitstats')
        basedirs = [binarypath, secondarypath, '/usr/share/gitstats']
        for file in ('gitstats.css', 'sortable.js', 'arrow-up.gif', 'arrow-down.gif', 'arrow-none.gif'):
            for base in basedirs:
                src = base + '/' + file
                if os.path.exists(src):
                    shutil.copyfile(src, path + '/' + file)
                    break
            else:
                print 'Warning: "%s" not found, so not copied (searched: %s)' % (file, basedirs)

        f = open(path + "/index.html", 'w')
        format = '%Y-%m-%d %H:%M:%S'
        self.printHeader(f)

        f.write('<h1>GitStats - %s</h1>' % data.projectname)

        self.printNav(f)

        f.write('<dl>')
        f.write('<dt>Project name</dt><dd>%s</dd>' % (data.projectname))
        f.write('<dt>Generated</dt><dd>%s (in %d seconds)</dd>' % (datetime.datetime.now().strftime(format), time.time() - data.getStampCreated()))
        f.write('<dt>Generator</dt><dd><a href="http://gitstats.sourceforge.net/">GitStats</a> (version %s)</dd>' % getversion())
        f.write('<dt>Report Period</dt><dd>%s to %s</dd>' % (data.getFirstCommitDate().strftime(format), data.getLastCommitDate().strftime(format)))
        f.write('<dt>Age</dt><dd>%d days, %d active days (%3.2f%%)</dd>' % (data.getCommitDeltaDays(), len(data.getActiveDays()), (100.0 * len(data.getActiveDays()) / data.getCommitDeltaDays())))
        f.write('<dt>Total Files</dt><dd>%s</dd>' % data.getTotalFiles())
        f.write('<dt>Total Lines of Code</dt><dd>%s (%d added, %d removed)</dd>' % (data.getTotalLOC(), data.total_lines_added, data.total_lines_removed))
        f.write('<dt>Total Commits</dt><dd>%s (average %.1f commits per active day, %.1f per all days)</dd>' % (data.getTotalCommits(), float(data.getTotalCommits()) / len(data.getActiveDays()), float(data.getTotalCommits()) / data.getCommitDeltaDays()))
        f.write('<dt>Authors</dt><dd>%s</dd>' % data.getTotalAuthors())
        f.write('</dl>')

        f.write('</body>\n</html>')
        f.close()

        # Activity
        f = open(path + '/activity.html', 'w')
        self.printHeader(f)
        f.write('<h1>Activity</h1>')
        self.printNav(f)

        #f.write('<h2>Last 30 days</h2>')

        #f.write('<h2>Last 12 months</h2>')

        # Weekly activity
        WEEKS = 32
        f.write(html_header(2, 'Weekly activity'))
        f.write('<p>Last %d weeks</p>' % WEEKS)

        # generate weeks to show (previous N weeks from now)
        now = datetime.datetime.now()
        deltaweek = datetime.timedelta(7)
        weeks = []
        stampcur = now
        for i in range(0, WEEKS):
            weeks.insert(0, stampcur.strftime('%Y-%W'))
            stampcur -= deltaweek

        # top row: commits & bar
        f.write('<table class="noborders"><tr>')
        for i in range(0, WEEKS):
            commits = 0
            if weeks[i] in data.activity_by_year_week:
                commits = data.activity_by_year_week[weeks[i]]

            percentage = 0
            if weeks[i] in data.activity_by_year_week:
                percentage = float(data.activity_by_year_week[weeks[i]]) / data.activity_by_year_week_peak
            height = max(1, int(200 * percentage))
            f.write('<td style="text-align: center; vertical-align: bottom">%d<div style="display: block; background-color: red; width: 20px; height: %dpx"></div></td>' % (commits, height))

        # bottom row: year/week
        f.write('</tr><tr>')
        for i in range(0, WEEKS):
            f.write('<td>%s</td>' % (WEEKS - i))
        f.write('</tr></table>')

        # Hour of Day
        f.write(html_header(2, 'Hour of Day'))
        hour_of_day = data.getActivityByHourOfDay()
        f.write('<table><tr><th>Hour</th>')
        for i in range(0, 24):
            f.write('<th>%d</th>' % i)
        f.write('</tr>\n<tr><th>Commits</th>')
        fp = open(path + '/hour_of_day.dat', 'w')
        for i in range(0, 24):
            if i in hour_of_day:
                r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
                f.write('<td style="background-color: rgb(%d, 0, 0)">%d</td>' % (r, hour_of_day[i]))
                fp.write('%d %d\n' % (i, hour_of_day[i]))
            else:
                f.write('<td>0</td>')
                fp.write('%d 0\n' % i)
        fp.close()
        f.write('</tr>\n<tr><th>%</th>')
        totalcommits = data.getTotalCommits()
        for i in range(0, 24):
            if i in hour_of_day:
                r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
                f.write('<td style="background-color: rgb(%d, 0, 0)">%.2f</td>' % (r, (100.0 * hour_of_day[i]) / totalcommits))
            else:
                f.write('<td>0.00</td>')
        f.write('</tr></table>')
        f.write('<img src="hour_of_day.png" alt="Hour of Day" />')
        fg = open(path + '/hour_of_day.dat', 'w')
        for i in range(0, 24):
            if i in hour_of_day:
                fg.write('%d %d\n' % (i + 1, hour_of_day[i]))
            else:
                fg.write('%d 0\n' % (i + 1))
        fg.close()

        # Day of Week
        f.write(html_header(2, 'Day of Week'))
        day_of_week = data.getActivityByDayOfWeek()
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Day</th><th>Total (%)</th></tr>')
        fp = open(path + '/day_of_week.dat', 'w')
        for d in range(0, 7):
            commits = 0
            if d in day_of_week:
                commits = day_of_week[d]
            fp.write('%d %d\n' % (d + 1, commits))
            f.write('<tr>')
            f.write('<th>%d</th>' % (d + 1))
            if d in day_of_week:
                f.write('<td>%d (%.2f%%)</td>' % (day_of_week[d], (100.0 * day_of_week[d]) / totalcommits))
            else:
                f.write('<td>0</td>')
            f.write('</tr>')
        f.write('</table></div>')
        f.write('<img src="day_of_week.png" alt="Day of Week" />')
        fp.close()

        # Hour of Week
        f.write(html_header(2, 'Hour of Week'))
        f.write('<table>')

        f.write('<tr><th>Weekday</th>')
        for hour in range(0, 24):
            f.write('<th>%d</th>' % (hour))
        f.write('</tr>')

        for weekday in range(0, 7):
            f.write('<tr><th>%d</th>' % (weekday + 1))
            for hour in range(0, 24):
                try:
                    commits = data.activity_by_hour_of_week[weekday][hour]
                except KeyError:
                    commits = 0
                if commits != 0:
                    f.write('<td')
                    r = 127 + int((float(commits) / data.activity_by_hour_of_week_busiest) * 128)
                    f.write(' style="background-color: rgb(%d, 0, 0)"' % r)
                    f.write('>%d</td>' % commits)
                else:
                    f.write('<td></td>')
            f.write('</tr>')

        f.write('</table>')

        # Month of Year
        f.write(html_header(2, 'Month of Year'))
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Month</th><th>Commits (%)</th></tr>')
        fp = open(path + '/month_of_year.dat', 'w')
        for mm in range(1, 13):
            commits = 0
            if mm in data.activity_by_month_of_year:
                commits = data.activity_by_month_of_year[mm]
            f.write('<tr><td>%d</td><td>%d (%.2f %%)</td></tr>' % (mm, commits, (100.0 * commits) / data.getTotalCommits()))
            fp.write('%d %d\n' % (mm, commits))
        fp.close()
        f.write('</table></div>')
        f.write('<img src="month_of_year.png" alt="Month of Year" />')

        # Commits by year/month
        f.write(html_header(2, 'Commits by year/month'))
        f.write('<div class="vtable"><table><tr><th>Month</th><th>Commits</th></tr>')
        for yymm in reversed(sorted(data.commits_by_month.keys())):
            f.write('<tr><td>%s</td><td>%d</td></tr>' % (yymm, data.commits_by_month[yymm]))
        f.write('</table></div>')
        f.write('<img src="commits_by_year_month.png" alt="Commits by year/month" />')
        fg = open(path + '/commits_by_year_month.dat', 'w')
        for yymm in sorted(data.commits_by_month.keys()):
            fg.write('%s %s\n' % (yymm, data.commits_by_month[yymm]))
        fg.close()

        # Commits by year
        f.write(html_header(2, 'Commits by Year'))
        f.write('<div class="vtable"><table><tr><th>Year</th><th>Commits (% of all)</th></tr>')
        for yy in reversed(sorted(data.commits_by_year.keys())):
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td></tr>' % (yy, data.commits_by_year[yy], (100.0 * data.commits_by_year[yy]) / data.getTotalCommits()))
        f.write('</table></div>')
        f.write('<img src="commits_by_year.png" alt="Commits by Year" />')
        fg = open(path + '/commits_by_year.dat', 'w')
        for yy in sorted(data.commits_by_year.keys()):
            fg.write('%d %d\n' % (yy, data.commits_by_year[yy]))
        fg.close()

        # Commits by timezone
        f.write(html_header(2, 'Commits by Timezone'))
        f.write('<table><tr>')
        f.write('<th>Timezone</th><th>Commits</th>')
        max_commits_on_tz = max(data.commits_by_timezone.values())
        for i in sorted(data.commits_by_timezone.keys(), key = lambda n : int(n)):
            commits = data.commits_by_timezone[i]
            r = 127 + int((float(commits) / max_commits_on_tz) * 128)
            f.write('<tr><th>%s</th><td style="background-color: rgb(%d, 0, 0)">%d</td></tr>' % (i, r, commits))
        f.write('</tr></table>')

        f.write('</body></html>')
        f.close()

        # Authors
        f = open(path + '/authors.html', 'w')
        self.printHeader(f)

        f.write('<h1>Authors</h1>')
        self.printNav(f)

        # Authors :: List of authors
        f.write(html_header(2, 'List of Authors'))

        f.write('<table class="authors sortable" id="authors">')
        f.write('<tr><th>Author</th><th>Commits (%)</th><th>+ lines</th><th>- lines</th><th>First commit</th><th>Last commit</th><th class="unsortable">Age</th><th>Active days</th><th># by commits</th></tr>')
        for author in sorted(data.getAuthors()):
            info = data.getAuthorInfo(author)
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d</td><td>%d</td><td>%s</td><td>%s</td><td>%s</td><td>%d</td><td>%d</td></tr>' % (author, info['commits'], info['commits_frac'], info['lines_added'], info['lines_removed'], info['date_first'], info['date_last'], info['timedelta'], info['active_days'], info['place_by_commits']))
        f.write('</table>')

        # Authors :: Author of Month
        f.write(html_header(2, 'Author of Month'))
        f.write('<table class="sortable" id="aom">')
        f.write('<tr><th>Month</th><th>Author</th><th>Commits (%)</th><th class="unsortable">Next top 5</th></tr>')
        for yymm in reversed(sorted(data.author_of_month.keys())):
            authordict = data.author_of_month[yymm]
            authors = getkeyssortedbyvalues(authordict)
            authors.reverse()
            commits = data.author_of_month[yymm][authors[0]]
            next = ', '.join(authors[1:5])
            f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td></tr>' % (yymm, authors[0], commits, (100.0 * commits) / data.commits_by_month[yymm], data.commits_by_month[yymm], next))

        f.write('</table>')

        f.write(html_header(2, 'Author of Year'))
        f.write('<table class="sortable" id="aoy"><tr><th>Year</th><th>Author</th><th>Commits (%)</th><th class="unsortable">Next top 5</th></tr>')
        for yy in reversed(sorted(data.author_of_year.keys())):
            authordict = data.author_of_year[yy]
            authors = getkeyssortedbyvalues(authordict)
            authors.reverse()
            commits = data.author_of_year[yy][authors[0]]
            next = ', '.join(authors[1:5])
            f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td></tr>' % (yy, authors[0], commits, (100.0 * commits) / data.commits_by_year[yy], data.commits_by_year[yy], next))
        f.write('</table>')

        # Domains
        f.write(html_header(2, 'Commits by Domains'))
        domains_by_commits = getkeyssortedbyvaluekey(data.domains, 'commits')
        domains_by_commits.reverse() # most first
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Domains</th><th>Total (%)</th></tr>')
        fp = open(path + '/domains.dat', 'w')
        n = 0
        for domain in domains_by_commits:
            if n == conf['max_domains']:
                break
            commits = 0
            n += 1
            info = data.getDomainInfo(domain)
            fp.write('%s %d %d\n' % (domain, n, info['commits']))
            f.write('<tr><th>%s</th><td>%d (%.2f%%)</td></tr>' % (domain, info['commits'], (100.0 * info['commits'] / totalcommits)))
        f.write('</table></div>')
        f.write('<img src="domains.png" alt="Commits by Domains" />')
        fp.close()

        f.write('</body></html>')
        f.close()

        # Files
        f = open(path + '/files.html', 'w')
        self.printHeader(f)
        f.write('<h1>Files</h1>')
        self.printNav(f)

        f.write('<dl>\n')
        f.write('<dt>Total files</dt><dd>%d</dd>' % data.getTotalFiles())
        f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
        f.write('<dt>Average file size</dt><dd>%.2f bytes</dd>' % ((100.0 * data.getTotalLOC()) / data.getTotalFiles()))
        f.write('</dl>\n')

        # Files :: File count by date
        f.write(html_header(2, 'File count by date'))

        # use set to get rid of duplicate/unnecessary entries
        files_by_date = set()
        for stamp in sorted(data.files_by_stamp.keys()):
            files_by_date.add('%s %d' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))

        fg = open(path + '/files_by_date.dat', 'w')
        for line in sorted(list(files_by_date)):
            fg.write('%s\n' % line)
        #for stamp in sorted(data.files_by_stamp.keys()):
        #    fg.write('%s %d\n' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))
        fg.close()

        f.write('<img src="files_by_date.png" alt="Files by Date" />')

        #f.write('<h2>Average file size by date</h2>')

        # Files :: Extensions
        f.write(html_header(2, 'Extensions'))
        f.write('<table class="sortable" id="ext"><tr><th>Extension</th><th>Files (%)</th><th>Lines (%)</th><th>Lines/file</th></tr>')
        for ext in sorted(data.extensions.keys()):
            files = data.extensions[ext]['files']
            lines = data.extensions[ext]['lines']
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d (%.2f%%)</td><td>%d</td></tr>' % (ext, files, (100.0 * files) / data.getTotalFiles(), lines, (100.0 * lines) / data.getTotalLOC(), lines / files))
        f.write('</table>')

        f.write('</body></html>')
        f.close()

        # Lines
        f = open(path + '/lines.html', 'w')
        self.printHeader(f)
        f.write('<h1>Lines</h1>')
        self.printNav(f)

        f.write('<dl>\n')
        f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
        f.write('</dl>\n')

        f.write(html_header(2, 'Lines of Code'))
        f.write('<img src="lines_of_code.png" />')

        fg = open(path + '/lines_of_code.dat', 'w')
        for stamp in sorted(data.changes_by_date.keys()):
            fg.write('%d %d\n' % (stamp, data.changes_by_date[stamp]['lines']))
        fg.close()

        f.write('</body></html>')
        f.close()

        # tags.html
        f = open(path + '/tags.html', 'w')
        self.printHeader(f)
        f.write('<h1>Tags</h1>')
        self.printNav(f)

        f.write('<dl>')
        f.write('<dt>Total tags</dt><dd>%d</dd>' % len(data.tags))
        if len(data.tags) > 0:
            f.write('<dt>Average commits per tag</dt><dd>%.2f</dd>' % (1.0 * data.getTotalCommits() / len(data.tags)))
        f.write('</dl>')

        f.write('<table class="tags">')
        f.write('<tr><th>Name</th><th>Date</th><th>Commits</th><th>Authors</th></tr>')
        # sort the tags by date desc
        tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), data.tags.items()))))
        for tag in tags_sorted_by_date_desc:
            authorinfo = []
            authors_by_commits = getkeyssortedbyvalues(data.tags[tag]['authors'])
            for i in reversed(authors_by_commits):
                authorinfo.append('%s (%d)' % (i, data.tags[tag]['authors'][i]))
            f.write('<tr><td>%s</td><td>%s</td><td>%d</td><td>%s</td></tr>' % (tag, data.tags[tag]['date'], data.tags[tag]['commits'], ', '.join(authorinfo)))
        f.write('</table>')

        f.write('</body></html>')
        f.close()

        self.createGraphs(path)

    def createGraphs(self, path):
        print 'Generating graphs...'

        # hour of day
        f = open(path + '/hour_of_day.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'hour_of_day.png'
unset key
set xrange [0.5:24.5]
set xtics 4
set ylabel "Commits"
plot 'hour_of_day.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # day of week
        f = open(path + '/day_of_week.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'day_of_week.png'
unset key
set xrange [0.5:7.5]
set xtics 1
set ylabel "Commits"
plot 'day_of_week.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # Domains
        f = open(path + '/domains.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'domains.png'
unset key
unset xtics
set grid y
set ylabel "Commits"
plot 'domains.dat' using 2:3:(0.5) with boxes fs solid, '' using 2:3:1 with labels rotate by 45 offset 0,1
""")
        f.close()

        # Month of Year
        f = open(path + '/month_of_year.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'month_of_year.png'
unset key
set xrange [0.5:12.5]
set xtics 1
set ylabel "Commits"
plot 'month_of_year.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # commits_by_year_month
        f = open(path + '/commits_by_year_month.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'commits_by_year_month.png'
unset key
set xdata time
set timefmt "%Y-%m"
set format x "%Y-%m"
set xtics rotate by 90 15768000
set bmargin 5
set ylabel "Commits"
plot 'commits_by_year_month.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # commits_by_year
        f = open(path + '/commits_by_year.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'commits_by_year.png'
unset key
set xtics 1 rotate by 90
set ylabel "Commits"
set yrange [0:]
plot 'commits_by_year.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # Files by date
        f = open(path + '/files_by_date.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'files_by_date.png'
unset key
set xdata time
set timefmt "%Y-%m-%d"
set format x "%Y-%m-%d"
set ylabel "Files"
set xtics rotate by 90
set ytics autofreq
set bmargin 6
plot 'files_by_date.dat' using 1:2 w steps
""")
        f.close()

        # Lines of Code
        f = open(path + '/lines_of_code.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'lines_of_code.png'
unset key
set xdata time
set timefmt "%s"
set format x "%Y-%m-%d"
set ylabel "Lines"
set xtics rotate by 90
set bmargin 6
plot 'lines_of_code.dat' using 1:2 w lines
""")
        f.close()

        os.chdir(path)
        files = glob.glob(path + '/*.plot')
        for f in files:
            out = getpipeoutput([gnuplot_cmd + ' "%s"' % f])
            if len(out) > 0:
                print out
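
    # Each <name>.plot written above is fed to gnuplot, which reads the matching
    # <name>.dat emitted while the HTML pages were generated and writes <name>.png
    # next to them.  To regenerate a single graph by hand (illustrative):
    #
    #   cd <outputpath> && gnuplot hour_of_day.plot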

    def printHeader(self, f, title = ''):
        f.write(
"""<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>GitStats - %s</title>
<link rel="stylesheet" href="%s" type="text/css" />
<meta name="generator" content="GitStats %s" />
<script type="text/javascript" src="sortable.js"></script>
</head>
<body>
""" % (self.title, conf['style'], getversion()))

    def printNav(self, f):
        f.write("""
<div class="nav">
<ul>
<li><a href="index.html">General</a></li>
<li><a href="activity.html">Activity</a></li>
<li><a href="authors.html">Authors</a></li>
<li><a href="files.html">Files</a></li>
<li><a href="lines.html">Lines</a></li>
<li><a href="tags.html">Tags</a></li>
</ul>
</div>
""")

class GitStats:
    def run(self, args_orig):
        optlist, args = getopt.getopt(args_orig, 'c:')
        for o, v in optlist:
            if o == '-c':
                key, value = v.split('=', 1)
                if key not in conf:
                    raise KeyError('no such key "%s" in config' % key)
                # keep the type of the default value (e.g. max_domains stays an int)
                if isinstance(conf[key], int):
                    value = int(value)
                conf[key] = value

        if len(args) < 2:
            print """
Usage: gitstats [options] <gitpath> <outputpath>

Options:
-c key=value     Override configuration value

Default config values:
%s
""" % conf
            sys.exit(0)

        gitpath = args[0]
        outputpath = os.path.abspath(args[1])
        rundir = os.getcwd()

        try:
            os.makedirs(outputpath)
        except OSError:
            pass
        if not os.path.isdir(outputpath):
            print 'FATAL: Output path is not a directory or does not exist'
            sys.exit(1)

        print 'Git path: %s' % gitpath
        print 'Output path: %s' % outputpath

        os.chdir(gitpath)

        cachefile = os.path.join(outputpath, 'gitstats.cache')

        print 'Collecting data...'
        data = GitDataCollector()
        data.loadCache(cachefile)
        data.collect(gitpath)
        print 'Refining data...'
        data.saveCache(cachefile)
        data.refine()

        os.chdir(rundir)

        print 'Generating report...'
        report = HTMLReportCreator()
        report.create(data, outputpath)

        time_end = time.time()
        exectime_internal = time_end - time_start
        print 'Execution time %.5f secs, %.5f secs (%.2f %%) in external commands' % (exectime_internal, exectime_external, (100.0 * exectime_external) / exectime_internal)

g = GitStats()
g.run(sys.argv[1:])