#!/usr/bin/env python
# Copyright (c) 2007-2010 Heikki Hokkanen <hoxu@users.sf.net> & others (see doc/author.txt)
# GPLv2 / GPLv3
import datetime
import glob
import os
import pickle
import platform
import re
import shutil
import subprocess
import sys
import time
import zlib

GNUPLOT_COMMON = 'set terminal png transparent\nset size 1.0,0.5\n'
MAX_EXT_LENGTH = 10 # maximum file extension length
ON_LINUX = (platform.system() == 'Linux')

exectime_internal = 0.0
exectime_external = 0.0
time_start = time.time()

# By default, gnuplot is searched from path, but can be overridden with the
# environment variable "GNUPLOT"
gnuplot_cmd = 'gnuplot'
if 'GNUPLOT' in os.environ:
    gnuplot_cmd = os.environ['GNUPLOT']

def getpipeoutput(cmds, quiet = False):
    global exectime_external
    start = time.time()
    if not quiet and ON_LINUX and os.isatty(1):
        print '>> ' + ' | '.join(cmds),
        sys.stdout.flush()
    p0 = subprocess.Popen(cmds[0], stdout = subprocess.PIPE, shell = True)
    p = p0
    for x in cmds[1:]:
        p = subprocess.Popen(x, stdin = p0.stdout, stdout = subprocess.PIPE, shell = True)
        p0 = p
    output = p.communicate()[0]
    end = time.time()
    if not quiet:
        if ON_LINUX and os.isatty(1):
            print '\r',
        print '[%.5f] >> %s' % (end - start, ' | '.join(cmds))
    exectime_external += (end - start)
    return output.rstrip('\n')
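
# getpipeoutput() chains the given shell commands into a pipeline, like
# "cmd1 | cmd2" in a shell, and returns the last command's stdout with the
# trailing newline stripped. Illustrative use:
#
#   commit_count = int(getpipeoutput(['git rev-list HEAD', 'wc -l']))
#
# Time spent in these external commands accumulates in exectime_external so
# the final summary can report it separately from Python time.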

def getkeyssortedbyvalues(dict):
    return map(lambda el : el[1], sorted(map(lambda el : (el[1], el[0]), dict.items())))

# Sort the keys of a dict-of-dicts by one key of the inner dicts.
# For example, with d['author'] = { 'commits': 512 },
# getkeyssortedbyvaluekey(d, 'commits') returns the author names ordered by
# their commit counts (ascending).
def getkeyssortedbyvaluekey(d, key):
    return map(lambda el : el[1], sorted(map(lambda el : (d[el][key], el), d.keys())))

VERSION = 0
def getversion():
    global VERSION
    if VERSION == 0:
        VERSION = getpipeoutput(["git rev-parse --short HEAD"]).split('\n')[0]
    return VERSION

class DataCollector:
    """Manages data collection from a revision control repository."""
    def __init__(self):
        self.stamp_created = time.time()
        self.cache = {}

    # This should be the main function to extract data from the repository.
    def collect(self, dir):
        self.dir = dir
        self.projectname = os.path.basename(os.path.abspath(dir))

    # Load cacheable data
    def loadCache(self, cachefile):
        if not os.path.exists(cachefile):
            return
        print 'Loading cache...'
        f = open(cachefile)
        try:
            self.cache = pickle.loads(zlib.decompress(f.read()))
        except:
            # temporary hack to upgrade non-compressed caches
            f.seek(0)
            self.cache = pickle.load(f)
        f.close()

    # Produce any additional statistics from the extracted data.
    def refine(self):
        pass

    # Get a dictionary of statistics for the given author.
    def getAuthorInfo(self, author):
        return None

    def getActivityByDayOfWeek(self):
        return {}

    def getActivityByHourOfDay(self):
        return {}

    # Get a dictionary of statistics for the given domain.
    def getDomainInfo(self, domain):
        return None

    # Get a list of authors
    def getAuthors(self):
        return []

    def getFirstCommitDate(self):
        return datetime.datetime.now()

    def getLastCommitDate(self):
        return datetime.datetime.now()

    def getStampCreated(self):
        return self.stamp_created

    def getTags(self):
        return []

    def getTotalAuthors(self):
        return -1

    def getTotalCommits(self):
        return -1

    def getTotalFiles(self):
        return -1

    def getTotalLOC(self):
        return -1

    # Save cacheable data
    def saveCache(self, cachefile):
        print 'Saving cache...'
        f = open(cachefile, 'w')
        #pickle.dump(self.cache, f)
        data = zlib.compress(pickle.dumps(self.cache))
        f.write(data)
        f.close()
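
# The cache passed between runs (gitstats.cache in the output directory) is a
# zlib-compressed pickle of self.cache, a plain dict of lookup tables such as
# 'files_in_tree' and 'lines_in_blob' below; deleting the file simply forces a
# full recount on the next run.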

class GitDataCollector(DataCollector):
    def collect(self, dir):
        DataCollector.collect(self, dir)

        try:
            self.total_authors = int(getpipeoutput(['git log', 'git shortlog -s', 'wc -l']))
        except:
            self.total_authors = 0
        #self.total_lines = int(getoutput('git-ls-files -z |xargs -0 cat |wc -l'))

        self.activity_by_hour_of_day = {} # hour -> commits
        self.activity_by_day_of_week = {} # day -> commits
        self.activity_by_month_of_year = {} # month [1-12] -> commits
        self.activity_by_hour_of_week = {} # weekday -> hour -> commits
        self.activity_by_hour_of_day_busiest = 0
        self.activity_by_hour_of_week_busiest = 0
        self.activity_by_year_week = {} # yy_wNN -> commits
        self.activity_by_year_week_peak = 0

        self.authors = {} # name -> {commits, first_commit_stamp, last_commit_stamp, last_active_day, active_days, lines_added, lines_removed}

        # domains
        self.domains = {} # domain -> commits

        # author of the month
        self.author_of_month = {} # month -> author -> commits
        self.author_of_year = {} # year -> author -> commits
        self.commits_by_month = {} # month -> commits
        self.commits_by_year = {} # year -> commits
        self.first_commit_stamp = 0
        self.last_commit_stamp = 0
        self.last_active_day = None
        self.active_days = set()

        # lines
        self.total_lines = 0
        self.total_lines_added = 0
        self.total_lines_removed = 0

        # timezone
        self.commits_by_timezone = {} # timezone -> commits

        # tags
        self.tags = {}
        lines = getpipeoutput(['git show-ref --tags']).split('\n')
        for line in lines:
            if len(line) == 0:
                continue
            (hash, tag) = line.split(' ')

            tag = tag.replace('refs/tags/', '')
            output = getpipeoutput(['git log "%s" --pretty=format:"%%at %%an" -n 1' % hash])
            if len(output) > 0:
                parts = output.split(' ')
                stamp = 0
                try:
                    stamp = int(parts[0])
                except ValueError:
                    stamp = 0
                self.tags[tag] = { 'stamp': stamp, 'hash' : hash, 'date' : datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), 'commits': 0, 'authors': {} }
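
        # Attribute commits to tags: tags are walked oldest to newest and each
        # "git shortlog -s <tag> ^<previous tag>" call counts, per author, only
        # the commits added since the preceding tag.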
        # collect info on tags, oldest first
        tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), self.tags.items()))))
        prev = None
        for tag in reversed(tags_sorted_by_date_desc):
            cmd = 'git shortlog -s "%s"' % tag
            if prev != None:
                cmd += ' "^%s"' % prev
            output = getpipeoutput([cmd])
            if len(output) == 0:
                continue
            prev = tag
            for line in output.split('\n'):
                parts = re.split('\s+', line, 2)
                commits = int(parts[1])
                author = parts[2]
                self.tags[tag]['commits'] += commits
                self.tags[tag]['authors'][author] = commits

        # Collect revision statistics
        # Outputs "<stamp> <date> <time> <timezone> <author> '<' <mail> '>'"
        lines = getpipeoutput(['git rev-list --pretty=format:"%at %ai %an <%aE>" HEAD', 'grep -v ^commit']).split('\n')
        for line in lines:
            parts = line.split(' ', 4)
            author = ''
            try:
                stamp = int(parts[0])
            except ValueError:
                stamp = 0
            timezone = parts[3]
            author, mail = parts[4].split('<', 1)
            author = author.rstrip()
            mail = mail.rstrip('>')
            domain = '?'
            if mail.find('@') != -1:
                domain = mail.rsplit('@', 1)[1]
            date = datetime.datetime.fromtimestamp(float(stamp))
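
            # git rev-list prints commits newest first, so the first line seen
            # fixes last_commit_stamp and the last line seen (the root commit)
            # leaves its stamp in first_commit_stamp.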
            # First and last commit stamp
            if self.last_commit_stamp == 0:
                self.last_commit_stamp = stamp
            self.first_commit_stamp = stamp

            # activity
            # hour
            hour = date.hour
            self.activity_by_hour_of_day[hour] = self.activity_by_hour_of_day.get(hour, 0) + 1
            # most active hour?
            if self.activity_by_hour_of_day[hour] > self.activity_by_hour_of_day_busiest:
                self.activity_by_hour_of_day_busiest = self.activity_by_hour_of_day[hour]

            # day of week
            day = date.weekday()
            self.activity_by_day_of_week[day] = self.activity_by_day_of_week.get(day, 0) + 1

            # domain stats
            if domain not in self.domains:
                self.domains[domain] = {}
            # commits
            self.domains[domain]['commits'] = self.domains[domain].get('commits', 0) + 1

            # hour of week
            if day not in self.activity_by_hour_of_week:
                self.activity_by_hour_of_week[day] = {}
            self.activity_by_hour_of_week[day][hour] = self.activity_by_hour_of_week[day].get(hour, 0) + 1
            # most active hour?
            if self.activity_by_hour_of_week[day][hour] > self.activity_by_hour_of_week_busiest:
                self.activity_by_hour_of_week_busiest = self.activity_by_hour_of_week[day][hour]

            # month of year
            month = date.month
            self.activity_by_month_of_year[month] = self.activity_by_month_of_year.get(month, 0) + 1

            # yearly/weekly activity
            yyw = date.strftime('%Y-%W')
            self.activity_by_year_week[yyw] = self.activity_by_year_week.get(yyw, 0) + 1
            if self.activity_by_year_week_peak < self.activity_by_year_week[yyw]:
                self.activity_by_year_week_peak = self.activity_by_year_week[yyw]

            # author stats
            if author not in self.authors:
                self.authors[author] = {}
            # commits
            if 'last_commit_stamp' not in self.authors[author]:
                self.authors[author]['last_commit_stamp'] = stamp
            self.authors[author]['first_commit_stamp'] = stamp
            self.authors[author]['commits'] = self.authors[author].get('commits', 0) + 1

            # author of the month/year
            yymm = date.strftime('%Y-%m')
            if yymm in self.author_of_month:
                self.author_of_month[yymm][author] = self.author_of_month[yymm].get(author, 0) + 1
            else:
                self.author_of_month[yymm] = {}
                self.author_of_month[yymm][author] = 1
            self.commits_by_month[yymm] = self.commits_by_month.get(yymm, 0) + 1

            yy = date.year
            if yy in self.author_of_year:
                self.author_of_year[yy][author] = self.author_of_year[yy].get(author, 0) + 1
            else:
                self.author_of_year[yy] = {}
                self.author_of_year[yy][author] = 1
            self.commits_by_year[yy] = self.commits_by_year.get(yy, 0) + 1

            # authors: active days
            yymmdd = date.strftime('%Y-%m-%d')
            if 'last_active_day' not in self.authors[author]:
                self.authors[author]['last_active_day'] = yymmdd
                self.authors[author]['active_days'] = 1
            elif yymmdd != self.authors[author]['last_active_day']:
                self.authors[author]['last_active_day'] = yymmdd
                self.authors[author]['active_days'] += 1

            # project: active days
            if yymmdd != self.last_active_day:
                self.last_active_day = yymmdd
                self.active_days.add(yymmdd)

            # timezone
            self.commits_by_timezone[timezone] = self.commits_by_timezone.get(timezone, 0) + 1

        # TODO Optimize this, it's the worst bottleneck
        # outputs "<stamp> <files>" for each revision
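        # The cost is one "git ls-tree -r --name-only <tree> | wc -l" call per
        # revision (see getFilesInCommit); results are cached by tree sha1 in
        # self.cache['files_in_tree'], so re-runs with a warm gitstats.cache
        # only pay for commits made since the cache was written.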
        self.files_by_stamp = {} # stamp -> files
        revlines = getpipeoutput(['git rev-list --pretty=format:"%at %T" HEAD', 'grep -v ^commit']).strip().split('\n')
        lines = []
        for revline in revlines:
            time, rev = revline.split(' ')
            linecount = self.getFilesInCommit(rev)
            lines.append('%d %d' % (int(time), linecount))

        self.total_commits = len(lines)
        for line in lines:
            parts = line.split(' ')
            if len(parts) != 2:
                continue
            (stamp, files) = parts[0:2]
            try:
                self.files_by_stamp[int(stamp)] = int(files)
            except ValueError:
                print 'Warning: failed to parse line "%s"' % line

        # extensions
        self.extensions = {} # extension -> files, lines
        lines = getpipeoutput(['git ls-tree -r -z HEAD']).split('\000')
        # "-z" NUL-terminates every entry, so splitting leaves a trailing empty
        # string; count only the non-empty entries.
        self.total_files = len([line for line in lines if len(line) > 0])
        for line in lines:
            if len(line) == 0:
                continue
            parts = re.split('\s+', line, 4)
            sha1 = parts[2]
            filename = parts[3]

            if filename.find('.') == -1 or filename.rfind('.') == 0:
                ext = ''
            else:
                ext = filename[(filename.rfind('.') + 1):]
            if len(ext) > MAX_EXT_LENGTH:
                ext = ''

            if ext not in self.extensions:
                self.extensions[ext] = {'files': 0, 'lines': 0}

            self.extensions[ext]['files'] += 1
            try:
                self.extensions[ext]['lines'] += self.getLinesInBlob(sha1)
            except:
                print 'Warning: Could not count lines for file "%s"' % line

        # line statistics
        # outputs:
        #  N files changed, N insertions (+), N deletions(-)
        #  <stamp> <author>
        self.changes_by_date = {} # stamp -> { files, ins, del }
        lines = getpipeoutput(['git log --shortstat --pretty=format:"%at %an"']).split('\n')
        lines.reverse()
        files = 0; inserted = 0; deleted = 0; total_lines = 0
        author = None
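
        # git log prints, newest first, a "<stamp> <author>" header for each
        # commit followed by its "N files changed, ..." summary. After the
        # reverse above the summary line is seen before its header, so when a
        # header is parsed, files/inserted/deleted hold the values from the
        # summary just read and total_lines is the running line count.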
        for line in lines:
            if len(line) == 0:
                continue

            # <stamp> <author>
            if line.find('files changed,') == -1:
                pos = line.find(' ')
                if pos != -1:
                    try:
                        (stamp, author) = (int(line[:pos]), line[pos+1:])
                        self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted, 'lines': total_lines }
                        if author not in self.authors:
                            self.authors[author] = { 'lines_added' : 0, 'lines_removed' : 0 }
                        self.authors[author]['lines_added'] = self.authors[author].get('lines_added', 0) + inserted
                        self.authors[author]['lines_removed'] = self.authors[author].get('lines_removed', 0) + deleted
                    except ValueError:
                        print 'Warning: unexpected line "%s"' % line
                else:
                    print 'Warning: unexpected line "%s"' % line
            else:
                numbers = re.findall('\d+', line)
                if len(numbers) == 3:
                    (files, inserted, deleted) = map(lambda el : int(el), numbers)
                    total_lines += inserted
                    total_lines -= deleted
                    self.total_lines_added += inserted
                    self.total_lines_removed += deleted
                else:
                    print 'Warning: failed to handle line "%s"' % line
                    (files, inserted, deleted) = (0, 0, 0)
                #self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted }
        self.total_lines = total_lines

    def refine(self):
        # authors
        # name -> {place_by_commits, commits_frac, date_first, date_last, timedelta}
        authors_by_commits = getkeyssortedbyvaluekey(self.authors, 'commits')
        authors_by_commits.reverse() # most first
        for i, name in enumerate(authors_by_commits):
            self.authors[name]['place_by_commits'] = i + 1

        for name in self.authors.keys():
            a = self.authors[name]
            a['commits_frac'] = (100 * float(a['commits'])) / self.getTotalCommits()
            date_first = datetime.datetime.fromtimestamp(a['first_commit_stamp'])
            date_last = datetime.datetime.fromtimestamp(a['last_commit_stamp'])
            delta = date_last - date_first
            a['date_first'] = date_first.strftime('%Y-%m-%d')
            a['date_last'] = date_last.strftime('%Y-%m-%d')
            a['timedelta'] = delta

    def getActiveDays(self):
        return self.active_days

    def getActivityByDayOfWeek(self):
        return self.activity_by_day_of_week

    def getActivityByHourOfDay(self):
        return self.activity_by_hour_of_day

    def getAuthorInfo(self, author):
        return self.authors[author]

    def getAuthors(self):
        return self.authors.keys()

    def getCommitDeltaDays(self):
        return (self.last_commit_stamp - self.first_commit_stamp) / 86400

    def getDomainInfo(self, domain):
        return self.domains[domain]

    def getDomains(self):
        return self.domains.keys()

    def getFilesInCommit(self, rev):
        try:
            res = self.cache['files_in_tree'][rev]
        except:
            res = int(getpipeoutput(['git ls-tree -r --name-only "%s"' % rev, 'wc -l']).split('\n')[0])
            if 'files_in_tree' not in self.cache:
                self.cache['files_in_tree'] = {}
            self.cache['files_in_tree'][rev] = res

        return res

    def getFirstCommitDate(self):
        return datetime.datetime.fromtimestamp(self.first_commit_stamp)

    def getLastCommitDate(self):
        return datetime.datetime.fromtimestamp(self.last_commit_stamp)
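
    # getLinesInBlob counts lines with "git cat-file blob <sha1> | wc -l" and
    # caches the result by blob sha1, so files that have not changed since the
    # cache was written cost nothing on later runs.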
    def getLinesInBlob(self, sha1):
        try:
            res = self.cache['lines_in_blob'][sha1]
        except:
            res = int(getpipeoutput(['git cat-file blob %s' % sha1, 'wc -l']).split()[0])
            if 'lines_in_blob' not in self.cache:
                self.cache['lines_in_blob'] = {}
            self.cache['lines_in_blob'][sha1] = res
        return res

    def getTags(self):
        lines = getpipeoutput(['git show-ref --tags', 'cut -d/ -f3'])
        return lines.split('\n')

    def getTagDate(self, tag):
        return self.revToDate('tags/' + tag)

    def getTotalAuthors(self):
        return self.total_authors

    def getTotalCommits(self):
        return self.total_commits

    def getTotalFiles(self):
        return self.total_files

    def getTotalLOC(self):
        return self.total_lines

    def revToDate(self, rev):
        stamp = int(getpipeoutput(['git log --pretty=format:%%at "%s" -n 1' % rev]))
        return datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d')

class ReportCreator:
    """Creates the actual report based on given data."""
    def __init__(self):
        pass

    def create(self, data, path):
        self.data = data
        self.path = path

def html_linkify(text):
    return text.lower().replace(' ', '_')

def html_header(level, text):
    name = html_linkify(text)
    return '\n<h%d><a href="#%s" name="%s">%s</a></h%d>\n\n' % (level, name, name, text, level)
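
# For example, html_header(2, 'Hour of Day') returns
# '\n<h2><a href="#hour_of_day" name="hour_of_day">Hour of Day</a></h2>\n\n',
# so every section heading doubles as a link target.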

class HTMLReportCreator(ReportCreator):
    def create(self, data, path):
        ReportCreator.create(self, data, path)
        self.title = data.projectname

        # copy static files. Looks in the binary directory, ../share/gitstats and /usr/share/gitstats
        binarypath = os.path.dirname(os.path.abspath(__file__))
        secondarypath = os.path.join(binarypath, '..', 'share', 'gitstats')
        basedirs = [binarypath, secondarypath, '/usr/share/gitstats']
        for file in ('gitstats.css', 'sortable.js', 'arrow-up.gif', 'arrow-down.gif', 'arrow-none.gif'):
            for base in basedirs:
                src = base + '/' + file
                if os.path.exists(src):
                    shutil.copyfile(src, path + '/' + file)
                    break
            else:
                print 'Warning: "%s" not found, so not copied (searched: %s)' % (file, basedirs)

        f = open(path + "/index.html", 'w')
        format = '%Y-%m-%d %H:%M:%S'
        self.printHeader(f)

        f.write('<h1>GitStats - %s</h1>' % data.projectname)

        self.printNav(f)

        f.write('<dl>')
        f.write('<dt>Project name</dt><dd>%s</dd>' % (data.projectname))
        f.write('<dt>Generated</dt><dd>%s (in %d seconds)</dd>' % (datetime.datetime.now().strftime(format), time.time() - data.getStampCreated()))
        f.write('<dt>Generator</dt><dd><a href="http://gitstats.sourceforge.net/">GitStats</a> (version %s)</dd>' % getversion())
        f.write('<dt>Report Period</dt><dd>%s to %s</dd>' % (data.getFirstCommitDate().strftime(format), data.getLastCommitDate().strftime(format)))
        f.write('<dt>Age</dt><dd>%d days, %d active days (%3.2f%%)</dd>' % (data.getCommitDeltaDays(), len(data.getActiveDays()), (100.0 * len(data.getActiveDays()) / data.getCommitDeltaDays())))
        f.write('<dt>Total Files</dt><dd>%s</dd>' % data.getTotalFiles())
        f.write('<dt>Total Lines of Code</dt><dd>%s (%d added, %d removed)</dd>' % (data.getTotalLOC(), data.total_lines_added, data.total_lines_removed))
        f.write('<dt>Total Commits</dt><dd>%s (average %.1f commits per active day, %.1f per all days)</dd>' % (data.getTotalCommits(), float(data.getTotalCommits()) / len(data.getActiveDays()), float(data.getTotalCommits()) / data.getCommitDeltaDays()))
        f.write('<dt>Authors</dt><dd>%s</dd>' % data.getTotalAuthors())
        f.write('</dl>')

        f.write('</body>\n</html>')
        f.close()

        # Activity
        f = open(path + '/activity.html', 'w')
        self.printHeader(f)
        f.write('<h1>Activity</h1>')
        self.printNav(f)

        #f.write('<h2>Last 30 days</h2>')

        #f.write('<h2>Last 12 months</h2>')

        # Weekly activity
        WEEKS = 32
        f.write(html_header(2, 'Weekly activity'))
        f.write('<p>Last %d weeks</p>' % WEEKS)

        # generate weeks to show (previous N weeks from now)
        now = datetime.datetime.now()
        deltaweek = datetime.timedelta(7)
        weeks = []
        stampcur = now
        for i in range(0, WEEKS):
            weeks.insert(0, stampcur.strftime('%Y-%W'))
            stampcur -= deltaweek

        # top row: commits & bar
        f.write('<table class="noborders"><tr>')
        for i in range(0, WEEKS):
            commits = 0
            if weeks[i] in data.activity_by_year_week:
                commits = data.activity_by_year_week[weeks[i]]

            percentage = 0
            if weeks[i] in data.activity_by_year_week:
                percentage = float(data.activity_by_year_week[weeks[i]]) / data.activity_by_year_week_peak
            height = max(1, int(200 * percentage))
            f.write('<td style="text-align: center; vertical-align: bottom">%d<div style="display: block; background-color: red; width: 20px; height: %dpx"></div></td>' % (commits, height))

        # bottom row: how many weeks ago (1 = latest week)
        f.write('</tr><tr>')
        for i in range(0, WEEKS):
            f.write('<td>%s</td>' % (WEEKS - i))
        f.write('</tr></table>')

        # Hour of Day
        f.write(html_header(2, 'Hour of Day'))
        hour_of_day = data.getActivityByHourOfDay()
        f.write('<table><tr><th>Hour</th>')
        for i in range(0, 24):
            f.write('<th>%d</th>' % i)
        f.write('</tr>\n<tr><th>Commits</th>')
        fp = open(path + '/hour_of_day.dat', 'w')
        for i in range(0, 24):
            if i in hour_of_day:
                r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
                f.write('<td style="background-color: rgb(%d, 0, 0)">%d</td>' % (r, hour_of_day[i]))
                fp.write('%d %d\n' % (i, hour_of_day[i]))
            else:
                f.write('<td>0</td>')
                fp.write('%d 0\n' % i)
        fp.close()
        f.write('</tr>\n<tr><th>%</th>')
        totalcommits = data.getTotalCommits()
        for i in range(0, 24):
            if i in hour_of_day:
                r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
                f.write('<td style="background-color: rgb(%d, 0, 0)">%.2f</td>' % (r, (100.0 * hour_of_day[i]) / totalcommits))
            else:
                f.write('<td>0.00</td>')
        f.write('</tr></table>')
        f.write('<img src="hour_of_day.png" alt="Hour of Day" />')
        fg = open(path + '/hour_of_day.dat', 'w')
        for i in range(0, 24):
            if i in hour_of_day:
                fg.write('%d %d\n' % (i + 1, hour_of_day[i]))
            else:
                fg.write('%d 0\n' % (i + 1))
        fg.close()
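
        # Note: hour_of_day.dat is written twice; this second pass (hours
        # numbered 1..24) is what remains on disk and matches the
        # xrange [0.5:24.5] used by hour_of_day.plot below.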

        # Day of Week
        f.write(html_header(2, 'Day of Week'))
        day_of_week = data.getActivityByDayOfWeek()
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Day</th><th>Total (%)</th></tr>')
        fp = open(path + '/day_of_week.dat', 'w')
        for d in range(0, 7):
            commits = 0
            if d in day_of_week:
                commits = day_of_week[d]
            fp.write('%d %d\n' % (d + 1, commits))
            f.write('<tr>')
            f.write('<th>%d</th>' % (d + 1))
            if d in day_of_week:
                f.write('<td>%d (%.2f%%)</td>' % (day_of_week[d], (100.0 * day_of_week[d]) / totalcommits))
            else:
                f.write('<td>0</td>')
            f.write('</tr>')
        f.write('</table></div>')
        f.write('<img src="day_of_week.png" alt="Day of Week" />')
        fp.close()

        # Hour of Week
        f.write(html_header(2, 'Hour of Week'))
        f.write('<table>')

        f.write('<tr><th>Weekday</th>')
        for hour in range(0, 24):
            f.write('<th>%d</th>' % (hour))
        f.write('</tr>')

        for weekday in range(0, 7):
            f.write('<tr><th>%d</th>' % (weekday + 1))
            for hour in range(0, 24):
                try:
                    commits = data.activity_by_hour_of_week[weekday][hour]
                except KeyError:
                    commits = 0
                if commits != 0:
                    f.write('<td')
                    r = 127 + int((float(commits) / data.activity_by_hour_of_week_busiest) * 128)
                    f.write(' style="background-color: rgb(%d, 0, 0)"' % r)
                    f.write('>%d</td>' % commits)
                else:
                    f.write('<td></td>')
            f.write('</tr>')

        f.write('</table>')

        # Month of Year
        f.write(html_header(2, 'Month of Year'))
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Month</th><th>Commits (%)</th></tr>')
        fp = open(path + '/month_of_year.dat', 'w')
        for mm in range(1, 13):
            commits = 0
            if mm in data.activity_by_month_of_year:
                commits = data.activity_by_month_of_year[mm]
            f.write('<tr><td>%d</td><td>%d (%.2f %%)</td></tr>' % (mm, commits, (100.0 * commits) / data.getTotalCommits()))
            fp.write('%d %d\n' % (mm, commits))
        fp.close()
        f.write('</table></div>')
        f.write('<img src="month_of_year.png" alt="Month of Year" />')

        # Commits by year/month
        f.write(html_header(2, 'Commits by year/month'))
        f.write('<div class="vtable"><table><tr><th>Month</th><th>Commits</th></tr>')
        for yymm in reversed(sorted(data.commits_by_month.keys())):
            f.write('<tr><td>%s</td><td>%d</td></tr>' % (yymm, data.commits_by_month[yymm]))
        f.write('</table></div>')
        f.write('<img src="commits_by_year_month.png" alt="Commits by year/month" />')
        fg = open(path + '/commits_by_year_month.dat', 'w')
        for yymm in sorted(data.commits_by_month.keys()):
            fg.write('%s %s\n' % (yymm, data.commits_by_month[yymm]))
        fg.close()

        # Commits by year
        f.write(html_header(2, 'Commits by Year'))
        f.write('<div class="vtable"><table><tr><th>Year</th><th>Commits (% of all)</th></tr>')
        for yy in reversed(sorted(data.commits_by_year.keys())):
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td></tr>' % (yy, data.commits_by_year[yy], (100.0 * data.commits_by_year[yy]) / data.getTotalCommits()))
        f.write('</table></div>')
        f.write('<img src="commits_by_year.png" alt="Commits by Year" />')
        fg = open(path + '/commits_by_year.dat', 'w')
        for yy in sorted(data.commits_by_year.keys()):
            fg.write('%d %d\n' % (yy, data.commits_by_year[yy]))
        fg.close()

        # Commits by timezone
        f.write(html_header(2, 'Commits by Timezone'))
        f.write('<table>')
        f.write('<tr><th>Timezone</th><th>Commits</th></tr>')
        max_commits_on_tz = max(data.commits_by_timezone.values())
        for i in sorted(data.commits_by_timezone.keys(), key = lambda n : int(n)):
            commits = data.commits_by_timezone[i]
            r = 127 + int((float(commits) / max_commits_on_tz) * 128)
            f.write('<tr><th>%s</th><td style="background-color: rgb(%d, 0, 0)">%d</td></tr>' % (i, r, commits))
        f.write('</table>')

        f.write('</body></html>')
        f.close()

        # Authors
        f = open(path + '/authors.html', 'w')
        self.printHeader(f)

        f.write('<h1>Authors</h1>')
        self.printNav(f)

        # Authors :: List of authors
        f.write(html_header(2, 'List of Authors'))

        f.write('<table class="authors sortable" id="authors">')
        f.write('<tr><th>Author</th><th>Commits (%)</th><th>+ lines</th><th>- lines</th><th>First commit</th><th>Last commit</th><th class="unsortable">Age</th><th>Active days</th><th># by commits</th></tr>')
        for author in sorted(data.getAuthors()):
            info = data.getAuthorInfo(author)
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d</td><td>%d</td><td>%s</td><td>%s</td><td>%s</td><td>%d</td><td>%d</td></tr>' % (author, info['commits'], info['commits_frac'], info['lines_added'], info['lines_removed'], info['date_first'], info['date_last'], info['timedelta'], info['active_days'], info['place_by_commits']))
        f.write('</table>')

        # Authors :: Author of Month
        f.write(html_header(2, 'Author of Month'))
        f.write('<table class="sortable" id="aom">')
        f.write('<tr><th>Month</th><th>Author</th><th>Commits (%)</th><th class="unsortable">Next top 5</th></tr>')
        for yymm in reversed(sorted(data.author_of_month.keys())):
            authordict = data.author_of_month[yymm]
            authors = getkeyssortedbyvalues(authordict)
            authors.reverse()
            commits = data.author_of_month[yymm][authors[0]]
            next = ', '.join(authors[1:5])
            f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td></tr>' % (yymm, authors[0], commits, (100.0 * commits) / data.commits_by_month[yymm], data.commits_by_month[yymm], next))
        f.write('</table>')

        f.write(html_header(2, 'Author of Year'))
        f.write('<table class="sortable" id="aoy"><tr><th>Year</th><th>Author</th><th>Commits (%)</th><th class="unsortable">Next top 5</th></tr>')
        for yy in reversed(sorted(data.author_of_year.keys())):
            authordict = data.author_of_year[yy]
            authors = getkeyssortedbyvalues(authordict)
            authors.reverse()
            commits = data.author_of_year[yy][authors[0]]
            next = ', '.join(authors[1:5])
            f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td></tr>' % (yy, authors[0], commits, (100.0 * commits) / data.commits_by_year[yy], data.commits_by_year[yy], next))
        f.write('</table>')

        # Domains
        f.write(html_header(2, 'Commits by Domains'))
        domains_by_commits = getkeyssortedbyvaluekey(data.domains, 'commits')
        domains_by_commits.reverse() # most first
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Domains</th><th>Total (%)</th></tr>')
        fp = open(path + '/domains.dat', 'w')
        n = 0
        max_domains = 10
        for domain in domains_by_commits:
            if n == max_domains:
                break
            commits = 0
            n += 1
            info = data.getDomainInfo(domain)
            fp.write('%s %d %d\n' % (domain, n, info['commits']))
            f.write('<tr><th>%s</th><td>%d (%.2f%%)</td></tr>' % (domain, info['commits'], (100.0 * info['commits'] / totalcommits)))
        f.write('</table></div>')
        f.write('<img src="domains.png" alt="Commits by Domains" />')
        fp.close()

        f.write('</body></html>')
        f.close()

        # Files
        f = open(path + '/files.html', 'w')
        self.printHeader(f)
        f.write('<h1>Files</h1>')
        self.printNav(f)

        f.write('<dl>\n')
        f.write('<dt>Total files</dt><dd>%d</dd>' % data.getTotalFiles())
        f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
        f.write('<dt>Average lines per file</dt><dd>%.2f</dd>' % (float(data.getTotalLOC()) / data.getTotalFiles()))
        f.write('</dl>\n')

        # Files :: File count by date
        f.write(html_header(2, 'File count by date'))

        # use set to get rid of duplicate/unnecessary entries
        files_by_date = set()
        for stamp in sorted(data.files_by_stamp.keys()):
            files_by_date.add('%s %d' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))

        fg = open(path + '/files_by_date.dat', 'w')
        for line in sorted(list(files_by_date)):
            fg.write('%s\n' % line)
        #for stamp in sorted(data.files_by_stamp.keys()):
        #    fg.write('%s %d\n' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))
        fg.close()

        f.write('<img src="files_by_date.png" alt="Files by Date" />')

        #f.write('<h2>Average file size by date</h2>')

        # Files :: Extensions
        f.write(html_header(2, 'Extensions'))
        f.write('<table class="sortable" id="ext"><tr><th>Extension</th><th>Files (%)</th><th>Lines (%)</th><th>Lines/file</th></tr>')
        for ext in sorted(data.extensions.keys()):
            files = data.extensions[ext]['files']
            lines = data.extensions[ext]['lines']
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d (%.2f%%)</td><td>%d</td></tr>' % (ext, files, (100.0 * files) / data.getTotalFiles(), lines, (100.0 * lines) / data.getTotalLOC(), lines / files))
        f.write('</table>')

        f.write('</body></html>')
        f.close()

        # Lines
        f = open(path + '/lines.html', 'w')
        self.printHeader(f)
        f.write('<h1>Lines</h1>')
        self.printNav(f)

        f.write('<dl>\n')
        f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
        f.write('</dl>\n')

        f.write(html_header(2, 'Lines of Code'))
        f.write('<img src="lines_of_code.png" />')

        fg = open(path + '/lines_of_code.dat', 'w')
        for stamp in sorted(data.changes_by_date.keys()):
            fg.write('%d %d\n' % (stamp, data.changes_by_date[stamp]['lines']))
        fg.close()

        f.write('</body></html>')
        f.close()

        # tags.html
        f = open(path + '/tags.html', 'w')
        self.printHeader(f)
        f.write('<h1>Tags</h1>')
        self.printNav(f)

        f.write('<dl>')
        f.write('<dt>Total tags</dt><dd>%d</dd>' % len(data.tags))
        if len(data.tags) > 0:
            f.write('<dt>Average commits per tag</dt><dd>%.2f</dd>' % (1.0 * data.getTotalCommits() / len(data.tags)))
        f.write('</dl>')

        f.write('<table class="tags">')
        f.write('<tr><th>Name</th><th>Date</th><th>Commits</th><th>Authors</th></tr>')
        # sort the tags by date desc
        tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), data.tags.items()))))
        for tag in tags_sorted_by_date_desc:
            authorinfo = []
            authors_by_commits = getkeyssortedbyvalues(data.tags[tag]['authors'])
            for i in reversed(authors_by_commits):
                authorinfo.append('%s (%d)' % (i, data.tags[tag]['authors'][i]))
            f.write('<tr><td>%s</td><td>%s</td><td>%d</td><td>%s</td></tr>' % (tag, data.tags[tag]['date'], data.tags[tag]['commits'], ', '.join(authorinfo)))
        f.write('</table>')

        f.write('</body></html>')
        f.close()

        self.createGraphs(path)

    def createGraphs(self, path):
        print 'Generating graphs...'

        # hour of day
        f = open(path + '/hour_of_day.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'hour_of_day.png'
unset key
set xrange [0.5:24.5]
set xtics 4
set ylabel "Commits"
plot 'hour_of_day.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # day of week
        f = open(path + '/day_of_week.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'day_of_week.png'
unset key
set xrange [0.5:7.5]
set xtics 1
set ylabel "Commits"
plot 'day_of_week.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # Domains
        f = open(path + '/domains.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'domains.png'
unset key
unset xtics
set grid y
set ylabel "Commits"
plot 'domains.dat' using 2:3:(0.5) with boxes fs solid, '' using 2:3:1 with labels rotate by 45 offset 0,1
""")
        f.close()

        # Month of Year
        f = open(path + '/month_of_year.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'month_of_year.png'
unset key
set xrange [0.5:12.5]
set xtics 1
set ylabel "Commits"
plot 'month_of_year.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # commits_by_year_month
        f = open(path + '/commits_by_year_month.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'commits_by_year_month.png'
unset key
set xdata time
set timefmt "%Y-%m"
set format x "%Y-%m"
set xtics rotate by 90 15768000
set bmargin 5
set ylabel "Commits"
plot 'commits_by_year_month.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # commits_by_year
        f = open(path + '/commits_by_year.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'commits_by_year.png'
unset key
set xtics 1 rotate by 90
set ylabel "Commits"
set yrange [0:]
plot 'commits_by_year.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # Files by date
        f = open(path + '/files_by_date.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'files_by_date.png'
unset key
set xdata time
set timefmt "%Y-%m-%d"
set format x "%Y-%m-%d"
set ylabel "Files"
set xtics rotate by 90
set ytics autofreq
set bmargin 6
plot 'files_by_date.dat' using 1:2 w steps
""")
        f.close()

        # Lines of Code
        f = open(path + '/lines_of_code.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'lines_of_code.png'
unset key
set xdata time
set timefmt "%s"
set format x "%Y-%m-%d"
set ylabel "Lines"
set xtics rotate by 90
set bmargin 6
plot 'lines_of_code.dat' using 1:2 w lines
""")
        f.close()
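
        # Feed every generated .plot file to gnuplot; chdir() into the output
        # directory first so the relative .dat and .png paths inside the
        # scripts resolve there.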
        os.chdir(path)
        files = glob.glob(path + '/*.plot')
        for f in files:
            out = getpipeoutput([gnuplot_cmd + ' "%s"' % f])
            if len(out) > 0:
                print out

    def printHeader(self, f, title = ''):
        f.write(
"""<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
    <title>GitStats - %s</title>
    <link rel="stylesheet" href="gitstats.css" type="text/css" />
    <meta name="generator" content="GitStats %s" />
    <script type="text/javascript" src="sortable.js"></script>
</head>
<body>
""" % (self.title, getversion()))

    def printNav(self, f):
        f.write("""
<div class="nav">
<ul>
<li><a href="index.html">General</a></li>
<li><a href="activity.html">Activity</a></li>
<li><a href="authors.html">Authors</a></li>
<li><a href="files.html">Files</a></li>
<li><a href="lines.html">Lines</a></li>
<li><a href="tags.html">Tags</a></li>
</ul>
</div>
""")

class GitStats:
    def run(self, args):
        if len(args) < 2:
            print """
Usage: gitstats [options] <gitpath> <outputpath>

Options:
"""
            sys.exit(0)

        gitpath = args[0]
        outputpath = os.path.abspath(args[1])
        rundir = os.getcwd()

        try:
            os.makedirs(outputpath)
        except OSError:
            pass
        if not os.path.isdir(outputpath):
            print 'FATAL: Output path is not a directory or does not exist'
            sys.exit(1)

        print 'Git path: %s' % gitpath
        print 'Output path: %s' % outputpath

        os.chdir(gitpath)

        cachefile = os.path.join(outputpath, 'gitstats.cache')

        print 'Collecting data...'
        data = GitDataCollector()
        data.loadCache(cachefile)
        data.collect(gitpath)
        print 'Refining data...'
        data.saveCache(cachefile)
        data.refine()

        os.chdir(rundir)

        print 'Generating report...'
        report = HTMLReportCreator()
        report.create(data, outputpath)

        time_end = time.time()
        exectime_internal = time_end - time_start
        print 'Execution time %.5f secs, %.5f secs (%.2f %%) in external commands' % (exectime_internal, exectime_external, (100.0 * exectime_external) / exectime_internal)

g = GitStats()
g.run(sys.argv[1:])