Remove "total_authors = 0" fallback.
[gitstats.git] / gitstats
blob7303957f7fda5a434c11355ff71a01d345a45535
#!/usr/bin/env python
# Copyright (c) 2007-2012 Heikki Hokkanen <hoxu@users.sf.net> & others (see doc/author.txt)
# GPLv2 / GPLv3
import datetime
import getopt
import glob
import os
import pickle
import platform
import re
import shutil
import subprocess
import sys
import time
import zlib

os.environ['LC_ALL'] = 'C'

GNUPLOT_COMMON = 'set terminal png transparent size 640,240\nset size 1.0,1.0\n'
ON_LINUX = (platform.system() == 'Linux')
WEEKDAYS = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')

exectime_internal = 0.0
exectime_external = 0.0
time_start = time.time()

# By default, gnuplot is searched from path, but can be overridden with the
# environment variable "GNUPLOT"
gnuplot_cmd = 'gnuplot'
if 'GNUPLOT' in os.environ:
    gnuplot_cmd = os.environ['GNUPLOT']

conf = {
    'max_domains': 10,
    'max_ext_length': 10,
    'style': 'gitstats.css',
    'max_authors': 20,
    'authors_top': 5,
    'commit_begin': '',
    'commit_end': 'HEAD',
    'linear_linestats': 1,
    'project_name': '',
}
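# Any of the keys above can be overridden on the command line with
# "-c key=value" (parsed in GitStats.run below); values for keys that default
# to an integer are converted with int() automatically.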
def getpipeoutput(cmds, quiet = False):
    global exectime_external
    start = time.time()
    if not quiet and ON_LINUX and os.isatty(1):
        print '>> ' + ' | '.join(cmds),
        sys.stdout.flush()
    p0 = subprocess.Popen(cmds[0], stdout = subprocess.PIPE, shell = True)
    p = p0
    for x in cmds[1:]:
        p = subprocess.Popen(x, stdin = p0.stdout, stdout = subprocess.PIPE, shell = True)
        p0 = p
    output = p.communicate()[0]
    end = time.time()
    if not quiet:
        if ON_LINUX and os.isatty(1):
            print '\r',
        print '[%.5f] >> %s' % (end - start, ' | '.join(cmds))
    exectime_external += (end - start)
    return output.rstrip('\n')
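# Example (illustrative): getpipeoutput(['git rev-list HEAD', 'wc -l']) runs the
# equivalent of "git rev-list HEAD | wc -l" and returns its stdout with the
# trailing newline stripped. The time spent is added to exectime_external.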
def getcommitrange(defaultrange = 'HEAD', end_only = False):
    if len(conf['commit_end']) > 0:
        if end_only or len(conf['commit_begin']) == 0:
            return conf['commit_end']
        return '%s..%s' % (conf['commit_begin'], conf['commit_end'])
    return defaultrange
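# Example (illustrative): with conf['commit_begin'] = 'v1.0' and
# conf['commit_end'] = 'HEAD', getcommitrange() returns 'v1.0..HEAD';
# with end_only = True it returns just 'HEAD'.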
def getkeyssortedbyvalues(dict):
    return map(lambda el : el[1], sorted(map(lambda el : (el[1], el[0]), dict.items())))

# Sort the keys of d ascending by d[k][key],
# e.g. d['author'] = { 'commits': 512 }: getkeyssortedbyvaluekey(d, 'commits')
def getkeyssortedbyvaluekey(d, key):
    return map(lambda el : el[1], sorted(map(lambda el : (d[el][key], el), d.keys())))

def getstatsummarycounts(line):
    numbers = re.findall('\d+', line)
    if len(numbers) == 1:
        # neither insertions nor deletions: probably happens only for "0 files changed"
        numbers.append(0)
        numbers.append(0)
    elif len(numbers) == 2 and line.find('(+)') != -1:
        numbers.append(0) # only insertions were printed on line
    elif len(numbers) == 2 and line.find('(-)') != -1:
        numbers.insert(1, 0) # only deletions were printed on line
    return numbers
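# Example (illustrative): for a git shortstat summary such as
# " 3 files changed, 9 insertions(+), 2 deletions(-)" this returns ['3', '9', '2'];
# for " 1 file changed, 5 insertions(+)" the missing deletion count is padded
# with a zero, so callers always get three values (files, insertions, deletions).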
VERSION = 0
def getversion():
    global VERSION
    if VERSION == 0:
        VERSION = getpipeoutput(["git rev-parse --short %s" % getcommitrange('HEAD')]).split('\n')[0]
    return VERSION

def getgitversion():
    return getpipeoutput(['git --version']).split('\n')[0]

def getgnuplotversion():
    return getpipeoutput(['gnuplot --version']).split('\n')[0]
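# getversion() caches the abbreviated hash of the newest commit in the report
# range; it is what appears as the GitStats "version" in the generated pages
# (the Generator line on index.html and the meta tag written by printHeader).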
class DataCollector:
    """Manages data collection from a revision control repository."""
    def __init__(self):
        self.stamp_created = time.time()
        self.cache = {}
        self.total_authors = 0
        self.activity_by_hour_of_day = {} # hour -> commits
        self.activity_by_day_of_week = {} # day -> commits
        self.activity_by_month_of_year = {} # month [1-12] -> commits
        self.activity_by_hour_of_week = {} # weekday -> hour -> commits
        self.activity_by_hour_of_day_busiest = 0
        self.activity_by_hour_of_week_busiest = 0
        self.activity_by_year_week = {} # yy_wNN -> commits
        self.activity_by_year_week_peak = 0

        self.authors = {} # name -> {commits, first_commit_stamp, last_commit_stamp, last_active_day, active_days, lines_added, lines_removed}

        self.total_commits = 0
        self.total_files = 0
        self.authors_by_commits = 0

        # domains
        self.domains = {} # domain -> commits

        # author of the month
        self.author_of_month = {} # month -> author -> commits
        self.author_of_year = {} # year -> author -> commits
        self.commits_by_month = {} # month -> commits
        self.commits_by_year = {} # year -> commits
        self.lines_added_by_month = {} # month -> lines added
        self.lines_added_by_year = {} # year -> lines added
        self.lines_removed_by_month = {} # month -> lines removed
        self.lines_removed_by_year = {} # year -> lines removed
        self.first_commit_stamp = 0
        self.last_commit_stamp = 0
        self.last_active_day = None
        self.active_days = set()

        # lines
        self.total_lines = 0
        self.total_lines_added = 0
        self.total_lines_removed = 0

        # size
        self.total_size = 0

        # timezone
        self.commits_by_timezone = {} # timezone -> commits

        # tags
        self.tags = {}

        self.files_by_stamp = {} # stamp -> files

        # extensions
        self.extensions = {} # extension -> files, lines

        # line statistics
        self.changes_by_date = {} # stamp -> { files, ins, del }
    # This should be the main function to extract data from the repository.
    def collect(self, dir):
        self.dir = dir
        if len(conf['project_name']) == 0:
            self.projectname = os.path.basename(os.path.abspath(dir))
        else:
            self.projectname = conf['project_name']

    # Load cacheable data
    def loadCache(self, cachefile):
        if not os.path.exists(cachefile):
            return
        print 'Loading cache...'
        f = open(cachefile, 'rb')
        try:
            self.cache = pickle.loads(zlib.decompress(f.read()))
        except:
            # temporary hack to upgrade non-compressed caches
            f.seek(0)
            self.cache = pickle.load(f)
        f.close()

    # Produce any additional statistics from the extracted data.
    def refine(self):
        pass

    # Get a dictionary of author statistics.
    def getAuthorInfo(self, author):
        return None

    def getActivityByDayOfWeek(self):
        return {}

    def getActivityByHourOfDay(self):
        return {}

    # Get a dictionary of domain statistics.
    def getDomainInfo(self, domain):
        return None

    # Get a list of authors
    def getAuthors(self):
        return []

    def getFirstCommitDate(self):
        return datetime.datetime.now()

    def getLastCommitDate(self):
        return datetime.datetime.now()

    def getStampCreated(self):
        return self.stamp_created

    def getTags(self):
        return []

    def getTotalAuthors(self):
        return -1

    def getTotalCommits(self):
        return -1

    def getTotalFiles(self):
        return -1

    def getTotalLOC(self):
        return -1

    # Save cacheable data
    def saveCache(self, cachefile):
        print 'Saving cache...'
        f = open(cachefile, 'wb')
        #pickle.dump(self.cache, f)
        data = zlib.compress(pickle.dumps(self.cache))
        f.write(data)
        f.close()
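# The cache saved above is a zlib-compressed pickle of self.cache; in practice
# it holds the 'files_in_tree' and 'lines_in_blob' lookups filled in by
# GitDataCollector below, so re-running gitstats on the same repository skips
# the expensive per-revision "git ls-tree" and "git cat-file" calls.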
class GitDataCollector(DataCollector):
    def collect(self, dir):
        DataCollector.collect(self, dir)

        self.total_authors += int(getpipeoutput(['git shortlog -s %s' % getcommitrange(), 'wc -l']))
        #self.total_lines = int(getoutput('git-ls-files -z |xargs -0 cat |wc -l'))

        # tags
        lines = getpipeoutput(['git show-ref --tags']).split('\n')
        for line in lines:
            if len(line) == 0:
                continue
            (hash, tag) = line.split(' ')

            tag = tag.replace('refs/tags/', '')
            output = getpipeoutput(['git log "%s" --pretty=format:"%%at %%aN" -n 1' % hash])
            if len(output) > 0:
                parts = output.split(' ')
                stamp = 0
                try:
                    stamp = int(parts[0])
                except ValueError:
                    stamp = 0
                self.tags[tag] = { 'stamp': stamp, 'hash' : hash, 'date' : datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), 'commits': 0, 'authors': {} }

        # collect info on tags, starting from latest
        tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), self.tags.items()))))
        prev = None
        for tag in reversed(tags_sorted_by_date_desc):
            cmd = 'git shortlog -s "%s"' % tag
            if prev != None:
                cmd += ' "^%s"' % prev
            output = getpipeoutput([cmd])
            if len(output) == 0:
                continue
            prev = tag
            for line in output.split('\n'):
                # "git shortlog -s" lines look like "<count>\t<author>"
                parts = re.split('\s+', line, 2)
                commits = int(parts[1])
                author = parts[2]
                self.tags[tag]['commits'] += commits
                self.tags[tag]['authors'][author] = commits
        # Collect revision statistics
        # Outputs "<stamp> <date> <time> <timezone> <author> '<' <mail> '>'"
        lines = getpipeoutput(['git rev-list --pretty=format:"%%at %%ai %%aN <%%aE>" %s' % getcommitrange('HEAD'), 'grep -v ^commit']).split('\n')
        for line in lines:
            parts = line.split(' ', 4)
            author = ''
            try:
                stamp = int(parts[0])
            except ValueError:
                stamp = 0
            timezone = parts[3]
            author, mail = parts[4].split('<', 1)
            author = author.rstrip()
            mail = mail.rstrip('>')
            domain = '?'
            if mail.find('@') != -1:
                domain = mail.rsplit('@', 1)[1]
            date = datetime.datetime.fromtimestamp(float(stamp))

            # First and last commit stamp (may be in any order because of cherry-picking and patches)
            if stamp > self.last_commit_stamp:
                self.last_commit_stamp = stamp
            if self.first_commit_stamp == 0 or stamp < self.first_commit_stamp:
                self.first_commit_stamp = stamp

            # activity
            # hour
            hour = date.hour
            self.activity_by_hour_of_day[hour] = self.activity_by_hour_of_day.get(hour, 0) + 1
            # most active hour?
            if self.activity_by_hour_of_day[hour] > self.activity_by_hour_of_day_busiest:
                self.activity_by_hour_of_day_busiest = self.activity_by_hour_of_day[hour]

            # day of week
            day = date.weekday()
            self.activity_by_day_of_week[day] = self.activity_by_day_of_week.get(day, 0) + 1

            # domain stats
            if domain not in self.domains:
                self.domains[domain] = {}
            # commits
            self.domains[domain]['commits'] = self.domains[domain].get('commits', 0) + 1

            # hour of week
            if day not in self.activity_by_hour_of_week:
                self.activity_by_hour_of_week[day] = {}
            self.activity_by_hour_of_week[day][hour] = self.activity_by_hour_of_week[day].get(hour, 0) + 1
            # most active hour?
            if self.activity_by_hour_of_week[day][hour] > self.activity_by_hour_of_week_busiest:
                self.activity_by_hour_of_week_busiest = self.activity_by_hour_of_week[day][hour]

            # month of year
            month = date.month
            self.activity_by_month_of_year[month] = self.activity_by_month_of_year.get(month, 0) + 1

            # yearly/weekly activity
            yyw = date.strftime('%Y-%W')
            self.activity_by_year_week[yyw] = self.activity_by_year_week.get(yyw, 0) + 1
            if self.activity_by_year_week_peak < self.activity_by_year_week[yyw]:
                self.activity_by_year_week_peak = self.activity_by_year_week[yyw]

            # author stats
            if author not in self.authors:
                self.authors[author] = {}
            # commits, note again that commits may be in any date order because of cherry-picking and patches
            if 'last_commit_stamp' not in self.authors[author]:
                self.authors[author]['last_commit_stamp'] = stamp
            if stamp > self.authors[author]['last_commit_stamp']:
                self.authors[author]['last_commit_stamp'] = stamp
            if 'first_commit_stamp' not in self.authors[author]:
                self.authors[author]['first_commit_stamp'] = stamp
            if stamp < self.authors[author]['first_commit_stamp']:
                self.authors[author]['first_commit_stamp'] = stamp

            # author of the month/year
            yymm = date.strftime('%Y-%m')
            if yymm in self.author_of_month:
                self.author_of_month[yymm][author] = self.author_of_month[yymm].get(author, 0) + 1
            else:
                self.author_of_month[yymm] = {}
                self.author_of_month[yymm][author] = 1
            self.commits_by_month[yymm] = self.commits_by_month.get(yymm, 0) + 1

            yy = date.year
            if yy in self.author_of_year:
                self.author_of_year[yy][author] = self.author_of_year[yy].get(author, 0) + 1
            else:
                self.author_of_year[yy] = {}
                self.author_of_year[yy][author] = 1
            self.commits_by_year[yy] = self.commits_by_year.get(yy, 0) + 1

            # authors: active days
            yymmdd = date.strftime('%Y-%m-%d')
            if 'last_active_day' not in self.authors[author]:
                self.authors[author]['last_active_day'] = yymmdd
                self.authors[author]['active_days'] = set([yymmdd])
            elif yymmdd != self.authors[author]['last_active_day']:
                self.authors[author]['last_active_day'] = yymmdd
                self.authors[author]['active_days'].add(yymmdd)

            # project: active days
            if yymmdd != self.last_active_day:
                self.last_active_day = yymmdd
                self.active_days.add(yymmdd)

            # timezone
            self.commits_by_timezone[timezone] = self.commits_by_timezone.get(timezone, 0) + 1
        # TODO Optimize this, it's the worst bottleneck
        # outputs "<stamp> <files>" for each revision
        revlines = getpipeoutput(['git rev-list --pretty=format:"%%at %%T" %s' % getcommitrange('HEAD'), 'grep -v ^commit']).strip().split('\n')
        lines = []
        for revline in revlines:
            # note: 'time' here shadows the time module for the rest of this method
            time, rev = revline.split(' ')
            linecount = self.getFilesInCommit(rev)
            lines.append('%d %d' % (int(time), linecount))

        self.total_commits += len(lines)
        for line in lines:
            parts = line.split(' ')
            if len(parts) != 2:
                continue
            (stamp, files) = parts[0:2]
            try:
                self.files_by_stamp[int(stamp)] = int(files)
            except ValueError:
                print 'Warning: failed to parse line "%s"' % line
        # extensions and size of files
        lines = getpipeoutput(['git ls-tree -r -l -z %s' % getcommitrange('HEAD', end_only = True)]).split('\000')
        for line in lines:
            if len(line) == 0:
                continue
            parts = re.split('\s+', line, 5)
            if parts[0] == '160000' and parts[3] == '-':
                # skip submodules
                continue
            sha1 = parts[2]
            size = int(parts[3])
            fullpath = parts[4]

            self.total_size += size
            self.total_files += 1

            filename = fullpath.split('/')[-1] # strip directories
            if filename.find('.') == -1 or filename.rfind('.') == 0:
                ext = ''
            else:
                ext = filename[(filename.rfind('.') + 1):]
            if len(ext) > conf['max_ext_length']:
                ext = ''

            if ext not in self.extensions:
                self.extensions[ext] = {'files': 0, 'lines': 0}

            self.extensions[ext]['files'] += 1
            try:
                self.extensions[ext]['lines'] += self.getLinesInBlob(sha1)
            except:
                print 'Warning: Could not count lines for file "%s"' % line
        # line statistics
        # outputs:
        #  N files changed, N insertions (+), N deletions(-)
        # <stamp> <author>
        self.changes_by_date = {} # stamp -> { files, ins, del }
        # computation of lines of code by date is better done
        # on a linear history.
        extra = ''
        if conf['linear_linestats']:
            extra = '--first-parent -m'
        lines = getpipeoutput(['git log --shortstat %s --pretty=format:"%%at %%aN" %s' % (extra, getcommitrange('HEAD'))]).split('\n')
        lines.reverse()
        files = 0; inserted = 0; deleted = 0; total_lines = 0
        author = None
        for line in lines:
            if len(line) == 0:
                continue

            # <stamp> <author>
            if re.search('files? changed', line) == None:
                pos = line.find(' ')
                if pos != -1:
                    try:
                        (stamp, author) = (int(line[:pos]), line[pos+1:])
                        self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted, 'lines': total_lines }

                        date = datetime.datetime.fromtimestamp(stamp)
                        yymm = date.strftime('%Y-%m')
                        self.lines_added_by_month[yymm] = self.lines_added_by_month.get(yymm, 0) + inserted
                        self.lines_removed_by_month[yymm] = self.lines_removed_by_month.get(yymm, 0) + deleted

                        yy = date.year
                        self.lines_added_by_year[yy] = self.lines_added_by_year.get(yy, 0) + inserted
                        self.lines_removed_by_year[yy] = self.lines_removed_by_year.get(yy, 0) + deleted

                        files, inserted, deleted = 0, 0, 0
                    except ValueError:
                        print 'Warning: unexpected line "%s"' % line
                else:
                    print 'Warning: unexpected line "%s"' % line
            else:
                numbers = getstatsummarycounts(line)

                if len(numbers) == 3:
                    (files, inserted, deleted) = map(lambda el : int(el), numbers)
                    total_lines += inserted
                    total_lines -= deleted
                    self.total_lines_added += inserted
                    self.total_lines_removed += deleted
                else:
                    print 'Warning: failed to handle line "%s"' % line
                    (files, inserted, deleted) = (0, 0, 0)
                #self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted }
        self.total_lines = total_lines
        # Per-author statistics

        # defined for stamp, author only if author commited at this timestamp.
        self.changes_by_date_by_author = {} # stamp -> author -> lines_added

        # Similar to the above, but never use --first-parent
        # (we need to walk through every commit to know who
        # committed what, not just through mainline)
        lines = getpipeoutput(['git log --shortstat --date-order --pretty=format:"%%at %%aN" %s' % (getcommitrange('HEAD'))]).split('\n')
        lines.reverse()
        files = 0; inserted = 0; deleted = 0
        author = None
        stamp = 0
        for line in lines:
            if len(line) == 0:
                continue

            # <stamp> <author>
            if re.search('files? changed', line) == None:
                pos = line.find(' ')
                if pos != -1:
                    try:
                        oldstamp = stamp
                        (stamp, author) = (int(line[:pos]), line[pos+1:])
                        if oldstamp > stamp:
                            # clock skew, keep old timestamp to avoid having ugly graph
                            stamp = oldstamp
                        if author not in self.authors:
                            self.authors[author] = { 'lines_added' : 0, 'lines_removed' : 0, 'commits' : 0}
                        self.authors[author]['commits'] = self.authors[author].get('commits', 0) + 1
                        self.authors[author]['lines_added'] = self.authors[author].get('lines_added', 0) + inserted
                        self.authors[author]['lines_removed'] = self.authors[author].get('lines_removed', 0) + deleted
                        if stamp not in self.changes_by_date_by_author:
                            self.changes_by_date_by_author[stamp] = {}
                        if author not in self.changes_by_date_by_author[stamp]:
                            self.changes_by_date_by_author[stamp][author] = {}
                        self.changes_by_date_by_author[stamp][author]['lines_added'] = self.authors[author]['lines_added']
                        self.changes_by_date_by_author[stamp][author]['commits'] = self.authors[author]['commits']
                        files, inserted, deleted = 0, 0, 0
                    except ValueError:
                        print 'Warning: unexpected line "%s"' % line
                else:
                    print 'Warning: unexpected line "%s"' % line
            else:
                numbers = getstatsummarycounts(line)

                if len(numbers) == 3:
                    (files, inserted, deleted) = map(lambda el : int(el), numbers)
                else:
                    print 'Warning: failed to handle line "%s"' % line
                    (files, inserted, deleted) = (0, 0, 0)
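    # refine() below post-processes what collect() gathered: it ranks authors by
    # commit count and derives per-author commit percentages, first/last commit
    # dates and the time span between them from the raw stamps collected above.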
    def refine(self):
        # authors
        # name -> {place_by_commits, commits_frac, date_first, date_last, timedelta}
        self.authors_by_commits = getkeyssortedbyvaluekey(self.authors, 'commits')
        self.authors_by_commits.reverse() # most first
        for i, name in enumerate(self.authors_by_commits):
            self.authors[name]['place_by_commits'] = i + 1

        for name in self.authors.keys():
            a = self.authors[name]
            a['commits_frac'] = (100 * float(a['commits'])) / self.getTotalCommits()
            date_first = datetime.datetime.fromtimestamp(a['first_commit_stamp'])
            date_last = datetime.datetime.fromtimestamp(a['last_commit_stamp'])
            delta = date_last - date_first
            a['date_first'] = date_first.strftime('%Y-%m-%d')
            a['date_last'] = date_last.strftime('%Y-%m-%d')
            a['timedelta'] = delta
            if 'lines_added' not in a: a['lines_added'] = 0
            if 'lines_removed' not in a: a['lines_removed'] = 0
    def getActiveDays(self):
        return self.active_days

    def getActivityByDayOfWeek(self):
        return self.activity_by_day_of_week

    def getActivityByHourOfDay(self):
        return self.activity_by_hour_of_day

    def getAuthorInfo(self, author):
        return self.authors[author]

    def getAuthors(self, limit = None):
        res = getkeyssortedbyvaluekey(self.authors, 'commits')
        res.reverse()
        return res[:limit]

    def getCommitDeltaDays(self):
        return (self.last_commit_stamp / 86400 - self.first_commit_stamp / 86400) + 1

    def getDomainInfo(self, domain):
        return self.domains[domain]

    def getDomains(self):
        return self.domains.keys()

    def getFilesInCommit(self, rev):
        try:
            res = self.cache['files_in_tree'][rev]
        except:
            res = int(getpipeoutput(['git ls-tree -r --name-only "%s"' % rev, 'wc -l']).split('\n')[0])
            if 'files_in_tree' not in self.cache:
                self.cache['files_in_tree'] = {}
            self.cache['files_in_tree'][rev] = res

        return res

    def getFirstCommitDate(self):
        return datetime.datetime.fromtimestamp(self.first_commit_stamp)

    def getLastCommitDate(self):
        return datetime.datetime.fromtimestamp(self.last_commit_stamp)

    def getLinesInBlob(self, sha1):
        try:
            res = self.cache['lines_in_blob'][sha1]
        except:
            res = int(getpipeoutput(['git cat-file blob %s' % sha1, 'wc -l']).split()[0])
            if 'lines_in_blob' not in self.cache:
                self.cache['lines_in_blob'] = {}
            self.cache['lines_in_blob'][sha1] = res
        return res

    def getTags(self):
        lines = getpipeoutput(['git show-ref --tags', 'cut -d/ -f3'])
        return lines.split('\n')

    def getTagDate(self, tag):
        return self.revToDate('tags/' + tag)

    def getTotalAuthors(self):
        return self.total_authors

    def getTotalCommits(self):
        return self.total_commits

    def getTotalFiles(self):
        return self.total_files

    def getTotalLOC(self):
        return self.total_lines

    def getTotalSize(self):
        return self.total_size

    def revToDate(self, rev):
        stamp = int(getpipeoutput(['git log --pretty=format:%%at "%s" -n 1' % rev]))
        return datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d')
class ReportCreator:
    """Creates the actual report based on given data."""
    def __init__(self):
        pass

    def create(self, data, path):
        self.data = data
        self.path = path

def html_linkify(text):
    return text.lower().replace(' ', '_')

def html_header(level, text):
    name = html_linkify(text)
    return '\n<h%d><a href="#%s" name="%s">%s</a></h%d>\n\n' % (level, name, name, text, level)
class HTMLReportCreator(ReportCreator):
    def create(self, data, path):
        ReportCreator.create(self, data, path)
        self.title = data.projectname

        # copy static files. Looks in the binary directory, ../share/gitstats and /usr/share/gitstats
        binarypath = os.path.dirname(os.path.abspath(__file__))
        secondarypath = os.path.join(binarypath, '..', 'share', 'gitstats')
        basedirs = [binarypath, secondarypath, '/usr/share/gitstats']
        for file in ('gitstats.css', 'sortable.js', 'arrow-up.gif', 'arrow-down.gif', 'arrow-none.gif'):
            for base in basedirs:
                src = base + '/' + file
                if os.path.exists(src):
                    shutil.copyfile(src, path + '/' + file)
                    break
            else:
                print 'Warning: "%s" not found, so not copied (searched: %s)' % (file, basedirs)

        f = open(path + "/index.html", 'w')
        format = '%Y-%m-%d %H:%M:%S'
        self.printHeader(f)

        f.write('<h1>GitStats - %s</h1>' % data.projectname)

        self.printNav(f)

        f.write('<dl>')
        f.write('<dt>Project name</dt><dd>%s</dd>' % (data.projectname))
        f.write('<dt>Generated</dt><dd>%s (in %d seconds)</dd>' % (datetime.datetime.now().strftime(format), time.time() - data.getStampCreated()))
        f.write('<dt>Generator</dt><dd><a href="http://gitstats.sourceforge.net/">GitStats</a> (version %s), %s, %s</dd>' % (getversion(), getgitversion(), getgnuplotversion()))
        f.write('<dt>Report Period</dt><dd>%s to %s</dd>' % (data.getFirstCommitDate().strftime(format), data.getLastCommitDate().strftime(format)))
        f.write('<dt>Age</dt><dd>%d days, %d active days (%3.2f%%)</dd>' % (data.getCommitDeltaDays(), len(data.getActiveDays()), (100.0 * len(data.getActiveDays()) / data.getCommitDeltaDays())))
        f.write('<dt>Total Files</dt><dd>%s</dd>' % data.getTotalFiles())
        f.write('<dt>Total Lines of Code</dt><dd>%s (%d added, %d removed)</dd>' % (data.getTotalLOC(), data.total_lines_added, data.total_lines_removed))
        f.write('<dt>Total Commits</dt><dd>%s (average %.1f commits per active day, %.1f per all days)</dd>' % (data.getTotalCommits(), float(data.getTotalCommits()) / len(data.getActiveDays()), float(data.getTotalCommits()) / data.getCommitDeltaDays()))
        f.write('<dt>Authors</dt><dd>%s (average %.1f commits per author)</dd>' % (data.getTotalAuthors(), (1.0 * data.getTotalCommits()) / data.getTotalAuthors()))
        f.write('</dl>')

        f.write('</body>\n</html>')
        f.close()
        # Activity
        f = open(path + '/activity.html', 'w')
        self.printHeader(f)
        f.write('<h1>Activity</h1>')
        self.printNav(f)

        #f.write('<h2>Last 30 days</h2>')

        #f.write('<h2>Last 12 months</h2>')

        # Weekly activity
        WEEKS = 32
        f.write(html_header(2, 'Weekly activity'))
        f.write('<p>Last %d weeks</p>' % WEEKS)

        # generate weeks to show (previous N weeks from now)
        now = datetime.datetime.now()
        deltaweek = datetime.timedelta(7)
        weeks = []
        stampcur = now
        for i in range(0, WEEKS):
            weeks.insert(0, stampcur.strftime('%Y-%W'))
            stampcur -= deltaweek

        # top row: commits & bar
        f.write('<table class="noborders"><tr>')
        for i in range(0, WEEKS):
            commits = 0
            if weeks[i] in data.activity_by_year_week:
                commits = data.activity_by_year_week[weeks[i]]

            percentage = 0
            if weeks[i] in data.activity_by_year_week:
                percentage = float(data.activity_by_year_week[weeks[i]]) / data.activity_by_year_week_peak
            height = max(1, int(200 * percentage))
            f.write('<td style="text-align: center; vertical-align: bottom">%d<div style="display: block; background-color: red; width: 20px; height: %dpx"></div></td>' % (commits, height))

        # bottom row: year/week
        f.write('</tr><tr>')
        for i in range(0, WEEKS):
            f.write('<td>%s</td>' % (WEEKS - i))
        f.write('</tr></table>')

        # Hour of Day
        f.write(html_header(2, 'Hour of Day'))
        hour_of_day = data.getActivityByHourOfDay()
        f.write('<table><tr><th>Hour</th>')
        for i in range(0, 24):
            f.write('<th>%d</th>' % i)
        f.write('</tr>\n<tr><th>Commits</th>')
        fp = open(path + '/hour_of_day.dat', 'w')
        for i in range(0, 24):
            if i in hour_of_day:
                r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
                f.write('<td style="background-color: rgb(%d, 0, 0)">%d</td>' % (r, hour_of_day[i]))
                fp.write('%d %d\n' % (i, hour_of_day[i]))
            else:
                f.write('<td>0</td>')
                fp.write('%d 0\n' % i)
        fp.close()
        f.write('</tr>\n<tr><th>%</th>')
        totalcommits = data.getTotalCommits()
        for i in range(0, 24):
            if i in hour_of_day:
                r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
                f.write('<td style="background-color: rgb(%d, 0, 0)">%.2f</td>' % (r, (100.0 * hour_of_day[i]) / totalcommits))
            else:
                f.write('<td>0.00</td>')
        f.write('</tr></table>')
        f.write('<img src="hour_of_day.png" alt="Hour of Day" />')
        fg = open(path + '/hour_of_day.dat', 'w')
        for i in range(0, 24):
            if i in hour_of_day:
                fg.write('%d %d\n' % (i + 1, hour_of_day[i]))
            else:
                fg.write('%d 0\n' % (i + 1))
        fg.close()
        # Day of Week
        f.write(html_header(2, 'Day of Week'))
        day_of_week = data.getActivityByDayOfWeek()
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Day</th><th>Total (%)</th></tr>')
        fp = open(path + '/day_of_week.dat', 'w')
        for d in range(0, 7):
            commits = 0
            if d in day_of_week:
                commits = day_of_week[d]
            fp.write('%d %s %d\n' % (d + 1, WEEKDAYS[d], commits))
            f.write('<tr>')
            f.write('<th>%s</th>' % (WEEKDAYS[d]))
            if d in day_of_week:
                f.write('<td>%d (%.2f%%)</td>' % (day_of_week[d], (100.0 * day_of_week[d]) / totalcommits))
            else:
                f.write('<td>0</td>')
            f.write('</tr>')
        f.write('</table></div>')
        f.write('<img src="day_of_week.png" alt="Day of Week" />')
        fp.close()

        # Hour of Week
        f.write(html_header(2, 'Hour of Week'))
        f.write('<table>')

        f.write('<tr><th>Weekday</th>')
        for hour in range(0, 24):
            f.write('<th>%d</th>' % (hour))
        f.write('</tr>')

        for weekday in range(0, 7):
            f.write('<tr><th>%s</th>' % (WEEKDAYS[weekday]))
            for hour in range(0, 24):
                try:
                    commits = data.activity_by_hour_of_week[weekday][hour]
                except KeyError:
                    commits = 0
                if commits != 0:
                    f.write('<td')
                    r = 127 + int((float(commits) / data.activity_by_hour_of_week_busiest) * 128)
                    f.write(' style="background-color: rgb(%d, 0, 0)"' % r)
                    f.write('>%d</td>' % commits)
                else:
                    f.write('<td></td>')
            f.write('</tr>')

        f.write('</table>')
        # Month of Year
        f.write(html_header(2, 'Month of Year'))
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Month</th><th>Commits (%)</th></tr>')
        fp = open(path + '/month_of_year.dat', 'w')
        for mm in range(1, 13):
            commits = 0
            if mm in data.activity_by_month_of_year:
                commits = data.activity_by_month_of_year[mm]
            f.write('<tr><td>%d</td><td>%d (%.2f %%)</td></tr>' % (mm, commits, (100.0 * commits) / data.getTotalCommits()))
            fp.write('%d %d\n' % (mm, commits))
        fp.close()
        f.write('</table></div>')
        f.write('<img src="month_of_year.png" alt="Month of Year" />')

        # Commits by year/month
        f.write(html_header(2, 'Commits by year/month'))
        f.write('<div class="vtable"><table><tr><th>Month</th><th>Commits</th><th>Lines added</th><th>Lines removed</th></tr>')
        for yymm in reversed(sorted(data.commits_by_month.keys())):
            f.write('<tr><td>%s</td><td>%d</td><td>%d</td><td>%d</td></tr>' % (yymm, data.commits_by_month.get(yymm,0), data.lines_added_by_month.get(yymm,0), data.lines_removed_by_month.get(yymm,0)))
        f.write('</table></div>')
        f.write('<img src="commits_by_year_month.png" alt="Commits by year/month" />')
        fg = open(path + '/commits_by_year_month.dat', 'w')
        for yymm in sorted(data.commits_by_month.keys()):
            fg.write('%s %s\n' % (yymm, data.commits_by_month[yymm]))
        fg.close()

        # Commits by year
        f.write(html_header(2, 'Commits by Year'))
        f.write('<div class="vtable"><table><tr><th>Year</th><th>Commits (% of all)</th><th>Lines added</th><th>Lines removed</th></tr>')
        for yy in reversed(sorted(data.commits_by_year.keys())):
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d</td><td>%d</td></tr>' % (yy, data.commits_by_year.get(yy,0), (100.0 * data.commits_by_year.get(yy,0)) / data.getTotalCommits(), data.lines_added_by_year.get(yy,0), data.lines_removed_by_year.get(yy,0)))
        f.write('</table></div>')
        f.write('<img src="commits_by_year.png" alt="Commits by Year" />')
        fg = open(path + '/commits_by_year.dat', 'w')
        for yy in sorted(data.commits_by_year.keys()):
            fg.write('%d %d\n' % (yy, data.commits_by_year[yy]))
        fg.close()

        # Commits by timezone
        f.write(html_header(2, 'Commits by Timezone'))
        f.write('<table><tr>')
        f.write('<th>Timezone</th><th>Commits</th>')
        max_commits_on_tz = max(data.commits_by_timezone.values())
        for i in sorted(data.commits_by_timezone.keys(), key = lambda n : int(n)):
            commits = data.commits_by_timezone[i]
            r = 127 + int((float(commits) / max_commits_on_tz) * 128)
            f.write('<tr><th>%s</th><td style="background-color: rgb(%d, 0, 0)">%d</td></tr>' % (i, r, commits))
        f.write('</tr></table>')

        f.write('</body></html>')
        f.close()
        # Authors
        f = open(path + '/authors.html', 'w')
        self.printHeader(f)

        f.write('<h1>Authors</h1>')
        self.printNav(f)

        # Authors :: List of authors
        f.write(html_header(2, 'List of Authors'))

        f.write('<table class="authors sortable" id="authors">')
        f.write('<tr><th>Author</th><th>Commits (%)</th><th>+ lines</th><th>- lines</th><th>First commit</th><th>Last commit</th><th class="unsortable">Age</th><th>Active days</th><th># by commits</th></tr>')
        for author in data.getAuthors(conf['max_authors']):
            info = data.getAuthorInfo(author)
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d</td><td>%d</td><td>%s</td><td>%s</td><td>%s</td><td>%d</td><td>%d</td></tr>' % (author, info['commits'], info['commits_frac'], info['lines_added'], info['lines_removed'], info['date_first'], info['date_last'], info['timedelta'], len(info['active_days']), info['place_by_commits']))
        f.write('</table>')

        allauthors = data.getAuthors()
        if len(allauthors) > conf['max_authors']:
            rest = allauthors[conf['max_authors']:]
            f.write('<p class="moreauthors">These didn\'t make it to the top: %s</p>' % ', '.join(rest))

        f.write(html_header(2, 'Cumulated Added Lines of Code per Author'))
        f.write('<img src="lines_of_code_by_author.png" alt="Lines of code per Author" />')
        if len(allauthors) > conf['max_authors']:
            f.write('<p class="moreauthors">Only top %d authors shown</p>' % conf['max_authors'])

        f.write(html_header(2, 'Commits per Author'))
        f.write('<img src="commits_by_author.png" alt="Commits per Author" />')
        if len(allauthors) > conf['max_authors']:
            f.write('<p class="moreauthors">Only top %d authors shown</p>' % conf['max_authors'])

        fgl = open(path + '/lines_of_code_by_author.dat', 'w')
        fgc = open(path + '/commits_by_author.dat', 'w')

        # cumulated added lines by author. To save memory,
        # changes_by_date_by_author[stamp][author] is defined only at points
        # where the author commits; lines_by_authors lets us emit a value for
        # every author at every point in the .dat file.
        lines_by_authors = {}

        # cumulated commits by author, same idea as above.
        # Don't rely on getAuthors to give the same order each time; be robust
        # and keep the list in a variable.
        commits_by_authors = {}

        self.authors_to_plot = data.getAuthors(conf['max_authors'])
        for author in self.authors_to_plot:
            lines_by_authors[author] = 0
            commits_by_authors[author] = 0
        for stamp in sorted(data.changes_by_date_by_author.keys()):
            fgl.write('%d' % stamp)
            fgc.write('%d' % stamp)
            for author in self.authors_to_plot:
                if author in data.changes_by_date_by_author[stamp].keys():
                    lines_by_authors[author] = data.changes_by_date_by_author[stamp][author]['lines_added']
                    commits_by_authors[author] = data.changes_by_date_by_author[stamp][author]['commits']
                fgl.write(' %d' % lines_by_authors[author])
                fgc.write(' %d' % commits_by_authors[author])
            fgl.write('\n')
            fgc.write('\n')
        fgl.close()
        fgc.close()
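        # Each row in the two .dat files above is "<stamp>" followed by one
        # cumulative column per plotted author; createGraphs() below picks
        # column N+1 for the N-th author via gnuplot's "using 1:%d".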
        # Authors :: Author of Month
        f.write(html_header(2, 'Author of Month'))
        f.write('<table class="sortable" id="aom">')
        f.write('<tr><th>Month</th><th>Author</th><th>Commits (%%)</th><th class="unsortable">Next top %d</th><th>Number of authors</th></tr>' % conf['authors_top'])
        for yymm in reversed(sorted(data.author_of_month.keys())):
            authordict = data.author_of_month[yymm]
            authors = getkeyssortedbyvalues(authordict)
            authors.reverse()
            commits = data.author_of_month[yymm][authors[0]]
            next = ', '.join(authors[1:conf['authors_top']+1])
            f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td><td>%d</td></tr>' % (yymm, authors[0], commits, (100.0 * commits) / data.commits_by_month[yymm], data.commits_by_month[yymm], next, len(authors)))

        f.write('</table>')

        f.write(html_header(2, 'Author of Year'))
        f.write('<table class="sortable" id="aoy"><tr><th>Year</th><th>Author</th><th>Commits (%%)</th><th class="unsortable">Next top %d</th><th>Number of authors</th></tr>' % conf['authors_top'])
        for yy in reversed(sorted(data.author_of_year.keys())):
            authordict = data.author_of_year[yy]
            authors = getkeyssortedbyvalues(authordict)
            authors.reverse()
            commits = data.author_of_year[yy][authors[0]]
            next = ', '.join(authors[1:conf['authors_top']+1])
            f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td><td>%d</td></tr>' % (yy, authors[0], commits, (100.0 * commits) / data.commits_by_year[yy], data.commits_by_year[yy], next, len(authors)))
        f.write('</table>')

        # Domains
        f.write(html_header(2, 'Commits by Domains'))
        domains_by_commits = getkeyssortedbyvaluekey(data.domains, 'commits')
        domains_by_commits.reverse() # most first
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Domains</th><th>Total (%)</th></tr>')
        fp = open(path + '/domains.dat', 'w')
        n = 0
        for domain in domains_by_commits:
            if n == conf['max_domains']:
                break
            commits = 0
            n += 1
            info = data.getDomainInfo(domain)
            fp.write('%s %d %d\n' % (domain, n , info['commits']))
            f.write('<tr><th>%s</th><td>%d (%.2f%%)</td></tr>' % (domain, info['commits'], (100.0 * info['commits'] / totalcommits)))
        f.write('</table></div>')
        f.write('<img src="domains.png" alt="Commits by Domains" />')
        fp.close()

        f.write('</body></html>')
        f.close()
        # Files
        f = open(path + '/files.html', 'w')
        self.printHeader(f)
        f.write('<h1>Files</h1>')
        self.printNav(f)

        f.write('<dl>\n')
        f.write('<dt>Total files</dt><dd>%d</dd>' % data.getTotalFiles())
        f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
        try:
            f.write('<dt>Average file size</dt><dd>%.2f bytes</dd>' % (float(data.getTotalSize()) / data.getTotalFiles()))
        except ZeroDivisionError:
            pass
        f.write('</dl>\n')

        # Files :: File count by date
        f.write(html_header(2, 'File count by date'))

        # use set to get rid of duplicate/unnecessary entries
        files_by_date = set()
        for stamp in sorted(data.files_by_stamp.keys()):
            files_by_date.add('%s %d' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))

        fg = open(path + '/files_by_date.dat', 'w')
        for line in sorted(list(files_by_date)):
            fg.write('%s\n' % line)
        #for stamp in sorted(data.files_by_stamp.keys()):
        #    fg.write('%s %d\n' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))
        fg.close()

        f.write('<img src="files_by_date.png" alt="Files by Date" />')

        #f.write('<h2>Average file size by date</h2>')

        # Files :: Extensions
        f.write(html_header(2, 'Extensions'))
        f.write('<table class="sortable" id="ext"><tr><th>Extension</th><th>Files (%)</th><th>Lines (%)</th><th>Lines/file</th></tr>')
        for ext in sorted(data.extensions.keys()):
            files = data.extensions[ext]['files']
            lines = data.extensions[ext]['lines']
            try:
                loc_percentage = (100.0 * lines) / data.getTotalLOC()
            except ZeroDivisionError:
                loc_percentage = 0
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d (%.2f%%)</td><td>%d</td></tr>' % (ext, files, (100.0 * files) / data.getTotalFiles(), lines, loc_percentage, lines / files))
        f.write('</table>')

        f.write('</body></html>')
        f.close()
        # Lines
        f = open(path + '/lines.html', 'w')
        self.printHeader(f)
        f.write('<h1>Lines</h1>')
        self.printNav(f)

        f.write('<dl>\n')
        f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
        f.write('</dl>\n')

        f.write(html_header(2, 'Lines of Code'))
        f.write('<img src="lines_of_code.png" />')

        fg = open(path + '/lines_of_code.dat', 'w')
        for stamp in sorted(data.changes_by_date.keys()):
            fg.write('%d %d\n' % (stamp, data.changes_by_date[stamp]['lines']))
        fg.close()

        f.write('</body></html>')
        f.close()

        # tags.html
        f = open(path + '/tags.html', 'w')
        self.printHeader(f)
        f.write('<h1>Tags</h1>')
        self.printNav(f)

        f.write('<dl>')
        f.write('<dt>Total tags</dt><dd>%d</dd>' % len(data.tags))
        if len(data.tags) > 0:
            f.write('<dt>Average commits per tag</dt><dd>%.2f</dd>' % (1.0 * data.getTotalCommits() / len(data.tags)))
        f.write('</dl>')

        f.write('<table class="tags">')
        f.write('<tr><th>Name</th><th>Date</th><th>Commits</th><th>Authors</th></tr>')
        # sort the tags by date desc
        tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), data.tags.items()))))
        for tag in tags_sorted_by_date_desc:
            authorinfo = []
            self.authors_by_commits = getkeyssortedbyvalues(data.tags[tag]['authors'])
            for i in reversed(self.authors_by_commits):
                authorinfo.append('%s (%d)' % (i, data.tags[tag]['authors'][i]))
            f.write('<tr><td>%s</td><td>%s</td><td>%d</td><td>%s</td></tr>' % (tag, data.tags[tag]['date'], data.tags[tag]['commits'], ', '.join(authorinfo)))
        f.write('</table>')

        f.write('</body></html>')
        f.close()

        self.createGraphs(path)
    def createGraphs(self, path):
        print 'Generating graphs...'

        # hour of day
        f = open(path + '/hour_of_day.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'hour_of_day.png'
unset key
set xrange [0.5:24.5]
set xtics 4
set grid y
set ylabel "Commits"
plot 'hour_of_day.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # day of week
        f = open(path + '/day_of_week.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'day_of_week.png'
unset key
set xrange [0.5:7.5]
set xtics 1
set grid y
set ylabel "Commits"
plot 'day_of_week.dat' using 1:3:(0.5):xtic(2) w boxes fs solid
""")
        f.close()

        # Domains
        f = open(path + '/domains.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'domains.png'
unset key
unset xtics
set yrange [0:]
set grid y
set ylabel "Commits"
plot 'domains.dat' using 2:3:(0.5) with boxes fs solid, '' using 2:3:1 with labels rotate by 45 offset 0,1
""")
        f.close()

        # Month of Year
        f = open(path + '/month_of_year.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'month_of_year.png'
unset key
set xrange [0.5:12.5]
set xtics 1
set grid y
set ylabel "Commits"
plot 'month_of_year.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # commits_by_year_month
        f = open(path + '/commits_by_year_month.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'commits_by_year_month.png'
unset key
set xdata time
set timefmt "%Y-%m"
set format x "%Y-%m"
set xtics rotate
set bmargin 5
set grid y
set ylabel "Commits"
plot 'commits_by_year_month.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()

        # commits_by_year
        f = open(path + '/commits_by_year.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'commits_by_year.png'
unset key
set xtics 1 rotate
set grid y
set ylabel "Commits"
set yrange [0:]
plot 'commits_by_year.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()
        # Files by date
        f = open(path + '/files_by_date.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'files_by_date.png'
unset key
set xdata time
set timefmt "%Y-%m-%d"
set format x "%Y-%m-%d"
set grid y
set ylabel "Files"
set xtics rotate
set ytics autofreq
set bmargin 6
plot 'files_by_date.dat' using 1:2 w steps
""")
        f.close()

        # Lines of Code
        f = open(path + '/lines_of_code.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'lines_of_code.png'
unset key
set xdata time
set timefmt "%s"
set format x "%Y-%m-%d"
set grid y
set ylabel "Lines"
set xtics rotate
set bmargin 6
plot 'lines_of_code.dat' using 1:2 w lines
""")
        f.close()

        # Lines of Code Added per author
        f = open(path + '/lines_of_code_by_author.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set terminal png transparent size 640,480
set output 'lines_of_code_by_author.png'
set key left top
set xdata time
set timefmt "%s"
set format x "%Y-%m-%d"
set grid y
set ylabel "Lines"
set xtics rotate
set bmargin 6
plot """
)
        i = 1
        plots = []
        for a in self.authors_to_plot:
            i = i + 1
            plots.append("""'lines_of_code_by_author.dat' using 1:%d title "%s" w lines""" % (i, a.replace("\"", "\\\"")))
        f.write(", ".join(plots))
        f.write('\n')

        f.close()

        # Commits per author
        f = open(path + '/commits_by_author.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set terminal png transparent size 640,480
set output 'commits_by_author.png'
set key left top
set xdata time
set timefmt "%s"
set format x "%Y-%m-%d"
set grid y
set ylabel "Commits"
set xtics rotate
set bmargin 6
plot """
)
        i = 1
        plots = []
        for a in self.authors_to_plot:
            i = i + 1
            plots.append("""'commits_by_author.dat' using 1:%d title "%s" w lines""" % (i, a.replace("\"", "\\\"")))
        f.write(", ".join(plots))
        f.write('\n')

        f.close()

        os.chdir(path)
        files = glob.glob(path + '/*.plot')
        for f in files:
            out = getpipeoutput([gnuplot_cmd + ' "%s"' % f])
            if len(out) > 0:
                print out
    def printHeader(self, f, title = ''):
        f.write(
"""<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>GitStats - %s</title>
<link rel="stylesheet" href="%s" type="text/css" />
<meta name="generator" content="GitStats %s" />
<script type="text/javascript" src="sortable.js"></script>
</head>
<body>
""" % (self.title, conf['style'], getversion()))

    def printNav(self, f):
        f.write("""
<div class="nav">
<ul>
<li><a href="index.html">General</a></li>
<li><a href="activity.html">Activity</a></li>
<li><a href="authors.html">Authors</a></li>
<li><a href="files.html">Files</a></li>
<li><a href="lines.html">Lines</a></li>
<li><a href="tags.html">Tags</a></li>
</ul>
</div>
""")
class GitStats:
    def run(self, args_orig):
        optlist, args = getopt.getopt(args_orig, 'c:')
        for o, v in optlist:
            if o == '-c':
                key, value = v.split('=', 1)
                if key not in conf:
                    raise KeyError('no such key "%s" in config' % key)
                if isinstance(conf[key], int):
                    conf[key] = int(value)
                else:
                    conf[key] = value

        if len(args) < 2:
            print """
Usage: gitstats [options] <gitpath..> <outputpath>

Options:
-c key=value     Override configuration value

Default config values:
%s
""" % conf
            sys.exit(0)
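        # Example invocation (paths are illustrative):
        #   gitstats -c max_authors=30 /path/to/repo /path/to/output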
        outputpath = os.path.abspath(args[-1])
        rundir = os.getcwd()

        try:
            os.makedirs(outputpath)
        except OSError:
            pass
        if not os.path.isdir(outputpath):
            print 'FATAL: Output path is not a directory or does not exist'
            sys.exit(1)

        print 'Output path: %s' % outputpath
        cachefile = os.path.join(outputpath, 'gitstats.cache')

        data = GitDataCollector()
        data.loadCache(cachefile)

        for gitpath in args[0:-1]:
            print 'Git path: %s' % gitpath

            os.chdir(gitpath)

            print 'Collecting data...'
            data.collect(gitpath)

            print 'Refining data...'
            data.saveCache(cachefile)
            data.refine()

        os.chdir(rundir)

        print 'Generating report...'
        report = HTMLReportCreator()
        report.create(data, outputpath)

        time_end = time.time()
        exectime_internal = time_end - time_start
        print 'Execution time %.5f secs, %.5f secs (%.2f %%) in external commands' % (exectime_internal, exectime_external, (100.0 * exectime_external) / exectime_internal)
        if sys.stdin.isatty():
            print 'You may now run:'
            print
            print '   sensible-browser \'%s\'' % os.path.join(outputpath, 'index.html').replace("'", "'\\''")
            print

if __name__ == '__main__':
    g = GitStats()
    g.run(sys.argv[1:])