1 #!/usr/bin/env python
2 # Copyright (c) 2007-2012 Heikki Hokkanen <hoxu@users.sf.net> & others (see doc/author.txt)
3 # GPLv2 / GPLv3
4 import datetime
5 import getopt
6 import glob
7 import os
8 import pickle
9 import platform
10 import re
11 import shutil
12 import subprocess
13 import sys
14 import time
15 import zlib
17 os.environ['LC_ALL'] = 'C'
19 GNUPLOT_COMMON = 'set terminal png transparent size 640,240\nset size 1.0,1.0\n'
20 ON_LINUX = (platform.system() == 'Linux')
21 WEEKDAYS = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')
23 exectime_internal = 0.0
24 exectime_external = 0.0
25 time_start = time.time()
27 # By default, gnuplot is searched for in PATH, but this can be overridden with the
28 # environment variable "GNUPLOT"
29 gnuplot_cmd = 'gnuplot'
30 if 'GNUPLOT' in os.environ:
31 gnuplot_cmd = os.environ['GNUPLOT']
33 conf = {
34 'max_domains': 10,
35 'max_ext_length': 10,
36 'style': 'gitstats.css',
37 'max_authors': 20,
38 'authors_top': 5,
39 'commit_begin': '',
40 'commit_end': 'HEAD',
41 'linear_linestats': 1,
42 'project_name': '',
43 'merge_authors': {}
44 }
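# Illustrative note: any of these defaults can be overridden at runtime with
# "-c key=value" (parsed in GitStats.run below), e.g. -c max_authors=40.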
46 def getpipeoutput(cmds, quiet = False):
47 global exectime_external
48 start = time.time()
49 if not quiet and ON_LINUX and os.isatty(1):
50 print '>> ' + ' | '.join(cmds),
51 sys.stdout.flush()
52 p0 = subprocess.Popen(cmds[0], stdout = subprocess.PIPE, shell = True)
53 p = p0
54 for x in cmds[1:]:
55 p = subprocess.Popen(x, stdin = p0.stdout, stdout = subprocess.PIPE, shell = True)
56 p0 = p
57 output = p.communicate()[0]
58 end = time.time()
59 if not quiet:
60 if ON_LINUX and os.isatty(1):
61 print '\r',
62 print '[%.5f] >> %s' % (end - start, ' | '.join(cmds))
63 exectime_external += (end - start)
64 return output.rstrip('\n')
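# Illustrative usage: getpipeoutput(['git rev-list HEAD', 'wc -l']) runs the shell
# pipeline "git rev-list HEAD | wc -l" and returns its stdout with the trailing newline stripped.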
66 def getcommitrange(defaultrange = 'HEAD', end_only = False):
67 if len(conf['commit_end']) > 0:
68 if end_only or len(conf['commit_begin']) == 0:
69 return conf['commit_end']
70 return '%s..%s' % (conf['commit_begin'], conf['commit_end'])
71 return defaultrange
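# Illustrative example: with commit_begin='v1.0' and commit_end='HEAD' this yields
# 'v1.0..HEAD'; with commit_begin empty it yields just 'HEAD'.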
73 def getkeyssortedbyvalues(dict):
74 return map(lambda el : el[1], sorted(map(lambda el : (el[1], el[0]), dict.items())))
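# e.g. getkeyssortedbyvalues({'a': 2, 'b': 1}) -> ['b', 'a'] (keys ordered by ascending value)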
76 # Example: dict['author'] = { 'commits': 512 } - see getkeyssortedbyvaluekey(dict, 'commits') below
77 def getkeyssortedbyvaluekey(d, key):
78 return map(lambda el : el[1], sorted(map(lambda el : (d[el][key], el), d.keys())))
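# e.g. getkeyssortedbyvaluekey({'alice': {'commits': 5}, 'bob': {'commits': 9}}, 'commits') -> ['alice', 'bob']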
80 def getstatsummarycounts(line):
81 numbers = re.findall('\d+', line)
82 if len(numbers) == 1:
83 # neither insertions nor deletions: probably only happens for "0 files changed"
84 numbers.append(0);
85 numbers.append(0);
86 elif len(numbers) == 2 and line.find('(+)') != -1:
87 numbers.append(0); # only insertions were printed on line
88 elif len(numbers) == 2 and line.find('(-)') != -1:
89 numbers.insert(1, 0); # only deletions were printed on line
90 return numbers
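# Illustrative examples: "3 files changed, 9 insertions(+), 2 deletions(-)" -> ['3', '9', '2'];
# "1 file changed, 4 insertions(+)" -> ['1', '4', 0] (the missing deletions count is padded with 0)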
92 VERSION = 0
93 def getversion():
94 global VERSION
95 if VERSION == 0:
96 gitstats_repo = os.path.dirname(os.path.abspath(__file__))
97 VERSION = getpipeoutput(["git --git-dir=%s/.git --work-tree=%s rev-parse --short %s" %
98 (gitstats_repo, gitstats_repo, getcommitrange('HEAD').split('\n')[0])])
99 return VERSION
101 def getgitversion():
102 return getpipeoutput(['git --version']).split('\n')[0]
104 def getgnuplotversion():
105 return getpipeoutput(['%s --version' % gnuplot_cmd]).split('\n')[0]
107 class DataCollector:
108 """Manages data collection from a revision control repository."""
109 def __init__(self):
110 self.stamp_created = time.time()
111 self.cache = {}
112 self.total_authors = 0
113 self.activity_by_hour_of_day = {} # hour -> commits
114 self.activity_by_day_of_week = {} # day -> commits
115 self.activity_by_month_of_year = {} # month [1-12] -> commits
116 self.activity_by_hour_of_week = {} # weekday -> hour -> commits
117 self.activity_by_hour_of_day_busiest = 0
118 self.activity_by_hour_of_week_busiest = 0
119 self.activity_by_year_week = {} # yy_wNN -> commits
120 self.activity_by_year_week_peak = 0
122 self.authors = {} # name -> {commits, first_commit_stamp, last_commit_stamp, last_active_day, active_days, lines_added, lines_removed}
124 self.total_commits = 0
125 self.total_files = 0
126 self.authors_by_commits = 0
128 # domains
129 self.domains = {} # domain -> commits
131 # author of the month
132 self.author_of_month = {} # month -> author -> commits
133 self.author_of_year = {} # year -> author -> commits
134 self.commits_by_month = {} # month -> commits
135 self.commits_by_year = {} # year -> commits
136 self.lines_added_by_month = {} # month -> lines added
137 self.lines_added_by_year = {} # year -> lines added
138 self.lines_removed_by_month = {} # month -> lines removed
139 self.lines_removed_by_year = {} # year -> lines removed
140 self.first_commit_stamp = 0
141 self.last_commit_stamp = 0
142 self.last_active_day = None
143 self.active_days = set()
145 # lines
146 self.total_lines = 0
147 self.total_lines_added = 0
148 self.total_lines_removed = 0
150 # size
151 self.total_size = 0
153 # timezone
154 self.commits_by_timezone = {} # timezone -> commits
156 # tags
157 self.tags = {}
159 self.files_by_stamp = {} # stamp -> files
161 # extensions
162 self.extensions = {} # extension -> files, lines
164 # line statistics
165 self.changes_by_date = {} # stamp -> { files, ins, del }
168 # This should be the main function to extract data from the repository.
169 def collect(self, dir):
170 self.dir = dir
171 if len(conf['project_name']) == 0:
172 self.projectname = os.path.basename(os.path.abspath(dir))
173 else:
174 self.projectname = conf['project_name']
177 # Load cacheable data
178 def loadCache(self, cachefile):
179 if not os.path.exists(cachefile):
180 return
181 print 'Loading cache...'
182 f = open(cachefile, 'rb')
183 try:
184 self.cache = pickle.loads(zlib.decompress(f.read()))
185 except:
186 # temporary hack to upgrade non-compressed caches
187 f.seek(0)
188 self.cache = pickle.load(f)
189 f.close()
192 # Produce any additional statistics from the extracted data.
193 def refine(self):
194 pass
197 # Get a dictionary of information about the given author
198 def getAuthorInfo(self, author):
199 return None
201 def getActivityByDayOfWeek(self):
202 return {}
204 def getActivityByHourOfDay(self):
205 return {}
207 # Get a dictionary of information about the given domain
208 def getDomainInfo(self, domain):
209 return None
212 # Get a list of authors
213 def getAuthors(self):
214 return []
216 def getFirstCommitDate(self):
217 return datetime.datetime.now()
219 def getLastCommitDate(self):
220 return datetime.datetime.now()
222 def getStampCreated(self):
223 return self.stamp_created
225 def getTags(self):
226 return []
228 def getTotalAuthors(self):
229 return -1
231 def getTotalCommits(self):
232 return -1
234 def getTotalFiles(self):
235 return -1
237 def getTotalLOC(self):
238 return -1
241 # Save cacheable data
242 def saveCache(self, cachefile):
243 print 'Saving cache...'
244 tempfile = cachefile + '.tmp'
245 f = open(tempfile, 'wb')
246 #pickle.dump(self.cache, f)
247 data = zlib.compress(pickle.dumps(self.cache))
248 f.write(data)
249 f.close()
250 try:
251 os.remove(cachefile)
252 except OSError:
253 pass
254 os.rename(tempfile, cachefile)
256 class GitDataCollector(DataCollector):
257 def collect(self, dir):
258 DataCollector.collect(self, dir)
260 self.total_authors += int(getpipeoutput(['git shortlog -s %s' % getcommitrange(), 'wc -l']))
261 #self.total_lines = int(getoutput('git-ls-files -z |xargs -0 cat |wc -l'))
263 # tags
264 lines = getpipeoutput(['git show-ref --tags']).split('\n')
265 for line in lines:
266 if len(line) == 0:
267 continue
268 (hash, tag) = line.split(' ')
270 tag = tag.replace('refs/tags/', '')
271 output = getpipeoutput(['git log "%s" --pretty=format:"%%at %%aN" -n 1' % hash])
272 if len(output) > 0:
273 parts = output.split(' ')
274 stamp = 0
275 try:
276 stamp = int(parts[0])
277 except ValueError:
278 stamp = 0
279 self.tags[tag] = { 'stamp': stamp, 'hash' : hash, 'date' : datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), 'commits': 0, 'authors': {} }
281 # collect info on tags, starting from latest
282 tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), self.tags.items()))))
283 prev = None
284 for tag in reversed(tags_sorted_by_date_desc):
285 cmd = 'git shortlog -s "%s"' % tag
286 if prev != None:
287 cmd += ' "^%s"' % prev
288 output = getpipeoutput([cmd])
289 if len(output) == 0:
290 continue
291 prev = tag
292 for line in output.split('\n'):
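# shortlog -s lines look like "    12<TAB>Author Name"; after the split below,
# parts[1] is the commit count and parts[2] is the author name.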
293 parts = re.split('\s+', line, 2)
294 commits = int(parts[1])
295 author = parts[2]
296 if author in conf['merge_authors']:
297 author = conf['merge_authors'][author]
298 self.tags[tag]['commits'] += commits
299 self.tags[tag]['authors'][author] = commits
301 # Collect revision statistics
302 # Outputs "<stamp> <date> <time> <timezone> <author> '<' <mail> '>'"
303 lines = getpipeoutput(['git rev-list --pretty=format:"%%at %%ai %%aN <%%aE>" %s' % getcommitrange('HEAD'), 'grep -v ^commit']).split('\n')
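# A typical (illustrative) line: "1335787200 2012-04-30 15:00:00 +0300 Jane Doe <jane@example.com>"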
304 for line in lines:
305 parts = line.split(' ', 4)
306 author = ''
307 try:
308 stamp = int(parts[0])
309 except ValueError:
310 stamp = 0
311 timezone = parts[3]
312 author, mail = parts[4].split('<', 1)
313 author = author.rstrip()
314 if author in conf['merge_authors']:
315 author = conf['merge_authors'][author]
316 mail = mail.rstrip('>')
317 domain = '?'
318 if mail.find('@') != -1:
319 domain = mail.rsplit('@', 1)[1]
320 date = datetime.datetime.fromtimestamp(float(stamp))
322 # First and last commit stamp (may be in any order because of cherry-picking and patches)
323 if stamp > self.last_commit_stamp:
324 self.last_commit_stamp = stamp
325 if self.first_commit_stamp == 0 or stamp < self.first_commit_stamp:
326 self.first_commit_stamp = stamp
328 # activity
329 # hour
330 hour = date.hour
331 self.activity_by_hour_of_day[hour] = self.activity_by_hour_of_day.get(hour, 0) + 1
332 # most active hour?
333 if self.activity_by_hour_of_day[hour] > self.activity_by_hour_of_day_busiest:
334 self.activity_by_hour_of_day_busiest = self.activity_by_hour_of_day[hour]
336 # day of week
337 day = date.weekday()
338 self.activity_by_day_of_week[day] = self.activity_by_day_of_week.get(day, 0) + 1
340 # domain stats
341 if domain not in self.domains:
342 self.domains[domain] = {}
343 # commits
344 self.domains[domain]['commits'] = self.domains[domain].get('commits', 0) + 1
346 # hour of week
347 if day not in self.activity_by_hour_of_week:
348 self.activity_by_hour_of_week[day] = {}
349 self.activity_by_hour_of_week[day][hour] = self.activity_by_hour_of_week[day].get(hour, 0) + 1
350 # most active hour?
351 if self.activity_by_hour_of_week[day][hour] > self.activity_by_hour_of_week_busiest:
352 self.activity_by_hour_of_week_busiest = self.activity_by_hour_of_week[day][hour]
354 # month of year
355 month = date.month
356 self.activity_by_month_of_year[month] = self.activity_by_month_of_year.get(month, 0) + 1
358 # yearly/weekly activity
359 yyw = date.strftime('%Y-%W')
360 self.activity_by_year_week[yyw] = self.activity_by_year_week.get(yyw, 0) + 1
361 if self.activity_by_year_week_peak < self.activity_by_year_week[yyw]:
362 self.activity_by_year_week_peak = self.activity_by_year_week[yyw]
364 # author stats
365 if author not in self.authors:
366 self.authors[author] = {}
367 # commits, note again that commits may be in any date order because of cherry-picking and patches
368 if 'last_commit_stamp' not in self.authors[author]:
369 self.authors[author]['last_commit_stamp'] = stamp
370 if stamp > self.authors[author]['last_commit_stamp']:
371 self.authors[author]['last_commit_stamp'] = stamp
372 if 'first_commit_stamp' not in self.authors[author]:
373 self.authors[author]['first_commit_stamp'] = stamp
374 if stamp < self.authors[author]['first_commit_stamp']:
375 self.authors[author]['first_commit_stamp'] = stamp
377 # author of the month/year
378 yymm = date.strftime('%Y-%m')
379 if yymm in self.author_of_month:
380 self.author_of_month[yymm][author] = self.author_of_month[yymm].get(author, 0) + 1
381 else:
382 self.author_of_month[yymm] = {}
383 self.author_of_month[yymm][author] = 1
384 self.commits_by_month[yymm] = self.commits_by_month.get(yymm, 0) + 1
386 yy = date.year
387 if yy in self.author_of_year:
388 self.author_of_year[yy][author] = self.author_of_year[yy].get(author, 0) + 1
389 else:
390 self.author_of_year[yy] = {}
391 self.author_of_year[yy][author] = 1
392 self.commits_by_year[yy] = self.commits_by_year.get(yy, 0) + 1
394 # authors: active days
395 yymmdd = date.strftime('%Y-%m-%d')
396 if 'last_active_day' not in self.authors[author]:
397 self.authors[author]['last_active_day'] = yymmdd
398 self.authors[author]['active_days'] = set([yymmdd])
399 elif yymmdd != self.authors[author]['last_active_day']:
400 self.authors[author]['last_active_day'] = yymmdd
401 self.authors[author]['active_days'].add(yymmdd)
403 # project: active days
404 if yymmdd != self.last_active_day:
405 self.last_active_day = yymmdd
406 self.active_days.add(yymmdd)
408 # timezone
409 self.commits_by_timezone[timezone] = self.commits_by_timezone.get(timezone, 0) + 1
411 # TODO Optimize this, it's the worst bottleneck
412 # outputs "<stamp> <files>" for each revision
413 revlines = getpipeoutput(['git rev-list --pretty=format:"%%at %%T" %s' % getcommitrange('HEAD'), 'grep -v ^commit']).strip().split('\n')
414 lines = []
415 for revline in revlines:
416 time, rev = revline.split(' ')
417 linecount = self.getFilesInCommit(rev)
418 lines.append('%d %d' % (int(time), linecount))
420 self.total_commits += len(lines)
421 for line in lines:
422 parts = line.split(' ')
423 if len(parts) != 2:
424 continue
425 (stamp, files) = parts[0:2]
426 try:
427 self.files_by_stamp[int(stamp)] = int(files)
428 except ValueError:
429 print 'Warning: failed to parse line "%s"' % line
431 # extensions and size of files
432 lines = getpipeoutput(['git ls-tree -r -l -z %s' % getcommitrange('HEAD', end_only = True)]).split('\000')
433 for line in lines:
434 if len(line) == 0:
435 continue
436 parts = re.split('\s+', line, 5)
437 if parts[0] == '160000' and parts[3] == '-':
438 # skip submodules
439 continue
440 sha1 = parts[2]
441 size = int(parts[3])
442 fullpath = parts[4]
444 self.total_size += size
445 self.total_files += 1
447 filename = fullpath.split('/')[-1] # strip directories
448 if filename.find('.') == -1 or filename.rfind('.') == 0:
449 ext = ''
450 else:
451 ext = filename[(filename.rfind('.') + 1):]
452 if len(ext) > conf['max_ext_length']:
453 ext = ''
455 if ext not in self.extensions:
456 self.extensions[ext] = {'files': 0, 'lines': 0}
458 self.extensions[ext]['files'] += 1
459 try:
460 self.extensions[ext]['lines'] += self.getLinesInBlob(sha1)
461 except:
462 print 'Warning: Could not count lines for file "%s"' % line
464 # line statistics
465 # outputs:
466 # N files changed, N insertions (+), N deletions(-)
467 # <stamp> <author>
468 self.changes_by_date = {} # stamp -> { files, ins, del }
469 # computation of lines of code by date is better done
470 # on a linear history.
471 extra = ''
472 if conf['linear_linestats']:
473 extra = '--first-parent -m'
474 lines = getpipeoutput(['git log --shortstat %s --pretty=format:"%%at %%aN" %s' % (extra, getcommitrange('HEAD'))]).split('\n')
475 lines.reverse()
476 files = 0; inserted = 0; deleted = 0; total_lines = 0
477 author = None
478 for line in lines:
479 if len(line) == 0:
480 continue
482 # <stamp> <author>
483 if re.search('files? changed', line) == None:
484 pos = line.find(' ')
485 if pos != -1:
486 try:
487 (stamp, author) = (int(line[:pos]), line[pos+1:])
488 if author in conf['merge_authors']:
489 author = conf['merge_authors'][author]
490 self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted, 'lines': total_lines }
492 date = datetime.datetime.fromtimestamp(stamp)
493 yymm = date.strftime('%Y-%m')
494 self.lines_added_by_month[yymm] = self.lines_added_by_month.get(yymm, 0) + inserted
495 self.lines_removed_by_month[yymm] = self.lines_removed_by_month.get(yymm, 0) + deleted
497 yy = date.year
498 self.lines_added_by_year[yy] = self.lines_added_by_year.get(yy,0) + inserted
499 self.lines_removed_by_year[yy] = self.lines_removed_by_year.get(yy, 0) + deleted
501 files, inserted, deleted = 0, 0, 0
502 except ValueError:
503 print 'Warning: unexpected line "%s"' % line
504 else:
505 print 'Warning: unexpected line "%s"' % line
506 else:
507 numbers = getstatsummarycounts(line)
509 if len(numbers) == 3:
510 (files, inserted, deleted) = map(lambda el : int(el), numbers)
511 total_lines += inserted
512 total_lines -= deleted
513 self.total_lines_added += inserted
514 self.total_lines_removed += deleted
516 else:
517 print 'Warning: failed to handle line "%s"' % line
518 (files, inserted, deleted) = (0, 0, 0)
519 #self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted }
520 self.total_lines += total_lines
522 # Per-author statistics
524 # defined for (stamp, author) only if the author committed at that timestamp.
525 self.changes_by_date_by_author = {} # stamp -> author -> lines_added
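# Illustrative shape: changes_by_date_by_author[1335787200]['Jane Doe'] ==
# {'lines_added': 1024, 'commits': 37} -- cumulative totals for that author at that commit.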
527 # Similar to the above, but never use --first-parent
528 # (we need to walk through every commit to know who
529 # committed what, not just through mainline)
530 lines = getpipeoutput(['git log --shortstat --date-order --pretty=format:"%%at %%aN" %s' % (getcommitrange('HEAD'))]).split('\n')
531 lines.reverse()
532 files = 0; inserted = 0; deleted = 0
533 author = None
534 stamp = 0
535 for line in lines:
536 if len(line) == 0:
537 continue
539 # <stamp> <author>
540 if re.search('files? changed', line) == None:
541 pos = line.find(' ')
542 if pos != -1:
543 try:
544 oldstamp = stamp
545 (stamp, author) = (int(line[:pos]), line[pos+1:])
546 if author in conf['merge_authors']:
547 author = conf['merge_authors'][author]
548 if oldstamp > stamp:
549 # clock skew, keep old timestamp to avoid having ugly graph
550 stamp = oldstamp
551 if author not in self.authors:
552 self.authors[author] = { 'lines_added' : 0, 'lines_removed' : 0, 'commits' : 0}
553 self.authors[author]['commits'] = self.authors[author].get('commits', 0) + 1
554 self.authors[author]['lines_added'] = self.authors[author].get('lines_added', 0) + inserted
555 self.authors[author]['lines_removed'] = self.authors[author].get('lines_removed', 0) + deleted
556 if stamp not in self.changes_by_date_by_author:
557 self.changes_by_date_by_author[stamp] = {}
558 if author not in self.changes_by_date_by_author[stamp]:
559 self.changes_by_date_by_author[stamp][author] = {}
560 self.changes_by_date_by_author[stamp][author]['lines_added'] = self.authors[author]['lines_added']
561 self.changes_by_date_by_author[stamp][author]['commits'] = self.authors[author]['commits']
562 files, inserted, deleted = 0, 0, 0
563 except ValueError:
564 print 'Warning: unexpected line "%s"' % line
565 else:
566 print 'Warning: unexpected line "%s"' % line
567 else:
568 numbers = getstatsummarycounts(line);
570 if len(numbers) == 3:
571 (files, inserted, deleted) = map(lambda el : int(el), numbers)
572 else:
573 print 'Warning: failed to handle line "%s"' % line
574 (files, inserted, deleted) = (0, 0, 0)
576 def refine(self):
577 # authors
578 # name -> {place_by_commits, commits_frac, date_first, date_last, timedelta}
579 self.authors_by_commits = getkeyssortedbyvaluekey(self.authors, 'commits')
580 self.authors_by_commits.reverse() # most first
581 for i, name in enumerate(self.authors_by_commits):
582 self.authors[name]['place_by_commits'] = i + 1
584 for name in self.authors.keys():
585 a = self.authors[name]
586 a['commits_frac'] = (100 * float(a['commits'])) / self.getTotalCommits()
587 date_first = datetime.datetime.fromtimestamp(a['first_commit_stamp'])
588 date_last = datetime.datetime.fromtimestamp(a['last_commit_stamp'])
589 delta = date_last - date_first
590 a['date_first'] = date_first.strftime('%Y-%m-%d')
591 a['date_last'] = date_last.strftime('%Y-%m-%d')
592 a['timedelta'] = delta
593 if 'lines_added' not in a: a['lines_added'] = 0
594 if 'lines_removed' not in a: a['lines_removed'] = 0
596 def getActiveDays(self):
597 return self.active_days
599 def getActivityByDayOfWeek(self):
600 return self.activity_by_day_of_week
602 def getActivityByHourOfDay(self):
603 return self.activity_by_hour_of_day
605 def getAuthorInfo(self, author):
606 return self.authors[author]
608 def getAuthors(self, limit = None):
609 res = getkeyssortedbyvaluekey(self.authors, 'commits')
610 res.reverse()
611 return res[:limit]
613 def getCommitDeltaDays(self):
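# Whole UTC days between the first and last commit, inclusive (stamps are truncated to day buckets first).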
614 return (self.last_commit_stamp / 86400 - self.first_commit_stamp / 86400) + 1
616 def getDomainInfo(self, domain):
617 return self.domains[domain]
619 def getDomains(self):
620 return self.domains.keys()
622 def getFilesInCommit(self, rev):
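# rev is the tree hash from 'git rev-list --pretty=format:"%at %T"'; the file count is
# cached under self.cache['files_in_tree'] so identical trees are only counted once.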
623 try:
624 res = self.cache['files_in_tree'][rev]
625 except:
626 res = int(getpipeoutput(['git ls-tree -r --name-only "%s"' % rev, 'wc -l']).split('\n')[0])
627 if 'files_in_tree' not in self.cache:
628 self.cache['files_in_tree'] = {}
629 self.cache['files_in_tree'][rev] = res
631 return res
633 def getFirstCommitDate(self):
634 return datetime.datetime.fromtimestamp(self.first_commit_stamp)
636 def getLastCommitDate(self):
637 return datetime.datetime.fromtimestamp(self.last_commit_stamp)
639 def getLinesInBlob(self, sha1):
640 try:
641 res = self.cache['lines_in_blob'][sha1]
642 except:
643 res = int(getpipeoutput(['git cat-file blob %s' % sha1, 'wc -l']).split()[0])
644 if 'lines_in_blob' not in self.cache:
645 self.cache['lines_in_blob'] = {}
646 self.cache['lines_in_blob'][sha1] = res
647 return res
649 def getTags(self):
650 lines = getpipeoutput(['git show-ref --tags', 'cut -d/ -f3'])
651 return lines.split('\n')
653 def getTagDate(self, tag):
654 return self.revToDate('tags/' + tag)
656 def getTotalAuthors(self):
657 return self.total_authors
659 def getTotalCommits(self):
660 return self.total_commits
662 def getTotalFiles(self):
663 return self.total_files
665 def getTotalLOC(self):
666 return self.total_lines
668 def getTotalSize(self):
669 return self.total_size
671 def revToDate(self, rev):
672 stamp = int(getpipeoutput(['git log --pretty=format:%%at "%s" -n 1' % rev]))
673 return datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d')
675 class ReportCreator:
676 """Creates the actual report based on given data."""
677 def __init__(self):
678 pass
680 def create(self, data, path):
681 self.data = data
682 self.path = path
684 def html_linkify(text):
685 return text.lower().replace(' ', '_')
687 def html_header(level, text):
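# e.g. html_header(2, 'Hour of Day') -> '\n<h2><a href="#hour_of_day" name="hour_of_day">Hour of Day</a></h2>\n\n'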
688 name = html_linkify(text)
689 return '\n<h%d><a href="#%s" name="%s">%s</a></h%d>\n\n' % (level, name, name, text, level)
691 class HTMLReportCreator(ReportCreator):
692 def create(self, data, path):
693 ReportCreator.create(self, data, path)
694 self.title = data.projectname
696 # copy static files. Looks in the binary directory, ../share/gitstats and /usr/share/gitstats
697 binarypath = os.path.dirname(os.path.abspath(__file__))
698 secondarypath = os.path.join(binarypath, '..', 'share', 'gitstats')
699 basedirs = [binarypath, secondarypath, '/usr/share/gitstats']
700 for file in ('gitstats.css', 'sortable.js', 'arrow-up.gif', 'arrow-down.gif', 'arrow-none.gif'):
701 for base in basedirs:
702 src = base + '/' + file
703 if os.path.exists(src):
704 shutil.copyfile(src, path + '/' + file)
705 break
706 else:
707 print 'Warning: "%s" not found, so not copied (searched: %s)' % (file, basedirs)
709 f = open(path + "/index.html", 'w')
710 format = '%Y-%m-%d %H:%M:%S'
711 self.printHeader(f)
713 f.write('<h1>GitStats - %s</h1>' % data.projectname)
715 self.printNav(f)
717 f.write('<dl>')
718 f.write('<dt>Project name</dt><dd>%s</dd>' % (data.projectname))
719 f.write('<dt>Generated</dt><dd>%s (in %d seconds)</dd>' % (datetime.datetime.now().strftime(format), time.time() - data.getStampCreated()))
720 f.write('<dt>Generator</dt><dd><a href="http://gitstats.sourceforge.net/">GitStats</a> (version %s), %s, %s</dd>' % (getversion(), getgitversion(), getgnuplotversion()))
721 f.write('<dt>Report Period</dt><dd>%s to %s</dd>' % (data.getFirstCommitDate().strftime(format), data.getLastCommitDate().strftime(format)))
722 f.write('<dt>Age</dt><dd>%d days, %d active days (%3.2f%%)</dd>' % (data.getCommitDeltaDays(), len(data.getActiveDays()), (100.0 * len(data.getActiveDays()) / data.getCommitDeltaDays())))
723 f.write('<dt>Total Files</dt><dd>%s</dd>' % data.getTotalFiles())
724 f.write('<dt>Total Lines of Code</dt><dd>%s (%d added, %d removed)</dd>' % (data.getTotalLOC(), data.total_lines_added, data.total_lines_removed))
725 f.write('<dt>Total Commits</dt><dd>%s (average %.1f commits per active day, %.1f per all days)</dd>' % (data.getTotalCommits(), float(data.getTotalCommits()) / len(data.getActiveDays()), float(data.getTotalCommits()) / data.getCommitDeltaDays()))
726 f.write('<dt>Authors</dt><dd>%s (average %.1f commits per author)</dd>' % (data.getTotalAuthors(), (1.0 * data.getTotalCommits()) / data.getTotalAuthors()))
727 f.write('</dl>')
729 f.write('</body>\n</html>')
730 f.close()
733 # Activity
734 f = open(path + '/activity.html', 'w')
735 self.printHeader(f)
736 f.write('<h1>Activity</h1>')
737 self.printNav(f)
739 #f.write('<h2>Last 30 days</h2>')
741 #f.write('<h2>Last 12 months</h2>')
743 # Weekly activity
744 WEEKS = 32
745 f.write(html_header(2, 'Weekly activity'))
746 f.write('<p>Last %d weeks</p>' % WEEKS)
748 # generate weeks to show (previous N weeks from now)
749 now = datetime.datetime.now()
750 deltaweek = datetime.timedelta(7)
751 weeks = []
752 stampcur = now
753 for i in range(0, WEEKS):
754 weeks.insert(0, stampcur.strftime('%Y-%W'))
755 stampcur -= deltaweek
757 # top row: commits & bar
758 f.write('<table class="noborders"><tr>')
759 for i in range(0, WEEKS):
760 commits = 0
761 if weeks[i] in data.activity_by_year_week:
762 commits = data.activity_by_year_week[weeks[i]]
764 percentage = 0
765 if weeks[i] in data.activity_by_year_week:
766 percentage = float(data.activity_by_year_week[weeks[i]]) / data.activity_by_year_week_peak
767 height = max(1, int(200 * percentage))
768 f.write('<td style="text-align: center; vertical-align: bottom">%d<div style="display: block; background-color: red; width: 20px; height: %dpx"></div></td>' % (commits, height))
770 # bottom row: year/week
771 f.write('</tr><tr>')
772 for i in range(0, WEEKS):
773 f.write('<td>%s</td>' % (WEEKS - i))
774 f.write('</tr></table>')
776 # Hour of Day
777 f.write(html_header(2, 'Hour of Day'))
778 hour_of_day = data.getActivityByHourOfDay()
779 f.write('<table><tr><th>Hour</th>')
780 for i in range(0, 24):
781 f.write('<th>%d</th>' % i)
782 f.write('</tr>\n<tr><th>Commits</th>')
783 fp = open(path + '/hour_of_day.dat', 'w')
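# Cell colour: the red channel scales from roughly 127 (quiet hours) up to 255 for the busiest hour.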
784 for i in range(0, 24):
785 if i in hour_of_day:
786 r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
787 f.write('<td style="background-color: rgb(%d, 0, 0)">%d</td>' % (r, hour_of_day[i]))
788 fp.write('%d %d\n' % (i, hour_of_day[i]))
789 else:
790 f.write('<td>0</td>')
791 fp.write('%d 0\n' % i)
792 fp.close()
793 f.write('</tr>\n<tr><th>%</th>')
794 totalcommits = data.getTotalCommits()
795 for i in range(0, 24):
796 if i in hour_of_day:
797 r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
798 f.write('<td style="background-color: rgb(%d, 0, 0)">%.2f</td>' % (r, (100.0 * hour_of_day[i]) / totalcommits))
799 else:
800 f.write('<td>0.00</td>')
801 f.write('</tr></table>')
802 f.write('<img src="hour_of_day.png" alt="Hour of Day" />')
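# Note: hour_of_day.dat is rewritten below with 1-based hours, presumably so the bars
# line up with the gnuplot xrange [0.5:24.5] used in hour_of_day.plot.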
803 fg = open(path + '/hour_of_day.dat', 'w')
804 for i in range(0, 24):
805 if i in hour_of_day:
806 fg.write('%d %d\n' % (i + 1, hour_of_day[i]))
807 else:
808 fg.write('%d 0\n' % (i + 1))
809 fg.close()
811 # Day of Week
812 f.write(html_header(2, 'Day of Week'))
813 day_of_week = data.getActivityByDayOfWeek()
814 f.write('<div class="vtable"><table>')
815 f.write('<tr><th>Day</th><th>Total (%)</th></tr>')
816 fp = open(path + '/day_of_week.dat', 'w')
817 for d in range(0, 7):
818 commits = 0
819 if d in day_of_week:
820 commits = day_of_week[d]
821 fp.write('%d %s %d\n' % (d + 1, WEEKDAYS[d], commits))
822 f.write('<tr>')
823 f.write('<th>%s</th>' % (WEEKDAYS[d]))
824 if d in day_of_week:
825 f.write('<td>%d (%.2f%%)</td>' % (day_of_week[d], (100.0 * day_of_week[d]) / totalcommits))
826 else:
827 f.write('<td>0</td>')
828 f.write('</tr>')
829 f.write('</table></div>')
830 f.write('<img src="day_of_week.png" alt="Day of Week" />')
831 fp.close()
833 # Hour of Week
834 f.write(html_header(2, 'Hour of Week'))
835 f.write('<table>')
837 f.write('<tr><th>Weekday</th>')
838 for hour in range(0, 24):
839 f.write('<th>%d</th>' % (hour))
840 f.write('</tr>')
842 for weekday in range(0, 7):
843 f.write('<tr><th>%s</th>' % (WEEKDAYS[weekday]))
844 for hour in range(0, 24):
845 try:
846 commits = data.activity_by_hour_of_week[weekday][hour]
847 except KeyError:
848 commits = 0
849 if commits != 0:
850 f.write('<td')
851 r = 127 + int((float(commits) / data.activity_by_hour_of_week_busiest) * 128)
852 f.write(' style="background-color: rgb(%d, 0, 0)"' % r)
853 f.write('>%d</td>' % commits)
854 else:
855 f.write('<td></td>')
856 f.write('</tr>')
858 f.write('</table>')
860 # Month of Year
861 f.write(html_header(2, 'Month of Year'))
862 f.write('<div class="vtable"><table>')
863 f.write('<tr><th>Month</th><th>Commits (%)</th></tr>')
864 fp = open (path + '/month_of_year.dat', 'w')
865 for mm in range(1, 13):
866 commits = 0
867 if mm in data.activity_by_month_of_year:
868 commits = data.activity_by_month_of_year[mm]
869 f.write('<tr><td>%d</td><td>%d (%.2f %%)</td></tr>' % (mm, commits, (100.0 * commits) / data.getTotalCommits()))
870 fp.write('%d %d\n' % (mm, commits))
871 fp.close()
872 f.write('</table></div>')
873 f.write('<img src="month_of_year.png" alt="Month of Year" />')
875 # Commits by year/month
876 f.write(html_header(2, 'Commits by year/month'))
877 f.write('<div class="vtable"><table><tr><th>Month</th><th>Commits</th><th>Lines added</th><th>Lines removed</th></tr>')
878 for yymm in reversed(sorted(data.commits_by_month.keys())):
879 f.write('<tr><td>%s</td><td>%d</td><td>%d</td><td>%d</td></tr>' % (yymm, data.commits_by_month.get(yymm,0), data.lines_added_by_month.get(yymm,0), data.lines_removed_by_month.get(yymm,0)))
880 f.write('</table></div>')
881 f.write('<img src="commits_by_year_month.png" alt="Commits by year/month" />')
882 fg = open(path + '/commits_by_year_month.dat', 'w')
883 for yymm in sorted(data.commits_by_month.keys()):
884 fg.write('%s %s\n' % (yymm, data.commits_by_month[yymm]))
885 fg.close()
887 # Commits by year
888 f.write(html_header(2, 'Commits by Year'))
889 f.write('<div class="vtable"><table><tr><th>Year</th><th>Commits (% of all)</th><th>Lines added</th><th>Lines removed</th></tr>')
890 for yy in reversed(sorted(data.commits_by_year.keys())):
891 f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d</td><td>%d</td></tr>' % (yy, data.commits_by_year.get(yy,0), (100.0 * data.commits_by_year.get(yy,0)) / data.getTotalCommits(), data.lines_added_by_year.get(yy,0), data.lines_removed_by_year.get(yy,0)))
892 f.write('</table></div>')
893 f.write('<img src="commits_by_year.png" alt="Commits by Year" />')
894 fg = open(path + '/commits_by_year.dat', 'w')
895 for yy in sorted(data.commits_by_year.keys()):
896 fg.write('%d %d\n' % (yy, data.commits_by_year[yy]))
897 fg.close()
899 # Commits by timezone
900 f.write(html_header(2, 'Commits by Timezone'))
901 f.write('<table><tr>')
902 f.write('<th>Timezone</th><th>Commits</th>')
903 max_commits_on_tz = max(data.commits_by_timezone.values())
904 for i in sorted(data.commits_by_timezone.keys(), key = lambda n : int(n)):
905 commits = data.commits_by_timezone[i]
906 r = 127 + int((float(commits) / max_commits_on_tz) * 128)
907 f.write('<tr><th>%s</th><td style="background-color: rgb(%d, 0, 0)">%d</td></tr>' % (i, r, commits))
908 f.write('</tr></table>')
910 f.write('</body></html>')
911 f.close()
914 # Authors
915 f = open(path + '/authors.html', 'w')
916 self.printHeader(f)
918 f.write('<h1>Authors</h1>')
919 self.printNav(f)
921 # Authors :: List of authors
922 f.write(html_header(2, 'List of Authors'))
924 f.write('<table class="authors sortable" id="authors">')
925 f.write('<tr><th>Author</th><th>Commits (%)</th><th>+ lines</th><th>- lines</th><th>First commit</th><th>Last commit</th><th class="unsortable">Age</th><th>Active days</th><th># by commits</th></tr>')
926 for author in data.getAuthors(conf['max_authors']):
927 info = data.getAuthorInfo(author)
928 f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d</td><td>%d</td><td>%s</td><td>%s</td><td>%s</td><td>%d</td><td>%d</td></tr>' % (author, info['commits'], info['commits_frac'], info['lines_added'], info['lines_removed'], info['date_first'], info['date_last'], info['timedelta'], len(info['active_days']), info['place_by_commits']))
929 f.write('</table>')
931 allauthors = data.getAuthors()
932 if len(allauthors) > conf['max_authors']:
933 rest = allauthors[conf['max_authors']:]
934 f.write('<p class="moreauthors">These didn\'t make it to the top: %s</p>' % ', '.join(rest))
936 f.write(html_header(2, 'Cumulated Added Lines of Code per Author'))
937 f.write('<img src="lines_of_code_by_author.png" alt="Lines of code per Author" />')
938 if len(allauthors) > conf['max_authors']:
939 f.write('<p class="moreauthors">Only top %d authors shown</p>' % conf['max_authors'])
941 f.write(html_header(2, 'Commits per Author'))
942 f.write('<img src="commits_by_author.png" alt="Commits per Author" />')
943 if len(allauthors) > conf['max_authors']:
944 f.write('<p class="moreauthors">Only top %d authors shown</p>' % conf['max_authors'])
946 fgl = open(path + '/lines_of_code_by_author.dat', 'w')
947 fgc = open(path + '/commits_by_author.dat', 'w')
949 lines_by_authors = {} # cumulated added lines by
950 # author. to save memory,
951 # changes_by_date_by_author[stamp][author] is defined
952 # only at points where author commits.
953 # lines_by_authors allows us to generate all the
954 # points in the .dat file.
956 # Don't rely on getAuthors to give the same order each
957 # time. Be robust and keep the list in a variable.
958 commits_by_authors = {} # cumulative commits by author at each plotted point
960 self.authors_to_plot = data.getAuthors(conf['max_authors'])
961 for author in self.authors_to_plot:
962 lines_by_authors[author] = 0
963 commits_by_authors[author] = 0
964 for stamp in sorted(data.changes_by_date_by_author.keys()):
965 fgl.write('%d' % stamp)
966 fgc.write('%d' % stamp)
967 for author in self.authors_to_plot:
968 if author in data.changes_by_date_by_author[stamp].keys():
969 lines_by_authors[author] = data.changes_by_date_by_author[stamp][author]['lines_added']
970 commits_by_authors[author] = data.changes_by_date_by_author[stamp][author]['commits']
971 fgl.write(' %d' % lines_by_authors[author])
972 fgc.write(' %d' % commits_by_authors[author])
973 fgl.write('\n')
974 fgc.write('\n')
975 fgl.close()
976 fgc.close()
978 # Authors :: Author of Month
979 f.write(html_header(2, 'Author of Month'))
980 f.write('<table class="sortable" id="aom">')
981 f.write('<tr><th>Month</th><th>Author</th><th>Commits (%%)</th><th class="unsortable">Next top %d</th><th>Number of authors</th></tr>' % conf['authors_top'])
982 for yymm in reversed(sorted(data.author_of_month.keys())):
983 authordict = data.author_of_month[yymm]
984 authors = getkeyssortedbyvalues(authordict)
985 authors.reverse()
986 commits = data.author_of_month[yymm][authors[0]]
987 next = ', '.join(authors[1:conf['authors_top']+1])
988 f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td><td>%d</td></tr>' % (yymm, authors[0], commits, (100.0 * commits) / data.commits_by_month[yymm], data.commits_by_month[yymm], next, len(authors)))
990 f.write('</table>')
992 f.write(html_header(2, 'Author of Year'))
993 f.write('<table class="sortable" id="aoy"><tr><th>Year</th><th>Author</th><th>Commits (%%)</th><th class="unsortable">Next top %d</th><th>Number of authors</th></tr>' % conf['authors_top'])
994 for yy in reversed(sorted(data.author_of_year.keys())):
995 authordict = data.author_of_year[yy]
996 authors = getkeyssortedbyvalues(authordict)
997 authors.reverse()
998 commits = data.author_of_year[yy][authors[0]]
999 next = ', '.join(authors[1:conf['authors_top']+1])
1000 f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td><td>%d</td></tr>' % (yy, authors[0], commits, (100.0 * commits) / data.commits_by_year[yy], data.commits_by_year[yy], next, len(authors)))
1001 f.write('</table>')
1003 # Domains
1004 f.write(html_header(2, 'Commits by Domains'))
1005 domains_by_commits = getkeyssortedbyvaluekey(data.domains, 'commits')
1006 domains_by_commits.reverse() # most first
1007 f.write('<div class="vtable"><table>')
1008 f.write('<tr><th>Domains</th><th>Total (%)</th></tr>')
1009 fp = open(path + '/domains.dat', 'w')
1010 n = 0
1011 for domain in domains_by_commits:
1012 if n == conf['max_domains']:
1013 break
1014 commits = 0
1015 n += 1
1016 info = data.getDomainInfo(domain)
1017 fp.write('%s %d %d\n' % (domain, n , info['commits']))
1018 f.write('<tr><th>%s</th><td>%d (%.2f%%)</td></tr>' % (domain, info['commits'], (100.0 * info['commits'] / totalcommits)))
1019 f.write('</table></div>')
1020 f.write('<img src="domains.png" alt="Commits by Domains" />')
1021 fp.close()
1023 f.write('</body></html>')
1024 f.close()
1027 # Files
1028 f = open(path + '/files.html', 'w')
1029 self.printHeader(f)
1030 f.write('<h1>Files</h1>')
1031 self.printNav(f)
1033 f.write('<dl>\n')
1034 f.write('<dt>Total files</dt><dd>%d</dd>' % data.getTotalFiles())
1035 f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
1036 try:
1037 f.write('<dt>Average file size</dt><dd>%.2f bytes</dd>' % (float(data.getTotalSize()) / data.getTotalFiles()))
1038 except ZeroDivisionError:
1039 pass
1040 f.write('</dl>\n')
1042 # Files :: File count by date
1043 f.write(html_header(2, 'File count by date'))
1045 # use set to get rid of duplicate/unnecessary entries
1046 files_by_date = set()
1047 for stamp in sorted(data.files_by_stamp.keys()):
1048 files_by_date.add('%s %d' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))
1050 fg = open(path + '/files_by_date.dat', 'w')
1051 for line in sorted(list(files_by_date)):
1052 fg.write('%s\n' % line)
1053 #for stamp in sorted(data.files_by_stamp.keys()):
1054 # fg.write('%s %d\n' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))
1055 fg.close()
1057 f.write('<img src="files_by_date.png" alt="Files by Date" />')
1059 #f.write('<h2>Average file size by date</h2>')
1061 # Files :: Extensions
1062 f.write(html_header(2, 'Extensions'))
1063 f.write('<table class="sortable" id="ext"><tr><th>Extension</th><th>Files (%)</th><th>Lines (%)</th><th>Lines/file</th></tr>')
1064 for ext in sorted(data.extensions.keys()):
1065 files = data.extensions[ext]['files']
1066 lines = data.extensions[ext]['lines']
1067 try:
1068 loc_percentage = (100.0 * lines) / data.getTotalLOC()
1069 except ZeroDivisionError:
1070 loc_percentage = 0
1071 f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d (%.2f%%)</td><td>%d</td></tr>' % (ext, files, (100.0 * files) / data.getTotalFiles(), lines, loc_percentage, lines / files))
1072 f.write('</table>')
1074 f.write('</body></html>')
1075 f.close()
1078 # Lines
1079 f = open(path + '/lines.html', 'w')
1080 self.printHeader(f)
1081 f.write('<h1>Lines</h1>')
1082 self.printNav(f)
1084 f.write('<dl>\n')
1085 f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
1086 f.write('</dl>\n')
1088 f.write(html_header(2, 'Lines of Code'))
1089 f.write('<img src="lines_of_code.png" />')
1091 fg = open(path + '/lines_of_code.dat', 'w')
1092 for stamp in sorted(data.changes_by_date.keys()):
1093 fg.write('%d %d\n' % (stamp, data.changes_by_date[stamp]['lines']))
1094 fg.close()
1096 f.write('</body></html>')
1097 f.close()
1100 # tags.html
1101 f = open(path + '/tags.html', 'w')
1102 self.printHeader(f)
1103 f.write('<h1>Tags</h1>')
1104 self.printNav(f)
1106 f.write('<dl>')
1107 f.write('<dt>Total tags</dt><dd>%d</dd>' % len(data.tags))
1108 if len(data.tags) > 0:
1109 f.write('<dt>Average commits per tag</dt><dd>%.2f</dd>' % (1.0 * data.getTotalCommits() / len(data.tags)))
1110 f.write('</dl>')
1112 f.write('<table class="tags">')
1113 f.write('<tr><th>Name</th><th>Date</th><th>Commits</th><th>Authors</th></tr>')
1114 # sort the tags by date desc
1115 tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), data.tags.items()))))
1116 for tag in tags_sorted_by_date_desc:
1117 authorinfo = []
1118 self.authors_by_commits = getkeyssortedbyvalues(data.tags[tag]['authors'])
1119 for i in reversed(self.authors_by_commits):
1120 authorinfo.append('%s (%d)' % (i, data.tags[tag]['authors'][i]))
1121 f.write('<tr><td>%s</td><td>%s</td><td>%d</td><td>%s</td></tr>' % (tag, data.tags[tag]['date'], data.tags[tag]['commits'], ', '.join(authorinfo)))
1122 f.write('</table>')
1124 f.write('</body></html>')
1125 f.close()
1127 self.createGraphs(path)
1129 def createGraphs(self, path):
1130 print 'Generating graphs...'
1132 # hour of day
1133 f = open(path + '/hour_of_day.plot', 'w')
1134 f.write(GNUPLOT_COMMON)
1135 f.write(
1136 """
1137 set output 'hour_of_day.png'
1138 unset key
1139 set xrange [0.5:24.5]
1140 set xtics 4
1141 set grid y
1142 set ylabel "Commits"
1143 plot 'hour_of_day.dat' using 1:2:(0.5) w boxes fs solid
1144 """)
1145 f.close()
1147 # day of week
1148 f = open(path + '/day_of_week.plot', 'w')
1149 f.write(GNUPLOT_COMMON)
1150 f.write(
1151 """
1152 set output 'day_of_week.png'
1153 unset key
1154 set xrange [0.5:7.5]
1155 set xtics 1
1156 set grid y
1157 set ylabel "Commits"
1158 plot 'day_of_week.dat' using 1:3:(0.5):xtic(2) w boxes fs solid
1159 """)
1160 f.close()
1162 # Domains
1163 f = open(path + '/domains.plot', 'w')
1164 f.write(GNUPLOT_COMMON)
1165 f.write(
1166 """
1167 set output 'domains.png'
1168 unset key
1169 unset xtics
1170 set yrange [0:]
1171 set grid y
1172 set ylabel "Commits"
1173 plot 'domains.dat' using 2:3:(0.5) with boxes fs solid, '' using 2:3:1 with labels rotate by 45 offset 0,1
1174 """)
1175 f.close()
1177 # Month of Year
1178 f = open(path + '/month_of_year.plot', 'w')
1179 f.write(GNUPLOT_COMMON)
1180 f.write(
1181 """
1182 set output 'month_of_year.png'
1183 unset key
1184 set xrange [0.5:12.5]
1185 set xtics 1
1186 set grid y
1187 set ylabel "Commits"
1188 plot 'month_of_year.dat' using 1:2:(0.5) w boxes fs solid
1189 """)
1190 f.close()
1192 # commits_by_year_month
1193 f = open(path + '/commits_by_year_month.plot', 'w')
1194 f.write(GNUPLOT_COMMON)
1195 f.write(
1196 """
1197 set output 'commits_by_year_month.png'
1198 unset key
1199 set xdata time
1200 set timefmt "%Y-%m"
1201 set format x "%Y-%m"
1202 set xtics rotate
1203 set bmargin 5
1204 set grid y
1205 set ylabel "Commits"
1206 plot 'commits_by_year_month.dat' using 1:2:(0.5) w boxes fs solid
1207 """)
1208 f.close()
1210 # commits_by_year
1211 f = open(path + '/commits_by_year.plot', 'w')
1212 f.write(GNUPLOT_COMMON)
1213 f.write(
1214 """
1215 set output 'commits_by_year.png'
1216 unset key
1217 set xtics 1 rotate
1218 set grid y
1219 set ylabel "Commits"
1220 set yrange [0:]
1221 plot 'commits_by_year.dat' using 1:2:(0.5) w boxes fs solid
1222 """)
1223 f.close()
1225 # Files by date
1226 f = open(path + '/files_by_date.plot', 'w')
1227 f.write(GNUPLOT_COMMON)
1228 f.write(
1229 """
1230 set output 'files_by_date.png'
1231 unset key
1232 set xdata time
1233 set timefmt "%Y-%m-%d"
1234 set format x "%Y-%m-%d"
1235 set grid y
1236 set ylabel "Files"
1237 set xtics rotate
1238 set ytics autofreq
1239 set bmargin 6
1240 plot 'files_by_date.dat' using 1:2 w steps
1241 """)
1242 f.close()
1244 # Lines of Code
1245 f = open(path + '/lines_of_code.plot', 'w')
1246 f.write(GNUPLOT_COMMON)
1247 f.write(
1248 """
1249 set output 'lines_of_code.png'
1250 unset key
1251 set xdata time
1252 set timefmt "%s"
1253 set format x "%Y-%m-%d"
1254 set grid y
1255 set ylabel "Lines"
1256 set xtics rotate
1257 set bmargin 6
1258 plot 'lines_of_code.dat' using 1:2 w lines
1259 """)
1260 f.close()
1262 # Lines of Code Added per author
1263 f = open(path + '/lines_of_code_by_author.plot', 'w')
1264 f.write(GNUPLOT_COMMON)
1265 f.write(
1266 """
1267 set terminal png transparent size 640,480
1268 set output 'lines_of_code_by_author.png'
1269 set key left top
1270 set xdata time
1271 set timefmt "%s"
1272 set format x "%Y-%m-%d"
1273 set grid y
1274 set ylabel "Lines"
1275 set xtics rotate
1276 set bmargin 6
1277 plot """
1278 )
1279 i = 1
1280 plots = []
1281 for a in self.authors_to_plot:
1282 i = i + 1
1283 plots.append("""'lines_of_code_by_author.dat' using 1:%d title "%s" w lines""" % (i, a.replace("\"", "\\\"")))
1284 f.write(", ".join(plots))
1285 f.write('\n')
1287 f.close()
1289 # Commits per author
1290 f = open(path + '/commits_by_author.plot', 'w')
1291 f.write(GNUPLOT_COMMON)
1292 f.write(
1293 """
1294 set terminal png transparent size 640,480
1295 set output 'commits_by_author.png'
1296 set key left top
1297 set xdata time
1298 set timefmt "%s"
1299 set format x "%Y-%m-%d"
1300 set grid y
1301 set ylabel "Commits"
1302 set xtics rotate
1303 set bmargin 6
1304 plot """
1305 )
1306 i = 1
1307 plots = []
1308 for a in self.authors_to_plot:
1309 i = i + 1
1310 plots.append("""'commits_by_author.dat' using 1:%d title "%s" w lines""" % (i, a.replace("\"", "\\\"")))
1311 f.write(", ".join(plots))
1312 f.write('\n')
1314 f.close()
1316 os.chdir(path)
1317 files = glob.glob(path + '/*.plot')
1318 for f in files:
1319 out = getpipeoutput([gnuplot_cmd + ' "%s"' % f])
1320 if len(out) > 0:
1321 print out
1323 def printHeader(self, f, title = ''):
1324 f.write(
1325 """<?xml version="1.0" encoding="UTF-8"?>
1326 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
1327 <html xmlns="http://www.w3.org/1999/xhtml">
1328 <head>
1329 <title>GitStats - %s</title>
1330 <link rel="stylesheet" href="%s" type="text/css" />
1331 <meta name="generator" content="GitStats %s" />
1332 <script type="text/javascript" src="sortable.js"></script>
1333 </head>
1334 <body>
1335 """ % (self.title, conf['style'], getversion()))
1337 def printNav(self, f):
1338 f.write("""
1339 <div class="nav">
1340 <ul>
1341 <li><a href="index.html">General</a></li>
1342 <li><a href="activity.html">Activity</a></li>
1343 <li><a href="authors.html">Authors</a></li>
1344 <li><a href="files.html">Files</a></li>
1345 <li><a href="lines.html">Lines</a></li>
1346 <li><a href="tags.html">Tags</a></li>
1347 </ul>
1348 </div>
1349 """)
1352 class GitStats:
1353 def run(self, args_orig):
1354 optlist, args = getopt.getopt(args_orig, 'c:')
1355 for o,v in optlist:
1356 if o == '-c':
1357 key, value = v.split('=', 1)
1358 if key not in conf:
1359 raise KeyError('no such key "%s" in config' % key)
1360 if isinstance(conf[key], int):
1361 conf[key] = int(value)
1362 elif isinstance(conf[key], dict):
1363 kk,vv = value.split(',', 1)
1364 conf[key][kk] = vv
1365 else:
1366 conf[key] = value
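# e.g. "-c max_authors=40" stores an int, while "-c merge_authors=Alias,Canonical Name"
# adds conf['merge_authors']['Alias'] = 'Canonical Name' (the value is split on the first comma).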
1368 if len(args) < 2:
1369 print """
1370 Usage: gitstats [options] <gitpath..> <outputpath>
1372 Options:
1373 -c key=value Override configuration value
1375 Default config values:
1376 %s
1377 """ % conf
1378 sys.exit(0)
1380 outputpath = os.path.abspath(args[-1])
1381 rundir = os.getcwd()
1383 try:
1384 os.makedirs(outputpath)
1385 except OSError:
1386 pass
1387 if not os.path.isdir(outputpath):
1388 print 'FATAL: Output path is not a directory or does not exist'
1389 sys.exit(1)
1391 if not getgnuplotversion():
1392 print 'gnuplot not found'
1393 sys.exit(1)
1395 print 'Output path: %s' % outputpath
1396 cachefile = os.path.join(outputpath, 'gitstats.cache')
1398 data = GitDataCollector()
1399 data.loadCache(cachefile)
1401 for gitpath in args[0:-1]:
1402 print 'Git path: %s' % gitpath
1404 os.chdir(gitpath)
1406 print 'Collecting data...'
1407 data.collect(gitpath)
1409 print 'Refining data...'
1410 data.saveCache(cachefile)
1411 data.refine()
1413 os.chdir(rundir)
1415 print 'Generating report...'
1416 report = HTMLReportCreator()
1417 report.create(data, outputpath)
1419 time_end = time.time()
1420 exectime_internal = time_end - time_start
1421 print 'Execution time %.5f secs, %.5f secs (%.2f %%) in external commands' % (exectime_internal, exectime_external, (100.0 * exectime_external) / exectime_internal)
1422 if sys.stdin.isatty():
1423 print 'You may now run:'
1424 print
1425 print ' sensible-browser \'%s\'' % os.path.join(outputpath, 'index.html').replace("'", "'\\''")
1426 print
1428 if __name__=='__main__':
1429 g = GitStats()
1430 g.run(sys.argv[1:])
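# Typical invocation (assuming git and gnuplot are on PATH):
#   gitstats ~/src/myproject /tmp/myproject-stats
# then open /tmp/myproject-stats/index.html in a browser.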