Add statistics about changed lines over time
1 #!/usr/bin/env python
2 # Copyright (c) 2007-2011 Heikki Hokkanen <hoxu@users.sf.net> & others (see doc/author.txt)
3 # GPLv2 / GPLv3
4 import datetime
5 import getopt
6 import glob
7 import os
8 import pickle
9 import platform
10 import re
11 import shutil
12 import subprocess
13 import sys
14 import time
15 import zlib
17 GNUPLOT_COMMON = 'set terminal png transparent size 640,240\nset size 1.0,1.0\n'
18 ON_LINUX = (platform.system() == 'Linux')
19 WEEKDAYS = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')
21 exectime_internal = 0.0
22 exectime_external = 0.0
23 time_start = time.time()
25 # By default, gnuplot is looked up on the PATH, but the command can be overridden with the
26 # environment variable "GNUPLOT"
27 gnuplot_cmd = 'gnuplot'
28 if 'GNUPLOT' in os.environ:
29 gnuplot_cmd = os.environ['GNUPLOT']
31 conf = {
32 'max_domains': 10,
33 'max_ext_length': 10,
34 'style': 'gitstats.css',
35 'max_authors': 20,
36 'authors_top': 5,
37 'commit_begin': '',
38 'commit_end': 'HEAD',
39 'linear_linestats': 1,
40 'project_name': '',
41 }
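# Run a shell pipeline: each command in cmds is spawned with its stdin wired to the previous
# command's stdout; returns the final command's output with the trailing newline stripped.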
43 def getpipeoutput(cmds, quiet = False):
44 global exectime_external
45 start = time.time()
46 if not quiet and ON_LINUX and os.isatty(1):
47 print '>> ' + ' | '.join(cmds),
48 sys.stdout.flush()
49 p0 = subprocess.Popen(cmds[0], stdout = subprocess.PIPE, shell = True)
50 p = p0
51 for x in cmds[1:]:
52 p = subprocess.Popen(x, stdin = p0.stdout, stdout = subprocess.PIPE, shell = True)
53 p0 = p
54 output = p.communicate()[0]
55 end = time.time()
56 if not quiet:
57 if ON_LINUX and os.isatty(1):
58 print '\r',
59 print '[%.5f] >> %s' % (end - start, ' | '.join(cmds))
60 exectime_external += (end - start)
61 return output.rstrip('\n')
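# Commit range to analyse: conf['commit_end'] alone, 'commit_begin..commit_end' when both are
# set, or the supplied default (normally HEAD).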
63 def getcommitrange(defaultrange = 'HEAD', end_only = False):
64 if len(conf['commit_end']) > 0:
65 if end_only or len(conf['commit_begin']) == 0:
66 return conf['commit_end']
67 return '%s..%s' % (conf['commit_begin'], conf['commit_end'])
68 return defaultrange
70 def getkeyssortedbyvalues(dict):
71 return map(lambda el : el[1], sorted(map(lambda el : (el[1], el[0]), dict.items())))
73 # Example: d['author'] = { 'commits': 512 }; getkeyssortedbyvaluekey(d, 'commits') returns the keys of d ordered by that value
74 def getkeyssortedbyvaluekey(d, key):
75 return map(lambda el : el[1], sorted(map(lambda el : (d[el][key], el), d.keys())))
77 VERSION = 0
78 def getversion():
79 global VERSION
80 if VERSION == 0:
81 VERSION = getpipeoutput(["git rev-parse --short %s" % getcommitrange('HEAD')]).split('\n')[0]
82 return VERSION
84 def getgitversion():
85 return getpipeoutput(['git --version']).split('\n')[0]
87 def getgnuplotversion():
88 return getpipeoutput(['gnuplot --version']).split('\n')[0]
90 class DataCollector:
91 """Manages data collection from a revision control repository."""
92 def __init__(self):
93 self.stamp_created = time.time()
94 self.cache = {}
95 self.total_authors = 0
96 self.activity_by_hour_of_day = {} # hour -> commits
97 self.activity_by_day_of_week = {} # day -> commits
98 self.activity_by_month_of_year = {} # month [1-12] -> commits
99 self.activity_by_hour_of_week = {} # weekday -> hour -> commits
100 self.activity_by_hour_of_day_busiest = 0
101 self.activity_by_hour_of_week_busiest = 0
102 self.activity_by_year_week = {} # yy_wNN -> commits
103 self.activity_by_year_week_peak = 0
105 self.authors = {} # name -> {commits, first_commit_stamp, last_commit_stamp, last_active_day, active_days, lines_added, lines_removed}
107 self.total_commits = 0
108 self.total_files = 0
109 self.authors_by_commits = 0
111 # domains
112 self.domains = {} # domain -> commits
114 # author of the month
115 self.author_of_month = {} # month -> author -> commits
116 self.author_of_year = {} # year -> author -> commits
117 self.commits_by_month = {} # month -> commits
118 self.commits_by_year = {} # year -> commits
119 self.lines_added_by_month = {} # month -> lines added
120 self.lines_added_by_year = {} # year -> lines added
121 self.lines_removed_by_month = {} # month -> lines removed
122 self.lines_removed_by_year = {} # year -> lines removed
123 self.first_commit_stamp = 0
124 self.last_commit_stamp = 0
125 self.last_active_day = None
126 self.active_days = set()
128 # lines
129 self.total_lines = 0
130 self.total_lines_added = 0
131 self.total_lines_removed = 0
133 # timezone
134 self.commits_by_timezone = {} # timezone -> commits
136 # tags
137 self.tags = {}
139 self.files_by_stamp = {} # stamp -> files
141 # extensions
142 self.extensions = {} # extension -> files, lines
144 # line statistics
145 self.changes_by_date = {} # stamp -> { files, ins, del }
148 # This should be the main function to extract data from the repository.
149 def collect(self, dir):
150 self.dir = dir
151 if len(conf['project_name']) == 0:
152 self.projectname = os.path.basename(os.path.abspath(dir))
153 else:
154 self.projectname = conf['project_name']
157 # Load cacheable data
158 def loadCache(self, cachefile):
159 if not os.path.exists(cachefile):
160 return
161 print 'Loading cache...'
162 f = open(cachefile, 'rb')
163 try:
164 self.cache = pickle.loads(zlib.decompress(f.read()))
165 except:
166 # temporary hack to upgrade non-compressed caches
167 f.seek(0)
168 self.cache = pickle.load(f)
169 f.close()
172 # Produce any additional statistics from the extracted data.
173 def refine(self):
174 pass
177 # Get a dictionary of information about the given author
178 def getAuthorInfo(self, author):
179 return None
181 def getActivityByDayOfWeek(self):
182 return {}
184 def getActivityByHourOfDay(self):
185 return {}
187 # Get a dictionary of information about the given domain
188 def getDomainInfo(self, domain):
189 return None
192 # Get a list of authors
193 def getAuthors(self):
194 return []
196 def getFirstCommitDate(self):
197 return datetime.datetime.now()
199 def getLastCommitDate(self):
200 return datetime.datetime.now()
202 def getStampCreated(self):
203 return self.stamp_created
205 def getTags(self):
206 return []
208 def getTotalAuthors(self):
209 return -1
211 def getTotalCommits(self):
212 return -1
214 def getTotalFiles(self):
215 return -1
217 def getTotalLOC(self):
218 return -1
221 # Save cacheable data
222 def saveCache(self, cachefile):
223 print 'Saving cache...'
224 f = open(cachefile, 'wb')
225 #pickle.dump(self.cache, f)
226 data = zlib.compress(pickle.dumps(self.cache))
227 f.write(data)
228 f.close()
230 class GitDataCollector(DataCollector):
231 def collect(self, dir):
232 DataCollector.collect(self, dir)
234 try:
235 self.total_authors += int(getpipeoutput(['git shortlog -s %s' % getcommitrange(), 'wc -l']))
236 except:
237 self.total_authors = 0
238 #self.total_lines = int(getoutput('git-ls-files -z |xargs -0 cat |wc -l'))
240 # tags
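# Each line of 'git show-ref --tags' is '<hash> refs/tags/<name>'; the hash is resolved with
# 'git log -n 1' below to obtain the tag's timestamp.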
241 lines = getpipeoutput(['git show-ref --tags']).split('\n')
242 for line in lines:
243 if len(line) == 0:
244 continue
245 (hash, tag) = line.split(' ')
247 tag = tag.replace('refs/tags/', '')
248 output = getpipeoutput(['git log "%s" --pretty=format:"%%at %%aN" -n 1' % hash])
249 if len(output) > 0:
250 parts = output.split(' ')
251 stamp = 0
252 try:
253 stamp = int(parts[0])
254 except ValueError:
255 stamp = 0
256 self.tags[tag] = { 'stamp': stamp, 'hash' : hash, 'date' : datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), 'commits': 0, 'authors': {} }
258 # collect info on tags, starting from latest
259 tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), self.tags.items()))))
260 prev = None
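# Walk tags from oldest to newest; 'git shortlog -s <tag> ^<previous tag>' attributes each
# commit (and its author) to the first tag that contains it.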
261 for tag in reversed(tags_sorted_by_date_desc):
262 cmd = 'git shortlog -s "%s"' % tag
263 if prev != None:
264 cmd += ' "^%s"' % prev
265 output = getpipeoutput([cmd])
266 if len(output) == 0:
267 continue
268 prev = tag
269 for line in output.split('\n'):
270 parts = re.split('\s+', line, 2)
271 commits = int(parts[1])
272 author = parts[2]
273 self.tags[tag]['commits'] = commits
274 self.tags[tag]['authors'][author] = commits
276 # Collect revision statistics
277 # Outputs "<stamp> <date> <time> <timezone> <author> '<' <mail> '>'"
278 lines = getpipeoutput(['git rev-list --pretty=format:"%%at %%ai %%aN <%%aE>" %s' % getcommitrange('HEAD'), 'grep -v ^commit']).split('\n')
279 for line in lines:
280 parts = line.split(' ', 4)
281 author = ''
282 try:
283 stamp = int(parts[0])
284 except ValueError:
285 stamp = 0
286 timezone = parts[3]
287 author, mail = parts[4].split('<', 1)
288 author = author.rstrip()
289 mail = mail.rstrip('>')
290 domain = '?'
291 if mail.find('@') != -1:
292 domain = mail.rsplit('@', 1)[1]
293 date = datetime.datetime.fromtimestamp(float(stamp))
295 # First and last commit stamp (may be in any order because of cherry-picking and patches)
296 if stamp > self.last_commit_stamp:
297 self.last_commit_stamp = stamp
298 if self.first_commit_stamp == 0 or stamp < self.first_commit_stamp:
299 self.first_commit_stamp = stamp
301 # activity
302 # hour
303 hour = date.hour
304 self.activity_by_hour_of_day[hour] = self.activity_by_hour_of_day.get(hour, 0) + 1
305 # most active hour?
306 if self.activity_by_hour_of_day[hour] > self.activity_by_hour_of_day_busiest:
307 self.activity_by_hour_of_day_busiest = self.activity_by_hour_of_day[hour]
309 # day of week
310 day = date.weekday()
311 self.activity_by_day_of_week[day] = self.activity_by_day_of_week.get(day, 0) + 1
313 # domain stats
314 if domain not in self.domains:
315 self.domains[domain] = {}
316 # commits
317 self.domains[domain]['commits'] = self.domains[domain].get('commits', 0) + 1
319 # hour of week
320 if day not in self.activity_by_hour_of_week:
321 self.activity_by_hour_of_week[day] = {}
322 self.activity_by_hour_of_week[day][hour] = self.activity_by_hour_of_week[day].get(hour, 0) + 1
323 # most active hour?
324 if self.activity_by_hour_of_week[day][hour] > self.activity_by_hour_of_week_busiest:
325 self.activity_by_hour_of_week_busiest = self.activity_by_hour_of_week[day][hour]
327 # month of year
328 month = date.month
329 self.activity_by_month_of_year[month] = self.activity_by_month_of_year.get(month, 0) + 1
331 # yearly/weekly activity
332 yyw = date.strftime('%Y-%W')
333 self.activity_by_year_week[yyw] = self.activity_by_year_week.get(yyw, 0) + 1
334 if self.activity_by_year_week_peak < self.activity_by_year_week[yyw]:
335 self.activity_by_year_week_peak = self.activity_by_year_week[yyw]
337 # author stats
338 if author not in self.authors:
339 self.authors[author] = {}
340 # commits, note again that commits may be in any date order because of cherry-picking and patches
341 if 'last_commit_stamp' not in self.authors[author]:
342 self.authors[author]['last_commit_stamp'] = stamp
343 if stamp > self.authors[author]['last_commit_stamp']:
344 self.authors[author]['last_commit_stamp'] = stamp
345 if 'first_commit_stamp' not in self.authors[author]:
346 self.authors[author]['first_commit_stamp'] = stamp
347 if stamp < self.authors[author]['first_commit_stamp']:
348 self.authors[author]['first_commit_stamp'] = stamp
350 # author of the month/year
351 yymm = date.strftime('%Y-%m')
352 if yymm in self.author_of_month:
353 self.author_of_month[yymm][author] = self.author_of_month[yymm].get(author, 0) + 1
354 else:
355 self.author_of_month[yymm] = {}
356 self.author_of_month[yymm][author] = 1
357 self.commits_by_month[yymm] = self.commits_by_month.get(yymm, 0) + 1
359 yy = date.year
360 if yy in self.author_of_year:
361 self.author_of_year[yy][author] = self.author_of_year[yy].get(author, 0) + 1
362 else:
363 self.author_of_year[yy] = {}
364 self.author_of_year[yy][author] = 1
365 self.commits_by_year[yy] = self.commits_by_year.get(yy, 0) + 1
367 # authors: active days
368 yymmdd = date.strftime('%Y-%m-%d')
369 if 'last_active_day' not in self.authors[author]:
370 self.authors[author]['last_active_day'] = yymmdd
371 self.authors[author]['active_days'] = set([yymmdd])
372 elif yymmdd != self.authors[author]['last_active_day']:
373 self.authors[author]['last_active_day'] = yymmdd
374 self.authors[author]['active_days'].add(yymmdd)
376 # project: active days
377 if yymmdd != self.last_active_day:
378 self.last_active_day = yymmdd
379 self.active_days.add(yymmdd)
381 # timezone
382 self.commits_by_timezone[timezone] = self.commits_by_timezone.get(timezone, 0) + 1
384 # TODO Optimize this, it's the worst bottleneck
385 # outputs "<stamp> <files>" for each revision
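# rev-list prints '<stamp> <tree hash>' per commit; getFilesInCommit() counts the files in
# each tree with 'git ls-tree' and memoises the result in the cache.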
386 revlines = getpipeoutput(['git rev-list --pretty=format:"%%at %%T" %s' % getcommitrange('HEAD'), 'grep -v ^commit']).strip().split('\n')
387 lines = []
388 for revline in revlines:
389 time, rev = revline.split(' ')
390 linecount = self.getFilesInCommit(rev)
391 lines.append('%d %d' % (int(time), linecount))
393 self.total_commits += len(lines)
394 for line in lines:
395 parts = line.split(' ')
396 if len(parts) != 2:
397 continue
398 (stamp, files) = parts[0:2]
399 try:
400 self.files_by_stamp[int(stamp)] = int(files)
401 except ValueError:
402 print 'Warning: failed to parse line "%s"' % line
404 # extensions
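# Each 'git ls-tree -r -z' record is '<mode> <type> <hash>\t<path>'; parts[2] is the blob hash
# and parts[3] the path whose suffix after the last '.' becomes the extension bucket.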
405 lines = getpipeoutput(['git ls-tree -r -z %s' % getcommitrange('HEAD', end_only = True)]).split('\000')
406 self.total_files += len(lines)
407 for line in lines:
408 if len(line) == 0:
409 continue
410 parts = re.split('\s+', line, 4)
411 sha1 = parts[2]
412 fullpath = parts[3]
414 filename = fullpath.split('/')[-1] # strip directories
415 if filename.find('.') == -1 or filename.rfind('.') == 0:
416 ext = ''
417 else:
418 ext = filename[(filename.rfind('.') + 1):]
419 if len(ext) > conf['max_ext_length']:
420 ext = ''
422 if ext not in self.extensions:
423 self.extensions[ext] = {'files': 0, 'lines': 0}
425 self.extensions[ext]['files'] += 1
426 try:
427 self.extensions[ext]['lines'] += self.getLinesInBlob(sha1)
428 except:
429 print 'Warning: Could not count lines for file "%s"' % line
431 # line statistics
432 # outputs:
433 # N files changed, N insertions (+), N deletions(-)
434 # <stamp> <author>
435 self.changes_by_date = {} # stamp -> { files, ins, del }
436 # computation of lines of code by date is better done
437 # on a linear history.
438 extra = ''
439 if conf['linear_linestats']:
440 extra = '--first-parent -m'
441 lines = getpipeoutput(['git log --shortstat %s --pretty=format:"%%at %%aN" %s' % (extra, getcommitrange('HEAD'))]).split('\n')
442 lines.reverse()
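# git log emits a '<stamp> <author>' line followed by a 'N files changed, ...' shortstat line
# per commit; after reversing, the shortstat line is seen first, so the counters are
# accumulated and only stored once the matching '<stamp> <author>' line is reached.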
443 files = 0; inserted = 0; deleted = 0; total_lines = 0
444 author = None
445 for line in lines:
446 if len(line) == 0:
447 continue
449 # <stamp> <author>
450 if line.find('files changed,') == -1:
451 pos = line.find(' ')
452 if pos != -1:
453 try:
454 (stamp, author) = (int(line[:pos]), line[pos+1:])
455 self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted, 'lines': total_lines }
457 date = datetime.datetime.fromtimestamp(stamp)
458 yymm = date.strftime('%Y-%m')
459 self.lines_added_by_month[yymm] = self.lines_added_by_month.get(yymm, 0) + inserted
460 self.lines_removed_by_month[yymm] = self.lines_removed_by_month.get(yymm, 0) + deleted
462 yy = date.year
463 self.lines_added_by_year[yy] = self.lines_added_by_year.get(yy,0) + inserted
464 self.lines_removed_by_year[yy] = self.lines_removed_by_year.get(yy, 0) + deleted
466 files, inserted, deleted = 0, 0, 0
467 except ValueError:
468 print 'Warning: unexpected line "%s"' % line
469 else:
470 print 'Warning: unexpected line "%s"' % line
471 else:
472 numbers = re.findall('\d+', line)
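# Note: when a commit only adds or only removes lines, git prints just two numbers here and
# the line is reported as unhandled below.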
473 if len(numbers) == 3:
474 (files, inserted, deleted) = map(lambda el : int(el), numbers)
475 total_lines += inserted
476 total_lines -= deleted
477 self.total_lines_added += inserted
478 self.total_lines_removed += deleted
480 else:
481 print 'Warning: failed to handle line "%s"' % line
482 (files, inserted, deleted) = (0, 0, 0)
483 #self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted }
484 self.total_lines = total_lines
486 # Per-author statistics
488 # Defined for (stamp, author) only if the author committed at that timestamp.
489 self.changes_by_date_by_author = {} # stamp -> author -> lines_added
491 # Similar to the above, but never use --first-parent
492 # (we need to walk through every commit to know who
493 # committed what, not just through mainline)
494 lines = getpipeoutput(['git log --shortstat --date-order --pretty=format:"%%at %%aN" %s' % (getcommitrange('HEAD'))]).split('\n')
495 lines.reverse()
496 files = 0; inserted = 0; deleted = 0
497 author = None
498 stamp = 0
499 for line in lines:
500 if len(line) == 0:
501 continue
503 # <stamp> <author>
504 if line.find('files changed,') == -1:
505 pos = line.find(' ')
506 if pos != -1:
507 try:
508 oldstamp = stamp
509 (stamp, author) = (int(line[:pos]), line[pos+1:])
510 if oldstamp > stamp:
511 # clock skew; keep the old timestamp to avoid an ugly graph
512 stamp = oldstamp
513 if author not in self.authors:
514 self.authors[author] = { 'lines_added' : 0, 'lines_removed' : 0, 'commits' : 0}
515 self.authors[author]['commits'] = self.authors[author].get('commits', 0) + 1
516 self.authors[author]['lines_added'] = self.authors[author].get('lines_added', 0) + inserted
517 self.authors[author]['lines_removed'] = self.authors[author].get('lines_removed', 0) + deleted
518 if stamp not in self.changes_by_date_by_author:
519 self.changes_by_date_by_author[stamp] = {}
520 if author not in self.changes_by_date_by_author[stamp]:
521 self.changes_by_date_by_author[stamp][author] = {}
522 self.changes_by_date_by_author[stamp][author]['lines_added'] = self.authors[author]['lines_added']
523 self.changes_by_date_by_author[stamp][author]['commits'] = self.authors[author]['commits']
524 files, inserted, deleted = 0, 0, 0
525 except ValueError:
526 print 'Warning: unexpected line "%s"' % line
527 else:
528 print 'Warning: unexpected line "%s"' % line
529 else:
530 numbers = re.findall('\d+', line)
531 if len(numbers) == 3:
532 (files, inserted, deleted) = map(lambda el : int(el), numbers)
533 else:
534 print 'Warning: failed to handle line "%s"' % line
535 (files, inserted, deleted) = (0, 0, 0)
537 def refine(self):
538 # authors
539 # name -> {place_by_commits, commits_frac, date_first, date_last, timedelta}
540 self.authors_by_commits = getkeyssortedbyvaluekey(self.authors, 'commits')
541 self.authors_by_commits.reverse() # most first
542 for i, name in enumerate(self.authors_by_commits):
543 self.authors[name]['place_by_commits'] = i + 1
545 for name in self.authors.keys():
546 a = self.authors[name]
547 a['commits_frac'] = (100 * float(a['commits'])) / self.getTotalCommits()
548 date_first = datetime.datetime.fromtimestamp(a['first_commit_stamp'])
549 date_last = datetime.datetime.fromtimestamp(a['last_commit_stamp'])
550 delta = date_last - date_first
551 a['date_first'] = date_first.strftime('%Y-%m-%d')
552 a['date_last'] = date_last.strftime('%Y-%m-%d')
553 a['timedelta'] = delta
554 if 'lines_added' not in a: a['lines_added'] = 0
555 if 'lines_removed' not in a: a['lines_removed'] = 0
557 def getActiveDays(self):
558 return self.active_days
560 def getActivityByDayOfWeek(self):
561 return self.activity_by_day_of_week
563 def getActivityByHourOfDay(self):
564 return self.activity_by_hour_of_day
566 def getAuthorInfo(self, author):
567 return self.authors[author]
569 def getAuthors(self, limit = None):
570 res = getkeyssortedbyvaluekey(self.authors, 'commits')
571 res.reverse()
572 return res[:limit]
574 def getCommitDeltaDays(self):
575 return (self.last_commit_stamp - self.first_commit_stamp) / 86400 + 1
577 def getDomainInfo(self, domain):
578 return self.domains[domain]
580 def getDomains(self):
581 return self.domains.keys()
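# Number of files in the tree of a given revision, counted with 'git ls-tree -r' and memoised
# in self.cache so that later runs with a cache file are cheap.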
583 def getFilesInCommit(self, rev):
584 try:
585 res = self.cache['files_in_tree'][rev]
586 except:
587 res = int(getpipeoutput(['git ls-tree -r --name-only "%s"' % rev, 'wc -l']).split('\n')[0])
588 if 'files_in_tree' not in self.cache:
589 self.cache['files_in_tree'] = {}
590 self.cache['files_in_tree'][rev] = res
592 return res
594 def getFirstCommitDate(self):
595 return datetime.datetime.fromtimestamp(self.first_commit_stamp)
597 def getLastCommitDate(self):
598 return datetime.datetime.fromtimestamp(self.last_commit_stamp)
600 def getLinesInBlob(self, sha1):
601 try:
602 res = self.cache['lines_in_blob'][sha1]
603 except:
604 res = int(getpipeoutput(['git cat-file blob %s' % sha1, 'wc -l']).split()[0])
605 if 'lines_in_blob' not in self.cache:
606 self.cache['lines_in_blob'] = {}
607 self.cache['lines_in_blob'][sha1] = res
608 return res
610 def getTags(self):
611 lines = getpipeoutput(['git show-ref --tags', 'cut -d/ -f3'])
612 return lines.split('\n')
614 def getTagDate(self, tag):
615 return self.revToDate('tags/' + tag)
617 def getTotalAuthors(self):
618 return self.total_authors
620 def getTotalCommits(self):
621 return self.total_commits
623 def getTotalFiles(self):
624 return self.total_files
626 def getTotalLOC(self):
627 return self.total_lines
629 def revToDate(self, rev):
630 stamp = int(getpipeoutput(['git log --pretty=format:%%at "%s" -n 1' % rev]))
631 return datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d')
633 class ReportCreator:
634 """Creates the actual report based on given data."""
635 def __init__(self):
636 pass
638 def create(self, data, path):
639 self.data = data
640 self.path = path
642 def html_linkify(text):
643 return text.lower().replace(' ', '_')
645 def html_header(level, text):
646 name = html_linkify(text)
647 return '\n<h%d><a href="#%s" name="%s">%s</a></h%d>\n\n' % (level, name, name, text, level)
649 class HTMLReportCreator(ReportCreator):
650 def create(self, data, path):
651 ReportCreator.create(self, data, path)
652 self.title = data.projectname
654 # copy static files. Looks in the binary directory, ../share/gitstats and /usr/share/gitstats
655 binarypath = os.path.dirname(os.path.abspath(__file__))
656 secondarypath = os.path.join(binarypath, '..', 'share', 'gitstats')
657 basedirs = [binarypath, secondarypath, '/usr/share/gitstats']
658 for file in ('gitstats.css', 'sortable.js', 'arrow-up.gif', 'arrow-down.gif', 'arrow-none.gif'):
659 for base in basedirs:
660 src = base + '/' + file
661 if os.path.exists(src):
662 shutil.copyfile(src, path + '/' + file)
663 break
664 else:
665 print 'Warning: "%s" not found, so not copied (searched: %s)' % (file, basedirs)
667 f = open(path + "/index.html", 'w')
668 format = '%Y-%m-%d %H:%M:%S'
669 self.printHeader(f)
671 f.write('<h1>GitStats - %s</h1>' % data.projectname)
673 self.printNav(f)
675 f.write('<dl>')
676 f.write('<dt>Project name</dt><dd>%s</dd>' % (data.projectname))
677 f.write('<dt>Generated</dt><dd>%s (in %d seconds)</dd>' % (datetime.datetime.now().strftime(format), time.time() - data.getStampCreated()))
678 f.write('<dt>Generator</dt><dd><a href="http://gitstats.sourceforge.net/">GitStats</a> (version %s), %s, %s</dd>' % (getversion(), getgitversion(), getgnuplotversion()))
679 f.write('<dt>Report Period</dt><dd>%s to %s</dd>' % (data.getFirstCommitDate().strftime(format), data.getLastCommitDate().strftime(format)))
680 f.write('<dt>Age</dt><dd>%d days, %d active days (%3.2f%%)</dd>' % (data.getCommitDeltaDays(), len(data.getActiveDays()), (100.0 * len(data.getActiveDays()) / data.getCommitDeltaDays())))
681 f.write('<dt>Total Files</dt><dd>%s</dd>' % data.getTotalFiles())
682 f.write('<dt>Total Lines of Code</dt><dd>%s (%d added, %d removed)</dd>' % (data.getTotalLOC(), data.total_lines_added, data.total_lines_removed))
683 f.write('<dt>Total Commits</dt><dd>%s (average %.1f commits per active day, %.1f per all days)</dd>' % (data.getTotalCommits(), float(data.getTotalCommits()) / len(data.getActiveDays()), float(data.getTotalCommits()) / data.getCommitDeltaDays()))
684 f.write('<dt>Authors</dt><dd>%s (average %.1f commits per author)</dd>' % (data.getTotalAuthors(), (1.0 * data.getTotalCommits()) / data.getTotalAuthors()))
685 f.write('</dl>')
687 f.write('</body>\n</html>')
688 f.close()
691 # Activity
692 f = open(path + '/activity.html', 'w')
693 self.printHeader(f)
694 f.write('<h1>Activity</h1>')
695 self.printNav(f)
697 #f.write('<h2>Last 30 days</h2>')
699 #f.write('<h2>Last 12 months</h2>')
701 # Weekly activity
702 WEEKS = 32
703 f.write(html_header(2, 'Weekly activity'))
704 f.write('<p>Last %d weeks</p>' % WEEKS)
706 # generate weeks to show (previous N weeks from now)
707 now = datetime.datetime.now()
708 deltaweek = datetime.timedelta(7)
709 weeks = []
710 stampcur = now
711 for i in range(0, WEEKS):
712 weeks.insert(0, stampcur.strftime('%Y-%W'))
713 stampcur -= deltaweek
715 # top row: commits & bar
716 f.write('<table class="noborders"><tr>')
717 for i in range(0, WEEKS):
718 commits = 0
719 if weeks[i] in data.activity_by_year_week:
720 commits = data.activity_by_year_week[weeks[i]]
722 percentage = 0
723 if weeks[i] in data.activity_by_year_week:
724 percentage = float(data.activity_by_year_week[weeks[i]]) / data.activity_by_year_week_peak
725 height = max(1, int(200 * percentage))
726 f.write('<td style="text-align: center; vertical-align: bottom">%d<div style="display: block; background-color: red; width: 20px; height: %dpx"></div></td>' % (commits, height))
728 # bottom row: week number, counting back from the current week (1 = now)
729 f.write('</tr><tr>')
730 for i in range(0, WEEKS):
731 f.write('<td>%s</td>' % (WEEKS - i))
732 f.write('</tr></table>')
734 # Hour of Day
735 f.write(html_header(2, 'Hour of Day'))
736 hour_of_day = data.getActivityByHourOfDay()
737 f.write('<table><tr><th>Hour</th>')
738 for i in range(0, 24):
739 f.write('<th>%d</th>' % i)
740 f.write('</tr>\n<tr><th>Commits</th>')
741 fp = open(path + '/hour_of_day.dat', 'w')
742 for i in range(0, 24):
743 if i in hour_of_day:
744 r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
745 f.write('<td style="background-color: rgb(%d, 0, 0)">%d</td>' % (r, hour_of_day[i]))
746 fp.write('%d %d\n' % (i, hour_of_day[i]))
747 else:
748 f.write('<td>0</td>')
749 fp.write('%d 0\n' % i)
750 fp.close()
751 f.write('</tr>\n<tr><th>%</th>')
752 totalcommits = data.getTotalCommits()
753 for i in range(0, 24):
754 if i in hour_of_day:
755 r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
756 f.write('<td style="background-color: rgb(%d, 0, 0)">%.2f</td>' % (r, (100.0 * hour_of_day[i]) / totalcommits))
757 else:
758 f.write('<td>0.00</td>')
759 f.write('</tr></table>')
760 f.write('<img src="hour_of_day.png" alt="Hour of Day" />')
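# Rewrite hour_of_day.dat with 1-based hours to match the gnuplot x-range [0.5:24.5]; this
# overwrites the 0-based file written above.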
761 fg = open(path + '/hour_of_day.dat', 'w')
762 for i in range(0, 24):
763 if i in hour_of_day:
764 fg.write('%d %d\n' % (i + 1, hour_of_day[i]))
765 else:
766 fg.write('%d 0\n' % (i + 1))
767 fg.close()
769 # Day of Week
770 f.write(html_header(2, 'Day of Week'))
771 day_of_week = data.getActivityByDayOfWeek()
772 f.write('<div class="vtable"><table>')
773 f.write('<tr><th>Day</th><th>Total (%)</th></tr>')
774 fp = open(path + '/day_of_week.dat', 'w')
775 for d in range(0, 7):
776 commits = 0
777 if d in day_of_week:
778 commits = day_of_week[d]
779 fp.write('%d %s %d\n' % (d + 1, WEEKDAYS[d], commits))
780 f.write('<tr>')
781 f.write('<th>%s</th>' % (WEEKDAYS[d]))
782 if d in day_of_week:
783 f.write('<td>%d (%.2f%%)</td>' % (day_of_week[d], (100.0 * day_of_week[d]) / totalcommits))
784 else:
785 f.write('<td>0</td>')
786 f.write('</tr>')
787 f.write('</table></div>')
788 f.write('<img src="day_of_week.png" alt="Day of Week" />')
789 fp.close()
791 # Hour of Week
792 f.write(html_header(2, 'Hour of Week'))
793 f.write('<table>')
795 f.write('<tr><th>Weekday</th>')
796 for hour in range(0, 24):
797 f.write('<th>%d</th>' % (hour))
798 f.write('</tr>')
800 for weekday in range(0, 7):
801 f.write('<tr><th>%s</th>' % (WEEKDAYS[weekday]))
802 for hour in range(0, 24):
803 try:
804 commits = data.activity_by_hour_of_week[weekday][hour]
805 except KeyError:
806 commits = 0
807 if commits != 0:
808 f.write('<td')
809 r = 127 + int((float(commits) / data.activity_by_hour_of_week_busiest) * 128)
810 f.write(' style="background-color: rgb(%d, 0, 0)"' % r)
811 f.write('>%d</td>' % commits)
812 else:
813 f.write('<td></td>')
814 f.write('</tr>')
816 f.write('</table>')
818 # Month of Year
819 f.write(html_header(2, 'Month of Year'))
820 f.write('<div class="vtable"><table>')
821 f.write('<tr><th>Month</th><th>Commits (%)</th></tr>')
822 fp = open (path + '/month_of_year.dat', 'w')
823 for mm in range(1, 13):
824 commits = 0
825 if mm in data.activity_by_month_of_year:
826 commits = data.activity_by_month_of_year[mm]
827 f.write('<tr><td>%d</td><td>%d (%.2f %%)</td></tr>' % (mm, commits, (100.0 * commits) / data.getTotalCommits()))
828 fp.write('%d %d\n' % (mm, commits))
829 fp.close()
830 f.write('</table></div>')
831 f.write('<img src="month_of_year.png" alt="Month of Year" />')
833 # Commits by year/month
834 f.write(html_header(2, 'Commits by year/month'))
835 f.write('<div class="vtable"><table><tr><th>Month</th><th>Commits</th><th>Lines added</th><th>Lines removed</th></tr>')
836 for yymm in reversed(sorted(data.commits_by_month.keys())):
837 f.write('<tr><td>%s</td><td>%d</td><td>%d</td><td>%d</td></tr>' % (yymm, data.commits_by_month[yymm], data.lines_added_by_month[yymm], data.lines_removed_by_month[yymm]))
838 f.write('</table></div>')
839 f.write('<img src="commits_by_year_month.png" alt="Commits by year/month" />')
840 fg = open(path + '/commits_by_year_month.dat', 'w')
841 for yymm in sorted(data.commits_by_month.keys()):
842 fg.write('%s %s\n' % (yymm, data.commits_by_month[yymm]))
843 fg.close()
845 # Commits by year
846 f.write(html_header(2, 'Commits by Year'))
847 f.write('<div class="vtable"><table><tr><th>Year</th><th>Commits (% of all)</th><th>Lines added</th><th>Lines removed</th></tr>')
848 for yy in reversed(sorted(data.commits_by_year.keys())):
849 f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d</td><td>%d</td></tr>' % (yy, data.commits_by_year[yy], (100.0 * data.commits_by_year[yy]) / data.getTotalCommits(), data.lines_added_by_year[yy], data.lines_removed_by_year[yy]))
850 f.write('</table></div>')
851 f.write('<img src="commits_by_year.png" alt="Commits by Year" />')
852 fg = open(path + '/commits_by_year.dat', 'w')
853 for yy in sorted(data.commits_by_year.keys()):
854 fg.write('%d %d\n' % (yy, data.commits_by_year[yy]))
855 fg.close()
857 # Commits by timezone
858 f.write(html_header(2, 'Commits by Timezone'))
859 f.write('<table><tr>')
860 f.write('<th>Timezone</th><th>Commits</th>')
861 max_commits_on_tz = max(data.commits_by_timezone.values())
862 for i in sorted(data.commits_by_timezone.keys(), key = lambda n : int(n)):
863 commits = data.commits_by_timezone[i]
864 r = 127 + int((float(commits) / max_commits_on_tz) * 128)
865 f.write('<tr><th>%s</th><td style="background-color: rgb(%d, 0, 0)">%d</td></tr>' % (i, r, commits))
866 f.write('</tr></table>')
868 f.write('</body></html>')
869 f.close()
872 # Authors
873 f = open(path + '/authors.html', 'w')
874 self.printHeader(f)
876 f.write('<h1>Authors</h1>')
877 self.printNav(f)
879 # Authors :: List of authors
880 f.write(html_header(2, 'List of Authors'))
882 f.write('<table class="authors sortable" id="authors">')
883 f.write('<tr><th>Author</th><th>Commits (%)</th><th>+ lines</th><th>- lines</th><th>First commit</th><th>Last commit</th><th class="unsortable">Age</th><th>Active days</th><th># by commits</th></tr>')
884 for author in data.getAuthors(conf['max_authors']):
885 info = data.getAuthorInfo(author)
886 f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d</td><td>%d</td><td>%s</td><td>%s</td><td>%s</td><td>%d</td><td>%d</td></tr>' % (author, info['commits'], info['commits_frac'], info['lines_added'], info['lines_removed'], info['date_first'], info['date_last'], info['timedelta'], len(info['active_days']), info['place_by_commits']))
887 f.write('</table>')
889 allauthors = data.getAuthors()
890 if len(allauthors) > conf['max_authors']:
891 rest = allauthors[conf['max_authors']:]
892 f.write('<p class="moreauthors">These didn\'t make it to the top: %s</p>' % ', '.join(rest))
894 f.write(html_header(2, 'Cumulated Added Lines of Code per Author'))
895 f.write('<img src="lines_of_code_by_author.png" alt="Lines of code per Author" />')
896 if len(allauthors) > conf['max_authors']:
897 f.write('<p class="moreauthors">Only top %d authors shown</p>' % conf['max_authors'])
899 f.write(html_header(2, 'Commits per Author'))
900 f.write('<img src="commits_by_author.png" alt="Commits per Author" />')
901 if len(allauthors) > conf['max_authors']:
902 f.write('<p class="moreauthors">Only top %d authors shown</p>' % conf['max_authors'])
904 fgl = open(path + '/lines_of_code_by_author.dat', 'w')
905 fgc = open(path + '/commits_by_author.dat', 'w')
907 lines_by_authors = {} # cumulated added lines by
908 # author. to save memory,
909 # changes_by_date_by_author[stamp][author] is defined
910 # only at points where author commits.
911 # lines_by_authors allows us to generate all the
912 # points in the .dat file.
914 # Don't rely on getAuthors to give the same order each
915 # time. Be robust and keep the list in a variable.
916 commits_by_authors = {} # cumulated commits by author
918 self.authors_to_plot = data.getAuthors(conf['max_authors'])
919 for author in self.authors_to_plot:
920 lines_by_authors[author] = 0
921 commits_by_authors[author] = 0
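# One row per timestamp: the cumulative lines/commits of every plotted author, carrying the
# last known value forward for authors without a commit at that timestamp.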
922 for stamp in sorted(data.changes_by_date_by_author.keys()):
923 fgl.write('%d' % stamp)
924 fgc.write('%d' % stamp)
925 for author in self.authors_to_plot:
926 if author in data.changes_by_date_by_author[stamp].keys():
927 lines_by_authors[author] = data.changes_by_date_by_author[stamp][author]['lines_added']
928 commits_by_authors[author] = data.changes_by_date_by_author[stamp][author]['commits']
929 fgl.write(' %d' % lines_by_authors[author])
930 fgc.write(' %d' % commits_by_authors[author])
931 fgl.write('\n')
932 fgc.write('\n')
933 fgl.close()
934 fgc.close()
936 # Authors :: Author of Month
937 f.write(html_header(2, 'Author of Month'))
938 f.write('<table class="sortable" id="aom">')
939 f.write('<tr><th>Month</th><th>Author</th><th>Commits (%%)</th><th class="unsortable">Next top %d</th><th>Number of authors</th></tr>' % conf['authors_top'])
940 for yymm in reversed(sorted(data.author_of_month.keys())):
941 authordict = data.author_of_month[yymm]
942 authors = getkeyssortedbyvalues(authordict)
943 authors.reverse()
944 commits = data.author_of_month[yymm][authors[0]]
945 next = ', '.join(authors[1:conf['authors_top']+1])
946 f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td><td>%d</td></tr>' % (yymm, authors[0], commits, (100.0 * commits) / data.commits_by_month[yymm], data.commits_by_month[yymm], next, len(authors)))
948 f.write('</table>')
950 f.write(html_header(2, 'Author of Year'))
951 f.write('<table class="sortable" id="aoy"><tr><th>Year</th><th>Author</th><th>Commits (%%)</th><th class="unsortable">Next top %d</th><th>Number of authors</th></tr>' % conf['authors_top'])
952 for yy in reversed(sorted(data.author_of_year.keys())):
953 authordict = data.author_of_year[yy]
954 authors = getkeyssortedbyvalues(authordict)
955 authors.reverse()
956 commits = data.author_of_year[yy][authors[0]]
957 next = ', '.join(authors[1:conf['authors_top']+1])
958 f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td><td>%d</td></tr>' % (yy, authors[0], commits, (100.0 * commits) / data.commits_by_year[yy], data.commits_by_year[yy], next, len(authors)))
959 f.write('</table>')
961 # Domains
962 f.write(html_header(2, 'Commits by Domains'))
963 domains_by_commits = getkeyssortedbyvaluekey(data.domains, 'commits')
964 domains_by_commits.reverse() # most first
965 f.write('<div class="vtable"><table>')
966 f.write('<tr><th>Domains</th><th>Total (%)</th></tr>')
967 fp = open(path + '/domains.dat', 'w')
968 n = 0
969 for domain in domains_by_commits:
970 if n == conf['max_domains']:
971 break
972 commits = 0
973 n += 1
974 info = data.getDomainInfo(domain)
975 fp.write('%s %d %d\n' % (domain, n , info['commits']))
976 f.write('<tr><th>%s</th><td>%d (%.2f%%)</td></tr>' % (domain, info['commits'], (100.0 * info['commits'] / totalcommits)))
977 f.write('</table></div>')
978 f.write('<img src="domains.png" alt="Commits by Domains" />')
979 fp.close()
981 f.write('</body></html>')
982 f.close()
985 # Files
986 f = open(path + '/files.html', 'w')
987 self.printHeader(f)
988 f.write('<h1>Files</h1>')
989 self.printNav(f)
991 f.write('<dl>\n')
992 f.write('<dt>Total files</dt><dd>%d</dd>' % data.getTotalFiles())
993 f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
994 f.write('<dt>Average lines per file</dt><dd>%.2f</dd>' % ((1.0 * data.getTotalLOC()) / data.getTotalFiles()))
995 f.write('</dl>\n')
997 # Files :: File count by date
998 f.write(html_header(2, 'File count by date'))
1000 # use set to get rid of duplicate/unnecessary entries
1001 files_by_date = set()
1002 for stamp in sorted(data.files_by_stamp.keys()):
1003 files_by_date.add('%s %d' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))
1005 fg = open(path + '/files_by_date.dat', 'w')
1006 for line in sorted(list(files_by_date)):
1007 fg.write('%s\n' % line)
1008 #for stamp in sorted(data.files_by_stamp.keys()):
1009 # fg.write('%s %d\n' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))
1010 fg.close()
1012 f.write('<img src="files_by_date.png" alt="Files by Date" />')
1014 #f.write('<h2>Average file size by date</h2>')
1016 # Files :: Extensions
1017 f.write(html_header(2, 'Extensions'))
1018 f.write('<table class="sortable" id="ext"><tr><th>Extension</th><th>Files (%)</th><th>Lines (%)</th><th>Lines/file</th></tr>')
1019 for ext in sorted(data.extensions.keys()):
1020 files = data.extensions[ext]['files']
1021 lines = data.extensions[ext]['lines']
1022 f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d (%.2f%%)</td><td>%d</td></tr>' % (ext, files, (100.0 * files) / data.getTotalFiles(), lines, (100.0 * lines) / data.getTotalLOC(), lines / files))
1023 f.write('</table>')
1025 f.write('</body></html>')
1026 f.close()
1029 # Lines
1030 f = open(path + '/lines.html', 'w')
1031 self.printHeader(f)
1032 f.write('<h1>Lines</h1>')
1033 self.printNav(f)
1035 f.write('<dl>\n')
1036 f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
1037 f.write('</dl>\n')
1039 f.write(html_header(2, 'Lines of Code'))
1040 f.write('<img src="lines_of_code.png" />')
1042 fg = open(path + '/lines_of_code.dat', 'w')
1043 for stamp in sorted(data.changes_by_date.keys()):
1044 fg.write('%d %d\n' % (stamp, data.changes_by_date[stamp]['lines']))
1045 fg.close()
1047 f.write('</body></html>')
1048 f.close()
1051 # tags.html
1052 f = open(path + '/tags.html', 'w')
1053 self.printHeader(f)
1054 f.write('<h1>Tags</h1>')
1055 self.printNav(f)
1057 f.write('<dl>')
1058 f.write('<dt>Total tags</dt><dd>%d</dd>' % len(data.tags))
1059 if len(data.tags) > 0:
1060 f.write('<dt>Average commits per tag</dt><dd>%.2f</dd>' % (1.0 * data.getTotalCommits() / len(data.tags)))
1061 f.write('</dl>')
1063 f.write('<table class="tags">')
1064 f.write('<tr><th>Name</th><th>Date</th><th>Commits</th><th>Authors</th></tr>')
1065 # sort the tags by date desc
1066 tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), data.tags.items()))))
1067 for tag in tags_sorted_by_date_desc:
1068 authorinfo = []
1069 self.authors_by_commits = getkeyssortedbyvalues(data.tags[tag]['authors'])
1070 for i in reversed(self.authors_by_commits):
1071 authorinfo.append('%s (%d)' % (i, data.tags[tag]['authors'][i]))
1072 f.write('<tr><td>%s</td><td>%s</td><td>%d</td><td>%s</td></tr>' % (tag, data.tags[tag]['date'], data.tags[tag]['commits'], ', '.join(authorinfo)))
1073 f.write('</table>')
1075 f.write('</body></html>')
1076 f.close()
1078 self.createGraphs(path)
1080 def createGraphs(self, path):
1081 print 'Generating graphs...'
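# Each block below writes a small gnuplot script (*.plot) driven by the .dat files generated
# earlier; all scripts are run through gnuplot at the end of this method.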
1083 # hour of day
1084 f = open(path + '/hour_of_day.plot', 'w')
1085 f.write(GNUPLOT_COMMON)
1086 f.write(
1087 """
1088 set output 'hour_of_day.png'
1089 unset key
1090 set xrange [0.5:24.5]
1091 set xtics 4
1092 set grid y
1093 set ylabel "Commits"
1094 plot 'hour_of_day.dat' using 1:2:(0.5) w boxes fs solid
1095 """)
1096 f.close()
1098 # day of week
1099 f = open(path + '/day_of_week.plot', 'w')
1100 f.write(GNUPLOT_COMMON)
1101 f.write(
1102 """
1103 set output 'day_of_week.png'
1104 unset key
1105 set xrange [0.5:7.5]
1106 set xtics 1
1107 set grid y
1108 set ylabel "Commits"
1109 plot 'day_of_week.dat' using 1:3:(0.5):xtic(2) w boxes fs solid
1110 """)
1111 f.close()
1113 # Domains
1114 f = open(path + '/domains.plot', 'w')
1115 f.write(GNUPLOT_COMMON)
1116 f.write(
1117 """
1118 set output 'domains.png'
1119 unset key
1120 unset xtics
1121 set yrange [0:]
1122 set grid y
1123 set ylabel "Commits"
1124 plot 'domains.dat' using 2:3:(0.5) with boxes fs solid, '' using 2:3:1 with labels rotate by 45 offset 0,1
1125 """)
1126 f.close()
1128 # Month of Year
1129 f = open(path + '/month_of_year.plot', 'w')
1130 f.write(GNUPLOT_COMMON)
1131 f.write(
1132 """
1133 set output 'month_of_year.png'
1134 unset key
1135 set xrange [0.5:12.5]
1136 set xtics 1
1137 set grid y
1138 set ylabel "Commits"
1139 plot 'month_of_year.dat' using 1:2:(0.5) w boxes fs solid
1140 """)
1141 f.close()
1143 # commits_by_year_month
1144 f = open(path + '/commits_by_year_month.plot', 'w')
1145 f.write(GNUPLOT_COMMON)
1146 f.write(
1147 """
1148 set output 'commits_by_year_month.png'
1149 unset key
1150 set xdata time
1151 set timefmt "%Y-%m"
1152 set format x "%Y-%m"
1153 set xtics rotate
1154 set bmargin 5
1155 set grid y
1156 set ylabel "Commits"
1157 plot 'commits_by_year_month.dat' using 1:2:(0.5) w boxes fs solid
1158 """)
1159 f.close()
1161 # commits_by_year
1162 f = open(path + '/commits_by_year.plot', 'w')
1163 f.write(GNUPLOT_COMMON)
1164 f.write(
1165 """
1166 set output 'commits_by_year.png'
1167 unset key
1168 set xtics 1 rotate
1169 set grid y
1170 set ylabel "Commits"
1171 set yrange [0:]
1172 plot 'commits_by_year.dat' using 1:2:(0.5) w boxes fs solid
1173 """)
1174 f.close()
1176 # Files by date
1177 f = open(path + '/files_by_date.plot', 'w')
1178 f.write(GNUPLOT_COMMON)
1179 f.write(
1180 """
1181 set output 'files_by_date.png'
1182 unset key
1183 set xdata time
1184 set timefmt "%Y-%m-%d"
1185 set format x "%Y-%m-%d"
1186 set grid y
1187 set ylabel "Files"
1188 set xtics rotate
1189 set ytics autofreq
1190 set bmargin 6
1191 plot 'files_by_date.dat' using 1:2 w steps
1192 """)
1193 f.close()
1195 # Lines of Code
1196 f = open(path + '/lines_of_code.plot', 'w')
1197 f.write(GNUPLOT_COMMON)
1198 f.write(
1199 """
1200 set output 'lines_of_code.png'
1201 unset key
1202 set xdata time
1203 set timefmt "%s"
1204 set format x "%Y-%m-%d"
1205 set grid y
1206 set ylabel "Lines"
1207 set xtics rotate
1208 set bmargin 6
1209 plot 'lines_of_code.dat' using 1:2 w lines
1210 """)
1211 f.close()
1213 # Lines of Code Added per author
1214 f = open(path + '/lines_of_code_by_author.plot', 'w')
1215 f.write(GNUPLOT_COMMON)
1216 f.write(
1217 """
1218 set terminal png transparent size 640,480
1219 set output 'lines_of_code_by_author.png'
1220 set key left top
1221 set xdata time
1222 set timefmt "%s"
1223 set format x "%Y-%m-%d"
1224 set grid y
1225 set ylabel "Lines"
1226 set xtics rotate
1227 set bmargin 6
1228 plot """
1230 i = 1
1231 plots = []
1232 for a in self.authors_to_plot:
1233 i = i + 1
1234 plots.append("""'lines_of_code_by_author.dat' using 1:%d title "%s" w lines""" % (i, a.replace("\"", "\\\"")))
1235 f.write(", ".join(plots))
1236 f.write('\n')
1238 f.close()
1240 # Commits per author
1241 f = open(path + '/commits_by_author.plot', 'w')
1242 f.write(GNUPLOT_COMMON)
1243 f.write(
1244 """
1245 set terminal png transparent size 640,480
1246 set output 'commits_by_author.png'
1247 set key left top
1248 set xdata time
1249 set timefmt "%s"
1250 set format x "%Y-%m-%d"
1251 set grid y
1252 set ylabel "Commits"
1253 set xtics rotate
1254 set bmargin 6
1255 plot """
1257 i = 1
1258 plots = []
1259 for a in self.authors_to_plot:
1260 i = i + 1
1261 plots.append("""'commits_by_author.dat' using 1:%d title "%s" w lines""" % (i, a.replace("\"", "\\\"")))
1262 f.write(", ".join(plots))
1263 f.write('\n')
1265 f.close()
1267 os.chdir(path)
1268 files = glob.glob(path + '/*.plot')
1269 for f in files:
1270 out = getpipeoutput([gnuplot_cmd + ' "%s"' % f])
1271 if len(out) > 0:
1272 print out
1274 def printHeader(self, f, title = ''):
1275 f.write(
1276 """<?xml version="1.0" encoding="UTF-8"?>
1277 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
1278 <html xmlns="http://www.w3.org/1999/xhtml">
1279 <head>
1280 <title>GitStats - %s</title>
1281 <link rel="stylesheet" href="%s" type="text/css" />
1282 <meta name="generator" content="GitStats %s" />
1283 <script type="text/javascript" src="sortable.js"></script>
1284 </head>
1285 <body>
1286 """ % (self.title, conf['style'], getversion()))
1288 def printNav(self, f):
1289 f.write("""
1290 <div class="nav">
1291 <ul>
1292 <li><a href="index.html">General</a></li>
1293 <li><a href="activity.html">Activity</a></li>
1294 <li><a href="authors.html">Authors</a></li>
1295 <li><a href="files.html">Files</a></li>
1296 <li><a href="lines.html">Lines</a></li>
1297 <li><a href="tags.html">Tags</a></li>
1298 </ul>
1299 </div>
1300 """)
1303 class GitStats:
1304 def run(self, args_orig):
1305 optlist, args = getopt.getopt(args_orig, 'c:')
1306 for o,v in optlist:
1307 if o == '-c':
1308 key, value = v.split('=', 1)
1309 if key not in conf:
1310 raise KeyError('no such key "%s" in config' % key)
1311 if isinstance(conf[key], int):
1312 conf[key] = int(value)
1313 else:
1314 conf[key] = value
1316 if len(args) < 2:
1317 print """
1318 Usage: gitstats [options] <gitpath..> <outputpath>
1320 Options:
1321 -c key=value Override configuration value
1323 Default config values:
1325 """ % conf
1326 sys.exit(0)
1328 outputpath = os.path.abspath(args[-1])
1329 rundir = os.getcwd()
1331 try:
1332 os.makedirs(outputpath)
1333 except OSError:
1334 pass
1335 if not os.path.isdir(outputpath):
1336 print 'FATAL: Output path is not a directory or does not exist'
1337 sys.exit(1)
1339 print 'Output path: %s' % outputpath
1340 cachefile = os.path.join(outputpath, 'gitstats.cache')
1342 data = GitDataCollector()
1343 data.loadCache(cachefile)
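# Several repository paths may be given; each is visited in turn and accumulated into the
# same GitDataCollector before the report is generated.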
1345 for gitpath in args[0:-1]:
1346 print 'Git path: %s' % gitpath
1348 os.chdir(gitpath)
1350 print 'Collecting data...'
1351 data.collect(gitpath)
1353 print 'Refining data...'
1354 data.saveCache(cachefile)
1355 data.refine()
1357 os.chdir(rundir)
1359 print 'Generating report...'
1360 report = HTMLReportCreator()
1361 report.create(data, outputpath)
1363 time_end = time.time()
1364 exectime_internal = time_end - time_start
1365 print 'Execution time %.5f secs, %.5f secs (%.2f %%) in external commands' % (exectime_internal, exectime_external, (100.0 * exectime_external) / exectime_internal)
1367 if __name__=='__main__':
1368 g = GitStats()
1369 g.run(sys.argv[1:])