# Copyright (c) 2007-2008 Heikki Hokkanen <hoxu@users.sf.net>
import datetime, glob, os, pickle, re, shutil, subprocess, sys, time, zlib

GNUPLOT_COMMON = 'set terminal png transparent\nset size 0.5,0.5\n'
MAX_EXT_LENGTH = 10 # maximum file extension length

exectime_internal = 0.0
exectime_external = 0.0
time_start = time.time()

# By default, gnuplot is looked up from PATH; the binary used can be overridden
# with the environment variable "GNUPLOT".
gnuplot_cmd = 'gnuplot'
if 'GNUPLOT' in os.environ:
    gnuplot_cmd = os.environ['GNUPLOT']
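# Illustrative override (paths are examples only):
#   GNUPLOT=/usr/local/bin/gnuplot gitstats <gitpath> <outputpath>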
def getpipeoutput(cmds, quiet = False):
    global exectime_external
    start = time.time()
    if not quiet:
        print '>> ' + ' | '.join(cmds),
        sys.stdout.flush()
    p0 = subprocess.Popen(cmds[0], stdout = subprocess.PIPE, shell = True)
    p = p0
    for x in cmds[1:]:
        p = subprocess.Popen(x, stdin = p0.stdout, stdout = subprocess.PIPE, shell = True)
        p0 = p
    output = p.communicate()[0]
    end = time.time()
    if not quiet:
        print '\r[%.5f] >> %s' % (end - start, ' | '.join(cmds))
    exectime_external += (end - start)
    return output.rstrip('\n')
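# Illustrative use of getpipeoutput (the command list here is an example, not a
# call made by the script): getpipeoutput(['git rev-list HEAD', 'wc -l']) runs
# the shell pipeline "git rev-list HEAD | wc -l" and returns its stdout with the
# trailing newline stripped; each list element becomes one stage of the pipe.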
def getkeyssortedbyvalues(d):
    return map(lambda el : el[1], sorted(map(lambda el : (el[1], el[0]), d.items())))

# d['author'] = { 'commits': 512 } => getkeyssortedbyvaluekey(d, 'commits') sorts the authors by commit count
def getkeyssortedbyvaluekey(d, key):
    return map(lambda el : el[1], sorted(map(lambda el : (d[el][key], el), d.keys())))
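# Illustrative behaviour of the two helpers above (values invented for the example):
#   getkeyssortedbyvalues({'a': 3, 'b': 1})  ->  ['b', 'a']   (ascending by value)
#   getkeyssortedbyvaluekey({'ann': {'commits': 5}, 'bob': {'commits': 9}}, 'commits')  ->  ['ann', 'bob']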
class DataCollector:
    """Manages data collection from a revision control repository."""
    def __init__(self):
        self.stamp_created = time.time()
        self.cache = {}

    # This should be the main function to extract data from the repository.
    def collect(self, dir):
        self.projectname = os.path.basename(os.path.abspath(dir))
    def loadCache(self, cachefile):
        if not os.path.exists(cachefile):
            return
        print 'Loading cache...'
        f = open(cachefile, 'rb')
        try:
            self.cache = pickle.loads(zlib.decompress(f.read()))
        except:
            # temporary hack to upgrade non-compressed caches
            f.seek(0)
            self.cache = pickle.load(f)
        f.close()
    # Produce any additional statistics from the extracted data.
    def refine(self):
        pass

    # Get a dictionary describing the given author.
    def getAuthorInfo(self, author):
        pass

    def getActivityByDayOfWeek(self):
        pass

    def getActivityByHourOfDay(self):
        pass

    # Get a list of authors.
    def getAuthors(self):
        pass

    def getFirstCommitDate(self):
        return datetime.datetime.now()

    def getLastCommitDate(self):
        return datetime.datetime.now()

    def getStampCreated(self):
        return self.stamp_created

    def getTotalAuthors(self):
        pass

    def getTotalCommits(self):
        pass

    def getTotalFiles(self):
        pass

    def getTotalLOC(self):
        pass
    # Save cacheable data
    def saveCache(self, filename):
        print 'Saving cache...'
        f = open(filename, 'wb')  # write to the filename argument, not the global cachefile
        #pickle.dump(self.cache, f)
        data = zlib.compress(pickle.dumps(self.cache))
        f.write(data)
        f.close()
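    # The cache written above is simply a zlib-compressed pickle of self.cache;
    # a minimal manual round-trip (illustrative) would be:
    #   pickle.loads(zlib.decompress(open('gitstats.cache', 'rb').read()))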
class GitDataCollector(DataCollector):
    def collect(self, dir):
        DataCollector.collect(self, dir)

        try:
            self.total_authors = int(getpipeoutput(['git log', 'git shortlog -s', 'wc -l']))
        except ValueError:
            self.total_authors = 0
        #self.total_lines = int(getoutput('git-ls-files -z |xargs -0 cat |wc -l'))

        self.activity_by_hour_of_day = {} # hour -> commits
        self.activity_by_day_of_week = {} # day -> commits
        self.activity_by_month_of_year = {} # month [1-12] -> commits
        self.activity_by_hour_of_week = {} # weekday -> hour -> commits
        self.activity_by_hour_of_day_busiest = 0
        self.activity_by_hour_of_week_busiest = 0

        self.authors = {} # name -> {commits, first_commit_stamp, last_commit_stamp}

        # author of the month
        self.author_of_month = {} # month -> author -> commits
        self.author_of_year = {} # year -> author -> commits
        self.commits_by_month = {} # month -> commits
        self.commits_by_year = {} # year -> commits
        self.first_commit_stamp = 0
        self.last_commit_stamp = 0
        # tags
        self.tags = {}
        lines = getpipeoutput(['git show-ref --tags']).split('\n')
        for line in lines:
            if len(line) == 0:
                continue
            (hash, tag) = line.split(' ')
            tag = tag.replace('refs/tags/', '')
            output = getpipeoutput(['git log "%s" --pretty=format:"%%at %%an" -n 1' % hash])
            if len(output) > 0:
                parts = output.split(' ')
                stamp = int(parts[0])
                self.tags[tag] = { 'stamp': stamp, 'hash' : hash, 'date' : datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d') }
        # Collect revision statistics
        # Outputs "<stamp> <author>"
        lines = getpipeoutput(['git rev-list --pretty=format:"%at %an" HEAD', 'grep -v ^commit']).split('\n')
        for line in lines:
            # linux-2.6 says "<unknown>" for one line O_o
            parts = line.split(' ')
            author = ''
            try:
                stamp = int(parts[0])
            except ValueError:
                stamp = 0
            if len(parts) > 1:
                author = ' '.join(parts[1:])
            date = datetime.datetime.fromtimestamp(float(stamp))

            # First and last commit stamp
            # (rev-list is newest first, so the first line seen is the last commit)
            if self.last_commit_stamp == 0:
                self.last_commit_stamp = stamp
            self.first_commit_stamp = stamp
            # activity by hour of day
            hour = date.hour
            if hour in self.activity_by_hour_of_day:
                self.activity_by_hour_of_day[hour] += 1
            else:
                self.activity_by_hour_of_day[hour] = 1
            if self.activity_by_hour_of_day[hour] > self.activity_by_hour_of_day_busiest:
                self.activity_by_hour_of_day_busiest = self.activity_by_hour_of_day[hour]

            # activity by day of week
            day = date.weekday()
            if day in self.activity_by_day_of_week:
                self.activity_by_day_of_week[day] += 1
            else:
                self.activity_by_day_of_week[day] = 1

            # activity by hour of week
            if day not in self.activity_by_hour_of_week:
                self.activity_by_hour_of_week[day] = {}
            if hour not in self.activity_by_hour_of_week[day]:
                self.activity_by_hour_of_week[day][hour] = 1
            else:
                self.activity_by_hour_of_week[day][hour] += 1
            if self.activity_by_hour_of_week[day][hour] > self.activity_by_hour_of_week_busiest:
                self.activity_by_hour_of_week_busiest = self.activity_by_hour_of_week[day][hour]

            # activity by month of year
            month = date.month
            if month in self.activity_by_month_of_year:
                self.activity_by_month_of_year[month] += 1
            else:
                self.activity_by_month_of_year[month] = 1
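            # Resulting shapes after this loop (values illustrative):
            #   self.activity_by_hour_of_day  == {0: 3, 9: 41, 14: 27, ...}
            #   self.activity_by_hour_of_week == {0: {9: 12, 14: 8}, ..., 6: {22: 2}}
            # The *_busiest counters track the largest single bucket and are used
            # later to scale the background colours in the HTML tables.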
            # authors
            if author not in self.authors:
                self.authors[author] = {}
            # commits
            if 'last_commit_stamp' not in self.authors[author]:
                self.authors[author]['last_commit_stamp'] = stamp
            self.authors[author]['first_commit_stamp'] = stamp
            if 'commits' in self.authors[author]:
                self.authors[author]['commits'] += 1
            else:
                self.authors[author]['commits'] = 1
            # author of the month/year
            yymm = datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m')
            if yymm in self.author_of_month:
                if author in self.author_of_month[yymm]:
                    self.author_of_month[yymm][author] += 1
                else:
                    self.author_of_month[yymm][author] = 1
            else:
                self.author_of_month[yymm] = {}
                self.author_of_month[yymm][author] = 1
            if yymm in self.commits_by_month:
                self.commits_by_month[yymm] += 1
            else:
                self.commits_by_month[yymm] = 1

            yy = datetime.datetime.fromtimestamp(stamp).year
            if yy in self.author_of_year:
                if author in self.author_of_year[yy]:
                    self.author_of_year[yy][author] += 1
                else:
                    self.author_of_year[yy][author] = 1
            else:
                self.author_of_year[yy] = {}
                self.author_of_year[yy][author] = 1
            if yy in self.commits_by_year:
                self.commits_by_year[yy] += 1
            else:
                self.commits_by_year[yy] = 1
        # TODO Optimize this, it's the worst bottleneck
        # outputs "<stamp> <files>" for each revision
        self.files_by_stamp = {} # stamp -> files
        revlines = getpipeoutput(['git rev-list --pretty=format:"%at %T" HEAD', 'grep -v ^commit']).strip().split('\n')
        lines = []
        for revline in revlines:
            time, rev = revline.split(' ')
            #linecount = int(getpipeoutput(['git-ls-tree -r --name-only "%s"' % rev, 'wc -l']).split('\n')[0])
            linecount = self.getFilesInCommit(rev)
            lines.append('%d %d' % (int(time), linecount))

        self.total_commits = len(lines)
        for line in lines:
            parts = line.split(' ')
            (stamp, files) = parts[0:2]
            try:
                self.files_by_stamp[int(stamp)] = int(files)
            except ValueError:
                print 'Warning: failed to parse line "%s"' % line
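        # Each revline above has the form "<unix stamp> <tree hash>"; for example
        # (hash invented for illustration): "1199142800 4d5fcadc293a348e88f777dc0920f11e7d71441c"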
        # extensions and line counts
        self.extensions = {} # extension -> files, lines
        lines = getpipeoutput(['git ls-files']).split('\n')
        self.total_files = len(lines)
        for line in lines:
            base = os.path.basename(line)
            # Extensionless and hidden files (".foo") are counted under the empty extension
            if base.find('.') == -1 or base.rfind('.') == 0:
                ext = ''
            else:
                ext = base[(base.rfind('.') + 1):]
            if len(ext) > MAX_EXT_LENGTH:
                ext = ''

            if ext not in self.extensions:
                self.extensions[ext] = {'files': 0, 'lines': 0}
            self.extensions[ext]['files'] += 1
            try:
                # Escaping could probably be improved here
                self.extensions[ext]['lines'] += int(getpipeoutput(['wc -l "%s"' % line]).split()[0])
            except:
                print 'Warning: Could not count lines for file "%s"' % line
        # line statistics
        # git log --shortstat outputs, per commit:
        #   <stamp> <author>
        #   N files changed, N insertions (+), N deletions(-)
        self.changes_by_date = {} # stamp -> { files, ins, del }
        lines = getpipeoutput(['git log --shortstat --pretty=format:"%at %an"']).split('\n')
        lines.reverse()
        files = 0; inserted = 0; deleted = 0; total_lines = 0
        for line in lines:
            if len(line) == 0:
                continue

            # <stamp> <author>
            if line.find('files changed,') == -1:
                pos = line.find(' ')
                if pos != -1:
                    try:
                        (stamp, author) = (int(line[:pos]), line[pos+1:])
                        self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted, 'lines': total_lines }
                    except ValueError:
                        print 'Warning: unexpected line "%s"' % line
                else:
                    print 'Warning: unexpected line "%s"' % line
            else:
                numbers = re.findall('\d+', line)
                if len(numbers) == 3:
                    (files, inserted, deleted) = map(lambda el : int(el), numbers)
                    total_lines += inserted
                    total_lines -= deleted
                else:
                    print 'Warning: failed to handle line "%s"' % line
                    (files, inserted, deleted) = (0, 0, 0)
                #self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted }
        self.total_lines = total_lines
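        # The git log output parsed above alternates between "<stamp> <author>"
        # headers and shortstat summaries, roughly (values illustrative):
        #   1199142800 Jane Doe
        #    3 files changed, 17 insertions(+), 4 deletions(-)
        # Because the list is reversed, each summary is seen before its header: the
        # numeric triple updates (files, inserted, deleted) and the header line then
        # records the running totals into self.changes_by_date[stamp].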
    def refine(self):
        # authors
        # name -> {place_by_commits, commits_frac, date_first, date_last, timedelta}
        authors_by_commits = getkeyssortedbyvaluekey(self.authors, 'commits')
        authors_by_commits.reverse() # most commits first
        for i, name in enumerate(authors_by_commits):
            self.authors[name]['place_by_commits'] = i + 1

        for name in self.authors.keys():
            a = self.authors[name]
            a['commits_frac'] = (100 * float(a['commits'])) / self.getTotalCommits()
            date_first = datetime.datetime.fromtimestamp(a['first_commit_stamp'])
            date_last = datetime.datetime.fromtimestamp(a['last_commit_stamp'])
            delta = date_last - date_first
            a['date_first'] = date_first.strftime('%Y-%m-%d')
            a['date_last'] = date_last.strftime('%Y-%m-%d')
            a['timedelta'] = delta
    def getActivityByDayOfWeek(self):
        return self.activity_by_day_of_week

    def getActivityByHourOfDay(self):
        return self.activity_by_hour_of_day

    def getAuthorInfo(self, author):
        return self.authors[author]

    def getAuthors(self):
        return self.authors.keys()
    def getFilesInCommit(self, rev):
        try:
            res = self.cache['files_in_tree'][rev]
        except KeyError:
            res = int(getpipeoutput(['git ls-tree -r --name-only "%s"' % rev, 'wc -l']).split('\n')[0])
            if 'files_in_tree' not in self.cache:
                self.cache['files_in_tree'] = {}
            self.cache['files_in_tree'][rev] = res
        return res
    def getFirstCommitDate(self):
        return datetime.datetime.fromtimestamp(self.first_commit_stamp)

    def getLastCommitDate(self):
        return datetime.datetime.fromtimestamp(self.last_commit_stamp)

    def getTags(self):
        lines = getpipeoutput(['git show-ref --tags', 'cut -d/ -f3'])
        return lines.split('\n')

    def getTagDate(self, tag):
        return self.revToDate('tags/' + tag)

    def getTotalAuthors(self):
        return self.total_authors

    def getTotalCommits(self):
        return self.total_commits

    def getTotalFiles(self):
        return self.total_files

    def getTotalLOC(self):
        return self.total_lines

    def revToDate(self, rev):
        stamp = int(getpipeoutput(['git log --pretty=format:%%at "%s" -n 1' % rev]))
        return datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d')
class ReportCreator:
    """Creates the actual report based on given data."""
    def create(self, data, path):
        self.data = data
        self.path = path

def html_linkify(text):
    return text.lower().replace(' ', '_')

def html_header(level, text):
    name = html_linkify(text)
    return '\n<h%d><a href="#%s" name="%s">%s</a></h%d>\n\n' % (level, name, name, text, level)
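# For example, html_header(2, 'Commits by Year') returns
#   '\n<h2><a href="#commits_by_year" name="commits_by_year">Commits by Year</a></h2>\n\n'
# which gives each section heading an anchor the pages below can link to.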
class HTMLReportCreator(ReportCreator):
    def create(self, data, path):
        ReportCreator.create(self, data, path)
        self.title = data.projectname

        # copy static files to the output path
        for file in ('gitstats.css', 'sortable.js', 'arrow-up.gif', 'arrow-down.gif', 'arrow-none.gif'):
            basedir = os.path.dirname(os.path.abspath(__file__))
            shutil.copyfile(basedir + '/' + file, path + '/' + file)

        # General
        f = open(path + "/index.html", 'w')
        format = '%Y-%m-%d %H:%M:%S'
        self.printHeader(f)

        f.write('<h1>GitStats - %s</h1>' % data.projectname)

        self.printNav(f)

        f.write('<dl>')
        f.write('<dt>Project name</dt><dd>%s</dd>' % (data.projectname))
        f.write('<dt>Generated</dt><dd>%s (in %d seconds)</dd>' % (datetime.datetime.now().strftime(format), time.time() - data.getStampCreated()))
        f.write('<dt>Report Period</dt><dd>%s to %s</dd>' % (data.getFirstCommitDate().strftime(format), data.getLastCommitDate().strftime(format)))
        f.write('<dt>Total Files</dt><dd>%s</dd>' % data.getTotalFiles())
        f.write('<dt>Total Lines of Code</dt><dd>%s</dd>' % data.getTotalLOC())
        f.write('<dt>Total Commits</dt><dd>%s</dd>' % data.getTotalCommits())
        f.write('<dt>Authors</dt><dd>%s</dd>' % data.getTotalAuthors())
        f.write('</dl>')

        f.write('</body>\n</html>')
        f.close()
        # Activity
        f = open(path + '/activity.html', 'w')
        self.printHeader(f)
        f.write('<h1>Activity</h1>')
        self.printNav(f)

        #f.write('<h2>Last 30 days</h2>')
        #f.write('<h2>Last 12 months</h2>')

        # Activity :: Hour of Day
        f.write(html_header(2, 'Hour of Day'))
        hour_of_day = data.getActivityByHourOfDay()
        f.write('<table><tr><th>Hour</th>')
        for i in range(1, 25):
            f.write('<th>%d</th>' % i)
        f.write('</tr>\n<tr><th>Commits</th>')
        fp = open(path + '/hour_of_day.dat', 'w')
        for i in range(0, 24):
            if i in hour_of_day:
                r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
                f.write('<td style="background-color: rgb(%d, 0, 0)">%d</td>' % (r, hour_of_day[i]))
                fp.write('%d %d\n' % (i, hour_of_day[i]))
            else:
                f.write('<td>0</td>')
                fp.write('%d 0\n' % i)
        fp.close()
        f.write('</tr>\n<tr><th>%</th>')
        totalcommits = data.getTotalCommits()
        for i in range(0, 24):
            if i in hour_of_day:
                r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
                f.write('<td style="background-color: rgb(%d, 0, 0)">%.2f</td>' % (r, (100.0 * hour_of_day[i]) / totalcommits))
            else:
                f.write('<td>0.00</td>')
        f.write('</tr></table>')
        f.write('<img src="hour_of_day.png" alt="Hour of Day" />')
        # hour_of_day.dat is rewritten here with 1-based hours to match the gnuplot xrange
        fg = open(path + '/hour_of_day.dat', 'w')
        for i in range(0, 24):
            if i in hour_of_day:
                fg.write('%d %d\n' % (i + 1, hour_of_day[i]))
            else:
                fg.write('%d 0\n' % (i + 1))
        fg.close()
        # Activity :: Day of Week
        f.write(html_header(2, 'Day of Week'))
        day_of_week = data.getActivityByDayOfWeek()
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Day</th><th>Total (%)</th></tr>')
        fp = open(path + '/day_of_week.dat', 'w')
        for d in range(0, 7):
            commits = 0
            if d in day_of_week:
                commits = day_of_week[d]
            fp.write('%d %d\n' % (d + 1, commits))
            f.write('<tr>')
            f.write('<th>%d</th>' % (d + 1))
            if d in day_of_week:
                f.write('<td>%d (%.2f%%)</td>' % (day_of_week[d], (100.0 * day_of_week[d]) / totalcommits))
            else:
                f.write('<td>0</td>')
            f.write('</tr>')
        fp.close()
        f.write('</table></div>')
        f.write('<img src="day_of_week.png" alt="Day of Week" />')
        f.write(html_header(2, 'Hour of Week'))
        f.write('<table>')
        f.write('<tr><th>Weekday</th>')
        for hour in range(0, 24):
            f.write('<th>%d</th>' % (hour + 1))
        f.write('</tr>')
        for weekday in range(0, 7):
            f.write('<tr><th>%d</th>' % (weekday + 1))
            for hour in range(0, 24):
                try:
                    commits = data.activity_by_hour_of_week[weekday][hour]
                except KeyError:
                    commits = 0
                if commits != 0:
                    f.write('<td')
                    r = 127 + int((float(commits) / data.activity_by_hour_of_week_busiest) * 128)
                    f.write(' style="background-color: rgb(%d, 0, 0)"' % r)
                    f.write('>%d</td>' % commits)
                else:
                    f.write('<td></td>')
            f.write('</tr>')
        f.write('</table>')
        # Activity :: Month of Year
        f.write(html_header(2, 'Month of Year'))
        f.write('<div class="vtable"><table>')
        f.write('<tr><th>Month</th><th>Commits (%)</th></tr>')
        fp = open(path + '/month_of_year.dat', 'w')
        for mm in range(1, 13):
            commits = 0
            if mm in data.activity_by_month_of_year:
                commits = data.activity_by_month_of_year[mm]
            f.write('<tr><td>%d</td><td>%d (%.2f %%)</td></tr>' % (mm, commits, (100.0 * commits) / data.getTotalCommits()))
            fp.write('%d %d\n' % (mm, commits))
        fp.close()
        f.write('</table></div>')
        f.write('<img src="month_of_year.png" alt="Month of Year" />')
        # Commits by year/month
        f.write(html_header(2, 'Commits by year/month'))
        f.write('<div class="vtable"><table><tr><th>Month</th><th>Commits</th></tr>')
        for yymm in reversed(sorted(data.commits_by_month.keys())):
            f.write('<tr><td>%s</td><td>%d</td></tr>' % (yymm, data.commits_by_month[yymm]))
        f.write('</table></div>')
        f.write('<img src="commits_by_year_month.png" alt="Commits by year/month" />')
        fg = open(path + '/commits_by_year_month.dat', 'w')
        for yymm in sorted(data.commits_by_month.keys()):
            fg.write('%s %s\n' % (yymm, data.commits_by_month[yymm]))
        fg.close()
        # Commits by year
        f.write(html_header(2, 'Commits by Year'))
        f.write('<div class="vtable"><table><tr><th>Year</th><th>Commits (% of all)</th></tr>')
        for yy in reversed(sorted(data.commits_by_year.keys())):
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td></tr>' % (yy, data.commits_by_year[yy], (100.0 * data.commits_by_year[yy]) / data.getTotalCommits()))
        f.write('</table></div>')
        f.write('<img src="commits_by_year.png" alt="Commits by Year" />')
        fg = open(path + '/commits_by_year.dat', 'w')
        for yy in sorted(data.commits_by_year.keys()):
            fg.write('%d %d\n' % (yy, data.commits_by_year[yy]))
        fg.close()

        f.write('</body></html>')
        f.close()
        # Authors
        f = open(path + '/authors.html', 'w')
        self.printHeader(f)
        f.write('<h1>Authors</h1>')
        self.printNav(f)

        # Authors :: List of authors
        f.write(html_header(2, 'List of Authors'))
        f.write('<table class="authors sortable" id="authors">')
        f.write('<tr><th>Author</th><th>Commits (%)</th><th>First commit</th><th>Last commit</th><th class="unsortable">Age</th><th># by commits</th></tr>')
        for author in sorted(data.getAuthors()):
            info = data.getAuthorInfo(author)
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%s</td><td>%s</td><td>%s</td><td>%d</td></tr>' % (author, info['commits'], info['commits_frac'], info['date_first'], info['date_last'], info['timedelta'], info['place_by_commits']))
        f.write('</table>')
        # Authors :: Author of Month
        f.write(html_header(2, 'Author of Month'))
        f.write('<table class="sortable" id="aom">')
        f.write('<tr><th>Month</th><th>Author</th><th>Commits (%)</th><th class="unsortable">Next top 5</th></tr>')
        for yymm in reversed(sorted(data.author_of_month.keys())):
            authordict = data.author_of_month[yymm]
            authors = getkeyssortedbyvalues(authordict)
            authors.reverse() # most commits first
            commits = data.author_of_month[yymm][authors[0]]
            next = ', '.join(authors[1:5])
            f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td></tr>' % (yymm, authors[0], commits, (100.0 * commits) / data.commits_by_month[yymm], data.commits_by_month[yymm], next))
        f.write('</table>')
        # Authors :: Author of Year
        f.write(html_header(2, 'Author of Year'))
        f.write('<table class="sortable" id="aoy"><tr><th>Year</th><th>Author</th><th>Commits (%)</th><th class="unsortable">Next top 5</th></tr>')
        for yy in reversed(sorted(data.author_of_year.keys())):
            authordict = data.author_of_year[yy]
            authors = getkeyssortedbyvalues(authordict)
            authors.reverse() # most commits first
            commits = data.author_of_year[yy][authors[0]]
            next = ', '.join(authors[1:5])
            f.write('<tr><td>%s</td><td>%s</td><td>%d (%.2f%% of %d)</td><td>%s</td></tr>' % (yy, authors[0], commits, (100.0 * commits) / data.commits_by_year[yy], data.commits_by_year[yy], next))
        f.write('</table>')

        f.write('</body></html>')
        f.close()
        # Files
        f = open(path + '/files.html', 'w')
        self.printHeader(f)
        f.write('<h1>Files</h1>')
        self.printNav(f)

        f.write('<dl>')
        f.write('<dt>Total files</dt><dd>%d</dd>' % data.getTotalFiles())
        f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
        f.write('<dt>Average lines per file</dt><dd>%.2f</dd>' % (float(data.getTotalLOC()) / data.getTotalFiles()))
        f.write('</dl>')

        # Files :: File count by date
        f.write(html_header(2, 'File count by date'))
        fg = open(path + '/files_by_date.dat', 'w')
        for stamp in sorted(data.files_by_stamp.keys()):
            fg.write('%s %d\n' % (datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), data.files_by_stamp[stamp]))
        fg.close()

        f.write('<img src="files_by_date.png" alt="Files by Date" />')
        #f.write('<h2>Average file size by date</h2>')

        # Files :: Extensions
        f.write(html_header(2, 'Extensions'))
        f.write('<table class="sortable" id="ext"><tr><th>Extension</th><th>Files (%)</th><th>Lines (%)</th><th>Lines/file</th></tr>')
        for ext in sorted(data.extensions.keys()):
            files = data.extensions[ext]['files']
            lines = data.extensions[ext]['lines']
            f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d (%.2f%%)</td><td>%d</td></tr>' % (ext, files, (100.0 * files) / data.getTotalFiles(), lines, (100.0 * lines) / data.getTotalLOC(), lines / files))
        f.write('</table>')

        f.write('</body></html>')
        f.close()
        # Lines
        f = open(path + '/lines.html', 'w')
        self.printHeader(f)
        f.write('<h1>Lines</h1>')
        self.printNav(f)
        f.write('<dl>')
        f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
        f.write('</dl>')
        f.write(html_header(2, 'Lines of Code'))
        f.write('<img src="lines_of_code.png" alt="Lines of Code" />')
        fg = open(path + '/lines_of_code.dat', 'w')
        for stamp in sorted(data.changes_by_date.keys()):
            fg.write('%d %d\n' % (stamp, data.changes_by_date[stamp]['lines']))
        fg.close()
        f.write('</body></html>')
        f.close()
        # Tags
        f = open(path + '/tags.html', 'w')
        self.printHeader(f)
        f.write('<h1>Tags</h1>')
        self.printNav(f)
        f.write('<dl>')
        f.write('<dt>Total tags</dt><dd>%d</dd>' % len(data.tags))
        if len(data.tags) > 0:
            f.write('<dt>Average commits per tag</dt><dd>%.2f</dd>' % (1.0 * data.getTotalCommits() / len(data.tags)))
        f.write('</dl>')
        f.write('<table>')
        f.write('<tr><th>Name</th><th>Date</th></tr>')
        # sort the tags by date, newest first
        tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), data.tags.items()))))
        for tag in tags_sorted_by_date_desc:
            f.write('<tr><td>%s</td><td>%s</td></tr>' % (tag, data.tags[tag]['date']))
        f.write('</table>')
        f.write('</body></html>')
        f.close()
        self.createGraphs(path)

    def createGraphs(self, path):
        print 'Generating graphs...'
        # hour of day
        f = open(path + '/hour_of_day.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'hour_of_day.png'
set xrange [0.5:24.5]
plot 'hour_of_day.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()
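        # With GNUPLOT_COMMON defined at the top of the file, hour_of_day.plot
        # ends up with the following gnuplot directives:
        #   set terminal png transparent
        #   set size 0.5,0.5
        #   set output 'hour_of_day.png'
        #   set xrange [0.5:24.5]
        #   plot 'hour_of_day.dat' using 1:2:(0.5) w boxes fs solid
        # The other .plot files below follow the same pattern.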
        f = open(path + '/day_of_week.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'day_of_week.png'
plot 'day_of_week.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()
        f = open(path + '/month_of_year.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'month_of_year.png'
set xrange [0.5:12.5]
plot 'month_of_year.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()
        # commits_by_year_month
        f = open(path + '/commits_by_year_month.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'commits_by_year_month.png'
set xdata time
set timefmt "%Y-%m"
set format x "%Y-%m"
set xtics rotate by 90 15768000
plot 'commits_by_year_month.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()
        f = open(path + '/commits_by_year.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'commits_by_year.png'
plot 'commits_by_year.dat' using 1:2:(0.5) w boxes fs solid
""")
        f.close()
        f = open(path + '/files_by_date.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'files_by_date.png'
set xdata time
set timefmt "%Y-%m-%d"
set format x "%Y-%m-%d"
set xtics rotate by 90
plot 'files_by_date.dat' using 1:2 w histeps
""")
        f.close()
        f = open(path + '/lines_of_code.plot', 'w')
        f.write(GNUPLOT_COMMON)
        f.write(
"""
set output 'lines_of_code.png'
set xdata time
set timefmt "%s"
set format x "%Y-%m-%d"
set xtics rotate by 90
plot 'lines_of_code.dat' using 1:2 w lines
""")
        f.close()
        # run gnuplot from the output path so the relative .dat/.png names resolve there
        os.chdir(path)
        files = glob.glob(path + '/*.plot')
        for f in files:
            out = getpipeoutput([gnuplot_cmd + ' "%s"' % f])
    def printHeader(self, f, title = ''):
        f.write(
"""<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
    <title>GitStats - %s</title>
    <link rel="stylesheet" href="gitstats.css" type="text/css" />
    <meta name="generator" content="GitStats" />
    <script type="text/javascript" src="sortable.js"></script>
</head>
<body>
""" % self.title)
    def printNav(self, f):
        f.write("""
<div class="nav">
<ul>
<li><a href="index.html">General</a></li>
<li><a href="activity.html">Activity</a></li>
<li><a href="authors.html">Authors</a></li>
<li><a href="files.html">Files</a></li>
<li><a href="lines.html">Lines</a></li>
<li><a href="tags.html">Tags</a></li>
</ul>
</div>
""")
usage = """
Usage: gitstats [options] <gitpath> <outputpath>
"""

if len(sys.argv) < 3:
    print usage
    sys.exit(0)

gitpath = sys.argv[1]
outputpath = os.path.abspath(sys.argv[2])

try:
    os.makedirs(outputpath)
except OSError:
    pass
if not os.path.isdir(outputpath):
    print 'FATAL: Output path is not a directory or does not exist'
    sys.exit(1)

print 'Git path: %s' % gitpath
print 'Output path: %s' % outputpath

# git commands operate on the current working directory
os.chdir(gitpath)

cachefile = os.path.join(outputpath, 'gitstats.cache')

print 'Collecting data...'
data = GitDataCollector()
data.loadCache(cachefile)
data.collect(gitpath)

print 'Refining data...'
data.saveCache(cachefile)
data.refine()

print 'Generating report...'
report = HTMLReportCreator()
report.create(data, outputpath)

time_end = time.time()
exectime_internal = time_end - time_start
print 'Execution time %.5f secs, %.5f secs (%.2f %%) in external commands' % (exectime_internal, exectime_external, (100.0 * exectime_external) / exectime_internal)
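# Typical invocation (paths are examples only):
#   ./gitstats <path-to-git-repository> <output-directory>
# then open <output-directory>/index.html in a browser.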