@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 # Copyright (c) 2007-2014 Heikki Hokkanen <hoxu@users.sf.net> & others (see doc/AUTHOR)
 # GPLv2 / GPLv3
 import datetime
@@ -14,8 +14,8 @@ import sys
 import time
 import zlib
 
-if sys.version_info < (2, 6):
-	print >> sys.stderr, "Python 2.6 or higher is required for gitstats"
+if sys.version_info < (3, 6):
+	print("Python 3.6 or higher is required for gitstats", file=sys.stderr)
 	sys.exit(1)
 
 from multiprocessing import Pool
@@ -54,7 +54,7 @@ def getpipeoutput(cmds, quiet = False):
 	global exectime_external
 	start = time.time()
 	if not quiet and ON_LINUX and os.isatty(1):
-		print '>> ' + ' | '.join(cmds),
+		print('>> ' + ' | '.join(cmds), end='')
 		sys.stdout.flush()
 	p = subprocess.Popen(cmds[0], stdout = subprocess.PIPE, shell = True)
 	processes=[p]
@@ -67,10 +67,10 @@ def getpipeoutput(cmds, quiet = False):
 	end = time.time()
 	if not quiet:
 		if ON_LINUX and os.isatty(1):
-			print '\r',
-		print '[%.5f] >> %s' % (end - start, ' | '.join(cmds))
+			print('\r', end='')
+		print('[%.5f] >> %s' % (end - start, ' | '.join(cmds)))
 	exectime_external += (end - start)
-	return output.rstrip('\n')
+	return output.decode('utf-8', errors='replace').rstrip('\n')
 
 def getlogrange(defaultrange = 'HEAD', end_only = True):
 	commit_range = getcommitrange(defaultrange, end_only)
@@ -86,14 +86,14 @@ def getcommitrange(defaultrange = 'HEAD', end_only = False):
 	return defaultrange
 
 def getkeyssortedbyvalues(dict):
-	return map(lambda el : el[1], sorted(map(lambda el : (el[1], el[0]), dict.items())))
+	return list(map(lambda el : el[1], sorted(map(lambda el : (el[1], el[0]), dict.items()))))
 
 # dict['author'] = { 'commits': 512 } - ...key(dict, 'commits')
 def getkeyssortedbyvaluekey(d, key):
-	return map(lambda el : el[1], sorted(map(lambda el : (d[el][key], el), d.keys())))
+	return list(map(lambda el : el[1], sorted(map(lambda el : (d[el][key], el), d.keys()))))
 
 def getstatsummarycounts(line):
-	numbers = re.findall('\d+', line)
+	numbers = re.findall(r'\d+', line)
 	if len(numbers) == 1:
 		# neither insertions nor deletions: may probably only happen for "0 files changed"
 		numbers.append(0);
@@ -207,7 +207,7 @@ class DataCollector:
 	def loadCache(self, cachefile):
 		if not os.path.exists(cachefile):
 			return
-		print 'Loading cache...'
+		print('Loading cache...')
 		f = open(cachefile, 'rb')
 		try:
 			self.cache = pickle.loads(zlib.decompress(f.read()))
@@ -269,7 +269,7 @@ class DataCollector:
 	##
 	# Save cacheable data
 	def saveCache(self, cachefile):
-		print 'Saving cache...'
+		print('Saving cache...')
 		tempfile = cachefile + '.tmp'
 		f = open(tempfile, 'wb')
 		#pickle.dump(self.cache, f)
@@ -308,7 +308,7 @@ class GitDataCollector(DataCollector):
 				self.tags[tag] = { 'stamp': stamp, 'hash' : hash, 'date' : datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'), 'commits': 0, 'authors': {} }
 
 		# collect info on tags, starting from latest
-		tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), self.tags.items()))))
+		tags_sorted_by_date_desc = list(map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), self.tags.items())))))
 		prev = None
 		for tag in reversed(tags_sorted_by_date_desc):
 			cmd = 'git shortlog -s "%s"' % tag
@@ -319,7 +319,7 @@ class GitDataCollector(DataCollector):
 				continue
 			prev = tag
 			for line in output.split('\n'):
-				parts = re.split('\s+', line, 2)
+				parts = re.split(r'\s+', line, 2)
 				commits = int(parts[1])
 				author = parts[2]
 				self.tags[tag]['commits'] += commits
@@ -444,10 +444,10 @@ class GitDataCollector(DataCollector):
 			time, rev = revline.split(' ')
 			#if cache empty then add time and rev to list of new rev's
 			#otherwise try to read needed info from cache
-			if 'files_in_tree' not in self.cache.keys():
+			if 'files_in_tree' not in self.cache:
 				revs_to_read.append((time,rev))
 				continue
-			if rev in self.cache['files_in_tree'].keys():
+			if rev in self.cache['files_in_tree']:
 				lines.append('%d %d' % (int(time), self.cache['files_in_tree'][rev]))
 			else:
 				revs_to_read.append((time,rev))
@@ -474,7 +474,7 @@ class GitDataCollector(DataCollector):
 			try:
 				self.files_by_stamp[int(stamp)] = int(files)
 			except ValueError:
-				print 'Warning: failed to parse line "%s"' % line
+				print('Warning: failed to parse line "%s"' % line)
 
 		# extensions and size of files
 		lines = getpipeoutput(['git ls-tree -r -l -z %s' % getcommitrange('HEAD', end_only = True)]).split('\000')
@@ -482,7 +482,7 @@ class GitDataCollector(DataCollector):
 		for line in lines:
 			if len(line) == 0:
 				continue
-			parts = re.split('\s+', line, 4)
+			parts = re.split(r'\s+', line, 4)
 			if parts[0] == '160000' and parts[3] == '-':
 				# skip submodules
 				continue
@@ -505,10 +505,10 @@ class GitDataCollector(DataCollector):
 			self.extensions[ext]['files'] += 1
 			#if cache empty then add ext and blob id to list of new blob's
 			#otherwise try to read needed info from cache
-			if 'lines_in_blob' not in self.cache.keys():
+			if 'lines_in_blob' not in self.cache:
 				blobs_to_read.append((ext,blob_id))
 				continue
-			if blob_id in self.cache['lines_in_blob'].keys():
+			if blob_id in self.cache['lines_in_blob']:
 				self.extensions[ext]['lines'] += self.cache['lines_in_blob'][blob_id]
 			else:
 				blobs_to_read.append((ext,blob_id))
@@ -563,21 +563,21 @@ class GitDataCollector(DataCollector):
 
 						files, inserted, deleted = 0, 0, 0
 					except ValueError:
-						print 'Warning: unexpected line "%s"' % line
+						print('Warning: unexpected line "%s"' % line)
 				else:
-					print 'Warning: unexpected line "%s"' % line
+					print('Warning: unexpected line "%s"' % line)
 			else:
 				numbers = getstatsummarycounts(line)
 
 				if len(numbers) == 3:
-					(files, inserted, deleted) = map(lambda el : int(el), numbers)
+					(files, inserted, deleted) = list(map(lambda el : int(el), numbers))
 					total_lines += inserted
 					total_lines -= deleted
 					self.total_lines_added += inserted
 					self.total_lines_removed += deleted
 
 				else:
-					print 'Warning: failed to handle line "%s"' % line
+					print('Warning: failed to handle line "%s"' % line)
 					(files, inserted, deleted) = (0, 0, 0)
 			#self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted }
 		self.total_lines += total_lines
@@ -622,16 +622,16 @@ class GitDataCollector(DataCollector):
 						self.changes_by_date_by_author[stamp][author]['commits'] = self.authors[author]['commits']
 						files, inserted, deleted = 0, 0, 0
 					except ValueError:
-						print 'Warning: unexpected line "%s"' % line
+						print('Warning: unexpected line "%s"' % line)
 				else:
-					print 'Warning: unexpected line "%s"' % line
+					print('Warning: unexpected line "%s"' % line)
 			else:
 				numbers = getstatsummarycounts(line);
 
 				if len(numbers) == 3:
-					(files, inserted, deleted) = map(lambda el : int(el), numbers)
+					(files, inserted, deleted) = list(map(lambda el : int(el), numbers))
 				else:
-					print 'Warning: failed to handle line "%s"' % line
+					print('Warning: failed to handle line "%s"' % line)
 					(files, inserted, deleted) = (0, 0, 0)
 
 	def refine(self):
@@ -642,7 +642,7 @@ class GitDataCollector(DataCollector):
 		for i, name in enumerate(self.authors_by_commits):
 			self.authors[name]['place_by_commits'] = i + 1
 
-		for name in self.authors.keys():
+		for name in list(self.authors.keys()):
 			a = self.authors[name]
 			a['commits_frac'] = (100 * float(a['commits'])) / self.getTotalCommits()
 			date_first = datetime.datetime.fromtimestamp(a['first_commit_stamp'])
@@ -672,13 +672,13 @@ class GitDataCollector(DataCollector):
 		return res[:limit]
 
 	def getCommitDeltaDays(self):
-		return (self.last_commit_stamp / 86400 - self.first_commit_stamp / 86400) + 1
+		return (self.last_commit_stamp // 86400 - self.first_commit_stamp // 86400) + 1
 
 	def getDomainInfo(self, domain):
 		return self.domains[domain]
 
 	def getDomains(self):
-		return self.domains.keys()
+		return list(self.domains.keys())
 
 	def getFirstCommitDate(self):
 		return datetime.datetime.fromtimestamp(self.first_commit_stamp)
@@ -733,6 +733,17 @@ class HTMLReportCreator(ReportCreator):
 		ReportCreator.create(self, data, path)
 		self.title = data.projectname
 
+		# Prepare safe local values to avoid division-by-zero and empty-collection errors
+		total_commits = data.getTotalCommits()
+		total_active_days = len(data.getActiveDays()) if hasattr(data, 'getActiveDays') else 0
+		delta_days = data.getCommitDeltaDays() if hasattr(data, 'getCommitDeltaDays') else 0
+		total_authors = data.getTotalAuthors()
+		# busiest counters: use 1 as denominator if no activity recorded to avoid ZeroDivisionError
+		hour_of_day_busiest = data.activity_by_hour_of_day_busiest if getattr(data, 'activity_by_hour_of_day_busiest', 0) > 0 else 1
+		hour_of_week_busiest = data.activity_by_hour_of_week_busiest if getattr(data, 'activity_by_hour_of_week_busiest', 0) > 0 else 1
+		# timezone max for coloring; default to 1 if empty
+		max_commits_on_tz = max(data.commits_by_timezone.values()) if data.commits_by_timezone else 1
+
 		# copy static files. Looks in the binary directory, ../share/gitstats and /usr/share/gitstats
 		binarypath = os.path.dirname(os.path.abspath(__file__))
 		secondarypath = os.path.join(binarypath, '..', 'share', 'gitstats')
@@ -744,7 +755,7 @@ class HTMLReportCreator(ReportCreator):
 					shutil.copyfile(src, path + '/' + file)
 					break
 			else:
-				print 'Warning: "%s" not found, so not copied (searched: %s)' % (file, basedirs)
+				print('Warning: "%s" not found, so not copied (searched: %s)' % (file, basedirs))
 
 		f = open(path + "/index.html", 'w')
 		format = '%Y-%m-%d %H:%M:%S'
@@ -759,11 +770,14 @@ class HTMLReportCreator(ReportCreator):
 		f.write('<dt>Generated</dt><dd>%s (in %d seconds)</dd>' % (datetime.datetime.now().strftime(format), time.time() - data.getStampCreated()))
 		f.write('<dt>Generator</dt><dd><a href="http://gitstats.sourceforge.net/">GitStats</a> (version %s), %s, %s</dd>' % (getversion(), getgitversion(), getgnuplotversion()))
 		f.write('<dt>Report Period</dt><dd>%s to %s</dd>' % (data.getFirstCommitDate().strftime(format), data.getLastCommitDate().strftime(format)))
-		f.write('<dt>Age</dt><dd>%d days, %d active days (%3.2f%%)</dd>' % (data.getCommitDeltaDays(), len(data.getActiveDays()), (100.0 * len(data.getActiveDays()) / data.getCommitDeltaDays())))
+		f.write('<dt>Age</dt><dd>%d days, %d active days (%3.2f%%)</dd>' % (data.getCommitDeltaDays(), total_active_days, (100.0 * total_active_days / data.getCommitDeltaDays()) if data.getCommitDeltaDays() else 0.0))
 		f.write('<dt>Total Files</dt><dd>%s</dd>' % data.getTotalFiles())
 		f.write('<dt>Total Lines of Code</dt><dd>%s (%d added, %d removed)</dd>' % (data.getTotalLOC(), data.total_lines_added, data.total_lines_removed))
-		f.write('<dt>Total Commits</dt><dd>%s (average %.1f commits per active day, %.1f per all days)</dd>' % (data.getTotalCommits(), float(data.getTotalCommits()) / len(data.getActiveDays()), float(data.getTotalCommits()) / data.getCommitDeltaDays()))
-		f.write('<dt>Authors</dt><dd>%s (average %.1f commits per author)</dd>' % (data.getTotalAuthors(), (1.0 * data.getTotalCommits()) / data.getTotalAuthors()))
+		avg_active = float(total_commits) / total_active_days if total_active_days else 0.0
+		avg_all = float(total_commits) / delta_days if delta_days else 0.0
+		f.write('<dt>Total Commits</dt><dd>%s (average %.1f commits per active day, %.1f per all days)</dd>' % (total_commits, avg_active, avg_all))
+		avg_per_author = float(total_commits) / total_authors if total_authors else 0.0
+		f.write('<dt>Authors</dt><dd>%s (average %.1f commits per author)</dd>' % (total_authors, avg_per_author))
 		f.write('</dl>')
 
 		f.write('</body>\n</html>')
@@ -823,7 +837,7 @@ class HTMLReportCreator(ReportCreator):
 		fp = open(path + '/hour_of_day.dat', 'w')
 		for i in range(0, 24):
 			if i in hour_of_day:
-				r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
+				r = 127 + int((float(hour_of_day[i]) / hour_of_day_busiest) * 128)
 				f.write('<td style="background-color: rgb(%d, 0, 0)">%d</td>' % (r, hour_of_day[i]))
 				fp.write('%d %d\n' % (i, hour_of_day[i]))
 			else:
@@ -831,11 +845,12 @@ class HTMLReportCreator(ReportCreator):
 				fp.write('%d 0\n' % i)
 		fp.close()
 		f.write('</tr>\n<tr><th>%</th>')
-		totalcommits = data.getTotalCommits()
+		totalcommits = total_commits
 		for i in range(0, 24):
 			if i in hour_of_day:
-				r = 127 + int((float(hour_of_day[i]) / data.activity_by_hour_of_day_busiest) * 128)
-				f.write('<td style="background-color: rgb(%d, 0, 0)">%.2f</td>' % (r, (100.0 * hour_of_day[i]) / totalcommits))
+				r = 127 + int((float(hour_of_day[i]) / hour_of_day_busiest) * 128)
+				percent = (100.0 * hour_of_day[i]) / totalcommits if totalcommits else 0.0
+				f.write('<td style="background-color: rgb(%d, 0, 0)">%.2f</td>' % (r, percent))
 			else:
 				f.write('<td>0.00</td>')
 		f.write('</tr></table>')
@@ -862,7 +877,8 @@ class HTMLReportCreator(ReportCreator):
 			f.write('<tr>')
 			f.write('<th>%s</th>' % (WEEKDAYS[d]))
 			if d in day_of_week:
-				f.write('<td>%d (%.2f%%)</td>' % (day_of_week[d], (100.0 * day_of_week[d]) / totalcommits))
+				percent = (100.0 * day_of_week[d]) / totalcommits if totalcommits else 0.0
+				f.write('<td>%d (%.2f%%)</td>' % (day_of_week[d], percent))
 			else:
 				f.write('<td>0</td>')
 			f.write('</tr>')
@@ -906,7 +922,8 @@ class HTMLReportCreator(ReportCreator):
 			commits = 0
 			if mm in data.activity_by_month_of_year:
 				commits = data.activity_by_month_of_year[mm]
-			f.write('<tr><td>%d</td><td>%d (%.2f %%)</td></tr>' % (mm, commits, (100.0 * commits) / data.getTotalCommits()))
+			percent = (100.0 * commits) / total_commits if total_commits else 0.0
+			f.write('<tr><td>%d</td><td>%d (%.2f %%)</td></tr>' % (mm, commits, percent))
 			fp.write('%d %d\n' % (mm, commits))
 		fp.close()
 		f.write('</table></div>')
@@ -928,7 +945,9 @@ class HTMLReportCreator(ReportCreator):
 		f.write(html_header(2, 'Commits by Year'))
 		f.write('<div class="vtable"><table><tr><th>Year</th><th>Commits (% of all)</th><th>Lines added</th><th>Lines removed</th></tr>')
 		for yy in reversed(sorted(data.commits_by_year.keys())):
-			f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d</td><td>%d</td></tr>' % (yy, data.commits_by_year.get(yy,0), (100.0 * data.commits_by_year.get(yy,0)) / data.getTotalCommits(), data.lines_added_by_year.get(yy,0), data.lines_removed_by_year.get(yy,0)))
+			commits = data.commits_by_year.get(yy, 0)
+			percent = (100.0 * commits) / total_commits if total_commits else 0.0
+			f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d</td><td>%d</td></tr>' % (yy, commits, percent, data.lines_added_by_year.get(yy,0), data.lines_removed_by_year.get(yy,0)))
 		f.write('</table></div>')
 		f.write('<img src="commits_by_year.png" alt="Commits by Year">')
 		fg = open(path + '/commits_by_year.dat', 'w')
@@ -941,7 +960,7 @@ class HTMLReportCreator(ReportCreator):
 		f.write('<table><tr>')
 		f.write('<th>Timezone</th><th>Commits</th>')
 		f.write('</tr>')
-		max_commits_on_tz = max(data.commits_by_timezone.values())
+		max_commits_on_tz = max(data.commits_by_timezone.values()) if data.commits_by_timezone else 1
 		for i in sorted(data.commits_by_timezone.keys(), key = lambda n : int(n)):
 			commits = data.commits_by_timezone[i]
 			r = 127 + int((float(commits) / max_commits_on_tz) * 128)
@@ -1006,7 +1025,7 @@ class HTMLReportCreator(ReportCreator):
 			fgl.write('%d' % stamp)
 			fgc.write('%d' % stamp)
 			for author in self.authors_to_plot:
-				if author in data.changes_by_date_by_author[stamp].keys():
+				if author in data.changes_by_date_by_author[stamp]:
 					lines_by_authors[author] = data.changes_by_date_by_author[stamp][author]['lines_added']
 					commits_by_authors[author] = data.changes_by_date_by_author[stamp][author]['commits']
 				fgl.write(' %d' % lines_by_authors[author])
@@ -1056,7 +1075,8 @@ class HTMLReportCreator(ReportCreator):
 			n += 1
 			info = data.getDomainInfo(domain)
 			fp.write('%s %d %d\n' % (domain, n , info['commits']))
-			f.write('<tr><th>%s</th><td>%d (%.2f%%)</td></tr>' % (domain, info['commits'], (100.0 * info['commits'] / totalcommits)))
+			percent = (100.0 * info['commits'] / total_commits) if total_commits else 0.0
+			f.write('<tr><th>%s</th><td>%d (%.2f%%)</td></tr>' % (domain, info['commits'], percent))
 		f.write('</table></div>')
 		f.write('<img src="domains.png" alt="Commits by Domains">')
 		fp.close()
@@ -1075,7 +1095,8 @@ class HTMLReportCreator(ReportCreator):
 		f.write('<dt>Total files</dt><dd>%d</dd>' % data.getTotalFiles())
 		f.write('<dt>Total lines</dt><dd>%d</dd>' % data.getTotalLOC())
 		try:
-			f.write('<dt>Average file size</dt><dd>%.2f bytes</dd>' % (float(data.getTotalSize()) / data.getTotalFiles()))
+			avg_size = float(data.getTotalSize()) / data.getTotalFiles() if data.getTotalFiles() else 0.0
+			f.write('<dt>Average file size</dt><dd>%.2f bytes</dd>' % (avg_size))
 		except ZeroDivisionError:
 			pass
 		f.write('</dl>\n')
@@ -1105,11 +1126,10 @@ class HTMLReportCreator(ReportCreator):
 		for ext in sorted(data.extensions.keys()):
 			files = data.extensions[ext]['files']
 			lines = data.extensions[ext]['lines']
-			try:
-				loc_percentage = (100.0 * lines) / data.getTotalLOC()
-			except ZeroDivisionError:
-				loc_percentage = 0
-			f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d (%.2f%%)</td><td>%d</td></tr>' % (ext, files, (100.0 * files) / data.getTotalFiles(), lines, loc_percentage, lines / files))
+			loc_percentage = (100.0 * lines) / data.getTotalLOC() if data.getTotalLOC() else 0.0
+			files_percentage = (100.0 * files) / data.getTotalFiles() if data.getTotalFiles() else 0.0
+			lines_per_file = (lines // files) if files else 0
+			f.write('<tr><td>%s</td><td>%d (%.2f%%)</td><td>%d (%.2f%%)</td><td>%d</td></tr>' % (ext, files, files_percentage, lines, loc_percentage, lines_per_file))
 		f.write('</table>')
 
 		f.write('</body></html>')
@@ -1153,7 +1173,7 @@ class HTMLReportCreator(ReportCreator):
 		f.write('<table class="tags">')
 		f.write('<tr><th>Name</th><th>Date</th><th>Commits</th><th>Authors</th></tr>')
 		# sort the tags by date desc
-		tags_sorted_by_date_desc = map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), data.tags.items()))))
+		tags_sorted_by_date_desc = list(map(lambda el : el[1], reversed(sorted(map(lambda el : (el[1]['date'], el[0]), data.tags.items())))))
 		for tag in tags_sorted_by_date_desc:
 			authorinfo = []
 			self.authors_by_commits = getkeyssortedbyvalues(data.tags[tag]['authors'])
@@ -1168,7 +1188,7 @@ class HTMLReportCreator(ReportCreator):
 		self.createGraphs(path)
 
 	def createGraphs(self, path):
-		print 'Generating graphs...'
+		print('Generating graphs...')
 
 		# hour of day
 		f = open(path + '/hour_of_day.plot', 'w')
@@ -1370,7 +1390,7 @@ plot """
 		for f in files:
 			out = getpipeoutput([gnuplot_cmd + ' "%s"' % f])
 			if len(out) > 0:
-				print out
+				print(out)
 
 	def printHeader(self, f, title = ''):
 		f.write(
@@ -1401,7 +1421,7 @@ plot """
 """)
 
 def usage():
-	print """
+	print("""
 Usage: gitstats [options] <gitpath..> <outputpath>
 
 Options:
@@ -1411,7 +1431,7 @@ Default config values:
 %s
 
 Please see the manual page for more details.
-""" % conf
+""" % conf)
 
 
 class GitStats:
@@ -1442,48 +1462,48 @@ class GitStats:
 		except OSError:
 			pass
 		if not os.path.isdir(outputpath):
-			print 'FATAL: Output path is not a directory or does not exist'
+			print('FATAL: Output path is not a directory or does not exist')
 			sys.exit(1)
 
 		if not getgnuplotversion():
-			print 'gnuplot not found'
+			print('gnuplot not found')
 			sys.exit(1)
 
-		print 'Output path: %s' % outputpath
+		print('Output path: %s' % outputpath)
 		cachefile = os.path.join(outputpath, 'gitstats.cache')
 
 		data = GitDataCollector()
 		data.loadCache(cachefile)
 
		for gitpath in args[0:-1]:
-			print 'Git path: %s' % gitpath
+			print('Git path: %s' % gitpath)
 
 			prevdir = os.getcwd()
 			os.chdir(gitpath)
 
-			print 'Collecting data...'
+			print('Collecting data...')
 			data.collect(gitpath)
 
 			os.chdir(prevdir)
 
-		print 'Refining data...'
+		print('Refining data...')
 		data.saveCache(cachefile)
 		data.refine()
 
 		os.chdir(rundir)
 
-		print 'Generating report...'
+		print('Generating report...')
 		report = HTMLReportCreator()
 		report.create(data, outputpath)
 
 		time_end = time.time()
 		exectime_internal = time_end - time_start
-		print 'Execution time %.5f secs, %.5f secs (%.2f %%) in external commands)' % (exectime_internal, exectime_external, (100.0 * exectime_external) / exectime_internal)
+		print('Execution time %.5f secs, %.5f secs (%.2f %%) in external commands)' % (exectime_internal, exectime_external, (100.0 * exectime_external) / exectime_internal))
 		if sys.stdin.isatty():
-			print 'You may now run:'
-			print
-			print ' sensible-browser \'%s\'' % os.path.join(outputpath, 'index.html').replace("'", "'\\''")
-			print
+			print('You may now run:')
+			print()
+			print(' sensible-browser \'%s\'' % os.path.join(outputpath, 'index.html').replace("'", "'\\''"))
+			print()
 
 if __name__=='__main__':
 	g = GitStats()
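
Note on the two patterns this port leans on (not part of the patch above): getpipeoutput() now decodes the bytes that subprocess pipes return under Python 3 before stripping the trailing newline, and the report code computes every percentage and average through a guarded division so an empty or sparse repository renders a report instead of raising ZeroDivisionError. A minimal standalone sketch of both patterns, assuming it is run from inside a git checkout; the helper names here are illustrative and not taken from gitstats:

	import subprocess

	def run(cmd):
		# subprocess pipes yield bytes on Python 3; decode before any text processing,
		# mirroring the decode('utf-8', errors='replace') added to getpipeoutput()
		out = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True).communicate()[0]
		return out.decode('utf-8', errors='replace').rstrip('\n')

	commits = len(run('git rev-list HEAD').splitlines())
	active_days = len(set(run('git log --pretty=format:%ad --date=short').splitlines()))
	# guarded division: fall back to 0.0 instead of raising on an empty history
	avg_per_day = float(commits) / active_days if active_days else 0.0
	print('%d commits over %d active days (%.1f per active day)' % (commits, active_days, avg_per_day))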