@@ -1,14 +1,12 @@
 import datetime
-import logging
-import re
-import os
 
-from multiprocessing import Pool
+from collections import defaultdict
 
-from gitstats.data.author import Author
 from gitstats.datacollector import DataCollector
-from gitstats.miscfuncs import getcommitrange, getlogrange, getnumoffilesfromrev, getnumoflinesinblob, \
-    getpipeoutput, getstatsummarycounts
+from gitstats.data import Author, AuthorRow, File, LocByDate, Revision, Tag
+from gitstats.data_generators import gen_author_data, gen_author_totals_data, gen_tag_data, gen_revision_data, \
+    gen_file_data, gen_loc_data
+from gitstats.miscfuncs import getpipeoutput
 
 
 class GitDataCollector(DataCollector):
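Note: the new import block pulls typed row objects from gitstats.data and generator functions from gitstats.data_generators; neither module appears in this diff. A minimal sketch of what a row type is presumably shaped like, inferred from how get_author_info() consumes it below (field names come from the diff, the dataclass form itself is a guess):

```python
# Hypothetical sketch -- gitstats.data is not shown in this diff.
from dataclasses import dataclass

@dataclass
class AuthorRow:
    stamp: int            # commit timestamp, seconds since epoch
    author: str           # author name (presumably already run through name_xlate)
    lines_inserted: int   # insertions reported by git log --shortstat
    lines_deleted: int    # deletions reported by git log --shortstat
```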
@@ -18,201 +16,77 @@ class GitDataCollector(DataCollector):
     def collect(self, directory):
         super(GitDataCollector, self).collect(directory)
 
-        self.total_authors += int(getpipeoutput(['git shortlog -s %s' % getlogrange(self.conf), 'wc -l']))
-        # self.total_lines = int(getoutput('git-ls-files -z |xargs -0 cat |wc -l'))
+        self.total_authors += self.get_total_authors()
         self.get_tags()
         self.get_revision_info()
         self.get_file_info()
         self.get_loc_info()
         self.get_author_info()
 
-    def xlate(self, name):
-        if name in self.conf['name_xlate']:
-            return self.conf['name_xlate'][name]
-        return name
+    def get_total_authors(self):
+        return gen_author_totals_data(self.conf)
 
     def get_author_info(self):
         # Per-author statistics
         # defined for stamp, author only if author commited at this timestamp.
-        self.changes_by_date_by_author = {}  # stamp -> author -> lines_added
-        # Similar to the above, but never use --first-parent
-        # (we need to walk through every commit to know who
-        # committed what, not just through mainline)
-        lines = getpipeoutput(
-            ['git log --shortstat --date-order --pretty=format:"%%at %%aN" %s' % (
                getlogrange(self.conf, 'HEAD'))]).split('\n')
-        lines.reverse()
-        inserted = 0
-        deleted = 0
-        stamp = 0
-        for line in lines:
-            if len(line) == 0:
-                continue
-
-            # <stamp> <author>
-            if re.search('files? changed', line) is None:
-                pos = line.find(' ')
-                if pos != -1:
-                    try:
-                        oldstamp = stamp
-                        (stamp, author) = (int(line[:pos]), line[pos + 1:])
-                        author = self.xlate(author)
-                        if oldstamp > stamp:
-                            # clock skew, keep old timestamp to avoid having ugly graph
-                            stamp = oldstamp
-                        if author not in self.authors:
-                            self.authors[author] = Author()
-                        self.authors[author].commits += 1
-                        self.authors[author].lines_added += inserted
-                        self.authors[author].lines_removed += deleted
-                        if stamp not in self.changes_by_date_by_author:
-                            self.changes_by_date_by_author[stamp] = {}
-                        if author not in self.changes_by_date_by_author[stamp]:
-                            self.changes_by_date_by_author[stamp][author] = Author()
-                        self.changes_by_date_by_author[stamp][author].lines_added = self.authors[author].lines_added
-                        self.changes_by_date_by_author[stamp][author].commits = self.authors[author].commits
-                        files, inserted, deleted = 0, 0, 0
-                    except ValueError:
-                        logging.warning(f'unexpected line "{line}')
-                else:
-                    logging.warning(f'unexpected line "{line}')
-            else:
-                numbers = getstatsummarycounts(line)
 
-                if len(numbers) == 3:
-                    (files, inserted, deleted) = map(lambda el: int(el), numbers)
-                else:
-                    logging.warning(f'Failed to handle line "{line}"')
-                    (files, inserted, deleted) = (0, 0, 0)
+        self.changes_by_date_by_author = defaultdict(lambda: defaultdict(lambda: Author()))  # stamp -> author -> lines_added
+
+        def row_processor(row: AuthorRow):
+            self.authors[row.author].commits += 1
+            self.authors[row.author].lines_added += row.lines_inserted
+            self.authors[row.author].lines_removed += row.lines_deleted
+            self.changes_by_date_by_author[row.stamp][row.author].lines_added = self.authors[row.author].lines_added
+            self.changes_by_date_by_author[row.stamp][row.author].commits = self.authors[row.author].commits
+
+        gen_author_data(self.conf, row_processor)
 
     def get_loc_info(self):
-        # line statistics
-        # outputs:
-        #  N files changed, N insertions (+), N deletions(-)
-        # <stamp> <author>
+
         self.changes_by_date = {}  # stamp -> { files, ins, del }
-        # computation of lines of code by date is better done
-        # on a linear history.
-        extra = ''
-        if self.conf['linear_linestats']:
-            extra = '--first-parent -m'
-        lines = getpipeoutput(
-            ['git log --shortstat %s --pretty=format:"%%at %%aN" %s' % (extra, getlogrange(self.conf, 'HEAD'))]).split(
-            '\n')
-        lines.reverse()
-        files = 0
-        inserted = 0
-        deleted = 0
-        total_lines = 0
-        for line in lines:
-            if len(line) == 0:
-                continue
-
-            # <stamp> <author>
-            if re.search('files? changed', line) is None:
-                pos = line.find(' ')
-                if pos != -1:
-                    try:
-                        (stamp, author) = (int(line[:pos]), line[pos + 1:])
-                        self.changes_by_date[stamp] = {'files': files, 'ins': inserted, 'del': deleted,
-                                                       'lines': total_lines}
-
-                        date = datetime.datetime.fromtimestamp(stamp)
-                        yymm = date.strftime('%Y-%m')
-                        self.lines_added_by_month[yymm] = self.lines_added_by_month.get(yymm, 0) + inserted
-                        self.lines_removed_by_month[yymm] = self.lines_removed_by_month.get(yymm, 0) + deleted
-
-                        yy = date.year
-                        self.lines_added_by_year[yy] = self.lines_added_by_year.get(yy, 0) + inserted
-                        self.lines_removed_by_year[yy] = self.lines_removed_by_year.get(yy, 0) + deleted
-
-                        files, inserted, deleted = 0, 0, 0
-                    except ValueError:
-                        logging.warning(f'unexpected line "{line}')
-                else:
-                    logging.warning(f'unexpected line "{line}')
-            else:
-                numbers = getstatsummarycounts(line)
+        def row_processor(row: LocByDate):
+            self.changes_by_date[row.stamp] = {
+                'files': row.file_count,
+                'ins': row.lines_inserted,
+                'del': row.lines_deleted,
+                'lines': row.total_lines
+            }
+            date = datetime.datetime.fromtimestamp(row.stamp)
+            yymm = date.strftime('%Y-%m')
+            self.lines_added_by_month[yymm] = self.lines_added_by_month.get(yymm, 0) + row.lines_inserted
+            self.lines_removed_by_month[yymm] = self.lines_removed_by_month.get(yymm, 0) + row.lines_deleted
+
+            yy = date.year
+            self.lines_added_by_year[yy] = self.lines_added_by_year.get(yy, 0) + row.lines_inserted
+            self.lines_removed_by_year[yy] = self.lines_removed_by_year.get(yy, 0) + row.lines_deleted
 
-                if len(numbers) == 3:
-                    (files, inserted, deleted) = map(lambda el: int(el), numbers)
-                    total_lines += inserted
-                    total_lines -= deleted
-                    self.total_lines_added += inserted
-                    self.total_lines_removed += deleted
+            self.total_lines_added += row.lines_inserted
+            self.total_lines_removed += row.lines_deleted
 
-                else:
-                    logging.warning(f'Failed to handle line "{line}"')
-                    (files, inserted, deleted) = (0, 0, 0)
-        # self.changes_by_date[stamp] = { 'files': files, 'ins': inserted, 'del': deleted }
-        self.total_lines += total_lines
+        self.total_lines += gen_loc_data(self.conf, row_processor)
 
     def get_file_info(self):
         # extensions and size of files
-        lines = getpipeoutput(['git ls-tree -r -l -z %s' % getcommitrange(self.conf, 'HEAD', end_only=True)]).split(
-            '\000')
-        blobs_to_read = []
-        for line in lines:
-            if len(line) == 0:
-                continue
-            parts = re.split('\s+', line, 4)
-            if parts[0] == '160000' and parts[3] == '-':
-                # skip submodules
-                continue
-            blob_id = parts[2]
-            size = int(parts[3])
-            fullpath = parts[4]
-
-            self.total_size += size
+        def row_processor(row: File):
+            self.total_size += row.size
             self.total_files += 1
+            if row.ext not in self.extensions:
+                self.extensions[row.ext] = {'files': 0, 'lines': 0}
+            self.extensions[row.ext]['files'] += 1
+            self.extensions[row.ext]['lines'] += row.lines
 
-            _, ext = os.path.splitext(fullpath)
-            if len(ext) > self.conf['max_ext_length']:
-                ext = ''
-            if ext not in self.extensions:
-                self.extensions[ext] = {'files': 0, 'lines': 0}
-            self.extensions[ext]['files'] += 1
-            # if cache empty then add ext and blob id to list of new blob's
-            # otherwise try to read needed info from cache
-            if 'lines_in_blob' not in self.cache.keys():
-                blobs_to_read.append((ext, blob_id))
-                continue
-            if blob_id in self.cache['lines_in_blob'].keys():
-                self.extensions[ext]['lines'] += self.cache['lines_in_blob'][blob_id]
-            else:
-                blobs_to_read.append((ext, blob_id))
-        # Get info abount line count for new blob's that wasn't found in cache
-        pool = Pool(processes=self.conf['processes'])
-        ext_blob_linecount = pool.map(getnumoflinesinblob, blobs_to_read)
-        pool.terminate()
-        pool.join()
-        # Update cache and write down info about number of number of lines
-        for (ext, blob_id, linecount) in ext_blob_linecount:
-            if 'lines_in_blob' not in self.cache:
-                self.cache['lines_in_blob'] = {}
-            self.cache['lines_in_blob'][blob_id] = linecount
-            self.extensions[ext]['lines'] += self.cache['lines_in_blob'][blob_id]
+        gen_file_data(self.conf, row_processor)
 
     def get_revision_info(self):
         # Collect revision statistics
         # Outputs "<stamp> <date> <time> <timezone> <author> '<' <mail> '>'"
-        lines = getpipeoutput(
-            ['git rev-list --pretty=format:"%%at %%ai %%aN <%%aE>" %s' % getlogrange(self.conf, 'HEAD'),
-             'grep -v ^commit']).split(
-            '\n')
-        for line in lines:
-            parts = line.split(' ', 4)
-            try:
-                stamp = int(parts[0])
-            except ValueError:
-                stamp = 0
-            timezone = parts[3]
-            author, mail = parts[4].split('<', 1)
-            author = self.xlate(author.rstrip())
-            mail = mail.rstrip('>')
-            domain = '?'
-            if mail.find('@') != -1:
-                domain = mail.rsplit('@', 1)[1]
+
+        def row_processor(row: Revision):
+            stamp = row.stamp
+            domain = row.domain
+            author = row.author
+            timezone = row.timezone
+
             date = datetime.datetime.fromtimestamp(float(stamp))
 
             # First and last commit stamp (may be in any order because of cherry-picking and patches)
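Each collector method above now follows the same callback shape: build a row_processor closure, hand it to a gen_*_data function, and let the generator drive it once per row. The generators are outside this diff; the following is a sketch of the contract get_loc_info() appears to rely on, assuming gen_loc_data() still walks git log --shortstat output the way the deleted code did and returns the net line total (walk_shortstat_log and its fields are hypothetical):

```python
# Hypothetical sketch of the gen_loc_data contract assumed by get_loc_info().
def gen_loc_data(conf, row_processor):
    total_lines = 0
    for commit in walk_shortstat_log(conf):  # hypothetical parsing helper
        total_lines += commit.inserted - commit.deleted
        row_processor(LocByDate(stamp=commit.stamp,
                                file_count=commit.files,
                                lines_inserted=commit.inserted,
                                lines_deleted=commit.deleted,
                                total_lines=total_lines))
    # the caller accumulates the return value into self.total_lines
    return total_lines
```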
@@ -258,8 +132,6 @@ class GitDataCollector(DataCollector):
                 self.activity_by_year_week_peak = self.activity_by_year_week[yyw]
 
             # author stats
-            if author not in self.authors:
-                self.authors[author] = Author()
             self.authors[author].activity_by_day_and_hour[day][hour] += 1
             # commits, note again that commits may be in any date order because of cherry-picking and patches
             if not self.authors[author].last_commit_stamp:
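Dropping the "if author not in self.authors" guard here is only safe if missing authors are created on first access. That presumably happens in the DataCollector base class (not part of this diff); something along these lines would do it:

```python
# Assumed base-class change, mirroring the defaultdict used for
# changes_by_date_by_author in get_author_info() above.
from collections import defaultdict

from gitstats.data import Author

class DataCollector:
    def __init__(self):
        # a missing author name now yields a fresh Author() automatically
        self.authors = defaultdict(Author)
```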
@@ -303,99 +175,36 @@ class GitDataCollector(DataCollector):
 
             # timezone
             self.commits_by_timezone[timezone] = self.commits_by_timezone.get(timezone, 0) + 1
-        # outputs "<stamp> <files>" for each revision
-        revlines = getpipeoutput(
-            ['git rev-list --pretty=format:"%%at %%T" %s' % getlogrange(self.conf, 'HEAD'),
-             'grep -v ^commit']).strip().split('\n')
-        lines = []
-        revs_to_read = []
-        # Look up rev in cache and take info from cache if found
-        # If not append rev to list of rev to read from repo
-        for revline in revlines:
-            time, rev = revline.split(' ')
-            # if cache empty then add time and rev to list of new rev's
-            # otherwise try to read needed info from cache
-            if 'files_in_tree' not in self.cache.keys():
-                revs_to_read.append((time, rev))
-                continue
-            if rev in self.cache['files_in_tree'].keys():
-                lines.append('%d %d' % (int(time), self.cache['files_in_tree'][rev]))
-            else:
-                revs_to_read.append((time, rev))
-        # Read revisions from repo
-        pool = Pool(processes=self.conf['processes'])
-        time_rev_count = pool.map(getnumoffilesfromrev, revs_to_read)
-        pool.terminate()
-        pool.join()
-        # Update cache with new revisions and append then to general list
-        for (time, rev, count) in time_rev_count:
-            if 'files_in_tree' not in self.cache:
-                self.cache['files_in_tree'] = {}
-            self.cache['files_in_tree'][rev] = count
-            lines.append('%d %d' % (int(time), count))
-        self.total_commits += len(lines)
-        for line in lines:
-            parts = line.split(' ')
-            if len(parts) != 2:
-                continue
-            (stamp, files) = parts[0:2]
-            try:
-                self.files_by_stamp[int(stamp)] = int(files)
-            except ValueError:
-                logging.warning(f'Failed to parse line "{line}"')
+
+            # file counts
+            self.files_by_stamp[stamp] = row.file_count
+
+        self.total_commits += gen_revision_data(self.conf, row_processor)
 
     def get_tags(self):
-        # tags
-        lines = getpipeoutput(['git show-ref --tags']).split('\n')
-        for line in lines:
-            if len(line) == 0:
-                continue
-            (line_hash, tag) = line.split(' ')
-
-            tag = tag.replace('refs/tags/', '')
-            output = getpipeoutput(['git log "%s" --pretty=format:"%%at %%aN" -n 1' % line_hash])
-            if len(output) > 0:
-                parts = output.split(' ')
-                try:
-                    stamp = int(parts[0])
-                except ValueError:
-                    stamp = 0
-                self.tags[tag] = {'stamp': stamp,
-                                  'hash': line_hash,
-                                  'date': datetime.datetime.fromtimestamp(stamp).strftime('%Y-%m-%d'),
-                                  'commits': 0,
-                                  'authors': {}}
-        # collect info on tags, starting from latest
-        tags_sorted_by_date_asc = [tup[1] for tup in sorted([(el[1]['date'], el[0]) for el in self.tags.items()])]
-        # tags_sorted_by_date_desc = map(lambda el: el[1],
-        #                                reversed(sorted(map(lambda el: (el[1]['date'], el[0]), self.tags.items()))))
-        prev = None
-        # for tag in reversed(tags_sorted_by_date_desc):
-        for tag in tags_sorted_by_date_asc:
-            cmd = 'git shortlog -s "%s"' % tag
-            if prev is not None:
-                cmd += ' "^%s"' % prev
-            output = getpipeoutput([cmd])
-            if len(output) == 0:
-                continue
-            prev = tag
-            for line in output.split('\n'):
-                parts = re.split('\s+', line, 2)
-                commits = int(parts[1])
-                author = parts[2]
-                self.tags[tag]['commits'] += commits
-                self.tags[tag]['authors'][author] = commits
+        def row_processor(row: Tag):
+            self.tags[row.tag] = {
+                'stamp': row.stamp,
+                'hash': row.hash,
+                'date': datetime.datetime.fromtimestamp(row.stamp).strftime('%Y-%m-%d'),
+                'commits': row.commits,
+                'authors': row.authors
+            }
+
+        gen_tag_data(self.conf, row_processor)
 
     def refine(self):
         # authors
         # name -> {place_by_commits, commits_frac, date_first, date_last, timedelta}
         self.authors_by_commits = self.getAuthors()
+        total_commits_without_merge = 0
         for i, name in enumerate(self.authors_by_commits):
             self.authors[name].place_by_commits = i + 1
+            total_commits_without_merge += self.authors[name].commits
 
         for name in self.authors.keys():
             a = self.authors[name]
-            a.commits_frac = (100 * float(a.commits)) / self.getTotalCommits()
+            a.commits_frac = (100 * float(a.commits)) / total_commits_without_merge
             date_first = datetime.datetime.fromtimestamp(a.first_commit_stamp)
             date_last = datetime.datetime.fromtimestamp(a.last_commit_stamp)
             delta = date_last - date_first
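The refine() change swaps the self.getTotalCommits() denominator for a sum of per-author commit counts. The variable name suggests why: per-author counts presumably exclude merge commits, while self.total_commits (fed by gen_revision_data) counts every revision, so only the author-derived total makes commits_frac sum to 100 across authors. Toy numbers for illustration:

```python
# Assumed-for-illustration figures, not from the diff.
authors = {'alice': 6, 'bob': 3}                     # non-merge commits per author
total_commits_without_merge = sum(authors.values())  # 9
fracs = {name: 100 * count / total_commits_without_merge
         for name, count in authors.items()}
assert abs(sum(fracs.values()) - 100.0) < 1e-9       # 66.67% + 33.33%
```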