package mappers

import (
	"crypto/sha256"
	"database/sql"
	"encoding/hex"
	"strconv"

	"github.com/pkg/errors"
	"github.com/satori/go.uuid"

	"repodiff/constants"
	e "repodiff/entities"
	"repodiff/interactors"
	"repodiff/utils"
)

const expectedDiffRowLen = 9
const expectedCommitRowLen = 5

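// CSVLineToDiffRow converts a parsed CSV line into a DiffRow entity,
// validating the column count and converting the numeric columns.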
func CSVLineToDiffRow(csvColumns []string) (*e.DiffRow, error) {
	if len(csvColumns) != expectedDiffRowLen {
		return nil, errors.Errorf("Got %d columns but expected %d", len(csvColumns), expectedDiffRowLen)
	}
	intVals, err := batchToInts(csvColumns[4:]...)
	if err != nil {
		return nil, err
	}
	diffStatus, err := constants.GetStatusEnum(csvColumns[3])
	if err != nil {
		return nil, err
	}

	return &e.DiffRow{
		Date:                 csvColumns[0],
		DownstreamProject:    csvColumns[1],
		UpstreamProject:      csvColumns[2],
		DiffStatus:           diffStatus,
		FilesChanged:         intVals[0],
		LineInsertions:       intVals[1],
		LineDeletions:        intVals[2],
		LineChanges:          intVals[3],
		CommitsNotUpstreamed: intVals[4],
		DBInsertTimestamp:    0,
	}, nil
}

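// CSVLineToCommitRow converts a parsed CSV line into a CommitRow entity,
// validating the column count.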
func CSVLineToCommitRow(csvColumns []string) (*e.CommitRow, error) {
	if len(csvColumns) != expectedCommitRowLen {
		return nil, errors.Errorf("Got %d columns but expected %d", len(csvColumns), expectedCommitRowLen)
	}
	return &e.CommitRow{
		Date:              csvColumns[0],
		Commit:            csvColumns[1],
		DownstreamProject: csvColumns[2],
		Author:            csvColumns[3],
		Subject:           csvColumns[4],
	}, nil
}

func batchToInts(intStrings ...string) ([]int, error) {
	ints := make([]int, len(intStrings))
	for i, val := range intStrings {
		var err error
		ints[i], err = strconv.Atoi(val)
		if err != nil {
			return nil, errors.Wrapf(err, "Could not convert from %s", val)
		}
	}
	return ints, nil
}

func diffRowToDenormalizedCols(d e.AnalyzedDiffRow, rowIndex int) []interface{} {
	return []interface{}{
		rowIndex,
		d.Date,
		d.DownstreamProject,
		d.UpstreamProject,
		constants.StatusToDisplay[d.DiffStatus],
		d.FilesChanged,
		d.LineInsertions,
		d.LineDeletions,
		d.LineChanges,
		d.CommitsNotUpstreamed,
		constants.ProjectTypeToDisplay[d.Type],
	}
}

func commitRowToDenormalizedCols(commitRow e.AnalyzedCommitRow, firstSeen e.RepoTimestamp, rowIndex int) []interface{} {
	return []interface{}{
		rowIndex,
		commitRow.Commit,
		commitRow.DownstreamProject,
		commitRow.Author,
		commitRow.Subject,
		GetAuthorTechArea(commitRow.Author),
		constants.ProjectTypeToDisplay[commitRow.Type],
		utils.TimestampToDataStudioDatetime(firstSeen),
	}
}

func diffRowToPersistCols(d e.AnalyzedDiffRow, uuidBytes string, timestamp e.RepoTimestamp, rowIndex int) []interface{} {
	return []interface{}{
		timestamp,
		uuidBytes,
		rowIndex,
		d.DownstreamProject,
		d.UpstreamProject,
		d.DiffStatus,
		d.FilesChanged,
		d.LineInsertions,
		d.LineDeletions,
		d.LineChanges,
		d.CommitsNotUpstreamed,
		d.Type,
	}
}

func commitRowToPersistCols(c e.AnalyzedCommitRow, uuidBytes string, timestamp e.RepoTimestamp, rowIndex int) []interface{} {
	return []interface{}{
		timestamp,
		uuidBytes,
		rowIndex,
		c.Commit,
		c.DownstreamProject,
		c.Author,
		interactors.FilterNoUnicode(c.Subject),
		c.Type,
	}
}

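// DiffRowsToPersistCols maps diff rows to the column tuples persisted to
// storage, tagging every row with a shared UUID and timestamp.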
func DiffRowsToPersistCols(diffRows []e.AnalyzedDiffRow, timestamp e.RepoTimestamp) [][]interface{} {
	uid := uuid.NewV4()

	rows := make([][]interface{}, len(diffRows))
	for i, diffRow := range diffRows {
		rows[i] = diffRowToPersistCols(
			diffRow,
			string(uid.Bytes()),
			timestamp,
			i,
		)
	}
	return rows
}

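// DiffRowsToDenormalizedCols maps diff rows to the denormalized column
// layout used for reporting.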
func DiffRowsToDenormalizedCols(diffRows []e.AnalyzedDiffRow) [][]interface{} {
	rows := make([][]interface{}, len(diffRows))
	for i, diffRow := range diffRows {
		rows[i] = diffRowToDenormalizedCols(
			diffRow,
			i,
		)
	}
	return rows
}

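// CommitRowsToDenormalizedCols maps commit rows to the denormalized column
// layout used for reporting, resolving each commit's first-seen timestamp
// from commitToTimestamp.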
func CommitRowsToDenormalizedCols(commitRows []e.AnalyzedCommitRow, commitToTimestamp map[string]e.RepoTimestamp) [][]interface{} {
	rows := make([][]interface{}, len(commitRows))
	for i, commitRow := range commitRows {
		rows[i] = commitRowToDenormalizedCols(
			commitRow,
			commitToTimestamp[commitRow.Commit],
			i,
		)
	}
	return rows
}

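// DiffRowsToAggregateChangesOverTime reduces diff rows to a single row of
// aggregates keyed by the first row's insert datetime: the count of rows
// with a modified status, total line changes, and total files changed.
// Returns nil for empty input.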
func DiffRowsToAggregateChangesOverTime(diffRows []e.AnalyzedDiffRow) [][]interface{} {
	if len(diffRows) == 0 {
		return nil
	}
	cols := []interface{}{
		utils.TimestampToDataStudioDatetime(e.RepoTimestamp(diffRows[0].DBInsertTimestamp)),
		getSumOfAttribute(
			diffRows,
			func(d e.AnalyzedDiffRow) int {
				if d.DiffStatus == constants.StatusModified {
					return 1
				}
				return 0
			},
		),
		getSumOfAttribute(
			diffRows,
			func(d e.AnalyzedDiffRow) int {
				return d.LineChanges
			},
		),
		getSumOfAttribute(
			diffRows,
			func(d e.AnalyzedDiffRow) int {
				return d.FilesChanged
			},
		),
	}
	rows := [][]interface{}{
		cols,
	}
	return rows
}

func getSumOfAttribute(diffRows []e.AnalyzedDiffRow, getAttr func(e.AnalyzedDiffRow) int) int {
	var sum int
	for _, d := range diffRows {
		sum += getAttr(d)
	}
	return sum
}

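// CommitRowsToPersistCols maps commit rows to the column tuples persisted to
// storage, tagging every row with a shared UUID and timestamp.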
func CommitRowsToPersistCols(commitRows []e.AnalyzedCommitRow, timestamp e.RepoTimestamp) [][]interface{} {
	uid := uuid.NewV4()

	rows := make([][]interface{}, len(commitRows))
	for i, commitRow := range commitRows {
		rows[i] = commitRowToPersistCols(
			commitRow,
			string(uid.Bytes()),
			timestamp,
			i,
		)
	}
	return rows
}

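// SQLRowToDiffRow scans a single SQL result row back into an
// AnalyzedDiffRow, deriving the Date field from the insert timestamp.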
func SQLRowToDiffRow(iterRow *sql.Rows) (e.AnalyzedDiffRow, error) {
	var d e.AnalyzedDiffRow
	var uuidBytes []byte
	var rowIndex int
	err := iterRow.Scan(
		&d.DBInsertTimestamp,
		&uuidBytes,
		&rowIndex,
		&d.DownstreamProject,
		&d.UpstreamProject,
		&d.DiffStatus,
		&d.FilesChanged,
		&d.LineInsertions,
		&d.LineDeletions,
		&d.LineChanges,
		&d.CommitsNotUpstreamed,
		&d.Type,
	)
	d.Date = utils.TimestampToDate(e.RepoTimestamp(d.DBInsertTimestamp))
	return d, err
}

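// SQLRowToCommitRow scans a single SQL result row back into an
// AnalyzedCommitRow, deriving the Date field from the insert timestamp.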
func SQLRowToCommitRow(iterRow *sql.Rows) (e.AnalyzedCommitRow, error) {
	var c e.AnalyzedCommitRow
	var uuidBytes []byte
	var rowIndex int
	var timestamp e.RepoTimestamp
	err := iterRow.Scan(
		&timestamp,
		&uuidBytes,
		&rowIndex,
		&c.Commit,
		&c.DownstreamProject,
		&c.Author,
		&c.Subject,
		&c.Type,
	)
	c.Date = utils.TimestampToDate(timestamp)
	return c, err
}

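// PrependMappedDiffTarget prefixes every row with the upstream and
// downstream target identifiers.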
// SBL needs test coverage
func PrependMappedDiffTarget(target e.MappedDiffTarget, rowsOfCols [][]interface{}) [][]interface{} {
	remapped := make([][]interface{}, len(rowsOfCols))
	prefix := []interface{}{
		target.UpstreamTarget,
		target.DownstreamTarget,
	}
	for i, row := range rowsOfCols {
		remapped[i] = append(
			prefix,
			row...,
		)
	}
	return remapped
}

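// AppendDiffTarget suffixes every row with the upstream and downstream
// URL/branch pairs of the diff target.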
func AppendDiffTarget(target e.DiffTarget, rowsOfCols [][]interface{}) [][]interface{} {
	remapped := make([][]interface{}, len(rowsOfCols))
	suffix := []interface{}{
		target.Upstream.URL,
		target.Upstream.Branch,
		target.Downstream.URL,
		target.Downstream.Branch,
	}
	for i, row := range rowsOfCols {
		remapped[i] = append(
			row,
			suffix...,
		)
	}
	return remapped
}

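// SHA256HexDigest returns the hex-encoded SHA-256 digest of s.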
func SHA256HexDigest(s string) string {
	byteArray := sha256.Sum256([]byte(s))
	return hex.EncodeToString(
		byteArray[:],
	)
}

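// GetAuthorTechArea looks up an author's tech area by the SHA-256 hash of
// the author's e-mail address, falling back to the Unknown display value.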
func GetAuthorTechArea(authorEMail string) string {
	techAreaIndex, ok := constants.AuthorHashToTechIndex[SHA256HexDigest(authorEMail)]
	if !ok {
		return constants.TechAreaDisplay[constants.Unknown]
	}
	return constants.TechAreaDisplay[techAreaIndex]
}