csv.go · 84 lines (71 loc) · 1.83 KB
forked from taggledevel2/ratchet
package util

import (
	"bufio"
	"bytes"
	"fmt"
	"sort"

	"github.com/tsaserv/ratchet/data"
)
// CSVString returns an empty string for nil values to make sure that the
// text "null" is not written to a file.
func CSVString(v interface{}) string {
	switch v.(type) {
	case nil:
		return ""
	default:
		return fmt.Sprintf("%v", v)
	}
}
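
// A quick illustration of CSVString's behavior (not part of the original file):
//
//	CSVString(nil)   // "" — a nil value never becomes the text "<nil>" or "null"
//	CSVString(42)    // "42"
//	CSVString("a,b") // "a,b" — quoting and escaping are left to the CSV writer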
// CSVParameters allows you to define all of your CSV writing preferences in a
// single struct for reuse in multiple processors.
type CSVParameters struct {
	Writer        *CSVWriter
	WriteHeader   bool
	HeaderWritten bool
	Header        []string
	SendUpstream  bool
	QuoteEscape   string
	Comma         rune
}
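
// Illustrative setup (a sketch, not part of the original file). It assumes this
// package's CSVWriter is created by a constructor such as NewCSVWriter, which is
// not shown in this file; adjust to however the writer is actually built.
//
//	params := &CSVParameters{
//		Writer:       NewCSVWriter(), // hypothetical constructor
//		WriteHeader:  true,
//		Header:       []string{"id", "name"}, // fixed column order; omit to derive from the data
//		SendUpstream: true,
//		Comma:        '|',
//	}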
// CSVProcess writes the contents to the file and optionally sends the written
// bytes upstream on outputChan.
func CSVProcess(params *CSVParameters, d data.JSON, outputChan chan data.JSON, killChan chan error) {
	objects, err := data.ObjectsFromJSON(d)
	KillPipelineIfErr(err, killChan)

	// Derive the header from the first object's keys when none was provided,
	// sorting for a deterministic column order.
	if params.Header == nil {
		for k := range objects[0] {
			params.Header = append(params.Header, k)
		}
		sort.Strings(params.Header)
	}

	rows := [][]string{}

	// Emit the header row once, if requested.
	if params.WriteHeader && !params.HeaderWritten {
		headerRow := []string{}
		for _, k := range params.Header {
			headerRow = append(headerRow, CSVString(k))
		}
		rows = append(rows, headerRow)
		params.HeaderWritten = true
	}

	// Build one row per object, with values in header order.
	for _, object := range objects {
		row := []string{}
		for i := range params.Header {
			v := object[params.Header[i]]
			row = append(row, CSVString(v))
		}
		rows = append(rows, row)
	}

	if params.Comma != 0 {
		params.Writer.Comma = params.Comma
	}

	if params.SendUpstream {
		// Write into an in-memory buffer so the CSV bytes can be sent on outputChan.
		var b bytes.Buffer
		params.Writer.SetWriter(bufio.NewWriter(&b))
		err = params.Writer.WriteAll(rows)
		KillPipelineIfErr(err, killChan)
		outputChan <- []byte(b.String())
	} else {
		err = params.Writer.WriteAll(rows)
		KillPipelineIfErr(err, killChan)
	}
}
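
// Minimal usage sketch (not part of the original file). MyCSVTransformer and its
// Parameters field are hypothetical; the ProcessData signature mirrors the
// data.JSON/outputChan/killChan convention already used by CSVProcess above.
//
//	type MyCSVTransformer struct {
//		Parameters *CSVParameters
//	}
//
//	func (t *MyCSVTransformer) ProcessData(d data.JSON, outputChan chan data.JSON, killChan chan error) {
//		// With SendUpstream set, the rendered CSV bytes are sent on outputChan
//		// for the next stage; otherwise they go to t.Parameters.Writer.
//		CSVProcess(t.Parameters, d, outputChan, killChan)
//	}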