forked from taggledevel2/ratchet
/
csv_writer.go
69 lines (57 loc) · 1.69 KB
/
csv_writer.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
package processors
import (
"io"
"sort"
"github.com/dailyburn/ratchet/data"
"github.com/dailyburn/ratchet/util"
)
// CSVWriter handles converting data.JSON objects into CSV format,
// and writing them to the given io.Writer. The Data
// must be a valid JSON object or a slice of valid JSON objects.
// If you already have Data formatted as a CSV string you can
// use an IoWriter instead.
type CSVWriter struct {
Writer *util.CSVWriter // underlying CSV encoder wrapping the destination io.Writer
WriteHeader bool // when true, a header row is emitted before the first data row
headerWritten bool // guards against writing the header more than once across ProcessData calls
Header []string // column order; if nil, derived from the first object's keys (sorted) on first use
}
// NewCSVWriter returns a new CSVWriter wrapping the given io.Writer object.
// Header writing is enabled by default.
func NewCSVWriter(w io.Writer) *CSVWriter {
	cw := &CSVWriter{
		Writer:      util.NewCSVWriter(w),
		WriteHeader: true,
		// headerWritten starts at its zero value (false).
	}
	return cw
}
// ProcessData converts d (a single JSON object or a slice of JSON objects)
// into CSV rows and writes them via the underlying util.CSVWriter. If
// WriteHeader is set, a header row is emitted exactly once. If Header is
// nil, the column order is derived from the first object's keys, sorted
// for determinism. Errors are reported to killChan.
func (w *CSVWriter) ProcessData(d data.JSON, outputChan chan data.JSON, killChan chan error) {
	// use util helper to convert Data into []map[string]interface{}
	objects, err := data.ObjectsFromJSON(d)
	if err != nil {
		util.KillPipelineIfErr(err, killChan)
		// objects is not usable after a decode error; the original code
		// fell through and could panic on objects[0].
		return
	}
	if len(objects) == 0 {
		// Nothing to write; also guards the objects[0] access below.
		return
	}
	if w.Header == nil {
		for k := range objects[0] {
			w.Header = append(w.Header, k)
		}
		// Map iteration order is random; sort for a stable column order.
		sort.Strings(w.Header)
	}
	rows := make([][]string, 0, len(objects)+1)
	if w.WriteHeader && !w.headerWritten {
		headerRow := make([]string, 0, len(w.Header))
		for _, k := range w.Header {
			headerRow = append(headerRow, util.CSVString(k))
		}
		rows = append(rows, headerRow)
		w.headerWritten = true
	}
	for _, object := range objects {
		row := make([]string, 0, len(w.Header))
		// Missing keys yield nil values; util.CSVString renders them.
		for _, k := range w.Header {
			row = append(row, util.CSVString(object[k]))
		}
		rows = append(rows, row)
	}
	if err := w.Writer.WriteAll(rows); err != nil {
		util.KillPipelineIfErr(err, killChan)
	}
}
// Finish is a no-op: all rows are flushed as they arrive in ProcessData,
// so there is no buffered state to drain here.
// NOTE(review): presumably present to satisfy ratchet's DataProcessor
// interface — confirm against the interface definition.
func (w *CSVWriter) Finish(outputChan chan data.JSON, killChan chan error) {
}
// String returns a human-readable name for this processor.
func (w *CSVWriter) String() string {
	const name = "CSVWriter"
	return name
}