/
encode.go
138 lines (117 loc) · 3.04 KB
/
encode.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
package tlv
import (
	"errors"
	"fmt"
	"io"
	"reflect"
	"strconv"
	"strings"
	"unicode/utf8"
)
// tlvEntityFormat is the wire layout of a single TLV entity:
// tag id, then length, then value, concatenated with no separators.
const tlvEntityFormat = "%s%s%s"
// Encoder writes EMV Payment Code payload to an output stream.
type Encoder struct {
	w          io.Writer           // destination stream for the encoded payload
	tagName    string              // struct tag key used to map fields to TLV ids
	ignoreTags map[string]struct{} // TLV ids skipped during encoding
	f          TagLengthTranslator // optional id/length rewriter applied before writing; may be nil
}
// NewEncoder returns a new encoder that writes to w.
//
// tagName selects the struct tag key used to resolve TLV ids, ignoreTags
// lists ids that must be skipped during encoding, and f (which may be nil)
// rewrites id/length pairs just before they are written.
func NewEncoder(w io.Writer, tagName string, ignoreTags []string, f TagLengthTranslator) *Encoder {
	ignored := make(map[string]struct{}, len(ignoreTags))
	for _, id := range ignoreTags {
		ignored[id] = struct{}{}
	}
	enc := &Encoder{
		w:          w,
		tagName:    tagName,
		ignoreTags: ignored,
		f:          f,
	}
	return enc
}
// Encode writes the TLV payload of src to the stream.
//
// src must be a non-nil pointer to a struct whose fields are mapped to TLV
// ids via the encoder's tag name. Fields whose id is in ignoreTags and
// fields whose rendered value is empty are skipped. Fields implementing
// Tokenizer are tokenized to a string before being rendered.
//
// It returns an error when src is not a non-nil pointer, when a Tokenize
// call fails or returns an unexpected type, when a field cannot be rendered,
// or when writing to the underlying stream fails.
func (e *Encoder) Encode(src interface{}) error {
	v := reflect.ValueOf(src)
	// Guard the kind first: IsNil panics on non-nillable kinds, and
	// reflect.ValueOf(nil) yields an invalid Value.
	if !v.IsValid() || v.Kind() != reflect.Ptr {
		return errors.New("nil pointer passed")
	}
	if v.IsNil() {
		return errors.New("nil pointer passed")
	}
	tags := tags(v, e.tagName)
	v = reflect.Indirect(v)
	for _, tag := range tags {
		if _, ok := e.ignoreTags[tag.id]; ok {
			continue
		}
		id := tag.id
		f := v.Field(tag.index)
		// If the field type (via pointer receiver) implements Tokenizer,
		// replace the field value with its tokenized string form.
		if isTokenizable(f.Type()) {
			var res []reflect.Value
			if m, ok := reflect.PtrTo(f.Type()).MethodByName("Tokenize"); ok {
				res = m.Func.Call([]reflect.Value{f.Addr()})
			}
			if res == nil {
				return errors.New("unexpected value passed")
			}
			// res is (string, error); surface the error first.
			if err := res[1].Interface(); err != nil {
				if terr, ok := err.(error); ok {
					return terr
				}
				return errors.New("unexpected value returned")
			}
			switch nv := res[0].Interface().(type) {
			case string:
				f = reflect.ValueOf(nv)
			default:
				return fmt.Errorf("unexpected Tokenizer return type id: %v type: %s", id, nv)
			}
		}
		val, err := fieldToString(f)
		if err != nil {
			return fmt.Errorf("failed to convert field value to string: %s", err)
		}
		if len(val) < 1 {
			continue // value should be non-zero length
		}
		// Length is counted in runes (not bytes) and zero-padded to two digits.
		length := fmt.Sprintf("%02d", utf8.RuneCountInString(val))
		if e.f != nil {
			strID, strLength := e.f.Translate([]rune(id), []rune(length))
			id = string(strID)
			length = string(strLength)
		}
		if _, err := e.w.Write([]byte(fmt.Sprintf(tlvEntityFormat, id, length, val))); err != nil {
			return fmt.Errorf("failed to write body: %s", err)
		}
	}
	return nil
}
// fieldToString renders a single struct field value as its TLV string form.
//
// Supported kinds are string, float64 (formatted with minimal digits), and
// slices of TLV, whose elements are concatenated via their token form.
// Any other kind yields an error.
func fieldToString(v reflect.Value) (string, error) {
	switch v.Kind() {
	case reflect.String:
		return v.String(), nil
	case reflect.Float64:
		return strconv.FormatFloat(v.Float(), 'f', -1, 64), nil
	case reflect.Slice:
		typ := v.Type().Elem()
		if typ != reflect.TypeOf(TLV{}) {
			return "", fmt.Errorf("unsupported slice element type %s passed", typ.Kind())
		}
		var b strings.Builder
		// Iterate up to Len, not Cap: reflect.Value.Index panics for
		// i >= Len, so a slice with spare capacity would crash here.
		for i := 0; i < v.Len(); i++ {
			b.WriteString(v.Index(i).Interface().(TLV).token())
		}
		return b.String(), nil
	default:
		return "", fmt.Errorf("unsupported field type %s passed", v.Kind())
	}
}
// Tokenizer is the interface providing the Tokenize method.
type Tokenizer interface {
Tokenize() (string, error)
}
var _tokenizerInterface = reflect.TypeOf((*Tokenizer)(nil)).Elem()
func isTokenizable(t reflect.Type) bool {
return reflect.PtrTo(t).Implements(_tokenizerInterface)
}