Commit

Add a duration type that can be unmarshalled from JSON (#101)
xichen2020 committed Feb 6, 2019
1 parent 5697cf5 commit a90d03e
Showing 6 changed files with 258 additions and 22 deletions.
15 changes: 12 additions & 3 deletions glide.lock

(Generated lock file; diff not rendered.)

2 changes: 0 additions & 2 deletions glide.yaml
@@ -11,8 +11,6 @@ import:
   - protoc-gen-gofast
 - package: github.com/valyala/gozstd
   version: ^1.1.0
-- package: gopkg.in/yaml.v2
-  version: ~2.2.2
 - package: github.com/dgryski/go-bitstream
   version: 3522498ce2c8ea06df73e55df58edfbfb33cfdd6
 - package: github.com/willf/bitset
40 changes: 23 additions & 17 deletions query/unparsed_query.go
@@ -11,12 +11,13 @@ import (
 	"github.com/xichen2020/eventdb/filter"
 	"github.com/xichen2020/eventdb/x/convert"
 	"github.com/xichen2020/eventdb/x/hash"
+	xtime "github.com/xichen2020/eventdb/x/time"
 
-	xtime "github.com/m3db/m3x/time"
+	m3xtime "github.com/m3db/m3x/time"
 )
 
 const (
-	defaultTimeUnit = TimeUnit(xtime.Second)
+	defaultTimeUnit = TimeUnit(m3xtime.Second)
 	defaultFilterCombinator = filter.And
 	defaultRawQuerySizeLimit = 100
 	defaultGroupedQuerySizeLimit = 10
@@ -44,11 +45,11 @@ type UnparsedQuery struct {
 	Namespace string `json:"namespace"`
 
 	// Time range portion of the query.
-	StartTime       *int64         `json:"start_time"`
-	EndTime         *int64         `json:"end_time"`
-	TimeUnit        *TimeUnit      `json:"time_unit"`
-	TimeRange       *time.Duration `json:"time_range"`
-	TimeGranularity *time.Duration `json:"time_granularity"`
+	StartTime       *int64          `json:"start_time"`
+	EndTime         *int64          `json:"end_time"`
+	TimeUnit        *TimeUnit       `json:"time_unit"`
+	TimeRange       *xtime.Duration `json:"time_range"`
+	TimeGranularity *xtime.Duration `json:"time_granularity"`
 
 	// Filters.
 	Filters []RawFilterList `json:"filters"`
@@ -176,29 +177,34 @@ func (q *UnparsedQuery) parseTime() (
 		endNanos = *q.EndTime * unitDurationNanos
 	} else if q.StartTime != nil {
 		startNanos = *q.StartTime * unitDurationNanos
-		endNanos = startNanos + q.TimeRange.Nanoseconds()
+		endNanos = startNanos + time.Duration(*q.TimeRange).Nanoseconds()
 	} else {
-		endNanos = *q.EndTime * unitDurationNanos
-		startNanos = endNanos - q.TimeRange.Nanoseconds()
+		if q.EndTime != nil {
+			endNanos = *q.EndTime * unitDurationNanos
+		} else {
+			endNanos = time.Now().UnixNano()
+		}
+		startNanos = endNanos - time.Duration(*q.TimeRange).Nanoseconds()
 	}
 
 	if q.TimeGranularity == nil {
-		return startNanos, endNanos, granularity, nil
+		return startNanos, endNanos, nil, nil
 	}
 
 	// Further validation on query granularity.
 	var (
+		timeGranularity       = time.Duration(*q.TimeGranularity)
 		rangeInNanos          = endNanos - startNanos
 		maxGranularityAllowed = rangeInNanos / maxGranularityRangeScaleFactor
 		minGranularityAllowed = rangeInNanos / minGranularityRangeScaleFactor
 	)
-	if q.TimeGranularity.Nanoseconds() > maxGranularityAllowed {
-		return 0, 0, nil, fmt.Errorf("query granularity %v is above maximum allowed %v", *q.TimeGranularity, time.Duration(maxGranularityAllowed))
+	if timeGranularity.Nanoseconds() > maxGranularityAllowed {
+		return 0, 0, nil, fmt.Errorf("query granularity %v is above maximum allowed %v", timeGranularity, time.Duration(maxGranularityAllowed))
 	}
-	if q.TimeGranularity.Nanoseconds() < minGranularityAllowed {
-		return 0, 0, nil, fmt.Errorf("query granularity %v is below minimum allowed %v", *q.TimeGranularity, time.Duration(minGranularityAllowed))
+	if timeGranularity.Nanoseconds() < minGranularityAllowed {
+		return 0, 0, nil, fmt.Errorf("query granularity %v is below minimum allowed %v", timeGranularity, time.Duration(minGranularityAllowed))
 	}
-	return startNanos, endNanos, q.TimeGranularity, nil
+	return startNanos, endNanos, &timeGranularity, nil
 }
 
 func (q *UnparsedQuery) validateTime() error {
@@ -417,7 +423,7 @@ func (q *UnparsedQuery) parseOrderBy(
 		if calc.Field != nil && rob.Field == nil {
 			continue
 		}
-		if calc.Field != nil && rob.Field != nil && calc.Field != rob.Field {
+		if calc.Field != nil && rob.Field != nil && *calc.Field != *rob.Field {
 			continue
 		}
 		if calc.Op == *rob.Op {
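
For context on why the TimeRange and TimeGranularity fields above move from *time.Duration to the new *xtime.Duration: encoding/json treats time.Duration as a plain int64, so it only accepts integer nanosecond values and rejects human-readable strings such as "1h". A minimal sketch of that limitation (the window struct and its field are illustrative, not part of this commit):

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// window stands in for the query struct; the name and field are hypothetical.
type window struct {
	Range time.Duration `json:"time_range"`
}

func main() {
	var w window

	// The human-readable form fails: time.Duration has no UnmarshalJSON,
	// so a JSON string cannot be decoded into what is effectively an int64.
	err := json.Unmarshal([]byte(`{"time_range": "1h"}`), &w)
	fmt.Println(err) // json: cannot unmarshal string into Go struct field ...

	// Only the raw nanosecond count is accepted.
	if err := json.Unmarshal([]byte(`{"time_range": 3600000000000}`), &w); err == nil {
		fmt.Println(w.Range) // 1h0m0s
	}
}

The x/time Duration type added further down removes that restriction by parsing the string form with time.ParseDuration.
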
130 changes: 130 additions & 0 deletions query/unparsed_query_test.go
@@ -0,0 +1,130 @@
package query

import (
	"encoding/json"
	"testing"

	"github.com/xichen2020/eventdb/calculation"
	"github.com/xichen2020/eventdb/document/field"
	"github.com/xichen2020/eventdb/filter"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
	"github.com/stretchr/testify/require"
)

func TestUnparsedGroupByQueryParse(t *testing.T) {
	input := `{
  "namespace": "foo",
  "end_time": 9876,
  "time_range": "1h",
  "filters": [
    {
      "filters": [
        {
          "field": "field1.field11",
          "op": "=",
          "value": "value1"
        },
        {
          "field": "field2",
          "op": ">",
          "value": "value2"
        }
      ],
      "filter_combinator": "AND"
    }
  ],
  "group_by": [
    "field3.field31",
    "field4"
  ],
  "calculations": [
    {
      "op": "COUNT"
    },
    {
      "op": "AVG",
      "field": "field5"
    }
  ],
  "order_by": [
    {
      "field": "field3.field31",
      "order": "ascending"
    },
    {
      "field": "field5",
      "op": "AVG",
      "order": "descending"
    }
  ],
  "limit": 10
}`

	var (
		valueUnion1 = field.NewStringUnion("value1")
		valueUnion2 = field.NewStringUnion("value2")
	)
	expected := ParsedQuery{
		Namespace:      "foo",
		StartTimeNanos: 6276000000000,
		EndTimeNanos:   9876000000000,
		Filters: []FilterList{
			{
				Filters: []Filter{
					{
						FieldPath: []string{"field1", "field11"},
						Op:        filter.Equals,
						Value:     &valueUnion1,
					},
					{
						FieldPath: []string{"field2"},
						Op:        filter.LargerThan,
						Value:     &valueUnion2,
					},
				},
				FilterCombinator: filter.And,
			},
		},
		GroupBy: [][]string{
			{"field3", "field31"},
			{"field4"},
		},
		Calculations: []Calculation{
			{
				Op: calculation.Count,
			},
			{
				Op:        calculation.Avg,
				FieldPath: []string{"field5"},
			},
		},
		OrderBy: []OrderBy{
			{
				FieldType:  GroupByField,
				FieldIndex: 0,
				FieldPath:  []string{"field3", "field31"},
				SortOrder:  Ascending,
			},
			{
				FieldType:  CalculationField,
				FieldIndex: 1,
				FieldPath:  []string{"field5"},
				SortOrder:  Descending,
			},
		},
		Limit: 10,
	}

	var p UnparsedQuery
	err := json.Unmarshal([]byte(input), &p)
	require.NoError(t, err)
	parsed, err := p.Parse(ParseOptions{
		FieldPathSeparator: byte('.'),
	})
	require.NoError(t, err)

	queryCmpOpts := cmpopts.IgnoreUnexported(ParsedQuery{})
	require.True(t, cmp.Equal(expected, parsed, queryCmpOpts))
}
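
A quick check of the expected window in this test: time_unit is omitted, so it defaults to seconds. end_time 9876 therefore becomes 9876 s = 9,876,000,000,000 ns, and time_range "1h" (3,600 s) places the start at 9876 − 3600 = 6276 s = 6,276,000,000,000 ns, matching the StartTimeNanos and EndTimeNanos values above.
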
31 changes: 31 additions & 0 deletions x/time/duration.go
@@ -0,0 +1,31 @@
package time

import (
	"encoding/json"
	"time"
)

// Duration is a time duration that can be unmarshalled from JSON.
type Duration time.Duration

// String returns the duration string.
func (d Duration) String() string { return time.Duration(d).String() }

// MarshalJSON marshals the duration as a string.
func (d Duration) MarshalJSON() ([]byte, error) {
	return json.Marshal(time.Duration(d).String())
}

// UnmarshalJSON unmarshals the raw bytes into a duration.
func (d *Duration) UnmarshalJSON(b []byte) error {
	var v string
	if err := json.Unmarshal(b, &v); err != nil {
		return err
	}
	dur, err := time.ParseDuration(v)
	if err != nil {
		return err
	}
	*d = Duration(dur)
	return nil
}
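
A short usage sketch of the new type (the retention struct and its field are illustrative, not part of this commit); note that only the string form is accepted on input, since UnmarshalJSON first decodes the JSON value into a Go string:

package main

import (
	"encoding/json"
	"fmt"
	"time"

	xtime "github.com/xichen2020/eventdb/x/time"
)

// retention is a hypothetical config struct using the new duration type.
type retention struct {
	TTL xtime.Duration `json:"ttl"`
}

func main() {
	var r retention
	if err := json.Unmarshal([]byte(`{"ttl": "36h"}`), &r); err != nil {
		panic(err)
	}
	fmt.Println(time.Duration(r.TTL)) // 36h0m0s

	// MarshalJSON writes the value back out in the same string form.
	b, err := json.Marshal(r)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"ttl":"36h0m0s"}

	// A bare number is rejected: {"ttl": 3600000000000} fails because
	// UnmarshalJSON decodes into a string before calling time.ParseDuration.
}
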
62 changes: 62 additions & 0 deletions x/time/duration_test.go
@@ -0,0 +1,62 @@
package time

import (
	"encoding/json"
	"testing"
	"time"

	"github.com/stretchr/testify/require"
)

func TestDurationMarshalJSON(t *testing.T) {
	inputs := []struct {
		dur      time.Duration
		expected string
	}{
		{
			dur:      time.Second,
			expected: `"1s"`,
		},
		{
			dur:      5 * time.Minute,
			expected: `"5m0s"`,
		},
		{
			dur:      2 * time.Hour,
			expected: `"2h0m0s"`,
		},
	}

	for _, input := range inputs {
		b, err := json.Marshal(Duration(input.dur))
		require.NoError(t, err)
		require.Equal(t, input.expected, string(b))
	}
}

func TestDurationUnmarshalJSON(t *testing.T) {
	inputs := []struct {
		expected time.Duration
		str      string
	}{
		{
			str:      `"1s"`,
			expected: time.Second,
		},
		{
			str:      `"5m"`,
			expected: 5 * time.Minute,
		},
		{
			str:      `"2h"`,
			expected: 2 * time.Hour,
		},
	}

	for _, input := range inputs {
		var dur Duration
		err := json.Unmarshal([]byte(input.str), &dur)
		require.NoError(t, err)
		require.Equal(t, input.expected, time.Duration(dur))
	}
}
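
One case the tests above do not exercise is malformed input. Since UnmarshalJSON delegates to time.ParseDuration after decoding a string, both an unparseable duration string and a bare JSON number surface as errors. A possible follow-up test, not part of this commit, that would sit alongside the tests above and reuse their imports:

func TestDurationUnmarshalJSONInvalidInput(t *testing.T) {
	var dur Duration

	// Not a parseable duration string: time.ParseDuration rejects it.
	require.Error(t, json.Unmarshal([]byte(`"not-a-duration"`), &dur))

	// A bare number is also rejected, because UnmarshalJSON first decodes
	// the JSON value into a Go string.
	require.Error(t, json.Unmarshal([]byte(`3600000000000`), &dur))
}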
