Skip to content
Permalink
Browse files

added support for recursive input values

  • Loading branch information...
neelance committed Feb 6, 2017
1 parent 8c84afb commit 5b2978fcb1baf5d104a51d2638e2204abac0f5fd
Showing with 110 additions and 52 deletions.
  1. +22 −1 graphql_test.go
  2. +47 −12 internal/exec/exec.go
  3. +4 −5 internal/exec/introspection.go
  4. +37 −34 internal/exec/packer.go
@@ -1359,6 +1359,20 @@ func (r *inputResolver) NullableEnum(args *struct{ Value *string }) *string {
return args.Value
}

// recursive is a self-referential test input type: each node optionally
// links to the next, forming a chain whose length the Recursive resolver
// counts. It exists to exercise support for recursive input values.
type recursive struct {
Next *recursive
}

// Recursive resolves the "recursive" test field: it walks the chain of
// linked input values starting at args.Value and returns the number of
// nodes encountered (0 for a nil value).
func (r *inputResolver) Recursive(args *struct{ Value *recursive }) int32 {
	var count int32
	for node := args.Value; node != nil; node = node.Next {
		count++
	}
	return count
}

func TestInput(t *testing.T) {
coercionSchema := graphql.MustParseSchema(`
schema {
@@ -1375,12 +1389,17 @@ func TestInput(t *testing.T) {
nullableList(value: [Input]): [Int]
enum(value: Enum!): Enum!
nullableEnum(value: Enum): Enum
recursive(value: RecursiveInput!): Int!
}
input Input {
v: Int!
}
input RecursiveInput {
next: RecursiveInput
}
enum Enum {
Option1
Option2
@@ -1406,6 +1425,7 @@ func TestInput(t *testing.T) {
enum(value: Option2)
nullableEnum1: nullableEnum(value: Option2)
nullableEnum2: nullableEnum(value: null)
recursive(value: {next: {next: {}}})
}
`,
ExpectedResult: `
@@ -1424,7 +1444,8 @@ func TestInput(t *testing.T) {
"nullableList2": null,
"enum": "Option2",
"nullableEnum1": "Option2",
"nullableEnum2": null
"nullableEnum2": null,
"recursive": 3
}
`,
},
@@ -33,10 +33,7 @@ type Exec struct {
}

func Make(s *schema.Schema, resolver interface{}) (*Exec, error) {
b := &execBuilder{
schema: s,
execMap: make(map[typePair]*execMapEntry),
}
b := newExecBuilder(s)

var queryExec, mutationExec iExec

@@ -52,7 +49,9 @@ func Make(s *schema.Schema, resolver interface{}) (*Exec, error) {
}
}

b.finish()
if err := b.finish(); err != nil {
return nil, err
}

return &Exec{
schema: s,
@@ -63,8 +62,10 @@ func Make(s *schema.Schema, resolver interface{}) (*Exec, error) {
}

// execBuilder accumulates state while wiring executors and packers for a
// schema. Both caches are keyed by (schema type, Go type) pairs; their
// entries record pointer targets that finish patches once every exec and
// packer has been built, which is what allows recursive type references.
//
// NOTE(review): the stripped diff left the pre-commit `schema`/`execMap`
// field lines duplicated here; the struct below is the deduplicated
// post-commit form.
type execBuilder struct {
	schema        *schema.Schema
	execMap       map[typePair]*execMapEntry
	packerMap     map[typePair]*packerMapEntry
	structPackers []*structPacker
}

type typePair struct {
@@ -77,12 +78,46 @@ type execMapEntry struct {
targets []*iExec
}

func (b *execBuilder) finish() {
for _, ref := range b.execMap {
for _, target := range ref.targets {
*target = ref.exec
type packerMapEntry struct {
packer packer
targets []*packer
}

// newExecBuilder returns a builder for the given schema with empty exec
// and packer caches, ready for assignExec/assignPacker calls.
func newExecBuilder(s *schema.Schema) *execBuilder {
	b := &execBuilder{schema: s}
	b.execMap = make(map[typePair]*execMapEntry)
	b.packerMap = make(map[typePair]*packerMapEntry)
	return b
}

// finish completes the build: it patches every deferred exec and packer
// reference, then packs each struct packer's default values. Deferred
// patching is what makes recursive type references work — targets are
// collected while building and resolved only here, once everything exists.
func (b *execBuilder) finish() error {
	// Patch every recorded exec slot with its finished exec.
	for _, e := range b.execMap {
		for _, t := range e.targets {
			*t = e.exec
		}
	}

	// Same for packer slots.
	for _, e := range b.packerMap {
		for _, t := range e.targets {
			*t = e.packer
		}
	}

	// Defaults are packed last, after the field packers above have been
	// resolved; a field packer may itself be a deferred (recursive) one.
	for _, sp := range b.structPackers {
		sp.defaultStruct = reflect.New(sp.structType).Elem()
		for _, f := range sp.fields {
			if f.field.Default == nil {
				continue
			}
			packed, err := f.fieldPacker.pack(nil, f.field.Default)
			if err != nil {
				return err
			}
			sp.defaultStruct.FieldByIndex(f.fieldIndex).Set(packed)
		}
	}

	return nil
}

func (b *execBuilder) assignExec(target *iExec, t common.Type, resolverType reflect.Type) error {
@@ -230,7 +265,7 @@ func (b *execBuilder) makeFieldExec(typeName string, f *schema.Field, m reflect.
return nil, fmt.Errorf("must have parameter for field arguments")
}
var err error
argsPacker, err = makeStructPacker(b.schema, &f.Args, in[0])
argsPacker, err = b.makeStructPacker(&f.Args, in[0])
if err != nil {
return nil, err
}
@@ -14,10 +14,7 @@ var schemaExec iExec
var typeExec iExec

func init() {
b := &execBuilder{
schema: schema.Meta,
execMap: make(map[typePair]*execMapEntry),
}
b := newExecBuilder(schema.Meta)

if err := b.assignExec(&schemaExec, schema.Meta.Types["__Schema"], reflect.TypeOf(&introspection.Schema{})); err != nil {
panic(err)
@@ -27,7 +24,9 @@ func init() {
panic(err)
}

b.finish()
if err := b.finish(); err != nil {
panic(err)
}
}

func IntrospectSchema(s *schema.Schema) (interface{}, error) {
@@ -15,7 +15,23 @@ type packer interface {
pack(r *request, value interface{}) (reflect.Value, error)
}

func makePacker(s *schema.Schema, schemaType common.Type, reflectType reflect.Type) (packer, error) {
func (b *execBuilder) assignPacker(target *packer, schemaType common.Type, reflectType reflect.Type) error {
k := typePair{schemaType, reflectType}
ref, ok := b.packerMap[k]
if !ok {
ref = &packerMapEntry{}
b.packerMap[k] = ref
var err error
ref.packer, err = b.makePacker(schemaType, reflectType)
if err != nil {
return err
}
}
ref.targets = append(ref.targets, target)
return nil
}

func (b *execBuilder) makePacker(schemaType common.Type, reflectType reflect.Type) (packer, error) {
t, nonNull := unwrapNonNull(schemaType)
if !nonNull {
if reflectType.Kind() != reflect.Ptr {
@@ -27,7 +43,7 @@ func makePacker(s *schema.Schema, schemaType common.Type, reflectType reflect.Ty
elemType = reflectType // keep pointer for input objects
addPtr = false
}
elem, err := makeNonNullPacker(s, t, elemType)
elem, err := b.makeNonNullPacker(t, elemType)
if err != nil {
return nil, err
}
@@ -38,10 +54,10 @@ func makePacker(s *schema.Schema, schemaType common.Type, reflectType reflect.Ty
}, nil
}

return makeNonNullPacker(s, t, reflectType)
return b.makeNonNullPacker(t, reflectType)
}

func makeNonNullPacker(s *schema.Schema, schemaType common.Type, reflectType reflect.Type) (packer, error) {
func (b *execBuilder) makeNonNullPacker(schemaType common.Type, reflectType reflect.Type) (packer, error) {
if u, ok := reflect.New(reflectType).Interface().(Unmarshaler); ok {
if !u.ImplementsGraphQLType(schemaType.String()) {
return nil, fmt.Errorf("can not unmarshal %s into %s", schemaType, reflectType)
@@ -67,7 +83,7 @@ func makeNonNullPacker(s *schema.Schema, schemaType common.Type, reflectType ref
}, nil

case *schema.InputObject:
e, err := makeStructPacker(s, &t.InputMap, reflectType)
e, err := b.makeStructPacker(&t.InputMap, reflectType)
if err != nil {
return nil, err
}
@@ -77,14 +93,13 @@ func makeNonNullPacker(s *schema.Schema, schemaType common.Type, reflectType ref
if reflectType.Kind() != reflect.Slice {
return nil, fmt.Errorf("expected slice, got %s", reflectType)
}
elem, err := makePacker(s, t.OfType, reflectType.Elem())
if err != nil {
p := &listPacker{
sliceType: reflectType,
}
if err := b.assignPacker(&p.elem, t.OfType, reflectType.Elem()); err != nil {
return nil, err
}
return &listPacker{
sliceType: reflectType,
elem: elem,
}, nil
return p, nil

case *schema.Object, *schema.Interface, *schema.Union:
return nil, fmt.Errorf("type of kind %s can not be used as input", t.Kind())
@@ -94,18 +109,15 @@ func makeNonNullPacker(s *schema.Schema, schemaType common.Type, reflectType ref
}
}

func makeStructPacker(s *schema.Schema, obj *common.InputMap, typ reflect.Type) (*structPacker, error) {
func (b *execBuilder) makeStructPacker(obj *common.InputMap, typ reflect.Type) (*structPacker, error) {
if typ.Kind() != reflect.Ptr || typ.Elem().Kind() != reflect.Struct {
return nil, fmt.Errorf("expected pointer to struct, got %s", typ)
}
structType := typ.Elem()

var fields []*structPackerField
defaultStruct := reflect.New(structType).Elem()
for _, f := range obj.Fields {
fe := &structPackerField{
name: f.Name,
}
fe := &structPackerField{field: f}

sf, ok := structType.FieldByNameFunc(func(n string) bool { return strings.EqualFold(n, f.Name) })
if !ok {
@@ -122,28 +134,19 @@ func makeStructPacker(s *schema.Schema, obj *common.InputMap, typ reflect.Type)
ft = &common.NonNull{OfType: ft}
}

p, err := makePacker(s, ft, sf.Type)
if err != nil {
if err := b.assignPacker(&fe.fieldPacker, ft, sf.Type); err != nil {
return nil, fmt.Errorf("field %q: %s", sf.Name, err)
}
fe.fieldPacker = p

if f.Default != nil {
v, err := fe.fieldPacker.pack(nil, f.Default)
if err != nil {
return nil, err
}
defaultStruct.FieldByIndex(fe.fieldIndex).Set(v)
}

fields = append(fields, fe)
}

return &structPacker{
structType: structType,
defaultStruct: defaultStruct,
fields: fields,
}, nil
p := &structPacker{
structType: structType,
fields: fields,
}
b.structPackers = append(b.structPackers, p)
return p, nil
}

type structPacker struct {
@@ -153,7 +156,7 @@ type structPacker struct {
}

// structPackerField describes how one input-object field is packed into a
// field of the target Go struct.
//
// NOTE(review): the stripped diff left the removed `name string` field
// alongside its replacement; the struct below is the deduplicated
// post-commit form — the name now comes from field.Name.
type structPackerField struct {
	field       *common.InputValue // schema field definition (name, type, default)
	fieldIndex  []int              // index path of the matching Go struct field
	fieldPacker packer             // packer for the field's value
}
@@ -167,7 +170,7 @@ func (p *structPacker) pack(r *request, value interface{}) (reflect.Value, error
v := reflect.New(p.structType)
v.Elem().Set(p.defaultStruct)
for _, f := range p.fields {
if value, ok := values[f.name]; ok {
if value, ok := values[f.field.Name]; ok {
packed, err := f.fieldPacker.pack(r, r.resolveVar(value))
if err != nil {
return reflect.Value{}, err

3 comments on commit 5b2978f

@bsr203

This comment has been minimized.

Copy link

replied Feb 6, 2017

just FYI. Not sure does it conflict spec graphql/graphql-spec#91 (comment)

@neelance

This comment has been minimized.

Copy link
Collaborator Author

replied Feb 6, 2017

This comment is only about fragment recursion when querying, not type recursion in the schema.

@bsr203

This comment has been minimized.

Copy link

replied Feb 6, 2017

ok :-) thanks

Please sign in to comment.
You can’t perform that action at this time.