pkg/compiler: refactor genStructDescs
Reduce cyclomatic complexity of genStructDescs.

Update #538
dvyukov committed Jul 31, 2018
1 parent 0e9b376 commit 69eaab1
Showing 1 changed file with 125 additions and 116 deletions.
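The change follows the usual Go recipe for cutting a function's cyclomatic complexity when it is built around mutually recursive closures: the locals captured by the closures become fields of a small struct, and the closures become methods on it. A minimal, self-contained sketch of that pattern — the node/summer names are illustrative, not part of syzkaller:

// Before: a recursive closure captures shared state, so all the
// logic lives inside one large enclosing function.
type node struct {
	val      int
	children []*node
}

func sumAll(roots []*node) int {
	total := 0
	var walk func(n *node)
	walk = func(n *node) {
		total += n.val
		for _, c := range n.children {
			walk(c)
		}
	}
	for _, r := range roots {
		walk(r)
	}
	return total
}

// After: the captured state becomes struct fields, the closure
// becomes a method, and the driver shrinks to a few lines.
type summer struct{ total int }

func (s *summer) walk(n *node) {
	s.total += n.val
	for _, c := range n.children {
		s.walk(c)
	}
}

func sumAllRefactored(roots []*node) int {
	s := &summer{}
	for _, r := range roots {
		s.walk(r)
	}
	return s.total
}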
241 changes: 125 additions & 116 deletions pkg/compiler/gen.go
@@ -78,145 +78,154 @@ func (comp *compiler) genStructDescs(syscalls []*prog.Syscall) []*prog.KeyedStruct
 	// not possible to write them out inline as other types. To break the
 	// recursion detach them, and write StructDesc's out as separate array
 	// of KeyedStruct's. prog package will reattach them during init.
-	padded := make(map[interface{}]bool)
-	detach := make(map[**prog.StructDesc]bool)
-	var structs []*prog.KeyedStruct
-	var rec func(t prog.Type)
-	checkStruct := func(key prog.StructKey, descp **prog.StructDesc) bool {
-		detach[descp] = true
-		desc := *descp
-		if padded[desc] {
-			return false
-		}
-		padded[desc] = true
-		for _, f := range desc.Fields {
-			rec(f)
-			if !f.Varlen() && f.Size() == sizeUnassigned {
-				// An inner struct is not padded yet.
-				// Leave this struct for next iteration.
-				delete(padded, desc)
-				return false
-			}
-		}
-		if comp.used[key.Name] {
-			structs = append(structs, &prog.KeyedStruct{
-				Key:  key,
-				Desc: desc,
-			})
-		}
-		return true
-	}
-	rec = func(t0 prog.Type) {
-		switch t := t0.(type) {
-		case *prog.PtrType:
-			rec(t.Type)
-		case *prog.ArrayType:
-			if padded[t] {
-				return
-			}
-			rec(t.Type)
-			if !t.Type.Varlen() && t.Type.Size() == sizeUnassigned {
-				// An inner struct is not padded yet.
-				// Leave this array for next iteration.
-				return
-			}
-			padded[t] = true
-			t.TypeSize = 0
-			if t.Kind == prog.ArrayRangeLen && t.RangeBegin == t.RangeEnd && !t.Type.Varlen() {
-				t.TypeSize = t.RangeBegin * t.Type.Size()
-			}
-		case *prog.StructType:
-			if !checkStruct(t.Key, &t.StructDesc) {
-				return
-			}
-			structNode := comp.structNodes[t.StructDesc]
-			// Add paddings, calculate size, mark bitfields.
-			varlen := false
-			for _, f := range t.Fields {
-				if f.Varlen() {
-					varlen = true
-				}
-			}
-			comp.markBitfields(t.Fields)
-			packed, sizeAttr, alignAttr := comp.parseStructAttrs(structNode)
-			t.Fields = comp.addAlignment(t.Fields, varlen, packed, alignAttr)
-			t.AlignAttr = alignAttr
-			t.TypeSize = 0
-			if !varlen {
-				for _, f := range t.Fields {
-					if !f.BitfieldMiddle() {
-						t.TypeSize += f.Size()
-					}
-				}
-				if sizeAttr != sizeUnassigned {
-					if t.TypeSize > sizeAttr {
-						comp.error(structNode.Pos, "struct %v has size attribute %v"+
-							" which is less than struct size %v",
-							structNode.Name.Name, sizeAttr, t.TypeSize)
-					}
-					if pad := sizeAttr - t.TypeSize; pad != 0 {
-						t.Fields = append(t.Fields, genPad(pad))
-					}
-					t.TypeSize = sizeAttr
-				}
-			}
-		case *prog.UnionType:
-			if !checkStruct(t.Key, &t.StructDesc) {
-				return
-			}
-			structNode := comp.structNodes[t.StructDesc]
-			varlen, sizeAttr := comp.parseUnionAttrs(structNode)
-			t.TypeSize = 0
-			if !varlen {
-				for _, fld := range t.Fields {
-					sz := fld.Size()
-					if sizeAttr != sizeUnassigned && sz > sizeAttr {
-						comp.error(structNode.Pos, "union %v has size attribute %v"+
-							" which is less than field %v size %v",
-							structNode.Name.Name, sizeAttr, fld.Name(), sz)
-					}
-					if t.TypeSize < sz {
-						t.TypeSize = sz
-					}
-				}
-				if sizeAttr != sizeUnassigned {
-					t.TypeSize = sizeAttr
-				}
-			}
-		}
-	}
+	ctx := &structGen{
+		comp:   comp,
+		padded: make(map[interface{}]bool),
+		detach: make(map[**prog.StructDesc]bool),
+	}

 	// We have to do this in the loop until we pad nothing new
 	// due to recursive structs.
 	for {
-		start := len(padded)
+		start := len(ctx.padded)
 		for _, c := range syscalls {
 			for _, a := range c.Args {
-				rec(a)
+				ctx.walk(a)
 			}
 			if c.Ret != nil {
-				rec(c.Ret)
+				ctx.walk(c.Ret)
 			}
 		}
-		if start == len(padded) {
+		if start == len(ctx.padded) {
 			break
 		}
 	}
 
 	// Detach StructDesc's from StructType's. prog will reattach them again.
-	for descp := range detach {
+	for descp := range ctx.detach {
 		*descp = nil
 	}
 
-	sort.Slice(structs, func(i, j int) bool {
-		si, sj := structs[i], structs[j]
+	sort.Slice(ctx.structs, func(i, j int) bool {
+		si, sj := ctx.structs[i], ctx.structs[j]
 		if si.Key.Name != sj.Key.Name {
 			return si.Key.Name < sj.Key.Name
 		}
 		return si.Key.Dir < sj.Key.Dir
 	})
-	return structs
+	return ctx.structs
 }

+type structGen struct {
+	comp    *compiler
+	padded  map[interface{}]bool
+	detach  map[**prog.StructDesc]bool
+	structs []*prog.KeyedStruct
+}
+
+func (ctx *structGen) check(key prog.StructKey, descp **prog.StructDesc) bool {
+	ctx.detach[descp] = true
+	desc := *descp
+	if ctx.padded[desc] {
+		return false
+	}
+	ctx.padded[desc] = true
+	for _, f := range desc.Fields {
+		ctx.walk(f)
+		if !f.Varlen() && f.Size() == sizeUnassigned {
+			// An inner struct is not padded yet.
+			// Leave this struct for next iteration.
+			delete(ctx.padded, desc)
+			return false
+		}
+	}
+	if ctx.comp.used[key.Name] {
+		ctx.structs = append(ctx.structs, &prog.KeyedStruct{
+			Key:  key,
+			Desc: desc,
+		})
+	}
+	return true
+}
+
+func (ctx *structGen) walk(t0 prog.Type) {
+	comp := ctx.comp
+	switch t := t0.(type) {
+	case *prog.PtrType:
+		ctx.walk(t.Type)
+	case *prog.ArrayType:
+		if ctx.padded[t] {
+			return
+		}
+		ctx.walk(t.Type)
+		if !t.Type.Varlen() && t.Type.Size() == sizeUnassigned {
+			// An inner struct is not padded yet.
+			// Leave this array for next iteration.
+			return
+		}
+		ctx.padded[t] = true
+		t.TypeSize = 0
+		if t.Kind == prog.ArrayRangeLen && t.RangeBegin == t.RangeEnd && !t.Type.Varlen() {
+			t.TypeSize = t.RangeBegin * t.Type.Size()
+		}
+	case *prog.StructType:
+		if !ctx.check(t.Key, &t.StructDesc) {
+			return
+		}
+		structNode := comp.structNodes[t.StructDesc]
+		// Add paddings, calculate size, mark bitfields.
+		varlen := false
+		for _, f := range t.Fields {
+			if f.Varlen() {
+				varlen = true
+			}
+		}
+		comp.markBitfields(t.Fields)
+		packed, sizeAttr, alignAttr := comp.parseStructAttrs(structNode)
+		t.Fields = comp.addAlignment(t.Fields, varlen, packed, alignAttr)
+		t.AlignAttr = alignAttr
+		t.TypeSize = 0
+		if !varlen {
+			for _, f := range t.Fields {
+				if !f.BitfieldMiddle() {
+					t.TypeSize += f.Size()
+				}
+			}
+			if sizeAttr != sizeUnassigned {
+				if t.TypeSize > sizeAttr {
+					comp.error(structNode.Pos, "struct %v has size attribute %v"+
+						" which is less than struct size %v",
+						structNode.Name.Name, sizeAttr, t.TypeSize)
+				}
+				if pad := sizeAttr - t.TypeSize; pad != 0 {
+					t.Fields = append(t.Fields, genPad(pad))
+				}
+				t.TypeSize = sizeAttr
+			}
+		}
+	case *prog.UnionType:
+		if !ctx.check(t.Key, &t.StructDesc) {
+			return
+		}
+		structNode := comp.structNodes[t.StructDesc]
+		varlen, sizeAttr := comp.parseUnionAttrs(structNode)
+		t.TypeSize = 0
+		if !varlen {
+			for _, fld := range t.Fields {
+				sz := fld.Size()
+				if sizeAttr != sizeUnassigned && sz > sizeAttr {
+					comp.error(structNode.Pos, "union %v has size attribute %v"+
+						" which is less than field %v size %v",
+						structNode.Name.Name, sizeAttr, fld.Name(), sz)
+				}
+				if t.TypeSize < sz {
+					t.TypeSize = sz
+				}
+			}
+			if sizeAttr != sizeUnassigned {
+				t.TypeSize = sizeAttr
+			}
+		}
+	}
+}

 func (comp *compiler) genStructDesc(res *prog.StructDesc, n *ast.Struct, dir prog.Dir, varlen bool) {
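A note on the driver loop kept in genStructDescs above: walk can give up on a type whose inner struct has not been assigned a size yet, so the function re-walks all syscalls until a full pass pads nothing new. That is a plain fixed-point iteration; here is a minimal sketch of the pattern, with hypothetical fixpoint/pass names:

// Fixed-point iteration: re-run a pass that can only make forward
// progress, and stop once a whole pass resolves nothing new.
func fixpoint(items []string, pass func(string) bool) map[string]bool {
	done := make(map[string]bool)
	for {
		start := len(done)
		for _, it := range items {
			if !done[it] && pass(it) { // pass reports success
				done[it] = true
			}
		}
		if len(done) == start {
			return done // nothing new this pass: fixed point reached
		}
	}
}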
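The detach map[**prog.StructDesc]bool idiom is also worth a gloss: walk records the address of each StructDesc pointer field it visits, and the final loop in genStructDescs clears every such field through its address, breaking the recursive type graph while the collected descriptors survive in ctx.structs. A small sketch of the same double-pointer trick, with hypothetical desc/typ types:

type desc struct{ name string }
type typ struct{ d *desc }

// detachAll records the address of each pointer field while visiting,
// then nils the fields out through those addresses afterwards.
func detachAll(ts []*typ) []*desc {
	detach := make(map[**desc]bool)
	var kept []*desc
	for _, t := range ts {
		detach[&t.d] = true
		kept = append(kept, t.d) // the descriptors are kept separately
	}
	for p := range detach {
		*p = nil // clears the original field via its address
	}
	return kept
}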
