Skip to content

Commit

Permalink
cmd/compile,runtime: redo mid-stack inlining tracebacks
Browse files Browse the repository at this point in the history
Work involved in getting a stack trace is divided between
runtime.Callers and runtime.CallersFrames.

Before this CL, runtime.Callers returns a pc per runtime frame.
runtime.CallersFrames is responsible for expanding a runtime frame
into potentially multiple user frames.

After this CL, runtime.Callers returns a pc per user frame.
runtime.CallersFrames just maps those to user frame info.

Entries in the result of runtime.Callers are now pcs
of the calls (or of the inline marks), not of the instruction
just after the call.

Fixes #29007
Fixes #28640
Update #26320

Change-Id: I1c9567596ff73dc73271311005097a9188c3406f
Reviewed-on: https://go-review.googlesource.com/c/152537
Run-TryBot: Keith Randall <khr@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: David Chase <drchase@google.com>
  • Loading branch information
randall77 committed Dec 28, 2018
1 parent c043fc4 commit 69c2c56
Show file tree
Hide file tree
Showing 39 changed files with 341 additions and 387 deletions.
4 changes: 0 additions & 4 deletions misc/cgo/test/callback.go
Expand Up @@ -179,7 +179,6 @@ func testCallbackCallers(t *testing.T) {
pc := make([]uintptr, 100)
n := 0
name := []string{
"runtime.call16",
"runtime.cgocallbackg1",
"runtime.cgocallbackg",
"runtime.cgocallback_gofunc",
Expand All @@ -193,9 +192,6 @@ func testCallbackCallers(t *testing.T) {
"testing.tRunner",
"runtime.goexit",
}
if unsafe.Sizeof((*byte)(nil)) == 8 {
name[0] = "runtime.call32"
}
nestedCall(func() {
n = runtime.Callers(4, pc)
})
Expand Down
3 changes: 2 additions & 1 deletion src/cmd/compile/internal/amd64/ggen.go
Expand Up @@ -141,7 +141,7 @@ func zeroAuto(pp *gc.Progs, n *gc.Node) {
}
}

func ginsnop(pp *gc.Progs) {
func ginsnop(pp *gc.Progs) *obj.Prog {
// This is actually not the x86 NOP anymore,
// but at the point where it gets used, AX is dead
// so it's okay if we lose the high bits.
Expand All @@ -150,4 +150,5 @@ func ginsnop(pp *gc.Progs) {
p.From.Reg = x86.REG_AX
p.To.Type = obj.TYPE_REG
p.To.Reg = x86.REG_AX
return p
}
3 changes: 2 additions & 1 deletion src/cmd/compile/internal/arm/ggen.go
Expand Up @@ -68,11 +68,12 @@ func zeroAuto(pp *gc.Progs, n *gc.Node) {
}
}

func ginsnop(pp *gc.Progs) {
// ginsnop emits a no-op instruction for arm (a conditional AND of R0
// with itself, predicated on EQ) and returns the generated Prog so
// the caller can refer to the instruction.
func ginsnop(pp *gc.Progs) *obj.Prog {
	nop := pp.Prog(arm.AAND)
	nop.Scond = arm.C_SCOND_EQ
	nop.From.Type = obj.TYPE_REG
	nop.From.Reg = arm.REG_R0
	nop.To.Type = obj.TYPE_REG
	nop.To.Reg = arm.REG_R0
	return nop
}
3 changes: 2 additions & 1 deletion src/cmd/compile/internal/arm64/ggen.go
Expand Up @@ -79,7 +79,8 @@ func zeroAuto(pp *gc.Progs, n *gc.Node) {
}
}

func ginsnop(pp *gc.Progs) {
// ginsnop emits a HINT instruction with a constant operand, which
// serves as a no-op on arm64, and returns the generated Prog.
func ginsnop(pp *gc.Progs) *obj.Prog {
	nop := pp.Prog(arm64.AHINT)
	nop.From.Type = obj.TYPE_CONST
	return nop
}
4 changes: 4 additions & 0 deletions src/cmd/compile/internal/gc/fmt.go
Expand Up @@ -174,6 +174,7 @@ var goopnames = []string{
OGT: ">",
OIF: "if",
OIMAG: "imag",
OINLMARK: "inlmark",
ODEREF: "*",
OLEN: "len",
OLE: "<=",
Expand Down Expand Up @@ -942,6 +943,9 @@ func (n *Node) stmtfmt(s fmt.State, mode fmtMode) {
case ORETJMP:
mode.Fprintf(s, "retjmp %v", n.Sym)

case OINLMARK:
mode.Fprintf(s, "inlmark %d", n.Xoffset)

case OGO:
mode.Fprintf(s, "go %v", n.Left)

Expand Down
2 changes: 1 addition & 1 deletion src/cmd/compile/internal/gc/go.go
Expand Up @@ -257,7 +257,7 @@ type Arch struct {

PadFrame func(int64) int64
ZeroRange func(*Progs, *obj.Prog, int64, int64, *uint32) *obj.Prog
Ginsnop func(*Progs)
Ginsnop func(*Progs) *obj.Prog

// SSAMarkMoves marks any MOVXconst ops that need to avoid clobbering flags.
SSAMarkMoves func(*SSAGenState, *ssa.Block)
Expand Down
9 changes: 9 additions & 0 deletions src/cmd/compile/internal/gc/inl.go
Expand Up @@ -1063,6 +1063,15 @@ func mkinlcall(n, fn *Node, maxCost int32) *Node {
}
newIndex := Ctxt.InlTree.Add(parent, n.Pos, fn.Sym.Linksym())

// Add an inline mark just before the inlined body.
// This mark is inline in the code so that it's a reasonable spot
// to put a breakpoint. Not sure if that's really necessary or not
// (in which case it could go at the end of the function instead).
inlMark := nod(OINLMARK, nil, nil)
inlMark.Pos = n.Pos
inlMark.Xoffset = int64(newIndex)
ninit.Append(inlMark)

if genDwarfInline > 0 {
if !fn.Sym.Linksym().WasInlined() {
Ctxt.DwFixups.SetPrecursorFunc(fn.Sym.Linksym(), fn)
Expand Down
2 changes: 1 addition & 1 deletion src/cmd/compile/internal/gc/order.go
Expand Up @@ -553,7 +553,7 @@ func (o *Order) stmt(n *Node) {
default:
Fatalf("orderstmt %v", n.Op)

case OVARKILL, OVARLIVE:
case OVARKILL, OVARLIVE, OINLMARK:
o.out = append(o.out, n)

case OAS:
Expand Down
12 changes: 12 additions & 0 deletions src/cmd/compile/internal/gc/ssa.go
Expand Up @@ -1204,6 +1204,9 @@ func (s *state) stmt(n *Node) {
p := s.expr(n.Left)
s.nilCheck(p)

case OINLMARK:
s.newValue1I(ssa.OpInlMark, types.TypeVoid, n.Xoffset, s.mem())

default:
s.Fatalf("unhandled stmt %v", n.Op)
}
Expand Down Expand Up @@ -5163,6 +5166,14 @@ func genssa(f *ssa.Func, pp *Progs) {
if v.Args[0].Reg() != v.Reg() {
v.Fatalf("OpConvert should be a no-op: %s; %s", v.Args[0].LongString(), v.LongString())
}
case ssa.OpInlMark:
p := thearch.Ginsnop(s.pp)
if pp.curfn.Func.lsym != nil {
// lsym is nil if the function name is "_".
pp.curfn.Func.lsym.Func.AddInlMark(p, v.AuxInt32())
}
// TODO: if matching line number, merge somehow with previous instruction?

default:
// let the backend handle it
// Special case for first line in function; move it to the start.
Expand Down Expand Up @@ -5543,6 +5554,7 @@ func (s *SSAGenState) Call(v *ssa.Value) *obj.Prog {
s.PrepareCall(v)

p := s.Prog(obj.ACALL)
p.Pos = v.Pos
if sym, ok := v.Aux.(*obj.LSym); ok {
p.To.Type = obj.TYPE_MEM
p.To.Name = obj.NAME_EXTERN
Expand Down
2 changes: 2 additions & 0 deletions src/cmd/compile/internal/gc/syntax.go
Expand Up @@ -46,6 +46,7 @@ type Node struct {
// - ODOT, ODOTPTR, and OINDREGSP use it to indicate offset relative to their base address.
// - OSTRUCTKEY uses it to store the named field's offset.
// - Named OLITERALs use it to store their ambient iota value.
// - OINLMARK stores an index into the inlTree data structure.
// Possibly still more uses. If you find any, document them.
Xoffset int64

Expand Down Expand Up @@ -750,6 +751,7 @@ const (
OVARKILL // variable is dead
OVARLIVE // variable is alive
OINDREGSP // offset plus indirect of REGSP, such as 8(SP).
OINLMARK // start of an inlined body, with file/line of caller. Xoffset is an index into the inline tree.

// arch-specific opcodes
ORETJMP // return to other function
Expand Down
3 changes: 3 additions & 0 deletions src/cmd/compile/internal/gc/walk.go
Expand Up @@ -322,6 +322,9 @@ func walkstmt(n *Node) *Node {
case ORETJMP:
break

case OINLMARK:
break

case OSELECT:
walkselect(n)

Expand Down
3 changes: 2 additions & 1 deletion src/cmd/compile/internal/mips/ggen.go
Expand Up @@ -59,10 +59,11 @@ func zeroAuto(pp *gc.Progs, n *gc.Node) {
}
}

func ginsnop(pp *gc.Progs) {
// ginsnop emits a no-op for mips (NOR of R0 into R0) and returns the
// generated Prog so the caller can refer to the instruction.
func ginsnop(pp *gc.Progs) *obj.Prog {
	nop := pp.Prog(mips.ANOR)
	nop.From.Type = obj.TYPE_REG
	nop.From.Reg = mips.REG_R0
	nop.To.Type = obj.TYPE_REG
	nop.To.Reg = mips.REG_R0
	return nop
}
3 changes: 2 additions & 1 deletion src/cmd/compile/internal/mips64/ggen.go
Expand Up @@ -63,10 +63,11 @@ func zeroAuto(pp *gc.Progs, n *gc.Node) {
}
}

func ginsnop(pp *gc.Progs) {
// ginsnop emits a no-op for mips64 (NOR of R0 into R0) and returns
// the generated Prog so the caller can refer to the instruction.
func ginsnop(pp *gc.Progs) *obj.Prog {
	nop := pp.Prog(mips.ANOR)
	nop.From.Type = obj.TYPE_REG
	nop.From.Reg = mips.REG_R0
	nop.To.Type = obj.TYPE_REG
	nop.To.Reg = mips.REG_R0
	return nop
}
9 changes: 5 additions & 4 deletions src/cmd/compile/internal/ppc64/ggen.go
Expand Up @@ -58,15 +58,16 @@ func zeroAuto(pp *gc.Progs, n *gc.Node) {
}
}

func ginsnop(pp *gc.Progs) {
// ginsnop emits a no-op for ppc64 (OR of R0 into R0) and returns the
// generated Prog so the caller can refer to the instruction.
func ginsnop(pp *gc.Progs) *obj.Prog {
	nop := pp.Prog(ppc64.AOR)
	nop.From.Type = obj.TYPE_REG
	nop.From.Reg = ppc64.REG_R0
	nop.To.Type = obj.TYPE_REG
	nop.To.Reg = ppc64.REG_R0
	return nop
}

func ginsnop2(pp *gc.Progs) {
func ginsnop2(pp *gc.Progs) *obj.Prog {
// PPC64 is unusual because TWO nops are required
// (see gc/cgen.go, gc/plive.go -- copy of comment below)
//
Expand All @@ -87,7 +88,7 @@ func ginsnop2(pp *gc.Progs) {
p.From.Reg = ppc64.REGSP
p.To.Type = obj.TYPE_REG
p.To.Reg = ppc64.REG_R2
} else {
ginsnop(pp)
return p
}
return ginsnop(pp)
}
3 changes: 2 additions & 1 deletion src/cmd/compile/internal/s390x/ggen.go
Expand Up @@ -104,10 +104,11 @@ func zeroAuto(pp *gc.Progs, n *gc.Node) {
}
}

func ginsnop(pp *gc.Progs) {
// ginsnop emits a no-op for s390x (OR of R0 into R0) and returns the
// generated Prog so the caller can refer to the instruction.
func ginsnop(pp *gc.Progs) *obj.Prog {
	nop := pp.Prog(s390x.AOR)
	nop.From.Type = obj.TYPE_REG
	nop.From.Reg = int16(s390x.REG_R0)
	nop.To.Type = obj.TYPE_REG
	nop.To.Reg = int16(s390x.REG_R0)
	return nop
}
2 changes: 1 addition & 1 deletion src/cmd/compile/internal/ssa/deadcode.go
Expand Up @@ -85,7 +85,7 @@ func liveValues(f *Func, reachable []bool) (live []bool, liveOrderStmts []*Value
}
}
if v.Type.IsVoid() && !live[v.ID] {
// The only Void ops are nil checks. We must keep these.
// The only Void ops are nil checks and inline marks. We must keep these.
live[v.ID] = true
q = append(q, v)
if v.Pos.IsStmt() != src.PosNotStmt {
Expand Down
4 changes: 4 additions & 0 deletions src/cmd/compile/internal/ssa/gen/genericOps.go
Expand Up @@ -480,6 +480,10 @@ var genericOps = []opData{
{name: "VarLive", argLength: 1, aux: "Sym", symEffect: "Read", zeroWidth: true}, // aux is a *gc.Node of a variable that must be kept live. arg0=mem, returns mem
{name: "KeepAlive", argLength: 2, typ: "Mem", zeroWidth: true}, // arg[0] is a value that must be kept alive until this mark. arg[1]=mem, returns mem

// InlMark marks the start of an inlined function body. Its AuxInt field
// distinguishes which entry in the local inline tree it is marking.
{name: "InlMark", argLength: 1, aux: "Int32", typ: "Void"}, // arg[0]=mem, returns void.

// Ops for breaking 64-bit operations on 32-bit architectures
{name: "Int64Make", argLength: 2, typ: "UInt64"}, // arg0=hi, arg1=lo
{name: "Int64Hi", argLength: 1, typ: "UInt32"}, // high 32-bit of arg0
Expand Down
2 changes: 1 addition & 1 deletion src/cmd/compile/internal/ssa/lower.go
Expand Up @@ -21,7 +21,7 @@ func checkLower(f *Func) {
continue // lowered
}
switch v.Op {
case OpSP, OpSB, OpInitMem, OpArg, OpPhi, OpVarDef, OpVarKill, OpVarLive, OpKeepAlive, OpSelect0, OpSelect1, OpConvert:
case OpSP, OpSB, OpInitMem, OpArg, OpPhi, OpVarDef, OpVarKill, OpVarLive, OpKeepAlive, OpSelect0, OpSelect1, OpConvert, OpInlMark:
continue // ok not to lower
case OpGetG:
if f.Config.hasGReg {
Expand Down
7 changes: 7 additions & 0 deletions src/cmd/compile/internal/ssa/opGen.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 4 additions & 2 deletions src/cmd/compile/internal/wasm/ssa.go
Expand Up @@ -58,8 +58,8 @@ func zeroAuto(pp *gc.Progs, n *gc.Node) {
}
}

func ginsnop(pp *gc.Progs) {
pp.Prog(wasm.ANop)
// ginsnop emits a wasm Nop and returns the generated Prog so the
// caller can refer to the instruction.
func ginsnop(pp *gc.Progs) *obj.Prog {
	nop := pp.Prog(wasm.ANop)
	return nop
}

func ssaMarkMoves(s *gc.SSAGenState, b *ssa.Block) {
Expand Down Expand Up @@ -134,10 +134,12 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
if sym, ok := v.Aux.(*obj.LSym); ok {
p := s.Prog(obj.ACALL)
p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: sym}
p.Pos = v.Pos
} else {
getValue64(s, v.Args[0])
p := s.Prog(obj.ACALL)
p.To = obj.Addr{Type: obj.TYPE_NONE}
p.Pos = v.Pos
}

case ssa.OpWasmLoweredMove:
Expand Down
3 changes: 2 additions & 1 deletion src/cmd/compile/internal/x86/ggen.go
Expand Up @@ -53,10 +53,11 @@ func zeroAuto(pp *gc.Progs, n *gc.Node) {
}
}

func ginsnop(pp *gc.Progs) {
// ginsnop emits a no-op for 386 (XCHGL AX, AX, the classic x86 no-op
// encoding) and returns the generated Prog so the caller can refer to
// the instruction.
func ginsnop(pp *gc.Progs) *obj.Prog {
	nop := pp.Prog(x86.AXCHGL)
	nop.From.Type = obj.TYPE_REG
	nop.From.Reg = x86.REG_AX
	nop.To.Type = obj.TYPE_REG
	nop.To.Reg = x86.REG_AX
	return nop
}
10 changes: 6 additions & 4 deletions src/cmd/internal/goobj/read.go
Expand Up @@ -119,10 +119,11 @@ type FuncData struct {
// An InlinedCall is a node in an InlTree.
// See cmd/internal/obj.InlTree for details.
type InlinedCall struct {
Parent int64
File string
Line int64
Func SymID
Parent int64
File string
Line int64
Func SymID
ParentPC int64
}

// A Package is a parsed Go object file or archive defining a Go package.
Expand Down Expand Up @@ -610,6 +611,7 @@ func (r *objReader) parseObject(prefix []byte) error {
f.InlTree[i].File = r.readSymID().Name
f.InlTree[i].Line = r.readInt()
f.InlTree[i].Func = r.readSymID()
f.InlTree[i].ParentPC = r.readInt()
}
}
}
Expand Down
13 changes: 9 additions & 4 deletions src/cmd/internal/obj/inl.go
Expand Up @@ -47,9 +47,10 @@ type InlTree struct {

// InlinedCall is a node in an InlTree.
type InlinedCall struct {
Parent int // index of the parent in the InlTree or < 0 if outermost call
Pos src.XPos // position of the inlined call
Func *LSym // function that was inlined
Parent int // index of the parent in the InlTree or < 0 if outermost call
Pos src.XPos // position of the inlined call
Func *LSym // function that was inlined
ParentPC int32 // PC of instruction just before inlined body. Only valid in local trees.
}

// Add adds a new call to the tree, returning its index.
Expand All @@ -76,6 +77,10 @@ func (tree *InlTree) CallPos(inlIndex int) src.XPos {
return tree.nodes[inlIndex].Pos
}

// setParentPC records pc as the ParentPC of the inline tree entry at
// inlIndex: the PC of the instruction just before the inlined body
// (see InlinedCall.ParentPC).
func (tree *InlTree) setParentPC(inlIndex int, pc int32) {
	tree.nodes[inlIndex].ParentPC = pc
}

// OutermostPos returns the outermost position corresponding to xpos,
// which is where xpos was ultimately inlined to. In the example for
// InlTree, main() contains inlined AST nodes from h(), but the
Expand Down Expand Up @@ -106,6 +111,6 @@ func (ctxt *Link) InnermostPos(xpos src.XPos) src.Pos {
func dumpInlTree(ctxt *Link, tree InlTree) {
for i, call := range tree.nodes {
pos := ctxt.PosTable.Pos(call.Pos)
ctxt.Logf("%0d | %0d | %s (%s)\n", i, call.Parent, call.Func, pos)
ctxt.Logf("%0d | %0d | %s (%s) pc=%d\n", i, call.Parent, call.Func, pos, call.ParentPC)
}
}

0 comments on commit 69c2c56

Please sign in to comment.