// Copyright 2019 The go-interpreter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package exec

import (
	"encoding/binary"
	"fmt"
	"runtime"

	"github.com/ci123chain/wasm-util/exec/internal/compile"
	ops "github.com/ci123chain/wasm-util/wasm/operators"
)

// Parameters that decide whether a sequence should be compiled.
// TODO: Expose some way for these to be customized at runtime
// via VMOptions.
const (
	// NOTE: must never be less than 5, as room is needed to pack the
	// wagon.nativeExec instruction and its parameter.
	minInstBytes                = 5
	minArithInstructionSequence = 2
)
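
// supportedNativeArchs lists the GOARCH/GOOS combinations for which a native
// compilation backend has been registered.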
var supportedNativeArchs []nativeArch
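
// nativeArch couples a GOARCH/GOOS pair with a constructor for the
// corresponding native compiler backend.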
type nativeArch struct {
	Arch, OS string
	make     func(endianness binary.ByteOrder) *nativeCompiler
}

// nativeCompiler represents a backend for native code generation + execution.
type nativeCompiler struct {
	Scanner   sequenceScanner
	Builder   instructionBuilder
	allocator pageAllocator
}
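
// Close releases the resources held by the compiler, in particular any
// executable memory pages owned by its allocator.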
func (c *nativeCompiler) Close() error {
	return c.allocator.Close()
}

// pageAllocator is responsible for the efficient allocation of
// aligned regions of executable memory.
type pageAllocator interface {
	AllocateExec(asm []byte) (compile.NativeCodeUnit, error)
	Close() error
}

// sequenceScanner is responsible for detecting runs of supported opcodes
// that could benefit from compilation into native instructions.
type sequenceScanner interface {
	// ScanFunc returns an ordered, non-overlapping set of
	// sequences to compile into native code.
	ScanFunc(bytecode []byte, meta *compile.BytecodeMetadata) ([]compile.CompilationCandidate, error)
}

// instructionBuilder is responsible for compiling wasm opcodes into
// native instructions.
type instructionBuilder interface {
	// Build compiles the specified bytecode into native instructions.
	Build(candidate compile.CompilationCandidate, code []byte, meta *compile.BytecodeMetadata) ([]byte, error)
}

// NativeCompilationError represents a failure to compile a sequence
// of instructions to native code.
type NativeCompilationError struct {
	Start, End uint
	FuncIndex  int
	Err        error
}
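
// Error implements the error interface, describing where in the function's
// bytecode native compilation failed.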
func (e NativeCompilationError) Error() string {
	return fmt.Sprintf("exec: native compilation failed on vm.funcs[%d].code[%d:%d]: %v", e.FuncIndex, e.Start, e.End, e.Err)
}
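
// nativeBackend reports whether a native compilation backend is available
// for the current GOARCH/GOOS and, if so, constructs it.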
func nativeBackend() (bool, *nativeCompiler) {
	for _, c := range supportedNativeArchs {
		if c.Arch == runtime.GOARCH && c.OS == runtime.GOOS {
			backend := c.make(endianess)
			return true, backend
		}
	}
	return false, nil
}
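
// tryNativeCompile scans every compiled (non-Go) function for candidate
// instruction sequences, compiles worthwhile candidates to native code, and
// patches the wasm bytecode to dispatch into the generated blocks via
// ops.WagonNativeExec. It is a no-op when no native backend is configured.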
func (vm *VM) tryNativeCompile() error {
	if vm.nativeBackend == nil {
		return nil
	}

	for i := range vm.funcs {
		if _, isGoFunc := vm.funcs[i].(*goFunction); isGoFunc {
			continue
		}

		fn := vm.funcs[i].(compiledFunction)
		candidates, err := vm.nativeBackend.Scanner.ScanFunc(fn.code, fn.codeMeta)
		if err != nil {
			return fmt.Errorf("exec: AOT scan failed on vm.funcs[%d]: %v", i, err)
		}

		for _, candidate := range candidates {
			if (candidate.Metrics.IntegerOps + candidate.Metrics.FloatOps) < minArithInstructionSequence {
				continue
			}
			lower, upper := candidate.Bounds()
			if (upper - lower) < minInstBytes {
				continue
			}

			asm, err := vm.nativeBackend.Builder.Build(candidate, fn.code, fn.codeMeta)
			if err != nil {
				return NativeCompilationError{
					Err:       err,
					Start:     lower,
					End:       upper,
					FuncIndex: i,
				}
			}
			unit, err := vm.nativeBackend.allocator.AllocateExec(asm)
			if err != nil {
				return fmt.Errorf("exec: allocator.AllocateExec() failed: %v", err)
			}
			fn.asm = append(fn.asm, asmBlock{
				nativeUnit: unit,
				resumePC:   upper,
			})

			// Patch the wasm opcode stream to call into the native section.
			// The number of bytes touched here must always be equal to
			// nativeExecPrologueSize and <= minInstBytes.
			fn.code[lower] = ops.WagonNativeExec
			endianess.PutUint32(fn.code[lower+1:], uint32(len(fn.asm)-1))
			// Make the remainder of the recompiled instructions
			// unreachable: this should trap the program in the event that
			// a bug in code offsets & candidate sequence detection results in
			// a jump to the middle of re-compiled code.
			// This conservative behaviour is the least likely to result in
			// bugs becoming security issues.
			for i := lower + 5; i < upper-1; i++ {
				fn.code[i] = ops.Unreachable
			}
		}

		vm.funcs[i] = fn
	}
	return nil
}

// nativeCodeInvocation calls into one of the assembled code blocks.
// Assembled code blocks expect the following two pieces of
// information on the stack:
// [fp:fp+pointerSize]: sliceHeader for the stack.
// [fp+pointerSize:fp+pointerSize*2]: sliceHeader for local variables.
func (vm *VM) nativeCodeInvocation(asmIndex uint32) {
	block := vm.ctx.asm[asmIndex]
	finishSignal := block.nativeUnit.Invoke(&vm.ctx.stack, &vm.ctx.locals, &vm.globals, &vm.memory)

	switch finishSignal.CompletionStatus() {
	case compile.CompletionOK:
	case compile.CompletionFatalInternalError:
		panic("fatal error in native execution")
	case compile.CompletionBadBounds:
		panic("exec: out of bounds memory access")
	case compile.CompletionDivideZero:
		panic("runtime error: integer divide by zero")
	}
	vm.ctx.pc = int64(block.resumePC)
}

// CompileStats returns statistics about native compilation performed on
// the VM.
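//
// A minimal usage sketch (the output formatting below is illustrative only,
// not part of this API):
//
//	stats := vm.CompileStats()
//	fmt.Printf("compiled blocks: %d\n", stats.NumCompiledBlocks)
//	for op, s := range stats.Ops {
//		fmt.Printf("op 0x%x: compiled=%d interpreted=%d\n", op, s.Compiled, s.Interpreted)
//	}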
func (vm *VM) CompileStats() NativeCompileStats {
	out := NativeCompileStats{
		Ops: map[byte]*OpStats{},
	}

	for i := range vm.funcs {
		if _, isGoFunc := vm.funcs[i].(*goFunction); isGoFunc {
			continue
		}

		fn := vm.funcs[i].(compiledFunction)
		out.NumCompiledBlocks += len(fn.asm)

		for _, inst := range fn.codeMeta.Instructions {
			if _, exists := out.Ops[inst.Op]; !exists {
				out.Ops[inst.Op] = &OpStats{}
			}
			// Instructions which are native-compiled are re-written to the
			// ops.WagonNativeExec opcode, so a mismatch indicates native compilation.
			if fn.code[inst.Start] == inst.Op {
				out.Ops[inst.Op].Interpreted++
			} else {
				out.Ops[inst.Op].Compiled++
			}
		}
	}

	return out
}
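
// OpStats tallies, for a single opcode, how many instruction sites were left
// to the interpreter and how many were rewritten to call native code.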
type OpStats struct {
	Interpreted int
	Compiled    int
}

// NativeCompileStats encapsulates statistics about any native
// compilation performed on the VM.
type NativeCompileStats struct {
	Ops               map[byte]*OpStats
	NumCompiledBlocks int
}