This repository has been archived by the owner on Jan 24, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 1
/
compiler_value_location.go
425 lines (377 loc) · 14.3 KB
/
compiler_value_location.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
package compiler
import (
"fmt"
"strings"
"github.com/wasilibs/wazerox/internal/asm"
"github.com/wasilibs/wazerox/internal/wasm"
)
var (
	// unreservedGeneralPurposeRegisters contains unreserved general purpose registers of integer type.
	// NOTE(review): declared but never assigned in this file — presumably populated by
	// architecture-specific initialization elsewhere in the package; verify against amd64/arm64 files.
	// isGeneralPurposeRegister assumes this list is a contiguous, ascending register range.
	unreservedGeneralPurposeRegisters []asm.Register
	// unreservedVectorRegisters contains unreserved vector registers.
	// Same caveats as above: populated elsewhere, and isVectorRegister assumes a
	// contiguous, ascending range.
	unreservedVectorRegisters []asm.Register
)
// isGeneralPurposeRegister reports whether r is one of the unreserved
// general purpose (integer) registers.
//
// This relies on unreservedGeneralPurposeRegisters being a contiguous,
// ascending range: membership reduces to a bounds check against the
// first and last entries.
func isGeneralPurposeRegister(r asm.Register) bool {
	regs := unreservedGeneralPurposeRegisters
	lo, hi := regs[0], regs[len(regs)-1]
	return lo <= r && r <= hi
}
// isVectorRegister reports whether r is one of the unreserved vector
// registers.
//
// Like isGeneralPurposeRegister, this assumes unreservedVectorRegisters is
// a contiguous, ascending range.
func isVectorRegister(r asm.Register) bool {
	regs := unreservedVectorRegisters
	lo, hi := regs[0], regs[len(regs)-1]
	return lo <= r && r <= hi
}
// runtimeValueLocation corresponds to each variable pushed onto the wazeroir (virtual) stack,
// and it has the information about where it exists in the physical machine.
// It might exist in registers, or on the non-virtual physical stack allocated in memory.
type runtimeValueLocation struct {
	// valueType is the runtime type of this value (i32, i64, f32, f64, or one
	// half of a v128 pair).
	valueType runtimeValueType
	// register is set to asm.NilRegister if the value is stored in the memory stack.
	register asm.Register
	// conditionalRegister is set to asm.ConditionalRegisterStateUnset if the value is not on the conditional register.
	conditionalRegister asm.ConditionalRegisterState
	// stackPointer is the location of this value in the memory stack at runtime.
	stackPointer uint64
}
// getRegisterType returns the class of register (general purpose vs vector)
// appropriate for holding this value, derived from its valueType.
//
// Panics if valueType has not been set (runtimeValueTypeNone), which would be
// a compiler bug.
func (v *runtimeValueLocation) getRegisterType() registerType {
	switch v.valueType {
	case runtimeValueTypeI32, runtimeValueTypeI64:
		return registerTypeGeneralPurpose
	case runtimeValueTypeF32, runtimeValueTypeF64, runtimeValueTypeV128Lo, runtimeValueTypeV128Hi:
		return registerTypeVector
	default:
		panic("BUG")
	}
}
// runtimeValueType describes the runtime representation of a value held in a
// runtimeValueLocation.
type runtimeValueType byte

const (
	// runtimeValueTypeNone marks a slot whose type has not been assigned yet.
	runtimeValueTypeNone runtimeValueType = iota
	runtimeValueTypeI32
	runtimeValueTypeI64
	runtimeValueTypeF32
	runtimeValueTypeF64
	// runtimeValueTypeV128Lo is the lower 64 bits of a v128 value. A v128
	// occupies two consecutive slots: Lo immediately followed by Hi.
	runtimeValueTypeV128Lo
	// runtimeValueTypeV128Hi is the upper 64 bits of a v128 value.
	runtimeValueTypeV128Hi
)

// String implements fmt.Stringer. Unrecognized values (including
// runtimeValueTypeNone) produce the empty string.
func (r runtimeValueType) String() string {
	switch r {
	case runtimeValueTypeI32:
		return "i32"
	case runtimeValueTypeI64:
		return "i64"
	case runtimeValueTypeF32:
		return "f32"
	case runtimeValueTypeF64:
		return "f64"
	case runtimeValueTypeV128Lo:
		return "v128.lo"
	case runtimeValueTypeV128Hi:
		return "v128.hi"
	default:
		return ""
	}
}
// setRegister records that this value now lives in reg, clearing any
// conditional register state in the process.
func (v *runtimeValueLocation) setRegister(reg asm.Register) {
	v.conditionalRegister = asm.ConditionalRegisterStateUnset
	v.register = reg
}
// onRegister reports whether this value currently lives in a (non-conditional)
// register.
func (v *runtimeValueLocation) onRegister() bool {
	return v.conditionalRegister == asm.ConditionalRegisterStateUnset &&
		v.register != asm.NilRegister
}
// onStack reports whether this value currently lives on the memory stack,
// i.e. it is in neither a register nor a conditional register.
func (v *runtimeValueLocation) onStack() bool {
	return v.conditionalRegister == asm.ConditionalRegisterStateUnset &&
		v.register == asm.NilRegister
}
// onConditionalRegister reports whether this value is currently represented by
// a conditional register state (e.g. a CPU flag).
func (v *runtimeValueLocation) onConditionalRegister() bool {
	return v.conditionalRegister != asm.ConditionalRegisterStateUnset
}
// String implements fmt.Stringer for debugging: it renders the value's type
// and where it currently lives (stack slot, conditional register, or register).
func (v *runtimeValueLocation) String() string {
	var where string
	switch {
	case v.onStack():
		where = fmt.Sprintf("stack(%d)", v.stackPointer)
	case v.onConditionalRegister():
		where = fmt.Sprintf("conditional(%d)", v.conditionalRegister)
	case v.onRegister():
		where = fmt.Sprintf("register(%s)", registerNameFn(v.register))
	}
	return fmt.Sprintf("{type=%s,location=%s}", v.valueType, where)
}
// newRuntimeValueLocationStack returns an empty runtimeValueLocationStack
// seeded with the package-level architecture-dependent register lists.
func newRuntimeValueLocationStack() runtimeValueLocationStack {
	var s runtimeValueLocationStack
	s.unreservedGeneralPurposeRegisters = unreservedGeneralPurposeRegisters
	s.unreservedVectorRegisters = unreservedVectorRegisters
	return s
}
// runtimeValueLocationStack represents the wazeroir virtual stack
// where each item holds the location information about where it exists
// on the physical machine at runtime.
//
// Notably this is only used in the compilation phase, not runtime,
// and we change the state of this struct at every wazeroir operation we compile.
// In this way, we can see where the operands of an operation (for example,
// two variables for wazeroir add operation.) exist and check the necessity for
// moving the variable to registers to perform actual CPU instruction
// to achieve wazeroir's add operation.
type runtimeValueLocationStack struct {
	// stack holds all the variables. Only stack[:sp] is live; the tail is
	// retained capacity reused across push/pop cycles.
	stack []runtimeValueLocation
	// sp is the current stack pointer.
	sp uint64
	// usedRegisters is the bit map to track the used registers.
	usedRegisters usedRegistersMask
	// stackPointerCeil tracks max(.sp) across the lifespan of this struct.
	stackPointerCeil uint64
	// unreservedGeneralPurposeRegisters and unreservedVectorRegisters hold
	// architecture dependent unreserved register list.
	unreservedGeneralPurposeRegisters, unreservedVectorRegisters []asm.Register
}
// reset restores the stack to its initial empty state while keeping the
// backing slice's capacity for reuse by subsequent compilations.
func (v *runtimeValueLocationStack) reset() {
	reusable := v.stack[:0]
	*v = runtimeValueLocationStack{
		stack:                             reusable,
		unreservedGeneralPurposeRegisters: unreservedGeneralPurposeRegisters,
		unreservedVectorRegisters:         unreservedVectorRegisters,
	}
}
// String implements fmt.Stringer for debugging: it lists the stack pointer,
// every live value's location, and the currently used registers.
func (v *runtimeValueLocationStack) String() string {
	entries := make([]string, 0, v.sp)
	for i := uint64(0); i < v.sp; i++ {
		entries = append(entries, v.stack[i].String())
	}
	return fmt.Sprintf("sp=%d, stack=[%s], used_registers=[%s]",
		v.sp,
		strings.Join(entries, ","),
		strings.Join(v.usedRegisters.list(), ","))
}
// cloneFrom clones the values on `from` into self except for the slice of .stack field.
// The content on .stack will be copied from the origin to self, and grow the underlying slice
// if necessary.
func (v *runtimeValueLocationStack) cloneFrom(from runtimeValueLocationStack) {
	// Assigns the same values for fields except for the stack which we want to reuse.
	prev := v.stack
	*v = from
	v.stack = prev[:cap(prev)] // Expand the length to the capacity so that we can minimize "diff" below.
	// Copy the content in the stack.
	// Grow the reused slice only when it cannot hold from's live entries.
	if diff := int(from.sp) - len(v.stack); diff > 0 {
		v.stack = append(v.stack, make([]runtimeValueLocation, diff)...)
	}
	// Only the live portion (from.stack[:from.sp]) needs copying; stale entries
	// beyond sp in the reused slice are harmless.
	copy(v.stack, from.stack[:from.sp])
}
// pushRuntimeValueLocationOnRegister creates a new runtimeValueLocation with a given register and pushes onto
// the location stack, tagging it with value type vt.
func (v *runtimeValueLocationStack) pushRuntimeValueLocationOnRegister(reg asm.Register, vt runtimeValueType) (loc *runtimeValueLocation) {
	loc = v.push(reg, asm.ConditionalRegisterStateUnset)
	loc.valueType = vt
	return loc
}
// pushRuntimeValueLocationOnStack creates a new runtimeValueLocation residing on the
// memory stack (no register assigned) and pushes it onto the location stack.
// The value type is initialized to runtimeValueTypeNone; callers set it afterwards.
// (Original comment mistakenly named pushRuntimeValueLocationOnRegister.)
func (v *runtimeValueLocationStack) pushRuntimeValueLocationOnStack() (loc *runtimeValueLocation) {
	loc = v.push(asm.NilRegister, asm.ConditionalRegisterStateUnset)
	loc.valueType = runtimeValueTypeNone
	return
}
// pushRuntimeValueLocationOnConditionalRegister creates a new runtimeValueLocation with a
// given conditional register state and pushes it onto the location stack.
// The value is always typed as i32.
// (Original comment mistakenly named pushRuntimeValueLocationOnRegister.)
func (v *runtimeValueLocationStack) pushRuntimeValueLocationOnConditionalRegister(state asm.ConditionalRegisterState) (loc *runtimeValueLocation) {
	loc = v.push(asm.NilRegister, state)
	loc.valueType = runtimeValueTypeI32
	return
}
// push appends a runtimeValueLocation with the given register / conditional
// register state at the current stack pointer, growing the backing slice if
// needed, and returns a pointer to the new entry.
func (v *runtimeValueLocationStack) push(reg asm.Register, conditionalRegister asm.ConditionalRegisterState) (ret *runtimeValueLocation) {
	// Grow the backing slice by one when sp has reached its end; otherwise
	// reuse the existing slot in place.
	if uint64(len(v.stack)) <= v.sp {
		v.stack = append(v.stack, runtimeValueLocation{})
	}
	loc := &v.stack[v.sp]
	loc.register = reg
	loc.conditionalRegister = conditionalRegister
	loc.stackPointer = v.sp
	v.sp++
	// stackPointerCeil must be set after sp is incremented since
	// we skip the stack grow if len(stack) >= basePointer+stackPointerCeil.
	if v.stackPointerCeil < v.sp {
		v.stackPointerCeil = v.sp
	}
	return loc
}
// pop removes the top value from the location stack and returns it.
// The entry remains in the backing slice; only sp moves.
func (v *runtimeValueLocationStack) pop() *runtimeValueLocation {
	v.sp--
	return &v.stack[v.sp]
}
// popV128 removes a v128 value (two consecutive 64-bit slots) from the top of
// the location stack and returns the lower-half (V128Lo) entry.
func (v *runtimeValueLocationStack) popV128() *runtimeValueLocation {
	v.sp -= 2
	return &v.stack[v.sp]
}
// peek returns the top value of the location stack without removing it.
func (v *runtimeValueLocationStack) peek() *runtimeValueLocation {
	return &v.stack[v.sp-1]
}
// releaseRegister frees the register held by loc: the register is marked
// unused in the tracking mask and loc is left in the on-stack state
// (nil register, conditional state cleared).
func (v *runtimeValueLocationStack) releaseRegister(loc *runtimeValueLocation) {
	v.markRegisterUnused(loc.register)
	loc.setRegister(asm.NilRegister)
}
// markRegisterUnused clears the given registers from the used-register mask.
func (v *runtimeValueLocationStack) markRegisterUnused(regs ...asm.Register) {
	for i := range regs {
		v.usedRegisters.remove(regs[i])
	}
}
// markRegisterUsed records the given registers in the used-register mask.
func (v *runtimeValueLocationStack) markRegisterUsed(regs ...asm.Register) {
	for i := range regs {
		v.usedRegisters.add(regs[i])
	}
}
// registerType classifies machine registers for allocation purposes.
type registerType byte

const (
	// registerTypeGeneralPurpose represents an integer (general purpose) register.
	registerTypeGeneralPurpose registerType = iota
	// registerTypeVector represents a vector register which can be used for either scalar float
	// operation or SIMD vector operation depending on the instruction by which the register is used.
	//
	// Note: In normal assembly language, scalar float and vector register have different notations as
	// Vn is for vectors and Qn is for scalar floats on arm64 for example. But on physical hardware,
	// they are placed on the same locations. (Qn means the lower 64-bit of Vn vector register on arm64).
	//
	// In wazero, for the sake of simplicity in the register allocation, we intentionally conflate these two types
	// and delegate the decision to the assembler which is aware of the instruction types for which these registers are used.
	registerTypeVector
)

// String implements fmt.Stringer; unknown values produce the empty string.
func (tp registerType) String() string {
	if tp == registerTypeGeneralPurpose {
		return "int"
	}
	if tp == registerTypeVector {
		return "vector"
	}
	return ""
}
// takeFreeRegister searches for an unused register of the requested class.
// It returns the first free candidate and true, or (0, false) when every
// register in the class is currently in use.
//
// Note: despite the name, this function does not itself mark the register as
// used; callers do that via markRegisterUsed.
func (v *runtimeValueLocationStack) takeFreeRegister(tp registerType) (asm.Register, bool) {
	var pool []asm.Register
	switch tp {
	case registerTypeVector:
		pool = v.unreservedVectorRegisters
	case registerTypeGeneralPurpose:
		pool = v.unreservedGeneralPurposeRegisters
	}
	for _, candidate := range pool {
		if !v.usedRegisters.exist(candidate) {
			return candidate, true
		}
	}
	return 0, false
}
// takeStealTargetFromUsedRegister searches the virtual stack from the bottom
// and returns the first value holding a register of the requested class, so
// that its register can be stolen (spilled) by the caller. Returns (nil, false)
// when no value of that class is on a register.
func (v *runtimeValueLocationStack) takeStealTargetFromUsedRegister(tp registerType) (*runtimeValueLocation, bool) {
	for i := uint64(0); i < v.sp; i++ {
		loc := &v.stack[i]
		if !loc.onRegister() {
			continue
		}
		switch tp {
		case registerTypeVector:
			// A V128Hi slot should never be seen before its V128Lo partner;
			// hitting one first indicates corrupted stack layout.
			if loc.valueType == runtimeValueTypeV128Hi {
				panic("BUG: V128Hi must be above the corresponding V128Lo")
			}
			if isVectorRegister(loc.register) {
				return loc, true
			}
		case registerTypeGeneralPurpose:
			if isGeneralPurposeRegister(loc.register) {
				return loc, true
			}
		}
	}
	return nil, false
}
// init sets up the runtimeValueLocationStack which reflects the state of
// the stack at the beginning of the function: the parameters, any reserved
// result slots, and the callFrame fields.
//
// See the diagram in callEngine.stack.
func (v *runtimeValueLocationStack) init(sig *wasm.FunctionType) {
	// Parameters occupy the bottom of the stack.
	for _, paramType := range sig.Params {
		param := v.pushRuntimeValueLocationOnStack()
		switch paramType {
		case wasm.ValueTypeI32:
			param.valueType = runtimeValueTypeI32
		case wasm.ValueTypeI64, wasm.ValueTypeFuncref, wasm.ValueTypeExternref:
			// Funcref and externref occupy a single 64-bit slot, like i64.
			param.valueType = runtimeValueTypeI64
		case wasm.ValueTypeF32:
			param.valueType = runtimeValueTypeF32
		case wasm.ValueTypeF64:
			param.valueType = runtimeValueTypeF64
		case wasm.ValueTypeV128:
			// v128 spans two consecutive slots: lo then hi.
			param.valueType = runtimeValueTypeV128Lo
			v.pushRuntimeValueLocationOnStack().valueType = runtimeValueTypeV128Hi
		default:
			panic("BUG")
		}
	}
	// If the len(results) > len(args), the slots for all results are reserved after
	// arguments, so we reflect that into the location stack.
	for i := 0; i < sig.ResultNumInUint64-sig.ParamNumInUint64; i++ {
		_ = v.pushRuntimeValueLocationOnStack()
	}
	// Then push the control frame fields; each is a uint64, hence i64.
	for i := 0; i < callFrameDataSizeInUint64; i++ {
		v.pushRuntimeValueLocationOnStack().valueType = runtimeValueTypeI64
	}
}
// getCallFrameLocations returns each field of callFrame's runtime location:
// the return address, the caller's stack base pointer (in bytes), and the
// caller function, which live at three consecutive slots starting at
// callFrameOffset(sig).
//
// See the diagram in callEngine.stack.
func (v *runtimeValueLocationStack) getCallFrameLocations(sig *wasm.FunctionType) (
	returnAddress, callerStackBasePointerInBytes, callerFunction *runtimeValueLocation,
) {
	base := callFrameOffset(sig)
	returnAddress = &v.stack[base]
	callerStackBasePointerInBytes = &v.stack[base+1]
	callerFunction = &v.stack[base+2]
	return
}
// pushCallFrame pushes a call frame's runtime locations onto the stack assuming that
// the function call parameters are already pushed there.
//
// See the diagram in callEngine.stack.
func (v *runtimeValueLocationStack) pushCallFrame(callTargetFunctionType *wasm.FunctionType) (
	returnAddress, callerStackBasePointerInBytes, callerFunction *runtimeValueLocation,
) {
	// If len(results) > len(args), we reserve the slots for the results below the call frame.
	for n := callTargetFunctionType.ResultNumInUint64 - callTargetFunctionType.ParamNumInUint64; n > 0; n-- {
		v.pushRuntimeValueLocationOnStack()
	}
	// Each callFrame field is a uint64 and therefore treated as runtimeValueTypeI64.
	pushI64 := func() *runtimeValueLocation {
		loc := v.pushRuntimeValueLocationOnStack()
		loc.valueType = runtimeValueTypeI64
		return loc
	}
	returnAddress = pushI64()                 // callFrame.returnAddress
	callerStackBasePointerInBytes = pushI64() // callFrame.returnStackBasePointerInBytes
	callerFunction = pushI64()                // callFrame.function
	return
}
// usedRegistersMask tracks the used registers in its bits.
// Each register maps to a bit position via registerMaskShift (defined
// elsewhere in this package); bit set means the register is in use.
type usedRegistersMask uint64
// add marks the given register `r` as used by setting its bit in the mask.
func (u *usedRegistersMask) add(r asm.Register) {
	*u |= 1 << registerMaskShift(r)
}
// remove marks the given register `r` as unused by clearing its bit in the mask.
func (u *usedRegistersMask) remove(r asm.Register) {
	*u &^= 1 << registerMaskShift(r)
}
// exist reports whether the given register `r` is currently marked as used.
func (u *usedRegistersMask) exist(r asm.Register) bool {
	return *u&(1<<registerMaskShift(r)) != 0
}
// list returns the debug names of every used register, in ascending bit order.
// Only used for debugging and testing.
func (u *usedRegistersMask) list() (ret []string) {
	// Walk the mask bit by bit, shifting it down until no set bits remain.
	for shift, bits := 0, *u; bits != 0; shift, bits = shift+1, bits>>1 {
		if bits&1 != 0 {
			ret = append(ret, registerNameFn(registerFromMaskShift(shift)))
		}
	}
	return
}