# frozen_string_literal: true
module RubyVM::RJIT
class InsnCompiler
# A Ruby-side mirror of struct rb_calling_info, storing flags instead of a ci pointer.
CallingInfo = Struct.new(:argc, :flags, :kwarg, :ci_addr, :send_shift, :block_handler) do
def kw_splat = flags & C::VM_CALL_KW_SPLAT != 0
end
# @param cb [CodeBlock]
# @param ocb [CodeBlock]
# @param exit_compiler [RubyVM::RJIT::ExitCompiler]
def initialize(cb, ocb, exit_compiler)
@ocb = ocb
@exit_compiler = exit_compiler
@cfunc_codegen_table = {}
register_cfunc_codegen_funcs
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
# @param insn [RubyVM::RJIT::Instruction]
def compile(jit, ctx, asm, insn)
asm.incr_counter(:rjit_insns_count)
stack = ctx.stack_size.times.map do |stack_idx|
ctx.get_opnd_type(StackOpnd[ctx.stack_size - stack_idx - 1]).type
end
locals = jit.iseq.body.local_table_size.times.map do |local_idx|
(ctx.local_types[local_idx] || Type::Unknown).type
end
insn_idx = format('%04d', (jit.pc.to_i - jit.iseq.body.iseq_encoded.to_i) / C.VALUE.size)
asm.comment("Insn: #{insn_idx} #{insn.name} (stack: [#{stack.join(', ')}], locals: [#{locals.join(', ')}])")
# 83/102 insns are implemented below; commented-out names are not supported yet.
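# Each handler returns KeepCompiling (continue with the next insn),
# EndBlock (this insn ends the JIT block), or CantCompile (give up and
# let the interpreter handle it).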
case insn.name
when :nop then nop(jit, ctx, asm)
when :getlocal then getlocal(jit, ctx, asm)
when :setlocal then setlocal(jit, ctx, asm)
when :getblockparam then getblockparam(jit, ctx, asm)
# setblockparam
when :getblockparamproxy then getblockparamproxy(jit, ctx, asm)
when :getspecial then getspecial(jit, ctx, asm)
# setspecial
when :getinstancevariable then getinstancevariable(jit, ctx, asm)
when :setinstancevariable then setinstancevariable(jit, ctx, asm)
when :getclassvariable then getclassvariable(jit, ctx, asm)
when :setclassvariable then setclassvariable(jit, ctx, asm)
when :opt_getconstant_path then opt_getconstant_path(jit, ctx, asm)
when :getconstant then getconstant(jit, ctx, asm)
# setconstant
when :getglobal then getglobal(jit, ctx, asm)
# setglobal
when :putnil then putnil(jit, ctx, asm)
when :putself then putself(jit, ctx, asm)
when :putobject then putobject(jit, ctx, asm)
when :putspecialobject then putspecialobject(jit, ctx, asm)
when :putstring then putstring(jit, ctx, asm)
when :putchilledstring then putchilledstring(jit, ctx, asm)
when :concatstrings then concatstrings(jit, ctx, asm)
when :anytostring then anytostring(jit, ctx, asm)
when :toregexp then toregexp(jit, ctx, asm)
when :intern then intern(jit, ctx, asm)
when :newarray then newarray(jit, ctx, asm)
when :duparray then duparray(jit, ctx, asm)
# duphash
when :expandarray then expandarray(jit, ctx, asm)
when :concatarray then concatarray(jit, ctx, asm)
when :splatarray then splatarray(jit, ctx, asm)
when :newhash then newhash(jit, ctx, asm)
when :newrange then newrange(jit, ctx, asm)
when :pop then pop(jit, ctx, asm)
when :dup then dup(jit, ctx, asm)
when :dupn then dupn(jit, ctx, asm)
when :swap then swap(jit, ctx, asm)
# opt_reverse
when :topn then topn(jit, ctx, asm)
when :setn then setn(jit, ctx, asm)
when :adjuststack then adjuststack(jit, ctx, asm)
when :defined then defined(jit, ctx, asm)
when :definedivar then definedivar(jit, ctx, asm)
# checkmatch
when :checkkeyword then checkkeyword(jit, ctx, asm)
# checktype
# defineclass
# definemethod
# definesmethod
when :send then send(jit, ctx, asm)
when :opt_send_without_block then opt_send_without_block(jit, ctx, asm)
when :objtostring then objtostring(jit, ctx, asm)
when :opt_str_freeze then opt_str_freeze(jit, ctx, asm)
when :opt_ary_freeze then opt_ary_freeze(jit, ctx, asm)
when :opt_hash_freeze then opt_hash_freeze(jit, ctx, asm)
when :opt_nil_p then opt_nil_p(jit, ctx, asm)
# opt_str_uminus
when :opt_newarray_send then opt_newarray_send(jit, ctx, asm)
when :invokesuper then invokesuper(jit, ctx, asm)
when :invokeblock then invokeblock(jit, ctx, asm)
when :leave then leave(jit, ctx, asm)
when :throw then throw(jit, ctx, asm)
when :jump then jump(jit, ctx, asm)
when :branchif then branchif(jit, ctx, asm)
when :branchunless then branchunless(jit, ctx, asm)
when :branchnil then branchnil(jit, ctx, asm)
# once
when :opt_case_dispatch then opt_case_dispatch(jit, ctx, asm)
when :opt_plus then opt_plus(jit, ctx, asm)
when :opt_minus then opt_minus(jit, ctx, asm)
when :opt_mult then opt_mult(jit, ctx, asm)
when :opt_div then opt_div(jit, ctx, asm)
when :opt_mod then opt_mod(jit, ctx, asm)
when :opt_eq then opt_eq(jit, ctx, asm)
when :opt_neq then opt_neq(jit, ctx, asm)
when :opt_lt then opt_lt(jit, ctx, asm)
when :opt_le then opt_le(jit, ctx, asm)
when :opt_gt then opt_gt(jit, ctx, asm)
when :opt_ge then opt_ge(jit, ctx, asm)
when :opt_ltlt then opt_ltlt(jit, ctx, asm)
when :opt_and then opt_and(jit, ctx, asm)
when :opt_or then opt_or(jit, ctx, asm)
when :opt_aref then opt_aref(jit, ctx, asm)
when :opt_aset then opt_aset(jit, ctx, asm)
# opt_aset_with
# opt_aref_with
when :opt_length then opt_length(jit, ctx, asm)
when :opt_size then opt_size(jit, ctx, asm)
when :opt_empty_p then opt_empty_p(jit, ctx, asm)
when :opt_succ then opt_succ(jit, ctx, asm)
when :opt_not then opt_not(jit, ctx, asm)
when :opt_regexpmatch2 then opt_regexpmatch2(jit, ctx, asm)
# invokebuiltin
when :opt_invokebuiltin_delegate then opt_invokebuiltin_delegate(jit, ctx, asm)
when :opt_invokebuiltin_delegate_leave then opt_invokebuiltin_delegate_leave(jit, ctx, asm)
when :getlocal_WC_0 then getlocal_WC_0(jit, ctx, asm)
when :getlocal_WC_1 then getlocal_WC_1(jit, ctx, asm)
when :setlocal_WC_0 then setlocal_WC_0(jit, ctx, asm)
when :setlocal_WC_1 then setlocal_WC_1(jit, ctx, asm)
when :putobject_INT2FIX_0_ then putobject_INT2FIX_0_(jit, ctx, asm)
when :putobject_INT2FIX_1_ then putobject_INT2FIX_1_(jit, ctx, asm)
else CantCompile
end
end
private
#
# Insns
#
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def nop(jit, ctx, asm)
# Do nothing
KeepCompiling
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def getlocal(jit, ctx, asm)
idx = jit.operand(0)
level = jit.operand(1)
jit_getlocal_generic(jit, ctx, asm, idx:, level:)
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def getlocal_WC_0(jit, ctx, asm)
idx = jit.operand(0)
jit_getlocal_generic(jit, ctx, asm, idx:, level: 0)
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def getlocal_WC_1(jit, ctx, asm)
idx = jit.operand(0)
jit_getlocal_generic(jit, ctx, asm, idx:, level: 1)
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def setlocal(jit, ctx, asm)
idx = jit.operand(0)
level = jit.operand(1)
jit_setlocal_generic(jit, ctx, asm, idx:, level:)
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def setlocal_WC_0(jit, ctx, asm)
idx = jit.operand(0)
jit_setlocal_generic(jit, ctx, asm, idx:, level: 0)
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def setlocal_WC_1(jit, ctx, asm)
idx = jit.operand(0)
jit_setlocal_generic(jit, ctx, asm, idx:, level: 1)
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def getblockparam(jit, ctx, asm)
# EP level: how many scopes up the EP chain to walk (0 = the current frame)
level = jit.operand(1)
# Save the PC and SP because we might allocate
jit_prepare_routine_call(jit, ctx, asm)
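# (Keeping PC/SP up to date keeps the frame consistent if the callee
# allocates, raises, or triggers GC while we are in jitted code.)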
# A mirror of the interpreter code. Checking for the case
# where it's pushing rb_block_param_proxy.
side_exit = side_exit(jit, ctx)
# Load environment pointer EP from CFP
ep_reg = :rax
jit_get_ep(asm, level, reg: ep_reg)
# Bail when VM_ENV_FLAGS(ep, VM_FRAME_FLAG_MODIFIED_BLOCK_PARAM) is non-zero
# FIXME: This is testing bits in the same place that the WB check is testing.
# We should combine these at some point
asm.test([ep_reg, C.VALUE.size * C::VM_ENV_DATA_INDEX_FLAGS], C::VM_FRAME_FLAG_MODIFIED_BLOCK_PARAM)
# If the frame flag has been modified, then the actual proc value is
# already in the EP and we should just use the value.
frame_flag_modified = asm.new_label('frame_flag_modified')
asm.jnz(frame_flag_modified)
# This instruction writes the block handler to the EP. If we need to
# fire a write barrier for the write, then exit (we'll let the
# interpreter handle it so it can fire the write barrier).
# flags & VM_ENV_FLAG_WB_REQUIRED
asm.test([ep_reg, C.VALUE.size * C::VM_ENV_DATA_INDEX_FLAGS], C::VM_ENV_FLAG_WB_REQUIRED)
# if (flags & VM_ENV_FLAG_WB_REQUIRED) != 0
asm.jnz(side_exit)
# Convert the block handler into a proc
# call rb_vm_bh_to_procval(const rb_execution_context_t *ec, VALUE block_handler)
asm.mov(C_ARGS[0], EC)
# The block handler for the current frame
# note, VM_ASSERT(VM_ENV_LOCAL_P(ep))
asm.mov(C_ARGS[1], [ep_reg, C.VALUE.size * C::VM_ENV_DATA_INDEX_SPECVAL])
asm.call(C.rb_vm_bh_to_procval)
# Load environment pointer EP from CFP (again)
ep_reg = :rcx
jit_get_ep(asm, level, reg: ep_reg)
# Write the value at the environment pointer
idx = jit.operand(0)
offs = -(C.VALUE.size * idx)
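# Locals live below the EP, so the slot is addressed at a negative offset from it.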
asm.mov([ep_reg, offs], C_RET)
# Set the frame modified flag
asm.mov(:rax, [ep_reg, C.VALUE.size * C::VM_ENV_DATA_INDEX_FLAGS]) # flag_check
asm.or(:rax, C::VM_FRAME_FLAG_MODIFIED_BLOCK_PARAM) # modified_flag
asm.mov([ep_reg, C.VALUE.size * C::VM_ENV_DATA_INDEX_FLAGS], :rax)
asm.write_label(frame_flag_modified)
# Push the proc on the stack
stack_ret = ctx.stack_push(Type::Unknown)
ep_reg = :rax
jit_get_ep(asm, level, reg: ep_reg)
asm.mov(:rax, [ep_reg, offs])
asm.mov(stack_ret, :rax)
KeepCompiling
end
# setblockparam
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def getblockparamproxy(jit, ctx, asm)
# To get block_handler
unless jit.at_current_insn?
defer_compilation(jit, ctx, asm)
return EndBlock
end
starting_context = ctx.dup # make a copy for use with jit_chain_guard
# A mirror of the interpreter code. Checking for the case
# where it's pushing rb_block_param_proxy.
side_exit = side_exit(jit, ctx)
# EP level
level = jit.operand(1)
# Peek at the block handler so we can check whether it's nil
comptime_handler = jit.peek_at_block_handler(level)
# When a block handler is present, it should always be a GC-guarded
# pointer (VM_BH_ISEQ_BLOCK_P)
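# ISEQ block handlers are tagged pointers whose low two bits are 0b01,
# which is what the 0x3 mask / 0x1 comparison checks for.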
if comptime_handler != 0 && comptime_handler & 0x3 != 0x1
asm.incr_counter(:getblockpp_not_gc_guarded)
return CantCompile
end
# Load environment pointer EP from CFP
ep_reg = :rax
jit_get_ep(asm, level, reg: ep_reg)
# Bail when VM_ENV_FLAGS(ep, VM_FRAME_FLAG_MODIFIED_BLOCK_PARAM) is non-zero
asm.test([ep_reg, C.VALUE.size * C::VM_ENV_DATA_INDEX_FLAGS], C::VM_FRAME_FLAG_MODIFIED_BLOCK_PARAM)
asm.jnz(counted_exit(side_exit, :getblockpp_block_param_modified))
# Load the block handler for the current frame
# note, VM_ASSERT(VM_ENV_LOCAL_P(ep))
block_handler = :rax
asm.mov(block_handler, [ep_reg, C.VALUE.size * C::VM_ENV_DATA_INDEX_SPECVAL])
# Specialize compilation for the case where no block handler is present
if comptime_handler == 0
# Bail if there is a block handler
asm.cmp(block_handler, 0)
jit_chain_guard(:jnz, jit, starting_context, asm, counted_exit(side_exit, :getblockpp_block_handler_none))
putobject(jit, ctx, asm, val: Qnil)
else
# Block handler is a tagged pointer. Look at the tag. 0x03 is from VM_BH_ISEQ_BLOCK_P().
asm.and(block_handler, 0x3)
# Bail unless VM_BH_ISEQ_BLOCK_P(bh). This also checks for null.
asm.cmp(block_handler, 0x1)
jit_chain_guard(:jnz, jit, starting_context, asm, counted_exit(side_exit, :getblockpp_not_iseq_block))
# Push rb_block_param_proxy. It's a root, so no need to use jit_mov_gc_ptr.
top = ctx.stack_push(Type::BlockParamProxy)
asm.mov(:rax, C.rb_block_param_proxy)
asm.mov(top, :rax)
end
jump_to_next_insn(jit, ctx, asm)
EndBlock
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def getspecial(jit, ctx, asm)
# This insn takes two operands: key and type.
# The key is only used when type == 0, which we don't handle here;
# a non-zero type determines which kind of backref to fetch.
# key = jit.operand(0) # would be needed for the type == 0 case
rtype = jit.operand(1)
if rtype == 0
# not yet implemented
return CantCompile
elsif rtype & 0x01 != 0
# Fetch a "special" backref based on a char encoded by shifting by 1
# Can raise if matchdata uninitialized
jit_prepare_routine_call(jit, ctx, asm)
# call rb_backref_get()
asm.comment('rb_backref_get')
asm.call(C.rb_backref_get)
asm.mov(C_ARGS[0], C_RET) # backref
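# The key encodes the special variable's character shifted left by 1:
# $& (whole match), $` (pre-match), $' (post-match), $+ (last matched group).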
case [rtype >> 1].pack('c')
in ?&
asm.comment("rb_reg_last_match")
asm.call(C.rb_reg_last_match)
in ?`
asm.comment("rb_reg_match_pre")
asm.call(C.rb_reg_match_pre)
in ?'
asm.comment("rb_reg_match_post")
asm.call(C.rb_reg_match_post)
in ?+
asm.comment("rb_reg_match_last")
asm.call(C.rb_reg_match_last)
end
stack_ret = ctx.stack_push(Type::Unknown)
asm.mov(stack_ret, C_RET)
KeepCompiling
else
# Fetch the N-th match from the last backref based on type shifted by 1
# Can raise if matchdata uninitialized
jit_prepare_routine_call(jit, ctx, asm)
# call rb_backref_get()
asm.comment('rb_backref_get')
asm.call(C.rb_backref_get)
# rb_reg_nth_match((int)(type >> 1), backref);
asm.comment('rb_reg_nth_match')
asm.mov(C_ARGS[0], rtype >> 1)
asm.mov(C_ARGS[1], C_RET) # backref
asm.call(C.rb_reg_nth_match)
stack_ret = ctx.stack_push(Type::Unknown)
asm.mov(stack_ret, C_RET)
KeepCompiling
end
end
# setspecial
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def getinstancevariable(jit, ctx, asm)
# Specialize on a compile-time receiver, and split a block for chain guards
unless jit.at_current_insn?
defer_compilation(jit, ctx, asm)
return EndBlock
end
id = jit.operand(0)
comptime_obj = jit.peek_at_self
jit_getivar(jit, ctx, asm, comptime_obj, id, nil, SelfOpnd)
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def setinstancevariable(jit, ctx, asm)
starting_context = ctx.dup # make a copy for use with jit_chain_guard
# Defer compilation so we can specialize on a runtime `self`
unless jit.at_current_insn?
defer_compilation(jit, ctx, asm)
return EndBlock
end
ivar_name = jit.operand(0)
comptime_receiver = jit.peek_at_self
# If the comptime receiver is frozen, writing an IV will raise an exception
# and we don't want the jitted code to have to deal with that situation.
if C.rb_obj_frozen_p(comptime_receiver)
asm.incr_counter(:setivar_frozen)
return CantCompile
end
# Check if the comptime receiver is a T_OBJECT
receiver_t_object = C::BUILTIN_TYPE(comptime_receiver) == C::T_OBJECT
# If the receiver isn't a T_OBJECT, or uses a custom allocator,
# just emit the IV write as a function call.
# Too-complex shapes can't use index access either, so they take the same path.
if !receiver_t_object || shape_too_complex?(comptime_receiver) || ctx.chain_depth >= 10
asm.comment('call rb_vm_setinstancevariable')
ic = jit.operand(1)
# The function could raise exceptions.
# Note that this modifies REG_SP, which is why we do it first
jit_prepare_routine_call(jit, ctx, asm)
# Get the operands from the stack
val_opnd = ctx.stack_pop(1)
# Call rb_vm_setinstancevariable(iseq, obj, id, val, ic);
asm.mov(:rdi, jit.iseq.to_i)
asm.mov(:rsi, [CFP, C.rb_control_frame_t.offsetof(:self)])
asm.mov(:rdx, ivar_name)
asm.mov(:rcx, val_opnd)
asm.mov(:r8, ic)
asm.call(C.rb_vm_setinstancevariable)
else
# Get the iv index
shape_id = C.rb_shape_get_shape_id(comptime_receiver)
ivar_index = C.rb_shape_get_iv_index(shape_id, ivar_name)
# Get the receiver
asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:self)])
# Generate a side exit
side_exit = side_exit(jit, ctx)
# Upgrade type
guard_object_is_heap(jit, ctx, asm, :rax, SelfOpnd, :setivar_not_heap)
asm.comment('guard shape')
asm.cmp(DwordPtr[:rax, C.rb_shape_id_offset], shape_id)
megamorphic_side_exit = counted_exit(side_exit, :setivar_megamorphic)
jit_chain_guard(:jne, jit, starting_context, asm, megamorphic_side_exit)
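# On a shape mismatch, jit_chain_guard branches to a recompiled version of this
# block with a deeper guard chain, falling back to the megamorphic side exit
# once the chain gets too deep.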
# If we don't have an instance variable index, then we need to
# transition out of the current shape.
if ivar_index.nil?
shape = C.rb_shape_get_shape_by_id(shape_id)
current_capacity = shape.capacity
dest_shape = C.rb_shape_get_next_no_warnings(shape, comptime_receiver, ivar_name)
new_shape_id = C.rb_shape_id(dest_shape)
if new_shape_id == C::OBJ_TOO_COMPLEX_SHAPE_ID
asm.incr_counter(:setivar_too_complex)
return CantCompile
end
ivar_index = shape.next_iv_index
# If the new shape has a different capacity, we need to
# reallocate the object.
needs_extension = dest_shape.capacity != shape.capacity
if needs_extension
# Generate the C call so that runtime code will increase
# the capacity and set the buffer.
asm.mov(C_ARGS[0], :rax)
asm.mov(C_ARGS[1], current_capacity)
asm.mov(C_ARGS[2], dest_shape.capacity)
asm.call(C.rb_ensure_iv_list_size)
# Load the receiver again after the function call
asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:self)])
end
write_val = ctx.stack_pop(1)
jit_write_iv(asm, comptime_receiver, :rax, :rcx, ivar_index, write_val, needs_extension)
# Store the new shape
asm.comment('write shape')
asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:self)]) # reload after jit_write_iv
asm.mov(DwordPtr[:rax, C.rb_shape_id_offset], new_shape_id)
else
# If the iv index already exists, then we don't need to
# transition to a new shape. We found the iv index by searching
# up the shape tree, so if that transition has already been made,
# there's no reason to update the shape on the object. Just set the IV.
write_val = ctx.stack_pop(1)
jit_write_iv(asm, comptime_receiver, :rax, :rcx, ivar_index, write_val, false)
end
skip_wb = asm.new_label('skip_wb')
# If the value we're writing is an immediate, we don't need to WB
asm.test(write_val, C::RUBY_IMMEDIATE_MASK)
asm.jnz(skip_wb)
# If the value we're writing is nil or false, we don't need to WB
asm.cmp(write_val, Qnil)
asm.jbe(skip_wb)
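# (Qfalse is 0 and Qnil is a small tagged constant, so an unsigned <= Qnil
# check catches both; heap object pointers are always larger than Qnil.)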
asm.comment('write barrier')
asm.mov(C_ARGS[0], [CFP, C.rb_control_frame_t.offsetof(:self)]) # reload after jit_write_iv
asm.mov(C_ARGS[1], write_val)
asm.call(C.rb_gc_writebarrier)
asm.write_label(skip_wb)
end
KeepCompiling
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def getclassvariable(jit, ctx, asm)
# rb_vm_getclassvariable can raise exceptions.
jit_prepare_routine_call(jit, ctx, asm)
asm.mov(C_ARGS[0], [CFP, C.rb_control_frame_t.offsetof(:iseq)])
asm.mov(C_ARGS[1], CFP)
asm.mov(C_ARGS[2], jit.operand(0))
asm.mov(C_ARGS[3], jit.operand(1))
asm.call(C.rb_vm_getclassvariable)
top = ctx.stack_push(Type::Unknown)
asm.mov(top, C_RET)
KeepCompiling
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def setclassvariable(jit, ctx, asm)
# rb_vm_setclassvariable can raise exceptions.
jit_prepare_routine_call(jit, ctx, asm)
asm.mov(C_ARGS[0], [CFP, C.rb_control_frame_t.offsetof(:iseq)])
asm.mov(C_ARGS[1], CFP)
asm.mov(C_ARGS[2], jit.operand(0))
asm.mov(C_ARGS[3], ctx.stack_pop(1))
asm.mov(C_ARGS[4], jit.operand(1))
asm.call(C.rb_vm_setclassvariable)
KeepCompiling
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def opt_getconstant_path(jit, ctx, asm)
# Cut the block for invalidation
unless jit.at_current_insn?
defer_compilation(jit, ctx, asm)
return EndBlock
end
ic = C.iseq_inline_constant_cache.new(jit.operand(0))
idlist = ic.segments
# Make sure there is an exit for this block as the interpreter might want
# to invalidate this block from rb_rjit_constant_ic_update().
# For now, we always take an entry exit even if it was a side exit.
Invariants.ensure_block_entry_exit(jit, cause: 'opt_getconstant_path')
# See vm_ic_hit_p(). The same conditions are checked in yjit_constant_ic_update().
ice = ic.entry
if ice.nil?
# In this case, leave a block that unconditionally side exits
# for the interpreter to invalidate.
asm.incr_counter(:optgetconst_not_cached)
return CantCompile
end
if ice.ic_cref # with cref
# Cache is keyed on a certain lexical scope. Use the interpreter's cache.
side_exit = side_exit(jit, ctx)
# Call function to verify the cache. It doesn't allocate or call methods.
asm.mov(C_ARGS[0], ic.to_i)
asm.mov(C_ARGS[1], [CFP, C.rb_control_frame_t.offsetof(:ep)])
asm.call(C.rb_vm_ic_hit_p)
# Check the result. SysV only specifies one byte for _Bool return values,
# so it's important we only check one bit to ignore the higher bits in the register.
asm.test(C_RET, 1)
asm.jz(counted_exit(side_exit, :optgetconst_cache_miss))
asm.mov(:rax, ic.to_i) # inline_cache
asm.mov(:rax, [:rax, C.iseq_inline_constant_cache.offsetof(:entry)]) # ic_entry
asm.mov(:rax, [:rax, C.iseq_inline_constant_cache_entry.offsetof(:value)]) # ic_entry_val
# Push ic->entry->value
stack_top = ctx.stack_push(Type::Unknown)
asm.mov(stack_top, :rax)
else # without cref
# TODO: implement this
# Optimize for single ractor mode.
# if !assume_single_ractor_mode(jit, ocb)
# return CantCompile
# end
# Invalidate output code on any constant writes associated with
# constants referenced within the current block.
Invariants.assume_stable_constant_names(jit, idlist)
putobject(jit, ctx, asm, val: ice.value)
end
jump_to_next_insn(jit, ctx, asm)
EndBlock
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def getconstant(jit, ctx, asm)
id = jit.operand(0)
# vm_get_ev_const can raise exceptions.
jit_prepare_routine_call(jit, ctx, asm)
allow_nil_opnd = ctx.stack_pop(1)
klass_opnd = ctx.stack_pop(1)
asm.mov(C_ARGS[0], EC)
asm.mov(C_ARGS[1], klass_opnd)
asm.mov(C_ARGS[2], id)
asm.mov(C_ARGS[3], allow_nil_opnd)
asm.call(C.rb_vm_get_ev_const)
top = ctx.stack_push(Type::Unknown)
asm.mov(top, C_RET)
KeepCompiling
end
# setconstant
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def getglobal(jit, ctx, asm)
gid = jit.operand(0)
# Save the PC and SP because we might make a Ruby call for warning
jit_prepare_routine_call(jit, ctx, asm)
asm.mov(C_ARGS[0], gid)
asm.call(C.rb_gvar_get)
top = ctx.stack_push(Type::Unknown)
asm.mov(top, C_RET)
KeepCompiling
end
# setglobal
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def putnil(jit, ctx, asm)
putobject(jit, ctx, asm, val: Qnil)
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def putself(jit, ctx, asm)
stack_top = ctx.stack_push_self
asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:self)])
asm.mov(stack_top, :rax)
KeepCompiling
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def putobject(jit, ctx, asm, val: jit.operand(0))
# Push it to the stack
val_type = Type.from(C.to_ruby(val))
stack_top = ctx.stack_push(val_type)
if asm.imm32?(val)
asm.mov(stack_top, val)
else # 64-bit immediates can't be directly written to memory
asm.mov(:rax, val)
asm.mov(stack_top, :rax)
end
KeepCompiling
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def putspecialobject(jit, ctx, asm)
object_type = jit.operand(0)
if object_type == C::VM_SPECIAL_OBJECT_VMCORE
stack_top = ctx.stack_push(Type::UnknownHeap)
asm.mov(:rax, C.rb_mRubyVMFrozenCore)
asm.mov(stack_top, :rax)
KeepCompiling
else
# TODO: implement for VM_SPECIAL_OBJECT_CBASE and
# VM_SPECIAL_OBJECT_CONST_BASE
CantCompile
end
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def putstring(jit, ctx, asm)
put_val = jit.operand(0, ruby: true)
# Save the PC and SP because the callee will allocate
jit_prepare_routine_call(jit, ctx, asm)
asm.mov(C_ARGS[0], EC)
asm.mov(C_ARGS[1], to_value(put_val))
asm.mov(C_ARGS[2], 0)
asm.call(C.rb_ec_str_resurrect)
stack_top = ctx.stack_push(Type::TString)
asm.mov(stack_top, C_RET)
KeepCompiling
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def putchilledstring(jit, ctx, asm)
put_val = jit.operand(0, ruby: true)
# Save the PC and SP because the callee will allocate
jit_prepare_routine_call(jit, ctx, asm)
asm.mov(C_ARGS[0], EC)
asm.mov(C_ARGS[1], to_value(put_val))
asm.mov(C_ARGS[2], 1)
asm.call(C.rb_ec_str_resurrect)
stack_top = ctx.stack_push(Type::TString)
asm.mov(stack_top, C_RET)
KeepCompiling
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def concatstrings(jit, ctx, asm)
n = jit.operand(0)
# Save the PC and SP because we are allocating
jit_prepare_routine_call(jit, ctx, asm)
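# The n strings to concatenate are the top n stack slots; compute a pointer to the lowest one.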
asm.lea(:rax, ctx.sp_opnd(-C.VALUE.size * n))
# call rb_str_concat_literals(size_t n, const VALUE *strings);
asm.mov(C_ARGS[0], n)
asm.mov(C_ARGS[1], :rax)
asm.call(C.rb_str_concat_literals)
ctx.stack_pop(n)
stack_ret = ctx.stack_push(Type::TString)
asm.mov(stack_ret, C_RET)
KeepCompiling
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def anytostring(jit, ctx, asm)
# Save the PC and SP since we might call #to_s
jit_prepare_routine_call(jit, ctx, asm)
str = ctx.stack_pop(1)
val = ctx.stack_pop(1)
asm.mov(C_ARGS[0], str)
asm.mov(C_ARGS[1], val)
asm.call(C.rb_obj_as_string_result)
# Push the return value
stack_ret = ctx.stack_push(Type::TString)
asm.mov(stack_ret, C_RET)
KeepCompiling
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def toregexp(jit, ctx, asm)
opt = jit.operand(0, signed: true)
cnt = jit.operand(1)
# Save the PC and SP because this allocates an object and could
# raise an exception.
jit_prepare_routine_call(jit, ctx, asm)
asm.lea(:rax, ctx.sp_opnd(-C.VALUE.size * cnt)) # values_ptr
ctx.stack_pop(cnt)
asm.mov(C_ARGS[0], 0)
asm.mov(C_ARGS[1], cnt)
asm.mov(C_ARGS[2], :rax) # values_ptr
asm.call(C.rb_ary_tmp_new_from_values)
# Save the array so we can clear it later
asm.push(C_RET)
asm.push(C_RET) # Alignment
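# (The second push keeps the stack 16-byte aligned for the upcoming C calls,
# as required by the x86-64 SysV ABI.)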
asm.mov(C_ARGS[0], C_RET)
asm.mov(C_ARGS[1], opt)
asm.call(C.rb_reg_new_ary)
# The actual regex is in RAX now. Pop the temp array from
# rb_ary_tmp_new_from_values into C arg regs so we can clear it
asm.pop(:rcx) # Alignment
asm.pop(:rcx) # ary
# The value we want to push on the stack is in RAX right now
stack_ret = ctx.stack_push(Type::UnknownHeap)
asm.mov(stack_ret, C_RET)
# Clear the temp array.
asm.mov(C_ARGS[0], :rcx) # ary
asm.call(C.rb_ary_clear)
KeepCompiling
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def intern(jit, ctx, asm)
# Save the PC and SP because we might allocate
jit_prepare_routine_call(jit, ctx, asm)
str = ctx.stack_pop(1)
asm.mov(C_ARGS[0], str)
asm.call(C.rb_str_intern)
# Push the return value
stack_ret = ctx.stack_push(Type::Unknown)
asm.mov(stack_ret, C_RET)
KeepCompiling
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def newarray(jit, ctx, asm)
n = jit.operand(0)
# Save the PC and SP because we are allocating
jit_prepare_routine_call(jit, ctx, asm)
# If n is 0, then elts is never going to be read, so we can just pass null
if n == 0
values_ptr = 0
else
asm.comment('load pointer to array elts')
offset_magnitude = C.VALUE.size * n
values_opnd = ctx.sp_opnd(-(offset_magnitude))
asm.lea(:rax, values_opnd)
values_ptr = :rax
end
# call rb_ec_ary_new_from_values(struct rb_execution_context_struct *ec, long n, const VALUE *elts);
asm.mov(C_ARGS[0], EC)
asm.mov(C_ARGS[1], n)
asm.mov(C_ARGS[2], values_ptr)
asm.call(C.rb_ec_ary_new_from_values)
ctx.stack_pop(n)
stack_ret = ctx.stack_push(Type::TArray)
asm.mov(stack_ret, C_RET)
KeepCompiling
end
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def duparray(jit, ctx, asm)
ary = jit.operand(0)
# Save the PC and SP because we are allocating
jit_prepare_routine_call(jit, ctx, asm)
# call rb_ary_resurrect(VALUE ary);
asm.comment('call rb_ary_resurrect')
asm.mov(C_ARGS[0], ary)
asm.call(C.rb_ary_resurrect)
stack_ret = ctx.stack_push(Type::TArray)
asm.mov(stack_ret, C_RET)
KeepCompiling
end
# duphash
# @param jit [RubyVM::RJIT::JITState]
# @param ctx [RubyVM::RJIT::Context]
# @param asm [RubyVM::RJIT::Assembler]
def expandarray(jit, ctx, asm)
# Both operands are rb_num_t, which is unsigned
num = jit.operand(0)
flag = jit.operand(1)
# If this instruction has the splat flag, then bail out.
if flag & 0x01 != 0
asm.incr_counter(:expandarray_splat)
return CantCompile
end
# If this instruction has the postarg flag, then bail out.
if flag & 0x02 != 0
asm.incr_counter(:expandarray_postarg)
return CantCompile
end
side_exit = side_exit(jit, ctx)
array_opnd = ctx.stack_opnd(0)
array_stack_opnd = StackOpnd[0]
# num is the number of requested values. If there aren't enough in the
# array then we're going to push on nils.
if ctx.get_opnd_type(array_stack_opnd) == Type::Nil
ctx.stack_pop(1) # pop after using the type info
# special case for a, b = nil pattern
# push N nils onto the stack