@@ -227,6 +227,7 @@ void ir_print_const(const ir_ctx *ctx, const ir_insn *insn, FILE *f, bool quoted
 #define ir_op_flag_d0   ir_op_flag_d
 #define ir_op_flag_d1   (ir_op_flag_d | 1 | (1 << IR_OP_FLAG_OPERANDS_SHIFT))
 #define ir_op_flag_d1X1 (ir_op_flag_d | 1 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
+#define ir_op_flag_d1X2 (ir_op_flag_d | 1 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
 #define ir_op_flag_d2   (ir_op_flag_d | 2 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
 #define ir_op_flag_d2C  (ir_op_flag_d | IR_OP_FLAG_COMMUTATIVE | 2 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
 #define ir_op_flag_d3   (ir_op_flag_d | 3 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
@@ -270,6 +271,7 @@ void ir_print_const(const ir_ctx *ctx, const ir_insn *insn, FILE *f, bool quoted
 #define ir_op_flag_s3   (ir_op_flag_s | 3 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
 #define ir_op_flag_x1   (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_CALL | 1 | (1 << IR_OP_FLAG_OPERANDS_SHIFT))
 #define ir_op_flag_x2   (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_CALL | 2 | (2 << IR_OP_FLAG_OPERANDS_SHIFT))
+#define ir_op_flag_x2X1 (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_CALL | 2 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
 #define ir_op_flag_x3   (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_CALL | 3 | (3 << IR_OP_FLAG_OPERANDS_SHIFT))
 #define ir_op_flag_xN   (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_CALL | IR_OP_FLAG_VAR_INPUTS)
 #define ir_op_flag_a1   (IR_OP_FLAG_CONTROL|IR_OP_FLAG_MEM|IR_OP_FLAG_MEM_ALLOC | 1 | (1 << IR_OP_FLAG_OPERANDS_SHIFT))
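Note on the new flag variants: judging from the existing macros, the low bits of an ir_op_flag_* value appear to carry the op's input-edge count while the field at IR_OP_FLAG_OPERANDS_SHIFT carries the total operand-slot count, so d1X2 would describe one input edge with two auxiliary operand slots and x2X1 two input edges with one auxiliary slot. A minimal standalone sketch of that packing, using made-up constants rather than the real definitions from ir_private.h:

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative re-creation of the ir_op_flag_* packing; the shift, mask
     * and category values below are assumptions for this sketch only, not
     * the definitions from ir_private.h. */
    #define OPF_OPERANDS_SHIFT 24              /* stand-in for IR_OP_FLAG_OPERANDS_SHIFT */
    #define OPF_COUNT_MASK     0x7             /* assumed width of both count fields */
    #define OPF_DATA           (1u << 8)       /* stand-in for the "data op" category bit */

    /* d1X2: one input edge, three operand slots in total (two auxiliary). */
    #define OPF_D1X2 (OPF_DATA | 1 | (3u << OPF_OPERANDS_SHIFT))

    int main(void)
    {
        uint32_t flags    = OPF_D1X2;
        uint32_t inputs   = flags & OPF_COUNT_MASK;
        uint32_t operands = (flags >> OPF_OPERANDS_SHIFT) & OPF_COUNT_MASK;

        printf("inputs=%u operands=%u auxiliary=%u\n", inputs, operands, operands - inputs);
        return 0;
    }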
@@ -392,6 +394,8 @@ void ir_init(ir_ctx *ctx, uint32_t flags, ir_ref consts_limit, ir_ref insns_limi
 	ctx->insns_limit = insns_limit;
 	ctx->consts_count = -(IR_TRUE - 1);
 	ctx->consts_limit = consts_limit;
+	ctx->const_hash = ctx->_const_hash;
+	ctx->const_hash_mask = IR_CONST_HASH_SIZE - 1;
 	ctx->fold_cse_limit = IR_UNUSED + 1;
 	ctx->flags = flags;
 
@@ -414,6 +418,9 @@ void ir_free(ir_ctx *ctx)
 {
 	ir_insn *buf = ctx->ir_base - ctx->consts_limit;
 	ir_mem_free(buf);
+	if (ctx->value_params) {
+		ir_mem_free(ctx->value_params);
+	}
 	if (ctx->strtab.data) {
 		ir_strtab_free(&ctx->strtab);
 	}
@@ -468,6 +475,10 @@ void ir_free(ir_ctx *ctx)
 		ir_list_free((ir_list*)ctx->osr_entry_loads);
 		ir_mem_free(ctx->osr_entry_loads);
 	}
+
+	if (ctx->const_hash_mask != IR_CONST_HASH_SIZE - 1) {
+		ir_mem_free(ctx->const_hash);
+	}
 }
 
 ir_ref ir_unique_const_addr(ir_ctx *ctx, uintptr_t addr)
@@ -479,72 +490,64 @@ ir_ref ir_unique_const_addr(ir_ctx *ctx, uintptr_t addr)
 	insn->val.u64 = addr;
 	/* don't insert into constants chain */
 	insn->prev_const = IR_UNUSED;
-#if 0
-	insn->prev_const = ctx->prev_const_chain[IR_ADDR];
-	ctx->prev_const_chain[IR_ADDR] = ref;
-#endif
-#if 0
-	ir_insn *prev_insn, *next_insn;
-	ir_ref next;
-
-	prev_insn = NULL;
-	next = ctx->prev_const_chain[IR_ADDR];
-	while (next) {
-		next_insn = &ctx->ir_base[next];
-		if (UNEXPECTED(next_insn->val.u64 >= addr)) {
-			break;
-		}
-		prev_insn = next_insn;
-		next = next_insn->prev_const;
-	}
-
-	if (prev_insn) {
-		insn->prev_const = prev_insn->prev_const;
-		prev_insn->prev_const = ref;
-	} else {
-		insn->prev_const = ctx->prev_const_chain[IR_ADDR];
-		ctx->prev_const_chain[IR_ADDR] = ref;
-	}
-#endif
 
 	return ref;
 }
 
+IR_ALWAYS_INLINE uintptr_t ir_const_hash(ir_val val, uint32_t optx)
+{
+	return (val.u64 ^ (val.u64 >> 32) ^ optx);
+}
+
+static IR_NEVER_INLINE void ir_const_hash_rehash(ir_ctx *ctx)
+{
+	ir_insn *insn;
+	ir_ref ref;
+	uintptr_t hash;
+
+	if (ctx->const_hash_mask != IR_CONST_HASH_SIZE - 1) {
+		ir_mem_free(ctx->const_hash);
+	}
+	ctx->const_hash_mask = (ctx->const_hash_mask + 1) * 2 - 1;
+	ctx->const_hash = ir_mem_calloc(ctx->const_hash_mask + 1, sizeof(ir_ref));
+	for (ref = IR_TRUE - 1; ref > -ctx->consts_count; ref--) {
+		insn = &ctx->ir_base[ref];
+		hash = ir_const_hash(insn->val, insn->optx) & ctx->const_hash_mask;
+		insn->prev_const = ctx->const_hash[hash];
+		ctx->const_hash[hash] = ref;
+	}
+}
+
 ir_ref ir_const_ex(ir_ctx *ctx, ir_val val, uint8_t type, uint32_t optx)
 {
-	ir_insn *insn, *prev_insn;
+	ir_insn *insn;
 	ir_ref ref, prev;
+	uintptr_t hash;
 
 	if (type == IR_BOOL) {
 		return val.u64 ? IR_TRUE : IR_FALSE;
 	} else if (type == IR_ADDR && val.u64 == 0) {
 		return IR_NULL;
 	}
-	prev_insn = NULL;
-	ref = ctx->prev_const_chain[type];
+
+	hash = ir_const_hash(val, optx) & ctx->const_hash_mask;
+	ref = ctx->const_hash[hash];
 	while (ref) {
 		insn = &ctx->ir_base[ref];
-		if (UNEXPECTED(insn->val.u64 >= val.u64)) {
-			if (insn->val.u64 == val.u64) {
-				if (insn->optx == optx) {
-					return ref;
-				}
-			} else {
-				break;
-			}
+		if (insn->val.u64 == val.u64 && insn->optx == optx) {
+			return ref;
 		}
-		prev_insn = insn;
 		ref = insn->prev_const;
 	}
 
-	if (prev_insn) {
-		prev = prev_insn->prev_const;
-		prev_insn->prev_const = -ctx->consts_count;
-	} else {
-		prev = ctx->prev_const_chain[type];
-		ctx->prev_const_chain[type] = -ctx->consts_count;
+	if ((uintptr_t)ctx->consts_count > ctx->const_hash_mask) {
+		ir_const_hash_rehash(ctx);
+		hash = ir_const_hash(val, optx) & ctx->const_hash_mask;
 	}
 
+	prev = ctx->const_hash[hash];
+	ctx->const_hash[hash] = -ctx->consts_count;
+
 	ref = ir_next_const(ctx);
 	insn = &ctx->ir_base[ref];
 	insn->prev_const = prev;
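The change above replaces the per-type sorted prev_const_chain lists with an open hash table: each constant is bucketed by ir_const_hash(val, optx), collisions are chained through the existing prev_const field, and ir_const_hash_rehash doubles the table once the number of constants exceeds const_hash_mask. A self-contained sketch of the same interning scheme; the names and the fixed capacity are assumptions of this sketch, not the IR API:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Standalone sketch: a power-of-two table of bucket heads, collisions
     * chained through a per-entry prev field, and a doubling rehash once the
     * constant count outgrows the mask. */
    typedef struct {
        uint64_t val;
        uint32_t optx;
        int32_t  prev;              /* next entry in the same bucket, 0 = end */
    } const_entry;

    typedef struct {
        const_entry *consts;        /* 1-based; index 0 means "no entry" */
        int32_t      count;
        int32_t     *hash;          /* bucket heads */
        uintptr_t    hash_mask;
    } const_pool;

    static uintptr_t chash(uint64_t val, uint32_t optx)
    {
        return (uintptr_t)(val ^ (val >> 32) ^ optx);   /* same mix as ir_const_hash */
    }

    static void rehash(const_pool *p)
    {
        free(p->hash);
        p->hash_mask = (p->hash_mask + 1) * 2 - 1;
        p->hash = calloc(p->hash_mask + 1, sizeof(int32_t));
        for (int32_t i = p->count; i >= 1; i--) {       /* re-bucket every constant */
            uintptr_t h = chash(p->consts[i].val, p->consts[i].optx) & p->hash_mask;
            p->consts[i].prev = p->hash[h];
            p->hash[h] = i;
        }
    }

    /* Return the index of an identical existing constant, or intern a new one. */
    static int32_t intern_const(const_pool *p, uint64_t val, uint32_t optx)
    {
        uintptr_t h = chash(val, optx) & p->hash_mask;
        for (int32_t i = p->hash[h]; i; i = p->consts[i].prev) {
            if (p->consts[i].val == val && p->consts[i].optx == optx) {
                return i;                               /* deduplicated */
            }
        }
        if ((uintptr_t)p->count + 1 > p->hash_mask) {
            rehash(p);
            h = chash(val, optx) & p->hash_mask;        /* mask changed, recompute */
        }
        int32_t i = ++p->count;                         /* capacity check omitted */
        p->consts[i].val  = val;
        p->consts[i].optx = optx;
        p->consts[i].prev = p->hash[h];                 /* chain into the bucket */
        p->hash[h] = i;
        return i;
    }

    int main(void)
    {
        const_pool p = {0};
        p.hash_mask = 7;                                /* small initial table */
        p.hash   = calloc(p.hash_mask + 1, sizeof(int32_t));
        p.consts = calloc(1024, sizeof(const_entry));

        int32_t a = intern_const(&p, 42, 0);
        int32_t b = intern_const(&p, 42, 0);            /* same value+optx -> same index */
        int32_t c = intern_const(&p, 42, 1);            /* different optx  -> new entry  */
        printf("%d %d %d\n", a, b, c);                  /* prints "1 1 2" */

        free(p.hash);
        free(p.consts);
        return 0;
    }

Unlike the real code, this sketch heap-allocates its initial table, so rehash() can free it unconditionally; ir.c keeps the first table inline in ir_ctx (_const_hash) and only frees const_hash once it has been replaced, which is what the const_hash_mask != IR_CONST_HASH_SIZE - 1 checks guard.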
@@ -2092,10 +2095,10 @@ IR_ALWAYS_INLINE ir_ref ir_find_aliasing_vload_i(ir_ctx *ctx, ir_ref ref, ir_typ
 				if (insn->type == type) {
 					return ref; /* load forwarding (L2L) */
 				} else if (ir_type_size[insn->type] == ir_type_size[type]) {
-					return ir_fold1(ctx, IR_OPT(IR_BITCAST, type), ref); /* load forwarding with bitcast (L2L) */
+					return ref; /* load forwarding with bitcast (L2L) */
 				} else if (ir_type_size[insn->type] > ir_type_size[type]
 						&& IR_IS_TYPE_INT(type) && IR_IS_TYPE_INT(insn->type)) {
-					return ir_fold1(ctx, IR_OPT(IR_TRUNC, type), ref); /* partial load forwarding (L2L) */
+					return ref; /* partial load forwarding (L2L) */
 				}
 			}
 		} else if (insn->op == IR_VSTORE) {
@@ -2105,10 +2108,10 @@ IR_ALWAYS_INLINE ir_ref ir_find_aliasing_vload_i(ir_ctx *ctx, ir_ref ref, ir_typ
 			if (type2 == type) {
 				return insn->op3; /* store forwarding (S2L) */
 			} else if (ir_type_size[type2] == ir_type_size[type]) {
-				return ir_fold1(ctx, IR_OPT(IR_BITCAST, type), insn->op3); /* store forwarding with bitcast (S2L) */
+				return insn->op3; /* store forwarding with bitcast (S2L) */
 			} else if (ir_type_size[type2] > ir_type_size[type]
 					&& IR_IS_TYPE_INT(type) && IR_IS_TYPE_INT(type2)) {
-				return ir_fold1(ctx, IR_OPT(IR_TRUNC, type), insn->op3); /* partial store forwarding (S2L) */
+				return insn->op3; /* partial store forwarding (S2L) */
 			} else {
 				break;
 			}
@@ -3214,6 +3217,13 @@ ir_ref _ir_VA_ARG(ir_ctx *ctx, ir_type type, ir_ref list)
 	return ctx->control = ir_emit2(ctx, IR_OPT(IR_VA_ARG, type), ctx->control, list);
 }
 
+ir_ref _ir_VA_ARG_EX(ir_ctx *ctx, ir_type type, ir_ref list, size_t size)
+{
+	IR_ASSERT(ctx->control);
+	IR_ASSERT(size <= 0x7fffffff);
+	return ctx->control = ir_emit3(ctx, IR_OPT(IR_VA_ARG, type), ctx->control, list, (ir_ref)size);
+}
+
 ir_ref _ir_BLOCK_BEGIN(ir_ctx *ctx)
 {
 	IR_ASSERT(ctx->control);
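The new builder mirrors _ir_VA_ARG but passes an explicit size as the third operand of the emitted IR_VA_ARG, presumably for variadic arguments larger than the default slot. A hypothetical call site; the IR_ADDR type, the 16-byte size, and the pre-built list ref are assumptions of this sketch:

    /* hypothetical: fetch a 16-byte variadic argument via the new helper;
     * ctx->control must already be set, as both IR_ASSERTs require */
    ir_ref arg = _ir_VA_ARG_EX(ctx, IR_ADDR, list, 16);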