@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012-2020 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2021 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -1111,6 +1111,20 @@ class ARM64Assembler {
         insn(0x0);
     }
 
+    template<int datasize>
+    ALWAYS_INLINE static bool isValidLDPImm(int immediate)
+    {
+        unsigned immedShiftAmount = memPairOffsetShift(false, MEMPAIROPSIZE_INT(datasize));
+        return isValidSignedImm7(immediate, immedShiftAmount);
+    }
+
+    template<int datasize>
+    ALWAYS_INLINE static bool isValidLDPFPImm(int immediate)
+    {
+        unsigned immedShiftAmount = memPairOffsetShift(true, MEMPAIROPSIZE_FP(datasize));
+        return isValidSignedImm7(immediate, immedShiftAmount);
+    }
+
     template<int datasize>
     ALWAYS_INLINE void ldp(RegisterID rt, RegisterID rt2, RegisterID rn, PairPostIndex simm)
     {
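A minimal usage sketch of the new validity check (hypothetical caller; loadPair64 and materializeAddress are illustrative names assumed for this sketch, not part of this patch or of MacroAssemblerARM64):

    // Decide between the immediate-offset ldp form and an address-materialization
    // fallback, using the isValidLDPImm<> helper added above.
    static void loadPair64(ARM64Assembler& assembler, RegisterID base, int offset,
        RegisterID dest1, RegisterID dest2, RegisterID scratch)
    {
        if (ARM64Assembler::isValidLDPImm<64>(offset)) {
            // Offset fits the scaled signed 7-bit field: emit ldp directly.
            assembler.ldp<64>(dest1, dest2, base, offset);
            return;
        }
        // Otherwise compute base + offset into a scratch register first.
        materializeAddress(assembler, base, offset, scratch); // hypothetical helper: scratch = base + offset
        assembler.ldp<64>(dest1, dest2, scratch);
    }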
@@ -1126,17 +1140,45 @@ class ARM64Assembler {
     }
 
     template<int datasize>
-    ALWAYS_INLINE void ldp(RegisterID rt, RegisterID rt2, RegisterID rn, unsigned pimm = 0)
+    ALWAYS_INLINE void ldp(RegisterID rt, RegisterID rt2, RegisterID rn, int simm = 0)
+    {
+        CHECK_DATASIZE();
+        insn(loadStoreRegisterPairOffset(MEMPAIROPSIZE_INT(datasize), false, MemOp_LOAD, simm, rn, rt, rt2));
+    }
+
+    template<int datasize>
+    ALWAYS_INLINE void ldnp(RegisterID rt, RegisterID rt2, RegisterID rn, int simm = 0)
+    {
+        CHECK_DATASIZE();
+        insn(loadStoreRegisterPairNonTemporal(MEMPAIROPSIZE_INT(datasize), false, MemOp_LOAD, simm, rn, rt, rt2));
+    }
+
+    template<int datasize>
+    ALWAYS_INLINE void ldp(FPRegisterID rt, FPRegisterID rt2, RegisterID rn, PairPostIndex simm)
+    {
+        CHECK_DATASIZE();
+        insn(loadStoreRegisterPairPostIndex(MEMPAIROPSIZE_FP(datasize), true, MemOp_LOAD, simm, rn, rt, rt2));
+    }
+
+    template<int datasize>
+    ALWAYS_INLINE void ldp(FPRegisterID rt, FPRegisterID rt2, RegisterID rn, PairPreIndex simm)
+    {
+        CHECK_DATASIZE();
+        insn(loadStoreRegisterPairPreIndex(MEMPAIROPSIZE_FP(datasize), true, MemOp_LOAD, simm, rn, rt, rt2));
+    }
+
+    template<int datasize>
+    ALWAYS_INLINE void ldp(FPRegisterID rt, FPRegisterID rt2, RegisterID rn, int simm = 0)
     {
         CHECK_DATASIZE();
-        insn(loadStoreRegisterPairOffset(MEMPAIROPSIZE_INT(datasize), false, MemOp_LOAD, pimm, rn, rt, rt2));
+        insn(loadStoreRegisterPairOffset(MEMPAIROPSIZE_FP(datasize), true, MemOp_LOAD, simm, rn, rt, rt2));
     }
 
     template<int datasize>
-    ALWAYS_INLINE void ldnp(RegisterID rt, RegisterID rt2, RegisterID rn, unsigned pimm = 0)
+    ALWAYS_INLINE void ldnp(FPRegisterID rt, FPRegisterID rt2, RegisterID rn, int simm = 0)
     {
         CHECK_DATASIZE();
-        insn(loadStoreRegisterPairNonTemporal(MEMPAIROPSIZE_INT(datasize), false, MemOp_LOAD, pimm, rn, rt, rt2));
+        insn(loadStoreRegisterPairNonTemporal(MEMPAIROPSIZE_FP(datasize), true, MemOp_LOAD, simm, rn, rt, rt2));
     }
 
     template<int datasize>
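For illustration (a sketch assuming the header's ARM64Registers names and the PairPostIndex wrapper; not part of this patch), the new FP overloads can be driven like the integer ones:

    // Emits: ldp  d0, d1, [sp], #16   (post-index)
    //        ldp  d2, d3, [x0, #16]   (signed immediate offset)
    //        ldnp d4, d5, [x0]        (non-temporal, offset 0)
    assembler.ldp<64>(ARM64Registers::q0, ARM64Registers::q1, ARM64Registers::sp, PairPostIndex(16));
    assembler.ldp<64>(ARM64Registers::q2, ARM64Registers::q3, ARM64Registers::x0, 16);
    assembler.ldnp<64>(ARM64Registers::q4, ARM64Registers::q5, ARM64Registers::x0);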
@@ -1740,6 +1782,18 @@ class ARM64Assembler {
         smaddl(rd, rn, rm, ARM64Registers::zr);
     }
 
+    template<int datasize>
+    ALWAYS_INLINE static bool isValidSTPImm(int immediate)
+    {
+        return isValidLDPImm<datasize>(immediate);
+    }
+
+    template<int datasize>
+    ALWAYS_INLINE static bool isValidSTPFPImm(int immediate)
+    {
+        return isValidLDPFPImm<datasize>(immediate);
+    }
+
     template<int datasize>
     ALWAYS_INLINE void stp(RegisterID rt, RegisterID rt2, RegisterID rn, PairPostIndex simm)
     {
@@ -1755,17 +1809,45 @@ class ARM64Assembler {
     }
 
     template<int datasize>
-    ALWAYS_INLINE void stp(RegisterID rt, RegisterID rt2, RegisterID rn, unsigned pimm = 0)
+    ALWAYS_INLINE void stp(RegisterID rt, RegisterID rt2, RegisterID rn, int simm = 0)
+    {
+        CHECK_DATASIZE();
+        insn(loadStoreRegisterPairOffset(MEMPAIROPSIZE_INT(datasize), false, MemOp_STORE, simm, rn, rt, rt2));
+    }
+
+    template<int datasize>
+    ALWAYS_INLINE void stnp(RegisterID rt, RegisterID rt2, RegisterID rn, int simm = 0)
+    {
+        CHECK_DATASIZE();
+        insn(loadStoreRegisterPairNonTemporal(MEMPAIROPSIZE_INT(datasize), false, MemOp_STORE, simm, rn, rt, rt2));
+    }
+
+    template<int datasize>
+    ALWAYS_INLINE void stp(FPRegisterID rt, FPRegisterID rt2, RegisterID rn, PairPostIndex simm)
+    {
+        CHECK_DATASIZE();
+        insn(loadStoreRegisterPairPostIndex(MEMPAIROPSIZE_FP(datasize), true, MemOp_STORE, simm, rn, rt, rt2));
+    }
+
+    template<int datasize>
+    ALWAYS_INLINE void stp(FPRegisterID rt, FPRegisterID rt2, RegisterID rn, PairPreIndex simm)
+    {
+        CHECK_DATASIZE();
+        insn(loadStoreRegisterPairPreIndex(MEMPAIROPSIZE_FP(datasize), true, MemOp_STORE, simm, rn, rt, rt2));
+    }
+
+    template<int datasize>
+    ALWAYS_INLINE void stp(FPRegisterID rt, FPRegisterID rt2, RegisterID rn, int simm = 0)
     {
         CHECK_DATASIZE();
-        insn(loadStoreRegisterPairOffset(MEMPAIROPSIZE_INT(datasize), false, MemOp_STORE, pimm, rn, rt, rt2));
+        insn(loadStoreRegisterPairOffset(MEMPAIROPSIZE_FP(datasize), true, MemOp_STORE, simm, rn, rt, rt2));
     }
 
     template<int datasize>
-    ALWAYS_INLINE void stnp(RegisterID rt, RegisterID rt2, RegisterID rn, unsigned pimm = 0)
+    ALWAYS_INLINE void stnp(FPRegisterID rt, FPRegisterID rt2, RegisterID rn, int simm = 0)
     {
         CHECK_DATASIZE();
-        insn(loadStoreRegisterPairNonTemporal(MEMPAIROPSIZE_INT(datasize), false, MemOp_STORE, pimm, rn, rt, rt2));
+        insn(loadStoreRegisterPairNonTemporal(MEMPAIROPSIZE_FP(datasize), true, MemOp_STORE, simm, rn, rt, rt2));
     }
 
     template<int datasize>
@@ -3541,6 +3623,7 @@ class ARM64Assembler {
         ASSERT(opc == (opc & 1)); // Only load or store, load signed 64 is handled via size.
         ASSERT(V || (size != MemPairOp_LoadSigned_32) || (opc == MemOp_LOAD)); // There isn't an integer store signed.
         unsigned immedShiftAmount = memPairOffsetShift(V, size);
+        RELEASE_ASSERT(isValidSignedImm7(immediate, immedShiftAmount));
         int imm7 = immediate >> immedShiftAmount;
         ASSERT((imm7 << immedShiftAmount) == immediate && isInt<7>(imm7));
         return (0x28800000 | size << 30 | V << 26 | opc << 22 | (imm7 & 0x7f) << 15 | rt2 << 10 | xOrSp(rn) << 5 | rt);
@@ -3572,6 +3655,7 @@ class ARM64Assembler {
         ASSERT(opc == (opc & 1)); // Only load or store, load signed 64 is handled via size.
         ASSERT(V || (size != MemPairOp_LoadSigned_32) || (opc == MemOp_LOAD)); // There isn't an integer store signed.
         unsigned immedShiftAmount = memPairOffsetShift(V, size);
+        RELEASE_ASSERT(isValidSignedImm7(immediate, immedShiftAmount));
         int imm7 = immediate >> immedShiftAmount;
         ASSERT((imm7 << immedShiftAmount) == immediate && isInt<7>(imm7));
         return (0x29800000 | size << 30 | V << 26 | opc << 22 | (imm7 & 0x7f) << 15 | rt2 << 10 | xOrSp(rn) << 5 | rt);
@@ -3589,6 +3673,7 @@ class ARM64Assembler {
         ASSERT(opc == (opc & 1)); // Only load or store, load signed 64 is handled via size.
         ASSERT(V || (size != MemPairOp_LoadSigned_32) || (opc == MemOp_LOAD)); // There isn't an integer store signed.
         unsigned immedShiftAmount = memPairOffsetShift(V, size);
+        RELEASE_ASSERT(isValidSignedImm7(immediate, immedShiftAmount));
         int imm7 = immediate >> immedShiftAmount;
         ASSERT((imm7 << immedShiftAmount) == immediate && isInt<7>(imm7));
         return (0x29000000 | size << 30 | V << 26 | opc << 22 | (imm7 & 0x7f) << 15 | rt2 << 10 | xOrSp(rn) << 5 | rt);
@@ -3606,6 +3691,7 @@ class ARM64Assembler {
         ASSERT(opc == (opc & 1)); // Only load or store, load signed 64 is handled via size.
         ASSERT(V || (size != MemPairOp_LoadSigned_32) || (opc == MemOp_LOAD)); // There isn't an integer store signed.
         unsigned immedShiftAmount = memPairOffsetShift(V, size);
+        RELEASE_ASSERT(isValidSignedImm7(immediate, immedShiftAmount));
         int imm7 = immediate >> immedShiftAmount;
         ASSERT((imm7 << immedShiftAmount) == immediate && isInt<7>(imm7));
         return (0x28000000 | size << 30 | V << 26 | opc << 22 | (imm7 & 0x7f) << 15 | rt2 << 10 | xOrSp(rn) << 5 | rt);
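As a reference for the offsets these RELEASE_ASSERTs accept: the pair immediate is a signed 7-bit field scaled by the access size, so for 64-bit pairs (shift 3) valid offsets are multiples of 8 in [-512, 504], and for 32-bit pairs (shift 2) multiples of 4 in [-256, 252]. A standalone sketch mirroring the intent of isValidSignedImm7 (which is defined elsewhere in this header, not shown in this diff):

    // Standalone sketch, not the header's actual helper.
    static bool fitsScaledSignedImm7(int offset, unsigned shift)
    {
        int imm7 = offset >> shift;          // scale down by the per-element size
        return (imm7 << shift) == offset     // offset was a multiple of (1 << shift)
            && imm7 >= -64 && imm7 <= 63;    // fits a signed 7-bit field
    }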