
Commit 193ccc4

Sandhya Viswanathan authored and Derek White committed
8274527: Minimal VM build fails after JDK-8273459
Backport-of: a8edd1b360d4e5f35aff371a91fda42eeb00d395
1 parent: ccbf2a9 · commit: 193ccc4

4 files changed: 22 additions and 17 deletions


src/hotspot/cpu/x86/macroAssembler_x86.cpp

Lines changed: 5 additions & 1 deletion

@@ -1178,6 +1178,10 @@ void MacroAssembler::align64() {
   align(64, (unsigned long long) pc());
 }
 
+void MacroAssembler::align32() {
+  align(32, (unsigned long long) pc());
+}
+
 void MacroAssembler::align(int modulus) {
   // 8273459: Ensure alignment is possible with current segment alignment
   assert(modulus <= CodeEntryAlignment, "Alignment must be <= CodeEntryAlignment");
@@ -7123,7 +7127,7 @@ void MacroAssembler::kernel_crc32(Register crc, Register buf, Register len, Regi
   // 128 bits per each of 4 parallel streams.
   movdqu(xmm0, ExternalAddress(StubRoutines::x86::crc_by128_masks_addr() + 32));
 
-  align(32);
+  align32();
   BIND(L_fold_512b_loop);
   fold_128bit_crc32(xmm1, xmm0, xmm5, buf, 0);
   fold_128bit_crc32(xmm2, xmm0, xmm5, buf, 16);
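
For context, the first hunk in this file carries the substance of the fix: the one-argument align(int modulus) now asserts modulus <= CodeEntryAlignment (the JDK-8273459 check visible in the context lines), while the new align32() delegates to the two-argument, target-relative overload, which performs no such check. The standalone C++ sketch below models that difference outside HotSpot; the CodeEntryAlignment value of 16 and the simplified padding loop are illustrative assumptions, not code taken from the VM.

// Hypothetical, self-contained model of the two alignment paths touched by
// this commit. Names mirror the diff; this is not HotSpot code.
#include <cassert>
#include <cstdint>
#include <cstdio>

static int      CodeEntryAlignment = 16;   // assumed: a configuration whose default is below 32
static uint64_t pc                 = 0x1005;

// Two-argument form: pads until the target address is a multiple of the
// modulus; it never consults CodeEntryAlignment.
static void align(int modulus, uint64_t target) {
  while (target % modulus != 0) {
    ++target;
    ++pc;                                  // stands in for emitting a one-byte nop
  }
}

// One-argument form: carries the CodeEntryAlignment assert shown in the diff,
// so align(32) can fire whenever the configured alignment is below 32.
static void align(int modulus) {
  assert(modulus <= CodeEntryAlignment && "Alignment must be <= CodeEntryAlignment");
  align(modulus, pc);
}

// Dedicated helper mirroring the new MacroAssembler::align32(): it goes
// straight to the target-relative overload.
static void align32() {
  align(32, pc);
}

int main() {
  align32();        // safe in any configuration; CodeEntryAlignment is never checked
  align(16);        // also fine here: 16 <= CodeEntryAlignment
  // align(32);     // would trip the assert in this model, since 32 > 16
  std::printf("pc after alignment: 0x%llx\n", (unsigned long long) pc);
  return 0;
}

Under this model the call-site changes in the remaining files read naturally: every fixed 32-byte alignment request moves from align(32) to align32(), so it no longer depends on the configured CodeEntryAlignment.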

src/hotspot/cpu/x86/macroAssembler_x86.hpp

Lines changed: 1 addition & 0 deletions

@@ -194,6 +194,7 @@ class MacroAssembler: public Assembler {
   void incrementq(AddressLiteral dst);
 
   // Alignment
+  void align32();
   void align64();
   void align(int modulus);
   void align(int modulus, int target);

src/hotspot/cpu/x86/macroAssembler_x86_adler.cpp

Lines changed: 2 additions & 2 deletions

@@ -80,7 +80,7 @@ void MacroAssembler::updateBytesAdler32(Register init_d, Register data, Register
   cmpptr(data, end);
   jcc(Assembler::aboveEqual, SKIP_LOOP_1A);
 
-  align(32);
+  align32();
   bind(SLOOP1A);
   vbroadcastf128(ydata, Address(data, 0), Assembler::AVX_256bit);
   addptr(data, CHUNKSIZE);
@@ -178,7 +178,7 @@ void MacroAssembler::updateBytesAdler32(Register init_d, Register data, Register
   movdl(rax, xb);
   addl(b_d, rax);
 
-  align(32);
+  align32();
   bind(FINAL_LOOP);
   movzbl(rax, Address(data, 0)); //movzx eax, byte[data]
   addl(a_d, rax);

src/hotspot/cpu/x86/stubGenerator_x86_64.cpp

Lines changed: 14 additions & 14 deletions

@@ -1484,7 +1484,7 @@ class StubGenerator: public StubCodeGenerator {
   __ subq(temp1, loop_size[shift]);
 
   // Main loop with aligned copy block size of 192 bytes at 32 byte granularity.
-  __ align(32);
+  __ align32();
   __ BIND(L_main_loop);
   __ copy64_avx(to, from, temp4, xmm1, false, shift, 0);
   __ copy64_avx(to, from, temp4, xmm1, false, shift, 64);
@@ -1551,7 +1551,7 @@ class StubGenerator: public StubCodeGenerator {
 
   // Main loop with aligned copy block size of 192 bytes at
   // 64 byte copy granularity.
-  __ align(32);
+  __ align32();
   __ BIND(L_main_loop_64bytes);
   __ copy64_avx(to, from, temp4, xmm1, false, shift, 0 , true);
   __ copy64_avx(to, from, temp4, xmm1, false, shift, 64, true);
@@ -1691,7 +1691,7 @@ class StubGenerator: public StubCodeGenerator {
   __ BIND(L_main_pre_loop);
 
   // Main loop with aligned copy block size of 192 bytes at 32 byte granularity.
-  __ align(32);
+  __ align32();
   __ BIND(L_main_loop);
   __ copy64_avx(to, from, temp1, xmm1, true, shift, -64);
   __ copy64_avx(to, from, temp1, xmm1, true, shift, -128);
@@ -1724,7 +1724,7 @@ class StubGenerator: public StubCodeGenerator {
 
   // Main loop with aligned copy block size of 192 bytes at
   // 64 byte copy granularity.
-  __ align(32);
+  __ align32();
   __ BIND(L_main_loop_64bytes);
   __ copy64_avx(to, from, temp1, xmm1, true, shift, -64 , true);
   __ copy64_avx(to, from, temp1, xmm1, true, shift, -128, true);
@@ -4274,7 +4274,7 @@ class StubGenerator: public StubCodeGenerator {
 
   //Mask for byte-swapping a couple of qwords in an XMM register using (v)pshufb.
   address generate_pshuffle_byte_flip_mask_sha512() {
-    __ align(32);
+    __ align32();
     StubCodeMark mark(this, "StubRoutines", "pshuffle_byte_flip_mask_sha512");
     address start = __ pc();
     if (VM_Version::supports_avx2()) {
@@ -5307,7 +5307,7 @@ address generate_avx_ghash_processBlocks() {
 
   address base64_avx2_shuffle_addr()
   {
-    __ align(32);
+    __ align32();
     StubCodeMark mark(this, "StubRoutines", "avx2_shuffle_base64");
     address start = __ pc();
     __ emit_data64(0x0809070805060405, relocInfo::none);
@@ -5319,7 +5319,7 @@ address generate_avx_ghash_processBlocks() {
 
   address base64_avx2_input_mask_addr()
   {
-    __ align(32);
+    __ align32();
     StubCodeMark mark(this, "StubRoutines", "avx2_input_mask_base64");
     address start = __ pc();
     __ emit_data64(0x8000000000000000, relocInfo::none);
@@ -5331,7 +5331,7 @@ address generate_avx_ghash_processBlocks() {
 
   address base64_avx2_lut_addr()
   {
-    __ align(32);
+    __ align32();
    StubCodeMark mark(this, "StubRoutines", "avx2_lut_base64");
     address start = __ pc();
     __ emit_data64(0xfcfcfcfcfcfc4741, relocInfo::none);
@@ -5436,7 +5436,7 @@ address generate_avx_ghash_processBlocks() {
   __ evmovdquq(xmm2, Address(encode_table, 0), Assembler::AVX_512bit);
   __ evpbroadcastq(xmm1, rax, Assembler::AVX_512bit);
 
-  __ align(32);
+  __ align32();
   __ BIND(L_vbmiLoop);
 
   __ vpermb(xmm0, xmm3, Address(source, start_offset), Assembler::AVX_512bit);
@@ -5636,7 +5636,7 @@ address generate_avx_ghash_processBlocks() {
   __ cmpl(length, 31);
   __ jcc(Assembler::belowEqual, L_process3);
 
-  __ align(32);
+  __ align32();
   __ BIND(L_32byteLoop);
 
   // Get next 32 bytes
@@ -6083,7 +6083,7 @@ address generate_avx_ghash_processBlocks() {
   __ evmovdquq(join12, ExternalAddress(StubRoutines::x86::base64_vbmi_join_1_2_addr()), Assembler::AVX_512bit, r13);
   __ evmovdquq(join23, ExternalAddress(StubRoutines::x86::base64_vbmi_join_2_3_addr()), Assembler::AVX_512bit, r13);
 
-  __ align(32);
+  __ align32();
   __ BIND(L_process256);
   // Grab input data
   __ evmovdquq(input0, Address(source, start_offset, Address::times_1, 0x00), Assembler::AVX_512bit);
@@ -6165,7 +6165,7 @@ address generate_avx_ghash_processBlocks() {
   __ cmpl(length, 63);
   __ jcc(Assembler::lessEqual, L_finalBit);
 
-  __ align(32);
+  __ align32();
   __ BIND(L_process64Loop);
 
   // Handle first 64-byte block
@@ -6301,7 +6301,7 @@ address generate_avx_ghash_processBlocks() {
   __ shrq(rax, 1);
   __ jmp(L_donePadding);
 
-  __ align(32);
+  __ align32();
   __ BIND(L_bruteForce);
   } // End of if(avx512_vbmi)
 
@@ -6345,7 +6345,7 @@ address generate_avx_ghash_processBlocks() {
 
   __ jmp(L_bottomLoop);
 
-  __ align(32);
+  __ align32();
   __ BIND(L_forceLoop);
   __ shll(byte1, 18);
   __ shll(byte2, 12);
