Skip to content
Permalink
Browse files

[lldb] [lit] Fix Register tests to reference arrays via %0 and %1

Fix the Register tests so that memory accesses to the arrays go through the
%0 and %1 input operands, rather than referencing %rax and %rbx directly.
This fixes test build failures on 32-bit x86.

llvm-svn: 360148
  • Loading branch information...
mgorny committed May 7, 2019
1 parent bb6e7b3 commit 25f64629761f583324c716aab319cf6298aed45b
@@ -28,23 +28,23 @@ int main() {
};

asm volatile(
"movq 0x00(%%rax), %%r8\n\t"
"movq 0x08(%%rax), %%r9\n\t"
"movq 0x10(%%rax), %%r10\n\t"
"movq 0x18(%%rax), %%r11\n\t"
"movq 0x20(%%rax), %%r12\n\t"
"movq 0x28(%%rax), %%r13\n\t"
"movq 0x30(%%rax), %%r14\n\t"
"movq 0x38(%%rax), %%r15\n\t"
"movq 0x00(%0), %%r8\n\t"
"movq 0x08(%0), %%r9\n\t"
"movq 0x10(%0), %%r10\n\t"
"movq 0x18(%0), %%r11\n\t"
"movq 0x20(%0), %%r12\n\t"
"movq 0x28(%0), %%r13\n\t"
"movq 0x30(%0), %%r14\n\t"
"movq 0x38(%0), %%r15\n\t"
"\n\t"
"movaps 0x00(%%rbx), %%xmm8\n\t"
"movaps 0x10(%%rbx), %%xmm9\n\t"
"movaps 0x20(%%rbx), %%xmm10\n\t"
"movaps 0x30(%%rbx), %%xmm11\n\t"
"movaps 0x40(%%rbx), %%xmm12\n\t"
"movaps 0x50(%%rbx), %%xmm13\n\t"
"movaps 0x60(%%rbx), %%xmm14\n\t"
"movaps 0x70(%%rbx), %%xmm15\n\t"
"movaps 0x00(%1), %%xmm8\n\t"
"movaps 0x10(%1), %%xmm9\n\t"
"movaps 0x20(%1), %%xmm10\n\t"
"movaps 0x30(%1), %%xmm11\n\t"
"movaps 0x40(%1), %%xmm12\n\t"
"movaps 0x50(%1), %%xmm13\n\t"
"movaps 0x60(%1), %%xmm14\n\t"
"movaps 0x70(%1), %%xmm15\n\t"
"\n\t"
"int3\n\t"
:
@@ -36,29 +36,27 @@ int main() {
"\n\t"
"int3\n\t"
"\n\t"
"lea %0, %%rbx\n\t"
"movq %%r8, 0x00(%%rbx)\n\t"
"movq %%r9, 0x08(%%rbx)\n\t"
"movq %%r10, 0x10(%%rbx)\n\t"
"movq %%r11, 0x18(%%rbx)\n\t"
"movq %%r12, 0x20(%%rbx)\n\t"
"movq %%r13, 0x28(%%rbx)\n\t"
"movq %%r14, 0x30(%%rbx)\n\t"
"movq %%r15, 0x38(%%rbx)\n\t"
"movq %%r8, 0x00(%0)\n\t"
"movq %%r9, 0x08(%0)\n\t"
"movq %%r10, 0x10(%0)\n\t"
"movq %%r11, 0x18(%0)\n\t"
"movq %%r12, 0x20(%0)\n\t"
"movq %%r13, 0x28(%0)\n\t"
"movq %%r14, 0x30(%0)\n\t"
"movq %%r15, 0x38(%0)\n\t"
"\n\t"
"lea %1, %%rbx\n\t"
"movaps %%xmm8, 0x00(%%rbx)\n\t"
"movaps %%xmm9, 0x10(%%rbx)\n\t"
"movaps %%xmm10, 0x20(%%rbx)\n\t"
"movaps %%xmm11, 0x30(%%rbx)\n\t"
"movaps %%xmm12, 0x40(%%rbx)\n\t"
"movaps %%xmm13, 0x50(%%rbx)\n\t"
"movaps %%xmm14, 0x60(%%rbx)\n\t"
"movaps %%xmm15, 0x70(%%rbx)\n\t"
: "=m"(r64), "=m"(xmm)
: "m"(xmm_fill)
: "%rbx", "%mm0", "%mm1", "%mm2", "%mm3", "%mm4", "%mm5", "%mm6", "%mm7",
"%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "%xmm5", "%xmm6", "%xmm7"
"movaps %%xmm8, 0x00(%1)\n\t"
"movaps %%xmm9, 0x10(%1)\n\t"
"movaps %%xmm10, 0x20(%1)\n\t"
"movaps %%xmm11, 0x30(%1)\n\t"
"movaps %%xmm12, 0x40(%1)\n\t"
"movaps %%xmm13, 0x50(%1)\n\t"
"movaps %%xmm14, 0x60(%1)\n\t"
"movaps %%xmm15, 0x70(%1)\n\t"
:
: "a"(r64), "b"(xmm), "m"(xmm_fill)
: "%mm0", "%mm1", "%mm2", "%mm3", "%mm4", "%mm5", "%mm6", "%mm7", "%xmm0",
"%xmm1", "%xmm2", "%xmm3", "%xmm4", "%xmm5", "%xmm6", "%xmm7"
);

for (int i = 0; i < 8; ++i)
@@ -28,23 +28,23 @@ int main() {
};

asm volatile(
"movq 0x00(%%rax), %%mm0\n\t"
"movq 0x08(%%rax), %%mm1\n\t"
"movq 0x10(%%rax), %%mm2\n\t"
"movq 0x18(%%rax), %%mm3\n\t"
"movq 0x20(%%rax), %%mm4\n\t"
"movq 0x28(%%rax), %%mm5\n\t"
"movq 0x30(%%rax), %%mm6\n\t"
"movq 0x38(%%rax), %%mm7\n\t"
"movq 0x00(%0), %%mm0\n\t"
"movq 0x08(%0), %%mm1\n\t"
"movq 0x10(%0), %%mm2\n\t"
"movq 0x18(%0), %%mm3\n\t"
"movq 0x20(%0), %%mm4\n\t"
"movq 0x28(%0), %%mm5\n\t"
"movq 0x30(%0), %%mm6\n\t"
"movq 0x38(%0), %%mm7\n\t"
"\n\t"
"movaps 0x00(%%rbx), %%xmm0\n\t"
"movaps 0x10(%%rbx), %%xmm1\n\t"
"movaps 0x20(%%rbx), %%xmm2\n\t"
"movaps 0x30(%%rbx), %%xmm3\n\t"
"movaps 0x40(%%rbx), %%xmm4\n\t"
"movaps 0x50(%%rbx), %%xmm5\n\t"
"movaps 0x60(%%rbx), %%xmm6\n\t"
"movaps 0x70(%%rbx), %%xmm7\n\t"
"movaps 0x00(%1), %%xmm0\n\t"
"movaps 0x10(%1), %%xmm1\n\t"
"movaps 0x20(%1), %%xmm2\n\t"
"movaps 0x30(%1), %%xmm3\n\t"
"movaps 0x40(%1), %%xmm4\n\t"
"movaps 0x50(%1), %%xmm5\n\t"
"movaps 0x60(%1), %%xmm6\n\t"
"movaps 0x70(%1), %%xmm7\n\t"
"\n\t"
"int3\n\t"
:
@@ -36,29 +36,27 @@ int main() {
"\n\t"
"int3\n\t"
"\n\t"
"lea %0, %%rbx\n\t"
"movq %%mm0, 0x00(%%rbx)\n\t"
"movq %%mm1, 0x08(%%rbx)\n\t"
"movq %%mm2, 0x10(%%rbx)\n\t"
"movq %%mm3, 0x18(%%rbx)\n\t"
"movq %%mm4, 0x20(%%rbx)\n\t"
"movq %%mm5, 0x28(%%rbx)\n\t"
"movq %%mm6, 0x30(%%rbx)\n\t"
"movq %%mm7, 0x38(%%rbx)\n\t"
"movq %%mm0, 0x00(%0)\n\t"
"movq %%mm1, 0x08(%0)\n\t"
"movq %%mm2, 0x10(%0)\n\t"
"movq %%mm3, 0x18(%0)\n\t"
"movq %%mm4, 0x20(%0)\n\t"
"movq %%mm5, 0x28(%0)\n\t"
"movq %%mm6, 0x30(%0)\n\t"
"movq %%mm7, 0x38(%0)\n\t"
"\n\t"
"lea %1, %%rbx\n\t"
"movaps %%xmm0, 0x00(%%rbx)\n\t"
"movaps %%xmm1, 0x10(%%rbx)\n\t"
"movaps %%xmm2, 0x20(%%rbx)\n\t"
"movaps %%xmm3, 0x30(%%rbx)\n\t"
"movaps %%xmm4, 0x40(%%rbx)\n\t"
"movaps %%xmm5, 0x50(%%rbx)\n\t"
"movaps %%xmm6, 0x60(%%rbx)\n\t"
"movaps %%xmm7, 0x70(%%rbx)\n\t"
: "=m"(mm), "=m"(xmm)
: "m"(xmm_fill)
: "%rbx", "%mm0", "%mm1", "%mm2", "%mm3", "%mm4", "%mm5", "%mm6", "%mm7",
"%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "%xmm5", "%xmm6", "%xmm7"
"movaps %%xmm0, 0x00(%1)\n\t"
"movaps %%xmm1, 0x10(%1)\n\t"
"movaps %%xmm2, 0x20(%1)\n\t"
"movaps %%xmm3, 0x30(%1)\n\t"
"movaps %%xmm4, 0x40(%1)\n\t"
"movaps %%xmm5, 0x50(%1)\n\t"
"movaps %%xmm6, 0x60(%1)\n\t"
"movaps %%xmm7, 0x70(%1)\n\t"
:
: "a"(mm), "b"(xmm), "m"(xmm_fill)
: "%mm0", "%mm1", "%mm2", "%mm3", "%mm4", "%mm5", "%mm6", "%mm7", "%xmm0",
"%xmm1", "%xmm2", "%xmm3", "%xmm4", "%xmm5", "%xmm6", "%xmm7"
);

for (int i = 0; i < 8; ++i)
@@ -43,32 +43,31 @@ int main() {
};

asm volatile(
"vmovaps 0x000(%%rbx), %%ymm0\n\t"
"vmovaps 0x020(%%rbx), %%ymm1\n\t"
"vmovaps 0x040(%%rbx), %%ymm2\n\t"
"vmovaps 0x060(%%rbx), %%ymm3\n\t"
"vmovaps 0x080(%%rbx), %%ymm4\n\t"
"vmovaps 0x0A0(%%rbx), %%ymm5\n\t"
"vmovaps 0x0C0(%%rbx), %%ymm6\n\t"
"vmovaps 0x0E0(%%rbx), %%ymm7\n\t"
"vmovaps 0x000(%0), %%ymm0\n\t"
"vmovaps 0x020(%0), %%ymm1\n\t"
"vmovaps 0x040(%0), %%ymm2\n\t"
"vmovaps 0x060(%0), %%ymm3\n\t"
"vmovaps 0x080(%0), %%ymm4\n\t"
"vmovaps 0x0A0(%0), %%ymm5\n\t"
"vmovaps 0x0C0(%0), %%ymm6\n\t"
"vmovaps 0x0E0(%0), %%ymm7\n\t"
#if defined(__x86_64__) || defined(_M_X64)
"vmovaps 0x100(%%rbx), %%ymm8\n\t"
"vmovaps 0x120(%%rbx), %%ymm9\n\t"
"vmovaps 0x140(%%rbx), %%ymm10\n\t"
"vmovaps 0x160(%%rbx), %%ymm11\n\t"
"vmovaps 0x180(%%rbx), %%ymm12\n\t"
"vmovaps 0x1A0(%%rbx), %%ymm13\n\t"
"vmovaps 0x1C0(%%rbx), %%ymm14\n\t"
"vmovaps 0x1E0(%%rbx), %%ymm15\n\t"
"vmovaps 0x100(%0), %%ymm8\n\t"
"vmovaps 0x120(%0), %%ymm9\n\t"
"vmovaps 0x140(%0), %%ymm10\n\t"
"vmovaps 0x160(%0), %%ymm11\n\t"
"vmovaps 0x180(%0), %%ymm12\n\t"
"vmovaps 0x1A0(%0), %%ymm13\n\t"
"vmovaps 0x1C0(%0), %%ymm14\n\t"
"vmovaps 0x1E0(%0), %%ymm15\n\t"
#endif
"\n\t"
"int3\n\t"
:
: "b"(ymm)
: "%ymm0", "%ymm1", "%ymm2", "%ymm3", "%ymm4", "%ymm5", "%ymm6", "%ymm7"
#if defined(__x86_64__) || defined(_M_X64)
,
"%ymm8", "%ymm9", "%ymm10", "%ymm11", "%ymm12", "%ymm13", "%ymm14",
, "%ymm8", "%ymm9", "%ymm10", "%ymm11", "%ymm12", "%ymm13", "%ymm14",
"%ymm15"
#endif
);
@@ -37,33 +37,31 @@ int main() {
"\n\t"
"int3\n\t"
"\n\t"
"lea %0, %%rbx\n\t"
"vmovaps %%ymm0, 0x000(%%rbx)\n\t"
"vmovaps %%ymm1, 0x020(%%rbx)\n\t"
"vmovaps %%ymm2, 0x040(%%rbx)\n\t"
"vmovaps %%ymm3, 0x060(%%rbx)\n\t"
"vmovaps %%ymm4, 0x080(%%rbx)\n\t"
"vmovaps %%ymm5, 0x0A0(%%rbx)\n\t"
"vmovaps %%ymm6, 0x0C0(%%rbx)\n\t"
"vmovaps %%ymm7, 0x0E0(%%rbx)\n\t"
"vmovaps %%ymm0, 0x000(%0)\n\t"
"vmovaps %%ymm1, 0x020(%0)\n\t"
"vmovaps %%ymm2, 0x040(%0)\n\t"
"vmovaps %%ymm3, 0x060(%0)\n\t"
"vmovaps %%ymm4, 0x080(%0)\n\t"
"vmovaps %%ymm5, 0x0A0(%0)\n\t"
"vmovaps %%ymm6, 0x0C0(%0)\n\t"
"vmovaps %%ymm7, 0x0E0(%0)\n\t"
#if defined(__x86_64__) || defined(_M_X64)
"vmovaps %%ymm8, 0x100(%%rbx)\n\t"
"vmovaps %%ymm9, 0x120(%%rbx)\n\t"
"vmovaps %%ymm10, 0x140(%%rbx)\n\t"
"vmovaps %%ymm11, 0x160(%%rbx)\n\t"
"vmovaps %%ymm12, 0x180(%%rbx)\n\t"
"vmovaps %%ymm13, 0x1A0(%%rbx)\n\t"
"vmovaps %%ymm14, 0x1C0(%%rbx)\n\t"
"vmovaps %%ymm15, 0x1E0(%%rbx)\n\t"
"vmovaps %%ymm8, 0x100(%0)\n\t"
"vmovaps %%ymm9, 0x120(%0)\n\t"
"vmovaps %%ymm10, 0x140(%0)\n\t"
"vmovaps %%ymm11, 0x160(%0)\n\t"
"vmovaps %%ymm12, 0x180(%0)\n\t"
"vmovaps %%ymm13, 0x1A0(%0)\n\t"
"vmovaps %%ymm14, 0x1C0(%0)\n\t"
"vmovaps %%ymm15, 0x1E0(%0)\n\t"
#endif
: "=m"(ymm)
: "m"(ymm_fill)
: "%ymm0", "%ymm1", "%ymm2", "%ymm3", "%ymm4", "%ymm5", "%ymm6", "%ymm7",
:
: "b"(ymm), "m"(ymm_fill)
: "%ymm0", "%ymm1", "%ymm2", "%ymm3", "%ymm4", "%ymm5", "%ymm6", "%ymm7"
#if defined(__x86_64__) || defined(_M_X64)
"%ymm8", "%ymm9", "%ymm10", "%ymm11", "%ymm12", "%ymm13", "%ymm14",
"%ymm15",
, "%ymm8", "%ymm9", "%ymm10", "%ymm11", "%ymm12", "%ymm13", "%ymm14",
"%ymm15"
#endif
"%rbx"
);

for (int i = 0; i < 16; ++i) {
@@ -139,47 +139,47 @@ int main() {
};

asm volatile(
"vmovaps 0x000(%%rbx), %%zmm0\n\t"
"vmovaps 0x040(%%rbx), %%zmm1\n\t"
"vmovaps 0x080(%%rbx), %%zmm2\n\t"
"vmovaps 0x0C0(%%rbx), %%zmm3\n\t"
"vmovaps 0x100(%%rbx), %%zmm4\n\t"
"vmovaps 0x140(%%rbx), %%zmm5\n\t"
"vmovaps 0x180(%%rbx), %%zmm6\n\t"
"vmovaps 0x1C0(%%rbx), %%zmm7\n\t"
"vmovaps 0x000(%0), %%zmm0\n\t"
"vmovaps 0x040(%0), %%zmm1\n\t"
"vmovaps 0x080(%0), %%zmm2\n\t"
"vmovaps 0x0C0(%0), %%zmm3\n\t"
"vmovaps 0x100(%0), %%zmm4\n\t"
"vmovaps 0x140(%0), %%zmm5\n\t"
"vmovaps 0x180(%0), %%zmm6\n\t"
"vmovaps 0x1C0(%0), %%zmm7\n\t"
#if defined(__x86_64__) || defined(_M_X64)
"vmovaps 0x200(%%rbx), %%zmm8\n\t"
"vmovaps 0x240(%%rbx), %%zmm9\n\t"
"vmovaps 0x280(%%rbx), %%zmm10\n\t"
"vmovaps 0x2C0(%%rbx), %%zmm11\n\t"
"vmovaps 0x300(%%rbx), %%zmm12\n\t"
"vmovaps 0x340(%%rbx), %%zmm13\n\t"
"vmovaps 0x380(%%rbx), %%zmm14\n\t"
"vmovaps 0x3C0(%%rbx), %%zmm15\n\t"
"vmovaps 0x400(%%rbx), %%zmm16\n\t"
"vmovaps 0x440(%%rbx), %%zmm17\n\t"
"vmovaps 0x480(%%rbx), %%zmm18\n\t"
"vmovaps 0x4C0(%%rbx), %%zmm19\n\t"
"vmovaps 0x500(%%rbx), %%zmm20\n\t"
"vmovaps 0x540(%%rbx), %%zmm21\n\t"
"vmovaps 0x580(%%rbx), %%zmm22\n\t"
"vmovaps 0x5C0(%%rbx), %%zmm23\n\t"
"vmovaps 0x600(%%rbx), %%zmm24\n\t"
"vmovaps 0x640(%%rbx), %%zmm25\n\t"
"vmovaps 0x680(%%rbx), %%zmm26\n\t"
"vmovaps 0x6C0(%%rbx), %%zmm27\n\t"
"vmovaps 0x700(%%rbx), %%zmm28\n\t"
"vmovaps 0x740(%%rbx), %%zmm29\n\t"
"vmovaps 0x780(%%rbx), %%zmm30\n\t"
"vmovaps 0x7C0(%%rbx), %%zmm31\n\t"
"vmovaps 0x200(%0), %%zmm8\n\t"
"vmovaps 0x240(%0), %%zmm9\n\t"
"vmovaps 0x280(%0), %%zmm10\n\t"
"vmovaps 0x2C0(%0), %%zmm11\n\t"
"vmovaps 0x300(%0), %%zmm12\n\t"
"vmovaps 0x340(%0), %%zmm13\n\t"
"vmovaps 0x380(%0), %%zmm14\n\t"
"vmovaps 0x3C0(%0), %%zmm15\n\t"
"vmovaps 0x400(%0), %%zmm16\n\t"
"vmovaps 0x440(%0), %%zmm17\n\t"
"vmovaps 0x480(%0), %%zmm18\n\t"
"vmovaps 0x4C0(%0), %%zmm19\n\t"
"vmovaps 0x500(%0), %%zmm20\n\t"
"vmovaps 0x540(%0), %%zmm21\n\t"
"vmovaps 0x580(%0), %%zmm22\n\t"
"vmovaps 0x5C0(%0), %%zmm23\n\t"
"vmovaps 0x600(%0), %%zmm24\n\t"
"vmovaps 0x640(%0), %%zmm25\n\t"
"vmovaps 0x680(%0), %%zmm26\n\t"
"vmovaps 0x6C0(%0), %%zmm27\n\t"
"vmovaps 0x700(%0), %%zmm28\n\t"
"vmovaps 0x740(%0), %%zmm29\n\t"
"vmovaps 0x780(%0), %%zmm30\n\t"
"vmovaps 0x7C0(%0), %%zmm31\n\t"
#endif
"\n\t"
"int3\n\t"
:
: "b"(zmm)
: "%zmm0", "%zmm1", "%zmm2", "%zmm3", "%zmm4", "%zmm5", "%zmm6", "%zmm7"
#if defined(__x86_64__) || defined(_M_X64)
, "%zmm8", "%zmm9", "%zmm10", "%zmm11", "%zmm12", "%zmm13", "%zmm14",
, "%zmm8", "%zmm9", "%zmm10", "%zmm11", "%zmm12", "%zmm13", "%zmm14",
"%zmm15", "%zmm16", "%zmm17", "%zmm18", "%zmm19", "%zmm20", "%zmm21",
"%zmm22", "%zmm23", "%zmm24", "%zmm25", "%zmm26", "%zmm27", "%zmm28",
"%zmm29", "%zmm30", "%zmm31"

0 comments on commit 25f6462

Please sign in to comment.
You can’t perform that action at this time.