diff --git a/compiler/rustc_codegen_gcc/src/asm.rs b/compiler/rustc_codegen_gcc/src/asm.rs
index f237861b1595a..f8e54512f09c7 100644
--- a/compiler/rustc_codegen_gcc/src/asm.rs
+++ b/compiler/rustc_codegen_gcc/src/asm.rs
@@ -710,6 +710,7 @@ fn reg_class_to_gcc(reg_class: InlineAsmRegClass) -> &'static str {
             PowerPCInlineAsmRegClass::cr
             | PowerPCInlineAsmRegClass::ctr
             | PowerPCInlineAsmRegClass::lr
+            | PowerPCInlineAsmRegClass::vscr
             | PowerPCInlineAsmRegClass::xer,
         ) => {
             unreachable!("clobber-only")
@@ -793,6 +794,7 @@ fn dummy_output_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, reg: InlineAsmRegCl
             PowerPCInlineAsmRegClass::cr
             | PowerPCInlineAsmRegClass::ctr
             | PowerPCInlineAsmRegClass::lr
+            | PowerPCInlineAsmRegClass::vscr
             | PowerPCInlineAsmRegClass::xer,
         ) => {
             unreachable!("clobber-only")
diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs
index 8cd4bdc372789..e1b819bf13cde 100644
--- a/compiler/rustc_codegen_llvm/src/asm.rs
+++ b/compiler/rustc_codegen_llvm/src/asm.rs
@@ -663,6 +663,7 @@ fn reg_to_llvm(reg: InlineAsmRegOrRegClass, layout: Option<&TyAndLayout<'_>>) ->
                 PowerPCInlineAsmRegClass::cr
                 | PowerPCInlineAsmRegClass::ctr
                 | PowerPCInlineAsmRegClass::lr
+                | PowerPCInlineAsmRegClass::vscr
                 | PowerPCInlineAsmRegClass::xer,
             ) => {
                 unreachable!("clobber-only")
@@ -843,6 +844,7 @@ fn dummy_output_type<'ll>(cx: &CodegenCx<'ll, '_>, reg: InlineAsmRegClass) -> &'
             PowerPCInlineAsmRegClass::cr
             | PowerPCInlineAsmRegClass::ctr
             | PowerPCInlineAsmRegClass::lr
+            | PowerPCInlineAsmRegClass::vscr
             | PowerPCInlineAsmRegClass::xer,
         ) => {
             unreachable!("clobber-only")
diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs
index 38718bad9e57e..75a0608d1e672 100644
--- a/compiler/rustc_span/src/symbol.rs
+++ b/compiler/rustc_span/src/symbol.rs
@@ -2441,6 +2441,7 @@ symbols! {
         volatile_store,
         vreg,
         vreg_low16,
+        vscr,
         vsreg,
         vsx,
         vtable_align,
diff --git a/compiler/rustc_target/src/asm/mod.rs b/compiler/rustc_target/src/asm/mod.rs
index cd7d9125d7b50..b429543ddd200 100644
--- a/compiler/rustc_target/src/asm/mod.rs
+++ b/compiler/rustc_target/src/asm/mod.rs
@@ -1265,11 +1265,12 @@ impl InlineAsmClobberAbi {
                     v8, v9, v10, v11, v12, v13, v14, v15,
                     v16, v17, v18, v19,
 
-                    // cr0-cr1, cr5-cr7, ctr, lr, xer
+                    // cr0-cr1, cr5-cr7, ctr, lr, vscr, xer
                     cr0, cr1,
                     cr5, cr6, cr7,
                     ctr,
                     lr,
+                    vscr,
                     xer,
                 }
             },
diff --git a/compiler/rustc_target/src/asm/powerpc.rs b/compiler/rustc_target/src/asm/powerpc.rs
index 09682ee9d4e67..969c37be067c0 100644
--- a/compiler/rustc_target/src/asm/powerpc.rs
+++ b/compiler/rustc_target/src/asm/powerpc.rs
@@ -16,6 +16,7 @@ def_reg_class! {
         cr,
         ctr,
         lr,
+        vscr,
         xer,
     }
 }
@@ -63,7 +64,7 @@ impl PowerPCInlineAsmRegClass {
             Self::vsreg => types! {
                 vsx: F32, F64, VecI8(16), VecI16(8), VecI32(4), VecI64(2), VecF32(4), VecF64(2);
             },
-            Self::cr | Self::ctr | Self::lr | Self::xer => &[],
+            Self::cr | Self::ctr | Self::lr | Self::vscr | Self::xer => &[],
         }
     }
 }
@@ -284,6 +285,7 @@ def_regs! {
         cr7: cr = ["cr7"],
         ctr: ctr = ["ctr"],
         lr: lr = ["lr"],
+        vscr: vscr = ["vscr"],
         xer: xer = ["xer"],
         #error = ["r1", "1", "sp"] =>
             "the stack pointer cannot be used as an operand for inline asm",
@@ -341,6 +343,7 @@ impl PowerPCInlineAsmReg {
             (cr0, "0"), (cr1, "1"), (cr2, "2"), (cr3, "3"), (cr4, "4"), (cr5, "5"), (cr6, "6"), (cr7, "7");
             (ctr, "ctr");
             (lr, "lr");
+            (vscr, "vscr");
             (xer, "xer");
         }
     }
diff --git a/tests/codegen-llvm/asm/powerpc-clobbers.rs b/tests/codegen-llvm/asm/powerpc-clobbers.rs
index ee52727e3c036..f84abb46328ba 100644
--- a/tests/codegen-llvm/asm/powerpc-clobbers.rs
+++ b/tests/codegen-llvm/asm/powerpc-clobbers.rs
@@ -45,6 +45,13 @@ pub unsafe fn xer_clobber() {
     asm!("", out("xer") _, options(nostack, nomem, preserves_flags));
 }
 
+// CHECK-LABEL: @vscr_clobber
+// CHECK: call void asm sideeffect "", "~{vscr}"()
+#[no_mangle]
+pub unsafe fn vscr_clobber() {
+    asm!("", out("vscr") _, options(nostack, nomem, preserves_flags));
+}
+
 // Output format depends on the availability of altivec.
 // CHECK-LABEL: @v0_clobber
 // powerpc: call void asm sideeffect "", "~{v0}"()
@@ -69,10 +76,10 @@ pub unsafe fn vs32_clobber() {
     asm!("", out("vs32") _, options(nostack, nomem, preserves_flags));
 }
 
 // Output format depends on the availability of altivec and vsx
 // CHECK-LABEL: @clobber_abi
-// powerpc: asm sideeffect "", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},~{vs0},~{vs1},~{vs2},~{vs3},~{vs4},~{vs5},~{vs6},~{vs7},~{vs8},~{vs9},~{vs10},~{vs11},~{vs12},~{vs13},~{vs14},~{vs15},~{vs16},~{vs17},~{vs18},~{vs19},~{vs20},~{vs21},~{vs22},~{vs23},~{vs24},~{vs25},~{vs26},~{vs27},~{vs28},~{vs29},~{vs30},~{vs31},~{v0},~{v1},~{v2},~{v3},~{v4},~{v5},~{v6},~{v7},~{v8},~{v9},~{v10},~{v11},~{v12},~{v13},~{v14},~{v15},~{v16},~{v17},~{v18},~{v19},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{xer}"()
-// powerpc64: asm sideeffect "", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},={v0},={v1},={v2},={v3},={v4},={v5},={v6},={v7},={v8},={v9},={v10},={v11},={v12},={v13},={v14},={v15},={v16},={v17},={v18},={v19},~{vs0},~{vs1},~{vs2},~{vs3},~{vs4},~{vs5},~{vs6},~{vs7},~{vs8},~{vs9},~{vs10},~{vs11},~{vs12},~{vs13},~{vs14},~{vs15},~{vs16},~{vs17},~{vs18},~{vs19},~{vs20},~{vs21},~{vs22},~{vs23},~{vs24},~{vs25},~{vs26},~{vs27},~{vs28},~{vs29},~{vs30},~{vs31},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{xer}"()
-// powerpc64le: asm sideeffect "", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},={vs0},={vs1},={vs2},={vs3},={vs4},={vs5},={vs6},={vs7},={vs8},={vs9},={vs10},={vs11},={vs12},={vs13},={vs14},={vs15},={vs16},={vs17},={vs18},={vs19},={vs20},={vs21},={vs22},={vs23},={vs24},={vs25},={vs26},={vs27},={vs28},={vs29},={vs30},={vs31},={v0},={v1},={v2},={v3},={v4},={v5},={v6},={v7},={v8},={v9},={v10},={v11},={v12},={v13},={v14},={v15},={v16},={v17},={v18},={v19},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{xer}"()
-// aix64: asm sideeffect "", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},={vs0},={vs1},={vs2},={vs3},={vs4},={vs5},={vs6},={vs7},={vs8},={vs9},={vs10},={vs11},={vs12},={vs13},={vs14},={vs15},={vs16},={vs17},={vs18},={vs19},={vs20},={vs21},={vs22},={vs23},={vs24},={vs25},={vs26},={vs27},={vs28},={vs29},={vs30},={vs31},={v0},={v1},={v2},={v3},={v4},={v5},={v6},={v7},={v8},={v9},={v10},={v11},={v12},={v13},={v14},={v15},={v16},={v17},={v18},={v19},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{xer}"()
+// powerpc: asm sideeffect "", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},~{vs0},~{vs1},~{vs2},~{vs3},~{vs4},~{vs5},~{vs6},~{vs7},~{vs8},~{vs9},~{vs10},~{vs11},~{vs12},~{vs13},~{vs14},~{vs15},~{vs16},~{vs17},~{vs18},~{vs19},~{vs20},~{vs21},~{vs22},~{vs23},~{vs24},~{vs25},~{vs26},~{vs27},~{vs28},~{vs29},~{vs30},~{vs31},~{v0},~{v1},~{v2},~{v3},~{v4},~{v5},~{v6},~{v7},~{v8},~{v9},~{v10},~{v11},~{v12},~{v13},~{v14},~{v15},~{v16},~{v17},~{v18},~{v19},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{vscr},~{xer}"()
+// powerpc64: asm sideeffect "", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},={v0},={v1},={v2},={v3},={v4},={v5},={v6},={v7},={v8},={v9},={v10},={v11},={v12},={v13},={v14},={v15},={v16},={v17},={v18},={v19},~{vs0},~{vs1},~{vs2},~{vs3},~{vs4},~{vs5},~{vs6},~{vs7},~{vs8},~{vs9},~{vs10},~{vs11},~{vs12},~{vs13},~{vs14},~{vs15},~{vs16},~{vs17},~{vs18},~{vs19},~{vs20},~{vs21},~{vs22},~{vs23},~{vs24},~{vs25},~{vs26},~{vs27},~{vs28},~{vs29},~{vs30},~{vs31},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{vscr},~{xer}"()
+// powerpc64le: asm sideeffect "", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},={vs0},={vs1},={vs2},={vs3},={vs4},={vs5},={vs6},={vs7},={vs8},={vs9},={vs10},={vs11},={vs12},={vs13},={vs14},={vs15},={vs16},={vs17},={vs18},={vs19},={vs20},={vs21},={vs22},={vs23},={vs24},={vs25},={vs26},={vs27},={vs28},={vs29},={vs30},={vs31},={v0},={v1},={v2},={v3},={v4},={v5},={v6},={v7},={v8},={v9},={v10},={v11},={v12},={v13},={v14},={v15},={v16},={v17},={v18},={v19},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{vscr},~{xer}"()
+// aix64: asm sideeffect "", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},={vs0},={vs1},={vs2},={vs3},={vs4},={vs5},={vs6},={vs7},={vs8},={vs9},={vs10},={vs11},={vs12},={vs13},={vs14},={vs15},={vs16},={vs17},={vs18},={vs19},={vs20},={vs21},={vs22},={vs23},={vs24},={vs25},={vs26},={vs27},={vs28},={vs29},={vs30},={vs31},={v0},={v1},={v2},={v3},={v4},={v5},={v6},={v7},={v8},={v9},={v10},={v11},={v12},={v13},={v14},={v15},={v16},={v17},={v18},={v19},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{vscr},~{xer}"()
 #[no_mangle]
 pub unsafe fn clobber_abi() {
     asm!("", clobber_abi("C"), options(nostack, nomem, preserves_flags));
 }
@@ -104,10 +111,10 @@ pub unsafe fn clobber_preservesflags() {
 // Output format depends on the availability of altivec and vsx
 // CHECK-LABEL: @clobber_abi_no_preserves_flags
 #[no_mangle]
-// powerpc: asm sideeffect "nop", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},~{vs0},~{vs1},~{vs2},~{vs3},~{vs4},~{vs5},~{vs6},~{vs7},~{vs8},~{vs9},~{vs10},~{vs11},~{vs12},~{vs13},~{vs14},~{vs15},~{vs16},~{vs17},~{vs18},~{vs19},~{vs20},~{vs21},~{vs22},~{vs23},~{vs24},~{vs25},~{vs26},~{vs27},~{vs28},~{vs29},~{vs30},~{vs31},~{v0},~{v1},~{v2},~{v3},~{v4},~{v5},~{v6},~{v7},~{v8},~{v9},~{v10},~{v11},~{v12},~{v13},~{v14},~{v15},~{v16},~{v17},~{v18},~{v19},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{xer}"()
-// powerpc64: asm sideeffect "nop", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},={v0},={v1},={v2},={v3},={v4},={v5},={v6},={v7},={v8},={v9},={v10},={v11},={v12},={v13},={v14},={v15},={v16},={v17},={v18},={v19},~{vs0},~{vs1},~{vs2},~{vs3},~{vs4},~{vs5},~{vs6},~{vs7},~{vs8},~{vs9},~{vs10},~{vs11},~{vs12},~{vs13},~{vs14},~{vs15},~{vs16},~{vs17},~{vs18},~{vs19},~{vs20},~{vs21},~{vs22},~{vs23},~{vs24},~{vs25},~{vs26},~{vs27},~{vs28},~{vs29},~{vs30},~{vs31},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{xer}"()
-// powerpc64le: asm sideeffect "nop", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},={vs0},={vs1},={vs2},={vs3},={vs4},={vs5},={vs6},={vs7},={vs8},={vs9},={vs10},={vs11},={vs12},={vs13},={vs14},={vs15},={vs16},={vs17},={vs18},={vs19},={vs20},={vs21},={vs22},={vs23},={vs24},={vs25},={vs26},={vs27},={vs28},={vs29},={vs30},={vs31},={v0},={v1},={v2},={v3},={v4},={v5},={v6},={v7},={v8},={v9},={v10},={v11},={v12},={v13},={v14},={v15},={v16},={v17},={v18},={v19},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{xer}"()
-// aix64: asm sideeffect "nop", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},={vs0},={vs1},={vs2},={vs3},={vs4},={vs5},={vs6},={vs7},={vs8},={vs9},={vs10},={vs11},={vs12},={vs13},={vs14},={vs15},={vs16},={vs17},={vs18},={vs19},={vs20},={vs21},={vs22},={vs23},={vs24},={vs25},={vs26},={vs27},={vs28},={vs29},={vs30},={vs31},={v0},={v1},={v2},={v3},={v4},={v5},={v6},={v7},={v8},={v9},={v10},={v11},={v12},={v13},={v14},={v15},={v16},={v17},={v18},={v19},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{xer}"()
+// powerpc: asm sideeffect "nop", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},~{vs0},~{vs1},~{vs2},~{vs3},~{vs4},~{vs5},~{vs6},~{vs7},~{vs8},~{vs9},~{vs10},~{vs11},~{vs12},~{vs13},~{vs14},~{vs15},~{vs16},~{vs17},~{vs18},~{vs19},~{vs20},~{vs21},~{vs22},~{vs23},~{vs24},~{vs25},~{vs26},~{vs27},~{vs28},~{vs29},~{vs30},~{vs31},~{v0},~{v1},~{v2},~{v3},~{v4},~{v5},~{v6},~{v7},~{v8},~{v9},~{v10},~{v11},~{v12},~{v13},~{v14},~{v15},~{v16},~{v17},~{v18},~{v19},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{vscr},~{xer}"()
+// powerpc64: asm sideeffect "nop", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},={v0},={v1},={v2},={v3},={v4},={v5},={v6},={v7},={v8},={v9},={v10},={v11},={v12},={v13},={v14},={v15},={v16},={v17},={v18},={v19},~{vs0},~{vs1},~{vs2},~{vs3},~{vs4},~{vs5},~{vs6},~{vs7},~{vs8},~{vs9},~{vs10},~{vs11},~{vs12},~{vs13},~{vs14},~{vs15},~{vs16},~{vs17},~{vs18},~{vs19},~{vs20},~{vs21},~{vs22},~{vs23},~{vs24},~{vs25},~{vs26},~{vs27},~{vs28},~{vs29},~{vs30},~{vs31},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{vscr},~{xer}"()
+// powerpc64le: asm sideeffect "nop", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},={vs0},={vs1},={vs2},={vs3},={vs4},={vs5},={vs6},={vs7},={vs8},={vs9},={vs10},={vs11},={vs12},={vs13},={vs14},={vs15},={vs16},={vs17},={vs18},={vs19},={vs20},={vs21},={vs22},={vs23},={vs24},={vs25},={vs26},={vs27},={vs28},={vs29},={vs30},={vs31},={v0},={v1},={v2},={v3},={v4},={v5},={v6},={v7},={v8},={v9},={v10},={v11},={v12},={v13},={v14},={v15},={v16},={v17},={v18},={v19},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{vscr},~{xer}"()
+// aix64: asm sideeffect "nop", "={r0},={r3},={r4},={r5},={r6},={r7},={r8},={r9},={r10},={r11},={r12},={f0},={f1},={f2},={f3},={f4},={f5},={f6},={f7},={f8},={f9},={f10},={f11},={f12},={f13},={vs0},={vs1},={vs2},={vs3},={vs4},={vs5},={vs6},={vs7},={vs8},={vs9},={vs10},={vs11},={vs12},={vs13},={vs14},={vs15},={vs16},={vs17},={vs18},={vs19},={vs20},={vs21},={vs22},={vs23},={vs24},={vs25},={vs26},={vs27},={vs28},={vs29},={vs30},={vs31},={v0},={v1},={v2},={v3},={v4},={v5},={v6},={v7},={v8},={v9},={v10},={v11},={v12},={v13},={v14},={v15},={v16},={v17},={v18},={v19},~{cr0},~{cr1},~{cr5},~{cr6},~{cr7},~{ctr},~{lr},~{vscr},~{xer}"()
 pub unsafe fn clobber_abi_no_preserves_flags() {
     // Use a nop to prevent aliasing of identical functions here.
     asm!("nop", clobber_abi("C"), options(nostack, nomem));