// REQUIRES: powerpc-registered-target

// RUN: %clang_cc1 -triple powerpc64le-unknown-linux-gnu -target-feature +vsx \
// RUN:   -target-cpu pwr9 -emit-llvm %s -o - | FileCheck %s
// RUN: %clang_cc1 -triple powerpc64-unknown-linux-gnu -target-feature +vsx \
// RUN:   -target-cpu pwr9 -emit-llvm %s -o - | FileCheck %s
// RUN: %clang_cc1 -triple powerpc64-ibm-aix -target-feature +vsx \
// RUN:   -target-cpu pwr9 -emit-llvm %s -o - | FileCheck %s
// RUN: %clang_cc1 -triple powerpc-ibm-aix -target-feature +vsx \
// RUN:   -target-cpu pwr9 -emit-llvm %s -o - | FileCheck %s

// This test verifies VSX register support in the clobbers list for inline asm.
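// The asm below names VSX register 32 directly and lists "vs32" as a clobber;
// the CHECK line at the bottom verifies that clang forwards this clobber into
// the generated IR constraint string as ~{vs32}.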
void testVSX (void) {
  unsigned int a = 0;
  unsigned int *dbell = &a;
  int d;
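  // lxvw4x loads a VSX vector from the address in %2 (&d) into vs32, and
  // stxvw4x stores vs32 to the address in %1 (dbell), so vs32 must be
  // marked as clobbered.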
  __asm__ __volatile__ (
    "lxvw4x  32, 0, %2\n\t"
    "stxvw4x 32, 0, %1"
    : "=m"(*(volatile unsigned int*)(dbell))
    : "r" (dbell), "r" (&d)
    : "vs32"
  );
}

// CHECK: call void asm sideeffect "lxvw4x  32, 0, $2\0A\09stxvw4x 32, 0, $1", "=*m,r,r,~{vs32}"
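// One common way to run just this test locally, assuming an existing LLVM build
// with clang and llvm-lit and invoking from the llvm-project source root
// (<build> below is a placeholder for the build directory):
//   <build>/bin/llvm-lit -v clang/test/CodeGen/PowerPC/inline-asm-matching-ppc-vsx.c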