; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+v -verify-machineinstrs < %s \
; RUN:   | FileCheck -check-prefix=RV32I %s
; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s \
; RUN:   | FileCheck -check-prefix=RV64I %s

; 'vr' constraint: any RVV vector register.
define <vscale x 1 x i8> @constraint_vr(<vscale x 1 x i8> %0, <vscale x 1 x i8> %1) nounwind {
; RV32I-LABEL: constraint_vr:
; RV32I:       # %bb.0:
; RV32I-NEXT:    #APP
; RV32I-NEXT:    vadd.vv v8, v8, v9
; RV32I-NEXT:    #NO_APP
; RV32I-NEXT:    ret
;
; RV64I-LABEL: constraint_vr:
; RV64I:       # %bb.0:
; RV64I-NEXT:    #APP
; RV64I-NEXT:    vadd.vv v8, v8, v9
; RV64I-NEXT:    #NO_APP
; RV64I-NEXT:    ret
  %a = tail call <vscale x 1 x i8> asm "vadd.vv $0, $1, $2", "=^vr,^vr,^vr"(
    <vscale x 1 x i8> %0, <vscale x 1 x i8> %1)
  ret <vscale x 1 x i8> %a
}

; 'vd' constraint: any vector register other than v0.
define <vscale x 1 x i8> @constraint_vd(<vscale x 1 x i8> %0, <vscale x 1 x i8> %1) nounwind {
; RV32I-LABEL: constraint_vd:
; RV32I:       # %bb.0:
; RV32I-NEXT:    #APP
; RV32I-NEXT:    vadd.vv v8, v8, v9
; RV32I-NEXT:    #NO_APP
; RV32I-NEXT:    ret
;
; RV64I-LABEL: constraint_vd:
; RV64I:       # %bb.0:
; RV64I-NEXT:    #APP
; RV64I-NEXT:    vadd.vv v8, v8, v9
; RV64I-NEXT:    #NO_APP
; RV64I-NEXT:    ret
  %a = tail call <vscale x 1 x i8> asm "vadd.vv $0, $1, $2", "=^vd,^vr,^vr"(
    <vscale x 1 x i8> %0, <vscale x 1 x i8> %1)
  ret <vscale x 1 x i8> %a
}

; 'vm' constraint on the last input: the operand must live in the mask
; register v0, so the incoming mask value is copied into v0 before the
; inline asm.
define <vscale x 1 x i1> @constraint_vm(<vscale x 1 x i1> %0, <vscale x 1 x i1> %1) nounwind {
; RV32I-LABEL: constraint_vm:
; RV32I:       # %bb.0:
; RV32I-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; RV32I-NEXT:    vmv1r.v v9, v0
; RV32I-NEXT:    vmv1r.v v0, v8
; RV32I-NEXT:    #APP
; RV32I-NEXT:    vadd.vv v0, v9, v0
; RV32I-NEXT:    #NO_APP
; RV32I-NEXT:    ret
;
; RV64I-LABEL: constraint_vm:
; RV64I:       # %bb.0:
; RV64I-NEXT:    vsetivli zero, 1, e8, m1, ta, ma
; RV64I-NEXT:    vmv1r.v v9, v0
; RV64I-NEXT:    vmv1r.v v0, v8
; RV64I-NEXT:    #APP
; RV64I-NEXT:    vadd.vv v0, v9, v0
; RV64I-NEXT:    #NO_APP
; RV64I-NEXT:    ret
  %a = tail call <vscale x 1 x i1> asm "vadd.vv $0, $1, $2", "=^vr,^vr,^vm"(
    <vscale x 1 x i1> %0, <vscale x 1 x i1> %1)
  ret <vscale x 1 x i1> %a
}