Home
last modified time | relevance | path

Searched refs:uxtw (Results 1 – 20 of 20) sorted by relevance

/netbsd-src/external/apache2/llvm/dist/llvm/lib/Target/ARM/MCTargetDesc/
H A DARMAddressingModes.h34 uxtw enumerator
52 case ARM_AM::uxtw: return "uxtw"; in getShiftOpcStr()
H A DARMInstPrinter.cpp620 printRegImmShift(O, ARM_AM::uxtw, shift, UseMarkup); in printMveAddrModeRQOperand()
/netbsd-src/external/gpl3/gcc.old/dist/gcc/config/aarch64/
H A Daarch64-sve.md1435 ld1<Vesize>\t%0.s, %5/z, [%1, %2.s, uxtw]
1437 ld1<Vesize>\t%0.s, %5/z, [%1, %2.s, uxtw %p4]"
1528 ld1<Vesize>\t%0.d, %5/z, [%1, %2.d, uxtw]
1529 ld1<Vesize>\t%0.d, %5/z, [%1, %2.d, uxtw %p4]"
1565 ld1<ANY_EXTEND:s><SVE_4BHI:Vesize>\t%0.s, %5/z, [%1, %2.s, uxtw]
1567 ld1<ANY_EXTEND:s><SVE_4BHI:Vesize>\t%0.s, %5/z, [%1, %2.s, uxtw %p4]"
1684 ld1<ANY_EXTEND:s><SVE_2BHSI:Vesize>\t%0.d, %5/z, [%1, %2.d, uxtw]
1685 ld1<ANY_EXTEND:s><SVE_2BHSI:Vesize>\t%0.d, %5/z, [%1, %2.d, uxtw %p4]"
1718 ldff1w\t%0.s, %5/z, [%1, %2.s, uxtw]
1720 ldff1w\t%0.s, %5/z, [%1, %2.s, uxtw %p4]"
[all …]
H A Daarch64.md1809 uxtw\t%0, %w1
/netbsd-src/external/gpl3/gcc/dist/gcc/config/arm/
H A Dmve.md7442 output_asm_insn ("vldrh.u16\t%q0, [%m1, %q2, uxtw #1]",ops);
7444 output_asm_insn ("vldrh.<supf><V_sz_elem>\t%q0, [%m1, %q2, uxtw #1]",ops);
7467 output_asm_insn ("vpst\n\tvldrht.u16\t%q0, [%m1, %q2, uxtw #1]",ops);
7469 output_asm_insn ("vpst\n\tvldrht.<supf><V_sz_elem>\t%q0, [%m1, %q2, uxtw #1]",ops);
7734 output_asm_insn ("vldrd.u64\t%q0, [%m1, %q2, uxtw #3]",ops);
7755 output_asm_insn ("vpst\n\tvldrdt.u64\t%q0, [%m1, %q2, uxtw #3]",ops);
7817 output_asm_insn ("vldrh.f16\t%q0, [%m1, %q2, uxtw #1]",ops);
7839 output_asm_insn ("vpst\n\tvldrht.f16\t%q0, [%m1, %q2, uxtw #1]",ops);
7984 output_asm_insn ("vldrw.u32\t%q0, [%m1, %q2, uxtw #2]",ops);
8004 output_asm_insn ("vldrw.u32\t%q0, [%m1, %q2, uxtw #2]",ops);
[all …]
/netbsd-src/external/gpl3/gcc/dist/gcc/config/aarch64/
H A Daarch64-sve.md1435 ld1<Vesize>\t%0.s, %5/z, [%1, %2.s, uxtw]
1437 ld1<Vesize>\t%0.s, %5/z, [%1, %2.s, uxtw %p4]"
1528 ld1<Vesize>\t%0.d, %5/z, [%1, %2.d, uxtw]
1529 ld1<Vesize>\t%0.d, %5/z, [%1, %2.d, uxtw %p4]"
1565 ld1<ANY_EXTEND:s><SVE_4BHI:Vesize>\t%0.s, %5/z, [%1, %2.s, uxtw]
1567 ld1<ANY_EXTEND:s><SVE_4BHI:Vesize>\t%0.s, %5/z, [%1, %2.s, uxtw %p4]"
1684 ld1<ANY_EXTEND:s><SVE_2BHSI:Vesize>\t%0.d, %5/z, [%1, %2.d, uxtw]
1685 ld1<ANY_EXTEND:s><SVE_2BHSI:Vesize>\t%0.d, %5/z, [%1, %2.d, uxtw %p4]"
1718 ldff1w\t%0.s, %5/z, [%1, %2.s, uxtw]
1720 ldff1w\t%0.s, %5/z, [%1, %2.s, uxtw %p4]"
[all …]
H A Daarch64.md1983 uxtw\t%0, %w1
/netbsd-src/external/gpl3/gcc.old/dist/gcc/config/arm/
H A Dmve.md7800 output_asm_insn ("vldrh.u16\t%q0, [%m1, %q2, uxtw #1]",ops);
7802 output_asm_insn ("vldrh.<supf><V_sz_elem>\t%q0, [%m1, %q2, uxtw #1]",ops);
7825 output_asm_insn ("vpst\n\tvldrht.u16\t%q0, [%m1, %q2, uxtw #1]",ops);
7827 output_asm_insn ("vpst\n\tvldrht.<supf><V_sz_elem>\t%q0, [%m1, %q2, uxtw #1]",ops);
8092 output_asm_insn ("vldrd.u64\t%q0, [%m1, %q2, uxtw #3]",ops);
8113 output_asm_insn ("vpst\n\tvldrdt.u64\t%q0, [%m1, %q2, uxtw #3]",ops);
8175 output_asm_insn ("vldrh.f16\t%q0, [%m1, %q2, uxtw #1]",ops);
8197 output_asm_insn ("vpst\n\tvldrht.f16\t%q0, [%m1, %q2, uxtw #1]",ops);
8342 output_asm_insn ("vldrw.u32\t%q0, [%m1, %q2, uxtw #2]",ops);
8362 output_asm_insn ("vldrw.u32\t%q0, [%m1, %q2, uxtw #2]",ops);
[all …]
/netbsd-src/external/apache2/llvm/dist/llvm/lib/Target/AArch64/
H A DAArch64SVEInstrInfo.td380 defm UXTW_ZPmZ : sve_int_un_pred_arit_0_d<0b101, "uxtw", AArch64uxt_mt>;
819 // ld1h z0.s, p0/z, [x0, z0.s, uxtw]
832 // ld1h z0.s, p0/z, [x0, z0.s, uxtw #1]
901 // ld1h z0.d, p0/z, [x0, z0.d, uxtw]
918 // ld1h z0.d, p0/z, [x0, z0.d, uxtw #1]
967 // st1h z0.d, p0, [x0, z0.d, uxtw]
974 // st1h z0.s, p0, [x0, z0.s, uxtw]
980 // st1h z0.s, p0, [x0, z0.s, uxtw #1]
985 // st1h z0.d, p0, [x0, z0.d, uxtw #1]
1098 // prfh pldl1keep, p0, [x0, z0.s, uxtw #1]
[all …]
H A DAArch64SchedA64FX.td3856 // [526] "uxtw $Zd, $Pg/m, $Zn";
H A DSVEInstrFormats.td7006 // bit lsl is '0' if the offsets are extended (uxtw/sxtw), '1' if shifted (lsl)
7174 // bit lsl is '0' if the offsets are extended (uxtw/sxtw), '1' if shifted (lsl)
H A DAArch64InstrInfo.td2110 def : InstAlias<"uxtw $dst, $src", (UBFMXri GPR64:$dst, GPR64:$src, 0, 31)>;
H A DAArch64InstrFormats.td3396 // Asm-level Operand covering the valid "uxtw #3" style syntax.
/netbsd-src/external/apache2/llvm/dist/llvm/include/llvm/IR/
H A DIntrinsicsAArch64.td2084 // 32 bit unscaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
2088 // 32 bit scaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
/netbsd-src/external/gpl3/binutils.old/dist/opcodes/
H A DChangeLog-2018439 uxtb, uxth, uxtw, bic, eon, orn, mov, fmov): Change _SVE_INSN into _SVE_INSNC and add
/netbsd-src/external/gpl3/binutils/dist/opcodes/
H A DChangeLog-2018439 uxtb, uxth, uxtw, bic, eon, orn, mov, fmov): Change _SVE_INSN into _SVE_INSNC and add
/netbsd-src/external/gpl3/gdb.old/dist/opcodes/
H A DChangeLog-2018439 uxtb, uxth, uxtw, bic, eon, orn, mov, fmov): Change _SVE_INSN into _SVE_INSNC and add
/netbsd-src/external/gpl3/gdb/dist/opcodes/
H A DChangeLog-2018439 uxtb, uxth, uxtw, bic, eon, orn, mov, fmov): Change _SVE_INSN into _SVE_INSNC and add
/netbsd-src/external/apache2/llvm/dist/llvm/lib/Target/ARM/AsmParser/
H A DARMAsmParser.cpp1889 (Memory.ShiftType != ARM_AM::uxtw || Memory.ShiftImm != shift)) in isMemRegRQOffset()
6037 St = ARM_AM::uxtw; in parseMemRegOffsetShift()
/netbsd-src/external/gpl3/gcc/dist/gcc/
H A DChangeLog-201931568 attribute for uxtw.