Lines matching defs: res_vsc

34 vector signed char res_vsc;
128 res_vsc = vec_neg(vsc);
153 res_vsc = vec_add(vsc, vsc);
157 res_vsc = vec_add(vbc, vsc);
161 res_vsc = vec_add(vsc, vbc);
245 res_vsc = vec_vaddubm(vsc, vsc);
249 res_vsc = vec_vaddubm(vbc, vsc);
253 res_vsc = vec_vaddubm(vsc, vbc);
331 res_vsc = vec_adds(vsc, vsc);
335 res_vsc = vec_adds(vbc, vsc);
339 res_vsc = vec_adds(vsc, vbc);
403 res_vsc = vec_vaddsbs(vsc, vsc);
407 res_vsc = vec_vaddsbs(vbc, vsc);
411 res_vsc = vec_vaddsbs(vsc, vbc);
476 res_vsc = vec_and(vsc, vsc);
480 res_vsc = vec_and(vbc, vsc);
484 res_vsc = vec_and(vsc, vbc);
560 res_vsc = vec_vand(vsc, vsc);
564 res_vsc = vec_vand(vbc, vsc);
568 res_vsc = vec_vand(vsc, vbc);
645 res_vsc = vec_andc(vsc, vsc);
651 res_vsc = vec_andc(vbc, vsc);
657 res_vsc = vec_andc(vsc, vbc);
781 res_vsc = vec_vandc(vsc, vsc);
787 res_vsc = vec_vandc(vbc, vsc);
793 res_vsc = vec_vandc(vsc, vbc);
924 res_vsc = vec_avg(vsc, vsc);
948 res_vsc = vec_vavgsb(vsc, vsc);
1329 res_vsc = vec_div(vsc, vsc);
1402 res_vsc = vec_ld(0, &vsc);
1406 res_vsc = vec_ld(0, param_sc_ld);
1474 res_vsc = vec_lvx(0, &vsc);
1478 res_vsc = vec_lvx(0, param_sc_ld);
1547 res_vsc = vec_lde(0, param_sc_ld);
1575 res_vsc = vec_lvebx(0, param_sc_ld);
1604 res_vsc = vec_ldl(0, &vsc);
1608 res_vsc = vec_ldl(0, param_sc_ld);
1676 res_vsc = vec_lvxl(0, &vsc);
1680 res_vsc = vec_lvxl(0, param_sc_ld);
1786 res_vsc = vec_max(vsc, vsc);
1790 res_vsc = vec_max(vbc, vsc);
1794 res_vsc = vec_max(vsc, vbc);
1862 res_vsc = vec_vmaxsb(vsc, vsc);
1866 res_vsc = vec_vmaxsb(vbc, vsc);
1870 res_vsc = vec_vmaxsb(vsc, vbc);
1939 res_vsc = vec_mergeh(vsc, vsc);
1983 res_vsc = vec_vmrghb(vsc, vsc);
2028 res_vsc = vec_mergel(vsc, vsc);
2072 res_vsc = vec_vmrglb(vsc, vsc);
2122 res_vsc = vec_min(vsc, vsc);
2126 res_vsc = vec_min(vbc, vsc);
2130 res_vsc = vec_min(vsc, vbc);
2198 res_vsc = vec_vminsb(vsc, vsc);
2202 res_vsc = vec_vminsb(vbc, vsc);
2206 res_vsc = vec_vminsb(vsc, vbc);
2400 res_vsc = vec_mul(vsc, vsc);
2500 res_vsc = vec_nor(vsc, vsc);
2560 res_vsc = vec_vnor(vsc, vsc);
2621 res_vsc = vec_or(vsc, vsc);
2625 res_vsc = vec_or(vbc, vsc);
2629 res_vsc = vec_or(vsc, vbc);
2717 res_vsc = vec_vor(vsc, vsc);
2721 res_vsc = vec_vor(vbc, vsc);
2725 res_vsc = vec_vor(vsc, vbc);
2814 res_vsc = vec_pack(vs, vs);
2838 res_vsc = vec_vpkuhum(vs, vs);
2872 res_vsc = vec_packs(vs, vs);
2888 res_vsc = vec_vpkshss(vs, vs);
2938 res_vsc = vec_perm(vsc, vsc, vuc);
2982 res_vsc = vec_vperm(vsc, vsc, vuc);
3036 res_vsc = vec_rl(vsc, vuc);
3060 res_vsc = vec_vrlb(vsc, vuc);
3103 res_vsc = vec_sel(vsc, vsc, vuc);
3113 res_vsc = vec_sel(vsc, vsc, vbc);
3303 res_vsc = vec_vsel(vsc, vsc, vuc);
3313 res_vsc = vec_vsel(vsc, vsc, vbc);
3504 res_vsc = vec_sl(vsc, vuc);
3540 res_vsc = vec_vslb(vsc, vuc);
3565 res_vsc = vec_sld(vsc, vsc, 0);
3692 res_vsc = vec_sldw(vsc, vsc, 0);
3776 res_vsc = vec_vsldoi(vsc, vsc, 0);
3873 res_vsc = vec_sll(vsc, vuc);
3877 res_vsc = vec_sll(vsc, vus);
3881 res_vsc = vec_sll(vsc, vui);
3993 res_vsc = vec_vsl(vsc, vuc);
3997 res_vsc = vec_vsl(vsc, vus);
4001 res_vsc = vec_vsl(vsc, vui);
4114 res_vsc = vec_slo(vsc, vsc);
4118 res_vsc = vec_slo(vsc, vuc);
4178 res_vsc = vec_vslo(vsc, vsc);
4182 res_vsc = vec_vslo(vsc, vuc);
4243 res_vsc = vec_splat(vsc, 0);
4287 res_vsc = vec_vspltb(vsc, 0);
4332 res_vsc = vec_splat_s8(0x09); // TODO: add check
4333 res_vsc = vec_vspltisb(0x09); // TODO: add check
4353 res_vsc = vec_sr(vsc, vuc);
4389 res_vsc = vec_vsrb(vsc, vuc);
4426 res_vsc = vec_sra(vsc, vuc);
4450 res_vsc = vec_vsrab(vsc, vuc);
4475 res_vsc = vec_srl(vsc, vuc);
4479 res_vsc = vec_srl(vsc, vus);
4483 res_vsc = vec_srl(vsc, vui);
4595 res_vsc = vec_vsr(vsc, vuc);
4599 res_vsc = vec_vsr(vsc, vus);
4603 res_vsc = vec_vsr(vsc, vui);
4716 res_vsc = vec_sro(vsc, vsc);
4720 res_vsc = vec_sro(vsc, vuc);
4780 res_vsc = vec_vsro(vsc, vsc);
4784 res_vsc = vec_vsro(vsc, vuc);
5384 res_vsc = vec_sub(vsc, vsc);
5388 res_vsc = vec_sub(vbc, vsc);
5392 res_vsc = vec_sub(vsc, vbc);
5462 res_vsc = vec_vsububm(vsc, vsc);
5466 res_vsc = vec_vsububm(vbc, vsc);
5470 res_vsc = vec_vsububm(vsc, vbc);
5552 res_vsc = vec_subs(vsc, vsc);
5556 res_vsc = vec_subs(vbc, vsc);
5560 res_vsc = vec_subs(vsc, vbc);
5644 res_vsc = vec_vsubsbs(vsc, vsc);
5648 res_vsc = vec_vsubsbs(vbc, vsc);
5652 res_vsc = vec_vsubsbs(vsc, vbc);
5865 res_vsc = vec_xor(vsc, vsc);
5869 res_vsc = vec_xor(vbc, vsc);
5873 res_vsc = vec_xor(vsc, vbc);
5961 res_vsc = vec_vxor(vsc, vsc);
5965 res_vsc = vec_vxor(vbc, vsc);
5969 res_vsc = vec_vxor(vsc, vbc);
6101 res_vsc = vec_insert(param_sc, vsc, param_i);
6142 res_vsc = vec_lvlx(0, param_sc_ld);
6152 res_vsc = vec_lvlx(0, &vsc);
6313 res_vsc = vec_lvlxl(0, param_sc_ld);
6323 res_vsc = vec_lvlxl(0, &vsc);
6484 res_vsc = vec_lvrx(0, param_sc_ld);
6494 res_vsc = vec_lvrx(0, &vsc);
6655 res_vsc = vec_lvrxl(0, param_sc_ld);
6665 res_vsc = vec_lvrxl(0, &vsc);
7918 res_vsc = vec_promote(param_sc, 0);
7961 res_vsc = vec_splats(param_sc);
9340 res_vsc = vec_reve(vsc);
9386 res_vsc = vec_revb(vsc);
9390 // CHECK: store <16 x i8> [[T3]], ptr @res_vsc, align 16
9394 // CHECK-LE: store <16 x i8> [[T3]], ptr @res_vsc, align 16
9467 res_vsc = vec_xl(param_sll, param_sc_ld);
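
A minimal sketch (an illustration, not the original test file) of how the operands behind this listing are typically declared and how a few representative intrinsics combine. It assumes <altivec.h> and compilation with -maltivec; the names vsc, vbc, vuc, vs, and res_vsc mirror the listing, and the function name sample is hypothetical.

#include <altivec.h>

vector signed char vsc;      // signed char operand
vector bool char vbc;        // bool char operand
vector unsigned char vuc;    // shift/permute control operand
vector signed short vs;      // source for the pack operations
vector signed char res_vsc;  // result vector, as in the listing

void sample(void) {
  res_vsc = vec_add(vsc, vsc);        // element-wise modulo add
  res_vsc = vec_adds(vsc, vsc);       // element-wise saturating add
  res_vsc = vec_perm(vsc, vsc, vuc);  // byte permute under vuc control
  res_vsc = vec_packs(vs, vs);        // saturating pack: short -> char
  res_vsc = vec_sel(vsc, vsc, vbc);   // bitwise select under vbc mask
}

Something like `clang -target powerpc64le-unknown-linux-gnu -maltivec -S sample.c` should lower each assignment to the corresponding VMX instruction (vaddubm, vaddsbs, vperm, vpkshss, vsel).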