; Lines matching "metadata": declarations and calls of the constrained sitofp/uitofp intrinsics on fixed-length vectors, using dynamic rounding and strict exception semantics.

declare <1 x half> @llvm.experimental.constrained.sitofp.v1f16.v1i1(<1 x i1>, metadata, metadata)
%evec = call <1 x half> @llvm.experimental.constrained.sitofp.v1f16.v1i1(<1 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
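; Only the lines matching "metadata" appear in this listing, so the enclosing
; function definitions are elided. As a minimal sketch of how the first call
; above would typically be wrapped in a complete function carrying the strictfp
; attribute (the function name @vsitofp_v1i1_v1f16 is an assumption for
; illustration, not taken from this listing):
define <1 x half> @vsitofp_v1i1_v1f16(<1 x i1> %va) strictfp {
  %evec = call <1 x half> @llvm.experimental.constrained.sitofp.v1f16.v1i1(<1 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
  ret <1 x half> %evec
}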
declare <1 x half> @llvm.experimental.constrained.uitofp.v1f16.v1i1(<1 x i1>, metadata, metadata)
%evec = call <1 x half> @llvm.experimental.constrained.uitofp.v1f16.v1i1(<1 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x float> @llvm.experimental.constrained.sitofp.v1f32.v1i1(<1 x i1>, metadata, metadata)
%evec = call <1 x float> @llvm.experimental.constrained.sitofp.v1f32.v1i1(<1 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x float> @llvm.experimental.constrained.uitofp.v1f32.v1i1(<1 x i1>, metadata, metadata)
%evec = call <1 x float> @llvm.experimental.constrained.uitofp.v1f32.v1i1(<1 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x double> @llvm.experimental.constrained.sitofp.v1f64.v1i1(<1 x i1>, metadata, metadata)
%evec = call <1 x double> @llvm.experimental.constrained.sitofp.v1f64.v1i1(<1 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x double> @llvm.experimental.constrained.uitofp.v1f64.v1i1(<1 x i1>, metadata, metadata)
%evec = call <1 x double> @llvm.experimental.constrained.uitofp.v1f64.v1i1(<1 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x half> @llvm.experimental.constrained.sitofp.v2f16.v2i1(<2 x i1>, metadata, metadata)
%evec = call <2 x half> @llvm.experimental.constrained.sitofp.v2f16.v2i1(<2 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x half> @llvm.experimental.constrained.uitofp.v2f16.v2i1(<2 x i1>, metadata, metadata)
%evec = call <2 x half> @llvm.experimental.constrained.uitofp.v2f16.v2i1(<2 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x float> @llvm.experimental.constrained.sitofp.v2f32.v2i1(<2 x i1>, metadata, metadata)
%evec = call <2 x float> @llvm.experimental.constrained.sitofp.v2f32.v2i1(<2 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x float> @llvm.experimental.constrained.uitofp.v2f32.v2i1(<2 x i1>, metadata, metadata)
%evec = call <2 x float> @llvm.experimental.constrained.uitofp.v2f32.v2i1(<2 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x double> @llvm.experimental.constrained.sitofp.v2f64.v2i1(<2 x i1>, metadata, metadata)
%evec = call <2 x double> @llvm.experimental.constrained.sitofp.v2f64.v2i1(<2 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x double> @llvm.experimental.constrained.uitofp.v2f64.v2i1(<2 x i1>, metadata, metadata)
%evec = call <2 x double> @llvm.experimental.constrained.uitofp.v2f64.v2i1(<2 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x half> @llvm.experimental.constrained.sitofp.v4f16.v4i1(<4 x i1>, metadata, metadata)
%evec = call <4 x half> @llvm.experimental.constrained.sitofp.v4f16.v4i1(<4 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x half> @llvm.experimental.constrained.uitofp.v4f16.v4i1(<4 x i1>, metadata, metadata)
%evec = call <4 x half> @llvm.experimental.constrained.uitofp.v4f16.v4i1(<4 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x float> @llvm.experimental.constrained.sitofp.v4f32.v4i1(<4 x i1>, metadata, metadata)
%evec = call <4 x float> @llvm.experimental.constrained.sitofp.v4f32.v4i1(<4 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x float> @llvm.experimental.constrained.uitofp.v4f32.v4i1(<4 x i1>, metadata, metadata)
%evec = call <4 x float> @llvm.experimental.constrained.uitofp.v4f32.v4i1(<4 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x double> @llvm.experimental.constrained.sitofp.v4f64.v4i1(<4 x i1>, metadata, metadata)
%evec = call <4 x double> @llvm.experimental.constrained.sitofp.v4f64.v4i1(<4 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x double> @llvm.experimental.constrained.uitofp.v4f64.v4i1(<4 x i1>, metadata, metadata)
%evec = call <4 x double> @llvm.experimental.constrained.uitofp.v4f64.v4i1(<4 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i1(<8 x i1>, metadata, metadata)
%evec = call <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i1(<8 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i1(<8 x i1>, metadata, metadata)
%evec = call <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i1(<8 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x float> @llvm.experimental.constrained.sitofp.v8f32.v8i1(<8 x i1>, metadata, metadata)
%evec = call <8 x float> @llvm.experimental.constrained.sitofp.v8f32.v8i1(<8 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x float> @llvm.experimental.constrained.uitofp.v8f32.v8i1(<8 x i1>, metadata, metadata)
%evec = call <8 x float> @llvm.experimental.constrained.uitofp.v8f32.v8i1(<8 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x double> @llvm.experimental.constrained.sitofp.v8f64.v8i1(<8 x i1>, metadata, metadata)
%evec = call <8 x double> @llvm.experimental.constrained.sitofp.v8f64.v8i1(<8 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x double> @llvm.experimental.constrained.uitofp.v8f64.v8i1(<8 x i1>, metadata, metadata)
%evec = call <8 x double> @llvm.experimental.constrained.uitofp.v8f64.v8i1(<8 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i1(<16 x i1>, metadata, metadata)
%evec = call <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i1(<16 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i1(<16 x i1>, metadata, metadata)
%evec = call <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i1(<16 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x float> @llvm.experimental.constrained.sitofp.v16f32.v16i1(<16 x i1>, metadata, metadata)
%evec = call <16 x float> @llvm.experimental.constrained.sitofp.v16f32.v16i1(<16 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x float> @llvm.experimental.constrained.uitofp.v16f32.v16i1(<16 x i1>, metadata, metadata)
%evec = call <16 x float> @llvm.experimental.constrained.uitofp.v16f32.v16i1(<16 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <32 x half> @llvm.experimental.constrained.sitofp.v32f16.v32i1(<32 x i1>, metadata, metadata)
%evec = call <32 x half> @llvm.experimental.constrained.sitofp.v32f16.v32i1(<32 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <32 x half> @llvm.experimental.constrained.uitofp.v32f16.v32i1(<32 x i1>, metadata, metadata)
%evec = call <32 x half> @llvm.experimental.constrained.uitofp.v32f16.v32i1(<32 x i1> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x half> @llvm.experimental.constrained.sitofp.v1f16.v1i8(<1 x i8>, metadata, metadata)
%evec = call <1 x half> @llvm.experimental.constrained.sitofp.v1f16.v1i8(<1 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x half> @llvm.experimental.constrained.sitofp.v1f16.v1i7(<1 x i7>, metadata, metadata)
%evec = call <1 x half> @llvm.experimental.constrained.sitofp.v1f16.v1i7(<1 x i7> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x half> @llvm.experimental.constrained.uitofp.v1f16.v1i7(<1 x i7>, metadata, metadata)
%evec = call <1 x half> @llvm.experimental.constrained.uitofp.v1f16.v1i7(<1 x i7> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x half> @llvm.experimental.constrained.uitofp.v1f16.v1i8(<1 x i8>, metadata, metadata)
%evec = call <1 x half> @llvm.experimental.constrained.uitofp.v1f16.v1i8(<1 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x float> @llvm.experimental.constrained.sitofp.v1f32.v1i8(<1 x i8>, metadata, metadata)
%evec = call <1 x float> @llvm.experimental.constrained.sitofp.v1f32.v1i8(<1 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x float> @llvm.experimental.constrained.uitofp.v1f32.v1i8(<1 x i8>, metadata, metadata)
%evec = call <1 x float> @llvm.experimental.constrained.uitofp.v1f32.v1i8(<1 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x double> @llvm.experimental.constrained.sitofp.v1f64.v1i8(<1 x i8>, metadata, metadata)
%evec = call <1 x double> @llvm.experimental.constrained.sitofp.v1f64.v1i8(<1 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x double> @llvm.experimental.constrained.uitofp.v1f64.v1i8(<1 x i8>, metadata, metadata)
%evec = call <1 x double> @llvm.experimental.constrained.uitofp.v1f64.v1i8(<1 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x half> @llvm.experimental.constrained.sitofp.v2f16.v2i8(<2 x i8>, metadata, metadata)
%evec = call <2 x half> @llvm.experimental.constrained.sitofp.v2f16.v2i8(<2 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x half> @llvm.experimental.constrained.uitofp.v2f16.v2i8(<2 x i8>, metadata, metadata)
%evec = call <2 x half> @llvm.experimental.constrained.uitofp.v2f16.v2i8(<2 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x float> @llvm.experimental.constrained.sitofp.v2f32.v2i8(<2 x i8>, metadata, metadata)
%evec = call <2 x float> @llvm.experimental.constrained.sitofp.v2f32.v2i8(<2 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x float> @llvm.experimental.constrained.uitofp.v2f32.v2i8(<2 x i8>, metadata, metadata)
%evec = call <2 x float> @llvm.experimental.constrained.uitofp.v2f32.v2i8(<2 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x double> @llvm.experimental.constrained.sitofp.v2f64.v2i8(<2 x i8>, metadata, metadata)
%evec = call <2 x double> @llvm.experimental.constrained.sitofp.v2f64.v2i8(<2 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x double> @llvm.experimental.constrained.uitofp.v2f64.v2i8(<2 x i8>, metadata, metadata)
%evec = call <2 x double> @llvm.experimental.constrained.uitofp.v2f64.v2i8(<2 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x half> @llvm.experimental.constrained.sitofp.v4f16.v4i8(<4 x i8>, metadata, metadata)
%evec = call <4 x half> @llvm.experimental.constrained.sitofp.v4f16.v4i8(<4 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x half> @llvm.experimental.constrained.uitofp.v4f16.v4i8(<4 x i8>, metadata, metadata)
%evec = call <4 x half> @llvm.experimental.constrained.uitofp.v4f16.v4i8(<4 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x float> @llvm.experimental.constrained.sitofp.v4f32.v4i8(<4 x i8>, metadata, metadata)
%evec = call <4 x float> @llvm.experimental.constrained.sitofp.v4f32.v4i8(<4 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x float> @llvm.experimental.constrained.uitofp.v4f32.v4i8(<4 x i8>, metadata, metadata)
%evec = call <4 x float> @llvm.experimental.constrained.uitofp.v4f32.v4i8(<4 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x double> @llvm.experimental.constrained.sitofp.v4f64.v4i8(<4 x i8>, metadata, metadata)
%evec = call <4 x double> @llvm.experimental.constrained.sitofp.v4f64.v4i8(<4 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x double> @llvm.experimental.constrained.uitofp.v4f64.v4i8(<4 x i8>, metadata, metadata)
%evec = call <4 x double> @llvm.experimental.constrained.uitofp.v4f64.v4i8(<4 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i8(<8 x i8>, metadata, metadata)
%evec = call <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i8(<8 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i8(<8 x i8>, metadata, metadata)
%evec = call <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i8(<8 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x float> @llvm.experimental.constrained.sitofp.v8f32.v8i8(<8 x i8>, metadata, metadata)
%evec = call <8 x float> @llvm.experimental.constrained.sitofp.v8f32.v8i8(<8 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x float> @llvm.experimental.constrained.uitofp.v8f32.v8i8(<8 x i8>, metadata, metadata)
%evec = call <8 x float> @llvm.experimental.constrained.uitofp.v8f32.v8i8(<8 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x double> @llvm.experimental.constrained.sitofp.v8f64.v8i8(<8 x i8>, metadata, metadata)
%evec = call <8 x double> @llvm.experimental.constrained.sitofp.v8f64.v8i8(<8 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x double> @llvm.experimental.constrained.uitofp.v8f64.v8i8(<8 x i8>, metadata, metadata)
%evec = call <8 x double> @llvm.experimental.constrained.uitofp.v8f64.v8i8(<8 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i8(<16 x i8>, metadata, metadata)
%evec = call <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i8(<16 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i8(<16 x i8>, metadata, metadata)
%evec = call <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i8(<16 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x float> @llvm.experimental.constrained.sitofp.v16f32.v16i8(<16 x i8>, metadata, metadata)
%evec = call <16 x float> @llvm.experimental.constrained.sitofp.v16f32.v16i8(<16 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x float> @llvm.experimental.constrained.uitofp.v16f32.v16i8(<16 x i8>, metadata, metadata)
%evec = call <16 x float> @llvm.experimental.constrained.uitofp.v16f32.v16i8(<16 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <32 x half> @llvm.experimental.constrained.sitofp.v32f16.v32i8(<32 x i8>, metadata, metadata)
%evec = call <32 x half> @llvm.experimental.constrained.sitofp.v32f16.v32i8(<32 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <32 x half> @llvm.experimental.constrained.uitofp.v32f16.v32i8(<32 x i8>, metadata, metadata)
%evec = call <32 x half> @llvm.experimental.constrained.uitofp.v32f16.v32i8(<32 x i8> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x half> @llvm.experimental.constrained.sitofp.v1f16.v1i16(<1 x i16>, metadata, metadata)
%evec = call <1 x half> @llvm.experimental.constrained.sitofp.v1f16.v1i16(<1 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x half> @llvm.experimental.constrained.uitofp.v1f16.v1i16(<1 x i16>, metadata, metadata)
%evec = call <1 x half> @llvm.experimental.constrained.uitofp.v1f16.v1i16(<1 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x float> @llvm.experimental.constrained.sitofp.v1f32.v1i16(<1 x i16>, metadata, metadata)
%evec = call <1 x float> @llvm.experimental.constrained.sitofp.v1f32.v1i16(<1 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x float> @llvm.experimental.constrained.uitofp.v1f32.v1i16(<1 x i16>, metadata, metadata)
%evec = call <1 x float> @llvm.experimental.constrained.uitofp.v1f32.v1i16(<1 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x double> @llvm.experimental.constrained.sitofp.v1f64.v1i16(<1 x i16>, metadata, metadata)
%evec = call <1 x double> @llvm.experimental.constrained.sitofp.v1f64.v1i16(<1 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x double> @llvm.experimental.constrained.uitofp.v1f64.v1i16(<1 x i16>, metadata, metadata)
%evec = call <1 x double> @llvm.experimental.constrained.uitofp.v1f64.v1i16(<1 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x half> @llvm.experimental.constrained.sitofp.v2f16.v2i16(<2 x i16>, metadata, metadata)
%evec = call <2 x half> @llvm.experimental.constrained.sitofp.v2f16.v2i16(<2 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x half> @llvm.experimental.constrained.uitofp.v2f16.v2i16(<2 x i16>, metadata, metadata)
%evec = call <2 x half> @llvm.experimental.constrained.uitofp.v2f16.v2i16(<2 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x float> @llvm.experimental.constrained.sitofp.v2f32.v2i16(<2 x i16>, metadata, metadata)
%evec = call <2 x float> @llvm.experimental.constrained.sitofp.v2f32.v2i16(<2 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x float> @llvm.experimental.constrained.uitofp.v2f32.v2i16(<2 x i16>, metadata, metadata)
%evec = call <2 x float> @llvm.experimental.constrained.uitofp.v2f32.v2i16(<2 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x double> @llvm.experimental.constrained.sitofp.v2f64.v2i16(<2 x i16>, metadata, metadata)
%evec = call <2 x double> @llvm.experimental.constrained.sitofp.v2f64.v2i16(<2 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x double> @llvm.experimental.constrained.uitofp.v2f64.v2i16(<2 x i16>, metadata, metadata)
%evec = call <2 x double> @llvm.experimental.constrained.uitofp.v2f64.v2i16(<2 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x half> @llvm.experimental.constrained.sitofp.v4f16.v4i16(<4 x i16>, metadata, metadata)
%evec = call <4 x half> @llvm.experimental.constrained.sitofp.v4f16.v4i16(<4 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x half> @llvm.experimental.constrained.uitofp.v4f16.v4i16(<4 x i16>, metadata, metadata)
%evec = call <4 x half> @llvm.experimental.constrained.uitofp.v4f16.v4i16(<4 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x float> @llvm.experimental.constrained.sitofp.v4f32.v4i16(<4 x i16>, metadata, metadata)
%evec = call <4 x float> @llvm.experimental.constrained.sitofp.v4f32.v4i16(<4 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x float> @llvm.experimental.constrained.uitofp.v4f32.v4i16(<4 x i16>, metadata, metadata)
%evec = call <4 x float> @llvm.experimental.constrained.uitofp.v4f32.v4i16(<4 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x double> @llvm.experimental.constrained.sitofp.v4f64.v4i16(<4 x i16>, metadata, metadata)
%evec = call <4 x double> @llvm.experimental.constrained.sitofp.v4f64.v4i16(<4 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x double> @llvm.experimental.constrained.uitofp.v4f64.v4i16(<4 x i16>, metadata, metadata)
%evec = call <4 x double> @llvm.experimental.constrained.uitofp.v4f64.v4i16(<4 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i16(<8 x i16>, metadata, metadata)
%evec = call <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i16(<8 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i16(<8 x i16>, metadata, metadata)
%evec = call <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i16(<8 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x float> @llvm.experimental.constrained.sitofp.v8f32.v8i16(<8 x i16>, metadata, metadata)
%evec = call <8 x float> @llvm.experimental.constrained.sitofp.v8f32.v8i16(<8 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x float> @llvm.experimental.constrained.uitofp.v8f32.v8i16(<8 x i16>, metadata, metadata)
%evec = call <8 x float> @llvm.experimental.constrained.uitofp.v8f32.v8i16(<8 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x double> @llvm.experimental.constrained.sitofp.v8f64.v8i16(<8 x i16>, metadata, metadata)
%evec = call <8 x double> @llvm.experimental.constrained.sitofp.v8f64.v8i16(<8 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x double> @llvm.experimental.constrained.uitofp.v8f64.v8i16(<8 x i16>, metadata, metadata)
%evec = call <8 x double> @llvm.experimental.constrained.uitofp.v8f64.v8i16(<8 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i16(<16 x i16>, metadata, metadata)
%evec = call <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i16(<16 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i16(<16 x i16>, metadata, metadata)
%evec = call <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i16(<16 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x float> @llvm.experimental.constrained.sitofp.v16f32.v16i16(<16 x i16>, metadata, metadata)
%evec = call <16 x float> @llvm.experimental.constrained.sitofp.v16f32.v16i16(<16 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x float> @llvm.experimental.constrained.uitofp.v16f32.v16i16(<16 x i16>, metadata, metadata)
%evec = call <16 x float> @llvm.experimental.constrained.uitofp.v16f32.v16i16(<16 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <32 x half> @llvm.experimental.constrained.sitofp.v32f16.v32i16(<32 x i16>, metadata, metadata)
%evec = call <32 x half> @llvm.experimental.constrained.sitofp.v32f16.v32i16(<32 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <32 x half> @llvm.experimental.constrained.uitofp.v32f16.v32i16(<32 x i16>, metadata, metadata)
%evec = call <32 x half> @llvm.experimental.constrained.uitofp.v32f16.v32i16(<32 x i16> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x half> @llvm.experimental.constrained.sitofp.v1f16.v1i32(<1 x i32>, metadata, metadata)
%evec = call <1 x half> @llvm.experimental.constrained.sitofp.v1f16.v1i32(<1 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x half> @llvm.experimental.constrained.uitofp.v1f16.v1i32(<1 x i32>, metadata, metadata)
%evec = call <1 x half> @llvm.experimental.constrained.uitofp.v1f16.v1i32(<1 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x float> @llvm.experimental.constrained.sitofp.v1f32.v1i32(<1 x i32>, metadata, metadata)
%evec = call <1 x float> @llvm.experimental.constrained.sitofp.v1f32.v1i32(<1 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x float> @llvm.experimental.constrained.uitofp.v1f32.v1i32(<1 x i32>, metadata, metadata)
%evec = call <1 x float> @llvm.experimental.constrained.uitofp.v1f32.v1i32(<1 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x double> @llvm.experimental.constrained.sitofp.v1f64.v1i32(<1 x i32>, metadata, metadata)
%evec = call <1 x double> @llvm.experimental.constrained.sitofp.v1f64.v1i32(<1 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x double> @llvm.experimental.constrained.uitofp.v1f64.v1i32(<1 x i32>, metadata, metadata)
%evec = call <1 x double> @llvm.experimental.constrained.uitofp.v1f64.v1i32(<1 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x half> @llvm.experimental.constrained.sitofp.v2f16.v2i32(<2 x i32>, metadata, metadata)
%evec = call <2 x half> @llvm.experimental.constrained.sitofp.v2f16.v2i32(<2 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x half> @llvm.experimental.constrained.uitofp.v2f16.v2i32(<2 x i32>, metadata, metadata)
%evec = call <2 x half> @llvm.experimental.constrained.uitofp.v2f16.v2i32(<2 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x float> @llvm.experimental.constrained.sitofp.v2f32.v2i32(<2 x i32>, metadata, metadata)
%evec = call <2 x float> @llvm.experimental.constrained.sitofp.v2f32.v2i32(<2 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x float> @llvm.experimental.constrained.uitofp.v2f32.v2i32(<2 x i32>, metadata, metadata)
%evec = call <2 x float> @llvm.experimental.constrained.uitofp.v2f32.v2i32(<2 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x double> @llvm.experimental.constrained.sitofp.v2f64.v2i32(<2 x i32>, metadata, metadata)
%evec = call <2 x double> @llvm.experimental.constrained.sitofp.v2f64.v2i32(<2 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x double> @llvm.experimental.constrained.uitofp.v2f64.v2i32(<2 x i32>, metadata, metadata)
%evec = call <2 x double> @llvm.experimental.constrained.uitofp.v2f64.v2i32(<2 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x half> @llvm.experimental.constrained.sitofp.v4f16.v4i32(<4 x i32>, metadata, metadata)
%evec = call <4 x half> @llvm.experimental.constrained.sitofp.v4f16.v4i32(<4 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x half> @llvm.experimental.constrained.uitofp.v4f16.v4i32(<4 x i32>, metadata, metadata)
%evec = call <4 x half> @llvm.experimental.constrained.uitofp.v4f16.v4i32(<4 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x float> @llvm.experimental.constrained.sitofp.v4f32.v4i32(<4 x i32>, metadata, metadata)
%evec = call <4 x float> @llvm.experimental.constrained.sitofp.v4f32.v4i32(<4 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x float> @llvm.experimental.constrained.uitofp.v4f32.v4i32(<4 x i32>, metadata, metadata)
%evec = call <4 x float> @llvm.experimental.constrained.uitofp.v4f32.v4i32(<4 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x double> @llvm.experimental.constrained.sitofp.v4f64.v4i32(<4 x i32>, metadata, metadata)
%evec = call <4 x double> @llvm.experimental.constrained.sitofp.v4f64.v4i32(<4 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x double> @llvm.experimental.constrained.uitofp.v4f64.v4i32(<4 x i32>, metadata, metadata)
%evec = call <4 x double> @llvm.experimental.constrained.uitofp.v4f64.v4i32(<4 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i32(<8 x i32>, metadata, metadata)
%evec = call <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i32(<8 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i32(<8 x i32>, metadata, metadata)
%evec = call <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i32(<8 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x float> @llvm.experimental.constrained.sitofp.v8f32.v8i32(<8 x i32>, metadata, metadata)
%evec = call <8 x float> @llvm.experimental.constrained.sitofp.v8f32.v8i32(<8 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x float> @llvm.experimental.constrained.uitofp.v8f32.v8i32(<8 x i32>, metadata, metadata)
%evec = call <8 x float> @llvm.experimental.constrained.uitofp.v8f32.v8i32(<8 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x double> @llvm.experimental.constrained.sitofp.v8f64.v8i32(<8 x i32>, metadata, metadata)
%evec = call <8 x double> @llvm.experimental.constrained.sitofp.v8f64.v8i32(<8 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x double> @llvm.experimental.constrained.uitofp.v8f64.v8i32(<8 x i32>, metadata, metadata)
%evec = call <8 x double> @llvm.experimental.constrained.uitofp.v8f64.v8i32(<8 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i32(<16 x i32>, metadata, metadata)
%evec = call <16 x half> @llvm.experimental.constrained.sitofp.v16f16.v16i32(<16 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i32(<16 x i32>, metadata, metadata)
%evec = call <16 x half> @llvm.experimental.constrained.uitofp.v16f16.v16i32(<16 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x float> @llvm.experimental.constrained.sitofp.v16f32.v16i32(<16 x i32>, metadata, metadata)
%evec = call <16 x float> @llvm.experimental.constrained.sitofp.v16f32.v16i32(<16 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <16 x float> @llvm.experimental.constrained.uitofp.v16f32.v16i32(<16 x i32>, metadata, metadata)
%evec = call <16 x float> @llvm.experimental.constrained.uitofp.v16f32.v16i32(<16 x i32> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x half> @llvm.experimental.constrained.sitofp.v1f16.v1i64(<1 x i64>, metadata, metadata)
%evec = call <1 x half> @llvm.experimental.constrained.sitofp.v1f16.v1i64(<1 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x half> @llvm.experimental.constrained.uitofp.v1f16.v1i64(<1 x i64>, metadata, metadata)
%evec = call <1 x half> @llvm.experimental.constrained.uitofp.v1f16.v1i64(<1 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x float> @llvm.experimental.constrained.sitofp.v1f32.v1i64(<1 x i64>, metadata, metadata)
%evec = call <1 x float> @llvm.experimental.constrained.sitofp.v1f32.v1i64(<1 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x float> @llvm.experimental.constrained.uitofp.v1f32.v1i64(<1 x i64>, metadata, metadata)
%evec = call <1 x float> @llvm.experimental.constrained.uitofp.v1f32.v1i64(<1 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x double> @llvm.experimental.constrained.sitofp.v1f64.v1i64(<1 x i64>, metadata, metadata)
%evec = call <1 x double> @llvm.experimental.constrained.sitofp.v1f64.v1i64(<1 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <1 x double> @llvm.experimental.constrained.uitofp.v1f64.v1i64(<1 x i64>, metadata, metadata)
%evec = call <1 x double> @llvm.experimental.constrained.uitofp.v1f64.v1i64(<1 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x half> @llvm.experimental.constrained.sitofp.v2f16.v2i64(<2 x i64>, metadata, metadata)
%evec = call <2 x half> @llvm.experimental.constrained.sitofp.v2f16.v2i64(<2 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x half> @llvm.experimental.constrained.uitofp.v2f16.v2i64(<2 x i64>, metadata, metadata)
%evec = call <2 x half> @llvm.experimental.constrained.uitofp.v2f16.v2i64(<2 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x float> @llvm.experimental.constrained.sitofp.v2f32.v2i64(<2 x i64>, metadata, metadata)
%evec = call <2 x float> @llvm.experimental.constrained.sitofp.v2f32.v2i64(<2 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x float> @llvm.experimental.constrained.uitofp.v2f32.v2i64(<2 x i64>, metadata, metadata)
%evec = call <2 x float> @llvm.experimental.constrained.uitofp.v2f32.v2i64(<2 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x double> @llvm.experimental.constrained.sitofp.v2f64.v2i64(<2 x i64>, metadata, metadata)
%evec = call <2 x double> @llvm.experimental.constrained.sitofp.v2f64.v2i64(<2 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <2 x double> @llvm.experimental.constrained.uitofp.v2f64.v2i64(<2 x i64>, metadata, metadata)
%evec = call <2 x double> @llvm.experimental.constrained.uitofp.v2f64.v2i64(<2 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x half> @llvm.experimental.constrained.sitofp.v4f16.v4i64(<4 x i64>, metadata, metadata)
%evec = call <4 x half> @llvm.experimental.constrained.sitofp.v4f16.v4i64(<4 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x half> @llvm.experimental.constrained.uitofp.v4f16.v4i64(<4 x i64>, metadata, metadata)
%evec = call <4 x half> @llvm.experimental.constrained.uitofp.v4f16.v4i64(<4 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x float> @llvm.experimental.constrained.sitofp.v4f32.v4i64(<4 x i64>, metadata, metadata)
%evec = call <4 x float> @llvm.experimental.constrained.sitofp.v4f32.v4i64(<4 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x float> @llvm.experimental.constrained.uitofp.v4f32.v4i64(<4 x i64>, metadata, metadata)
%evec = call <4 x float> @llvm.experimental.constrained.uitofp.v4f32.v4i64(<4 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x double> @llvm.experimental.constrained.sitofp.v4f64.v4i64(<4 x i64>, metadata, metadata)
%evec = call <4 x double> @llvm.experimental.constrained.sitofp.v4f64.v4i64(<4 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <4 x double> @llvm.experimental.constrained.uitofp.v4f64.v4i64(<4 x i64>, metadata, metadata)
%evec = call <4 x double> @llvm.experimental.constrained.uitofp.v4f64.v4i64(<4 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i64(<8 x i64>, metadata, metadata)
%evec = call <8 x half> @llvm.experimental.constrained.sitofp.v8f16.v8i64(<8 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i64(<8 x i64>, metadata, metadata)
%evec = call <8 x half> @llvm.experimental.constrained.uitofp.v8f16.v8i64(<8 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x float> @llvm.experimental.constrained.sitofp.v8f32.v8i64(<8 x i64>, metadata, metadata)
%evec = call <8 x float> @llvm.experimental.constrained.sitofp.v8f32.v8i64(<8 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x float> @llvm.experimental.constrained.uitofp.v8f32.v8i64(<8 x i64>, metadata, metadata)
%evec = call <8 x float> @llvm.experimental.constrained.uitofp.v8f32.v8i64(<8 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x double> @llvm.experimental.constrained.sitofp.v8f64.v8i64(<8 x i64>, metadata, metadata)
%evec = call <8 x double> @llvm.experimental.constrained.sitofp.v8f64.v8i64(<8 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")
declare <8 x double> @llvm.experimental.constrained.uitofp.v8f64.v8i64(<8 x i64>, metadata, metadata)
%evec = call <8 x double> @llvm.experimental.constrained.uitofp.v8f64.v8i64(<8 x i64> %va, metadata !"round.dynamic", metadata !"fpexcept.strict")