# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -debugify-and-strip-all-safe -mtriple aarch64 -O0 -run-pass=aarch64-prelegalizer-combiner --aarch64prelegalizercombiner-only-enable-rule="load_or_combine" -global-isel -verify-machineinstrs %s -o - | FileCheck %s --check-prefix=NOT_STRICT
# RUN: llc -debugify-and-strip-all-safe -mattr=+strict-align -mtriple aarch64 -O0 -run-pass=aarch64-prelegalizer-combiner --aarch64prelegalizercombiner-only-enable-rule="load_or_combine" -global-isel -verify-machineinstrs %s -o - | FileCheck %s --check-prefix=STRICT

# REQUIRES: asserts

# Check that the load-or combine respects alignment requirements.
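#
# The load_or_combine rule folds a pattern of the form
# (zextload lo) | ((zextload hi) << 16) into a single wider load, but only
# when the wider access is allowed on the target.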
...
---
name:            misaligned
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0, $x1
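    ; The half-loads are only 2-byte aligned. Without strict-align, AArch64
    ; permits the unaligned 4-byte access, so the loads are merged; with
    ; strict-align, the combine must leave the pattern alone.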
    ; NOT_STRICT-LABEL: name: misaligned
    ; NOT_STRICT: liveins: $x0, $x1
    ; NOT_STRICT: %ptr:_(p0) = COPY $x1
    ; NOT_STRICT: %full_load:_(s32) = G_LOAD %ptr(p0) :: (load (s32), align 2)
    ; NOT_STRICT: $w1 = COPY %full_load(s32)
    ; NOT_STRICT: RET_ReallyLR implicit $w1
    ; STRICT-LABEL: name: misaligned
    ; STRICT: liveins: $x0, $x1
    ; STRICT: %cst_1:_(s64) = G_CONSTANT i64 1
    ; STRICT: %cst_16:_(s32) = G_CONSTANT i32 16
    ; STRICT: %ptr:_(p0) = COPY $x1
    ; STRICT: %ptr_elt_1:_(p0) = G_PTR_ADD %ptr, %cst_1(s64)
    ; STRICT: %low_half:_(s32) = G_ZEXTLOAD %ptr(p0) :: (load (s16))
    ; STRICT: %elt1:_(s32) = G_ZEXTLOAD %ptr_elt_1(p0) :: (load (s16))
    ; STRICT: %high_half:_(s32) = nuw G_SHL %elt1, %cst_16(s32)
    ; STRICT: %full_load:_(s32) = G_OR %low_half, %high_half
    ; STRICT: $w1 = COPY %full_load(s32)
    ; STRICT: RET_ReallyLR implicit $w1
    %cst_1:_(s64) = G_CONSTANT i64 1
    %cst_16:_(s32) = G_CONSTANT i32 16

    %ptr:_(p0) = COPY $x1
    %ptr_elt_1:_(p0) = G_PTR_ADD %ptr, %cst_1(s64)

    %low_half:_(s32) = G_ZEXTLOAD %ptr(p0) :: (load (s16), align 2)
    %elt1:_(s32) = G_ZEXTLOAD %ptr_elt_1(p0) :: (load (s16), align 2)
    %high_half:_(s32) = nuw G_SHL %elt1, %cst_16(s32)

    %full_load:_(s32) = G_OR %low_half, %high_half
    $w1 = COPY %full_load(s32)
    RET_ReallyLR implicit $w1

...
---
name:            aligned
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0, $x1
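    ; Here the half-loads are 4-byte aligned, so the merged 4-byte load is
    ; legal even under strict-align, and the combine fires in both modes.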
    ; NOT_STRICT-LABEL: name: aligned
    ; NOT_STRICT: liveins: $x0, $x1
    ; NOT_STRICT: %ptr:_(p0) = COPY $x1
    ; NOT_STRICT: %full_load:_(s32) = G_LOAD %ptr(p0) :: (load (s32))
    ; NOT_STRICT: $w1 = COPY %full_load(s32)
    ; NOT_STRICT: RET_ReallyLR implicit $w1
    ; STRICT-LABEL: name: aligned
    ; STRICT: liveins: $x0, $x1
    ; STRICT: %ptr:_(p0) = COPY $x1
    ; STRICT: %full_load:_(s32) = G_LOAD %ptr(p0) :: (load (s32))
    ; STRICT: $w1 = COPY %full_load(s32)
    ; STRICT: RET_ReallyLR implicit $w1
    %cst_1:_(s64) = G_CONSTANT i64 1
    %cst_16:_(s32) = G_CONSTANT i32 16

    %ptr:_(p0) = COPY $x1
    %ptr_elt_1:_(p0) = G_PTR_ADD %ptr, %cst_1(s64)

    %low_half:_(s32) = G_ZEXTLOAD %ptr(p0) :: (load (s16), align 4)
    %elt1:_(s32) = G_ZEXTLOAD %ptr_elt_1(p0) :: (load (s16), align 4)
    %high_half:_(s32) = nuw G_SHL %elt1, %cst_16(s32)

    %full_load:_(s32) = G_OR %low_half, %high_half
    $w1 = COPY %full_load(s32)
    RET_ReallyLR implicit $w1