; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -S --mtriple=loongarch32 -mattr=+d --passes=atomic-expand %s | FileCheck %s --check-prefix=LA32
; RUN: opt -S --mtriple=loongarch64 -mattr=+d --passes=atomic-expand %s | FileCheck %s --check-prefix=LA64

;; Acquire loads up to the native word size are expanded to a monotonic
;; load followed by an acquire fence on both LA32 and LA64.

define i8 @load_acquire_i8(ptr %ptr) {
; LA32-LABEL: @load_acquire_i8(
; LA32-NEXT:    [[VAL:%.*]] = load atomic i8, ptr [[PTR:%.*]] monotonic, align 1
; LA32-NEXT:    fence acquire
; LA32-NEXT:    ret i8 [[VAL]]
;
; LA64-LABEL: @load_acquire_i8(
; LA64-NEXT:    [[VAL:%.*]] = load atomic i8, ptr [[PTR:%.*]] monotonic, align 1
; LA64-NEXT:    fence acquire
; LA64-NEXT:    ret i8 [[VAL]]
;
  %val = load atomic i8, ptr %ptr acquire, align 1
  ret i8 %val
}

define i16 @load_acquire_i16(ptr %ptr) {
; LA32-LABEL: @load_acquire_i16(
; LA32-NEXT:    [[VAL:%.*]] = load atomic i16, ptr [[PTR:%.*]] monotonic, align 2
; LA32-NEXT:    fence acquire
; LA32-NEXT:    ret i16 [[VAL]]
;
; LA64-LABEL: @load_acquire_i16(
; LA64-NEXT:    [[VAL:%.*]] = load atomic i16, ptr [[PTR:%.*]] monotonic, align 2
; LA64-NEXT:    fence acquire
; LA64-NEXT:    ret i16 [[VAL]]
;
  %val = load atomic i16, ptr %ptr acquire, align 2
  ret i16 %val
}

define i32 @load_acquire_i32(ptr %ptr) {
; LA32-LABEL: @load_acquire_i32(
; LA32-NEXT:    [[VAL:%.*]] = load atomic i32, ptr [[PTR:%.*]] monotonic, align 4
; LA32-NEXT:    fence acquire
; LA32-NEXT:    ret i32 [[VAL]]
;
; LA64-LABEL: @load_acquire_i32(
; LA64-NEXT:    [[VAL:%.*]] = load atomic i32, ptr [[PTR:%.*]] monotonic, align 4
; LA64-NEXT:    fence acquire
; LA64-NEXT:    ret i32 [[VAL]]
;
  %val = load atomic i32, ptr %ptr acquire, align 4
  ret i32 %val
}

;; An i64 acquire load exceeds LA32's native width, so it becomes a libcall
;; to __atomic_load_8 (ordering operand 2 == acquire); LA64 keeps the
;; fence-based expansion.

define i64 @load_acquire_i64(ptr %ptr) {
; LA32-LABEL: @load_acquire_i64(
; LA32-NEXT:    [[TMP1:%.*]] = call i64 @__atomic_load_8(ptr [[PTR:%.*]], i32 2)
; LA32-NEXT:    ret i64 [[TMP1]]
;
; LA64-LABEL: @load_acquire_i64(
; LA64-NEXT:    [[VAL:%.*]] = load atomic i64, ptr [[PTR:%.*]] monotonic, align 8
; LA64-NEXT:    fence acquire
; LA64-NEXT:    ret i64 [[VAL]]
;
  %val = load atomic i64, ptr %ptr acquire, align 8
  ret i64 %val
}

;; Release stores up to i16 are expanded to a release fence followed by a
;; monotonic store on both targets.

define void @store_release_i8(ptr %ptr, i8 signext %v) {
; LA32-LABEL: @store_release_i8(
; LA32-NEXT:    fence release
; LA32-NEXT:    store atomic i8 [[V:%.*]], ptr [[PTR:%.*]] monotonic, align 1
; LA32-NEXT:    ret void
;
; LA64-LABEL: @store_release_i8(
; LA64-NEXT:    fence release
; LA64-NEXT:    store atomic i8 [[V:%.*]], ptr [[PTR:%.*]] monotonic, align 1
; LA64-NEXT:    ret void
;
  store atomic i8 %v, ptr %ptr release, align 1
  ret void
}

define void @store_release_i16(ptr %ptr, i16 signext %v) {
; LA32-LABEL: @store_release_i16(
; LA32-NEXT:    fence release
; LA32-NEXT:    store atomic i16 [[V:%.*]], ptr [[PTR:%.*]] monotonic, align 2
; LA32-NEXT:    ret void
;
; LA64-LABEL: @store_release_i16(
; LA64-NEXT:    fence release
; LA64-NEXT:    store atomic i16 [[V:%.*]], ptr [[PTR:%.*]] monotonic, align 2
; LA64-NEXT:    ret void
;
  store atomic i16 %v, ptr %ptr release, align 2
  ret void
}

;; On LA64 an i32 release store is kept as-is (the target handles it
;; natively), while LA32 still uses the fence-based expansion.

define void @store_release_i32(ptr %ptr, i32 signext %v) {
; LA32-LABEL: @store_release_i32(
; LA32-NEXT:    fence release
; LA32-NEXT:    store atomic i32 [[V:%.*]], ptr [[PTR:%.*]] monotonic, align 4
; LA32-NEXT:    ret void
;
; LA64-LABEL: @store_release_i32(
; LA64-NEXT:    store atomic i32 [[V:%.*]], ptr [[PTR:%.*]] release, align 4
; LA64-NEXT:    ret void
;
  store atomic i32 %v, ptr %ptr release, align 4
  ret void
}

;; An i64 release store exceeds LA32's native width, so it becomes a libcall
;; to __atomic_store_8 (ordering operand 3 == release); LA64 keeps the
;; native atomic store.

define void @store_release_i64(ptr %ptr, i64 %v) {
; LA32-LABEL: @store_release_i64(
; LA32-NEXT:    call void @__atomic_store_8(ptr [[PTR:%.*]], i64 [[V:%.*]], i32 3)
; LA32-NEXT:    ret void
;
; LA64-LABEL: @store_release_i64(
; LA64-NEXT:    store atomic i64 [[V:%.*]], ptr [[PTR:%.*]] release, align 8
; LA64-NEXT:    ret void
;
  store atomic i64 %v, ptr %ptr release, align 8
  ret void
}