; RUN: opt < %s -aarch64-stack-tagging -stack-tagging-use-stack-safety=0 -S -o - | FileCheck %s --check-prefixes=CHECK
; RUN: opt < %s -aarch64-stack-tagging -stack-tagging-use-stack-safety=0 -S -stack-tagging-record-stack-history=instr -o - | FileCheck %s --check-prefixes=INSTR
; RUN: llc -mattr=+mte -stack-tagging-use-stack-safety=0 -stack-tagging-record-stack-history=instr %s -o - | FileCheck %s --check-prefixes=ASMINSTR
; Note: the third RUN line was previously `RUN llc` (missing colon), which lit
; silently skips, so the ASMINSTR checks were never actually executed.

target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
target triple = "aarch64--linux-android35"

declare void @use32(ptr)

; A single tagged local: stack tagging should pad it to a 16-byte granule,
; derive its tagged address via tagp from the irg.sp base, and (with
; -stack-tagging-record-stack-history=instr) emit the inline stack-history
; ring-buffer update (PC + tagged FP stored through the TLS slot at
; TPIDR_EL0 - 24, with size-in-pages wraparound of the buffer pointer).
define void @OneVar() sanitize_memtag {
entry:
  %x = alloca i32, align 4
  call void @use32(ptr %x)
  ret void
}

; CHECK-LABEL: define void @OneVar(
; CHECK: [[BASE:%.*]] = call ptr @llvm.aarch64.irg.sp(i64 0)
; CHECK: [[X:%.*]] = alloca { i32, [12 x i8] }, align 16
; CHECK: [[TX:%.*]] = call ptr @llvm.aarch64.tagp.{{.*}}(ptr [[X]], ptr [[BASE]], i64 0)
; CHECK: ret void

; INSTR-LABEL: define void @OneVar(
; INSTR: [[BASE:%.*]] = call ptr @llvm.aarch64.irg.sp(i64 0)
; INSTR: [[TLS:%.*]] = call ptr @llvm.thread.pointer()
; INSTR: [[TLS_SLOT:%.*]] = getelementptr i8, ptr [[TLS]], i32 -24
; INSTR: [[TLS_VALUE:%.*]] = load i64, ptr [[TLS_SLOT]], align 8
; INSTR: [[FP:%.*]] = call ptr @llvm.frameaddress.p0(i32 0)
; INSTR: [[FP_INT:%.*]] = ptrtoint ptr [[FP]] to i64
; INSTR: [[BASE_INT:%.*]] = ptrtoint ptr [[BASE]] to i64
; INSTR: [[BASE_TAG:%.*]] = and i64 [[BASE_INT]], 1080863910568919040
; INSTR: [[TAGGED_FP:%.*]] = or i64 [[FP_INT]], [[BASE_TAG]]
; INSTR: [[PC:%.*]] = call i64 @llvm.read_register.i64(metadata !0)
; INSTR: [[TLS_VALUE_PTR:%.*]] = inttoptr i64 [[TLS_VALUE]] to ptr
; INSTR: store i64 [[PC]], ptr [[TLS_VALUE_PTR]], align 8
; INSTR: [[SECOND_SLOT:%.*]] = getelementptr i64, ptr [[TLS_VALUE_PTR]], i64 1
; INSTR: store i64 [[TAGGED_FP]], ptr [[SECOND_SLOT]], align 8
; INSTR: [[SIZE_IN_PAGES:%.*]] = ashr i64 [[TLS_VALUE]], 56
; INSTR: [[WRAP_MASK_INTERMEDIARY:%.*]] = shl nuw nsw i64 [[SIZE_IN_PAGES]], 12
; INSTR: [[WRAP_MASK:%.*]] = xor i64 [[WRAP_MASK_INTERMEDIARY]], -1
; INSTR: [[NEXT_TLS_VALUE_BEFORE_WRAP:%.*]] = add i64 [[TLS_VALUE]], 16
; INSTR: [[NEXT_TLS_VALUE:%.*]] = and i64 [[NEXT_TLS_VALUE_BEFORE_WRAP]], [[WRAP_MASK]]
; INSTR: store i64 [[NEXT_TLS_VALUE]], ptr [[TLS_SLOT]], align 8
; INSTR: [[X:%.*]] = alloca { i32, [12 x i8] }, align 16
; INSTR: [[TX:%.*]] = call ptr @llvm.aarch64.tagp.{{.*}}(ptr [[X]], ptr [[BASE]], i64 0)
; INSTR: [[PC:!.*]] = !{!"pc"}

; ASMINSTR-LABEL: OneVar:
; ASMINSTR: mrs [[TLS:x.*]], TPIDR_EL0
; ASMINSTR: irg [[BASE:x.*]], sp
; ASMINSTR: adr [[PC:x.*]], #0
; ASMINSTR: ldur [[TLS_SLOT:x.*]], [[[TLS]], #-24]
; ASMINSTR: and [[SP_TAG:x.*]], [[BASE]], #0xf00000000000000
; ASMINSTR: orr [[TAGGED_FP:x.*]], x29, [[SP_TAG]]
; ASMINSTR: asr [[TLS_SIZE:x.*]], [[TLS_SLOT]], #56
; ASMINSTR: add [[NEXT_TLS_VALUE_BEFORE_WRAP:x.*]], [[TLS_SLOT]], #16
; ASMINSTR: stp [[PC]], [[TAGGED_FP]], [[[TLS_SLOT]]]
; ASMINSTR: bic [[NEXT_TLS_VALUE:x.*]], [[NEXT_TLS_VALUE_BEFORE_WRAP]], [[TLS_SIZE]], lsl #12
; ASMINSTR: stur [[NEXT_TLS_VALUE]], [[[TLS]], #-24]
; ASMINSTR: stg [[BASE]], [[[BASE]]]