; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 3
; RUN: opt -safe-stack -S -mtriple=i386-pc-linux-gnu < %s -o - | FileCheck %s --check-prefix=I386
; RUN: opt -safe-stack -S -mtriple=x86_64-pc-linux-gnu < %s -o - | FileCheck %s --check-prefix=X86-64
; RUN: opt -passes=safe-stack -S -mtriple=i386-pc-linux-gnu < %s -o - | FileCheck %s --check-prefix=I386
; RUN: opt -passes=safe-stack -S -mtriple=x86_64-pc-linux-gnu < %s -o - | FileCheck %s --check-prefix=X86-64

%struct.__jmp_buf_tag = type { [8 x i64], i32, %struct.__sigset_t }
%struct.__sigset_t = type { [16 x i64] }

@.str = private unnamed_addr constant [4 x i8] c"%s\0A\00", align 1
@buf = internal global [1 x %struct.__jmp_buf_tag] zeroinitializer, align 16

; setjmp/longjmp test with dynamically sized array.
; Requires protector: the dynamic alloca must move to the unsafe stack, and the
; unsafe stack pointer must be saved/restored around the returns_twice call.
define i32 @foo(i32 %size) nounwind uwtable safestack {
; I386-LABEL: define i32 @foo(
; I386-SAME: i32 [[SIZE:%.*]]) #[[ATTR0:[0-9]+]] {
; I386-NEXT:  entry:
; I386-NEXT:    [[UNSAFE_STACK_PTR:%.*]] = load ptr, ptr @__safestack_unsafe_stack_ptr, align 4
; I386-NEXT:    [[UNSAFE_STACK_DYNAMIC_PTR:%.*]] = alloca ptr, align 4
; I386-NEXT:    store ptr [[UNSAFE_STACK_PTR]], ptr [[UNSAFE_STACK_DYNAMIC_PTR]], align 4
; I386-NEXT:    [[TMP0:%.*]] = mul i32 [[SIZE]], 4
; I386-NEXT:    [[TMP1:%.*]] = load ptr, ptr @__safestack_unsafe_stack_ptr, align 4
; I386-NEXT:    [[TMP2:%.*]] = ptrtoint ptr [[TMP1]] to i32
; I386-NEXT:    [[TMP3:%.*]] = sub i32 [[TMP2]], [[TMP0]]
; I386-NEXT:    [[TMP4:%.*]] = and i32 [[TMP3]], -16
; I386-NEXT:    [[A:%.*]] = inttoptr i32 [[TMP4]] to ptr
; I386-NEXT:    store ptr [[A]], ptr @__safestack_unsafe_stack_ptr, align 4
; I386-NEXT:    store ptr [[A]], ptr [[UNSAFE_STACK_DYNAMIC_PTR]], align 4
; I386-NEXT:    [[CALL:%.*]] = call i32 @_setjmp(ptr @buf) #[[ATTR1:[0-9]+]]
; I386-NEXT:    [[TMP5:%.*]] = load ptr, ptr [[UNSAFE_STACK_DYNAMIC_PTR]], align 4
; I386-NEXT:    store ptr [[TMP5]], ptr @__safestack_unsafe_stack_ptr, align 4
; I386-NEXT:    call void @funcall(ptr [[A]])
; I386-NEXT:    store ptr [[UNSAFE_STACK_PTR]], ptr @__safestack_unsafe_stack_ptr, align 4
; I386-NEXT:    ret i32 0
;
; X86-64-LABEL: define i32 @foo(
; X86-64-SAME: i32 [[SIZE:%.*]]) #[[ATTR0:[0-9]+]] {
; X86-64-NEXT:  entry:
; X86-64-NEXT:    [[UNSAFE_STACK_PTR:%.*]] = load ptr, ptr @__safestack_unsafe_stack_ptr, align 8
; X86-64-NEXT:    [[UNSAFE_STACK_DYNAMIC_PTR:%.*]] = alloca ptr, align 8
; X86-64-NEXT:    store ptr [[UNSAFE_STACK_PTR]], ptr [[UNSAFE_STACK_DYNAMIC_PTR]], align 8
; X86-64-NEXT:    [[TMP0:%.*]] = zext i32 [[SIZE]] to i64
; X86-64-NEXT:    [[TMP1:%.*]] = mul i64 [[TMP0]], 4
; X86-64-NEXT:    [[TMP2:%.*]] = load ptr, ptr @__safestack_unsafe_stack_ptr, align 8
; X86-64-NEXT:    [[TMP3:%.*]] = ptrtoint ptr [[TMP2]] to i64
; X86-64-NEXT:    [[TMP4:%.*]] = sub i64 [[TMP3]], [[TMP1]]
; X86-64-NEXT:    [[TMP5:%.*]] = and i64 [[TMP4]], -16
; X86-64-NEXT:    [[A:%.*]] = inttoptr i64 [[TMP5]] to ptr
; X86-64-NEXT:    store ptr [[A]], ptr @__safestack_unsafe_stack_ptr, align 8
; X86-64-NEXT:    store ptr [[A]], ptr [[UNSAFE_STACK_DYNAMIC_PTR]], align 8
; X86-64-NEXT:    [[CALL:%.*]] = call i32 @_setjmp(ptr @buf) #[[ATTR1:[0-9]+]]
; X86-64-NEXT:    [[TMP6:%.*]] = load ptr, ptr [[UNSAFE_STACK_DYNAMIC_PTR]], align 8
; X86-64-NEXT:    store ptr [[TMP6]], ptr @__safestack_unsafe_stack_ptr, align 8
; X86-64-NEXT:    call void @funcall(ptr [[A]])
; X86-64-NEXT:    store ptr [[UNSAFE_STACK_PTR]], ptr @__safestack_unsafe_stack_ptr, align 8
; X86-64-NEXT:    ret i32 0
;
entry:

  %a = alloca i32, i32 %size

  %call = call i32 @_setjmp(ptr @buf) returns_twice

  call void @funcall(ptr %a)
  ret i32 0
}

declare i32 @_setjmp(ptr)
declare void @funcall(ptr)