// RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp -fopenmp-version=50 -x c -emit-llvm %s -o - | FileCheck %s
// RUN: %clang_cc1 -fopenmp -fopenmp-version=50 -x c -triple x86_64-apple-darwin10 -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp -fopenmp-version=50 -x c -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s

// RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp-simd -fopenmp-version=50 -x c -emit-llvm %s -o - | FileCheck --check-prefix SIMD-ONLY0 %s
// RUN: %clang_cc1 -fopenmp-simd -fopenmp-version=50 -x c -triple x86_64-apple-darwin10 -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp-simd -fopenmp-version=50 -x c -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck --check-prefix SIMD-ONLY0 %s
// SIMD-ONLY0-NOT: {{__kmpc|__tgt}}
// expected-no-diagnostics

// Codegen test for '#pragma omp task' with 'depend(depobj: ...)',
// 'detach(...)' and iterator-modified dependency clauses (OpenMP 5.0).
#ifndef HEADER
#define HEADER

typedef void *omp_depend_t;
typedef __UINTPTR_TYPE__ omp_event_handle_t;

void foo(void);

// CHECK-LABEL: @main
int main(void) {
  omp_depend_t d, x;
  omp_event_handle_t evt;
  int a, *b;
  // CHECK: [[D_ADDR:%.+]] = alloca ptr,
  // CHECK: [[X_ADDR:%.+]] = alloca ptr,
  // CHECK: [[EVT_ADDR:%.+]] = alloca i64,
  // CHECK: [[A_ADDR:%.+]] = alloca i32,
  // CHECK: [[DEPOBJ_SIZE_ADDR:%.+]] = alloca i64,
  // CHECK: [[DEPOBJ_SIZE_ADDR1:%.+]] = alloca i64,
  // CHECK: = alloca i64,
  // CHECK: [[DEP_COUNTER_ADDR:%.+]] = alloca i64,
  // CHECK: [[GTID:%.+]] = call i32 @__kmpc_global_thread_num(
  // CHECK: [[ALLOC:%.+]] = call ptr @__kmpc_omp_task_alloc(ptr @{{.+}}, i32 [[GTID]], i32 65, i64 48, i64 0, ptr [[TASK_ENTRY:@.+]])
  // CHECK: [[EVT_VAL:%.+]] = call ptr @__kmpc_task_allow_completion_event(ptr @{{.+}}, i32 [[GTID]], ptr [[ALLOC]])
  // CHECK: [[CAST_EVT_VAL:%.+]] = ptrtoint ptr [[EVT_VAL]] to i64
  // CHECK: store i64 [[CAST_EVT_VAL]], ptr [[EVT_ADDR]], align 8
  // CHECK: [[D_DEP:%.+]] = load ptr, ptr [[D_ADDR]], align 8
  // CHECK: [[D_DEP_BASE:%.+]] = getelementptr %struct.kmp_depend_info, ptr [[D_DEP]], i{{.+}} -1
  // CHECK: [[D_DEP_BASE_SIZE:%.+]] = getelementptr inbounds nuw %struct.kmp_depend_info, ptr [[D_DEP_BASE]], i{{.+}} 0, i{{.+}} 0
  // CHECK: [[SIZE1:%.+]] = load i64, ptr [[D_DEP_BASE_SIZE]], align 8
  // CHECK-DAG: store i64 0, ptr [[DEPOBJ_SIZE_ADDR]], align 8
  // CHECK: [[SZ:%.+]] = load i64, ptr [[DEPOBJ_SIZE_ADDR]], align 8
  // CHECK: [[SIZE:%.+]] = add nuw i64 [[SZ]], [[SIZE1]]
  // CHECK: store i64 [[SIZE]], ptr [[DEPOBJ_SIZE_ADDR]], align 8
  // CHECK: [[X_DEP:%.+]] = load ptr, ptr [[X_ADDR]], align 8
  // CHECK: [[X_DEP_BASE:%.+]] = getelementptr %struct.kmp_depend_info, ptr [[X_DEP]], i{{.+}} -1
  // CHECK: [[X_DEP_BASE_SIZE:%.+]] = getelementptr inbounds nuw %struct.kmp_depend_info, ptr [[X_DEP_BASE]], i{{.+}} 0, i{{.+}} 0
  // CHECK: [[SIZE2:%.+]] = load i64, ptr [[X_DEP_BASE_SIZE]], align 8
  // CHECK-DAG: store i64 0, ptr [[DEPOBJ_SIZE_ADDR1]], align 8
  // CHECK: [[SZ:%.+]] = load i64, ptr [[DEPOBJ_SIZE_ADDR1]], align 8
  // CHECK: [[SIZE3:%.+]] = add nuw i64 [[SZ]], [[SIZE2]]
  // CHECK: store i64 [[SIZE3]], ptr [[DEPOBJ_SIZE_ADDR1]], align 8
  // CHECK: [[SZ:%.+]] = load i64, ptr [[DEPOBJ_SIZE_ADDR]], align 8
  // CHECK: [[SZ1:%.+]] = load i64, ptr [[DEPOBJ_SIZE_ADDR1]], align 8
  // CHECK: [[SIZE1:%.+]] = add nuw i64 0, [[SZ]]
  // CHECK: [[SIZE2:%.+]] = add nuw i64 [[SIZE1]], [[SZ1]]
  // CHECK: [[SIZE:%.+]] = add nuw i64 [[SIZE2]], 2
  // CHECK: [[SV:%.+]] = call ptr @llvm.stacksave.p0()
  // CHECK: store ptr [[SV]], ptr [[SV_ADDR:%.+]], align 8
  // CHECK: [[VLA:%.+]] = alloca %struct.kmp_depend_info, i64 [[SIZE]],
  // CHECK: [[SIZE32:%.+]] = trunc i64 [[SIZE]] to i32
  // CHECK: [[A_ADDR_CAST:%.+]] = ptrtoint ptr [[A_ADDR]] to i64
  // CHECK: [[VLA0:%.+]] = getelementptr %struct.kmp_depend_info, ptr [[VLA]], i64 0
  // CHECK: [[BASE_ADDR:%.+]] = getelementptr inbounds nuw %struct.kmp_depend_info, ptr [[VLA0]], i{{.+}} 0, i{{.+}} 0
  // CHECK: store i64 [[A_ADDR_CAST]], ptr [[BASE_ADDR]], align 16
  // CHECK: [[SIZE_ADDR:%.+]] = getelementptr inbounds nuw %struct.kmp_depend_info, ptr [[VLA0]], i{{.+}} 0, i{{.+}} 1
  // CHECK: store i64 4, ptr [[SIZE_ADDR]], align 8
  // CHECK: [[FLAGS_ADDR:%.+]] = getelementptr inbounds nuw %struct.kmp_depend_info, ptr [[VLA0]], i{{.+}} 0, i{{.+}} 2
  // CHECK: store i8 1, ptr [[FLAGS_ADDR]], align 1
  // CHECK: [[A:%.+]] = load i32, ptr [[A_ADDR]], align 4
  // CHECK: [[A_CAST:%.+]] = sext i32 [[A]] to i64
  // CHECK: [[SZ1:%.+]] = mul nuw i64 24, [[A_CAST]]
  // CHECK: [[A:%.+]] = load i32, ptr [[A_ADDR]], align 4
  // CHECK: [[A_CAST:%.+]] = sext i32 [[A]] to i64
  // CHECK: [[SZ:%.+]] = mul nuw i64 [[SZ1]], [[A_CAST]]
  // CHECK: [[B_ADDR_CAST:%.+]] = ptrtoint ptr %{{.+}} to i64
  // CHECK: [[VLA1:%.+]] = getelementptr %struct.kmp_depend_info, ptr [[VLA]], i64 1
  // CHECK: [[BASE_ADDR:%.+]] = getelementptr inbounds nuw %struct.kmp_depend_info, ptr [[VLA1]], i{{.+}} 0, i{{.+}} 0
  // CHECK: store i64 [[B_ADDR_CAST]], ptr [[BASE_ADDR]], align 8
  // CHECK: [[SIZE_ADDR:%.+]] = getelementptr inbounds nuw %struct.kmp_depend_info, ptr [[VLA1]], i{{.+}} 0, i{{.+}} 1
  // CHECK: store i64 [[SZ]], ptr [[SIZE_ADDR]], align 8
  // CHECK: [[FLAGS_ADDR:%.+]] = getelementptr inbounds nuw %struct.kmp_depend_info, ptr [[VLA1]], i{{.+}} 0, i{{.+}} 2
  // CHECK: store i8 1, ptr [[FLAGS_ADDR]], align 8
  // CHECK: store i64 2, ptr [[DEP_COUNTER_ADDR]], align 8
  // CHECK: [[BC:%.+]] = load ptr, ptr [[D_ADDR]], align 8
  // CHECK: [[PREV:%.+]] = getelementptr %struct.kmp_depend_info, ptr [[BC]], i64 -1
  // CHECK: [[SIZE_ADDR:%.+]] = getelementptr inbounds nuw %struct.kmp_depend_info, ptr [[PREV]], i{{.+}} 0, i{{.+}} 0
  // CHECK: [[SIZE:%.+]] = load i64, ptr [[SIZE_ADDR]], align 8
  // CHECK: [[BYTES:%.+]] = mul nuw i64 24, [[SIZE]]
  // CHECK: [[POS:%.+]] = load i64, ptr [[DEP_COUNTER_ADDR]], align 8
  // CHECK: [[VLA_D:%.+]] = getelementptr %struct.kmp_depend_info, ptr [[VLA]], i64 [[POS]]
  // CHECK: call void @llvm.memcpy.p0.p0.i64(ptr align {{.+}} [[VLA_D]], ptr align {{.+}} [[BC]], i64 [[BYTES]], i1 false)
  // CHECK: [[ADD:%.+]] = add nuw i64 [[POS]], [[SIZE]]
  // CHECK: store i64 [[ADD]], ptr [[DEP_COUNTER_ADDR]], align 8
  // CHECK: [[BC:%.+]] = load ptr, ptr [[X_ADDR]], align 8
  // CHECK: [[PREV:%.+]] = getelementptr %struct.kmp_depend_info, ptr [[BC]], i64 -1
  // CHECK: [[SIZE_ADDR:%.+]] = getelementptr inbounds nuw %struct.kmp_depend_info, ptr [[PREV]], i{{.+}} 0, i{{.+}} 0
  // CHECK: [[SIZE:%.+]] = load i64, ptr [[SIZE_ADDR]], align 8
  // CHECK: [[BYTES:%.+]] = mul nuw i64 24, [[SIZE]]
  // CHECK: [[POS:%.+]] = load i64, ptr [[DEP_COUNTER_ADDR]], align 8
  // CHECK: [[VLA_X:%.+]] = getelementptr %struct.kmp_depend_info, ptr [[VLA]], i64 [[POS]]
  // CHECK: call void @llvm.memcpy.p0.p0.i64(ptr align {{.+}} [[VLA_X]], ptr align {{.+}} [[BC]], i64 [[BYTES]], i1 false)
  // CHECK: [[ADD:%.+]] = add nuw i64 [[POS]], [[SIZE]]
  // CHECK: store i64 [[ADD]], ptr [[DEP_COUNTER_ADDR]], align 8
  // CHECK: call i32 @__kmpc_omp_task_with_deps(ptr @{{.+}}, i32 [[GTID]], ptr [[ALLOC]], i32 [[SIZE32]], ptr [[VLA]], i32 0, ptr null)
  // CHECK: [[SV:%.+]] = load ptr, ptr [[SV_ADDR]], align 8
  // CHECK: call void @llvm.stackrestore.p0(ptr [[SV]])
#pragma omp task depend(in: a, ([3][a][a])&b) depend(depobj: d, x) detach(evt)
  {
#pragma omp taskgroup
    {
#pragma omp task
      foo();
    }
  }
  // CHECK: ret i32 0
  return 0;
}
// CHECK: call void @__kmpc_taskgroup(
// CHECK: call ptr @__kmpc_omp_task_alloc(
// CHECK: call i32 @__kmpc_omp_task(
// CHECK: call void @__kmpc_end_taskgroup(

// CHECK-LABEL: @bar
void bar(void) {
  int **a;
  // CHECK: call void @__kmpc_for_static_init_4(
#pragma omp for
  for (int i = 0; i < 10; ++i)
  // CHECK: [[BUF:%.+]] = call ptr @__kmpc_omp_task_alloc(ptr @{{.+}}, i32 %{{.+}}, i32 1, i64 48,
  // CHECK: [[PRIVS:%.+]] = getelementptr inbounds nuw [[TT_WITH_PRIVS:%.+]], ptr [[BUF]], i32 0, i32 1
  // CHECK: [[I_PRIV:%.+]] = getelementptr inbounds nuw %{{.+}}, ptr [[PRIVS]], i32 0, i32 0
  // CHECK: [[I:%.+]] = load i32, ptr [[I_ADDR:%.+]],
  // CHECK: store i32 %{{.+}}, ptr [[I_PRIV]],

  // NELEMS = 1 * ((i - 0 + 2 - 1) / 2);
  // CHECK: [[END:%.+]] = load i32, ptr [[I_ADDR]],
  // CHECK: [[EB_SUB:%.+]] = sub i32 [[END]], 0
  // CHECK: [[EB_SUB_2_ADD:%.+]] = add i32 [[EB_SUB]], 2
  // CHECK: [[EB_SUB_2_ADD_1_SUB:%.+]] = sub i32 [[EB_SUB_2_ADD]], 1
  // CHECK: [[EB_SUB_2_ADD_1_SUB_2_DIV:%.+]] = udiv i32 [[EB_SUB_2_ADD_1_SUB]], 2
  // CHECK: [[ELEMS:%.+]] = zext i32 [[EB_SUB_2_ADD_1_SUB_2_DIV]] to i64
  // CHECK: [[ELEMS2:%.+]] = mul nuw i64 [[ELEMS]], 1
  // CHECK: [[NELEMS:%.+]] = mul nuw i64 [[ELEMS2]], 1

  // ITERATOR_TOTAL = NELEMS + 0;
  // CHECK: [[ITERATOR_TOTAL:%.+]] = add nuw i64 0, [[NELEMS]]
  // NELEMS = ITERATOR_TOTAL + non-iterator-deps (=0)
  // CHECK: [[TOTAL:%.+]] = add nuw i64 [[ITERATOR_TOTAL]], 0

  // %struct.kmp_depend_info DEPS[TOTAL];
  // CHECK: [[DEPS:%.+]] = alloca %struct.kmp_depend_info, i64 [[TOTAL]],
  // CHECK: [[NDEPS:%.+]] = trunc i64 [[TOTAL]] to i32

  // i64 DEP_COUNTER = 0;
  // CHECK: store i64 0, ptr [[DEP_COUNTER_ADDR:%.+]],

  // NELEMS = ((i - 0 + 2 - 1) / 2);
  // CHECK: [[END:%.+]] = load i32, ptr [[I_ADDR]],
  // CHECK: [[EB_SUB:%.+]] = sub i32 [[END]], 0
  // CHECK: [[EB_SUB_2_ADD:%.+]] = add i32 [[EB_SUB]], 2
  // CHECK: [[EB_SUB_2_ADD_1_SUB:%.+]] = sub i32 [[EB_SUB_2_ADD]], 1
  // CHECK: [[ELEMS:%.+]] = udiv i32 [[EB_SUB_2_ADD_1_SUB]], 2

  // i32 COUNTER = 0;
  // CHECK: store i32 0, ptr [[COUNTER_ADDR:%.+]],
  // CHECK: br label %[[CONT:.+]]

  // Loop.
  // CHECK: [[CONT]]:
  // CHECK: [[COUNTER:%.+]] = load i32, ptr [[COUNTER_ADDR]],
  // CHECK: [[CMP:%.+]] = icmp ult i32 [[COUNTER]], [[ELEMS]]
  // CHECK: br i1 [[CMP]], label %[[BODY:.+]], label %[[EXIT:.+]]

  // CHECK: [[BODY]]:

  // k = 0 + 2*COUNTER;
  // CHECK: [[COUNTER:%.+]] = load i32, ptr [[COUNTER_ADDR]],
  // CHECK: [[C2_MUL:%.+]] = mul i32 [[COUNTER]], 2
  // CHECK: [[C2_MUL_0_ADD:%.+]] = add i32 0, [[C2_MUL]]
  // CHECK: store i32 [[C2_MUL_0_ADD]], ptr [[K_ADDR:%.+]],

  // &a[k][i]
  // CHECK: [[A:%.+]] = load ptr, ptr [[A_ADDR:%.+]],
  // CHECK: [[K:%.+]] = load i32, ptr [[K_ADDR]],
  // CHECK: [[IDX:%.+]] = zext i32 [[K]] to i64
  // CHECK: [[AK_ADDR:%.+]] = getelementptr inbounds nuw ptr, ptr [[A]], i64 [[IDX]]
  // CHECK: [[AK:%.+]] = load ptr, ptr [[AK_ADDR]],
  // CHECK: [[I:%.+]] = load i32, ptr [[I_ADDR]],
  // CHECK: [[IDX:%.+]] = sext i32 [[I]] to i64
  // CHECK: [[AKI_ADDR:%.+]] = getelementptr inbounds i32, ptr [[AK]], i64 [[IDX]]
  // CHECK: [[AKI_INT:%.+]] = ptrtoint ptr [[AKI_ADDR]] to i64

  // DEPS[DEP_COUNTER].base_addr = &a[k][i];
  // CHECK: [[DEP_COUNTER:%.+]] = load i64, ptr [[DEP_COUNTER_ADDR]],
  // CHECK: [[DEPS_DC:%.+]] = getelementptr %struct.kmp_depend_info, ptr [[DEPS]], i64 [[DEP_COUNTER]]
  // CHECK: [[DEPS_DC_BASE_ADDR:%.+]] = getelementptr inbounds nuw %struct.kmp_depend_info, ptr [[DEPS_DC]], i{{.+}} 0, i{{.+}} 0
  // CHECK: store i64 [[AKI_INT]], ptr [[DEPS_DC_BASE_ADDR]],

  // DEPS[DEP_COUNTER].size = sizeof(a[k][i]);
  // CHECK: [[DEPS_DC_SIZE:%.+]] = getelementptr inbounds nuw %struct.kmp_depend_info, ptr [[DEPS_DC]], i{{.+}} 0, i{{.+}} 1
  // CHECK: store i64 4, ptr [[DEPS_DC_SIZE]],

  // DEPS[DEP_COUNTER].flags = in;
  // CHECK: [[DEPS_DC_FLAGS:%.+]] = getelementptr inbounds nuw %struct.kmp_depend_info, ptr [[DEPS_DC]], i{{.+}} 0, i{{.+}} 2
  // CHECK: store i8 1, ptr [[DEPS_DC_FLAGS]],

  // DEP_COUNTER = DEP_COUNTER + 1;
  // CHECK: [[DEP_COUNTER:%.+]] = load i64, ptr [[DEP_COUNTER_ADDR]],
  // CHECK: [[INC:%.+]] = add nuw i64 [[DEP_COUNTER]], 1
  // CHECK: store i64 [[INC]], ptr [[DEP_COUNTER_ADDR]],

  // COUNTER = COUNTER + 1;
  // CHECK: [[COUNTER:%.+]] = load i32, ptr [[COUNTER_ADDR]],
  // CHECK: [[INC:%.+]] = add i32 [[COUNTER]], 1
  // CHECK: store i32 [[INC]], ptr [[COUNTER_ADDR]],
  // CHECK: br label %[[CONT]]

  // CHECK: [[EXIT]]:
  // CHECK: = call i32 @__kmpc_omp_task_with_deps(ptr @{{.+}}, i32 %{{.+}}, ptr [[BUF]], i32 [[NDEPS]], ptr [[DEPS]], i32 0, ptr null)
#pragma omp task depend(iterator(unsigned k=0:i:2), in: a[k][i])
    ++i;
}
#endif