; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --filter-out "\b(sp)\b" --filter "^\s*(ld|st[^r]|swp|cas|bl|add|and|eor|orn|orr|sub|mvn|sxt|cmp|ccmp|csel|dmb)"
; The base test file was generated by ./llvm/test/CodeGen/AArch64/Atomics/generate-tests.py
; RUN: llc %s -o - -verify-machineinstrs -mtriple=aarch64 -mattr=+outline-atomics -O0 | FileCheck %s --check-prefixes=CHECK,-O0
; RUN: llc %s -o - -verify-machineinstrs -mtriple=aarch64 -mattr=+outline-atomics -O1 | FileCheck %s --check-prefixes=CHECK,-O1
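; Summary of the lowerings exercised by the CHECK lines below:
; - Aligned i8/i16/i32/i64 atomic loads lower to plain ldr*/ldar* instructions.
; - Aligned i128 atomic loads call the __aarch64_cas16_* outline-atomics helpers
;   at -O0 and use an ldxp/stxp (or ldaxp/stlxp) loop at -O1.
; - Unaligned atomic loads wider than i8 fall back to the __atomic_load libcall.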
define dso_local i8 @load_atomic_i8_aligned_unordered(ptr %ptr) {
; CHECK-LABEL: load_atomic_i8_aligned_unordered:
; CHECK:    ldrb w0, [x0]
    %r = load atomic i8, ptr %ptr unordered, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_aligned_unordered_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i8_aligned_unordered_const:
; CHECK:    ldrb w0, [x0]
    %r = load atomic i8, ptr %ptr unordered, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_aligned_monotonic(ptr %ptr) {
; CHECK-LABEL: load_atomic_i8_aligned_monotonic:
; CHECK:    ldrb w0, [x0]
    %r = load atomic i8, ptr %ptr monotonic, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_aligned_monotonic_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i8_aligned_monotonic_const:
; CHECK:    ldrb w0, [x0]
    %r = load atomic i8, ptr %ptr monotonic, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_aligned_acquire(ptr %ptr) {
; CHECK-LABEL: load_atomic_i8_aligned_acquire:
; CHECK:    ldarb w0, [x0]
    %r = load atomic i8, ptr %ptr acquire, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_aligned_acquire_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i8_aligned_acquire_const:
; CHECK:    ldarb w0, [x0]
    %r = load atomic i8, ptr %ptr acquire, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_aligned_seq_cst(ptr %ptr) {
; CHECK-LABEL: load_atomic_i8_aligned_seq_cst:
; CHECK:    ldarb w0, [x0]
    %r = load atomic i8, ptr %ptr seq_cst, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_aligned_seq_cst_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i8_aligned_seq_cst_const:
; CHECK:    ldarb w0, [x0]
    %r = load atomic i8, ptr %ptr seq_cst, align 1
    ret i8 %r
}

define dso_local i16 @load_atomic_i16_aligned_unordered(ptr %ptr) {
; CHECK-LABEL: load_atomic_i16_aligned_unordered:
; CHECK:    ldrh w0, [x0]
    %r = load atomic i16, ptr %ptr unordered, align 2
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_aligned_unordered_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i16_aligned_unordered_const:
; CHECK:    ldrh w0, [x0]
    %r = load atomic i16, ptr %ptr unordered, align 2
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_aligned_monotonic(ptr %ptr) {
; CHECK-LABEL: load_atomic_i16_aligned_monotonic:
; CHECK:    ldrh w0, [x0]
    %r = load atomic i16, ptr %ptr monotonic, align 2
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_aligned_monotonic_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i16_aligned_monotonic_const:
; CHECK:    ldrh w0, [x0]
    %r = load atomic i16, ptr %ptr monotonic, align 2
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_aligned_acquire(ptr %ptr) {
; CHECK-LABEL: load_atomic_i16_aligned_acquire:
; CHECK:    ldarh w0, [x0]
    %r = load atomic i16, ptr %ptr acquire, align 2
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_aligned_acquire_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i16_aligned_acquire_const:
; CHECK:    ldarh w0, [x0]
    %r = load atomic i16, ptr %ptr acquire, align 2
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_aligned_seq_cst(ptr %ptr) {
; CHECK-LABEL: load_atomic_i16_aligned_seq_cst:
; CHECK:    ldarh w0, [x0]
    %r = load atomic i16, ptr %ptr seq_cst, align 2
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_aligned_seq_cst_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i16_aligned_seq_cst_const:
; CHECK:    ldarh w0, [x0]
    %r = load atomic i16, ptr %ptr seq_cst, align 2
    ret i16 %r
}

define dso_local i32 @load_atomic_i32_aligned_unordered(ptr %ptr) {
; CHECK-LABEL: load_atomic_i32_aligned_unordered:
; CHECK:    ldr w0, [x0]
    %r = load atomic i32, ptr %ptr unordered, align 4
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_aligned_unordered_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i32_aligned_unordered_const:
; CHECK:    ldr w0, [x0]
    %r = load atomic i32, ptr %ptr unordered, align 4
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_aligned_monotonic(ptr %ptr) {
; CHECK-LABEL: load_atomic_i32_aligned_monotonic:
; CHECK:    ldr w0, [x0]
    %r = load atomic i32, ptr %ptr monotonic, align 4
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_aligned_monotonic_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i32_aligned_monotonic_const:
; CHECK:    ldr w0, [x0]
    %r = load atomic i32, ptr %ptr monotonic, align 4
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_aligned_acquire(ptr %ptr) {
; CHECK-LABEL: load_atomic_i32_aligned_acquire:
; CHECK:    ldar w0, [x0]
    %r = load atomic i32, ptr %ptr acquire, align 4
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_aligned_acquire_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i32_aligned_acquire_const:
; CHECK:    ldar w0, [x0]
    %r = load atomic i32, ptr %ptr acquire, align 4
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_aligned_seq_cst(ptr %ptr) {
; CHECK-LABEL: load_atomic_i32_aligned_seq_cst:
; CHECK:    ldar w0, [x0]
    %r = load atomic i32, ptr %ptr seq_cst, align 4
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_aligned_seq_cst_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i32_aligned_seq_cst_const:
; CHECK:    ldar w0, [x0]
    %r = load atomic i32, ptr %ptr seq_cst, align 4
    ret i32 %r
}

define dso_local i64 @load_atomic_i64_aligned_unordered(ptr %ptr) {
; CHECK-LABEL: load_atomic_i64_aligned_unordered:
; CHECK:    ldr x0, [x0]
    %r = load atomic i64, ptr %ptr unordered, align 8
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_aligned_unordered_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i64_aligned_unordered_const:
; CHECK:    ldr x0, [x0]
    %r = load atomic i64, ptr %ptr unordered, align 8
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_aligned_monotonic(ptr %ptr) {
; CHECK-LABEL: load_atomic_i64_aligned_monotonic:
; CHECK:    ldr x0, [x0]
    %r = load atomic i64, ptr %ptr monotonic, align 8
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_aligned_monotonic_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i64_aligned_monotonic_const:
; CHECK:    ldr x0, [x0]
    %r = load atomic i64, ptr %ptr monotonic, align 8
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_aligned_acquire(ptr %ptr) {
; CHECK-LABEL: load_atomic_i64_aligned_acquire:
; CHECK:    ldar x0, [x0]
    %r = load atomic i64, ptr %ptr acquire, align 8
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_aligned_acquire_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i64_aligned_acquire_const:
; CHECK:    ldar x0, [x0]
    %r = load atomic i64, ptr %ptr acquire, align 8
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_aligned_seq_cst(ptr %ptr) {
; CHECK-LABEL: load_atomic_i64_aligned_seq_cst:
; CHECK:    ldar x0, [x0]
    %r = load atomic i64, ptr %ptr seq_cst, align 8
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_aligned_seq_cst_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i64_aligned_seq_cst_const:
; CHECK:    ldar x0, [x0]
    %r = load atomic i64, ptr %ptr seq_cst, align 8
    ret i64 %r
}

define dso_local i128 @load_atomic_i128_aligned_unordered(ptr %ptr) {
; -O0-LABEL: load_atomic_i128_aligned_unordered:
; -O0:    bl __aarch64_cas16_relax
;
; -O1-LABEL: load_atomic_i128_aligned_unordered:
; -O1:    ldxp x0, x1, [x8]
; -O1:    stxp w9, x0, x1, [x8]
    %r = load atomic i128, ptr %ptr unordered, align 16
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_aligned_unordered_const(ptr readonly %ptr) {
; -O0-LABEL: load_atomic_i128_aligned_unordered_const:
; -O0:    bl __aarch64_cas16_relax
;
; -O1-LABEL: load_atomic_i128_aligned_unordered_const:
; -O1:    ldxp x0, x1, [x8]
; -O1:    stxp w9, x0, x1, [x8]
    %r = load atomic i128, ptr %ptr unordered, align 16
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_aligned_monotonic(ptr %ptr) {
; -O0-LABEL: load_atomic_i128_aligned_monotonic:
; -O0:    bl __aarch64_cas16_relax
;
; -O1-LABEL: load_atomic_i128_aligned_monotonic:
; -O1:    ldxp x0, x1, [x8]
; -O1:    stxp w9, x0, x1, [x8]
    %r = load atomic i128, ptr %ptr monotonic, align 16
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_aligned_monotonic_const(ptr readonly %ptr) {
; -O0-LABEL: load_atomic_i128_aligned_monotonic_const:
; -O0:    bl __aarch64_cas16_relax
;
; -O1-LABEL: load_atomic_i128_aligned_monotonic_const:
; -O1:    ldxp x0, x1, [x8]
; -O1:    stxp w9, x0, x1, [x8]
    %r = load atomic i128, ptr %ptr monotonic, align 16
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_aligned_acquire(ptr %ptr) {
; -O0-LABEL: load_atomic_i128_aligned_acquire:
; -O0:    bl __aarch64_cas16_acq
;
; -O1-LABEL: load_atomic_i128_aligned_acquire:
; -O1:    ldaxp x0, x1, [x8]
; -O1:    stxp w9, x0, x1, [x8]
    %r = load atomic i128, ptr %ptr acquire, align 16
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_aligned_acquire_const(ptr readonly %ptr) {
; -O0-LABEL: load_atomic_i128_aligned_acquire_const:
; -O0:    bl __aarch64_cas16_acq
;
; -O1-LABEL: load_atomic_i128_aligned_acquire_const:
; -O1:    ldaxp x0, x1, [x8]
; -O1:    stxp w9, x0, x1, [x8]
    %r = load atomic i128, ptr %ptr acquire, align 16
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_aligned_seq_cst(ptr %ptr) {
; -O0-LABEL: load_atomic_i128_aligned_seq_cst:
; -O0:    bl __aarch64_cas16_acq_rel
;
; -O1-LABEL: load_atomic_i128_aligned_seq_cst:
; -O1:    ldaxp x0, x1, [x8]
; -O1:    stlxp w9, x0, x1, [x8]
    %r = load atomic i128, ptr %ptr seq_cst, align 16
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_aligned_seq_cst_const(ptr readonly %ptr) {
; -O0-LABEL: load_atomic_i128_aligned_seq_cst_const:
; -O0:    bl __aarch64_cas16_acq_rel
;
; -O1-LABEL: load_atomic_i128_aligned_seq_cst_const:
; -O1:    ldaxp x0, x1, [x8]
; -O1:    stlxp w9, x0, x1, [x8]
    %r = load atomic i128, ptr %ptr seq_cst, align 16
    ret i128 %r
}

define dso_local i8 @load_atomic_i8_unaligned_unordered(ptr %ptr) {
; CHECK-LABEL: load_atomic_i8_unaligned_unordered:
; CHECK:    ldrb w0, [x0]
    %r = load atomic i8, ptr %ptr unordered, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_unaligned_unordered_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i8_unaligned_unordered_const:
; CHECK:    ldrb w0, [x0]
    %r = load atomic i8, ptr %ptr unordered, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_unaligned_monotonic(ptr %ptr) {
; CHECK-LABEL: load_atomic_i8_unaligned_monotonic:
; CHECK:    ldrb w0, [x0]
    %r = load atomic i8, ptr %ptr monotonic, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_unaligned_monotonic_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i8_unaligned_monotonic_const:
; CHECK:    ldrb w0, [x0]
    %r = load atomic i8, ptr %ptr monotonic, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_unaligned_acquire(ptr %ptr) {
; CHECK-LABEL: load_atomic_i8_unaligned_acquire:
; CHECK:    ldarb w0, [x0]
    %r = load atomic i8, ptr %ptr acquire, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_unaligned_acquire_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i8_unaligned_acquire_const:
; CHECK:    ldarb w0, [x0]
    %r = load atomic i8, ptr %ptr acquire, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_unaligned_seq_cst(ptr %ptr) {
; CHECK-LABEL: load_atomic_i8_unaligned_seq_cst:
; CHECK:    ldarb w0, [x0]
    %r = load atomic i8, ptr %ptr seq_cst, align 1
    ret i8 %r
}

define dso_local i8 @load_atomic_i8_unaligned_seq_cst_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i8_unaligned_seq_cst_const:
; CHECK:    ldarb w0, [x0]
    %r = load atomic i8, ptr %ptr seq_cst, align 1
    ret i8 %r
}

define dso_local i16 @load_atomic_i16_unaligned_unordered(ptr %ptr) {
; CHECK-LABEL: load_atomic_i16_unaligned_unordered:
; CHECK:    bl __atomic_load
    %r = load atomic i16, ptr %ptr unordered, align 1
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_unaligned_unordered_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i16_unaligned_unordered_const:
; CHECK:    bl __atomic_load
    %r = load atomic i16, ptr %ptr unordered, align 1
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_unaligned_monotonic(ptr %ptr) {
; CHECK-LABEL: load_atomic_i16_unaligned_monotonic:
; CHECK:    bl __atomic_load
    %r = load atomic i16, ptr %ptr monotonic, align 1
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_unaligned_monotonic_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i16_unaligned_monotonic_const:
; CHECK:    bl __atomic_load
    %r = load atomic i16, ptr %ptr monotonic, align 1
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_unaligned_acquire(ptr %ptr) {
; CHECK-LABEL: load_atomic_i16_unaligned_acquire:
; CHECK:    bl __atomic_load
    %r = load atomic i16, ptr %ptr acquire, align 1
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_unaligned_acquire_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i16_unaligned_acquire_const:
; CHECK:    bl __atomic_load
    %r = load atomic i16, ptr %ptr acquire, align 1
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_unaligned_seq_cst(ptr %ptr) {
; CHECK-LABEL: load_atomic_i16_unaligned_seq_cst:
; CHECK:    bl __atomic_load
    %r = load atomic i16, ptr %ptr seq_cst, align 1
    ret i16 %r
}

define dso_local i16 @load_atomic_i16_unaligned_seq_cst_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i16_unaligned_seq_cst_const:
; CHECK:    bl __atomic_load
    %r = load atomic i16, ptr %ptr seq_cst, align 1
    ret i16 %r
}

define dso_local i32 @load_atomic_i32_unaligned_unordered(ptr %ptr) {
; CHECK-LABEL: load_atomic_i32_unaligned_unordered:
; CHECK:    bl __atomic_load
    %r = load atomic i32, ptr %ptr unordered, align 1
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_unaligned_unordered_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i32_unaligned_unordered_const:
; CHECK:    bl __atomic_load
    %r = load atomic i32, ptr %ptr unordered, align 1
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_unaligned_monotonic(ptr %ptr) {
; CHECK-LABEL: load_atomic_i32_unaligned_monotonic:
; CHECK:    bl __atomic_load
    %r = load atomic i32, ptr %ptr monotonic, align 1
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_unaligned_monotonic_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i32_unaligned_monotonic_const:
; CHECK:    bl __atomic_load
    %r = load atomic i32, ptr %ptr monotonic, align 1
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_unaligned_acquire(ptr %ptr) {
; CHECK-LABEL: load_atomic_i32_unaligned_acquire:
; CHECK:    bl __atomic_load
    %r = load atomic i32, ptr %ptr acquire, align 1
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_unaligned_acquire_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i32_unaligned_acquire_const:
; CHECK:    bl __atomic_load
    %r = load atomic i32, ptr %ptr acquire, align 1
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_unaligned_seq_cst(ptr %ptr) {
; CHECK-LABEL: load_atomic_i32_unaligned_seq_cst:
; CHECK:    bl __atomic_load
    %r = load atomic i32, ptr %ptr seq_cst, align 1
    ret i32 %r
}

define dso_local i32 @load_atomic_i32_unaligned_seq_cst_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i32_unaligned_seq_cst_const:
; CHECK:    bl __atomic_load
    %r = load atomic i32, ptr %ptr seq_cst, align 1
    ret i32 %r
}

define dso_local i64 @load_atomic_i64_unaligned_unordered(ptr %ptr) {
; CHECK-LABEL: load_atomic_i64_unaligned_unordered:
; CHECK:    bl __atomic_load
    %r = load atomic i64, ptr %ptr unordered, align 1
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_unaligned_unordered_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i64_unaligned_unordered_const:
; CHECK:    bl __atomic_load
    %r = load atomic i64, ptr %ptr unordered, align 1
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_unaligned_monotonic(ptr %ptr) {
; CHECK-LABEL: load_atomic_i64_unaligned_monotonic:
; CHECK:    bl __atomic_load
    %r = load atomic i64, ptr %ptr monotonic, align 1
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_unaligned_monotonic_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i64_unaligned_monotonic_const:
; CHECK:    bl __atomic_load
    %r = load atomic i64, ptr %ptr monotonic, align 1
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_unaligned_acquire(ptr %ptr) {
; CHECK-LABEL: load_atomic_i64_unaligned_acquire:
; CHECK:    bl __atomic_load
    %r = load atomic i64, ptr %ptr acquire, align 1
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_unaligned_acquire_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i64_unaligned_acquire_const:
; CHECK:    bl __atomic_load
    %r = load atomic i64, ptr %ptr acquire, align 1
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_unaligned_seq_cst(ptr %ptr) {
; CHECK-LABEL: load_atomic_i64_unaligned_seq_cst:
; CHECK:    bl __atomic_load
    %r = load atomic i64, ptr %ptr seq_cst, align 1
    ret i64 %r
}

define dso_local i64 @load_atomic_i64_unaligned_seq_cst_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i64_unaligned_seq_cst_const:
; CHECK:    bl __atomic_load
    %r = load atomic i64, ptr %ptr seq_cst, align 1
    ret i64 %r
}

define dso_local i128 @load_atomic_i128_unaligned_unordered(ptr %ptr) {
; CHECK-LABEL: load_atomic_i128_unaligned_unordered:
; CHECK:    bl __atomic_load
    %r = load atomic i128, ptr %ptr unordered, align 1
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_unaligned_unordered_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i128_unaligned_unordered_const:
; CHECK:    bl __atomic_load
    %r = load atomic i128, ptr %ptr unordered, align 1
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_unaligned_monotonic(ptr %ptr) {
; CHECK-LABEL: load_atomic_i128_unaligned_monotonic:
; CHECK:    bl __atomic_load
    %r = load atomic i128, ptr %ptr monotonic, align 1
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_unaligned_monotonic_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i128_unaligned_monotonic_const:
; CHECK:    bl __atomic_load
    %r = load atomic i128, ptr %ptr monotonic, align 1
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_unaligned_acquire(ptr %ptr) {
; CHECK-LABEL: load_atomic_i128_unaligned_acquire:
; CHECK:    bl __atomic_load
    %r = load atomic i128, ptr %ptr acquire, align 1
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_unaligned_acquire_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i128_unaligned_acquire_const:
; CHECK:    bl __atomic_load
    %r = load atomic i128, ptr %ptr acquire, align 1
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_unaligned_seq_cst(ptr %ptr) {
; CHECK-LABEL: load_atomic_i128_unaligned_seq_cst:
; CHECK:    bl __atomic_load
    %r = load atomic i128, ptr %ptr seq_cst, align 1
    ret i128 %r
}

define dso_local i128 @load_atomic_i128_unaligned_seq_cst_const(ptr readonly %ptr) {
; CHECK-LABEL: load_atomic_i128_unaligned_seq_cst_const:
; CHECK:    bl __atomic_load
    %r = load atomic i128, ptr %ptr seq_cst, align 1
    ret i128 %r
}