//===- llvm/unittest/Support/MemoryTest.cpp - MemoryBlock unit tests -----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/Support/Memory.h"
#include "llvm/Support/Process.h"
#include "gtest/gtest.h"
#include <cassert>
#include <cstdlib>

#if defined(__NetBSD__)
// clang-format off
#include <sys/param.h>
#include <sys/types.h>
#include <sys/sysctl.h>
#include <err.h>
#include <unistd.h>
// clang-format on
#endif

using namespace llvm;
using namespace sys;

namespace {

bool IsMPROTECT() {
#if defined(__NetBSD__)
  int mib[3];
  int paxflags;
  size_t len = sizeof(paxflags);

  mib[0] = CTL_PROC;
  mib[1] = getpid();
  mib[2] = PROC_PID_PAXFLAGS;

  if (sysctl(mib, 3, &paxflags, &len, NULL, 0) != 0)
    err(EXIT_FAILURE, "sysctl");

  return !!(paxflags & CTL_PROC_PAXFLAGS_MPROTECT);
#else
  return false;
#endif
}

class MappedMemoryTest : public ::testing::TestWithParam<unsigned> {
public:
  MappedMemoryTest() {
    Flags = GetParam();
    PageSize = sys::Process::getPageSize();
  }

protected:
  // Adds RW flags to permit testing of the resulting memory
  unsigned getTestableEquivalent(unsigned RequestedFlags) {
    switch (RequestedFlags) {
    case Memory::MF_READ:
    case Memory::MF_WRITE:
    case Memory::MF_READ|Memory::MF_WRITE:
      return Memory::MF_READ|Memory::MF_WRITE;
    case Memory::MF_READ|Memory::MF_EXEC:
    case Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC:
    case Memory::MF_EXEC:
      return Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC;
    }
    // Default in case values are added to the enum, as required by some
    // compilers.
    return Memory::MF_READ|Memory::MF_WRITE;
  }

  // Returns true if the memory blocks overlap
  bool doesOverlap(MemoryBlock M1, MemoryBlock M2) {
    if (M1.base() == M2.base())
      return true;

    if (M1.base() > M2.base())
      return (unsigned char *)M2.base() + M2.size() > M1.base();

    return (unsigned char *)M1.base() + M1.size() > M2.base();
  }

  unsigned Flags;
  size_t PageSize;
};

// MPROTECT prevents W+X mmaps
#define CHECK_UNSUPPORTED()                                                    \
  do {                                                                         \
    if ((Flags & Memory::MF_WRITE) && (Flags & Memory::MF_EXEC) &&             \
        IsMPROTECT())                                                          \
      return;                                                                  \
  } while (0)

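// Each TEST_P below runs once for every protection-flag combination listed in
// the MemoryFlags array at the bottom of this file (wired up by
// INSTANTIATE_TEST_CASE_P); the combination for the current run is available
// to the test body as Flags.
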
TEST_P(MappedMemoryTest, AllocAndRelease) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.size());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

TEST_P(MappedMemoryTest, MultipleAllocAndRelease) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.size());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  MemoryBlock M4 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(16U, M4.size());
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, BasicWrite) {
  // This test applies only to readable and writeable combinations
  if (Flags &&
      !((Flags & Memory::MF_READ) && (Flags & Memory::MF_WRITE)))
    return;
  CHECK_UNSUPPORTED();

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.size());

  int *a = (int*)M1.base();
  *a = 1;
  EXPECT_EQ(1, *a);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

TEST_P(MappedMemoryTest, MultipleWrite) {
  // This test applies only to readable and writeable combinations
  if (Flags &&
      !((Flags & Memory::MF_READ) && (Flags & Memory::MF_WRITE)))
    return;
  CHECK_UNSUPPORTED();

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(8 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(4 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(1U * sizeof(int), M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(8U * sizeof(int), M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(4U * sizeof(int), M3.size());

  int *x = (int*)M1.base();
  *x = 1;

  int *y = (int*)M2.base();
  for (int i = 0; i < 8; i++) {
    y[i] = i;
  }

  int *z = (int*)M3.base();
  *z = 42;

  EXPECT_EQ(1, *x);
  EXPECT_EQ(7, y[7]);
  EXPECT_EQ(42, *z);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));

  MemoryBlock M4 = Memory::allocateMappedMemory(64 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(64U * sizeof(int), M4.size());
  x = (int*)M4.base();
  *x = 4;
  EXPECT_EQ(4, *x);
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));

  // Verify that M2 remains unaffected by other activity
  for (int i = 0; i < 8; i++) {
    EXPECT_EQ(i, y[i]);
  }
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

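// A minimal additional check, sketched here for illustration and not part of
// the original suite: it assumes the implementation rounds every allocation up
// to whole pages and returns a page-aligned base. That holds for the in-tree
// Unix (mmap) and Windows (VirtualAlloc) implementations, but Memory.h does
// not promise it, so the sketch is DISABLED_ by default; the test name is
// illustrative only.
TEST_P(MappedMemoryTest, DISABLED_PageGranularity) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);

  // Assumed behavior: the block starts on a page boundary and spans at least
  // one full page.
  EXPECT_EQ(0u, (size_t)M1.base() % PageSize);
  EXPECT_LE(PageSize, M1.size());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}
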
TEST_P(MappedMemoryTest, EnabledWrite) {
  // MPROTECT prevents W+X, and since this test always adds W we need
  // to block any variant with X.
  if ((Flags & Memory::MF_EXEC) && IsMPROTECT())
    return;

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(2 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(8 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(4 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(2U * sizeof(int), M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(8U * sizeof(int), M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(4U * sizeof(int), M3.size());

  EXPECT_FALSE(Memory::protectMappedMemory(M1, getTestableEquivalent(Flags)));
  EXPECT_FALSE(Memory::protectMappedMemory(M2, getTestableEquivalent(Flags)));
  EXPECT_FALSE(Memory::protectMappedMemory(M3, getTestableEquivalent(Flags)));

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  int *x = (int*)M1.base();
  *x = 1;
  int *y = (int*)M2.base();
  for (unsigned int i = 0; i < 8; i++) {
    y[i] = i;
  }
  int *z = (int*)M3.base();
  *z = 42;

  EXPECT_EQ(1, *x);
  EXPECT_EQ(7, y[7]);
  EXPECT_EQ(42, *z);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_EQ(6, y[6]);

  MemoryBlock M4 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(16U, M4.size());
  EXPECT_EQ(std::error_code(),
            Memory::protectMappedMemory(M4, getTestableEquivalent(Flags)));
  x = (int*)M4.base();
  *x = 4;
  EXPECT_EQ(4, *x);
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, SuccessiveNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &M1, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &M2, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.size());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

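// The remaining "near" tests pass degenerate placement hints: a fixed address
// that is almost certainly unmapped, a null block, a zero-sized block, and an
// unaligned address. The hint is best-effort, so these tests only verify that
// allocation still succeeds and the blocks can be released; they deliberately
// make no claims about where the memory actually ends up.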
TEST_P(MappedMemoryTest, DuplicateNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(3*PageSize), 16);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.size());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, ZeroNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near(nullptr, 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.size());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, ZeroSizeNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(4*PageSize), 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.size());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, UnalignedNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(2*PageSize+5), 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(15, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.size());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

// Note that a write-only mapping (Memory::MF_WRITE by itself) is not supported
// on all operating systems and architectures and may be treated as
// MF_READ|MF_WRITE.
unsigned MemoryFlags[] = {
    Memory::MF_READ,
    Memory::MF_WRITE,
    Memory::MF_READ|Memory::MF_WRITE,
    Memory::MF_EXEC,
    Memory::MF_READ|Memory::MF_EXEC,
    Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC
};

INSTANTIATE_TEST_CASE_P(AllocationTests,
                        MappedMemoryTest,
                        ::testing::ValuesIn(MemoryFlags),);

} // anonymous namespace