//===- llvm/unittest/Support/MemoryTest.cpp - MappedMemory tests ---------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/Support/Memory.h"
#include "llvm/Support/Process.h"
#include "gtest/gtest.h"
#include <cassert>
#include <cstdlib>

#if defined(__NetBSD__)
// clang-format off
#include <sys/param.h>
#include <sys/types.h>
#include <sys/sysctl.h>
#include <err.h>
#include <unistd.h>
// clang-format on
#endif

using namespace llvm;
using namespace sys;

namespace {

bool IsMPROTECT() {
#if defined(__NetBSD__)
  int mib[3];
  int paxflags;
  size_t len = sizeof(paxflags);

  mib[0] = CTL_PROC;
  mib[1] = getpid();
  mib[2] = PROC_PID_PAXFLAGS;

  if (sysctl(mib, 3, &paxflags, &len, NULL, 0) != 0)
    err(EXIT_FAILURE, "sysctl");

  return !!(paxflags & CTL_PROC_PAXFLAGS_MPROTECT);
#else
  return false;
#endif
}

class MappedMemoryTest : public ::testing::TestWithParam<unsigned> {
public:
  MappedMemoryTest() {
    Flags = GetParam();
    PageSize = sys::Process::getPageSize();
  }

protected:
  // Adds RW flags to permit testing of the resulting memory
  unsigned getTestableEquivalent(unsigned RequestedFlags) {
    switch (RequestedFlags) {
    case Memory::MF_READ:
    case Memory::MF_WRITE:
    case Memory::MF_READ|Memory::MF_WRITE:
      return Memory::MF_READ|Memory::MF_WRITE;
    case Memory::MF_READ|Memory::MF_EXEC:
    case Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC:
    case Memory::MF_EXEC:
      return Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC;
    }
    // Default in case values are added to the enum, as required by some compilers
    return Memory::MF_READ|Memory::MF_WRITE;
  }

  // Returns true if the memory blocks overlap
  bool doesOverlap(MemoryBlock M1, MemoryBlock M2) {
    if (M1.base() == M2.base())
      return true;

    if (M1.base() > M2.base())
      return (unsigned char *)M2.base() + M2.size() > M1.base();

    return (unsigned char *)M1.base() + M1.size() > M2.base();
  }

  unsigned Flags;
  size_t PageSize;
};

// MPROTECT prevents W+X mmaps
#define CHECK_UNSUPPORTED() \
  do { \
    if ((Flags & Memory::MF_WRITE) && (Flags & Memory::MF_EXEC) && \
        IsMPROTECT()) \
      return; \
  } while (0)

TEST_P(MappedMemoryTest, AllocAndRelease) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.size());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

TEST_P(MappedMemoryTest, MultipleAllocAndRelease) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.size());

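  // The three independently requested mappings should be disjoint.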
  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  MemoryBlock M4 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(16U, M4.size());
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, BasicWrite) {
  // This test applies only to readable and writeable combinations
  if (Flags &&
      !((Flags & Memory::MF_READ) && (Flags & Memory::MF_WRITE)))
    return;
  CHECK_UNSUPPORTED();

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.size());

  int *a = (int*)M1.base();
  *a = 1;
  EXPECT_EQ(1, *a);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

TEST_P(MappedMemoryTest, MultipleWrite) {
  // This test applies only to readable and writeable combinations
  if (Flags &&
      !((Flags & Memory::MF_READ) && (Flags & Memory::MF_WRITE)))
    return;
  CHECK_UNSUPPORTED();

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(sizeof(int), nullptr, Flags,
                                                EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(8 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(4 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(1U * sizeof(int), M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(8U * sizeof(int), M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(4U * sizeof(int), M3.size());

  int *x = (int*)M1.base();
  *x = 1;

  int *y = (int*)M2.base();
  for (int i = 0; i < 8; i++) {
    y[i] = i;
  }

  int *z = (int*)M3.base();
  *z = 42;

  EXPECT_EQ(1, *x);
  EXPECT_EQ(7, y[7]);
  EXPECT_EQ(42, *z);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));

  MemoryBlock M4 = Memory::allocateMappedMemory(64 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(64U * sizeof(int), M4.size());
  x = (int*)M4.base();
  *x = 4;
  EXPECT_EQ(4, *x);
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));

  // Verify that M2 remains unaffected by other activity
  for (int i = 0; i < 8; i++) {
    EXPECT_EQ(i, y[i]);
  }
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, EnabledWrite) {
  // MPROTECT prevents W+X, and since this test always adds W we need
  // to block any variant with X.
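  // (CHECK_UNSUPPORTED() is not sufficient here: protectMappedMemory below is
  // called with getTestableEquivalent(Flags), which always includes MF_WRITE,
  // so any MF_EXEC variant would end up requesting W+X.)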
  if ((Flags & Memory::MF_EXEC) && IsMPROTECT())
    return;

  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(2 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(8 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(4 * sizeof(int), nullptr,
                                                Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(2U * sizeof(int), M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(8U * sizeof(int), M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(4U * sizeof(int), M3.size());

  EXPECT_FALSE(Memory::protectMappedMemory(M1, getTestableEquivalent(Flags)));
  EXPECT_FALSE(Memory::protectMappedMemory(M2, getTestableEquivalent(Flags)));
  EXPECT_FALSE(Memory::protectMappedMemory(M3, getTestableEquivalent(Flags)));

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  int *x = (int*)M1.base();
  *x = 1;
  int *y = (int*)M2.base();
  for (unsigned int i = 0; i < 8; i++) {
    y[i] = i;
  }
  int *z = (int*)M3.base();
  *z = 42;

  EXPECT_EQ(1, *x);
  EXPECT_EQ(7, y[7]);
  EXPECT_EQ(42, *z);

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_EQ(6, y[6]);

  MemoryBlock M4 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  EXPECT_NE((void*)nullptr, M4.base());
  EXPECT_LE(16U, M4.size());
  EXPECT_EQ(std::error_code(),
            Memory::protectMappedMemory(M4, getTestableEquivalent(Flags)));
  x = (int*)M4.base();
  *x = 4;
  EXPECT_EQ(4, *x);
  EXPECT_FALSE(Memory::releaseMappedMemory(M4));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, SuccessiveNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock M1 = Memory::allocateMappedMemory(16, nullptr, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &M1, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &M2, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.size());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, DuplicateNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(3*PageSize), 16);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.size());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, ZeroNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near(nullptr, 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.size());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, ZeroSizeNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(4*PageSize), 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(16, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M2 = Memory::allocateMappedMemory(64, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);
  MemoryBlock M3 = Memory::allocateMappedMemory(32, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(16U, M1.size());
  EXPECT_NE((void*)nullptr, M2.base());
  EXPECT_LE(64U, M2.size());
  EXPECT_NE((void*)nullptr, M3.base());
  EXPECT_LE(32U, M3.size());

  EXPECT_FALSE(doesOverlap(M1, M2));
  EXPECT_FALSE(doesOverlap(M2, M3));
  EXPECT_FALSE(doesOverlap(M1, M3));

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
  EXPECT_FALSE(Memory::releaseMappedMemory(M3));
  EXPECT_FALSE(Memory::releaseMappedMemory(M2));
}

TEST_P(MappedMemoryTest, UnalignedNear) {
  CHECK_UNSUPPORTED();
  std::error_code EC;
  MemoryBlock Near((void*)(2*PageSize+5), 0);
  MemoryBlock M1 = Memory::allocateMappedMemory(15, &Near, Flags, EC);
  EXPECT_EQ(std::error_code(), EC);

  EXPECT_NE((void*)nullptr, M1.base());
  EXPECT_LE(sizeof(int), M1.size());

  EXPECT_FALSE(Memory::releaseMappedMemory(M1));
}

// Note that Memory::MF_WRITE is not supported exclusively across
// operating systems and architectures and can imply MF_READ|MF_WRITE
unsigned MemoryFlags[] = {
    Memory::MF_READ,
    Memory::MF_WRITE,
    Memory::MF_READ|Memory::MF_WRITE,
    Memory::MF_EXEC,
    Memory::MF_READ|Memory::MF_EXEC,
    Memory::MF_READ|Memory::MF_WRITE|Memory::MF_EXEC
};

INSTANTIATE_TEST_CASE_P(AllocationTests,
                        MappedMemoryTest,
                        ::testing::ValuesIn(MemoryFlags),);

} // anonymous namespace