1 #include "llvm/ProfileData/MemProf.h" 2 #include "llvm/ADT/DenseMap.h" 3 #include "llvm/ADT/MapVector.h" 4 #include "llvm/DebugInfo/DIContext.h" 5 #include "llvm/DebugInfo/Symbolize/SymbolizableModule.h" 6 #include "llvm/IR/Value.h" 7 #include "llvm/Object/ObjectFile.h" 8 #include "llvm/ProfileData/MemProfData.inc" 9 #include "llvm/ProfileData/MemProfReader.h" 10 #include "llvm/Support/raw_ostream.h" 11 #include "gmock/gmock.h" 12 #include "gtest/gtest.h" 13 14 #include <initializer_list> 15 16 namespace { 17 18 using ::llvm::DIGlobal; 19 using ::llvm::DIInliningInfo; 20 using ::llvm::DILineInfo; 21 using ::llvm::DILineInfoSpecifier; 22 using ::llvm::DILocal; 23 using ::llvm::StringRef; 24 using ::llvm::memprof::CallStackId; 25 using ::llvm::memprof::CallStackMap; 26 using ::llvm::memprof::Frame; 27 using ::llvm::memprof::FrameId; 28 using ::llvm::memprof::IndexedAllocationInfo; 29 using ::llvm::memprof::IndexedMemProfRecord; 30 using ::llvm::memprof::MemInfoBlock; 31 using ::llvm::memprof::MemProfReader; 32 using ::llvm::memprof::MemProfRecord; 33 using ::llvm::memprof::MemProfSchema; 34 using ::llvm::memprof::Meta; 35 using ::llvm::memprof::PortableMemInfoBlock; 36 using ::llvm::memprof::RawMemProfReader; 37 using ::llvm::memprof::SegmentEntry; 38 using ::llvm::object::SectionedAddress; 39 using ::llvm::symbolize::SymbolizableModule; 40 using ::testing::Return; 41 using ::testing::SizeIs; 42 43 class MockSymbolizer : public SymbolizableModule { 44 public: 45 MOCK_CONST_METHOD3(symbolizeInlinedCode, 46 DIInliningInfo(SectionedAddress, DILineInfoSpecifier, 47 bool)); 48 // Most of the methods in the interface are unused. We only mock the 49 // method that we expect to be called from the memprof reader. 50 virtual DILineInfo symbolizeCode(SectionedAddress, DILineInfoSpecifier, 51 bool) const { 52 llvm_unreachable("unused"); 53 } 54 virtual DIGlobal symbolizeData(SectionedAddress) const { 55 llvm_unreachable("unused"); 56 } 57 virtual std::vector<DILocal> symbolizeFrame(SectionedAddress) const { 58 llvm_unreachable("unused"); 59 } 60 virtual std::vector<SectionedAddress> findSymbol(StringRef Symbol, 61 uint64_t Offset) const { 62 llvm_unreachable("unused"); 63 } 64 virtual bool isWin32Module() const { llvm_unreachable("unused"); } 65 virtual uint64_t getModulePreferredBase() const { 66 llvm_unreachable("unused"); 67 } 68 }; 69 70 struct MockInfo { 71 std::string FunctionName; 72 uint32_t Line; 73 uint32_t StartLine; 74 uint32_t Column; 75 std::string FileName = "valid/path.cc"; 76 }; 77 DIInliningInfo makeInliningInfo(std::initializer_list<MockInfo> MockFrames) { 78 DIInliningInfo Result; 79 for (const auto &Item : MockFrames) { 80 DILineInfo Frame; 81 Frame.FunctionName = Item.FunctionName; 82 Frame.Line = Item.Line; 83 Frame.StartLine = Item.StartLine; 84 Frame.Column = Item.Column; 85 Frame.FileName = Item.FileName; 86 Result.addFrame(Frame); 87 } 88 return Result; 89 } 90 91 llvm::SmallVector<SegmentEntry, 4> makeSegments() { 92 llvm::SmallVector<SegmentEntry, 4> Result; 93 // Mimic an entry for a non position independent executable. 
TEST(MemProf, FillsValue) {
  std::unique_ptr<MockSymbolizer> Symbolizer(new MockSymbolizer());

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x1000},
                                                specifier(), false))
      .Times(1) // Only once since we remember invalid PCs.
      .WillRepeatedly(Return(makeInliningInfo({
          {"new", 70, 57, 3, "memprof/memprof_new_delete.cpp"},
      })));

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x2000},
                                                specifier(), false))
      .Times(1) // Only once since we cache the result for future lookups.
      .WillRepeatedly(Return(makeInliningInfo({
          {"foo", 10, 5, 30},
          {"bar", 201, 150, 20},
      })));

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x3000},
                                                specifier(), false))
      .Times(1)
      .WillRepeatedly(Return(makeInliningInfo({
          {"xyz.llvm.123", 10, 5, 30},
          {"abc", 10, 5, 30},
      })));

  CallStackMap CSM;
  CSM[0x1] = {0x1000, 0x2000, 0x3000};

  llvm::MapVector<uint64_t, MemInfoBlock> Prof;
  Prof[0x1].AllocCount = 1;

  auto Seg = makeSegments();

  RawMemProfReader Reader(std::move(Symbolizer), Seg, Prof, CSM,
                          /*KeepName=*/true);

  llvm::DenseMap<llvm::GlobalValue::GUID, MemProfRecord> Records;
  for (const auto &Pair : Reader) {
    Records.insert({Pair.first, Pair.second});
  }

  // Mock program pseudocode and expected memprof record contents.
  //
  //                              AllocSite       CallSite
  // inline foo() { new(); }         Y               N
  // bar() { foo(); }                Y               Y
  // inline xyz() { bar(); }         N               Y
  // abc() { xyz(); }                N               Y

  // We expect 4 records. We attach alloc site data to foo and bar, i.e.
  // all frames bottom up until we find a non-inline frame. We attach call
  // site data to bar, xyz and abc.
  ASSERT_THAT(Records, SizeIs(4));

  // Check the memprof record for foo.
  const llvm::GlobalValue::GUID FooId = IndexedMemProfRecord::getGUID("foo");
  ASSERT_EQ(Records.count(FooId), 1U);
  const MemProfRecord &Foo = Records[FooId];
  ASSERT_THAT(Foo.AllocSites, SizeIs(1));
  EXPECT_EQ(Foo.AllocSites[0].Info.getAllocCount(), 1U);
  EXPECT_THAT(Foo.AllocSites[0].CallStack[0],
              FrameContains("foo", 5U, 30U, true));
  EXPECT_THAT(Foo.AllocSites[0].CallStack[1],
              FrameContains("bar", 51U, 20U, false));
  EXPECT_THAT(Foo.AllocSites[0].CallStack[2],
              FrameContains("xyz", 5U, 30U, true));
  EXPECT_THAT(Foo.AllocSites[0].CallStack[3],
              FrameContains("abc", 5U, 30U, false));
  EXPECT_TRUE(Foo.CallSites.empty());

  // Check the memprof record for bar.
  const llvm::GlobalValue::GUID BarId = IndexedMemProfRecord::getGUID("bar");
  ASSERT_EQ(Records.count(BarId), 1U);
  const MemProfRecord &Bar = Records[BarId];
  ASSERT_THAT(Bar.AllocSites, SizeIs(1));
  EXPECT_EQ(Bar.AllocSites[0].Info.getAllocCount(), 1U);
  EXPECT_THAT(Bar.AllocSites[0].CallStack[0],
              FrameContains("foo", 5U, 30U, true));
  EXPECT_THAT(Bar.AllocSites[0].CallStack[1],
              FrameContains("bar", 51U, 20U, false));
  EXPECT_THAT(Bar.AllocSites[0].CallStack[2],
              FrameContains("xyz", 5U, 30U, true));
  EXPECT_THAT(Bar.AllocSites[0].CallStack[3],
              FrameContains("abc", 5U, 30U, false));

  ASSERT_THAT(Bar.CallSites, SizeIs(1));
  ASSERT_THAT(Bar.CallSites[0], SizeIs(2));
  EXPECT_THAT(Bar.CallSites[0][0], FrameContains("foo", 5U, 30U, true));
  EXPECT_THAT(Bar.CallSites[0][1], FrameContains("bar", 51U, 20U, false));

  // Check the memprof record for xyz.
  const llvm::GlobalValue::GUID XyzId = IndexedMemProfRecord::getGUID("xyz");
  ASSERT_EQ(Records.count(XyzId), 1U);
  const MemProfRecord &Xyz = Records[XyzId];
  ASSERT_THAT(Xyz.CallSites, SizeIs(1));
  ASSERT_THAT(Xyz.CallSites[0], SizeIs(2));
  // Expect the entire frame even though in practice we only need the first
  // entry here.
  EXPECT_THAT(Xyz.CallSites[0][0], FrameContains("xyz", 5U, 30U, true));
  EXPECT_THAT(Xyz.CallSites[0][1], FrameContains("abc", 5U, 30U, false));

  // Check the memprof record for abc.
  const llvm::GlobalValue::GUID AbcId = IndexedMemProfRecord::getGUID("abc");
  ASSERT_EQ(Records.count(AbcId), 1U);
  const MemProfRecord &Abc = Records[AbcId];
  EXPECT_TRUE(Abc.AllocSites.empty());
  ASSERT_THAT(Abc.CallSites, SizeIs(1));
  ASSERT_THAT(Abc.CallSites[0], SizeIs(2));
  EXPECT_THAT(Abc.CallSites[0][0], FrameContains("xyz", 5U, 30U, true));
  EXPECT_THAT(Abc.CallSites[0][1], FrameContains("abc", 5U, 30U, false));
}

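// Note on the LineOffset expectations above: the reader records lines
// relative to the start of the enclosing function (LineOffset =
// Line - StartLine), so the mocked "bar" frame (Line=201, StartLine=150) is
// expected at offset 51 and the mocked "foo" frame (Line=10, StartLine=5) at
// offset 5.
static_assert(201 - 150 == 51, "expected LineOffset for bar");
static_assert(10 - 5 == 5, "expected LineOffset for foo");
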
TEST(MemProf, PortableWrapper) {
  MemInfoBlock Info(/*size=*/16, /*access_count=*/7, /*alloc_timestamp=*/1000,
                    /*dealloc_timestamp=*/2000, /*alloc_cpu=*/3,
                    /*dealloc_cpu=*/4);

  const auto Schema = llvm::memprof::getFullSchema();
  PortableMemInfoBlock WriteBlock(Info);

  std::string Buffer;
  llvm::raw_string_ostream OS(Buffer);
  WriteBlock.serialize(Schema, OS);
  OS.flush();

  PortableMemInfoBlock ReadBlock(
      Schema, reinterpret_cast<const unsigned char *>(Buffer.data()));

  EXPECT_EQ(ReadBlock, WriteBlock);
  // Compare against integer literals instead of MemInfoBlock members: the
  // struct is packed, and since the EXPECT_EQ macros bind references to
  // their arguments, referencing the packed members directly would cause
  // unaligned accesses.
  EXPECT_EQ(1UL, ReadBlock.getAllocCount());
  EXPECT_EQ(7ULL, ReadBlock.getTotalAccessCount());
  EXPECT_EQ(3UL, ReadBlock.getAllocCpuId());
}

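// Illustrative companion (test name is ours) to the comment above: if a
// packed MemInfoBlock member does need to be inspected, copying it into an
// aligned local first avoids binding a reference to a packed field.
TEST(MemProf, PackedFieldAlignedCopy) {
  MemInfoBlock Info(/*size=*/16, /*access_count=*/7, /*alloc_timestamp=*/1000,
                    /*dealloc_timestamp=*/2000, /*alloc_cpu=*/3,
                    /*dealloc_cpu=*/4);
  const uint64_t AccessCount = Info.TotalAccessCount; // aligned copy
  EXPECT_EQ(7ULL, AccessCount);
}
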
// Version0 and Version1 serialize IndexedMemProfRecord in the same format,
// so we share one test.
TEST(MemProf, RecordSerializationRoundTripVersion0And1) {
  const auto Schema = llvm::memprof::getFullSchema();

  MemInfoBlock Info(/*size=*/16, /*access_count=*/7, /*alloc_timestamp=*/1000,
                    /*dealloc_timestamp=*/2000, /*alloc_cpu=*/3,
                    /*dealloc_cpu=*/4);

  llvm::SmallVector<llvm::SmallVector<FrameId>> AllocCallStacks = {
      {0x123, 0x345}, {0x123, 0x567}};

  llvm::SmallVector<llvm::SmallVector<FrameId>> CallSites = {{0x333, 0x777}};

  IndexedMemProfRecord Record;
  for (const auto &ACS : AllocCallStacks) {
    // Use the same info block for both allocation sites.
    Record.AllocSites.emplace_back(ACS, llvm::memprof::hashCallStack(ACS),
                                   Info);
  }
  Record.CallSites.assign(CallSites);
  for (const auto &CS : CallSites)
    Record.CallSiteIds.push_back(llvm::memprof::hashCallStack(CS));

  std::string Buffer;
  llvm::raw_string_ostream OS(Buffer);
  Record.serialize(Schema, OS, llvm::memprof::Version0);
  OS.flush();

  const IndexedMemProfRecord GotRecord = IndexedMemProfRecord::deserialize(
      Schema, reinterpret_cast<const unsigned char *>(Buffer.data()),
      llvm::memprof::Version0);

  EXPECT_EQ(Record, GotRecord);
}

TEST(MemProf, RecordSerializationRoundTripVersion2) {
  const auto Schema = llvm::memprof::getFullSchema();

  MemInfoBlock Info(/*size=*/16, /*access_count=*/7, /*alloc_timestamp=*/1000,
                    /*dealloc_timestamp=*/2000, /*alloc_cpu=*/3,
                    /*dealloc_cpu=*/4);

  llvm::SmallVector<llvm::memprof::CallStackId> CallStackIds = {0x123, 0x456};

  llvm::SmallVector<llvm::memprof::CallStackId> CallSiteIds = {0x333, 0x444};

  IndexedMemProfRecord Record;
  for (const auto &CSId : CallStackIds) {
    // Use the same info block for both allocation sites.
    Record.AllocSites.emplace_back(llvm::SmallVector<FrameId>(), CSId, Info);
  }
  Record.CallSiteIds.assign(CallSiteIds);

  std::string Buffer;
  llvm::raw_string_ostream OS(Buffer);
  Record.serialize(Schema, OS, llvm::memprof::Version2);
  OS.flush();

  const IndexedMemProfRecord GotRecord = IndexedMemProfRecord::deserialize(
      Schema, reinterpret_cast<const unsigned char *>(Buffer.data()),
      llvm::memprof::Version2);

  EXPECT_EQ(Record, GotRecord);
}

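// In Version2, only the CallStackIds round-trip; the inline frame vectors
// stay empty and consumers rebuild the stacks from a CallStackId-to-frames
// mapping (see IndexedMemProfRecordToMemProfRecord below). That scheme works
// because hashCallStack is a deterministic function of the frame-id
// sequence, which the illustrative check below (test name is ours)
// spot-tests.
TEST(MemProf, CallStackHashIsDeterministic) {
  llvm::SmallVector<FrameId> CS = {0x123, 0x456};
  llvm::SmallVector<FrameId> SameCS = {0x123, 0x456};
  llvm::SmallVector<FrameId> ReversedCS = {0x456, 0x123};
  EXPECT_EQ(llvm::memprof::hashCallStack(CS),
            llvm::memprof::hashCallStack(SameCS));
  // The hash covers the order of frames, not just their set.
  EXPECT_NE(llvm::memprof::hashCallStack(CS),
            llvm::memprof::hashCallStack(ReversedCS));
}
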
TEST(MemProf, SymbolizationFilter) {
  std::unique_ptr<MockSymbolizer> Symbolizer(new MockSymbolizer());

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x1000},
                                                specifier(), false))
      .Times(1) // Only once since we don't look up invalid PCs repeatedly.
      .WillRepeatedly(Return(makeInliningInfo({
          {"malloc", 70, 57, 3, "memprof/memprof_malloc_linux.cpp"},
      })));

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x2000},
                                                specifier(), false))
      .Times(1) // Only once since we don't look up invalid PCs repeatedly.
      .WillRepeatedly(Return(makeInliningInfo({
          {"new", 70, 57, 3, "memprof/memprof_new_delete.cpp"},
      })));

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x3000},
                                                specifier(), false))
      .Times(1) // Only once since we don't look up invalid PCs repeatedly.
      .WillRepeatedly(Return(makeInliningInfo({
          {DILineInfo::BadString, 0, 0, 0},
      })));

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x4000},
                                                specifier(), false))
      .Times(1)
      .WillRepeatedly(Return(makeInliningInfo({
          {"foo", 10, 5, 30, "memprof/memprof_test_file.cpp"},
      })));

  EXPECT_CALL(*Symbolizer, symbolizeInlinedCode(SectionedAddress{0x5000},
                                                specifier(), false))
      .Times(1)
      .WillRepeatedly(Return(makeInliningInfo({
          // Depending on how the runtime was compiled, only the filename
          // may be present in the debug information.
          {"malloc", 70, 57, 3, "memprof_malloc_linux.cpp"},
      })));

  CallStackMap CSM;
  CSM[0x1] = {0x1000, 0x2000, 0x3000, 0x4000};
  // This entry should be dropped since all of its PCs are either not
  // symbolizable or belong to the runtime.
  CSM[0x2] = {0x1000, 0x2000, 0x5000};

  llvm::MapVector<uint64_t, MemInfoBlock> Prof;
  Prof[0x1].AllocCount = 1;
  Prof[0x2].AllocCount = 1;

  auto Seg = makeSegments();

  RawMemProfReader Reader(std::move(Symbolizer), Seg, Prof, CSM);

  llvm::SmallVector<MemProfRecord, 1> Records;
  for (const auto &KeyRecordPair : Reader) {
    Records.push_back(KeyRecordPair.second);
  }

  ASSERT_THAT(Records, SizeIs(1));
  ASSERT_THAT(Records[0].AllocSites, SizeIs(1));
  ASSERT_THAT(Records[0].AllocSites[0].CallStack, SizeIs(1));
  EXPECT_THAT(Records[0].AllocSites[0].CallStack[0],
              FrameContains("foo", 5U, 30U, false));
}

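// To summarize the filtering exercised above: the reader strips frames that
// fail to symbolize (DILineInfo::BadString) and frames whose file name
// identifies memprof runtime sources such as memprof_malloc_linux.cpp and
// memprof_new_delete.cpp; as the 0x5000 mock shows, the match must also work
// when the debug info carries a bare filename with no directory. A call
// stack left empty after filtering drops its record entirely, which is why
// only the 0x1 entry survives.
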
TEST(MemProf, BaseMemProfReader) {
  llvm::DenseMap<FrameId, Frame> FrameIdMap;
  Frame F1(/*Hash=*/IndexedMemProfRecord::getGUID("foo"), /*LineOffset=*/20,
           /*Column=*/5, /*IsInlineFrame=*/true);
  Frame F2(/*Hash=*/IndexedMemProfRecord::getGUID("bar"), /*LineOffset=*/10,
           /*Column=*/2, /*IsInlineFrame=*/false);
  FrameIdMap.insert({F1.hash(), F1});
  FrameIdMap.insert({F2.hash(), F2});

  llvm::MapVector<llvm::GlobalValue::GUID, IndexedMemProfRecord> ProfData;
  IndexedMemProfRecord FakeRecord;
  MemInfoBlock Block;
  Block.AllocCount = 1U;
  Block.TotalAccessDensity = 4;
  Block.TotalLifetime = 200001;
  std::array<FrameId, 2> CallStack{F1.hash(), F2.hash()};
  FakeRecord.AllocSites.emplace_back(
      /*CS=*/CallStack, /*CSId=*/llvm::memprof::hashCallStack(CallStack),
      /*MB=*/Block);
  ProfData.insert({F1.hash(), FakeRecord});

  MemProfReader Reader(FrameIdMap, ProfData);

  llvm::SmallVector<MemProfRecord, 1> Records;
  for (const auto &KeyRecordPair : Reader) {
    Records.push_back(KeyRecordPair.second);
  }

  ASSERT_THAT(Records, SizeIs(1));
  ASSERT_THAT(Records[0].AllocSites, SizeIs(1));
  ASSERT_THAT(Records[0].AllocSites[0].CallStack, SizeIs(2));
  EXPECT_THAT(Records[0].AllocSites[0].CallStack[0],
              FrameContains("foo", 20U, 5U, true));
  EXPECT_THAT(Records[0].AllocSites[0].CallStack[1],
              FrameContains("bar", 10U, 2U, false));
}

TEST(MemProf, BaseMemProfReaderWithCSIdMap) {
  llvm::DenseMap<FrameId, Frame> FrameIdMap;
  Frame F1(/*Hash=*/IndexedMemProfRecord::getGUID("foo"), /*LineOffset=*/20,
           /*Column=*/5, /*IsInlineFrame=*/true);
  Frame F2(/*Hash=*/IndexedMemProfRecord::getGUID("bar"), /*LineOffset=*/10,
           /*Column=*/2, /*IsInlineFrame=*/false);
  FrameIdMap.insert({F1.hash(), F1});
  FrameIdMap.insert({F2.hash(), F2});

  llvm::DenseMap<CallStackId, llvm::SmallVector<FrameId>> CSIdMap;
  llvm::SmallVector<FrameId> CallStack = {F1.hash(), F2.hash()};
  CallStackId CSId = llvm::memprof::hashCallStack(CallStack);
  CSIdMap.insert({CSId, CallStack});

  llvm::MapVector<llvm::GlobalValue::GUID, IndexedMemProfRecord> ProfData;
  IndexedMemProfRecord FakeRecord;
  MemInfoBlock Block;
  Block.AllocCount = 1U;
  Block.TotalAccessDensity = 4;
  Block.TotalLifetime = 200001;
  FakeRecord.AllocSites.emplace_back(
      /*CS=*/llvm::SmallVector<FrameId>(),
      /*CSId=*/llvm::memprof::hashCallStack(CallStack),
      /*MB=*/Block);
  ProfData.insert({F1.hash(), FakeRecord});

  MemProfReader Reader(FrameIdMap, CSIdMap, ProfData);

  llvm::SmallVector<MemProfRecord, 1> Records;
  for (const auto &KeyRecordPair : Reader) {
    Records.push_back(KeyRecordPair.second);
  }

  ASSERT_THAT(Records, SizeIs(1));
  ASSERT_THAT(Records[0].AllocSites, SizeIs(1));
  ASSERT_THAT(Records[0].AllocSites[0].CallStack, SizeIs(2));
  EXPECT_THAT(Records[0].AllocSites[0].CallStack[0],
              FrameContains("foo", 20U, 5U, true));
  EXPECT_THAT(Records[0].AllocSites[0].CallStack[1],
              FrameContains("bar", 10U, 2U, false));
}

TEST(MemProf, IndexedMemProfRecordToMemProfRecord) {
  // Verify that MemProfRecord can be constructed from IndexedMemProfRecord
  // with CallStackIds only.

  llvm::DenseMap<FrameId, Frame> FrameIdMap;
  Frame F1(1, 0, 0, false);
  Frame F2(2, 0, 0, false);
  Frame F3(3, 0, 0, false);
  Frame F4(4, 0, 0, false);
  FrameIdMap.insert({F1.hash(), F1});
  FrameIdMap.insert({F2.hash(), F2});
  FrameIdMap.insert({F3.hash(), F3});
  FrameIdMap.insert({F4.hash(), F4});

  llvm::DenseMap<CallStackId, llvm::SmallVector<FrameId>> CallStackIdMap;
  llvm::SmallVector<FrameId> CS1 = {F1.hash(), F2.hash()};
  llvm::SmallVector<FrameId> CS2 = {F1.hash(), F3.hash()};
  llvm::SmallVector<FrameId> CS3 = {F2.hash(), F3.hash()};
  llvm::SmallVector<FrameId> CS4 = {F2.hash(), F4.hash()};
  CallStackIdMap.insert({llvm::memprof::hashCallStack(CS1), CS1});
  CallStackIdMap.insert({llvm::memprof::hashCallStack(CS2), CS2});
  CallStackIdMap.insert({llvm::memprof::hashCallStack(CS3), CS3});
  CallStackIdMap.insert({llvm::memprof::hashCallStack(CS4), CS4});

  IndexedMemProfRecord IndexedRecord;
  IndexedAllocationInfo AI;
  AI.CSId = llvm::memprof::hashCallStack(CS1);
  IndexedRecord.AllocSites.push_back(AI);
  AI.CSId = llvm::memprof::hashCallStack(CS2);
  IndexedRecord.AllocSites.push_back(AI);
  IndexedRecord.CallSiteIds.push_back(llvm::memprof::hashCallStack(CS3));
  IndexedRecord.CallSiteIds.push_back(llvm::memprof::hashCallStack(CS4));

  bool CSIdMissing = false;
  bool FrameIdMissing = false;

  auto Callback = [&](CallStackId CSId) -> llvm::SmallVector<Frame> {
    llvm::SmallVector<Frame> CallStack;
    llvm::SmallVector<FrameId> FrameIds;

    auto Iter = CallStackIdMap.find(CSId);
    if (Iter == CallStackIdMap.end())
      CSIdMissing = true;
    else
      FrameIds = Iter->second;

    for (FrameId Id : FrameIds) {
      Frame F(0, 0, 0, false);
      auto Iter = FrameIdMap.find(Id);
      if (Iter == FrameIdMap.end())
        FrameIdMissing = true;
      else
        F = Iter->second;
      CallStack.push_back(F);
    }

    return CallStack;
  };

  MemProfRecord Record = IndexedRecord.toMemProfRecord(Callback);

  // Make sure that all lookups are successful.
  ASSERT_FALSE(CSIdMissing);
  ASSERT_FALSE(FrameIdMissing);

  // Verify the contents of Record.
  ASSERT_THAT(Record.AllocSites, SizeIs(2));
  ASSERT_THAT(Record.AllocSites[0].CallStack, SizeIs(2));
  EXPECT_EQ(Record.AllocSites[0].CallStack[0].hash(), F1.hash());
  EXPECT_EQ(Record.AllocSites[0].CallStack[1].hash(), F2.hash());
  ASSERT_THAT(Record.AllocSites[1].CallStack, SizeIs(2));
  EXPECT_EQ(Record.AllocSites[1].CallStack[0].hash(), F1.hash());
  EXPECT_EQ(Record.AllocSites[1].CallStack[1].hash(), F3.hash());
  ASSERT_THAT(Record.CallSites, SizeIs(2));
  ASSERT_THAT(Record.CallSites[0], SizeIs(2));
  EXPECT_EQ(Record.CallSites[0][0].hash(), F2.hash());
  EXPECT_EQ(Record.CallSites[0][1].hash(), F3.hash());
  ASSERT_THAT(Record.CallSites[1], SizeIs(2));
  EXPECT_EQ(Record.CallSites[1][0].hash(), F2.hash());
  EXPECT_EQ(Record.CallSites[1][1].hash(), F4.hash());
}
} // namespace