Lines Matching full:allocated

57 // space that is always available. Thus, a dynamically allocated address space
233 std::vector<void *> allocated;
250 allocated.push_back(x);
265 for (uptr i = 0; i < allocated.size(); i++) {
266 void *x = allocated[i];
272 allocated.clear();
378 void *allocated[kNumAllocs];
382 allocated[i] = x;
388 void *m = a->GetMetaData(allocated[idx]);
392 cache.Deallocate(a, 1 + i % (Allocator::kNumClasses - 1), allocated[i]);
620 char *allocated[kNumAllocs];
624 allocated[i] = (char *)a.Allocate(&stats, size, 1);
625 CHECK(a.PointerIsMine(allocated[i]));
630 char *p = allocated[i];
643 allocated[i] = x;
646 char *p = allocated[i % kNumAllocs];
654 char *p = allocated[idx];
670 char *p = allocated[i] = (char *)a.Allocate(&stats, size, alignment);
674 CHECK_EQ(0, (uptr)allocated[i] % alignment);
678 a.Deallocate(&stats, allocated[i]);
712 std::vector<void*> allocated;
721 allocated.push_back(x);
724 std::shuffle(allocated.begin(), allocated.end(), r);
737 for (const auto &allocated_ptr : allocated) {
743 void *x = allocated[i];
750 allocated.clear();
804 void *allocated[kNumAllocs];
806 allocated[i] = cache.Allocate(a, class_id);
809 cache.Deallocate(a, class_id, allocated[i]);
884 // Struct which is allocated to pass info to new threads. The new thread frees
982 std::vector<void *> allocated;
994 allocated.push_back(x);
1003 for (uptr i = 0; i < allocated.size(); i++) {
1005 ASSERT_NE(reported_chunks.find(reinterpret_cast<uptr>(allocated[i])),
1043 char *allocated[kNumAllocs];
1047 allocated[i] = (char *)a.Allocate(&stats, size, 1);
1056 ASSERT_NE(reported_chunks.find(reinterpret_cast<uptr>(allocated[i])),
1060 a.Deallocate(&stats, allocated[i]);
1071 char *allocated[kNumAllocs];
1075 allocated[i] = (char *)a.Allocate(&stats, size, 1);
1081 char *p1 = allocated[i % kNumAllocs];
1097 a.Deallocate(&stats, allocated[i]);
1431 // can use the remaining space in the last allocated page.
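The matched lines above repeatedly follow one pattern: fill an `allocated` container with a batch of pointers, optionally check them (PointerIsMine, GetMetaData, membership in reported_chunks), then deallocate and clear. The following is a minimal, self-contained sketch of that allocate/shuffle/deallocate shape, assuming plain malloc/free in place of the sanitizer allocator API shown in the listing; the constant kNumAllocs and the size schedule are illustrative values, not taken from the original tests.

#include <algorithm>
#include <cassert>
#include <cstdlib>
#include <random>
#include <vector>

int main() {
  const size_t kNumAllocs = 1 << 10;   // illustrative count, not from the tests
  std::vector<void *> allocated;
  allocated.reserve(kNumAllocs);

  // Allocate a batch of chunks with varying sizes and remember the pointers.
  for (size_t i = 0; i < kNumAllocs; i++) {
    size_t size = 8 + (i % 4096);      // illustrative varying-size schedule
    void *x = std::malloc(size);
    assert(x);                         // stands in for CHECK / PointerIsMine
    allocated.push_back(x);
  }

  // Shuffle so chunks are freed in an order unrelated to allocation order.
  std::mt19937 r(42);
  std::shuffle(allocated.begin(), allocated.end(), r);

  // Deallocate everything and reset the bookkeeping container.
  for (size_t i = 0; i < allocated.size(); i++)
    std::free(allocated[i]);
  allocated.clear();
  return 0;
}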