Lines Matching defs:shadow_mem

148 NOINLINE void DoReportRace(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
155 old = Shadow(LoadShadow(&shadow_mem[1]));
158 StoreShadow(&shadow_mem[i], i == 0 ? Shadow::kRodata : Shadow::kEmpty);
166 ReportRace(thr, shadow_mem, cur, Shadow(old), typ);
195 bool CheckRaces(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
199 RawShadow* sp = &shadow_mem[idx];
220 DoReportRace(thr, shadow_mem, cur, old, typ);
230 StoreShadow(&shadow_mem[index], cur.raw());
234 # define LOAD_CURRENT_SHADOW(cur, shadow_mem) UNUSED int access = 0, shadow = 0
274 NOINLINE void DoReportRaceV(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
301 DoReportRace(thr, shadow_mem, cur, prev, typ);
305 bool CheckRaces(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
351 StoreShadow(&shadow_mem[index / 4], cur.raw());
380 DoReportRaceV(thr, shadow_mem, cur, concurrent_mask, shadow, typ);
384 # define LOAD_CURRENT_SHADOW(cur, shadow_mem) \
386 const m128 shadow = _mm_load_si128(reinterpret_cast<m128*>(shadow_mem))
422 RawShadow* shadow_mem = MemToShadow(addr);
427 static_cast<int>(typ), DumpShadow(memBuf[0], shadow_mem[0]),
428 DumpShadow(memBuf[1], shadow_mem[1]),
429 DumpShadow(memBuf[2], shadow_mem[2]),
430 DumpShadow(memBuf[3], shadow_mem[3]));
435 LOAD_CURRENT_SHADOW(cur, shadow_mem);
436 if (LIKELY(ContainsSameAccess(shadow_mem, cur, shadow, access, typ)))
442 CheckRaces(thr, shadow_mem, cur, shadow, access, typ);
461 RawShadow* shadow_mem = MemToShadow(addr);
464 LOAD_CURRENT_SHADOW(cur, shadow_mem);
465 if (LIKELY(ContainsSameAccess(shadow_mem, cur, shadow, access, typ)))
470 if (UNLIKELY(CheckRaces(thr, shadow_mem, cur, shadow, access, typ)))
474 shadow_mem += kShadowCnt;
475 LOAD_CURRENT_SHADOW(cur, shadow_mem);
476 if (LIKELY(ContainsSameAccess(shadow_mem, cur, shadow, access, typ)))
480 CheckRaces(thr, shadow_mem, cur, shadow, access, typ);
497 RawShadow* shadow_mem = MemToShadow(addr);
502 LOAD_CURRENT_SHADOW(cur, shadow_mem);
503 if (LIKELY(ContainsSameAccess(shadow_mem, cur, shadow, access, typ)))
508 if (UNLIKELY(CheckRaces(thr, shadow_mem, cur, shadow, access, typ)))
515 shadow_mem += kShadowCnt;
517 LOAD_CURRENT_SHADOW(cur, shadow_mem);
518 if (LIKELY(ContainsSameAccess(shadow_mem, cur, shadow, access, typ)))
522 CheckRaces(thr, shadow_mem, cur, shadow, access, typ);
608 RawShadow* shadow_mem = MemToShadow(addr);
615 for (; size; size -= kShadowCell, shadow_mem += kShadowCnt) {
616 const m128 shadow = _mm_load_si128((m128*)shadow_mem);
617 if (UNLIKELY(CheckRaces(thr, shadow_mem, cur, shadow, access, typ)))
619 _mm_store_si128((m128*)shadow_mem, freed);
622 for (; size; size -= kShadowCell, shadow_mem += kShadowCnt) {
623 if (UNLIKELY(CheckRaces(thr, shadow_mem, cur, 0, 0, typ)))
625 StoreShadow(&shadow_mem[0], Shadow::FreedMarker());
626 StoreShadow(&shadow_mem[1], Shadow::FreedInfo(cur.sid(), cur.epoch()));
627 StoreShadow(&shadow_mem[2], Shadow::kEmpty);
628 StoreShadow(&shadow_mem[3], Shadow::kEmpty);
650 bool MemoryAccessRangeOne(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
652 LOAD_CURRENT_SHADOW(cur, shadow_mem);
653 if (LIKELY(ContainsSameAccess(shadow_mem, cur, shadow, access, typ)))
655 return CheckRaces(thr, shadow_mem, cur, shadow, access, typ);
669 RawShadow* shadow_mem = MemToShadow(addr);
682 if (!IsShadowMem(shadow_mem)) {
683 Printf("Bad shadow start addr: %p (%p)\n", shadow_mem, (void*)addr);
684 DCHECK(IsShadowMem(shadow_mem));
688 reinterpret_cast<uptr>(shadow_mem) + size * kShadowMultiplier - 1);
695 shadow_mem, (void*)addr, size, kShadowMultiplier);
707 if (*shadow_mem == Shadow::kRodata)
722 if (UNLIKELY(MemoryAccessRangeOne(thr, shadow_mem, cur, typ)))
724 shadow_mem += kShadowCnt;
728 for (; size >= kShadowCell; size -= kShadowCell, shadow_mem += kShadowCnt) {
729 if (UNLIKELY(MemoryAccessRangeOne(thr, shadow_mem, cur, typ)))
735 if (UNLIKELY(MemoryAccessRangeOne(thr, shadow_mem, cur, typ)))
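Taken together, the matches above trace one addressing pattern: MemToShadow() turns an application address into a pointer into the shadow region, each kShadowCell-byte application cell owns kShadowCnt consecutive RawShadow slots, and every range walk advances shadow_mem by kShadowCnt per cell (see the matches at lines 615, 622 and 728). The sketch below is a minimal, self-contained illustration of that addressing scheme only; the flat-array MemToShadow(), the constants, and MarkRange() are simplifications invented for this example and are not the real TSan runtime, whose mapping is a platform-specific constant-offset address transformation.

// Minimal sketch, assuming simplified constants and a toy array-backed
// shadow region. It shows only the cell-to-slots layout and the
// "advance shadow_mem by kShadowCnt per kShadowCell of app memory" stride
// visible in the matches above; it is not the real TSan implementation.
#include <cstdint>
#include <cstdio>

using uptr = uintptr_t;
using RawShadow = uint32_t;      // one shadow slot (simplified)

constexpr uptr kShadowCell = 8;  // app bytes covered per shadow cell (assumed)
constexpr uptr kShadowCnt = 4;   // shadow slots per cell (assumed)
constexpr uptr kAppSize = 64;    // toy "application" region size

static unsigned char g_app[kAppSize];
static RawShadow g_shadow[kAppSize / kShadowCell * kShadowCnt];

// Toy analogue of MemToShadow(): index into a flat array instead of the
// real constant-offset address transformation.
RawShadow* MemToShadow(const void* addr) {
  uptr off = static_cast<const unsigned char*>(addr) - g_app;
  return &g_shadow[off / kShadowCell * kShadowCnt];
}

// Walk a range the same way the matched loops do: one cell at a time,
// stepping shadow_mem forward by kShadowCnt slots per kShadowCell bytes.
void MarkRange(void* addr, uptr size, RawShadow value) {
  RawShadow* shadow_mem = MemToShadow(addr);
  for (; size >= kShadowCell; size -= kShadowCell, shadow_mem += kShadowCnt)
    for (uptr i = 0; i < kShadowCnt; i++)
      shadow_mem[i] = value;  // real code stores Shadow markers here
}

int main() {
  MarkRange(&g_app[16], 24, 0xdeadbeef);
  printf("shadow slot for &g_app[16]: %#x\n", (unsigned)*MemToShadow(&g_app[16]));
  printf("shadow slot for &g_app[0]:  %#x\n", (unsigned)*MemToShadow(&g_app[0]));
}

Under these assumptions, the first printf reports the marked value and the second reports an untouched slot, which is the whole point of the layout: each cell's kShadowCnt slots can be loaded, checked and overwritten as a unit, exactly as the matched StoreShadow and CheckRaces lines do per cell.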