#include "tsan_rtl.h"
namespace __tsan {
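// Event tracing. The Try* helpers append an event to the current trace part
// and return false when the part is full; the NOINLINE TraceRestart* slow
// paths then switch to a fresh trace part and replay the operation from the
// beginning.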
ALWAYS_INLINE USED bool TryTraceMemoryAccess(ThreadState* thr, uptr pc,
                                             uptr addr, uptr size,
                                             AccessType typ) { … }
ALWAYS_INLINE
bool TryTraceMemoryAccessRange(ThreadState* thr, uptr pc, uptr addr, uptr size,
                               AccessType typ) { … }
void TraceMemoryAccessRange(ThreadState* thr, uptr pc, uptr addr, uptr size,
                            AccessType typ) { … }
void TraceFunc(ThreadState* thr, uptr pc) { … }
NOINLINE void TraceRestartFuncEntry(ThreadState* thr, uptr pc) { … }
NOINLINE void TraceRestartFuncExit(ThreadState* thr) { … }
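// Synchronization events: mutex lock records the event type, return pc,
// mutex address and the acquisition stack id; unlock records the address.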
void TraceMutexLock(ThreadState* thr, EventType type, uptr pc, uptr addr,
                    StackID stk) { … }
void TraceMutexUnlock(ThreadState* thr, uptr addr) { … }
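// Appends a time event (the thread's current sid/epoch) to the trace so that
// shadow values observed later can be mapped back to a trace position.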
void TraceTime(ThreadState* thr) { … }
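// Cold path, invoked from CheckRaces when a potential race is detected:
// hands both accesses over to the report machinery.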
NOINLINE void DoReportRace(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
                           Shadow old,
                           AccessType typ) SANITIZER_NO_THREAD_SAFETY_ANALYSIS { … }
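// The shadow check is implemented twice: a portable scalar version below and
// an SSE-vectorized version (TSAN_VECTORIZE) that processes all kShadowCnt
// shadow cells at once.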
#if !TSAN_VECTORIZE
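// Scalar fast path: returns true if the current access is already covered by
// one of the kShadowCnt shadow cells of its granule, in which case no race
// check or trace event is needed.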
ALWAYS_INLINE
bool ContainsSameAccess(RawShadow* s, Shadow cur, int unused0, int unused1,
                        AccessType typ) {
  for (uptr i = 0; i < kShadowCnt; i++) {
    auto old = LoadShadow(&s[i]);
    if (!(typ & kAccessRead)) {
      // Writes must match the stored shadow value exactly.
      if (old == cur.raw())
        return true;
      continue;
    }
    // For reads, set the is-read bit in the stored value before comparing
    // (Shadow::kRodata has only that bit set), so that a read matches a
    // previous access of either kind.
    auto masked = static_cast<RawShadow>(static_cast<u32>(old) |
                                         static_cast<u32>(Shadow::kRodata));
    if (masked == cur.raw())
      return true;
    if (!(typ & kAccessNoRodata) && !SANITIZER_GO) {
      // Reads of .rodata can never race.
      if (old == Shadow::kRodata)
        return true;
    }
  }
  return false;
}
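// Scalar race check: scans the kShadowCnt cells for an overlapping access
// from a different thread that is not ordered by the vector clock. Returns
// true iff a race was found and reported. Unless kAccessCheckOnly is set,
// the current access is also stored into a free, subsumed, or (failing that)
// pseudo-randomly evicted cell.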
ALWAYS_INLINE
bool CheckRaces(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
                int unused0, int unused1, AccessType typ) {
  bool stored = false;
  for (uptr idx = 0; idx < kShadowCnt; idx++) {
    RawShadow* sp = &shadow_mem[idx];
    Shadow old(LoadShadow(sp));
    if (LIKELY(old.raw() == Shadow::kEmpty)) {
      // Free cell: claim it for the current access and stop, the remaining
      // cells are empty as well.
      if (!(typ & kAccessCheckOnly) && !stored)
        StoreShadow(sp, cur.raw());
      return false;
    }
    // Accesses to disjoint bytes within the granule cannot race.
    if (LIKELY(!(cur.access() & old.access())))
      continue;
    if (LIKELY(cur.sid() == old.sid())) {
      // Same thread: overwrite the old cell if the new access covers the
      // same bytes and is at least as strong.
      if (!(typ & kAccessCheckOnly) &&
          LIKELY(cur.access() == old.access() && old.IsRWWeakerOrEqual(typ))) {
        StoreShadow(sp, cur.raw());
        stored = true;
      }
      continue;
    }
    // Two reads, or two atomic accesses, never race.
    if (LIKELY(old.IsBothReadsOrAtomic(typ)))
      continue;
    // Ordered by happens-before: our clock already covers the old epoch.
    if (LIKELY(thr->clock.Get(old.sid()) >= old.epoch()))
      continue;
    DoReportRace(thr, shadow_mem, cur, old, typ);
    return true;
  }
  if (LIKELY(stored))
    return false;
  // All cells are occupied by unrelated accesses: evict a pseudo-random cell
  // (derived from the current trace position) to store this access.
  uptr index =
      atomic_load_relaxed(&thr->trace_pos) / sizeof(Event) % kShadowCnt;
  StoreShadow(&shadow_mem[index], cur.raw());
  return false;
}
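// LOAD_CURRENT_SHADOW defines the (shadow, access) operands passed to
// ContainsSameAccess/CheckRaces; in the scalar build these are unused
// placeholders.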
#define LOAD_CURRENT_SHADOW …
#else
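// Vectorized variants: the four 32-bit shadow cells are loaded into a single
// m128 register and compared against the broadcast current access with SSE;
// DoReportRaceV recovers the racing cell from race_mask.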
ALWAYS_INLINE
bool ContainsSameAccess(RawShadow* unused0, Shadow unused1, m128 shadow,
                        m128 access, AccessType typ) { … }
NOINLINE void DoReportRaceV(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
                            u32 race_mask, m128 shadow, AccessType typ) { … }
ALWAYS_INLINE
bool CheckRaces(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
                m128 shadow, m128 access, AccessType typ) { … }
#define LOAD_CURRENT_SHADOW(cur, shadow_mem) …
#endif
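// Formats a raw shadow value into buf for debug output.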
char* DumpShadow(char* buf, RawShadow raw) { … }
NOINLINE void TraceRestartMemoryAccess(ThreadState* thr, uptr pc, uptr addr,
                                       uptr size, AccessType typ) { … }
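// Hot entry point for all instrumented 1-8 byte accesses: loads the shadow
// of the 8-byte granule, takes the ContainsSameAccess fast path when the
// access is already recorded, otherwise traces the access and runs
// CheckRaces. The __tsan_readN/__tsan_writeN shims generated from
// tsan_interface.inc reduce to roughly this (illustrative sketch, not the
// verbatim shim):
//   void __tsan_read4(void* addr) {
//     MemoryAccess(cur_thread(), CALLERPC, (uptr)addr, 4, kAccessRead);
//   }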
ALWAYS_INLINE USED void MemoryAccess(ThreadState* thr, uptr pc, uptr addr,
                                     uptr size, AccessType typ) { … }
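// 16-byte accesses are handled as two consecutive 8-byte accesses, one per
// shadow granule.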
void MemoryAccess16(ThreadState* thr, uptr pc, uptr addr, AccessType typ);
NOINLINE
void RestartMemoryAccess16(ThreadState* thr, uptr pc, uptr addr,
                           AccessType typ) { … }
ALWAYS_INLINE USED void MemoryAccess16(ThreadState* thr, uptr pc, uptr addr,
                                       AccessType typ) { … }
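// Accesses that straddle an 8-byte granule boundary are split into two
// parts, one per granule.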
NOINLINE
void RestartUnalignedMemoryAccess(ThreadState* thr, uptr pc, uptr addr,
                                  uptr size, AccessType typ) { … }
ALWAYS_INLINE USED void UnalignedMemoryAccess(ThreadState* thr, uptr pc,
                                              uptr addr, uptr size,
                                              AccessType typ) { … }
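// Bulk shadow fill: ShadowSet stores v into the shadow range [p, end);
// MemoryRangeSet maps an application range to its shadow and fills it. Used
// by the range reset/free/imitate-write operations below.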
void ShadowSet(RawShadow* p, RawShadow* end, RawShadow v) { … }
static void MemoryRangeSet(uptr addr, uptr size, RawShadow val) { … }
void MemoryResetRange(ThreadState* thr, uptr pc, uptr addr, uptr size) { … }
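// MemoryRangeFreed marks a heap range as freed so that any later access to
// it races with the free; MemoryRangeImitateWrite marks a range as if the
// current thread had written it.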
void MemoryRangeFreed(ThreadState* thr, uptr pc, uptr addr, uptr size) { … }
void MemoryRangeImitateWrite(ThreadState* thr, uptr pc, uptr addr, uptr size) { … }
void MemoryRangeImitateWriteOrResetRange(ThreadState* thr, uptr pc, uptr addr,
                                         uptr size) { … }
ALWAYS_INLINE
bool MemoryAccessRangeOne(ThreadState* thr, RawShadow* shadow_mem, Shadow cur,
                          AccessType typ) { … }
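// Range accesses: checks one 8-byte granule per iteration via
// MemoryAccessRangeOne; templated on is_read and explicitly instantiated
// below for reads and writes.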
template <bool is_read>
NOINLINE void RestartMemoryAccessRange(ThreadState* thr, uptr pc, uptr addr,
                                       uptr size) { … }
template <bool is_read>
void MemoryAccessRangeT(ThreadState* thr, uptr pc, uptr addr, uptr size) { … }
template void MemoryAccessRangeT<true>(ThreadState* thr, uptr pc, uptr addr,
                                       uptr size);
template void MemoryAccessRangeT<false>(ThreadState* thr, uptr pc, uptr addr,
                                        uptr size);
}  // namespace __tsan
#if !SANITIZER_GO
# include "tsan_interface.inc"
#endif