References
projects/compiler-rt/lib/asan/asan_allocator.cpp 286 return atomic_load(&rss_limit_exceeded, memory_order_relaxed);
336 options->min_redzone = atomic_load(&min_redzone, memory_order_acquire);
337 options->max_redzone = atomic_load(&max_redzone, memory_order_acquire);
340 atomic_load(&alloc_dealloc_mismatch, memory_order_acquire);
354 u32 min_rz = atomic_load(&min_redzone, memory_order_acquire);
355 u32 max_rz = atomic_load(&max_redzone, memory_order_acquire);
633 if (atomic_load(&alloc_dealloc_mismatch, memory_order_acquire)) {
projects/compiler-rt/lib/asan/asan_interceptors.cpp 202 atomic_load(&param->t, memory_order_acquire))) == nullptr)
243 while (atomic_load(&param.is_registered, memory_order_acquire) == 0)
projects/compiler-rt/lib/asan/asan_poisoning.cpp 30 return atomic_load(&can_poison_memory, memory_order_acquire);
projects/compiler-rt/lib/asan/asan_stack.cpp 26 return atomic_load(&malloc_context_size, memory_order_acquire);
projects/compiler-rt/lib/asan/asan_thread.cpp 119 if (atomic_load(&stack_switching_, memory_order_relaxed)) {
141 if (!atomic_load(&stack_switching_, memory_order_relaxed)) {
163 if (!atomic_load(&stack_switching_, memory_order_acquire)) {
projects/compiler-rt/lib/asan/asan_thread.h 110 return !atomic_load(&stack_switching_, memory_order_relaxed) &&
117 if (atomic_load(&stack_switching_, memory_order_relaxed))
projects/compiler-rt/lib/dfsan/dfsan.cpp 204 label = atomic_load(table_ent, memory_order_acquire);
343 atomic_load(&__dfsan_last_label, memory_order_relaxed);
351 atomic_load(&__dfsan_last_label, memory_order_relaxed);
projects/compiler-rt/lib/lsan/lsan_interceptors.cpp 417 while ((tid = atomic_load(&p->tid, memory_order_acquire)) == 0)
455 while (atomic_load(&p.tid, memory_order_acquire) != 0)
projects/compiler-rt/lib/msan/msan_interceptors.cpp 1004 (signal_cb)atomic_load(&sigactions[signo], memory_order_relaxed);
1017 (sigaction_cb)atomic_load(&sigactions[signo], memory_order_relaxed);
1381 uptr old_cb = atomic_load(&sigactions[signo], memory_order_relaxed);
projects/compiler-rt/lib/sanitizer_common/sanitizer_addrhashmap.h 187 uptr addr1 = atomic_load(&c->addr, memory_order_acquire);
195 if (atomic_load(&b->add, memory_order_relaxed)) {
197 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
200 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
216 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
228 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
232 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
256 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
289 CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0);
300 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
313 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
320 uptr addr1 = atomic_load(&c1->addr, memory_order_relaxed);
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator.cpp 97 if (atomic_load(&internal_allocator_initialized, memory_order_acquire) == 0) {
99 if (atomic_load(&internal_allocator_initialized, memory_order_relaxed) ==
254 return atomic_load(&allocator_may_return_null, memory_order_relaxed);
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_bytemap.h 87 atomic_load(&map1_[idx], memory_order_acquire));
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h 90 return atomic_load(&release_to_os_interval_ms_, memory_order_relaxed);
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_stats.h 34 v += atomic_load(&stats_[i], memory_order_relaxed);
39 v = atomic_load(&stats_[i], memory_order_relaxed) - v;
48 return atomic_load(&stats_[i], memory_order_relaxed);
projects/compiler-rt/lib/sanitizer_common/sanitizer_atomic.h 76 return atomic_load(a, memory_order_relaxed);
projects/compiler-rt/lib/sanitizer_common/sanitizer_common.h 60 return atomic_load(&current_verbosity, memory_order_relaxed);
projects/compiler-rt/lib/sanitizer_common/sanitizer_coverage_libcdep_new.cpp 134 if (atomic_load(pc_ptr, memory_order_relaxed) == 0)
projects/compiler-rt/lib/sanitizer_common/sanitizer_libignore.cpp 82 atomic_load(&ignored_ranges_count_, memory_order_relaxed);
111 atomic_load(&instrumented_ranges_count_, memory_order_relaxed);
projects/compiler-rt/lib/sanitizer_common/sanitizer_libignore.h 91 const uptr n = atomic_load(&ignored_ranges_count_, memory_order_acquire);
105 const uptr n = atomic_load(&instrumented_ranges_count_, memory_order_acquire);
projects/compiler-rt/lib/sanitizer_common/sanitizer_linux.cpp 681 CHECK_NE(MtxUnlocked, atomic_load(m, memory_order_relaxed));
projects/compiler-rt/lib/sanitizer_common/sanitizer_mutex.h 43 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1);
55 if (atomic_load(&state_, memory_order_relaxed) == 0
104 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
136 CHECK_NE(atomic_load(&state_, memory_order_relaxed), kUnlocked);
154 u32 cmp = atomic_load(&state_, memory_order_relaxed);
168 u32 prev = atomic_load(&state_, memory_order_acquire);
projects/compiler-rt/lib/sanitizer_common/sanitizer_persistent_allocator.h 37 uptr cmp = atomic_load(&region_pos, memory_order_acquire);
38 uptr end = atomic_load(&region_end, memory_order_acquire);
projects/compiler-rt/lib/sanitizer_common/sanitizer_stackdepot.cpp 40 atomic_load(&hash_and_use_count, memory_order_relaxed) & kHashMask;
78 return atomic_load(&node_->hash_and_use_count, memory_order_relaxed) &
128 uptr v = atomic_load(p, memory_order_consume);
projects/compiler-rt/lib/sanitizer_common/sanitizer_stackdepotbase.h 77 uptr cmp = atomic_load(p, memory_order_relaxed);
103 uptr v = atomic_load(p, memory_order_consume);
148 uptr v = atomic_load(p, memory_order_consume);
170 uptr s = atomic_load(p, memory_order_relaxed);
projects/compiler-rt/lib/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cpp 455 while (atomic_load(&tracer_thread_argument.done, memory_order_relaxed) == 0)
projects/compiler-rt/lib/sanitizer_common/sanitizer_thread_registry.cpp 52 return !!atomic_load(&thread_destroyed, memory_order_acquire);
projects/compiler-rt/lib/sanitizer_common/sanitizer_tls_get_addr.cpp 102 atomic_load(&number_of_live_dtls, memory_order_relaxed));
projects/compiler-rt/lib/tsan/dd/dd_interceptors.cpp 163 uptr cond = atomic_load(p, memory_order_acquire);
projects/compiler-rt/lib/tsan/rtl/tsan_clock.cpp 93 u32 v = atomic_load(ref, memory_order_acquire);
301 if (atomic_load(ref, memory_order_acquire) == 1)
460 u32 v = atomic_load(ref, memory_order_acquire);
projects/compiler-rt/lib/tsan/rtl/tsan_external.cpp 31 if (tag >= atomic_load(&used_tags, memory_order_relaxed)) return nullptr;
50 uptr tag_count = atomic_load(&used_tags, memory_order_relaxed);
61 CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));
99 CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));
projects/compiler-rt/lib/tsan/rtl/tsan_fd.cpp 57 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1)
63 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) {
77 uptr l1 = atomic_load(pl1, memory_order_consume);
133 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);
145 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);
projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp 325 if (atomic_load(&ctx->have_pending_signals, memory_order_relaxed) == 0)
502 atomic_load(&sctx->in_blocking_func, memory_order_relaxed) :
504 buf->in_signal_handler = atomic_load(&thr->in_signal_handler,
863 u32 cmp = atomic_load(g, memory_order_acquire);
945 while ((tid = atomic_load(&p->tid, memory_order_acquire)) == 0)
1009 while (atomic_load(&p.tid, memory_order_acquire) != 0)
1106 uptr cond = atomic_load(p, memory_order_acquire);
1132 CHECK_EQ(atomic_load(&ctx->in_blocking_func, memory_order_relaxed), 1);
1439 u32 v = atomic_load(a, memory_order_acquire);
1449 v = atomic_load(a, memory_order_acquire);
1943 atomic_load(&sctx->have_pending_signals, memory_order_relaxed) == 0)
1988 (sctx && atomic_load(&sctx->in_blocking_func, memory_order_relaxed))) {
1990 if (sctx && atomic_load(&sctx->in_blocking_func, memory_order_relaxed)) {
projects/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp 211 return atomic_load(to_atomic(a), to_mo(mo));
projects/compiler-rt/lib/tsan/rtl/tsan_mutex.cpp 223 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
235 if (atomic_load(&state_, memory_order_relaxed) == kUnlocked) {
265 prev = atomic_load(&state_, memory_order_acquire);
286 CHECK_NE(atomic_load(&state_, memory_order_relaxed), 0);
projects/compiler-rt/lib/tsan/rtl/tsan_rtl.cpp 178 atomic_load(&ctx->stop_background_thread, memory_order_relaxed) == 0;
213 u64 last = atomic_load(&ctx->last_symbolize_time_ns,
433 if (atomic_load(&bg_thread, memory_order_relaxed) == 0 &&
607 u64 raw = atomic_load((atomic_uint64_t*)p, memory_order_relaxed);
projects/compiler-rt/lib/xray/xray_basic_logging.cpp 94 if (atomic_load(&UseRealTSC, memory_order_acquire))
106 Header.CycleFrequency = atomic_load(&CycleFrequency, memory_order_acquire);
219 if (Delta < atomic_load(&ThresholdTicks, memory_order_relaxed)) {
392 if (!atomic_load(&UseRealTSC, memory_order_relaxed) && Verbosity())
424 atomic_load(&TicksPerSec, memory_order_acquire) *
427 __xray_set_handler_arg1(atomic_load(&UseRealTSC, memory_order_acquire)
430 __xray_set_handler(atomic_load(&UseRealTSC, memory_order_acquire)
projects/compiler-rt/lib/xray/xray_buffer_queue.cpp 162 if (atomic_load(&Finalizing, memory_order_acquire))
212 atomic_store(B->Buff.Extents, atomic_load(Buf.Extents, memory_order_acquire),
projects/compiler-rt/lib/xray/xray_buffer_queue.h 233 return atomic_load(&Finalizing, memory_order_acquire);
237 return atomic_load(&Generation, memory_order_acquire);
projects/compiler-rt/lib/xray/xray_fdr_logging.cpp 253 auto BufferSize = atomic_load(It->Extents, memory_order_acquire);
280 if (atomic_load(&LoggingStatus, memory_order_acquire) !=
367 auto BufferExtents = atomic_load(B.Extents, memory_order_acquire);
449 auto Status = atomic_load(&LoggingStatus, memory_order_acquire);
projects/compiler-rt/lib/xray/xray_init.cpp 66 if (atomic_load(&XRayInitialized, memory_order_acquire))
72 if (!atomic_load(&XRayFlagsInitialized, memory_order_acquire)) {
projects/compiler-rt/lib/xray/xray_interface.cpp 180 if (!atomic_load(&XRayInitialized,
231 if (!atomic_load(&XRayInitialized,
373 if (atomic_load(&XRayInitialized,
386 if (atomic_load(&XRayInitialized,
398 if (atomic_load(&XRayInitialized,
449 if (!atomic_load(&XRayInitialized,
projects/compiler-rt/lib/xray/xray_log_interface.cpp 201 atomic_load(&XRayBufferIterator, memory_order_acquire));
projects/compiler-rt/lib/xray/xray_profile_collector.cpp 115 if (!atomic_load(&CollectorInitialized, memory_order_acquire)) {
233 if (!atomic_load(&CollectorInitialized, memory_order_acquire))
projects/compiler-rt/lib/xray/xray_profiling.cpp 208 if (atomic_load(&ProfilerLogStatus, memory_order_acquire) !=
279 auto Status = atomic_load(&ProfilerLogStatus, memory_order_acquire);
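The entries above fall into two recurring idioms: relaxed loads of standalone counters and flags that need no ordering (verbosity, stats, thresholds), and acquire loads paired with a release store to observe published initialization, most visibly the spin-wait on thread registration in asan_interceptors.cpp, lsan_interceptors.cpp and tsan_interceptors_posix.cpp. The following is a minimal standalone sketch of that publish/spin-wait pattern, for illustration only: compiler-rt uses its own atomic_load/atomic_store wrappers from sanitizer_atomic.h rather than <atomic>, and the ThreadParam type and thread_start helper here are hypothetical.

#include <atomic>
#include <cstdio>
#include <thread>

// Hypothetical stand-in for the parameter block the interceptors hand to a
// freshly created thread; the real structures live in the asan/lsan/tsan
// interceptors listed above.
struct ThreadParam {
  std::atomic<int> is_registered{0};  // 0 until the child publishes itself
};

// Child side: finish registration, then publish the flag with a release
// store so the parent's acquire load also observes everything written
// before it.
static void thread_start(ThreadParam *p) {
  // ... register the thread with the runtime's thread registry ...
  p->is_registered.store(1, std::memory_order_release);
}

int main() {
  ThreadParam param;
  std::thread child(thread_start, &param);

  // Parent side: spin until the child is registered, mirroring
  //   while (atomic_load(&param.is_registered, memory_order_acquire) == 0)
  // from asan_interceptors.cpp above.
  while (param.is_registered.load(std::memory_order_acquire) == 0)
    std::this_thread::yield();

  std::puts("child thread registered");
  child.join();
  return 0;
}

The acquire ordering matters here because the parent goes on to read data the child wrote before its release store; a relaxed load would only be appropriate for independent values such as the verbosity, statistics, and threshold reads in the listing.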