dtoolbase: Use patomic<size_t> for memory usage counters

This commit is contained in:
rdb 2022-12-03 22:42:03 +01:00
parent 7c85b54ba4
commit 05b3fe2170
4 changed files with 33 additions and 33 deletions

View File

@@ -18,7 +18,7 @@
INLINE void MemoryHook:: INLINE void MemoryHook::
inc_heap(size_t size) { inc_heap(size_t size) {
#ifdef DO_MEMORY_USAGE #ifdef DO_MEMORY_USAGE
AtomicAdjust::add(_requested_heap_size, (AtomicAdjust::Integer)size); _requested_heap_size.fetch_add(size, std::memory_order_relaxed);
#endif // DO_MEMORY_USAGE #endif // DO_MEMORY_USAGE
} }
@@ -30,7 +30,7 @@ INLINE void MemoryHook::
dec_heap(size_t size) { dec_heap(size_t size) {
#ifdef DO_MEMORY_USAGE #ifdef DO_MEMORY_USAGE
// assert((int)size <= _requested_heap_size); // assert((int)size <= _requested_heap_size);
AtomicAdjust::add(_requested_heap_size, -(AtomicAdjust::Integer)size); _requested_heap_size.fetch_sub(size, std::memory_order_relaxed);
#endif // DO_MEMORY_USAGE #endif // DO_MEMORY_USAGE
} }

View File

@@ -203,10 +203,10 @@ ptr_to_alloc(void *ptr, size_t &size) {
*/ */
MemoryHook:: MemoryHook::
MemoryHook(const MemoryHook &copy) : MemoryHook(const MemoryHook &copy) :
_total_heap_single_size(copy._total_heap_single_size), _total_heap_single_size(copy._total_heap_single_size.load(std::memory_order_relaxed)),
_total_heap_array_size(copy._total_heap_array_size), _total_heap_array_size(copy._total_heap_array_size.load(std::memory_order_relaxed)),
_requested_heap_size(copy._requested_heap_size), _requested_heap_size(copy._requested_heap_size.load(std::memory_order_relaxed)),
_total_mmap_size(copy._total_mmap_size), _total_mmap_size(copy._total_mmap_size.load(std::memory_order_relaxed)),
_max_heap_size(copy._max_heap_size), _max_heap_size(copy._max_heap_size),
_page_size(copy._page_size) { _page_size(copy._page_size) {
} }
@@ -250,9 +250,9 @@ heap_alloc_single(size_t size) {
size = get_ptr_size(alloc); size = get_ptr_size(alloc);
inflated_size = size; inflated_size = size;
#endif #endif
AtomicAdjust::add(_total_heap_single_size, (AtomicAdjust::Integer)size); _total_heap_single_size.fetch_add(size, std::memory_order_relaxed);
if ((size_t)AtomicAdjust::get(_total_heap_single_size) + if (_total_heap_single_size.load(std::memory_order_relaxed) +
(size_t)AtomicAdjust::get(_total_heap_array_size) > _total_heap_array_size.load(std::memory_order_relaxed) >
_max_heap_size) { _max_heap_size) {
overflow_heap_size(); overflow_heap_size();
} }
@@ -275,8 +275,8 @@ heap_free_single(void *ptr) {
void *alloc = ptr_to_alloc(ptr, size); void *alloc = ptr_to_alloc(ptr, size);
#ifdef DO_MEMORY_USAGE #ifdef DO_MEMORY_USAGE
assert((int)size <= _total_heap_single_size); assert((int)size <= _total_heap_single_size.load(std::memory_order_relaxed));
AtomicAdjust::add(_total_heap_single_size, -(AtomicAdjust::Integer)size); _total_heap_single_size.fetch_sub(size, std::memory_order_relaxed);
#endif // DO_MEMORY_USAGE #endif // DO_MEMORY_USAGE
#ifdef MEMORY_HOOK_MALLOC_LOCK #ifdef MEMORY_HOOK_MALLOC_LOCK
@@ -327,9 +327,9 @@ heap_alloc_array(size_t size) {
size = get_ptr_size(alloc); size = get_ptr_size(alloc);
inflated_size = size; inflated_size = size;
#endif #endif
AtomicAdjust::add(_total_heap_array_size, (AtomicAdjust::Integer)size); _total_heap_array_size.fetch_add(size, std::memory_order_relaxed);
if ((size_t)AtomicAdjust::get(_total_heap_single_size) + if (_total_heap_single_size.load(std::memory_order_relaxed) +
(size_t)AtomicAdjust::get(_total_heap_array_size) > _total_heap_array_size.load(std::memory_order_relaxed) >
_max_heap_size) { _max_heap_size) {
overflow_heap_size(); overflow_heap_size();
} }
@@ -383,8 +383,8 @@ heap_realloc_array(void *ptr, size_t size) {
size = get_ptr_size(alloc1); size = get_ptr_size(alloc1);
inflated_size = size; inflated_size = size;
#endif #endif
assert((AtomicAdjust::Integer)orig_size <= _total_heap_array_size); assert(orig_size <= _total_heap_array_size.load(std::memory_order_relaxed));
AtomicAdjust::add(_total_heap_array_size, (AtomicAdjust::Integer)size-(AtomicAdjust::Integer)orig_size); _total_heap_array_size.fetch_add(size - orig_size, std::memory_order_relaxed);
#endif // DO_MEMORY_USAGE #endif // DO_MEMORY_USAGE
// Align this to the requested boundary. // Align this to the requested boundary.
@@ -424,7 +424,7 @@ heap_free_array(void *ptr) {
#ifdef DO_MEMORY_USAGE #ifdef DO_MEMORY_USAGE
assert((int)size <= _total_heap_array_size); assert((int)size <= _total_heap_array_size.load(std::memory_order_relaxed));
AtomicAdjust::add(_total_heap_array_size, -(AtomicAdjust::Integer)size); _total_heap_array_size.fetch_sub(size, std::memory_order_relaxed);
#endif // DO_MEMORY_USAGE #endif // DO_MEMORY_USAGE
#ifdef MEMORY_HOOK_MALLOC_LOCK #ifdef MEMORY_HOOK_MALLOC_LOCK
@@ -489,7 +489,7 @@ mmap_alloc(size_t size, bool allow_exec) {
assert((size % _page_size) == 0); assert((size % _page_size) == 0);
#ifdef DO_MEMORY_USAGE #ifdef DO_MEMORY_USAGE
_total_mmap_size += size; _total_mmap_size.fetch_add(size, std::memory_order_relaxed);
#endif #endif
#ifdef _WIN32 #ifdef _WIN32
@@ -544,8 +544,8 @@ mmap_free(void *ptr, size_t size) {
assert((size % _page_size) == 0); assert((size % _page_size) == 0);
#ifdef DO_MEMORY_USAGE #ifdef DO_MEMORY_USAGE
assert((int)size <= _total_mmap_size); assert((int)size <= _total_mmap_size.load(std::memory_order_relaxed));
_total_mmap_size -= size; _total_mmap_size.fetch_sub(size, std::memory_order_relaxed);
#endif #endif
#ifdef _WIN32 #ifdef _WIN32

View File

@@ -16,7 +16,7 @@
#include "dtoolbase.h" #include "dtoolbase.h"
#include "numeric_types.h" #include "numeric_types.h"
#include "atomicAdjust.h" #include "patomic.h"
#include "mutexImpl.h" #include "mutexImpl.h"
#include <map> #include <map>
@@ -66,10 +66,10 @@ public:
INLINE static size_t get_ptr_size(void *ptr); INLINE static size_t get_ptr_size(void *ptr);
protected: protected:
TVOLATILE AtomicAdjust::Integer _total_heap_single_size = 0; patomic<size_t> _total_heap_single_size { 0u };
TVOLATILE AtomicAdjust::Integer _total_heap_array_size = 0; patomic<size_t> _total_heap_array_size { 0u };
TVOLATILE AtomicAdjust::Integer _requested_heap_size = 0; patomic<size_t> _requested_heap_size { 0u };
TVOLATILE AtomicAdjust::Integer _total_mmap_size = 0; patomic<size_t> _total_mmap_size { 0u };
// If the allocated heap size crosses this threshold, we call // If the allocated heap size crosses this threshold, we call
// overflow_heap_size(). // overflow_heap_size().

View File

@@ -153,7 +153,7 @@ get_total_cpp_size() {
INLINE size_t MemoryUsage:: INLINE size_t MemoryUsage::
get_panda_heap_single_size() { get_panda_heap_single_size() {
#ifdef DO_MEMORY_USAGE #ifdef DO_MEMORY_USAGE
return (size_t)AtomicAdjust::get(get_global_ptr()->_total_heap_single_size); return get_global_ptr()->_total_heap_single_size.load(std::memory_order_relaxed);
#else #else
return 0; return 0;
#endif #endif
@@ -166,7 +166,7 @@ get_panda_heap_single_size() {
INLINE size_t MemoryUsage:: INLINE size_t MemoryUsage::
get_panda_heap_array_size() { get_panda_heap_array_size() {
#ifdef DO_MEMORY_USAGE #ifdef DO_MEMORY_USAGE
return (size_t)AtomicAdjust::get(get_global_ptr()->_total_heap_array_size); return get_global_ptr()->_total_heap_array_size.load(std::memory_order_relaxed);
#else #else
return 0; return 0;
#endif #endif
@@ -181,7 +181,7 @@ INLINE size_t MemoryUsage::
get_panda_heap_overhead() { get_panda_heap_overhead() {
#if defined(DO_MEMORY_USAGE) && (defined(USE_MEMORY_DLMALLOC) || defined(USE_MEMORY_PTMALLOC2)) #if defined(DO_MEMORY_USAGE) && (defined(USE_MEMORY_DLMALLOC) || defined(USE_MEMORY_PTMALLOC2))
MemoryUsage *mu = get_global_ptr(); MemoryUsage *mu = get_global_ptr();
return (size_t)(AtomicAdjust::get(mu->_requested_heap_size) - AtomicAdjust::get(mu->_total_heap_single_size) - AtomicAdjust::get(mu->_total_heap_array_size)); return mu->_requested_heap_size.load(std::memory_order_relaxed) - mu->_total_heap_single_size.load(std::memory_order_relaxed) - mu->_total_heap_array_size.load(std::memory_order_relaxed);
#else #else
return 0; return 0;
#endif #endif
@@ -194,7 +194,7 @@ get_panda_heap_overhead() {
INLINE size_t MemoryUsage:: INLINE size_t MemoryUsage::
get_panda_mmap_size() { get_panda_mmap_size() {
#ifdef DO_MEMORY_USAGE #ifdef DO_MEMORY_USAGE
return (size_t)AtomicAdjust::get(get_global_ptr()->_total_mmap_size); return get_global_ptr()->_total_mmap_size.load(std::memory_order_relaxed);
#else #else
return 0; return 0;
#endif #endif
@@ -227,7 +227,7 @@ get_external_size() {
#else #else
// Without alternative malloc, the Panda allocated memory is also included // Without alternative malloc, the Panda allocated memory is also included
// in total_size, so we have to subtract it out. // in total_size, so we have to subtract it out.
return mu->_total_size - (size_t)mu->_total_heap_single_size - (size_t)mu->_total_heap_array_size; return mu->_total_size - mu->_total_heap_single_size.load(std::memory_order_relaxed) - mu->_total_heap_array_size.load(std::memory_order_relaxed);
#endif #endif
} else { } else {
return 0; return 0;
@@ -246,12 +246,12 @@ get_total_size() {
#ifdef DO_MEMORY_USAGE #ifdef DO_MEMORY_USAGE
MemoryUsage *mu = get_global_ptr(); MemoryUsage *mu = get_global_ptr();
if (mu->_count_memory_usage) { if (mu->_count_memory_usage) {
return mu->_total_size + (size_t)mu->_requested_heap_size; return mu->_total_size + mu->_requested_heap_size.load(std::memory_order_relaxed);
} else { } else {
#if defined(USE_MEMORY_DLMALLOC) || defined(USE_MEMORY_PTMALLOC2) #if defined(USE_MEMORY_DLMALLOC) || defined(USE_MEMORY_PTMALLOC2)
return (size_t)mu->_requested_heap_size; return mu->_requested_heap_size.load(std::memory_order_relaxed);
#else #else
return (size_t)(AtomicAdjust::get(mu->_total_heap_single_size) + AtomicAdjust::get(mu->_total_heap_array_size)); return mu->_total_heap_single_size.load(std::memory_order_relaxed) + mu->_total_heap_array_size.load(std::memory_order_relaxed);
#endif #endif
} }
#else #else