// PArena: arena allocator state and interface (excerpt from pmalloc.hpp).
#include <cstddef>     // size_t
#include <cstdint>     // uint32_t, uint64_t
#include <map>
#include <pthread.h>

#include "pregion_configs.hpp"   // kArenaSize_
#include "atlas_api.h"           // NVM_FLUSH

// Free-list bookkeeping: bin number -> (block address -> flag).
typedef std::map<void*, bool> MemMap;
typedef std::map<uint32_t, MemMap> FreeList;

class PArena {
public:
    explicit PArena() : CurrAllocAddr_{nullptr}, StartAddr_{nullptr},
                        EndAddr_{nullptr}, ActualAlloced_{0}, FreeList_{new FreeList}
        { pthread_mutex_init(&Lock_, NULL); flushDirtyCacheLines(); }

    ~PArena()
        { if (FreeList_) { delete FreeList_; FreeList_ = nullptr; } }

    PArena& operator=(const PArena&) = delete;

    void initAllocAddresses(void *start_addr);
    void initTransients();

    void *get_curr_alloc_addr() const { return CurrAllocAddr_; }
    void *get_start_addr() const { return StartAddr_; }
    void *get_end_addr() const { return EndAddr_; }
    uint64_t get_actual_alloced() const { return ActualAlloced_; }

    bool doesRangeCheck(void *start, size_t sz) const
        { return start >= StartAddr_ &&
              (static_cast<char*>(start) + sz) <
              static_cast<char*>(EndAddr_); }

    // Allocation entry points (defined in pmalloc.cpp).
    void *allocMem(size_t sz, bool does_need_cache_line_alignment,
                   bool does_need_logging);
    void *allocFromFreeList(size_t sz, bool does_need_cache_line_alignment,
                            bool does_need_logging);
    void *allocFromUpdatedFreeList(size_t sz, bool does_need_cache_line_alignment,
                                   bool does_need_logging);
    void *allocRawMem(size_t sz);

    void freeMem(void *ptr, bool should_log);

    void Lock() { pthread_mutex_lock(&Lock_); }
    int tryLock() { return pthread_mutex_trylock(&Lock_); }
    void Unlock() { pthread_mutex_unlock(&Lock_); }

private:
    // Persistent arena metadata.
    void *CurrAllocAddr_;
    void *StartAddr_;
    void *EndAddr_;
    uint64_t ActualAlloced_;

    // Transient bookkeeping: binned free list and per-arena lock.
    FreeList *FreeList_;
    pthread_mutex_t Lock_;

    // Flushes the cache lines holding the arena's persistent metadata.
    void flushDirtyCacheLines();

    void *carveExtraMem(char *mem, size_t actual_sz, size_t actual_free_sz);

    void insertToFreeList(uint32_t bin_no, void *mem);
    void deleteFromFreeList(uint32_t bin_no, void *mem);

    void incrementActualAllocedStats(size_t sz);
    void decrementActualAllocedStats(size_t sz);
};

inline void PArena::initAllocAddresses(void *start_addr)
{
    StartAddr_ = CurrAllocAddr_ = start_addr;
    EndAddr_ = static_cast<void*>(
        static_cast<char*>(start_addr) + kArenaSize_);
    flushDirtyCacheLines();
}

inline void PArena::initTransients()
{
    pthread_mutex_init(&Lock_, NULL);
}

inline void PArena::incrementActualAllocedStats(size_t sz)
{
#if defined(ATLAS_ALLOC_STATS)
    ActualAlloced_ += sz;
    NVM_FLUSH(&ActualAlloced_);
#endif
}

inline void PArena::decrementActualAllocedStats(size_t sz)
{
#if defined(ATLAS_ALLOC_STATS)
    ActualAlloced_ -= sz;
    NVM_FLUSH(&ActualAlloced_);
#endif
}
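To make the interface above concrete, here is a minimal usage sketch. It is illustrative only: the backing buffer comes from the ordinary heap (the backing variable is invented for the example), whereas Atlas hands an arena a slice of a mapped persistent region; the header path and the absence of namespace qualification are also assumptions.

#include <cassert>
#include <cstdlib>
#include <iostream>

#include "pmalloc.hpp"   // PArena; actual path inside the Atlas runtime may differ

int main()
{
    // Illustrative backing store only; a real arena manages a range inside
    // a persistent region, not transient heap memory.
    void *backing = std::malloc(kArenaSize_);
    assert(backing != nullptr);

    PArena arena;                        // constructor sets up the transient lock and free list
    arena.initAllocAddresses(backing);   // arena now spans [backing, backing + kArenaSize_)

    arena.Lock();
    void *p = arena.allocMem(64, /*does_need_cache_line_alignment=*/true,
                             /*does_need_logging=*/true);
    arena.Unlock();

    std::cout << "allocated " << p << ", in range: "
              << arena.doesRangeCheck(p, 64) << "\n";

    arena.Lock();
    arena.freeMem(p, /*should_log=*/true);
    arena.Unlock();

    std::free(backing);
    return 0;
}

The explicit Lock()/tryLock()/Unlock() trio suggests that the caller, not the allocator, chooses the locking policy; tryLock() in particular lets a caller probe an arena and move on if it is already held.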
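The MemMap and FreeList typedefs describe a two-level structure: a bin number maps to a per-bin map from block address to a status flag. The standalone sketch below only illustrates that container shape; insertToFreeList and deleteFromFreeList here are hypothetical stand-ins for the PArena members of the same name, not the Atlas implementation, and the meaning of the bool flag is an assumption.

#include <cstdint>
#include <iostream>
#include <map>

// Same shape as the typedefs above: bin number -> (block address -> flag).
typedef std::map<void*, bool> MemMap;
typedef std::map<uint32_t, MemMap> FreeList;

// Hypothetical stand-ins written only to show how the nested maps are used.
static void insertToFreeList(FreeList &fl, uint32_t bin_no, void *mem)
{
    fl[bin_no][mem] = true;              // first insert for a bin creates it
}

static void deleteFromFreeList(FreeList &fl, uint32_t bin_no, void *mem)
{
    auto bin = fl.find(bin_no);
    if (bin == fl.end())
        return;
    bin->second.erase(mem);
    if (bin->second.empty())
        fl.erase(bin);                   // drop bins that become empty
}

int main()
{
    FreeList fl;
    int block = 0;                       // stand-in for a freed block's address
    insertToFreeList(fl, 1, &block);
    std::cout << "bins: " << fl.size()
              << ", entries in bin 1: " << fl[1].size() << "\n";
    deleteFromFreeList(fl, 1, &block);
    std::cout << "bins after delete: " << fl.size() << "\n";
    return 0;
}

Because both levels are ordered maps, a bin's blocks come back in address order, which keeps scans over a size class deterministic.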