Diffstat (limited to 'src/support')
 -rw-r--r--  src/support/lockedpool.cpp | 18 ++++++++++--------
 -rw-r--r--  src/support/lockedpool.h   | 10 +++++-----
 2 files changed, 15 insertions(+), 13 deletions(-)
diff --git a/src/support/lockedpool.cpp b/src/support/lockedpool.cpp
index 24ae4bdd1e..0a58acf738 100644
--- a/src/support/lockedpool.cpp
+++ b/src/support/lockedpool.cpp
@@ -26,6 +26,7 @@
#include <iomanip>
#include <iostream>
#endif
+#include <utility>
LockedPoolManager* LockedPoolManager::_instance = nullptr;
@@ -42,12 +43,12 @@ static inline size_t align_up(size_t x, size_t align)
// Implementation: Arena
Arena::Arena(void *base_in, size_t size_in, size_t alignment_in):
- base(static_cast<char*>(base_in)), end(static_cast<char*>(base_in) + size_in), alignment(alignment_in)
+ base(base_in), end(static_cast<char*>(base_in) + size_in), alignment(alignment_in)
{
// Start with one free chunk that covers the entire arena
auto it = size_to_free_chunk.emplace(size_in, base);
chunks_free.emplace(base, it);
- chunks_free_end.emplace(base + size_in, it);
+ chunks_free_end.emplace(static_cast<char*>(base) + size_in, it);
}
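
The constructor now stores base as a void*, so every byte offset needs an explicit cast: standard C++ forbids arithmetic on void* (GCC accepts it only as an extension that treats sizeof(void) as 1). A minimal standalone sketch of the pattern the patch adopts, with a hypothetical offset_ptr helper that is not part of the change:

    #include <cstddef>

    // Hypothetical helper (not in the patch): keep pointers as void*
    // and cast to char* only at the single point where byte arithmetic
    // happens; arithmetic directly on void* is ill-formed standard C++.
    static void* offset_ptr(void* base, std::size_t offset)
    {
        return static_cast<char*>(base) + offset;
    }

The end-of-chunk key emplaced above is exactly this computation: static_cast<char*>(base) + size_in.
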
Arena::~Arena()
@@ -73,8 +74,9 @@ void* Arena::alloc(size_t size)
// Create the used-chunk, taking its space from the end of the free-chunk
const size_t size_remaining = size_ptr_it->first - size;
- auto allocated = chunks_used.emplace(size_ptr_it->second + size_remaining, size).first;
- chunks_free_end.erase(size_ptr_it->second + size_ptr_it->first);
+ char* const free_chunk = static_cast<char*>(size_ptr_it->second);
+ auto allocated = chunks_used.emplace(free_chunk + size_remaining, size).first;
+ chunks_free_end.erase(free_chunk + size_ptr_it->first);
if (size_ptr_it->first == size) {
// whole chunk is used up
chunks_free.erase(size_ptr_it->second);
@@ -82,11 +84,11 @@ void* Arena::alloc(size_t size)
// still some memory left in the chunk
auto it_remaining = size_to_free_chunk.emplace(size_remaining, size_ptr_it->second);
chunks_free[size_ptr_it->second] = it_remaining;
- chunks_free_end.emplace(size_ptr_it->second + size_remaining, it_remaining);
+ chunks_free_end.emplace(free_chunk + size_remaining, it_remaining);
}
size_to_free_chunk.erase(size_ptr_it);
- return reinterpret_cast<void*>(allocated->first);
+ return allocated->first;
}
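
Above this hunk, outside the excerpt, Arena::alloc picks the free chunk to carve from. The size-sorted multimap exists precisely so that this lookup is O(log n) best-fit, per the comment on size_to_free_chunk in lockedpool.h; a hedged, self-contained sketch of such a lookup:

    #include <cstddef>
    #include <map>

    // Illustrative sketch, not the file's verbatim code: a multimap
    // keyed by chunk size yields the smallest free chunk that still
    // fits the request in a single lower_bound call.
    using SizeToChunkSortedMap = std::multimap<std::size_t, void*>;

    SizeToChunkSortedMap::iterator best_fit(SizeToChunkSortedMap& free_chunks,
                                            std::size_t size)
    {
        // First element whose key (chunk size) is not less than size;
        // end() means no free chunk is large enough.
        return free_chunks.lower_bound(size);
    }

The hunk then takes the allocation from the tail of that chunk, so the shrunken remainder keeps its begin address and only the end-address entry in chunks_free_end has to be re-keyed.
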
void Arena::free(void *ptr)
@@ -97,11 +99,11 @@ void Arena::free(void *ptr)
}
// Remove chunk from used map
- auto i = chunks_used.find(static_cast<char*>(ptr));
+ auto i = chunks_used.find(ptr);
if (i == chunks_used.end()) {
throw std::runtime_error("Arena: invalid or double free");
}
- std::pair<char*, size_t> freed = *i;
+ auto freed = std::make_pair(static_cast<char*>(i->first), i->second);
chunks_used.erase(i);
// coalesce freed with previous chunk
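
The excerpt cuts off at the coalescing comment. As a hedged sketch of what these maps make possible (mirroring the technique, not quoting the continuation): chunks_free_end gives O(1) access to a free neighbour that ends exactly where the freed chunk begins, so merging amounts to widening the freed pair and dropping the neighbour's entries:

    #include <cstddef>
    #include <map>
    #include <unordered_map>
    #include <utility>

    using SizeToChunkSortedMap = std::multimap<std::size_t, void*>;
    using ChunkToSizeMap =
        std::unordered_map<void*, SizeToChunkSortedMap::const_iterator>;

    // Sketch: absorb a free chunk that ends at freed.first, where freed
    // is the std::pair<char*, size_t> built in Arena::free above.
    void coalesce_prev(std::pair<char*, std::size_t>& freed,
                       SizeToChunkSortedMap& size_to_free_chunk,
                       ChunkToSizeMap& chunks_free_end)
    {
        auto prev = chunks_free_end.find(freed.first);
        if (prev != chunks_free_end.end()) {
            freed.first -= prev->second->first;   // move the start back
            freed.second += prev->second->first;  // grow by neighbour's size
            size_to_free_chunk.erase(prev->second);
            chunks_free_end.erase(prev);
        }
    }

A symmetric find in chunks_free would merge a following free chunk, after which the coalesced range is re-registered in all three maps.
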
diff --git a/src/support/lockedpool.h b/src/support/lockedpool.h
index 1bba459377..81e0df513a 100644
--- a/src/support/lockedpool.h
+++ b/src/support/lockedpool.h
@@ -89,23 +89,23 @@ public:
*/
bool addressInArena(void *ptr) const { return ptr >= base && ptr < end; }
private:
- typedef std::multimap<size_t, char*> SizeToChunkSortedMap;
+ typedef std::multimap<size_t, void*> SizeToChunkSortedMap;
/** Map to enable O(log(n)) best-fit allocation, as it's sorted by size */
SizeToChunkSortedMap size_to_free_chunk;
- typedef std::unordered_map<char*, SizeToChunkSortedMap::const_iterator> ChunkToSizeMap;
+ typedef std::unordered_map<void*, SizeToChunkSortedMap::const_iterator> ChunkToSizeMap;
/** Map from begin of free chunk to its node in size_to_free_chunk */
ChunkToSizeMap chunks_free;
/** Map from end of free chunk to its node in size_to_free_chunk */
ChunkToSizeMap chunks_free_end;
/** Map from begin of used chunk to its size */
- std::unordered_map<char*, size_t> chunks_used;
+ std::unordered_map<void*, size_t> chunks_used;
/** Base address of arena */
- char* base;
+ void* base;
/** End address of arena */
- char* end;
+ void* end;
/** Minimum chunk alignment */
size_t alignment;
};
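
The header change is what enables the .cpp simplifications: with void* keys, Arena::alloc can return allocated->first directly and Arena::free can look up the caller's pointer without a cast, as the hunks above show. A small sketch of that payoff, using a standalone map (hypothetical, not the class itself):

    #include <cstddef>
    #include <unordered_map>

    // Standalone sketch mirroring the chunks_used change: void* keys
    // accept the caller's pointer as-is (std::hash<void*> is provided
    // by the standard library).
    std::unordered_map<void*, std::size_t> chunks_used;

    std::size_t used_size(void* ptr)
    {
        auto i = chunks_used.find(ptr);  // previously: find(static_cast<char*>(ptr))
        return i == chunks_used.end() ? 0 : i->second;
    }
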