From: 김용섭/동작제어Lab(SR)/Engineer/삼성전자 Date: Tue, 6 Nov 2018 09:15:59 +0000 (+0900) Subject: [neurun] Introduce FirstfitAllocator (#3458) X-Git-Tag: 0.3~463 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=3a68971567b9094f566edc833a6fb8c1845d8b49;p=platform%2Fcore%2Fml%2Fnnfw.git [neurun] Introduce FirstfitAllocator (#3458) * [neurun] Introduce FirstfitAllocator Introduces FirstfitAllocator instead of BumpAllocator. This allocator allocates a block for tensor at the first-fit offset. Signed-off-by: Yongseop Kim * Revise allocate() of FirstfitAllocator - Adds comment for description of allocate() - Removes inner lambda allocateBlock() in allocate() * Change the name from Firstfit to FirstFit * Fix typo * Adds comment for _alloc_table --- diff --git a/runtimes/neurun/src/backend/cpu/MemoryAllocator.cc b/runtimes/neurun/src/backend/cpu/MemoryAllocator.cc index b9cc213..e40e4a4 100644 --- a/runtimes/neurun/src/backend/cpu/MemoryAllocator.cc +++ b/runtimes/neurun/src/backend/cpu/MemoryAllocator.cc @@ -60,6 +60,84 @@ void BumpAllocator::free(const graph::operand::Index &index) << "NOTHING does" << std::endl; } +FirstFitAllocator::~FirstFitAllocator() { delete[] _base; } + +// There are some assumptions for allocating(exactly making a reservation for memory). +// 1. About _alloc_table(std::map). +// - The table's data structure is std::map so that it always sorts +// values(Alloc(pair)) by key(base_offset). +// - This allocate() inserts key/value into _alloc_table and the free() removes the key/value from +// _alloc_table. +// - _alloc_table shows the memory status at a certain point in time. Therefore, +// - If _alloc_table has an offset and a certain size at a certain point in time, +// it means the place has been already allocated(can't allocate now. need to find someplace +// new). +// - If _alloc_table doesn't have any element for an offset and a certain size at a certain +// point in time, it means the place can be allocated. +// 2. 
In the loop for _alloc_table, we can assume the current alloc_base_offset value is bigger than
+// the previous alloc_base_offset.
+Block FirstFitAllocator::allocate(const graph::operand::Index &index, size_t size)
+{
+  assert(size != 0);
+
+  // Find the right position for allocating
+  uint32_t next_offset = 0;
+  for (auto &mem_alloc : _alloc_table)
+  {
+    auto alloc_base_offset = mem_alloc.first;
+    auto alloc_size = mem_alloc.second.second;
+    if (next_offset + size <= alloc_base_offset)
+    {
+      break;
+    }
+    else
+    {
+      next_offset = alloc_base_offset + alloc_size;
+    }
+  }
+
+  // Now next_offset is set to the proper offset
+  auto alloc = std::make_pair(index, size);
+  _alloc_table[next_offset] = alloc;
+
+  VERBOSE(FF_ALLOC) << "alloc(#" << index.value() << "): [+" << next_offset << ", sz " << size
+                    << "]" << std::endl;
+
+  if (_pos < next_offset + size)
+  {
+    _pos = next_offset + size;
+  }
+
+  Block blk{next_offset, size};
+  return blk;
+}
+
+void FirstFitAllocator::finalize()
+{
+  assert(!_base && _pos != 0);
+
+  _base = new uint8_t[_pos];
+
+  VERBOSE(FF_ALLOC) << "final position: " << _pos << std::endl;
+  VERBOSE(FF_ALLOC) << "base pointer: " << static_cast<void *>(_base) << std::endl;
+}
+
+void FirstFitAllocator::free(const graph::operand::Index &index)
+{
+  for (auto it = _alloc_table.cbegin(); it != _alloc_table.cend(); ++it)
+  {
+    auto alloc = it->second;
+    if (alloc.first == index)
+    {
+      VERBOSE(FF_ALLOC) << "free(#" << index.value() << "): [+" << it->first << ", sz "
+                        << alloc.second << "]" << std::endl;
+      _alloc_table.erase(it);
+      return;
+    }
+  }
+  assert(false && "can't enter here");
+}
+
 } // namespace cpu
 } // namespace backend
 } // namespace neurun
diff --git a/runtimes/neurun/src/backend/cpu/MemoryAllocator.h b/runtimes/neurun/src/backend/cpu/MemoryAllocator.h
index d949cf7..0f0072d 100644
--- a/runtimes/neurun/src/backend/cpu/MemoryAllocator.h
+++ b/runtimes/neurun/src/backend/cpu/MemoryAllocator.h
@@ -17,6 +17,8 @@
 #ifndef 
__NEURUN_BACKEND_CPU_MEMORY_ALLOCATOR_H__
 #define __NEURUN_BACKEND_CPU_MEMORY_ALLOCATOR_H__
 
+#include <map>
+
 #include "graph/operand/Index.h"
 
 namespace neurun
@@ -55,6 +57,24 @@ private:
   uint32_t _pos = 0;
 };
 
+class FirstFitAllocator : public IMemoryAllocator
+{
+public:
+  virtual ~FirstFitAllocator() override;
+  virtual Block allocate(const graph::operand::Index &index, size_t size) override;
+  virtual void free(const graph::operand::Index &index) override;
+  virtual void finalize() override;
+  virtual uint8_t *base() const override { return _base; }
+
+private:
+  uint8_t *_base = nullptr;
+  uint32_t _pos = 0;
+
+  using Alloc = std::pair<graph::operand::Index, size_t>; // <index, size>
+  // Use std::map because allocate() assumes that _alloc_table is sorted by uint32_t(base_offset)
+  std::map<uint32_t, Alloc> _alloc_table; // <base_offset, Alloc>
+};
+
 } // namespace cpu
 } // namespace backend
 } // namespace neurun
diff --git a/runtimes/neurun/src/backend/cpu/TensorBuilder.cc b/runtimes/neurun/src/backend/cpu/TensorBuilder.cc
index 526d46a..e1aa574 100644
--- a/runtimes/neurun/src/backend/cpu/TensorBuilder.cc
+++ b/runtimes/neurun/src/backend/cpu/TensorBuilder.cc
@@ -29,8 +29,7 @@ namespace backend
 namespace cpu
 {
 
-// TODO Apply FirstFitAllocator in the future
-TensorBuilder::TensorBuilder() : _mem_alloc(std::make_shared<BumpAllocator>())
+TensorBuilder::TensorBuilder() : _mem_alloc(std::make_shared<FirstFitAllocator>())
 {
   // DO NOTHING
 }
diff --git a/runtimes/neurun/test/backend/cpu/MemoryAllocator.cc b/runtimes/neurun/test/backend/cpu/MemoryAllocator.cc
index 9d29e79..241ed5e 100644
--- a/runtimes/neurun/test/backend/cpu/MemoryAllocator.cc
+++ b/runtimes/neurun/test/backend/cpu/MemoryAllocator.cc
@@ -54,3 +54,86 @@ TEST(BumpAllocator, finalize_test)
 
   ASSERT_NE(allocator.base(), nullptr);
 }
+
+TEST(FirstFitAllocator, allocate_free_test)
+{
+  ::neurun::backend::cpu::FirstFitAllocator allocator;
+
+  auto allocate = [&allocator](uint32_t index, size_t size, uint32_t expected_offset) {
+    ::neurun::graph::operand::Index mem_idx(index);
+    auto mem_blk = allocator.allocate(mem_idx, size);
+    ASSERT_EQ(mem_blk.offset, expected_offset);
+    ASSERT_EQ(mem_blk.size, size);
+  };
+
+  auto free = [&allocator](uint32_t index) {
+    ::neurun::graph::operand::Index mem_idx(index);
+    allocator.free(mem_idx);
+  };
+
+  // 0 ALLOC - 10
+  allocate(0, 10, 0);
+
+  // 1 ALLOC - 20
+  allocate(1, 20, 10);
+
+  // 2 ALLOC - 30
+  allocate(2, 30, 30);
+
+  // 0 FREE - 10
+  free(0);
+
+  // 3 ALLOC - 20
+  allocate(3, 20, 60);
+
+  // 4 ALLOC - 5
+  allocate(4, 5, 0);
+
+  // 5 ALLOC - 10
+  allocate(5, 10, 80);
+
+  // 6 ALLOC - 5
+  allocate(6, 5, 5);
+
+  // 2 FREE - 30
+  free(2);
+
+  // 7 ALLOC - 35
+  allocate(7, 35, 90);
+
+  // 8 ALLOC - 10
+  allocate(8, 10, 30);
+
+  // 4 FREE - 5
+  free(4);
+
+  // 9 ALLOC - 10
+  allocate(9, 10, 40);
+
+  // 10 ALLOC - 10
+  allocate(10, 10, 50);
+
+  // 6 FREE
+  free(6);
+
+  // 1 FREE
+  free(1);
+
+  // 8 FREE
+  free(8);
+
+  // 9 FREE
+  free(9);
+
+  // 10 FREE
+  free(10);
+
+  // 3 FREE
+  free(3);
+
+  // 5 FREE
+  free(5);
+
+  // 7 FREE
+  free(7);
+}