[platform/framework/web/crosswalk.git] / src/base/memory/discardable_memory_provider.cc
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/memory/discardable_memory_provider.h"

#include "base/bind.h"
#include "base/containers/hash_tables.h"
#include "base/containers/mru_cache.h"
#include "base/debug/trace_event.h"
#include "base/lazy_instance.h"
#include "base/memory/discardable_memory.h"
#include "base/synchronization/lock.h"
#include "base/sys_info.h"

namespace base {
namespace internal {

namespace {

static base::LazyInstance<DiscardableMemoryProvider>::Leaky g_provider =
    LAZY_INSTANCE_INITIALIZER;

// If this is given a valid value via SetInstanceForTest, this pointer will be
// returned by GetInstance rather than |g_provider|.
static DiscardableMemoryProvider* g_provider_for_test = NULL;

// This is admittedly pretty magical. It's approximately enough memory for two
// 2560x1600 images.
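// (Assuming 4 bytes per pixel: 2 * 2560 * 1600 * 4 bytes is roughly 32 MB.)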
static const size_t kDefaultDiscardableMemoryLimit = 32 * 1024 * 1024;
static const size_t kDefaultBytesToReclaimUnderModeratePressure =
    kDefaultDiscardableMemoryLimit / 2;

}  // namespace

DiscardableMemoryProvider::DiscardableMemoryProvider()
    : allocations_(AllocationMap::NO_AUTO_EVICT),
      bytes_allocated_(0),
      discardable_memory_limit_(kDefaultDiscardableMemoryLimit),
      bytes_to_reclaim_under_moderate_pressure_(
          kDefaultBytesToReclaimUnderModeratePressure),
      memory_pressure_listener_(
          base::Bind(&DiscardableMemoryProvider::NotifyMemoryPressure)) {
}

DiscardableMemoryProvider::~DiscardableMemoryProvider() {
  DCHECK(allocations_.empty());
  DCHECK_EQ(0u, bytes_allocated_);
}

// static
DiscardableMemoryProvider* DiscardableMemoryProvider::GetInstance() {
  if (g_provider_for_test)
    return g_provider_for_test;
  return g_provider.Pointer();
}

// static
void DiscardableMemoryProvider::SetInstanceForTest(
    DiscardableMemoryProvider* provider) {
  g_provider_for_test = provider;
}

// static
void DiscardableMemoryProvider::NotifyMemoryPressure(
    MemoryPressureListener::MemoryPressureLevel pressure_level) {
  switch (pressure_level) {
    case MemoryPressureListener::MEMORY_PRESSURE_MODERATE:
      DiscardableMemoryProvider::GetInstance()->Purge();
      return;
    case MemoryPressureListener::MEMORY_PRESSURE_CRITICAL:
      DiscardableMemoryProvider::GetInstance()->PurgeAll();
      return;
  }

  NOTREACHED();
}

void DiscardableMemoryProvider::SetDiscardableMemoryLimit(size_t bytes) {
  AutoLock lock(lock_);
  discardable_memory_limit_ = bytes;
  EnforcePolicyWithLockAcquired();
}

void DiscardableMemoryProvider::SetBytesToReclaimUnderModeratePressure(
    size_t bytes) {
  AutoLock lock(lock_);
  bytes_to_reclaim_under_moderate_pressure_ = bytes;
  EnforcePolicyWithLockAcquired();
}

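// Registers |discardable| with the provider, recording the |bytes| of backing
// memory that may later be allocated for it through Acquire().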
void DiscardableMemoryProvider::Register(
    const DiscardableMemory* discardable, size_t bytes) {
  AutoLock lock(lock_);
  DCHECK(allocations_.Peek(discardable) == allocations_.end());
  allocations_.Put(discardable, Allocation(bytes));
}

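// Removes |discardable| from the provider, freeing any backing memory still
// held for it and updating the allocated-byte count.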
void DiscardableMemoryProvider::Unregister(
    const DiscardableMemory* discardable) {
  AutoLock lock(lock_);
  AllocationMap::iterator it = allocations_.Peek(discardable);
  if (it == allocations_.end())
    return;

  if (it->second.memory) {
    size_t bytes = it->second.bytes;
    DCHECK_LE(bytes, bytes_allocated_);
    bytes_allocated_ -= bytes;
    free(it->second.memory);
  }
  allocations_.Erase(it);
}

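// Hands ownership of |discardable|'s backing memory to the caller. If memory
// released earlier is still resident, it is returned as-is and |*purged| is
// set to false. Otherwise older entries may be purged to stay within the
// limit, a fresh allocation is made, and |*purged| is set to true. An empty
// scoped_ptr is returned when the registered size is zero.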
scoped_ptr<uint8, FreeDeleter> DiscardableMemoryProvider::Acquire(
    const DiscardableMemory* discardable,
    bool* purged) {
  AutoLock lock(lock_);
  // NB: |allocations_| is an MRU cache, and use of |Get| here updates that
  // cache.
  AllocationMap::iterator it = allocations_.Get(discardable);
  CHECK(it != allocations_.end());

  if (it->second.memory) {
    scoped_ptr<uint8, FreeDeleter> memory(it->second.memory);
    it->second.memory = NULL;
    *purged = false;
    return memory.Pass();
  }

  size_t bytes = it->second.bytes;
  if (!bytes)
    return scoped_ptr<uint8, FreeDeleter>();

  if (discardable_memory_limit_) {
    size_t limit = 0;
    if (bytes < discardable_memory_limit_)
      limit = discardable_memory_limit_ - bytes;

    PurgeLRUWithLockAcquiredUntilUsageIsWithin(limit);
  }

  bytes_allocated_ += bytes;
  *purged = true;
  return scoped_ptr<uint8, FreeDeleter>(static_cast<uint8*>(malloc(bytes)));
}

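// Returns ownership of |memory| to the provider so it can be purged under
// memory pressure. Also marks |discardable| as most recently used and
// re-checks the memory limit.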
void DiscardableMemoryProvider::Release(
    const DiscardableMemory* discardable,
    scoped_ptr<uint8, FreeDeleter> memory) {
  AutoLock lock(lock_);
  // NB: |allocations_| is an MRU cache, and use of |Get| here updates that
  // cache.
  AllocationMap::iterator it = allocations_.Get(discardable);
  CHECK(it != allocations_.end());

  DCHECK(!it->second.memory);
  it->second.memory = memory.release();

  EnforcePolicyWithLockAcquired();
}

void DiscardableMemoryProvider::PurgeAll() {
  AutoLock lock(lock_);
  PurgeLRUWithLockAcquiredUntilUsageIsWithin(0);
}

bool DiscardableMemoryProvider::IsRegisteredForTest(
    const DiscardableMemory* discardable) const {
  AutoLock lock(lock_);
  AllocationMap::const_iterator it = allocations_.Peek(discardable);
  return it != allocations_.end();
}

bool DiscardableMemoryProvider::CanBePurgedForTest(
    const DiscardableMemory* discardable) const {
  AutoLock lock(lock_);
  AllocationMap::const_iterator it = allocations_.Peek(discardable);
  return it != allocations_.end() && it->second.memory;
}

size_t DiscardableMemoryProvider::GetBytesAllocatedForTest() const {
  AutoLock lock(lock_);
  return bytes_allocated_;
}

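// Handles moderate memory pressure: frees least-recently-used backing memory
// until roughly |bytes_to_reclaim_under_moderate_pressure_| bytes have been
// reclaimed.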
void DiscardableMemoryProvider::Purge() {
  AutoLock lock(lock_);

  if (bytes_to_reclaim_under_moderate_pressure_ == 0)
    return;

  size_t limit = 0;
  if (bytes_to_reclaim_under_moderate_pressure_ < discardable_memory_limit_)
    limit = bytes_allocated_ - bytes_to_reclaim_under_moderate_pressure_;

  PurgeLRUWithLockAcquiredUntilUsageIsWithin(limit);
}

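// Walks |allocations_| from least to most recently used and frees each
// entry's backing memory until |bytes_allocated_| is at or below |limit|.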
void DiscardableMemoryProvider::PurgeLRUWithLockAcquiredUntilUsageIsWithin(
    size_t limit) {
  TRACE_EVENT1(
      "base",
      "DiscardableMemoryProvider::PurgeLRUWithLockAcquiredUntilUsageIsWithin",
      "limit", limit);

  lock_.AssertAcquired();

  for (AllocationMap::reverse_iterator it = allocations_.rbegin();
       it != allocations_.rend();
       ++it) {
    if (bytes_allocated_ <= limit)
      break;
    if (!it->second.memory)
      continue;

    size_t bytes = it->second.bytes;
    DCHECK_LE(bytes, bytes_allocated_);
    bytes_allocated_ -= bytes;
    free(it->second.memory);
    it->second.memory = NULL;
  }
}

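// Purges least-recently-used entries whenever the total allocation exceeds
// |discardable_memory_limit_|, using the same reclaim target as Purge().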
void DiscardableMemoryProvider::EnforcePolicyWithLockAcquired() {
  lock_.AssertAcquired();

  bool exceeded_bound = bytes_allocated_ > discardable_memory_limit_;
  if (!exceeded_bound || !bytes_to_reclaim_under_moderate_pressure_)
    return;

  size_t limit = 0;
  if (bytes_to_reclaim_under_moderate_pressure_ < discardable_memory_limit_)
    limit = bytes_allocated_ - bytes_to_reclaim_under_moderate_pressure_;

  PurgeLRUWithLockAcquiredUntilUsageIsWithin(limit);
}

}  // namespace internal
}  // namespace base
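
// Illustrative sketch only (not part of the original file): the call sequence
// a DiscardableMemory implementation would be expected to use against this
// provider. The |discardable| pointer and |bytes| value stand in for a real
// client object and its requested size; they are assumptions for illustration.
//
//   DiscardableMemoryProvider* provider =
//       DiscardableMemoryProvider::GetInstance();
//   provider->Register(discardable, bytes);            // On creation.
//   bool purged = false;
//   scoped_ptr<uint8, FreeDeleter> memory =
//       provider->Acquire(discardable, &purged);       // On lock.
//   // ... use memory.get(); if |purged| is true, the old contents are gone ...
//   provider->Release(discardable, memory.Pass());     // On unlock.
//   provider->Unregister(discardable);                 // On destruction.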