FST  openfst-1.8.3
OpenFst Library
memory.h
Go to the documentation of this file.
1 // Copyright 2005-2024 Google LLC
2 //
3 // Licensed under the Apache License, Version 2.0 (the 'License');
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an 'AS IS' BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 //
15 // See www.openfst.org for extensive documentation on this weighted
16 // finite-state transducer library.
17 //
18 // FST memory utilities.
19 
20 #ifndef FST_MEMORY_H_
21 #define FST_MEMORY_H_
22 
23 #include <cstddef>
24 #include <list>
25 #include <memory>
26 #include <utility>
27 #include <vector>
28 
29 #include <fst/log.h>
30 #include <fstream>
31 
32 namespace fst {
33 
34 // Default block allocation size.
35 inline constexpr int kAllocSize = 64;
36 
37 // Minimum number of allocations per block.
38 inline constexpr int kAllocFit = 4;
39 
// Base class for MemoryArena that allows (e.g.) MemoryArenaCollection to
// easily manipulate collections of variously sized arenas.
class MemoryArenaBase {
 public:
  virtual ~MemoryArenaBase() = default;

  // Size in bytes of the objects managed by the derived arena.
  virtual size_t Size() const = 0;
};
47 
48 namespace internal {
49 
50 // Allocates 'size' unintialized memory chunks of size object_size from
51 // underlying blocks of (at least) size 'block_size * object_size'.
52 // All blocks are freed when this class is deleted. Result of allocate() will
53 // be aligned to object_size.
54 template <size_t object_size>
56  public:
57  static constexpr size_t kObjectSize = object_size;
58 
59  explicit MemoryArenaImpl(size_t block_size = kAllocSize)
60  : block_size_(block_size * kObjectSize), block_pos_(0) {
61  blocks_.push_front(
62  fst::make_unique_for_overwrite<std::byte[]>(block_size_));
63  }
64 
65  void *Allocate(size_t size) {
66  const auto byte_size = size * kObjectSize;
67  if (byte_size * kAllocFit > block_size_) {
68  // Large block; adds new large block.
69  blocks_.push_back(
70  fst::make_unique_for_overwrite<std::byte[]>(byte_size));
71  return blocks_.back().get();
72  }
73  if (block_pos_ + byte_size > block_size_) {
74  // Doesn't fit; adds new standard block.
75  block_pos_ = 0;
76  blocks_.push_front(
77  fst::make_unique_for_overwrite<std::byte[]>(block_size_));
78  }
79  // Fits; uses current block.
80  auto *ptr = &blocks_.front()[block_pos_];
81  block_pos_ += byte_size;
82  return ptr;
83  }
84 
85  size_t Size() const override { return kObjectSize; }
86 
87  private:
88  const size_t block_size_; // Default block size in bytes.
89  size_t block_pos_; // Current position in block in bytes.
90  std::list<std::unique_ptr<std::byte[]>> blocks_; // List of allocated blocks.
91 };
92 
93 } // namespace internal
94 
95 template <typename T>
97 
// Base class for MemoryPool that allows (e.g.) MemoryPoolCollection to easily
// manipulate collections of variously sized pools.
class MemoryPoolBase {
 public:
  virtual ~MemoryPoolBase() = default;

  // Size in bytes of the objects managed by the derived pool.
  virtual size_t Size() const = 0;
};
105 
106 namespace internal {
107 
108 // Allocates and frees initially uninitialized memory chunks of size
109 // object_size. Keeps an internal list of freed chunks that are reused (as is)
110 // on the next allocation if available. Chunks are constructed in blocks of size
111 // 'pool_size'.
112 template <size_t object_size>
114  public:
115  static constexpr size_t kObjectSize = object_size;
116 
117  struct Link {
118  std::byte buf[kObjectSize];
120  };
121 
122  explicit MemoryPoolImpl(size_t pool_size)
123  : mem_arena_(pool_size), free_list_(nullptr) {}
124 
125  void *Allocate() {
126  Link *link;
127  if (free_list_ == nullptr) {
128  link = static_cast<Link *>(mem_arena_.Allocate(1));
129  link->next = nullptr;
130  } else {
131  link = free_list_;
132  free_list_ = link->next;
133  }
134  return fst::implicit_cast<std::byte *>(link->buf);
135  }
136 
137  void Free(void *ptr) {
138  if (ptr) {
139  auto *link = static_cast<Link *>(ptr);
140  link->next = free_list_;
141  free_list_ = link;
142  }
143  }
144 
145  size_t Size() const override { return kObjectSize; }
146 
147  private:
148  MemoryArena<Link> mem_arena_;
149  Link *free_list_; // Not owned.
150 
151  MemoryPoolImpl(const MemoryPoolImpl &) = delete;
152  MemoryPoolImpl &operator=(const MemoryPoolImpl &) = delete;
153 };
154 
155 } // namespace internal
156 
157 // Allocates and frees initially uninitialized memory chunks of size sizeof(T).
158 // All memory is freed when the class is deleted. The result of Allocate() will
159 // be suitably memory-aligned. Combined with placement operator new and destroy
160 // functions for the T class, this can be used to improve allocation efficiency.
161 // See nlp/fst/lib/visit.h (global new) and nlp/fst/lib/dfs-visit.h (class new)
162 // for examples.
163 template <typename T>
164 class MemoryPool : public internal::MemoryPoolImpl<sizeof(T)> {
165  public:
166  // 'pool_size' specifies the size of the initial pool and how it is extended.
167  explicit MemoryPool(size_t pool_size = kAllocSize)
168  : internal::MemoryPoolImpl<sizeof(T)>(pool_size) {}
169 };
170 
171 // Stores a collection of memory arenas.
173  public:
174  // 'block_size' specifies the block size of the arenas.
175  explicit MemoryArenaCollection(size_t block_size = kAllocSize)
176  : block_size_(block_size) {}
177 
178  template <typename T>
180  if (sizeof(T) >= arenas_.size()) arenas_.resize(sizeof(T) + 1);
181  auto &arena = arenas_[sizeof(T)];
182  if (arena == nullptr) {
183  arena = std::make_unique<MemoryArena<T>>(block_size_);
184  }
185  return down_cast<MemoryArena<T> *>(arena.get());
186  }
187 
188  size_t BlockSize() const { return block_size_; }
189 
190  private:
191  size_t block_size_;
192  std::vector<std::unique_ptr<MemoryArenaBase>> arenas_;
193 };
194 
195 // Stores a collection of memory pools
197  public:
198  // 'pool_size' specifies the size of initial pool and how it is extended.
199  explicit MemoryPoolCollection(size_t pool_size = kAllocSize)
200  : pool_size_(pool_size) {}
201 
202  template <typename T>
204  if (sizeof(T) >= pools_.size()) pools_.resize(sizeof(T) + 1);
205  auto &pool = pools_[sizeof(T)];
206  if (pool == nullptr) pool = std::make_unique<MemoryPool<T>>(pool_size_);
207  return down_cast<MemoryPool<T> *>(pool.get());
208  }
209 
210  size_t PoolSize() const { return pool_size_; }
211 
212  private:
213  size_t pool_size_;
214  std::vector<std::unique_ptr<MemoryPoolBase>> pools_;
215 };
216 
217 // STL allocator using memory arenas. Memory is allocated from underlying
218 // blocks of size 'block_size * sizeof(T)'. Memory is freed only when all
219 // objects using this allocator are destroyed and there is otherwise no reuse
220 // (unlike PoolAllocator).
221 //
222 // This allocator has object-local state so it should not be used with splicing
223 // or swapping operations between objects created with different allocators nor
224 // should it be used if copies must be thread-safe. The result of allocate()
225 // will be suitably memory-aligned.
226 template <typename T>
228  public:
229  using Allocator = std::allocator<T>;
230  using size_type = typename Allocator::size_type;
231  using value_type = typename Allocator::value_type;
232 
233  explicit BlockAllocator(size_t block_size = kAllocSize)
234  : arenas_(std::make_shared<MemoryArenaCollection>(block_size)) {}
235 
236  template <typename U>
237  explicit BlockAllocator(const BlockAllocator<U> &arena_alloc)
238  : arenas_(arena_alloc.Arenas()) {}
239 
240  T *allocate(size_type n, const void *hint = nullptr) {
241  if (n * kAllocFit <= kAllocSize) {
242  return static_cast<T *>(Arena()->Allocate(n));
243  } else {
244  auto allocator = Allocator();
245  return std::allocator_traits<Allocator>::allocate(allocator, n, hint);
246  }
247  }
248 
249  void deallocate(T *p, size_type n) {
250  if (n * kAllocFit > kAllocSize) Allocator().deallocate(p, n);
251  }
252 
253  std::shared_ptr<MemoryArenaCollection> Arenas() const { return arenas_; }
254 
255  private:
256  MemoryArena<T> *Arena() { return arenas_->Arena<T>(); }
257 
258  std::shared_ptr<MemoryArenaCollection> arenas_;
259 };
260 
261 template <typename T, typename U>
262 bool operator==(const BlockAllocator<T> &alloc1,
263  const BlockAllocator<U> &alloc2) {
264  return false;
265 }
266 
267 template <typename T, typename U>
268 bool operator!=(const BlockAllocator<T> &alloc1,
269  const BlockAllocator<U> &alloc2) {
270  return true;
271 }
272 
273 // STL allocator using memory pools. Memory is allocated from underlying
274 // blocks of size 'block_size * sizeof(T)'. Keeps an internal list of freed
275 // chunks thare are reused on the next allocation.
276 //
277 // This allocator has object-local state so it should not be used with splicing
278 // or swapping operations between objects created with different allocators nor
279 // should it be used if copies must be thread-safe. The result of allocate()
280 // will be suitably memory-aligned.
281 template <typename T>
283  public:
284  using Allocator = std::allocator<T>;
285  using size_type = typename Allocator::size_type;
286  using value_type = typename Allocator::value_type;
287 
288  explicit PoolAllocator(size_t pool_size = kAllocSize)
289  : pools_(std::make_shared<MemoryPoolCollection>(pool_size)) {}
290 
291  template <typename U>
292  explicit PoolAllocator(const PoolAllocator<U> &pool_alloc)
293  : pools_(pool_alloc.Pools()) {}
294 
295  T *allocate(size_type n, const void *hint = nullptr) {
296  if (n == 1) {
297  return static_cast<T *>(Pool<1>()->Allocate());
298  } else if (n == 2) {
299  return static_cast<T *>(Pool<2>()->Allocate());
300  } else if (n <= 4) {
301  return static_cast<T *>(Pool<4>()->Allocate());
302  } else if (n <= 8) {
303  return static_cast<T *>(Pool<8>()->Allocate());
304  } else if (n <= 16) {
305  return static_cast<T *>(Pool<16>()->Allocate());
306  } else if (n <= 32) {
307  return static_cast<T *>(Pool<32>()->Allocate());
308  } else if (n <= 64) {
309  return static_cast<T *>(Pool<64>()->Allocate());
310  } else {
311  auto allocator = Allocator();
312  return std::allocator_traits<Allocator>::allocate(allocator, n, hint);
313  }
314  }
315 
316  void deallocate(T *p, size_type n) {
317  if (n == 1) {
318  Pool<1>()->Free(p);
319  } else if (n == 2) {
320  Pool<2>()->Free(p);
321  } else if (n <= 4) {
322  Pool<4>()->Free(p);
323  } else if (n <= 8) {
324  Pool<8>()->Free(p);
325  } else if (n <= 16) {
326  Pool<16>()->Free(p);
327  } else if (n <= 32) {
328  Pool<32>()->Free(p);
329  } else if (n <= 64) {
330  Pool<64>()->Free(p);
331  } else {
332  Allocator().deallocate(p, n);
333  }
334  }
335 
336  std::shared_ptr<MemoryPoolCollection> Pools() const { return pools_; }
337 
338  private:
339  template <int n>
340  struct TN {
341  T buf[n];
342  };
343 
344  template <int n>
345  MemoryPool<TN<n>> *Pool() {
346  return pools_->Pool<TN<n>>();
347  }
348 
349  std::shared_ptr<MemoryPoolCollection> pools_;
350 };
351 
352 template <typename T, typename U>
353 bool operator==(const PoolAllocator<T> &alloc1,
354  const PoolAllocator<U> &alloc2) {
355  return false;
356 }
357 
358 template <typename T, typename U>
359 bool operator!=(const PoolAllocator<T> &alloc1,
360  const PoolAllocator<U> &alloc2) {
361  return true;
362 }
363 
364 } // namespace fst
365 
366 #endif // FST_MEMORY_H_
MemoryPool< T > * Pool()
Definition: memory.h:203
BlockAllocator(const BlockAllocator< U > &arena_alloc)
Definition: memory.h:237
typename Allocator::value_type value_type
Definition: memory.h:231
T * allocate(size_type n, const void *hint=nullptr)
Definition: memory.h:240
MemoryArenaCollection(size_t block_size=kAllocSize)
Definition: memory.h:175
size_t PoolSize() const
Definition: memory.h:210
std::allocator< T > Allocator
Definition: memory.h:284
virtual size_t Size() const =0
constexpr int kAllocFit
Definition: memory.h:38
void Free(void *ptr)
Definition: memory.h:137
void deallocate(T *p, size_type n)
Definition: memory.h:249
constexpr int kAllocSize
Definition: memory.h:35
To down_cast(From *f)
Definition: compat.h:50
T * allocate(size_type n, const void *hint=nullptr)
Definition: memory.h:295
typename Allocator::size_type size_type
Definition: memory.h:285
MemoryArenaImpl(size_t block_size=kAllocSize)
Definition: memory.h:59
typename Allocator::value_type value_type
Definition: memory.h:286
bool operator!=(const ErrorWeight &, const ErrorWeight &)
Definition: error-weight.h:54
MemoryPool(size_t pool_size=kAllocSize)
Definition: memory.h:167
void deallocate(T *p, size_type n)
Definition: memory.h:316
std::shared_ptr< MemoryArenaCollection > Arenas() const
Definition: memory.h:253
std::unique_ptr< T > make_unique_for_overwrite()
Definition: compat.h:122
void * Allocate(size_t size)
Definition: memory.h:65
constexpr To implicit_cast(typename internal::type_identity_t< To > to)
Definition: compat.h:90
virtual ~MemoryArenaBase()=default
typename Allocator::size_type size_type
Definition: memory.h:230
MemoryPoolImpl(size_t pool_size)
Definition: memory.h:122
size_t Size() const override
Definition: memory.h:145
size_t Size() const override
Definition: memory.h:85
size_t BlockSize() const
Definition: memory.h:188
PoolAllocator(size_t pool_size=kAllocSize)
Definition: memory.h:288
bool operator==(const ErrorWeight &, const ErrorWeight &)
Definition: error-weight.h:51
PoolAllocator(const PoolAllocator< U > &pool_alloc)
Definition: memory.h:292
MemoryPoolCollection(size_t pool_size=kAllocSize)
Definition: memory.h:199
BlockAllocator(size_t block_size=kAllocSize)
Definition: memory.h:233
MemoryArena< T > * Arena()
Definition: memory.h:179
std::shared_ptr< MemoryPoolCollection > Pools() const
Definition: memory.h:336
std::allocator< T > Allocator
Definition: memory.h:229