LRUCache.h 8.6 KB

#ifndef HV_LRU_CACHE_H_
#define HV_LRU_CACHE_H_

#include <unordered_map>
#include <list>
#include <mutex>
#include <memory>
#include <functional>

namespace hv {

/**
 * @brief Thread-safe LRU (Least Recently Used) Cache template
 *
 * This template provides a generic LRU cache implementation with the following features:
 * - Thread-safe operations using mutex
 * - Configurable capacity with automatic eviction
 * - O(1) get, put, and remove operations
 * - Optional eviction callback for cleanup
 *
 * @tparam Key The key type (must be hashable)
 * @tparam Value The value type
 */
template<typename Key, typename Value>
class LRUCache {
public:
    using key_type = Key;
    using value_type = Value;
    using eviction_callback_t = std::function<void(const Key&, const Value&)>;

private:
    // Doubly-linked list node for LRU ordering
    struct Node {
        Key key;
        Value value;
        Node(const Key& k, const Value& v) : key(k), value(v) {}
    };

    using node_list_t = std::list<Node>;
    using node_iterator_t = typename node_list_t::iterator;
    using hash_map_t = std::unordered_map<Key, node_iterator_t>;

public:
    /**
     * @brief Construct LRUCache with specified capacity
     * @param capacity Maximum number of items to cache (default: 100)
     */
    explicit LRUCache(size_t capacity = 100)
        : capacity_(capacity), eviction_callback_(nullptr) {
        if (capacity_ == 0) {
            capacity_ = 1; // Minimum capacity of 1
        }
    }

    /**
     * @brief Destructor
     */
    virtual ~LRUCache() {
        clear();
    }

    // Disable copy constructor and assignment operator
    LRUCache(const LRUCache&) = delete;
    LRUCache& operator=(const LRUCache&) = delete;

    /**
     * @brief Set eviction callback function
     * @param callback Function to call when items are evicted
     */
    void set_eviction_callback(eviction_callback_t callback) {
        std::lock_guard<std::mutex> lock(mutex_);
        eviction_callback_ = callback;
    }

    /**
     * @brief Get value by key
     * @param key The key to search for
     * @param value Output parameter for the value
     * @return true if key exists, false otherwise
     */
    bool get(const Key& key, Value& value) {
        std::lock_guard<std::mutex> lock(mutex_);
        auto it = hash_map_.find(key);
        if (it == hash_map_.end()) {
            return false;
        }
        // Move to front (most recently used)
        move_to_front(it->second);
        value = it->second->value;
        return true;
    }
    /**
     * @brief Get value by key (alternative interface)
     * @param key The key to search for
     * @return Pointer to value if exists, nullptr otherwise
     * @note The returned pointer stays valid only until the entry is evicted
     *       or removed; prefer get(key, value) when the cache is shared
     *       between threads.
     */
    Value* get(const Key& key) {
        std::lock_guard<std::mutex> lock(mutex_);
        auto it = hash_map_.find(key);
        if (it == hash_map_.end()) {
            return nullptr;
        }
        // Move to front (most recently used)
        move_to_front(it->second);
        return &(it->second->value);
    }
    /**
     * @brief Put key-value pair into cache
     * @param key The key
     * @param value The value
     * @return true if new item was added, false if existing item was updated
     */
    bool put(const Key& key, const Value& value) {
        std::lock_guard<std::mutex> lock(mutex_);
        auto it = hash_map_.find(key);
        if (it != hash_map_.end()) {
            // Update existing item
            it->second->value = value;
            move_to_front(it->second);
            return false;
        }
        // Add new item, evicting the least recently used entry if at capacity
        if (node_list_.size() >= capacity_) {
            evict_lru();
        }
        node_list_.emplace_front(key, value);
        hash_map_[key] = node_list_.begin();
        return true;
    }

    /**
     * @brief Remove item by key
     * @param key The key to remove
     * @return true if item was removed, false if key not found
     */
    bool remove(const Key& key) {
        std::lock_guard<std::mutex> lock(mutex_);
        auto it = hash_map_.find(key);
        if (it == hash_map_.end()) {
            return false;
        }
        // Call eviction callback if set
        if (eviction_callback_) {
            eviction_callback_(it->second->key, it->second->value);
        }
        node_list_.erase(it->second);
        hash_map_.erase(it);
        return true;
    }

    /**
     * @brief Check if key exists in cache
     * @param key The key to check
     * @return true if key exists, false otherwise
     */
    bool contains(const Key& key) const {
        std::lock_guard<std::mutex> lock(mutex_);
        return hash_map_.find(key) != hash_map_.end();
    }

    /**
     * @brief Clear all items from cache
     */
    void clear() {
        std::lock_guard<std::mutex> lock(mutex_);
        if (eviction_callback_) {
            for (const auto& node : node_list_) {
                eviction_callback_(node.key, node.value);
            }
        }
        node_list_.clear();
        hash_map_.clear();
    }

    /**
     * @brief Get current cache size
     * @return Number of items in cache
     */
    size_t size() const {
        std::lock_guard<std::mutex> lock(mutex_);
        return node_list_.size();
    }
    /**
     * @brief Get cache capacity
     * @return Maximum number of items cache can hold
     */
    size_t capacity() const {
        // Lock so the read stays consistent with set_capacity() on other threads
        std::lock_guard<std::mutex> lock(mutex_);
        return capacity_;
    }
    /**
     * @brief Check if cache is empty
     * @return true if cache is empty, false otherwise
     */
    bool empty() const {
        std::lock_guard<std::mutex> lock(mutex_);
        return node_list_.empty();
    }

    /**
     * @brief Set new capacity (may trigger eviction)
     * @param new_capacity New capacity value
     */
    void set_capacity(size_t new_capacity) {
        if (new_capacity == 0) {
            new_capacity = 1; // Minimum capacity of 1
        }
        std::lock_guard<std::mutex> lock(mutex_);
        capacity_ = new_capacity;
        // Evict excess items if necessary
        while (node_list_.size() > capacity_) {
            evict_lru();
        }
    }

    /**
     * @brief Apply a function to all cached items (for iteration)
     * @param func Function to apply to each key-value pair
     * Note: This is provided for compatibility but should be used carefully
     * as it may affect performance due to locking
     */
    template<typename Func>
    void for_each(Func func) {
        std::lock_guard<std::mutex> lock(mutex_);
        for (const auto& node : node_list_) {
            func(node.key, node.value);
        }
    }

    /**
     * @brief Remove items that match a predicate
     * @param predicate Function that returns true for items to remove
     * @return Number of items removed
     */
    template<typename Predicate>
    size_t remove_if(Predicate predicate) {
        std::lock_guard<std::mutex> lock(mutex_);
        size_t removed_count = 0;
        auto it = node_list_.begin();
        while (it != node_list_.end()) {
            if (predicate(it->key, it->value)) {
                // Call eviction callback if set
                if (eviction_callback_) {
                    eviction_callback_(it->key, it->value);
                }
                hash_map_.erase(it->key);
                it = node_list_.erase(it);
                removed_count++;
            } else {
                ++it;
            }
        }
        return removed_count;
    }
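    // Minimal remove_if sketch (illustrative only; `cache`, Entry, and the
    // expire_at field are hypothetical, not defined in this header):
    //
    //   struct Entry { int value; time_t expire_at; };
    //   hv::LRUCache<std::string, Entry> cache(128);
    //   time_t now = time(nullptr);
    //   size_t purged = cache.remove_if(
    //       [now](const std::string& /*key*/, const Entry& e) {
    //           return e.expire_at <= now;   // drop expired entries
    //       });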
protected:
    /**
     * @brief Move node to front of list (most recently used position)
     * @param it Iterator to the node to move
     */
    void move_to_front(node_iterator_t it) {
        if (it != node_list_.begin()) {
            node_list_.splice(node_list_.begin(), node_list_, it);
        }
    }

    /**
     * @brief Evict least recently used item
     */
    void evict_lru() {
        if (node_list_.empty()) {
            return;
        }
        auto last = std::prev(node_list_.end());
        // Call eviction callback if set
        if (eviction_callback_) {
            eviction_callback_(last->key, last->value);
        }
        hash_map_.erase(last->key);
        node_list_.erase(last);
    }

protected:
    size_t capacity_;                         // Maximum cache capacity
    mutable std::mutex mutex_;                // Mutex for thread safety
    node_list_t node_list_;                   // Doubly-linked list for LRU ordering
    hash_map_t hash_map_;                     // Hash map for O(1) access
    eviction_callback_t eviction_callback_;   // Optional eviction callback
};

} // namespace hv

#endif // HV_LRU_CACHE_H_
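
// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of this header): a small cache of
// string keys and int values with an eviction callback. Everything below other
// than hv::LRUCache and its members is hypothetical.
//
//   #include "LRUCache.h"
//   #include <iostream>
//   #include <string>
//
//   int main() {
//       hv::LRUCache<std::string, int> cache(2);        // capacity 2
//       cache.set_eviction_callback(
//           [](const std::string& k, const int& v) {
//               std::cout << "evicted " << k << "=" << v << "\n";
//           });
//       cache.put("a", 1);
//       cache.put("b", 2);
//       cache.put("c", 3);          // evicts "a" (least recently used)
//       int v = 0;
//       if (cache.get("b", v)) {    // "b" becomes most recently used
//           std::cout << "b=" << v << "\n";
//       }
//       return 0;
//   }
// ---------------------------------------------------------------------------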