lru_cache.h

#pragma once
#include <iostream>
#include <list>
#include <mutex>
#include <stdexcept>
#include <unordered_map>

namespace qcloud_cos {

// Thread-safe LRU cache: a doubly linked list keeps entries in recency order
// (most recently used at the front) and a hash map gives O(1) lookup of the
// list node that holds each key.
template <typename KeyType, typename ValueType>
class LruCache {
 public:
  using KeyValuePair = std::pair<KeyType, ValueType>;
  using ListIterator = typename std::list<KeyValuePair>::iterator;

  LruCache(size_t size) : m_max_size(size) {}
  ~LruCache() {}

  // Insert or update a key, placing it at the front of the recency list.
  // Evicts the least recently used entry once the cache exceeds m_max_size.
  void Put(const KeyType& key, const ValueType& value) {
    std::lock_guard<std::mutex> lock(m_mutex);
    m_entry_list.emplace_front(key, value);
    auto it = m_entry_map.find(key);
    if (it != m_entry_map.end()) {
      // Key already cached: drop the old node; the new front node replaces it.
      m_entry_list.erase(it->second);
    } else if (m_entry_list.size() > m_max_size) {
      // New key pushed the cache over capacity: evict the oldest entry.
      m_entry_map.erase(m_entry_list.back().first);
      m_entry_list.pop_back();
    }
    m_entry_map[key] = m_entry_list.begin();
  }

  // Look up a key and mark it as most recently used.
  // Throws std::range_error if the key is not cached.
  const ValueType& Get(const KeyType& key) {
    std::lock_guard<std::mutex> lock(m_mutex);
    auto it = m_entry_map.find(key);
    if (it == m_entry_map.end()) {
      throw std::range_error("No such key in cache");
    }
    // Move the entry to the front of the recency list and refresh the map.
    m_entry_list.emplace_front(key, it->second->second);
    m_entry_list.erase(it->second);
    m_entry_map[key] = m_entry_list.begin();
    return m_entry_list.begin()->second;
  }

  bool Exist(const KeyType& key) const {
    std::lock_guard<std::mutex> lock(m_mutex);
    return m_entry_map.find(key) != m_entry_map.end();
  }

  size_t Size() const {
    std::lock_guard<std::mutex> lock(m_mutex);
    return m_entry_map.size();
  }

 private:
  std::list<KeyValuePair> m_entry_list;
  std::unordered_map<KeyType, ListIterator> m_entry_map;
  size_t m_max_size;
  mutable std::mutex m_mutex;
};

}  // namespace qcloud_cos
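
// Example usage (a minimal sketch, not part of the original header; key and
// value types below are illustrative only):
//
//   qcloud_cos::LruCache<std::string, int> cache(2);
//   cache.Put("a", 1);
//   cache.Put("b", 2);
//   cache.Put("c", 3);              // evicts "a", the least recently used key
//   if (cache.Exist("b")) {
//     int v = cache.Get("b");       // moves "b" to the front of the recency list
//   }
//
// Get() throws std::range_error for a missing key, so call Exist() first or
// catch the exception.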