-
Notifications
You must be signed in to change notification settings - Fork 0
/
lru_cache.py
166 lines (125 loc) · 4.24 KB
/
lru_cache.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
"""
Python implementation of LRU Cache
Two data structures are used to implement a LRU Cache
1. Doubly Linked List:
The most recently used pages will be near front end and the least
recently used pages will be near rear end
2. Dictionary:
Key, Value pair. Value will correspond to the Queue
node
"""
class Node:
    """
    One entry of the doubly linked list that backs the cache: a key/value
    pair plus links to its neighbouring nodes.
    """
    def __init__(self, key, value, previous=None, next=None):
        """
        :param key: hashable cache key
        :param value: value stored under the key
        :param previous: the preceding Node in the list, or None
        :param next: the following Node in the list, or None
        """
        self.key, self.value = key, value
        self.previous, self.next = previous, next
class LRUCache:
    """
    LRU cache backed by a dict for O(1) key lookup and a doubly linked
    list for O(1) recency ordering.  The head of the list is the most
    recently used entry; the end is the least recently used and is the
    one evicted when capacity is exceeded.
    """
    def __init__(self, capacity):
        """
        :param capacity: maximum number of entries held before eviction
        """
        self.capacity = capacity
        self.cache = {}    # key -> Node
        self.head = None   # most recently used node
        self.end = None    # least recently used node

    def get(self, key):
        """
        Get the value in cache corresponding to a key.
        If key exists in the cache, then remove the corresponding
        Node from the Doubly Linked List and add it to the head of the list,
        making it the most recently used value.
        If key does not exist in cache, raise a KeyError exception.
        :param key: a hashable object(string, int, ...)
        :return: Node object
        :raises KeyError: if the key is not present
        """
        if key in self.cache:
            node = self.cache[key]
            # Re-link the node at the head so it becomes most recently used.
            self.remove(node)
            self.set_head(node)
            return node
        raise KeyError("Key does not exist in cache")

    def remove(self, current_node):
        """
        Unlink current_node from the doubly linked list, updating head/end
        as needed.  The node is NOT removed from self.cache here; callers
        decide whether to re-insert it (set_head) or discard it.
        :param current_node: Node object
        """
        if current_node.previous is not None:
            current_node.previous.next = current_node.next
        else:
            # Removing the head: its successor becomes the new head.
            self.head = current_node.next
        if current_node.next is not None:
            current_node.next.previous = current_node.previous
        else:
            # Removing the end: its predecessor becomes the new end.
            self.end = current_node.previous

    def set_head(self, current_node):
        """
        Sets the given node as head in a Doubly Linked List.
        :param current_node: Node object in Doubly Linked List
        """
        current_node.next = self.head
        current_node.previous = None
        if self.head is not None:
            self.head.previous = current_node
        self.head = current_node
        if self.end is None:
            # First node in an empty list is both head and end.
            self.end = self.head

    def insert(self, key, value):
        """
        If key exists in the cache, then update the value
        if necessary and move it to the front.
        If key does not exist, then check for capacity. If capacity has
        exceeded, then evict the end node and insert the new node at the head.
        :param key: a hashable object(string, int, ...)
        :param value: any object
        """
        if key in self.cache:
            old_node = self.cache[key]
            old_node.value = value
            self.remove(old_node)
            self.set_head(old_node)
        else:
            new_node = Node(key, value)
            if len(self.cache) >= self.capacity:
                # Evict the least recently used entry.  It must also be
                # deleted from the dict -- otherwise the evicted key would
                # remain reachable via get() and the dict would grow
                # without bound (original bug).
                evicted = self.end
                self.remove(evicted)
                del self.cache[evicted.key]
            self.set_head(new_node)
            self.cache[key] = new_node

    def print_cache(self):
        """
        Print the contents inside the cache in order,
        Most Recent to Least recent.
        """
        # print() calls with a single argument behave identically on
        # Python 2 and 3; the original print statements were a syntax
        # error under Python 3.
        print("\n\nPrinting cache...(Most recent to least recent)\n")
        if self.head is None:
            print("Cache is empty")
        node = self.head
        while node is not None:
            print("Key = %s, Value = %s" % (node.key, node.value))
            node = node.next
if __name__ == '__main__':
    # Build a cache holding at most five entries and show it empty.
    lru = LRUCache(5)
    lru.print_cache()

    # Fill the cache to capacity.
    samples = [(10, "ten"), (20, "twenty"), (30, "thirty"),
               (40, "forty"), (50, "fifty")]
    for key, value in samples:
        lru.insert(key, value)
    lru.print_cache()

    # Cache is full, add something
    lru.insert(90, "ninety")
    lru.print_cache()

    # Add a value multiple times
    for _ in range(3):
        lru.insert(100, "hundred")
    lru.print_cache()