4 """
5 This file is part of the web2py Web Framework
6 Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
7 License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
8
9 Basic caching classes and methods
10 =================================
11
12 - Cache - The generic caching object interfacing with the others
13 - CacheInRam - providing caching in ram
14 - CacheInDisk - provides caches on disk
15
16 Memcache is also available via a different module (see gluon.contrib.memcache)
17
18 When web2py is running on Google App Engine,
19 caching will be provided by the GAE memcache
20 (see gluon.contrib.gae_memcache)
21 """

import time
import portalocker
import shelve
import thread
import os
import logging
import re

logger = logging.getLogger("web2py.cache")

__all__ = ['Cache']


DEFAULT_TIME_EXPIRE = 300


class CacheAbstract(object):
    """
    Abstract class for cache implementations.
    Its main function now is to provide referenced API documentation.

    Use CacheInRam or CacheOnDisk instead, which are derived from this class.
    """

    cache_stats_name = 'web2py_cache_statistics'

50 """
51 Paremeters
52 ----------
53 request:
54 the global request object
55 """
56 raise NotImplementedError
57
60 """
61 Tries retrieve the value corresponding to `key` from the cache of the
62 object exists and if it did not expire, else it called the function `f`
63 and stores the output in the cache corresponding to `key`. In the case
64 the output of the function is returned.
65
66 :param key: the key of the object to be store or retrieved
67 :param f: the function, whose output is to be cached
68 :param time_expire: expiration of the cache in microseconds
69
70 - `time_expire` is used to compare the current time with the time when
71 the requested object was last saved in cache. It does not affect
72 future requests.
73 - Setting `time_expire` to 0 or negative value forces the cache to
74 refresh.
75
76 If the function `f` is `None` the cache is cleared.
77 """
78 raise NotImplementedError
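
    # Hedged usage sketch (not part of the original source): concrete caches
    # such as `cache.ram` and `cache.disk` implement this call convention, so
    # in a web2py controller one would typically write:
    #
    #     value = cache.ram('mykey', lambda: expensive_computation(), 60)
    #     cache.ram('mykey', None)   # passing f=None drops the cached entry
    #
    # `expensive_computation` is a hypothetical function used only for
    # illustration here.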

    def clear(self, regex=None):
        """
        Clears the cache of all keys that match the provided regular
        expression. If no regular expression is provided, it clears all
        entries in the cache.

        Parameters
        ----------
        regex:
            if provided, only keys matching the regex will be cleared,
            otherwise all keys are cleared.
        """

        raise NotImplementedError

95 """
96 Increments the cached value for the given key by the amount in value
97
98 Parameters
99 ----------
100 key:
101 key for the cached object to be incremeneted
102 value:
103 amount of the increment (defaults to 1, can be negative)
104 """
105 raise NotImplementedError
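
    # Hedged usage sketch (not part of the original source): a concrete cache
    # can keep simple counters with increment(), for example:
    #
    #     cache.ram('downloads', lambda: 0, None)   # seed the counter, never expires
    #     cache.ram.increment('downloads')          # bump it by 1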

    def _clear(self, storage, regex):
        """
        Auxiliary function called by `clear` to search and clear cache entries.
        """
        r = re.compile(regex)
        for (key, value) in storage.items():
            if r.match(str(key)):
                del storage[key]
117 """
118 Ram based caching
119
120 This is implemented as global (per process, shared by all threads)
121 dictionary.
122 A mutex-lock mechanism avoid conflicts.
123 """
124
125 locker = thread.allocate_lock()
126 meta_storage = {}
127
143
144 - def clear(self, regex=None):
159
162 """
163 Attention! cache.ram does not copy the cached object. It just stores a reference to it.
164 Turns out the deepcopying the object has some problems:
165 1) would break backward compatibility
166 2) would be limiting because people may want to cache live objects
167 3) would work unless we deepcopy no storage and retrival which would make things slow.
168 Anyway. You can deepcopy explicitly in the function generating the value to be cached.
169 """
170
171 dt = time_expire
172
173 self.locker.acquire()
174 item = self.storage.get(key, None)
175 if item and f == None:
176 del self.storage[key]
177 self.storage[CacheAbstract.cache_stats_name]['hit_total'] += 1
178 self.locker.release()
179
180 if f is None:
181 return None
182 if item and (dt == None or item[0] > time.time() - dt):
183 return item[1]
184 value = f()
185
186 self.locker.acquire()
187 self.storage[key] = (time.time(), value)
188 self.storage[CacheAbstract.cache_stats_name]['misses'] += 1
189 self.locker.release()
190 return value
191
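    # Hedged illustration (not part of the original source): because
    # cache.ram stores only a reference, a caller that intends to mutate the
    # cached value can deepcopy it inside the cached function, e.g. in a
    # web2py controller:
    #
    #     import copy
    #     rows = cache.ram('all_rows',
    #                      lambda: copy.deepcopy(db(db.thing).select()),
    #                      time_expire=60)
    #
    # `db` and `db.thing` are hypothetical names used only for illustration.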


class CacheOnDisk(CacheAbstract):
    """
    Disk-based cache.

    This is implemented as a shelve object and it is shared by multiple web2py
    processes (and threads) as long as they share the same filesystem.
    The file is locked when accessed.

    The disk cache provides persistence when web2py is started/stopped, but it
    is slower than `CacheInRam`.

    Values stored in the disk cache must be picklable.
    """

    speedup_checks = set()

    def __init__(self, request, folder=None):
        self.request = request

        folder = folder or os.path.join(request.folder, 'cache')

        if not os.path.exists(folder):
            os.mkdir(folder)

        self.locker_name = os.path.join(folder, 'cache.lock')
        self.shelve_name = os.path.join(folder, 'cache.shelve')

        locker, locker_locked = None, False
        speedup_key = (folder, CacheAbstract.cache_stats_name)
        if not speedup_key in self.speedup_checks or \
                not os.path.exists(self.shelve_name):
            try:
                locker = open(self.locker_name, 'a')
                portalocker.lock(locker, portalocker.LOCK_EX)
                locker_locked = True
                storage = shelve.open(self.shelve_name)

                if not storage.has_key(CacheAbstract.cache_stats_name):
                    storage[CacheAbstract.cache_stats_name] = {
                        'hit_total': 0,
                        'misses': 0,
                    }
                storage.sync()
                self.speedup_checks.add(speedup_key)
            except ImportError:
                pass
            except:
                logger.error('corrupted file: %s' % self.shelve_name)
            if locker_locked:
                portalocker.unlock(locker)
            if locker:
                locker.close()

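    # Hedged sketch (not part of the original source): a compact restatement
    # of the lock-then-open pattern visible in __init__ above, kept here as a
    # reference for how shelve access is guarded in this module:
    #
    #     locker = open(self.locker_name, 'a')
    #     portalocker.lock(locker, portalocker.LOCK_EX)   # exclusive file lock
    #     storage = shelve.open(self.shelve_name)
    #     try:
    #         pass  # read/write storage, then storage.sync()
    #     finally:
    #         portalocker.unlock(locker)
    #         locker.close()
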
    def clear(self, regex=None):


344 """
345 Sets up generic caching, creating an instance of both CacheInRam and
346 CacheOnDisk.
347 In case of GAE will make use of gluon.contrib.gae_memcache.
348
349 - self.ram is an instance of CacheInRam
350 - self.disk is an instance of CacheOnDisk
351 """
352
354 """
355 Parameters
356 ----------
357 request:
358 the global request object
359 """
360
361 import settings
362 if settings.global_settings.web2py_runtime_gae:
363 from contrib.gae_memcache import MemcacheClient
364 self.ram=self.disk=MemcacheClient(request)
365 else:
366
367 self.ram = CacheInRam(request)
368 try:
369 self.disk = CacheOnDisk(request)
370 except IOError:
371 logger.warning('no cache.disk (IOError)')
372 except AttributeError:
373
374
375 logger.warning('no cache.disk (AttributeError)')
376
381 """
382 Decorator function that can be used to cache any function/method.
383
384 Example::
385
386 @cache('key', 5000, cache.ram)
387 def f():
388 return time.ctime()
389
390 When the function f is called, web2py tries to retrieve
391 the value corresponding to `key` from the cache of the
392 object exists and if it did not expire, else it calles the function `f`
393 and stores the output in the cache corresponding to `key`. In the case
394 the output of the function is returned.
395
396 :param key: the key of the object to be store or retrieved
397 :param time_expire: expiration of the cache in microseconds
398 :param cache_model: `cache.ram`, `cache.disk`, or other
399 (like `cache.memcache` if defined). It defaults to `cache.ram`.
400
401 Notes
402 -----
403 `time_expire` is used to compare the curret time with the time when the
404 requested object was last saved in cache. It does not affect future
405 requests.
406 Setting `time_expire` to 0 or negative value forces the cache to
407 refresh.
408
409 If the function `f` is an action, we suggest using
410 `request.env.path_info` as key.
411 """
412 if not cache_model:
413 cache_model = self.ram
414
415 def tmp(func):
416 return lambda: cache_model(key, func, time_expire)
417
418 return tmp
419
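

# Hedged usage sketch (not part of the original source): in a web2py
# controller, where the framework provides `cache` (an instance of Cache
# above) and `request`, caching an entire action for five minutes might look
# like this:
#
#     @cache(request.env.path_info, time_expire=300, cache_model=cache.ram)
#     def index():
#         return dict(now=time.ctime())
#
# `index` and its return value are hypothetical; note that the wrapper
# returned by Cache.__call__ is a zero-argument lambda, so decorated actions
# must not take arguments.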