memcache.py · 153 lines (118 loc) · 5.26 KB
"""Utilities for caching data in memcache."""
import functools
import logging
import os
from google.cloud.ndb.global_cache import _InProcessGlobalCache, MemcacheCache
from oauth_dropins.webutil import appengine_info
from pymemcache.client.base import PooledClient
from pymemcache.serde import PickleSerde
from pymemcache.test.utils import MockMemcacheClient
logger = logging.getLogger(__name__)
# https://github.com/memcached/memcached/wiki/Commands#standard-protocol
KEY_MAX_LEN = 250
MEMOIZE_VERSION = 2
if appengine_info.DEBUG or appengine_info.LOCAL_SERVER:
    logger.info('Using in memory mock memcache')
    memcache = MockMemcacheClient(allow_unicode_keys=True)
    pickle_memcache = MockMemcacheClient(allow_unicode_keys=True,
                                         serde=PickleSerde())
    global_cache = _InProcessGlobalCache()
else:
    logger.info('Using production Memorystore memcache')
    memcache = PooledClient(os.environ['MEMCACHE_HOST'], allow_unicode_keys=True,
                            timeout=10, connect_timeout=10)  # seconds
    pickle_memcache = PooledClient(os.environ['MEMCACHE_HOST'],
                                   serde=PickleSerde(), allow_unicode_keys=True,
                                   timeout=10, connect_timeout=10)  # seconds
    global_cache = MemcacheCache(memcache)
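
# Illustrative sketch, not part of the original module: the ndb global cache
# configured above is typically passed into an ndb client context by callers.
# The client setup below is an assumption about how this module gets used.
#
#   from google.cloud import ndb
#
#   client = ndb.Client()
#   with client.context(global_cache=global_cache):
#       ...  # ndb reads/writes in this block use memcache as the global cache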


def key(key):
    """Preprocesses a memcache key. Right now just truncates it to 250 chars.

    https://pymemcache.readthedocs.io/en/latest/apidoc/pymemcache.client.base.html
    https://github.com/memcached/memcached/wiki/Commands#standard-protocol

    TODO: truncate to 250 *UTF-8* chars, to handle Unicode chars in URLs. Related:
    pymemcache Client's allow_unicode_keys constructor kwarg.

    Args:
      key (str)

    Returns:
      bytes:
    """
    assert isinstance(key, str), repr(key)
    return key.replace(' ', '%20').encode()[:KEY_MAX_LEN]
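
# Illustrative examples of key()'s behavior (assumed, not from this module's
# tests):
#   key('foo bar')       => b'foo%20bar'
#   len(key('x' * 300))  => 250   # truncated to KEY_MAX_LEN bytes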


def memoize_key(fn, *args, _version=MEMOIZE_VERSION, **kwargs):
    return key(f'{fn.__qualname__}-{_version}-{repr(args)}-{repr(kwargs)}')
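
# For example (hypothetical function, for illustration only), a module-level
# function fetch(url) would get a cache key like:
#   memoize_key(fetch, 'http://a/b')  => b"fetch-2-('http://a/b',)-{}"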


NONE = ()  # empty tuple; sentinel so that None return values can still be cached


def memoize(expire=None, key=None, write=True, version=MEMOIZE_VERSION):
    """Memoize function decorator that stores the cached value in memcache.

    Args:
      expire (timedelta): optional, expiration
      key (callable): function that takes the function's ``(*args, **kwargs)``
        and returns the cache key to use. If it returns None, memcache won't be
        used.
      write (bool or callable): whether to write to memcache. If this is a
        callable, it will be called with the function's ``(*args, **kwargs)``
        and should return True or False.
      version (int): overrides our default version number in the memcache key.
        Bumping this version has the same effect as clearing the cache for just
        the affected function.
    """
    if expire:
        expire = int(expire.total_seconds())

    def decorator(fn):
        @functools.wraps(fn)
        def wrapped(*args, **kwargs):
            cache_key = None
            if key:
                key_val = key(*args, **kwargs)
                if key_val:
                    cache_key = memoize_key(fn, key_val, _version=version)
            else:
                cache_key = memoize_key(fn, *args, _version=version, **kwargs)

            if cache_key:
                val = pickle_memcache.get(cache_key)
                if val is not None:
                    logger.debug(f'cache hit {cache_key} {repr(val)[:100]}')
                    return None if val == NONE else val
                else:
                    logger.debug(f'cache miss {cache_key}')

            val = fn(*args, **kwargs)

            if cache_key:
                write_cache = (write if isinstance(write, bool)
                               else write(*args, **kwargs))
                if write_cache:
                    logger.debug(f'cache set {cache_key} {repr(val)[:100]}')
                    pickle_memcache.set(cache_key, NONE if val is None else val,
                                        expire=expire)

            return val

        return wrapped

    return decorator
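
# Illustrative usage (assumed; fetch_profile and its TTL are hypothetical, not
# part of this module):
#
#   from datetime import timedelta
#
#   @memoize(expire=timedelta(hours=1))
#   def fetch_profile(user_id):
#       ...  # expensive lookup; the pickled result is cached for an hour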


###########################################
# https://github.com/googleapis/python-ndb/issues/743#issuecomment-2067590945
#
# fixes "RuntimeError: Key has already been set in this batch" errors due to
# tasklets in pages.serve_feed
from logging import error as log_error
from sys import modules

from google.cloud.datastore_v1.types.entity import Key
from google.cloud.ndb._cache import (
    _GlobalCacheSetBatch,
    global_compare_and_swap,
    global_set_if_not_exists,
    global_watch,
)
from google.cloud.ndb.tasklets import Future, Return, tasklet

GLOBAL_CACHE_KEY_PREFIX: bytes = modules["google.cloud.ndb._cache"]._PREFIX
LOCKED_FOR_READ: bytes = modules["google.cloud.ndb._cache"]._LOCKED_FOR_READ
LOCK_TIME: bytes = modules["google.cloud.ndb._cache"]._LOCK_TIME


@tasklet
def custom_global_lock_for_read(key: str, value: str):
    if value is not None:
        yield global_watch(key, value)
        lock_acquired = yield global_compare_and_swap(
            key, LOCKED_FOR_READ, expires=LOCK_TIME
        )
    else:
        lock_acquired = yield global_set_if_not_exists(
            key, LOCKED_FOR_READ, expires=LOCK_TIME
        )

    if lock_acquired:
        raise Return(LOCKED_FOR_READ)


modules["google.cloud.ndb._cache"].global_lock_for_read = custom_global_lock_for_read