Completely migrate feature-related cache off ramcache (#2234)

* Implement scan

* Migrate feature cache

* Address comments

* Fix

* Add cache_key argument

Co-authored-by: Kyle Ju <kyleju@chromium.org>
Kyle Ju 2022-09-14 10:07:32 -07:00 committed by GitHub
Parent 1dabcc4911
Commit fe65355d6a
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
12 changed files with 95 additions and 433 deletions

View file

@@ -17,7 +17,7 @@ import logging
from framework import basehandlers
from framework import permissions
from framework import ramcache
from framework import rediscache
from framework import users
from internals import core_models
from internals import search
@@ -81,6 +81,6 @@ class FeaturesAPI(basehandlers.APIHandler):
feature = self.get_specified_feature(feature_id=feature_id)
feature.deleted = True
feature.put()
ramcache.flush_all()
rediscache.delete_keys_with_prefix(core_models.feature_cache_prefix())
return {'message': 'Done'}

View file

@@ -22,7 +22,7 @@ from api import features_api
from internals import core_enums
from internals import core_models
from internals import user_models
from framework import ramcache
from framework import rediscache
test_app = flask.Flask(__name__)
@@ -45,11 +45,12 @@ class FeaturesAPITestDelete(testing_config.CustomTestCase):
self.app_admin.put()
def tearDown(self):
cache_key = '%s|%s' % (
core_models.Feature.DEFAULT_CACHE_KEY, self.feature_1.key.integer_id())
self.feature_1.key.delete()
self.app_admin.key.delete()
testing_config.sign_out()
ramcache.flush_all()
ramcache.check_for_distributed_invalidation()
rediscache.delete(cache_key)
def test_delete__valid(self):
"""Admin wants to soft-delete a feature."""
@@ -139,8 +140,7 @@ class FeaturesAPITestGet(testing_config.CustomTestCase):
self.feature_3.key.delete()
self.app_admin.key.delete()
testing_config.sign_out()
ramcache.flush_all()
ramcache.check_for_distributed_invalidation()
rediscache.delete_keys_with_prefix('features|*')
def test_get__all_listed(self):
"""Get all features that are listed."""

View file

@@ -29,7 +29,6 @@ from google.cloud import ndb
import settings
from framework import csp
from framework import permissions
from framework import ramcache
from framework import secrets
from framework import users
from framework import utils
@@ -163,7 +162,6 @@ class APIHandler(BaseHandler):
def get(self, *args, **kwargs):
"""Handle an incoming HTTP GET request."""
headers = self.get_headers()
ramcache.check_for_distributed_invalidation()
handler_data = self.do_get(*args, **kwargs)
return self.defensive_jsonify(handler_data), headers
@@ -178,7 +176,6 @@
if not is_login_request:
self.require_signed_in_and_xsrf_token()
headers = self.get_headers()
ramcache.check_for_distributed_invalidation()
handler_data = self.do_post(*args, **kwargs)
return self.defensive_jsonify(handler_data), headers
@@ -186,7 +183,6 @@
"""Handle an incoming HTTP PATCH request."""
self.require_signed_in_and_xsrf_token()
headers = self.get_headers()
ramcache.check_for_distributed_invalidation()
handler_data = self.do_patch(*args, **kwargs)
return self.defensive_jsonify(handler_data), headers
@@ -194,7 +190,6 @@
"""Handle an incoming HTTP DELETE request."""
self.require_signed_in_and_xsrf_token()
headers = self.get_headers()
ramcache.check_for_distributed_invalidation()
handler_data = self.do_delete(*args, **kwargs)
return self.defensive_jsonify(handler_data), headers
@@ -356,7 +351,6 @@ class FlaskHandler(BaseHandler):
logging.info('Stripping www and redirecting to %r', location)
return self.redirect(location)
ramcache.check_for_distributed_invalidation()
handler_data = self.get_template_data(*args, **kwargs)
users.refresh_user_session()
@@ -381,7 +375,6 @@
def post(self, *args, **kwargs):
"""POST handlers return a string, JSON, or a redirect."""
ramcache.check_for_distributed_invalidation()
self.require_xsrf_token()
handler_data = self.process_post_data(*args, **kwargs)
headers = self.get_headers()

View file

@@ -1,201 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module manages a distributed RAM cache as a global python dictionary in
each AppEngine instance. AppEngine can spin up new instances or kill old ones
at any time. Each instance's RAM cache is independent and might not have the
same entries as found in the RAM caches of other instances.
Each instance will do the work needed to compute a given RAM cache entry
itself. The values computed in a given instance will speed up future requests
made to that instance only.
When the user edits something in the app, the updated entity is stored in
datastore. Also, the singleton SharedInvalidate entity is updated with the
timestamp of the change. Every request handler must start processing a request
by first calling SharedInvalidate.check_for_distributed_invalidation() which
checks for any needed invalidations and clears RAM cache entries in
that instance if needed.
For now, there is only a single RAM cache per instance and when anything is
invalidated, that entire RAM cache is completely cleared. In the future,
invalidations could be compartmentalized by RAM cache type, or even specific
entity IDs. Monorail uses that approach, but existing ChromeStatus code does
not need it.
Calling code must not mutate any value that is passed into set() or returned
from get(). If calling code needs to mutate such objects, it should call
copy.copy() or copy.deepcopy() to avoid unintentional cumulative mutations.
Unlike memcache, this RAM cache has no concept of expiration time. So,
whenever a cached value would become invalid, it must be invalidated.
"""
import logging
import time as time_module
from google.cloud import ndb
client = ndb.Client()
global_cache = {}
expires = {}
# Whenever the cache would have more than this many items, some
# random item is dropped, or the entire cache is cleared.
# If our instances are killed by appengine for exceeding memory limits,
# we can configure larger instances and/or reduce this value.
MAX_CACHE_SIZE = 10000
total_num_hits = 0
total_num_misses = 0
def set(key, value, time=None):
"""Emulate the memcache.set() method using a RAM cache."""
if len(global_cache) + 1 > MAX_CACHE_SIZE:
popped_item = global_cache.popitem()
if popped_item[0] in expires:
del expires[popped_item[0]]
global_cache[key] = value
if time:
expires[key] = int(time_module.time()) + time
def _check_expired(keys):
now = int(time_module.time())
for key in keys:
if key in expires and expires[key] < now:
del expires[key]
del global_cache[key]
def _count_hit_or_miss(keys):
global total_num_hits, total_num_misses
for key in keys:
if key in global_cache:
total_num_hits += 1
verb = 'hit'
else:
total_num_misses += 1
verb = 'miss'
# TODO(jrobbins): Replace this with proper monitoring variables
logging.info('cache %s for %r. Hit ratio: %5.2f%%.', verb, key,
total_num_hits / (total_num_hits + total_num_misses) * 100)
def get(key):
"""Emulate the memcache.get() method using a RAM cache."""
_check_expired([key])
_count_hit_or_miss([key])
return global_cache.get(key)
def get_multi(keys):
"""Emulate the memcache.get_multi() method using a RAM cache."""
_check_expired(keys)
_count_hit_or_miss(keys)
return {
key: global_cache[key]
for key in keys
if key in global_cache
}
def set_multi(entries, time=None):
"""Emulate the memcache.set_multi() method using a RAM cache."""
if len(global_cache) + len(entries) > MAX_CACHE_SIZE:
global_cache.clear()
expires.clear()
global_cache.update(entries)
if time:
expire_time = int(time_module.time()) + time
for key in entries:
expires[key] = expire_time
def delete(key):
"""Emulate the memcache.delete() method using a RAM cache."""
if key in global_cache:
del global_cache[key]
flush_all() # Note: this is wasteful but infrequent in our app.
def flush_all():
"""Emulate the memcache.flush_all() method using a RAM cache.
This does not clear the RAM cache in this instance. That happens
at the start of the next request when the request handler calls
SharedInvalidate.check_for_distributed_invalidation().
"""
SharedInvalidate.invalidate()
class SharedInvalidateParent(ndb.Model):
pass
class SharedInvalidate(ndb.Model):
PARENT_ENTITY_ID = 1234
SINGLETON_ENTITY_ID = 5678
with client.context():
PARENT_KEY = ndb.Key('SharedInvalidateParent', PARENT_ENTITY_ID)
SINGLETON_KEY = ndb.Key(
'SharedInvalidateParent', PARENT_ENTITY_ID,
'SharedInvalidate', SINGLETON_ENTITY_ID)
last_processed_timestamp = None
updated = ndb.DateTimeProperty(auto_now=True)
@classmethod
def invalidate(cls):
"""Tell this and other appengine instances to invalidate their caches."""
singleton = None
entities = cls.query(ancestor=cls.PARENT_KEY).fetch(1)
if entities:
singleton = entities[0]
if not singleton:
singleton = SharedInvalidate(key=cls.SINGLETON_KEY)
singleton.put() # automatically sets singleton.updated to now.
# The cache in each instance (including this one) will be
# cleared on the next call to check_for_distributed_invalidation()
# which should happen at the start of request processing.
@classmethod
def check_for_distributed_invalidation(cls):
"""Check if any appengine instance has invalidated the cache."""
singleton = None
entities = cls.query(ancestor=cls.PARENT_KEY).fetch(1)
if entities:
singleton = entities[0]
if not singleton:
return # No news is good news
if (cls.last_processed_timestamp is None or
singleton.updated > cls.last_processed_timestamp):
global_cache.clear()
expires.clear()
cls.last_processed_timestamp = singleton.updated
def check_for_distributed_invalidation():
"""Just a shorthand way to call the class method."""
SharedInvalidate.check_for_distributed_invalidation()
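
For contrast, a minimal sketch of the two invalidation styles, using the 'features|*' prefix that this commit's tests delete:

# Old pattern (this deleted module): every request handler polls first.
ramcache.check_for_distributed_invalidation()  # clears the local dict if stale
feature_list = ramcache.get(cache_key)

# New pattern: the cache lives in shared Redis, so a writer invalidates
# once and no per-request polling is needed.
rediscache.delete_keys_with_prefix('features|*')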

View file

@@ -1,160 +0,0 @@
# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
from unittest import mock
import testing_config # Must be imported before the module under test.
from framework import ramcache
KEY_1 = 'cache_key|1'
KEY_2 = 'cache_key|2'
KEY_3 = 'cache_key|3'
KEY_4 = 'cache_key|4'
KEY_5 = 'cache_key|5'
KEY_6 = 'cache_key|6'
KEY_7 = 'cache_key|7'
class RAMCacheFunctionTests(testing_config.CustomTestCase):
def testSetAndGet(self):
"""We can cache a value and retrieve it from the cache."""
self.assertEqual(None, ramcache.get(KEY_1))
ramcache.set(KEY_1, 101)
self.assertEqual(101, ramcache.get(KEY_1))
def testSetAndGetMulti(self):
"""We can cache values and retrieve them from the cache."""
self.assertEqual({}, ramcache.get_multi([]))
self.assertEqual({}, ramcache.get_multi([KEY_2, KEY_3]))
ramcache.set_multi({KEY_2: 202, KEY_3: 303})
self.assertEqual(
{KEY_2: 202, KEY_3: 303},
ramcache.get_multi([KEY_2, KEY_3]))
ramcache.set_multi({KEY_2: 202, KEY_3: 303})
self.assertEqual(
{KEY_2: 202, KEY_3: 303},
ramcache.get_multi([KEY_2, KEY_3, KEY_4]))
@mock.patch('time.time')
def testExpiration(self, mock_time):
"""If a value is set with an expiration time, it is dropped later."""
NOW = 1607128969
mock_time.return_value = NOW
ramcache.set(KEY_1, 101, time=60)
self.assertEqual(101, ramcache.get(KEY_1))
mock_time.return_value = NOW + 59
self.assertEqual(101, ramcache.get(KEY_1))
mock_time.return_value = NOW + 61
self.assertEqual(None, ramcache.get(KEY_1))
mock_time.return_value = NOW
ramcache.set_multi({KEY_1 + 'multi': 101}, time=60)
self.assertEqual(101, ramcache.get(KEY_1 + 'multi'))
mock_time.return_value = NOW + 59
self.assertEqual(101, ramcache.get(KEY_1 + 'multi'))
mock_time.return_value = NOW + 61
self.assertEqual(None, ramcache.get(KEY_1 + 'multi'))
@mock.patch('framework.ramcache.SharedInvalidate.invalidate')
def testDelete_NotFound(self, mock_invalidate):
"""Deleting an item that is not in the cache still invalidates."""
ramcache.delete(KEY_5)
mock_invalidate.assert_called()
@mock.patch('framework.ramcache.SharedInvalidate.invalidate')
def testDelete_Found(self, mock_invalidate):
"""We can delete an item from the cache, causing an invalidation."""
ramcache.set(KEY_6, 606)
self.assertEqual(606, ramcache.get(KEY_6))
ramcache.delete(KEY_6)
self.assertEqual(None, ramcache.get(KEY_6))
mock_invalidate.assert_called_once()
@mock.patch('framework.ramcache.SharedInvalidate.invalidate')
def testFlushAll(self, mock_invalidate):
"""flush_all simply causes an invalidation."""
ramcache.flush_all()
mock_invalidate.assert_called_once()
class SharedInvalidateTests(testing_config.CustomTestCase):
def assertTimestampWasUpdated(self, start_time):
singleton = None
entities = ramcache.SharedInvalidate.query(
ancestor=ramcache.SharedInvalidate.PARENT_KEY).fetch(1)
if entities:
singleton = entities[0]
self.assertTrue(singleton.updated > start_time)
def testInvalidate(self):
"""Calling invalidate sets a new updated time."""
start_time = datetime.datetime.now()
ramcache.SharedInvalidate.invalidate()
self.assertTimestampWasUpdated(start_time)
def testCheckForDistributedInvalidation_Unneeded_NoEntity(self):
"""When the system first launches, no need to clear cache."""
ramcache.SharedInvalidate.SINGLETON_KEY.delete()
ramcache.SharedInvalidate.last_processed_timestamp = None
ramcache.global_cache = {KEY_7: 777}
ramcache.SharedInvalidate.check_for_distributed_invalidation()
self.assertEqual({KEY_7: 777}, ramcache.global_cache)
self.assertIsNone(ramcache.SharedInvalidate.last_processed_timestamp)
def testCheckForDistributedInvalidation_Unneeded_Fresh(self):
"""When no other instance has invalidated, this cache is fresh."""
ramcache.SharedInvalidate.invalidate()
ramcache.SharedInvalidate.check_for_distributed_invalidation()
# From this point on there are no invalidations, so our cache is fresh.
ramcache.global_cache = {KEY_7: 777}
ramcache.SharedInvalidate.check_for_distributed_invalidation()
# Since cache is fresh, it is not cleared.
self.assertEqual({KEY_7: 777}, ramcache.global_cache)
def testCheckForDistributedInvalidation_Needed_None(self):
"""When needed, we clear our local RAM cache."""
start_time = datetime.datetime.now()
ramcache.SharedInvalidate.last_processed_timestamp = None
ramcache.global_cache = {KEY_7: 777}
ramcache.flush_all()
ramcache.SharedInvalidate.check_for_distributed_invalidation()
self.assertEqual({}, ramcache.global_cache)
self.assertTimestampWasUpdated(start_time)
def testCheckForDistributedInvalidation_Needed_Stale(self):
"""When needed, we clear our local RAM cache."""
start_time = datetime.datetime.now()
ramcache.SharedInvalidate.last_processed_timestamp = start_time
ramcache.global_cache = {KEY_7: 777}
ramcache.flush_all()
ramcache.SharedInvalidate.check_for_distributed_invalidation()
self.assertEqual({}, ramcache.global_cache)
self.assertTimestampWasUpdated(start_time)

View file

@@ -20,7 +20,6 @@ import pickle
import logging
import settings
from google.cloud import ndb
import redis
import fakeredis
@@ -118,6 +117,23 @@ def delete(key):
redis_client.delete(cache_key)
def delete_keys_with_prefix(pattern):
"""Delete all keys matching a prefix pattern."""
if redis_client is None:
return
prefix = add_gae_prefix(pattern)
# https://redis.io/commands/scan/
pos, keys = redis_client.scan(cursor=0, match=prefix)
target = keys
while pos != 0:
pos, keys = redis_client.scan(cursor=pos, match=prefix)
target.extend(keys)
for key in target:
redis_client.delete(key)
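
A minimal standalone sketch of the same SCAN-and-delete pattern, runnable against the fakeredis backend this module already imports (key names are illustrative):

import fakeredis

r = fakeredis.FakeStrictRedis()
for i in range(3):
    r.set('features|%d' % i, 'value')

# SCAN returns (next_cursor, batch_of_keys); a cursor of 0 ends the iteration.
cursor, keys = r.scan(cursor=0, match='features|*')
matched = list(keys)
while cursor != 0:
    cursor, keys = r.scan(cursor=cursor, match='features|*')
    matched.extend(keys)
for key in matched:
    r.delete(key)

assert r.get('features|0') is None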
def flushall():
"""Delete all the keys in Redis, https://redis.io/commands/flushall/."""
if redis_client is None:

View file

@@ -17,6 +17,7 @@ import testing_config # Must be imported before the module under test.
from framework import rediscache
PREFIX = 'cache_key|'
KEY_1 = 'cache_key|1'
KEY_2 = 'cache_key|2'
KEY_3 = 'cache_key|3'
@@ -27,8 +28,10 @@ KEY_7 = 'cache_key|7'
class RedisCacheFunctionTests(testing_config.CustomTestCase):
def tearDown(self):
rediscache.flushall()
def testSetAndGet(self):
def test_set_and_get(self):
"""We can cache a value and retrieve it from the cache."""
self.assertEqual(None, rediscache.get(KEY_1))
@@ -41,7 +44,7 @@ class RedisCacheFunctionTests(testing_config.CustomTestCase):
rediscache.set(KEY_4, '123', 3600)
self.assertEqual('123', rediscache.get(KEY_4))
def testSetAndGetMulti(self):
def test_set_and_get_multi(self):
"""We can cache values and retrieve them from the cache."""
self.assertEqual({}, rediscache.get_multi([]))
@@ -62,8 +65,23 @@ class RedisCacheFunctionTests(testing_config.CustomTestCase):
rediscache.set_multi({KEY_5: '222'}, 3600)
self.assertEqual({KEY_5: '222'}, rediscache.get_multi([KEY_5]))
def testDelete(self):
def test_delete(self):
rediscache.set(KEY_6, '606')
self.assertEqual('606', rediscache.get(KEY_6))
rediscache.delete(KEY_6)
self.assertEqual(None, rediscache.get(KEY_6))
def test_delete_keys_with_prefix(self):
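# Seed more keys than a single SCAN page is likely to return (the COUNT
# hint defaults to 10), so the cursor loop must run more than once.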
for x in range(17):
key = PREFIX + str(x)
rediscache.set(key, str(x))
rediscache.set('random_key', '303')
rediscache.set('random_key1', '404')
self.assertEqual('1', rediscache.get(KEY_1))
rediscache.delete_keys_with_prefix('cache_key|*')
self.assertEqual(None, rediscache.get(KEY_1))
self.assertEqual(None, rediscache.get(KEY_2))
self.assertEqual('303', rediscache.get('random_key'))
self.assertEqual('404', rediscache.get('random_key1'))

View file

@@ -19,7 +19,7 @@ import re
from google.cloud import ndb
from framework import ramcache
from framework import rediscache
from framework import users
from framework import cloud_tasks_helpers
@@ -43,6 +43,12 @@ def del_none(d):
del_none(value)
return d
def feature_cache_key(cache_key, feature_id):
return '%s|%s' % (cache_key, feature_id)
def feature_cache_prefix():
return '%s|*' % (Feature.DEFAULT_CACHE_KEY)
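
For illustration, with Feature.DEFAULT_CACHE_KEY equal to 'features' (the prefix the tests in this commit delete) and a hypothetical feature id 123, these helpers produce:

feature_cache_key(Feature.DEFAULT_CACHE_KEY, 123)  # -> 'features|123'
feature_cache_prefix()                             # -> 'features|*'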
class DictModel(ndb.Model):
# def to_dict(self):
@@ -398,7 +404,7 @@ class Feature(DictModel):
s = ('%s%s' % (filterby[0], filterby[1])).replace(' ', '')
KEY += '|%s' % s
feature_list = ramcache.get(KEY)
feature_list = rediscache.get(KEY)
if feature_list is None or update_cache:
query = Feature.query().order(-Feature.updated) #.order('name')
@@ -422,7 +428,7 @@
feature_list = [
f.format_for_template(version=version) for f in feature_list]
ramcache.set(KEY, feature_list)
rediscache.set(KEY, feature_list)
return feature_list
@@ -440,17 +446,17 @@
KEY = '%s|%s' % (Feature.DEFAULT_CACHE_KEY, sorted(statuses))
feature_list = ramcache.get(KEY)
feature_list = rediscache.get(KEY)
if feature_list is None or update_cache:
# There's no way to do an OR in a single datastore query, and there's a
# very good chance that the self.get_all() results will already be in
# ramcache, so use an array comprehension to grab the features we
# rediscache, so use an array comprehension to grab the features we
# want from the array of everything.
feature_list = [
feature for feature in self.get_all(update_cache=update_cache)
if feature['browsers']['chrome']['status']['text'] in statuses]
ramcache.set(KEY, feature_list)
rediscache.set(KEY, feature_list)
return feature_list
@@ -463,8 +469,8 @@
processing a POST to edit data. For the editing use case, load the
data from NDB directly.
"""
KEY = '%s|%s' % (Feature.DEFAULT_CACHE_KEY, feature_id)
feature = ramcache.get(KEY)
KEY = feature_cache_key(Feature.DEFAULT_CACHE_KEY, feature_id)
feature = rediscache.get(KEY)
if feature is None or update_cache:
unformatted_feature = Feature.get_by_id(feature_id)
@@ -475,7 +481,7 @@
feature['updated_display'] = (
unformatted_feature.updated.strftime("%Y-%m-%d"))
feature['new_crbug_url'] = unformatted_feature.new_crbug_url()
ramcache.set(KEY, feature)
rediscache.set(KEY, feature)
return feature
@@ -510,8 +516,8 @@
futures = []
for feature_id in feature_ids:
lookup_key = '%s|%s' % (Feature.DEFAULT_CACHE_KEY, feature_id)
feature = ramcache.get(lookup_key)
lookup_key = feature_cache_key(Feature.DEFAULT_CACHE_KEY, feature_id)
feature = rediscache.get(lookup_key)
if feature is None or update_cache:
futures.append(Feature.get_by_id_async(feature_id))
else:
@@ -524,9 +530,8 @@
feature['updated_display'] = (
unformatted_feature.updated.strftime("%Y-%m-%d"))
feature['new_crbug_url'] = unformatted_feature.new_crbug_url()
store_key = '%s|%s' % (Feature.DEFAULT_CACHE_KEY,
unformatted_feature.key.integer_id())
ramcache.set(store_key, feature)
store_key = feature_cache_key(Feature.DEFAULT_CACHE_KEY, unformatted_feature.key.integer_id())
rediscache.set(store_key, feature)
result_dict[unformatted_feature.key.integer_id()] = feature
result_list = [
@@ -547,7 +552,7 @@
cache_key = '%s|%s|%s|%s' % (Feature.DEFAULT_CACHE_KEY,
'cronorder', limit, version)
feature_list = ramcache.get(cache_key)
feature_list = rediscache.get(cache_key)
logging.info('getting chronological feature list')
# On cache miss, do a db query.
@@ -629,7 +634,7 @@
self._annotate_first_of_milestones(feature_list, version=version)
ramcache.set(cache_key, feature_list)
rediscache.set(cache_key, feature_list)
if not show_unlisted:
feature_list = self.filter_unlisted(feature_list)
@@ -652,7 +657,7 @@
features_by_type = {}
cache_key = '%s|%s|%s' % (
Feature.DEFAULT_CACHE_KEY, 'milestone', milestone)
cached_features_by_type = ramcache.get(cache_key)
cached_features_by_type = rediscache.get(cache_key)
if cached_features_by_type:
features_by_type = cached_features_by_type
else:
@@ -780,7 +785,7 @@
features_by_type[shipping_type] = [
f.format_for_template() for f in all_features[shipping_type]]
ramcache.set(cache_key, features_by_type)
rediscache.set(cache_key, features_by_type)
for shipping_type in features_by_type:
if not show_unlisted:
@@ -888,9 +893,9 @@
if notify:
self.__notify_feature_subscribers_of_changes(amendments, is_update)
# Invalidate ramcache for the individual feature view.
cache_key = '%s|%s' % (Feature.DEFAULT_CACHE_KEY, self.key.integer_id())
ramcache.delete(cache_key)
# Invalidate rediscache for the individual feature view.
cache_key = feature_cache_key(Feature.DEFAULT_CACHE_KEY, self.key.integer_id())
rediscache.delete(cache_key)
return key
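
Together with the read paths above, this hunk completes a cache-aside cycle: read-through on fetch, targeted delete on write. A condensed sketch, with a hypothetical feature id 123:

key = feature_cache_key(Feature.DEFAULT_CACHE_KEY, 123)  # 'features|123'
feature = rediscache.get(key)
if feature is None:
    feature = Feature.get_by_id(123).format_for_template()  # load from NDB
    rediscache.set(key, feature)
# ...after a later edit is saved (the hunk above):
rediscache.delete(key)  # the next read repopulates from NDB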
@@ -1132,15 +1137,15 @@ class FeatureEntry(ndb.Model): # Copy from Feature
@classmethod
def get_feature_entry(self, feature_id, update_cache=False):
KEY = '%s|%s' % (Feature.DEFAULT_CACHE_KEY, feature_id)
feature = ramcache.get(KEY)
KEY = feature_cache_key(FeatureEntry.DEFAULT_CACHE_KEY, feature_id)
feature = rediscache.get(KEY)
if feature is None or update_cache:
entry = FeatureEntry.get_by_id(feature_id)
if entry:
if entry.deleted:
return None
ramcache.set(KEY, entry)
rediscache.set(KEY, entry)
return entry
@@ -1175,8 +1180,8 @@ class FeatureEntry(ndb.Model): # Copy from Feature
futures = []
for fe_id in entry_ids:
lookup_key = '%s|%s' % (FeatureEntry.DEFAULT_CACHE_KEY, fe_id)
entry = ramcache.get(lookup_key)
lookup_key = feature_cache_key(FeatureEntry.DEFAULT_CACHE_KEY, fe_id)
entry = rediscache.get(lookup_key)
if entry is None or update_cache:
futures.append(FeatureEntry.get_by_id_async(fe_id))
else:
@@ -1185,9 +1190,8 @@ class FeatureEntry(ndb.Model): # Copy from Feature
for future in futures:
entry = future.get_result()
if entry and not entry.deleted:
store_key = '%s|%s' % (
FeatureEntry.DEFAULT_CACHE_KEY, entry.key.integer_id())
ramcache.set(store_key, entry)
store_key = feature_cache_key(FeatureEntry.DEFAULT_CACHE_KEY, entry.key.integer_id())
rediscache.set(store_key, entry)
result_dict[entry.key.integer_id()] = entry
result_list = [

View file

@@ -16,7 +16,7 @@ import testing_config # Must be imported before the module under test.
import datetime
from unittest import mock
from framework import ramcache
from framework import rediscache
from framework import users
from internals import core_enums
@@ -49,7 +49,6 @@ class ModelsFunctionsTest(testing_config.CustomTestCase):
class FeatureTest(testing_config.CustomTestCase):
def setUp(self):
ramcache.SharedInvalidate.check_for_distributed_invalidation()
self.feature_2 = core_models.Feature(
name='feature b', summary='sum', owner=['feature_owner@example.com'],
category=1, visibility=1, standardization=1, web_dev_views=1,
@@ -79,7 +78,7 @@ class FeatureTest(testing_config.CustomTestCase):
self.feature_2.key.delete()
self.feature_3.key.delete()
self.feature_4.key.delete()
ramcache.flush_all()
rediscache.flushall()
def test_get_all__normal(self):
"""We can retrieve a list of all features with no filter."""
@@ -164,8 +163,6 @@ class FeatureTest(testing_config.CustomTestCase):
def test_get_by_ids__cache_miss(self):
"""We can load features from datastore, and cache them for later."""
ramcache.global_cache.clear()
actual = core_models.Feature.get_by_ids([
self.feature_1.key.integer_id(),
self.feature_2.key.integer_id()])
@@ -178,12 +175,11 @@ class FeatureTest(testing_config.CustomTestCase):
self.feature_1.key.integer_id())
lookup_key_2 = '%s|%s' % (core_models.Feature.DEFAULT_CACHE_KEY,
self.feature_2.key.integer_id())
self.assertEqual('feature a', ramcache.get(lookup_key_1)['name'])
self.assertEqual('feature b', ramcache.get(lookup_key_2)['name'])
self.assertEqual('feature a', rediscache.get(lookup_key_1)['name'])
self.assertEqual('feature b', rediscache.get(lookup_key_2)['name'])
def test_get_by_ids__cache_hit(self):
"""We can load features from ramcache."""
ramcache.global_cache.clear()
"""We can load features from rediscache."""
cache_key = '%s|%s' % (
core_models.Feature.DEFAULT_CACHE_KEY, self.feature_1.key.integer_id())
cached_feature = {
@@ -191,7 +187,7 @@ class FeatureTest(testing_config.CustomTestCase):
'id': self.feature_1.key.integer_id(),
'unlisted': False
}
ramcache.set(cache_key, cached_feature)
rediscache.set(cache_key, cached_feature)
actual = core_models.Feature.get_by_ids([self.feature_1.key.integer_id()])
@@ -216,7 +212,6 @@ class FeatureTest(testing_config.CustomTestCase):
def test_get_by_ids__cached_correctly(self):
"""We should no longer be able to trigger bug #1647."""
# Cache one to try to trigger the bug.
ramcache.global_cache.clear()
core_models.Feature.get_by_ids([
self.feature_2.key.integer_id(),
])
@@ -306,7 +301,7 @@ class FeatureTest(testing_config.CustomTestCase):
cache_key = '%s|%s|%s' % (
core_models.Feature.DEFAULT_CACHE_KEY, 'milestone', 1)
cached_result = ramcache.get(cache_key)
cached_result = rediscache.get(cache_key)
self.assertEqual(cached_result, actual)
@@ -364,7 +359,7 @@ class FeatureTest(testing_config.CustomTestCase):
cache_key = '%s|%s|%s' % (
core_models.Feature.DEFAULT_CACHE_KEY, 'milestone', 1)
cached_test_feature = {'test': [{'name': 'test_feature', 'unlisted': False}]}
ramcache.set(cache_key, cached_test_feature)
rediscache.set(cache_key, cached_test_feature)
actual = core_models.Feature.get_in_milestone(milestone=1)
self.assertEqual(

View file

@@ -23,7 +23,7 @@ from internals import core_enums
from internals import core_models
from internals import user_models
from pages import featurelist
from framework import ramcache
from framework import rediscache
test_app = flask.Flask(__name__)
@@ -57,8 +57,7 @@ class TestWithFeature(testing_config.CustomTestCase):
self.feature_1.key.delete()
self.app_user.delete()
self.app_admin.delete()
ramcache.flush_all()
ramcache.check_for_distributed_invalidation()
rediscache.flushall()
class FeaturesJsonHandlerTest(TestWithFeature):

View file

@@ -18,7 +18,7 @@ import logging
from google.cloud import ndb
# Appengine imports.
from framework import ramcache
from framework import rediscache
from framework import basehandlers
from framework import permissions
@@ -64,9 +64,8 @@ class FeatureCreateHandler(basehandlers.FlaskHandler):
created_by=signed_in_user,
updated_by=signed_in_user)
key = feature.put()
# TODO(jrobbins): enumerate and remove only the relevant keys.
ramcache.flush_all()
# Remove all feature-related cache entries.
rediscache.delete_keys_with_prefix(core_models.feature_cache_prefix())
redirect_url = '/guide/edit/' + str(key.integer_id())
return self.redirect(redirect_url)
@@ -396,8 +395,8 @@ class FeatureEditHandler(basehandlers.FlaskHandler):
_auth_domain='gmail.com')
key = feature.put()
# TODO(jrobbins): enumerate and remove only the relevant keys.
ramcache.flush_all()
# Remove all feature-related cache entries.
rediscache.delete_keys_with_prefix(core_models.feature_cache_prefix())
redirect_url = '/guide/edit/' + str(key.integer_id())
return self.redirect(redirect_url)

View file

@@ -20,7 +20,7 @@ import flask
import werkzeug
import html5lib
from framework import ramcache
from framework import rediscache
from internals import core_enums
from internals import core_models
from pages import guide
@@ -39,8 +39,7 @@ class TestWithFeature(testing_config.CustomTestCase):
self.handler = self.HANDLER_CLASS()
def tearDown(self):
ramcache.flush_all()
ramcache.check_for_distributed_invalidation()
rediscache.flushall()
class FeatureCreateTest(testing_config.CustomTestCase):