Skip to content

User cache purge #3189

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 6 commits into from
Mar 12, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 4 additions & 10 deletions tests/unit/admin/views/test_users.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
from webob.multidict import MultiDict

from warehouse.admin.views import users as views
from warehouse.packaging.models import Project

from ....common.db.accounts import User, UserFactory, EmailFactory
from ....common.db.packaging import (
Expand Down Expand Up @@ -178,6 +179,7 @@ class TestUserDelete:
def test_deletes_user(self, db_request, monkeypatch):
user = UserFactory.create()
project = ProjectFactory.create()
another_project = ProjectFactory.create()
journal = JournalEntryFactory(submitted_by=user)
RoleFactory(project=project, user=user, role_name='Owner')
deleted_user = UserFactory.create(username="deleted-user")
Expand All @@ -188,17 +190,12 @@ def test_deletes_user(self, db_request, monkeypatch):
db_request.user = UserFactory.create()
db_request.remote_addr = '10.10.10.10'

remove_project = pretend.call_recorder(lambda *a, **kw: None)
monkeypatch.setattr(views, 'remove_project', remove_project)

result = views.user_delete(db_request)

db_request.db.flush()

assert not db_request.db.query(User).get(user.id)
assert remove_project.calls == [
pretend.call(project, db_request, flash=False),
]
assert db_request.db.query(Project).all() == [another_project]
assert db_request.route_path.calls == [pretend.call('admin.user.list')]
assert result.status_code == 303
assert result.location == '/foobar'
Expand All @@ -213,15 +210,12 @@ def test_deletes_user_bad_confirm(self, db_request, monkeypatch):
db_request.params = {'username': 'wrong'}
db_request.route_path = pretend.call_recorder(lambda a, **k: '/foobar')

remove_project = pretend.call_recorder(lambda *a, **kw: None)
monkeypatch.setattr(views, 'remove_project', remove_project)

result = views.user_delete(db_request)

db_request.db.flush()

assert db_request.db.query(User).get(user.id)
assert remove_project.calls == []
assert db_request.db.query(Project).all() == [project]
assert db_request.route_path.calls == [
pretend.call('admin.user.detail', user_id=user.id),
]
Expand Down
49 changes: 36 additions & 13 deletions tests/unit/cache/origin/test_init.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,32 +204,55 @@ class TestKeyMaker:
def test_both_cache_and_purge(self):
    """Factory with both cache and purge keys yields a populated CacheKeys."""
    maker = origin.key_maker_factory(
        cache_keys=["foo", "foo/{obj.attr}"],
        purge_keys=[
            origin.key_factory("bar"),
            origin.key_factory("bar/{obj.attr}"),
        ],
    )
    keys = maker(pretend.stub(attr="bar"))

    assert isinstance(keys, origin.CacheKeys)
    assert keys.cache == ["foo", "foo/bar"]
    assert list(keys.purge) == ["bar", "bar/bar"]

def test_only_cache(self):
    """With purge_keys=None the resulting purge sequence is empty."""
    maker = origin.key_maker_factory(
        cache_keys=["foo", "foo/{obj.attr}"],
        purge_keys=None,
    )
    keys = maker(pretend.stub(attr="bar"))

    assert isinstance(keys, origin.CacheKeys)
    assert keys.cache == ["foo", "foo/bar"]
    assert list(keys.purge) == []

def test_only_purge(self):
    """With cache_keys=None the resulting cache list is empty."""
    maker = origin.key_maker_factory(
        cache_keys=None,
        purge_keys=[
            origin.key_factory("bar"),
            origin.key_factory("bar/{obj.attr}"),
        ],
    )
    keys = maker(pretend.stub(attr="bar"))

    assert isinstance(keys, origin.CacheKeys)
    assert keys.cache == []
    assert list(keys.purge) == ["bar", "bar/bar"]

def test_iterate_on(self):
    """A purge key built with ``iterate_on`` expands once per iterated item."""
    maker = origin.key_maker_factory(
        cache_keys=['foo'],  # Intentionally does not support `iterate_on`
        purge_keys=[
            origin.key_factory('bar'),
            origin.key_factory('bar/{itr}', iterate_on='iterate_me'),
        ],
    )
    keys = maker(pretend.stub(iterate_me=['biz', 'baz']))

    assert isinstance(keys, origin.CacheKeys)
    assert keys.cache == ['foo']
    assert list(keys.purge) == ['bar', 'bar/biz', 'bar/baz']


def test_register_origin_keys(monkeypatch):
Expand Down
29 changes: 25 additions & 4 deletions tests/unit/packaging/test_init.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

from warehouse import packaging
from warehouse.packaging.interfaces import IDownloadStatService, IFileStorage
from warehouse.packaging.models import Project, Release
from warehouse.packaging.models import Project, Release, User
from warehouse.packaging.tasks import compute_trending


Expand All @@ -33,6 +33,11 @@ def test_includme(monkeypatch, with_trending):
packaging, "RedisDownloadStatService", download_stat_service_cls,
)

def key_factory(keystring, iterate_on=None):
return pretend.call(keystring, iterate_on=iterate_on)

monkeypatch.setattr(packaging, 'key_factory', key_factory)

config = pretend.stub(
maybe_dotted=lambda dotted: storage_class,
register_service=pretend.call_recorder(
Expand Down Expand Up @@ -72,14 +77,30 @@ def test_includme(monkeypatch, with_trending):
pretend.call(
Project,
cache_keys=["project/{obj.normalized_name}"],
purge_keys=["project/{obj.normalized_name}", "all-projects"],
purge_keys=[
key_factory("project/{obj.normalized_name}"),
key_factory("user/{itr.username}", iterate_on='users'),
key_factory("all-projects"),
],
),
pretend.call(
Release,
cache_keys=["project/{obj.project.normalized_name}"],
purge_keys=[
"project/{obj.project.normalized_name}",
"all-projects",
key_factory("project/{obj.project.normalized_name}"),
key_factory("user/{itr.username}", iterate_on='project.users'),
key_factory("all-projects"),
],
),
pretend.call(
User,
cache_keys=["user/{obj.username}"],
purge_keys=[
key_factory("user/{obj.username}"),
key_factory(
"project/{itr.normalized_name}",
iterate_on='projects',
),
],
),
]
Expand Down
21 changes: 14 additions & 7 deletions warehouse/admin/views/users.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,8 @@

from warehouse import forms
from warehouse.accounts.models import User, Email
from warehouse.packaging.models import JournalEntry, Role
from warehouse.packaging.models import JournalEntry, Project, Role
from warehouse.utils.paginate import paginate_url_factory
from warehouse.utils.project import remove_project


@view_config(
Expand Down Expand Up @@ -139,16 +138,24 @@ def user_delete(request):
user = request.db.query(User).get(request.matchdict['user_id'])

if user.username != request.params.get('username'):
print(user.username)
print(request.params.get('username'))
request.session.flash(f'Wrong confirmation input.', queue='error')
return HTTPSeeOther(
request.route_path('admin.user.detail', user_id=user.id)
)

# Delete projects one by one so they are purged from the cache
for project in user.projects:
remove_project(project, request, flash=False)
# Delete all the user's projects
projects = (
request.db.query(Project)
.filter(
Project.name.in_(
request.db.query(Project.name)
.join(Role.project)
.filter(Role.user == user)
.subquery()
)
)
)
projects.delete(synchronize_session=False)

# Update all journals to point to `deleted-user` instead
deleted_user = (
Expand Down
22 changes: 21 additions & 1 deletion warehouse/cache/origin/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@

import collections
import functools
import operator
from itertools import chain

from warehouse import db
from warehouse.cache.origin.interfaces import IOriginCache
Expand Down Expand Up @@ -87,6 +89,18 @@ def wrapped(context, request):
CacheKeys = collections.namedtuple("CacheKeys", ["cache", "purge"])


def key_factory(keystring, iterate_on=None):
    """Build a generator of formatted cache/purge keys for an object.

    The returned callable accepts an object and yields ``keystring``
    formatted with ``obj=<object>``. When ``iterate_on`` names an
    attribute (dotted paths allowed) of the object, one key is yielded
    per item of that attribute, with the item bound to ``{itr}``.
    """
    # Resolve the attribute getter once, up front; ``None`` means the
    # plain single-key case.
    attr_getter = operator.attrgetter(iterate_on) if iterate_on else None

    def generate_key(obj):
        if attr_getter is None:
            yield keystring.format(obj=obj)
        else:
            for item in attr_getter(obj):
                yield keystring.format(itr=item, obj=obj)

    return generate_key


def key_maker_factory(cache_keys, purge_keys):
if cache_keys is None:
cache_keys = []
Expand All @@ -96,8 +110,14 @@ def key_maker_factory(cache_keys, purge_keys):

def key_maker(obj):
return CacheKeys(
# Note: this does not support setting the `cache` argument via
# multiple `key_factories` as we do with `purge` because there is
# a limit to how many surrogate keys we can attach to a single HTTP
# response, and being able to use use `iterate_on` would allow this
# size to be unbounded.
# ref: https://github.com/pypa/warehouse/pull/3189
cache=[k.format(obj=obj) for k in cache_keys],
purge=[k.format(obj=obj) for k in purge_keys],
purge=chain.from_iterable(key(obj) for key in purge_keys),
)

return key_maker
Expand Down
22 changes: 20 additions & 2 deletions warehouse/packaging/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@

from celery.schedules import crontab

from warehouse.accounts.models import User
from warehouse.cache.origin import key_factory
from warehouse.packaging.interfaces import IDownloadStatService, IFileStorage
from warehouse.packaging.services import RedisDownloadStatService
from warehouse.packaging.models import Project, Release
Expand Down Expand Up @@ -39,12 +41,28 @@ def includeme(config):
config.register_origin_cache_keys(
Project,
cache_keys=["project/{obj.normalized_name}"],
purge_keys=["project/{obj.normalized_name}", "all-projects"],
purge_keys=[
key_factory("project/{obj.normalized_name}"),
key_factory("user/{itr.username}", iterate_on='users'),
key_factory("all-projects"),
],
)
config.register_origin_cache_keys(
Release,
cache_keys=["project/{obj.project.normalized_name}"],
purge_keys=["project/{obj.project.normalized_name}", "all-projects"],
purge_keys=[
key_factory("project/{obj.project.normalized_name}"),
key_factory("user/{itr.username}", iterate_on='project.users'),
key_factory("all-projects"),
],
)
config.register_origin_cache_keys(
User,
cache_keys=["user/{obj.username}"],
purge_keys=[
key_factory("user/{obj.username}"),
key_factory("project/{itr.normalized_name}", iterate_on='projects')
],
)

# Add a periodic task to compute trending once a day, assuming we have
Expand Down