forked from CGM_Public/pretix_original

Compare commits: v2024.1.1...quota-cach

3 commits:

| SHA1 |
|---|
| e54837c532 |
| bc49f0f7f1 |
| 3e122e0270 |

@@ -112,6 +112,7 @@ memcached = ["pylibmc"]
 dev = [
     "coverage",
     "coveralls",
+    "fakeredis==2.18.*",
     "flake8==6.0.*",
     "freezegun",
     "isort==5.12.*",

@@ -43,6 +43,7 @@ from typing import Optional, Tuple
 from zoneinfo import ZoneInfo
 
 import dateutil.parser
+import django_redis
 from dateutil.tz import datetime_exists
 from django.conf import settings
 from django.core.exceptions import ValidationError
@@ -57,7 +58,6 @@ from django.utils.functional import cached_property
 from django.utils.timezone import is_naive, make_aware, now
 from django.utils.translation import gettext_lazy as _, pgettext_lazy
 from django_countries.fields import Country
-from django_redis import get_redis_connection
 from django_scopes import ScopedManager
 from i18nfield.fields import I18nCharField, I18nTextField
 

@@ -1910,8 +1910,13 @@ class Quota(LoggedModel):
 
     def rebuild_cache(self, now_dt=None):
         if settings.HAS_REDIS:
-            rc = get_redis_connection("redis")
-            rc.hdel(f'quotas:{self.event_id}:availabilitycache', str(self.pk))
+            rc = django_redis.get_redis_connection("redis")
+            p = rc.pipeline()
+            p.hdel(f'quotas:{self.event_id}:availabilitycache', str(self.pk))
+            p.hdel(f'quotas:{self.event_id}:availabilitycache:nocw', str(self.pk))
+            p.hdel(f'quotas:{self.event_id}:availabilitycache:igcl', str(self.pk))
+            p.hdel(f'quotas:{self.event_id}:availabilitycache:nocw:igcl', str(self.pk))
+            p.execute()
             self.availability(now_dt=now_dt)
 
     def availability(

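The pipeline batches all four deletions into a single round trip and clears every variant of the per-event availability hash. A minimal sketch of the key layout this covers, assuming an event id of 7 (the helper name is ours, not part of the patch):

```python
# Illustrative only: the hash names rebuild_cache() now clears for one event.
# The suffixes mirror QuotaAvailability._cache_key_suffix introduced further down.
def availability_cache_keys(event_id):
    base = f"quotas:{event_id}:availabilitycache"
    return [
        base,                  # waiting list counted, closed quotas respected
        base + ":nocw",        # computed without the waiting list
        base + ":igcl",        # computed ignoring closed quotas
        base + ":nocw:igcl",   # both
    ]

print(availability_cache_keys(7))
# ['quotas:7:availabilitycache', 'quotas:7:availabilitycache:nocw',
#  'quotas:7:availabilitycache:igcl', 'quotas:7:availabilitycache:nocw:igcl']
```
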
@@ -24,13 +24,13 @@ import time
 from collections import Counter, defaultdict
 from itertools import zip_longest
 
+import django_redis
 from django.conf import settings
 from django.db import models
 from django.db.models import (
     Case, Count, F, Func, Max, OuterRef, Q, Subquery, Sum, Value, When,
 )
 from django.utils.timezone import now
-from django_redis import get_redis_connection
 
 from pretix.base.models import (
     CartPosition, Checkin, Order, OrderPosition, Quota, Voucher,

@@ -102,6 +102,12 @@ class QuotaAvailability:
         self.count_waitinglist = defaultdict(int)
         self.count_cart = defaultdict(int)
 
+        self._cache_key_suffix = ""
+        if not self._count_waitinglist:
+            self._cache_key_suffix += ":nocw"
+        if self._ignore_closed:
+            self._cache_key_suffix += ":igcl"
+
         self.sizes = {}
 
     def queue(self, *quota):

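The point of the suffix is that differently configured computations no longer share a hash. A short illustrative sketch, using the constructor flag seen in the new test below and the private attribute set above:

```python
# Illustrative only: with the suffix in place, these two computations read and
# write different redis hashes, so waiting-list-aware results can no longer
# leak into waiting-list-free lookups (and vice versa).
from pretix.base.services.quotas import QuotaAvailability

qa_full = QuotaAvailability(count_waitinglist=True)   # -> quotas:<event>:availabilitycache
qa_nocw = QuotaAvailability(count_waitinglist=False)  # -> quotas:<event>:availabilitycache:nocw
assert qa_full._cache_key_suffix == ""
assert qa_nocw._cache_key_suffix == ":nocw"
```
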
@@ -121,17 +127,14 @@ class QuotaAvailability:
             if self._full_results:
                 raise ValueError("You cannot combine full_results and allow_cache.")
 
-            elif not self._count_waitinglist:
-                raise ValueError("If you set allow_cache, you need to set count_waitinglist.")
-
             elif settings.HAS_REDIS:
-                rc = get_redis_connection("redis")
+                rc = django_redis.get_redis_connection("redis")
                 quotas_by_event = defaultdict(list)
                 for q in [_q for _q in self._queue if _q.id in quota_ids_set]:
                     quotas_by_event[q.event_id].append(q)
 
                 for eventid, evquotas in quotas_by_event.items():
-                    d = rc.hmget(f'quotas:{eventid}:availabilitycache', [str(q.pk) for q in evquotas])
+                    d = rc.hmget(f'quotas:{eventid}:availabilitycache{self._cache_key_suffix}', [str(q.pk) for q in evquotas])
                     for redisval, q in zip(d, evquotas):
                         if redisval is not None:
                             data = [rv for rv in redisval.decode().split(',')]

@@ -164,12 +167,12 @@ class QuotaAvailability:
         if not settings.HAS_REDIS or not quotas:
             return
 
-        rc = get_redis_connection("redis")
+        rc = django_redis.get_redis_connection("redis")
         # We write the computed availability to redis in a per-event hash as
         #
         # quota_id -> (availability_state, availability_number, timestamp).
         #
-        # We store this in a hash instead of inidividual values to avoid making two many redis requests
+        # We store this in a hash instead of individual values to avoid making too many redis requests
         # which would introduce latency.
 
         # The individual entries in the hash are "valid" for 120 seconds. This means in a typical peak scenario with

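To make the comma-joined format above concrete, here is a small sketch of how one hash field could be encoded and decoded. The helper names are ours, and treating an unlimited quota's number as the literal string 'None' is an assumption that follows from str()-joining the result tuple:

```python
import time


# Illustrative only: one hash entry in the
# "availability_state,availability_number,timestamp" format described above.
def encode_entry(state, number):
    return ",".join([str(state), str(number), str(int(time.time()))])


def decode_entry(redisval: bytes):
    state, number, ts = redisval.decode().split(",")
    return int(state), (None if number == "None" else int(number)), int(ts)


raw = encode_entry(100, 3).encode()       # e.g. b"100,3,1716040000"
assert decode_entry(raw)[:2] == (100, 3)  # the timestamp is then checked against the 120s window
```
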
@@ -179,16 +182,16 @@ class QuotaAvailability:
         # these quotas. We choose 10 seconds since that should be well above the duration of a write.
 
         lock_name = '_'.join([str(p) for p in sorted([q.pk for q in quotas])])
-        if rc.exists(f'quotas:availabilitycachewrite:{lock_name}'):
+        if rc.exists(f'quotas:availabilitycachewrite:{lock_name}{self._cache_key_suffix}'):
             return
-        rc.setex(f'quotas:availabilitycachewrite:{lock_name}', '1', 10)
+        rc.setex(f'quotas:availabilitycachewrite:{lock_name}{self._cache_key_suffix}', '1', 10)
 
         update = defaultdict(list)
         for q in quotas:
             update[q.event_id].append(q)
 
         for eventid, quotas in update.items():
-            rc.hmset(f'quotas:{eventid}:availabilitycache', {
+            rc.hmset(f'quotas:{eventid}:availabilitycache{self._cache_key_suffix}', {
                 str(q.id): ",".join(
                     [str(i) for i in self.results[q]] +
                     [str(int(time.time()))]

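Note that the short-lived write lock now carries the same suffix, so a write for one cache variant does not suppress a write for another. A tiny illustration with a hypothetical lock name:

```python
# Illustrative only: the per-variant write locks are distinct keys.
lock_name = "12_13"  # hypothetical sorted quota pks
default_lock = f"quotas:availabilitycachewrite:{lock_name}"           # suffix ""
nocw_lock = f"quotas:availabilitycachewrite:{lock_name}" + ":nocw"    # suffix ":nocw"
assert default_lock != nocw_lock
```
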
@@ -197,7 +200,7 @@ class QuotaAvailability:
             # To make sure old events do not fill up our redis instance, we set an expiry on the cache. However, we set it
             # on 7 days even though we mostly ignore values older than 2 monites. The reasoning is that we have some places
             # where we set allow_cache_stale and use the old entries anyways to save on performance.
-            rc.expire(f'quotas:{eventid}:availabilitycache', 3600 * 24 * 7)
+            rc.expire(f'quotas:{eventid}:availabilitycache{self._cache_key_suffix}', 3600 * 24 * 7)
 
         # We used to also delete item_quota_cache:* from the event cache here, but as the cache
         # gets more complex, this does not seem worth it. The cache is only present for up to

@@ -21,6 +21,7 @@
 #
 from contextlib import contextmanager
 
+import fakeredis
 from pytest_mock import MockFixture
 
 

@@ -34,3 +35,7 @@ def mocker_context():
     result = MockFixture(FakePytestConfig())
     yield result
     result.stopall()
+
+
+def get_redis_connection(alias="default", write=True):
+    return fakeredis.FakeStrictRedis(server=fakeredis.FakeServer.get_server("127.0.0.1:None:v(7, 0)", (7, 0)))

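Because every call resolves to the same FakeServer key, the stand-in behaves like one shared in-process Redis. A short usage sketch (the hash name and values are illustrative):

```python
# Illustrative only: two "connections" share the same FakeServer, so data
# written through one is visible through the other, without a real Redis.
from pretix.testutils.mock import get_redis_connection

a = get_redis_connection("redis")
b = get_redis_connection("default")
a.hset("quotas:1:availabilitycache", "5", "100,3,1716040000")
assert b.hget("quotas:1:availabilitycache", "5") == b"100,3,1716040000"
```
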
@@ -37,6 +37,7 @@ filterwarnings =
     ignore::ResourceWarning
     ignore:django.contrib.staticfiles.templatetags.static:DeprecationWarning
     ignore::DeprecationWarning:compressor
+    ignore:.*FakeStrictRedis.hmset.*:DeprecationWarning:
     ignore:pkg_resources is deprecated as an API:
     ignore:.*pkg_resources.declare_namespace.*:
 

@@ -98,6 +98,7 @@ class BaseQuotaTestCase(TestCase):
         self.var3 = ItemVariation.objects.create(item=self.item3, value='Fancy')
 
 
+@pytest.mark.usefixtures("fakeredis_client")
 class QuotaTestCase(BaseQuotaTestCase):
     @classscope(attr='o')
     def test_available(self):

@@ -434,6 +435,62 @@ class QuotaTestCase(BaseQuotaTestCase):
         self.assertEqual(self.var1.check_quotas(), (Quota.AVAILABILITY_ORDERED, 0))
         self.assertEqual(self.var1.check_quotas(count_waitinglist=False), (Quota.AVAILABILITY_OK, 1))
 
+    @classscope(attr='o')
+    def test_waitinglist_cache_separation(self):
+        self.quota.items.add(self.item1)
+        self.quota.size = 1
+        self.quota.save()
+        WaitingListEntry.objects.create(
+            event=self.event, item=self.item1, email='foo@bar.com'
+        )
+
+        # Check that there is no "cache mixup" even across multiple runs
+        qa = QuotaAvailability(count_waitinglist=False)
+        qa.queue(self.quota)
+        qa.compute()
+        assert qa.results[self.quota] == (Quota.AVAILABILITY_OK, 1)
+
+        qa = QuotaAvailability(count_waitinglist=True)
+        qa.queue(self.quota)
+        qa.compute(allow_cache=True)
+        assert qa.results[self.quota] == (Quota.AVAILABILITY_ORDERED, 0)
+
+        qa = QuotaAvailability(count_waitinglist=True)
+        qa.queue(self.quota)
+        qa.compute(allow_cache=True)
+        assert qa.results[self.quota] == (Quota.AVAILABILITY_ORDERED, 0)
+
+        qa = QuotaAvailability(count_waitinglist=False)
+        qa.queue(self.quota)
+        qa.compute()
+        assert qa.results[self.quota] == (Quota.AVAILABILITY_OK, 1)
+
+        # Rebuild cache required
+        self.quota.size = 5
+        self.quota.save()
+
+        qa = QuotaAvailability(count_waitinglist=True)
+        qa.queue(self.quota)
+        qa.compute(allow_cache=True)
+        assert qa.results[self.quota] == (Quota.AVAILABILITY_ORDERED, 0)
+
+        qa = QuotaAvailability(count_waitinglist=False)
+        qa.queue(self.quota)
+        qa.compute(allow_cache=True)
+        assert qa.results[self.quota] == (Quota.AVAILABILITY_OK, 1)
+
+        self.quota.rebuild_cache()
+
+        qa = QuotaAvailability(count_waitinglist=True)
+        qa.queue(self.quota)
+        qa.compute(allow_cache=True)
+        assert qa.results[self.quota] == (Quota.AVAILABILITY_OK, 4)
+
+        qa = QuotaAvailability(count_waitinglist=False)
+        qa.queue(self.quota)
+        qa.compute(allow_cache=True)
+        assert qa.results[self.quota] == (Quota.AVAILABILITY_OK, 5)
+
     @classscope(attr='o')
     def test_waitinglist_variation_fulfilled(self):
         self.quota.variations.add(self.var1)

@@ -22,10 +22,14 @@
 import inspect
 
 import pytest
+from django.test import override_settings
 from django.utils import translation
 from django_scopes import scopes_disabled
+from fakeredis import FakeConnection
 from xdist.dsession import DSession
 
+from pretix.testutils.mock import get_redis_connection
+
 CRASHED_ITEMS = set()
 
 

@@ -74,3 +78,38 @@ def pytest_fixture_setup(fixturedef, request):
 @pytest.fixture(autouse=True)
 def reset_locale():
     translation.activate("en")
+
+
+@pytest.fixture
+def fakeredis_client(monkeypatch):
+    with override_settings(
+        HAS_REDIS=True,
+        REAL_CACHE_USED=True,
+        CACHES={
+            'redis': {
+                'BACKEND': 'django.core.cache.backends.redis.RedisCache',
+                'LOCATION': 'redis://127.0.0.1',
+                'OPTIONS': {
+                    'connection_class': FakeConnection
+                }
+            },
+            'redis_session': {
+                'BACKEND': 'django.core.cache.backends.redis.RedisCache',
+                'LOCATION': 'redis://127.0.0.1',
+                'OPTIONS': {
+                    'connection_class': FakeConnection
+                }
+            },
+            'default': {
+                'BACKEND': 'django.core.cache.backends.redis.RedisCache',
+                'LOCATION': 'redis://127.0.0.1',
+                'OPTIONS': {
+                    'connection_class': FakeConnection
+                }
+            },
+        }
+    ):
+        redis = get_redis_connection("default", True)
+        redis.flushall()
+        monkeypatch.setattr('django_redis.get_redis_connection', get_redis_connection, raising=False)
+        yield redis

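A hypothetical example of using the fixture directly (not part of the patch): requesting `fakeredis_client` switches the cache settings over to fakeredis, flushes it, and yields the client, so a test can seed or inspect the quota cache by hand.

```python
# Hypothetical usage sketch; this test is not part of the patch.
def test_seed_quota_cache(fakeredis_client):
    fakeredis_client.hset("quotas:1:availabilitycache", "5", "100,3,1716040000")
    assert fakeredis_client.hget("quotas:1:availabilitycache", "5") == b"100,3,1716040000"
```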