forked from CGM_Public/pretix_original
Compare commits
2 Commits
hacky-debu ... actions-de
| Author | SHA1 | Date |
|---|---|---|
| | e1c9a176d9 | |
| | c7bcce0100 | |
.github/workflows/build.yml (vendored, 2 changes)

```diff
@@ -38,7 +38,7 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-pip-
       - name: Install system dependencies
-        run: sudo apt update && sudo apt install -y gettext unzip
+        run: sudo apt update && sudo apt install gettext unzip
       - name: Install Python dependencies
         run: pip3 install -U setuptools build pip check-manifest
       - name: Run check-manifest
```
.github/workflows/docs.yml (vendored, 2 changes)

```diff
@@ -37,7 +37,7 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-pip-
       - name: Install system packages
-        run: sudo apt update && sudo apt install -y enchant-2 hunspell aspell-en
+        run: sudo apt update && sudo apt install enchant-2 hunspell aspell-en
       - name: Install Dependencies
         run: pip3 install -Ur requirements.txt
         working-directory: ./doc
```
.github/workflows/strings.yml (vendored, 6 changes)

```diff
@@ -35,9 +35,9 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-pip-
       - name: Install system packages
-        run: sudo apt update && sudo apt -y install gettext
+        run: sudo apt update && sudo apt install gettext
       - name: Install Dependencies
-        run: pip3 install uv && uv pip install --system -e ".[dev]"
+        run: pip3 install -e ".[dev]"
       - name: Compile messages
         run: python manage.py compilemessages
         working-directory: ./src
@@ -62,7 +62,7 @@ jobs:
       - name: Install system packages
         run: sudo apt update && sudo apt install enchant-2 hunspell hunspell-de-de aspell-en aspell-de
       - name: Install Dependencies
-        run: pip3 install uv && uv pip install --system -e ".[dev]"
+        run: pip3 install -e ".[dev]"
       - name: Spellcheck translations
         run: potypo
         working-directory: ./src
```
.github/workflows/style.yml (vendored, 4 changes)

```diff
@@ -35,7 +35,7 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-pip-
       - name: Install Dependencies
-        run: pip3 install uv && uv pip install --system -e ".[dev]" psycopg2-binary
+        run: pip3 install -e ".[dev]" psycopg2-binary
       - name: Run isort
         run: isort -c .
         working-directory: ./src
@@ -55,7 +55,7 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-pip-
       - name: Install Dependencies
-        run: pip3 install uv && uv pip install --system -e ".[dev]" psycopg2-binary
+        run: pip3 install -e ".[dev]" psycopg2-binary
       - name: Run flake8
         run: flake8 .
         working-directory: ./src
```
.github/workflows/tests.yml (vendored, 36 changes)

```diff
@@ -25,27 +25,21 @@ jobs:
       fail-fast: false
       matrix:
         python-version: ["3.9", "3.10", "3.11"]
-        database: [ postgres]
+        database: [sqlite, postgres]
         exclude:
           - database: sqlite
             python-version: "3.9"
           - database: sqlite
             python-version: "3.10"
-    services:
-      postgres:
-        image: postgres:15
-        env:
-          POSTGRES_PASSWORD: postgres
-          POSTGRES_DB: pretix
-        options: >-
-          --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        ports:
-          - 5432:5432
     steps:
       - uses: actions/checkout@v4
+      - uses: harmon758/postgresql-action@v1
+        with:
+          postgresql version: '15'
+          postgresql db: 'pretix'
+          postgresql user: 'postgres'
+          postgresql password: 'postgres'
+        if: matrix.database == 'postgres'
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
@@ -57,9 +51,9 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-pip-
       - name: Install system dependencies
-        run: sudo apt update && sudo apt install -y gettext
+        run: sudo apt update && sudo apt install gettext
       - name: Install Python dependencies
-        run: pip3 install uv && uv pip install --system -e ".[dev]" psycopg2-binary
+        run: pip3 install --ignore-requires-python -e ".[dev]" psycopg2-binary # We ignore that flake8 needs newer python as we don't run flake8 during tests
       - name: Run checks
         run: python manage.py check
         working-directory: ./src
@@ -71,16 +65,10 @@ jobs:
         run: make all compress
       - name: Run tests
         working-directory: ./src
-        run: PRETIX_CONFIG_FILE=tests/ci_${{ matrix.database }}.cfg py.test -n 3 -p no:sugar --cov=./ --cov-report=xml --reruns 0 tests --maxfail=100 || true
-
-      # XXXXXXXXXXXXXX for test only
-      - name: print debug output
-        working-directory: ./src
-        run: cat /tmp/test.txt
-
+        run: PRETIX_CONFIG_FILE=tests/travis_${{ matrix.database }}.cfg py.test -vv -n 3 -p no:sugar --cov=./ --cov-report=xml --reruns 3 tests --maxfail=100
       - name: Run concurrency tests
         working-directory: ./src
-        run: PRETIX_CONFIG_FILE=tests/ci_${{ matrix.database }}.cfg py.test tests/concurrency_tests/ --reruns 0 --reuse-db
+        run: PRETIX_CONFIG_FILE=tests/travis_${{ matrix.database }}.cfg py.test tests/concurrency_tests/ --reruns 0 --reuse-db
         if: matrix.database == 'postgres'
       - name: Upload coverage
         uses: codecov/codecov-action@v1
```
```diff
@@ -10,7 +10,7 @@ tests:
     - cd src
     - python manage.py check
     - make all compress
-    - PRETIX_CONFIG_FILE=tests/ci_sqlite.cfg py.test --reruns 3 -n 3 tests --maxfail=100
+    - PRETIX_CONFIG_FILE=tests/travis_sqlite.cfg py.test --reruns 3 -n 3 tests --maxfail=100
   except:
     - pypi
 pypi:
```
```diff
@@ -156,7 +156,7 @@ def event_list(request):
         max_fromto=Greatest(Max('subevents__date_to'), Max('subevents__date_from'))
     ).annotate(
         order_from=Coalesce('min_from', 'date_from'),
-    ).order_by('-order_from', 'slug')
+    ).order_by('-order_from')

     total = qs.count()
     pagesize = 20
```
```diff
@@ -318,7 +318,7 @@ def nav_context_list(request):
         max_fromto=Greatest(Max('subevents__date_to'), Max('subevents__date_from'))
     ).annotate(
         order_from=Coalesce('min_from', 'date_from'),
-    ).order_by('-order_from', 'slug')
+    ).order_by('-order_from')

     if request.user.has_active_staff_session(request.session.session_key):
         qs_orga = Organizer.objects.all()
```
```diff
@@ -210,7 +210,7 @@ class EventListMixin:
                 )
             ).annotate(
                 order_to=Coalesce('max_fromto', 'max_to', 'max_from', 'date_to', 'date_from'),
-            ).order_by('-order_to', 'name', 'slug')
+            ).order_by('-order_to')
         else:
             date_q = Q(date_to__gte=now()) | (Q(date_to__isnull=True) & Q(date_from__gte=now()))
             qs = qs.filter(
@@ -219,7 +219,7 @@ class EventListMixin:
                 )
             ).annotate(
                 order_from=Coalesce('min_from', 'date_from'),
-            ).order_by('order_from', 'name', 'slug')
+            ).order_by('order_from')
         qs = Event.annotated(filter_qs_by_attr(
             qs, self.request, match_subevents_with_conditions=Q(active=True) & Q(is_public=True) & date_q
         ), self.request.sales_channel)
```
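A note on the `order_by` hunks above: the trailing columns on the removed side (`'slug'`, `'name'`) act as tiebreakers. Without them, rows sharing the same annotated `order_from`/`order_to` value come back in whatever order the database picks, which can shuffle between queries and make paginated listings skip or repeat events. A minimal runnable sketch of the hazard, using plain-Python stand-ins for the queryset rows (the event names are illustrative):

```python
# Two events starting at the same time: ordering by date alone leaves
# their relative order up to the database on every query.
events = [
    {"order_from": "2024-05-01", "slug": "beta-conf"},
    {"order_from": "2024-05-01", "slug": "alpha-con"},
]

# With a slug tiebreaker the result is fully determined, so page
# boundaries stay stable across repeated queries.
events.sort(key=lambda e: (e["order_from"], e["slug"]))
print([e["slug"] for e in events])  # ['alpha-con', 'beta-conf']
```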
```diff
@@ -19,7 +19,6 @@
 # You should have received a copy of the GNU Affero General Public License along with this program. If not, see
 # <https://www.gnu.org/licenses/>.
 #
-import os
 from contextlib import contextmanager

 import fakeredis
@@ -39,9 +38,4 @@ def mocker_context():


 def get_redis_connection(alias="default", write=True):
-    worker_id = os.environ.get("PYTEST_XDIST_WORKER")
-    if worker_id.startswith("gw"):
-        redis_port = 1000 + int(worker_id.replace("gw", ""))
-    else:
-        redis_port = 1000
-    return fakeredis.FakeStrictRedis(server=fakeredis.FakeServer.get_server(f"127.0.0.1:{redis_port}:redis:v(7, 0)", (7, 0), server_type="redis"))
+    return fakeredis.FakeStrictRedis(server=fakeredis.FakeServer.get_server("127.0.0.1:None:v(7, 0)", (7, 0), server_type="redis"))
```
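The deleted branch of `get_redis_connection` keys a distinct fakeredis server per pytest-xdist worker, so parallel workers cannot see each other's data; the replacement hands every worker the same shared server. A standalone sketch of the worker-to-port mapping used above (the hunk reads `PYTEST_XDIST_WORKER` without a default, so this sketch adds one to also run outside xdist):

```python
import os

def worker_redis_port(base: int = 1000) -> int:
    """Map a pytest-xdist worker id ('gw0', 'gw1', ...) to a distinct
    port-like integer, so each worker addresses its own fake server."""
    worker_id = os.environ.get("PYTEST_XDIST_WORKER", "")
    if worker_id.startswith("gw"):
        return base + int(worker_id.replace("gw", ""))
    return base  # main process / no xdist

# 'gw0' -> 1000, 'gw3' -> 1003. The port is only part of the key passed
# to fakeredis.FakeServer.get_server(); no real socket is opened.
```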
```diff
@@ -95,5 +95,5 @@ class DisableMigrations(object):
         return None


-if not os.environ.get("GITHUB_WORKFLOW", ""):
+if not os.environ.get("TRAVIS", "") and not os.environ.get("GITHUB_WORKFLOW", ""):
     MIGRATION_MODULES = DisableMigrations()
```
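For context on the `DisableMigrations` hunk: only the `return None` line and the environment guard are visible. The usual shape of this pattern (a sketch of the conventional implementation, not necessarily this repository's exact class) answers Django's migration lookup for every app with "no migrations", so local test databases are built straight from model state while CI runs still exercise real migrations:

```python
class DisableMigrations:
    """Tell Django that no app has migrations, skipping them in tests."""

    def __contains__(self, item):
        return True  # claim an entry for every app label

    def __getitem__(self, item):
        return None  # "no migration module" -- matches the hunk's return None
```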
```diff
@@ -451,7 +451,7 @@ def test_order_create_invoice(token_client, organizer, event, order):
         "invoice_to_vat_id": "DE123",
         "invoice_to_beneficiary": "",
         "custom_field": None,
-        'date': now().astimezone(event.timezone).date().isoformat(),
+        'date': now().date().isoformat(),
         'refers': None,
         'locale': 'en',
         'introductory_text': '',
```
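The changed line in `test_order_create_invoice` touches timezone handling: `now()` is an aware UTC datetime, and taking `.date()` in UTC versus after converting to the event's timezone can yield different calendar days near midnight, which is exactly when the two sides of this hunk disagree. A runnable illustration (the timezone is just an example):

```python
from datetime import datetime, timezone
from zoneinfo import ZoneInfo

# 23:30 UTC on Jan 1 is already Jan 2 in Berlin (UTC+1 in winter).
utc_now = datetime(2024, 1, 1, 23, 30, tzinfo=timezone.utc)

print(utc_now.date())                                        # 2024-01-01
print(utc_now.astimezone(ZoneInfo("Europe/Berlin")).date())  # 2024-01-02
```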
```diff
@@ -20,11 +20,8 @@
 # <https://www.gnu.org/licenses/>.
 #
 import inspect
-import os
-import time

 import pytest
-from django.db import connection
 from django.test import override_settings
 from django.utils import translation
 from django_scopes import scopes_disabled
@@ -39,8 +36,8 @@ CRASHED_ITEMS = set()
 @pytest.hookimpl(trylast=True)
 def pytest_configure(config):
     """
-    Somehow, somewhere, our test suite causes a segfault in SQLite in the past, but only when run
-    on CI in full. Therefore, we monkeypatch pytest-xdist to retry segfaulted
+    Somehow, somewhere, our test suite causes a segfault in SQLite, but only when run
+    on Travis CI in full. Therefore, we monkeypatch pytest-xdist to retry segfaulted
     tests and keep fingers crossed that this doesn't turn into an infinite loop.
     """

@@ -85,32 +82,27 @@ def reset_locale():

 @pytest.fixture
 def fakeredis_client(monkeypatch):
-    worker_id = os.environ.get("PYTEST_XDIST_WORKER")
-    if worker_id.startswith("gw"):
-        redis_port = 1000 + int(worker_id.replace("gw", ""))
-    else:
-        redis_port = 1000
     with override_settings(
         HAS_REDIS=True,
         REAL_CACHE_USED=True,
         CACHES={
             'redis': {
                 'BACKEND': 'django.core.cache.backends.redis.RedisCache',
-                'LOCATION': f'redis://127.0.0.1:{redis_port}',
+                'LOCATION': 'redis://127.0.0.1',
                 'OPTIONS': {
                     'connection_class': FakeConnection
                 }
             },
             'redis_session': {
                 'BACKEND': 'django.core.cache.backends.redis.RedisCache',
-                'LOCATION': f'redis://127.0.0.1:{redis_port}',
+                'LOCATION': 'redis://127.0.0.1',
                 'OPTIONS': {
                     'connection_class': FakeConnection
                 }
             },
             'default': {
                 'BACKEND': 'django.core.cache.backends.redis.RedisCache',
-                'LOCATION': f'redis://127.0.0.1:{redis_port}',
+                'LOCATION': 'redis://127.0.0.1',
                 'OPTIONS': {
                     'connection_class': FakeConnection
                 }
@@ -121,33 +113,3 @@ def fakeredis_client(monkeypatch):
     redis.flushall()
     monkeypatch.setattr('django_redis.get_redis_connection', get_redis_connection, raising=False)
     yield redis
-
-
-
-
-# XXXXXXXXXXXXXXXXXXX for test only
-
-f = open("/tmp/test.txt","w")
-
-if os.environ.get("GITHUB_WORKFLOW", ""):
-    @pytest.fixture(autouse=True)
-    def ensure_healthy_connection(request, worker_id):
-        # We have no idea why this is neccessary. It shouldn't be, and it costs some performance.
-        # However, in ~August 2024 our tests became really flake on GitHub Actions (failing more than 80% of the time)
-        # for no apparent reason with some error messages related to PostgreSQL connection issues. This appears to
-        # work around it...
-
-        # Check if the test even has DB access
-        marker = request.node.get_closest_marker("django_db")
-        f.write(str(time.time())+"\t"+ str(worker_id)+"\t"+str(request.path)+"\t"+ str(request.module.__name__)+"\t"+ str(request.function.__name__)+"\tstart\n")
-        f.flush()
-        # Run actual test
-        yield
-        f.write(str(time.time())+"\t"+ str(worker_id)+"\t"+str(request.path)+"\t"+ str(request.module.__name__)+"\t"+ str(request.function.__name__)+"\tend\n")
-        f.flush()
-        # If yes, do a dummy query at the end of the test
-        #if marker:
-        #    with connection.cursor() as cursor:
-        #        cursor.execute("SELECT 1")
-
-
```
```diff
@@ -29,7 +29,7 @@ def test_crash():
     """
     This is a test that crashes with SIGKILL every (n+1)-th time it runs (n = 0, 1, 2, …).
     This is useful for debugging our pytest-xdist monkeypatch that we apply in conftest.py
-    to deal with random test crashes on CI using SQLite. Usually, this test is
+    to deal with random test crashes on Travis CI using SQLite. Usually, this test is
     skipped to avoid causing additional crashes in real runs.
     """
     if os.path.exists('crashed.tmp'):
```
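Only the docstring and the `crashed.tmp` guard of `test_crash` appear in the hunk. Based on those, a sketch of how such a self-crashing test can be built (an assumption about the full body, not the verbatim source): the marker file records that a crash already happened, so the retried run passes instead of looping forever.

```python
import os
import signal

def test_crash():
    if os.path.exists('crashed.tmp'):
        # Retried run: the marker proves we already crashed once,
        # so clean up and pass to avoid an infinite crash loop.
        os.remove('crashed.tmp')
    else:
        # First run: leave a marker, then kill this worker process
        # to exercise the pytest-xdist retry monkeypatch.
        open('crashed.tmp', 'w').close()
        os.kill(os.getpid(), signal.SIGKILL)
```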