Compare commits

..

4 Commits

Author SHA1 Message Date
Richard Schreiber
4e1e082ee3 fix icon alignment 2023-06-05 09:40:56 +02:00
Richard Schreiber
9a57371f9e move seat-info inside panel next to date-info 2023-06-05 09:40:56 +02:00
Richard Schreiber
8bc7045bba add seating-info to panel-title 2023-06-05 09:40:56 +02:00
Richard Schreiber
3c29223e5c Checkout/Add-ons: Do not show products without add-ons 2023-06-05 09:40:56 +02:00
490 changed files with 164000 additions and 358843 deletions

View File

@@ -35,7 +35,7 @@ jobs:
restore-keys: | restore-keys: |
${{ runner.os }}-pip- ${{ runner.os }}-pip-
- name: Install Dependencies - name: Install Dependencies
run: pip3 install -e ".[dev]" psycopg2-binary run: pip3 install -e ".[dev]" mysqlclient psycopg2-binary
- name: Run isort - name: Run isort
run: isort -c . run: isort -c .
working-directory: ./src working-directory: ./src
@@ -55,7 +55,7 @@ jobs:
restore-keys: | restore-keys: |
${{ runner.os }}-pip- ${{ runner.os }}-pip-
- name: Install Dependencies - name: Install Dependencies
run: pip3 install -e ".[dev]" psycopg2-binary run: pip3 install -e ".[dev]" mysqlclient psycopg2-binary
- name: Run flake8 - name: Run flake8
run: flake8 . run: flake8 .
working-directory: ./src working-directory: ./src

View File

@@ -25,17 +25,27 @@ jobs:
strategy: strategy:
matrix: matrix:
python-version: ["3.9", "3.10", "3.11"] python-version: ["3.9", "3.10", "3.11"]
database: [sqlite, postgres] database: [sqlite, postgres, mysql]
exclude: exclude:
- database: mysql
python-version: "3.9"
- database: mysql
python-version: "3.11"
- database: sqlite - database: sqlite
python-version: "3.9" python-version: "3.9"
- database: sqlite - database: sqlite
python-version: "3.10" python-version: "3.10"
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: getong/mariadb-action@v1.1
with:
mariadb version: '10.10'
mysql database: 'pretix'
mysql root password: ''
if: matrix.database == 'mysql'
- uses: harmon758/postgresql-action@v1 - uses: harmon758/postgresql-action@v1
with: with:
postgresql version: '15' postgresql version: '11'
postgresql db: 'pretix' postgresql db: 'pretix'
postgresql user: 'postgres' postgresql user: 'postgres'
postgresql password: 'postgres' postgresql password: 'postgres'
@@ -51,9 +61,9 @@ jobs:
restore-keys: | restore-keys: |
${{ runner.os }}-pip- ${{ runner.os }}-pip-
- name: Install system dependencies - name: Install system dependencies
run: sudo apt update && sudo apt install gettext run: sudo apt update && sudo apt install gettext mariadb-client
- name: Install Python dependencies - name: Install Python dependencies
run: pip3 install --ignore-requires-python -e ".[dev]" psycopg2-binary # We ignore that flake8 needs newer python as we don't run flake8 during tests run: pip3 install --ignore-requires-python -e ".[dev]" mysqlclient psycopg2-binary # We ignore that flake8 needs newer python as we don't run flake8 during tests
- name: Run checks - name: Run checks
run: python manage.py check run: python manage.py check
working-directory: ./src working-directory: ./src
@@ -66,10 +76,6 @@ jobs:
- name: Run tests - name: Run tests
working-directory: ./src working-directory: ./src
run: PRETIX_CONFIG_FILE=tests/travis_${{ matrix.database }}.cfg py.test -n 3 -p no:sugar --cov=./ --cov-report=xml --reruns 3 tests --maxfail=100 run: PRETIX_CONFIG_FILE=tests/travis_${{ matrix.database }}.cfg py.test -n 3 -p no:sugar --cov=./ --cov-report=xml --reruns 3 tests --maxfail=100
- name: Run concurrency tests
working-directory: ./src
run: PRETIX_CONFIG_FILE=tests/travis_${{ matrix.database }}.cfg py.test tests/concurrency_tests/ --reruns 0 --reuse-db
if: matrix.database == 'postgres'
- name: Upload coverage - name: Upload coverage
uses: codecov/codecov-action@v1 uses: codecov/codecov-action@v1
with: with:

View File

@@ -1,8 +1,9 @@
FROM python:3.11-bookworm FROM python:3.11-bullseye
RUN apt-get update && \ RUN apt-get update && \
apt-get install -y --no-install-recommends \ apt-get install -y --no-install-recommends \
build-essential \ build-essential \
libmariadb-dev \
gettext \ gettext \
git \ git \
libffi-dev \ libffi-dev \
@@ -20,20 +21,21 @@ RUN apt-get update && \
supervisor \ supervisor \
libmaxminddb0 \ libmaxminddb0 \
libmaxminddb-dev \ libmaxminddb-dev \
zlib1g-dev \ zlib1g-dev && \
nodejs \
npm && \
apt-get clean && \ apt-get clean && \
rm -rf /var/lib/apt/lists/* && \ rm -rf /var/lib/apt/lists/* && \
dpkg-reconfigure locales && \ dpkg-reconfigure locales && \
locale-gen C.UTF-8 && \ locale-gen C.UTF-8 && \
/usr/sbin/update-locale LANG=C.UTF-8 && \ /usr/sbin/update-locale LANG=C.UTF-8 && \
mkdir /etc/pretix && \ mkdir /etc/pretix && \
mkdir /data && \ mkdir /data && \
useradd -ms /bin/bash -d /pretix -u 15371 pretixuser && \ useradd -ms /bin/bash -d /pretix -u 15371 pretixuser && \
echo 'pretixuser ALL=(ALL) NOPASSWD:SETENV: /usr/bin/supervisord' >> /etc/sudoers && \ echo 'pretixuser ALL=(ALL) NOPASSWD:SETENV: /usr/bin/supervisord' >> /etc/sudoers && \
mkdir /static && \ mkdir /static && \
mkdir /etc/supervisord mkdir /etc/supervisord && \
curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - && \
apt-get install -y nodejs && \
curl -qL https://www.npmjs.com/install.sh | sh
ENV LC_ALL=C.UTF-8 \ ENV LC_ALL=C.UTF-8 \
@@ -56,17 +58,17 @@ RUN pip3 install -U \
wheel && \ wheel && \
cd /pretix && \ cd /pretix && \
PRETIX_DOCKER_BUILD=TRUE pip3 install \ PRETIX_DOCKER_BUILD=TRUE pip3 install \
-e ".[memcached]" \ -e ".[memcached,mysql]" \
gunicorn django-extensions ipython && \ gunicorn django-extensions ipython && \
rm -rf ~/.cache/pip rm -rf ~/.cache/pip
RUN chmod +x /usr/local/bin/pretix && \ RUN chmod +x /usr/local/bin/pretix && \
rm /etc/nginx/sites-enabled/default && \ rm /etc/nginx/sites-enabled/default && \
cd /pretix/src && \ cd /pretix/src && \
rm -f pretix.cfg && \ rm -f pretix.cfg && \
mkdir -p data && \ mkdir -p data && \
chown -R pretixuser:pretixuser /pretix /data data && \ chown -R pretixuser:pretixuser /pretix /data data && \
sudo -u pretixuser make production sudo -u pretixuser make production
USER pretixuser USER pretixuser
VOLUME ["/etc/pretix", "/data"] VOLUME ["/etc/pretix", "/data"]

View File

@@ -1,4 +1,4 @@
from pretix.settings import * from pretix.settings import *
LOGGING['handlers']['mail_admins']['include_html'] = True LOGGING['handlers']['mail_admins']['include_html'] = True
STORAGES["staticfiles"]["BACKEND"] = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage' STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'

View File

@@ -152,32 +152,24 @@ Example::
password=abcd password=abcd
host=localhost host=localhost
port=3306 port=3306
advisory_lock_index=1
sslmode=require
sslrootcert=/etc/pretix/postgresql-ca.crt
sslcert=/etc/pretix/postgresql-client-crt.crt
sslkey=/etc/pretix/postgresql-client-key.key
``backend`` ``backend``
One of ``sqlite3`` and ``postgresql``. One of ``mysql`` (deprecated), ``sqlite3`` and ``postgresql``.
Default: ``sqlite3``. Default: ``sqlite3``.
If you use MySQL, be sure to create your database using
``CREATE DATABASE <dbname> CHARACTER SET utf8;``. Otherwise, Unicode
support will not properly work.
``name`` ``name``
The database's name. Default: ``db.sqlite3``. The database's name. Default: ``db.sqlite3``.
``user``, ``password``, ``host``, ``port`` ``user``, ``password``, ``host``, ``port``
Connection details for the database connection. Empty by default. Connection details for the database connection. Empty by default.
``advisory_lock_index`` ``galera``
On PostgreSQL, pretix uses the "advisory lock" feature. However, advisory locks use a server-wide name space and (Deprecated) Indicates if the database backend is a MySQL/MariaDB Galera cluster and
are not scoped to a specific database. If you run multiple pretix applications with the same PostgreSQL server, turns on some optimizations/special case handlers. Default: ``False``
you should set separate values for this setting (integers up to 256).
``sslmode``, ``sslrootcert``
Connection TLS details for the PostgreSQL database connection. Possible values of ``sslmode`` are ``disable``, ``allow``, ``prefer``, ``require``, ``verify-ca``, and ``verify-full``. ``sslrootcert`` should be the accessible path of the ca certificate. Both values are empty by default.
``sslcert``, ``sslkey``
Connection mTLS details for the PostgreSQL database connection. It's also necessary to specify ``sslmode`` and ``sslrootcert`` parameters, please check the correct values from the TLS part. ``sslcert`` should be the accessible path of the client certificate. ``sslkey`` should be the accessible path of the client key. All values are empty by default.
.. _`config-replica`: .. _`config-replica`:
@@ -340,10 +332,6 @@ to speed up various operations::
["sentinel_host_3", 26379] ["sentinel_host_3", 26379]
] ]
password=password password=password
ssl_cert_reqs=required
ssl_ca_certs=/etc/pretix/redis-ca.pem
ssl_keyfile=/etc/pretix/redis-client-crt.pem
ssl_certfile=/etc/pretix/redis-client-key.key
``location`` ``location``
The location of redis, as a URL of the form ``redis://[:password]@localhost:6379/0`` The location of redis, as a URL of the form ``redis://[:password]@localhost:6379/0``
@@ -367,22 +355,6 @@ to speed up various operations::
If your redis setup doesn't require a password or you already specified it in the location you can omit this option. If your redis setup doesn't require a password or you already specified it in the location you can omit this option.
If this is set it will be passed to redis as the connection option PASSWORD. If this is set it will be passed to redis as the connection option PASSWORD.
``ssl_cert_reqs``
If this is set it will be passed to redis as the connection option ``SSL_CERT_REQS``.
Possible values are ``none``, ``optional``, and ``required``.
``ssl_ca_certs``
If your redis setup doesn't require TLS you can omit this option.
If this is set it will be passed to redis as the connection option ``SSL_CA_CERTS``. Possible value is the ca path.
``ssl_keyfile``
If your redis setup doesn't require mTLS you can omit this option.
If this is set it will be passed to redis as the connection option ``SSL_KEYFILE``. Possible value is the keyfile path.
``ssl_certfile``
If your redis setup doesn't require mTLS you can omit this option.
If this is set it will be passed to redis as the connection option ``SSL_CERTFILE``. Possible value is the certfile path.
If redis is not configured, pretix will store sessions and locks in the database. If memcached If redis is not configured, pretix will store sessions and locks in the database. If memcached
is configured, memcached will be used for caching instead of redis. is configured, memcached will be used for caching instead of redis.
@@ -432,8 +404,6 @@ The two ``transport_options`` entries can be omitted in most cases.
If they are present they need to be a valid JSON dictionary. If they are present they need to be a valid JSON dictionary.
For possible entries in that dictionary see the `Celery documentation`_. For possible entries in that dictionary see the `Celery documentation`_.
It is possible to use Redis with TLS/mTLS for the broker or the backend. To do so, it is necessary to specify the TLS identifier ``rediss``, the ssl mode ``ssl_cert_reqs`` and optionally specify the CA (TLS) ``ssl_ca_certs``, cert ``ssl_certfile`` and key ``ssl_keyfile`` (mTLS) path as an encoded string. The following URI describes the format and possible parameters ``rediss://0.0.0.0:6379/1?ssl_cert_reqs=required&ssl_ca_certs=%2Fetc%2Fpretix%2Fredis-ca.pem&ssl_certfile=%2Fetc%2Fpretix%2Fredis-client-crt.pem&ssl_keyfile=%2Fetc%2Fpretix%2Fredis-client-key.key``
To use redis with sentinels set the broker or backend to ``sentinel://sentinel_host_1:26379;sentinel_host_2:26379/0`` To use redis with sentinels set the broker or backend to ``sentinel://sentinel_host_1:26379;sentinel_host_2:26379/0``
and the respective transport_options to ``{"master_name":"mymaster"}``. and the respective transport_options to ``{"master_name":"mymaster"}``.
If your redis instances behind the sentinel have a password use ``sentinel://:my_password@sentinel_host_1:26379;sentinel_host_2:26379/0``. If your redis instances behind the sentinel have a password use ``sentinel://:my_password@sentinel_host_1:26379;sentinel_host_2:26379/0``.

View File

@@ -26,7 +26,7 @@ installation guides):
* `Docker`_ * `Docker`_
* A SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for * A SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for
* A HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections * A HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections
* A `PostgreSQL`_ 11+ database server * A `PostgreSQL`_ 9.6+ database server
* A `redis`_ server * A `redis`_ server
We also recommend that you use a firewall, although this is not a pretix-specific recommendation. If you're new to We also recommend that you use a firewall, although this is not a pretix-specific recommendation. If you're new to
@@ -321,11 +321,11 @@ workers, e.g. ``docker run … taskworker -Q notifications --concurrency 32``.
.. _Docker: https://docs.docker.com/engine/installation/linux/debian/ .. _Docker: https://docs.docker.com/engine/installation/linux/debian/
.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-22-04 .. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-16-04
.. _nginx: https://botleg.com/stories/https-with-lets-encrypt-and-nginx/ .. _nginx: https://botleg.com/stories/https-with-lets-encrypt-and-nginx/
.. _Let's Encrypt: https://letsencrypt.org/ .. _Let's Encrypt: https://letsencrypt.org/
.. _pretix.eu: https://pretix.eu/ .. _pretix.eu: https://pretix.eu/
.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-22-04 .. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-20-04
.. _redis: https://blog.programster.org/debian-8-install-redis-server/ .. _redis: https://blog.programster.org/debian-8-install-redis-server/
.. _ufw: https://en.wikipedia.org/wiki/Uncomplicated_Firewall .. _ufw: https://en.wikipedia.org/wiki/Uncomplicated_Firewall
.. _redis website: https://redis.io/topics/security .. _redis website: https://redis.io/topics/security

View File

@@ -68,7 +68,7 @@ generated key and installs the plugin from the URL we told you::
mkdir -p /etc/ssh && \ mkdir -p /etc/ssh && \
ssh-keyscan -t rsa -p 10022 code.rami.io >> /root/.ssh/known_hosts && \ ssh-keyscan -t rsa -p 10022 code.rami.io >> /root/.ssh/known_hosts && \
echo StrictHostKeyChecking=no >> /root/.ssh/config && \ echo StrictHostKeyChecking=no >> /root/.ssh/config && \
DJANGO_SETTINGS_MODULE= pip3 install -U "git+ssh://git@code.rami.io:10022/pretix/pretix-slack.git@stable#egg=pretix-slack" && \ DJANGO_SETTINGS_MODULE=pretix.settings pip3 install -U "git+ssh://git@code.rami.io:10022/pretix/pretix-slack.git@stable#egg=pretix-slack" && \
cd /pretix/src && \ cd /pretix/src && \
sudo -u pretixuser make production sudo -u pretixuser make production
USER pretixuser USER pretixuser

View File

@@ -16,11 +16,14 @@ To use pretix, you will need the following things:
* A periodic task runner, e.g. ``cron`` * A periodic task runner, e.g. ``cron``
* **A database**. This needs to be a SQL-based database that is supported by Django. We highly recommend to either * **A database**. This needs to be a SQL-based database that is supported by Django. We highly recommend to either
go for **PostgreSQL**. If you do not provide one, pretix will run on SQLite, which is useful go for **PostgreSQL** or **MySQL/MariaDB**. If you do not provide one, pretix will run on SQLite, which is useful
for evaluation and development purposes. for evaluation and development purposes.
.. warning:: Do not ever use SQLite in production. It will break. .. warning:: Do not ever use SQLite in production. It will break.
.. warning:: We recommend **PostgreSQL**. If you go for MySQL, make sure you run **MySQL 5.7 or newer** or
**MariaDB 10.2.7 or newer**.
* A **reverse proxy**. pretix needs to deliver some static content to your users (e.g. CSS, images, ...). While pretix * A **reverse proxy**. pretix needs to deliver some static content to your users (e.g. CSS, images, ...). While pretix
is capable of doing this, having this handled by a proper web server like **nginx** or **Apache** will be much is capable of doing this, having this handled by a proper web server like **nginx** or **Apache** will be much
faster. Also, you need a proxying web server in front to provide SSL encryption. faster. Also, you need a proxying web server in front to provide SSL encryption.

View File

@@ -12,7 +12,7 @@ solution with many things readily set-up, look at :ref:`dockersmallscale`.
get it right. If you're not feeling comfortable managing a Linux server, check out our hosting and service get it right. If you're not feeling comfortable managing a Linux server, check out our hosting and service
offers at `pretix.eu`_. offers at `pretix.eu`_.
We tested this guide on the Linux distribution **Debian 12** but it should work very similar on other We tested this guide on the Linux distribution **Debian 11.6** but it should work very similar on other
modern distributions, especially on all systemd-based ones. modern distributions, especially on all systemd-based ones.
Requirements Requirements
@@ -21,7 +21,6 @@ Requirements
Please set up the following systems beforehand, we'll not explain them here in detail (but see these links for external Please set up the following systems beforehand, we'll not explain them here in detail (but see these links for external
installation guides): installation guides):
* A python 3.9+ installation
* A SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for * A SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for
* A HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections * A HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections
* A `PostgreSQL`_ 11+ database server * A `PostgreSQL`_ 11+ database server
@@ -64,7 +63,7 @@ Package dependencies
To build and run pretix, you will need the following debian packages:: To build and run pretix, you will need the following debian packages::
# apt-get install git build-essential python3-dev python3-venv python3 python3-pip \ # apt-get install git build-essential python-dev python3-venv python3 python3-pip \
python3-dev libxml2-dev libxslt1-dev libffi-dev zlib1g-dev libssl-dev \ python3-dev libxml2-dev libxslt1-dev libffi-dev zlib1g-dev libssl-dev \
gettext libpq-dev libjpeg-dev libopenjp2-7-dev gettext libpq-dev libjpeg-dev libopenjp2-7-dev
@@ -130,10 +129,9 @@ We now install pretix, its direct dependencies and gunicorn::
Note that you need Python 3.9 or newer. You can find out your Python version using ``python -V``. Note that you need Python 3.9 or newer. You can find out your Python version using ``python -V``.
We also need to create a data directory and allow your webserver to traverse to the root directory:: We also need to create a data directory::
(venv)$ mkdir -p /var/pretix/data/media (venv)$ mkdir -p /var/pretix/data/media
(venv)$ chmod +x /var/pretix
Finally, we compile static files and translation data and create the database structure:: Finally, we compile static files and translation data and create the database structure::
@@ -249,14 +247,14 @@ The following snippet is an example on how to configure a nginx proxy for pretix
} }
location /static/ { location /static/ {
alias /var/pretix/venv/lib/python3.11/site-packages/pretix/static.dist/; alias /var/pretix/venv/lib/python3.10/site-packages/pretix/static.dist/;
access_log off; access_log off;
expires 365d; expires 365d;
add_header Cache-Control "public"; add_header Cache-Control "public";
} }
} }
.. note:: Remember to replace the ``python3.11`` in the ``/static/`` path in the config .. note:: Remember to replace the ``python3.10`` in the ``/static/`` path in the config
above with your python version. above with your python version.
We recommend reading about setting `strong encryption settings`_ for your web server. We recommend reading about setting `strong encryption settings`_ for your web server.
@@ -325,11 +323,11 @@ Then, proceed like after any plugin installation::
(venv)$ python -m pretix updatestyles (venv)$ python -m pretix updatestyles
# systemctl restart pretix-web pretix-worker # systemctl restart pretix-web pretix-worker
.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-22-04 .. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-16-04
.. _nginx: https://botleg.com/stories/https-with-lets-encrypt-and-nginx/ .. _nginx: https://botleg.com/stories/https-with-lets-encrypt-and-nginx/
.. _Let's Encrypt: https://letsencrypt.org/ .. _Let's Encrypt: https://letsencrypt.org/
.. _pretix.eu: https://pretix.eu/ .. _pretix.eu: https://pretix.eu/
.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-22-04 .. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-20-04
.. _redis: https://blog.programster.org/debian-8-install-redis-server/ .. _redis: https://blog.programster.org/debian-8-install-redis-server/
.. _ufw: https://en.wikipedia.org/wiki/Uncomplicated_Firewall .. _ufw: https://en.wikipedia.org/wiki/Uncomplicated_Firewall
.. _strong encryption settings: https://mozilla.github.io/server-side-tls/ssl-config-generator/ .. _strong encryption settings: https://mozilla.github.io/server-side-tls/ssl-config-generator/

View File

@@ -3,11 +3,11 @@
Migrating from MySQL/MariaDB to PostgreSQL Migrating from MySQL/MariaDB to PostgreSQL
========================================== ==========================================
Our recommended database for all production installations is PostgreSQL. Support for MySQL/MariaDB has been removed Our recommended database for all production installations is PostgreSQL. Support for MySQL/MariaDB will be removed in
in newer pretix releases. pretix 5.0.
In order to follow this guide, your pretix installation needs to be a version that fully supports MySQL/MariaDB. If you In order to follow this guide, your pretix installation needs to be a version that fully supports MySQL/MariaDB. If you
already upgraded to pretix 5.0 or later, downgrade back to the last 4.x release using ``pip``. already upgraded to pretix 5.0, downgrade back to the last 4.x release using ``pip``.
.. note:: We have tested this guide carefully, but we can't assume any liability for its correctness. The data loss .. note:: We have tested this guide carefully, but we can't assume any liability for its correctness. The data loss
risk should be low as long as pretix is not running while you do the migration. If you are a pretix Enterprise risk should be low as long as pretix is not running while you do the migration. If you are a pretix Enterprise
@@ -16,17 +16,12 @@ already upgraded to pretix 5.0 or later, downgrade back to the last 4.x release
Update database schema Update database schema
---------------------- ----------------------
Before you start, make sure your database schema is up to date. With a local installation:: Before you start, make sure your database schema is up to date::
# sudo -u pretix -s # sudo -u pretix -s
$ source /var/pretix/venv/bin/activate $ source /var/pretix/venv/bin/activate
(venv)$ python -m pretix migrate (venv)$ python -m pretix migrate
With a docker installation::
docker exec -it pretix.service pretix migrate
Install PostgreSQL Install PostgreSQL
------------------ ------------------
@@ -75,14 +70,10 @@ Of course, instead of all this you can also run a PostgreSQL docker container an
Stop pretix Stop pretix
----------- -----------
To prevent any more changes to your data, stop pretix from running. With a local installation:: To prevent any more changes to your data, stop pretix from running::
# systemctl stop pretix-web pretix-worker # systemctl stop pretix-web pretix-worker
With docker::
# systemctl stop pretix
Change configuration Change configuration
-------------------- --------------------
@@ -99,16 +90,12 @@ Change the database configuration in your ``/etc/pretix/pretix.cfg`` file::
Create database schema Create database schema
----------------------- -----------------------
To create the schema in your new PostgreSQL database, use the following commands. With a local installation:: To create the schema in your new PostgreSQL database, use the following commands::
# sudo -u pretix -s # sudo -u pretix -s
$ source /var/pretix/venv/bin/activate $ source /var/pretix/venv/bin/activate
(venv)$ python -m pretix migrate (venv)$ python -m pretix migrate
With docker::
# docker run --rm -v /var/pretix-data:/data -v /etc/pretix:/etc/pretix -v /var/run/redis:/var/run/redis pretix/standalone:stable migrate
Migrate your data Migrate your data
----------------- -----------------
@@ -157,18 +144,11 @@ Afterwards, delete the file again::
Start pretix Start pretix
------------ ------------
Stop your MySQL server as a verification step that you are no longer using it:: Now, restart pretix. Maybe stop your MySQL server as a verification step that you are no longer using it::
# systemctl stop mariadb # systemctl stop mariadb
Then, restart pretix. With a local installation::
# systemctl start pretix-web pretix-worker # systemctl start pretix-web pretix-worker
With a docker installation::
# systemctl start pretix
And you're done! After you've verified everything has been copied correctly, you can delete the old MySQL database. And you're done! After you've verified everything has been copied correctly, you can delete the old MySQL database.
.. note:: Don't forget to update your backup process to back up your PostgreSQL database instead of your MySQL database now. .. note:: Don't forget to update your backup process to back up your PostgreSQL database instead of your MySQL database now.

View File

@@ -32,16 +32,10 @@ as well as the type of underlying hardware. Example:
"token": "kpp4jn8g2ynzonp6", "token": "kpp4jn8g2ynzonp6",
"hardware_brand": "Samsung", "hardware_brand": "Samsung",
"hardware_model": "Galaxy S", "hardware_model": "Galaxy S",
"os_name": "Android",
"os_version": "2.3.6",
"software_brand": "pretixdroid", "software_brand": "pretixdroid",
"software_version": "4.0.0", "software_version": "4.0.0"
"rsa_pubkey": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqh…nswIDAQAB\n-----END PUBLIC KEY-----\n"
} }
The ``rsa_pubkey`` is optional and only required for certain features such as working with reusable
media and NFC cryptography.
Every initialization token can only be used once. On success, you will receive a response containing Every initialization token can only be used once. On success, you will receive a response containing
information on your device as well as your API token: information on your device as well as your API token:
@@ -104,8 +98,6 @@ following endpoint:
{ {
"hardware_brand": "Samsung", "hardware_brand": "Samsung",
"hardware_model": "Galaxy S", "hardware_model": "Galaxy S",
"os_name": "Android",
"os_version": "2.3.6",
"software_brand": "pretixdroid", "software_brand": "pretixdroid",
"software_version": "4.1.0", "software_version": "4.1.0",
"info": {"arbitrary": "data"} "info": {"arbitrary": "data"}
@@ -141,29 +133,9 @@ The response will look like this:
"id": 3, "id": 3,
"name": "South entrance" "name": "South entrance"
} }
}, }
"server": {
"version": {
"pretix": "3.6.0.dev0",
"pretix_numeric": 30060001000
}
},
"medium_key_sets": [
{
"public_id": 3456349,
"organizer": "foo",
"active": true,
"media_type": "nfc_mf0aes",
"uid_key": "base64-encoded-encrypted-key",
"diversification_key": "base64-encoded-encrypted-key",
}
]
} }
``medium_key_sets`` will always be empty if you did not set an ``rsa_pubkey``.
The individual keys in the key sets are encrypted with the device's ``rsa_pubkey``
using ``RSA/ECB/PKCS1Padding``.
Creating a new API key Creating a new API key
---------------------- ----------------------

View File

@@ -24,8 +24,6 @@ all_events boolean Whether this de
limit_events list List of event slugs this device has access to limit_events list List of event slugs this device has access to
hardware_brand string Device hardware manufacturer (read-only) hardware_brand string Device hardware manufacturer (read-only)
hardware_model string Device hardware model (read-only) hardware_model string Device hardware model (read-only)
os_name string Device operating system name (read-only)
os_version string Device operating system version (read-only)
software_brand string Device software product (read-only) software_brand string Device software product (read-only)
software_version string Device software version (read-only) software_version string Device software version (read-only)
created datetime Creation time created datetime Creation time
@@ -78,8 +76,6 @@ Device endpoints
"security_profile": "full", "security_profile": "full",
"hardware_brand": "Zebra", "hardware_brand": "Zebra",
"hardware_model": "TC25", "hardware_model": "TC25",
"os_name": "Android",
"os_version": "8.1.0",
"software_brand": "pretixSCAN", "software_brand": "pretixSCAN",
"software_version": "1.5.1" "software_version": "1.5.1"
} }
@@ -127,8 +123,6 @@ Device endpoints
"security_profile": "full", "security_profile": "full",
"hardware_brand": "Zebra", "hardware_brand": "Zebra",
"hardware_model": "TC25", "hardware_model": "TC25",
"os_name": "Android",
"os_version": "8.1.0",
"software_brand": "pretixSCAN", "software_brand": "pretixSCAN",
"software_version": "1.5.1" "software_version": "1.5.1"
} }
@@ -179,8 +173,6 @@ Device endpoints
"initialized": null "initialized": null
"hardware_brand": null, "hardware_brand": null,
"hardware_model": null, "hardware_model": null,
"os_name": null,
"os_version": null,
"software_brand": null, "software_brand": null,
"software_version": null "software_version": null
} }

View File

@@ -31,9 +31,9 @@ subevent_mode strings Determines h
``"same"`` (discount is only applied for groups within ``"same"`` (discount is only applied for groups within
the same date), or ``"distinct"`` (discount is only applied the same date), or ``"distinct"`` (discount is only applied
for groups with no two same dates). for groups with no two same dates).
condition_all_products boolean If ``true``, the discount condition applies to all items. condition_all_products boolean If ``true``, the discount applies to all items.
condition_limit_products list of integers If ``condition_all_products`` is not set, this is a list condition_limit_products list of integers If ``condition_all_products`` is not set, this is a list
of internal item IDs that the discount condition applies to. of internal item IDs that the discount applies to.
condition_apply_to_addons boolean If ``true``, the discount applies to add-on products as well, condition_apply_to_addons boolean If ``true``, the discount applies to add-on products as well,
otherwise it only applies to top-level items. The discount never otherwise it only applies to top-level items. The discount never
applies to bundled products. applies to bundled products.
@@ -48,17 +48,6 @@ benefit_discount_matching_percent decimal (string) The percenta
benefit_only_apply_to_cheapest_n_matches integer If set higher than 0, the discount will only be applied to benefit_only_apply_to_cheapest_n_matches integer If set higher than 0, the discount will only be applied to
the cheapest matches. Useful for a "3 for 2"-style discount. the cheapest matches. Useful for a "3 for 2"-style discount.
Cannot be combined with ``condition_min_value``. Cannot be combined with ``condition_min_value``.
benefit_same_products boolean If ``true``, the discount benefit applies to the same set of items
as the condition (see above).
benefit_limit_products list of integers If ``benefit_same_products`` is not set, this is a list
of internal item IDs that the discount benefit applies to.
benefit_apply_to_addons boolean (Only used if ``benefit_same_products`` is ``false``.)
If ``true``, the discount applies to add-on products as well,
otherwise it only applies to top-level items. The discount never
applies to bundled products.
benefit_ignore_voucher_discounted boolean (Only used if ``benefit_same_products`` is ``false``.)
If ``true``, the discount does not apply to products which have
been discounted by a voucher.
======================================== ========================== ======================================================= ======================================== ========================== =======================================================
@@ -105,10 +94,6 @@ Endpoints
"condition_ignore_voucher_discounted": false, "condition_ignore_voucher_discounted": false,
"condition_min_count": 3, "condition_min_count": 3,
"condition_min_value": "0.00", "condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00", "benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1 "benefit_only_apply_to_cheapest_n_matches": 1
} }
@@ -161,10 +146,6 @@ Endpoints
"condition_ignore_voucher_discounted": false, "condition_ignore_voucher_discounted": false,
"condition_min_count": 3, "condition_min_count": 3,
"condition_min_value": "0.00", "condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00", "benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1 "benefit_only_apply_to_cheapest_n_matches": 1
} }
@@ -203,10 +184,6 @@ Endpoints
"condition_ignore_voucher_discounted": false, "condition_ignore_voucher_discounted": false,
"condition_min_count": 3, "condition_min_count": 3,
"condition_min_value": "0.00", "condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00", "benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1 "benefit_only_apply_to_cheapest_n_matches": 1
} }
@@ -234,10 +211,6 @@ Endpoints
"condition_ignore_voucher_discounted": false, "condition_ignore_voucher_discounted": false,
"condition_min_count": 3, "condition_min_count": 3,
"condition_min_value": "0.00", "condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00", "benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1 "benefit_only_apply_to_cheapest_n_matches": 1
} }
@@ -294,10 +267,6 @@ Endpoints
"condition_ignore_voucher_discounted": false, "condition_ignore_voucher_discounted": false,
"condition_min_count": 3, "condition_min_count": 3,
"condition_min_value": "0.00", "condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00", "benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1 "benefit_only_apply_to_cheapest_n_matches": 1
} }

View File

@@ -70,11 +70,6 @@ Endpoints
The ``public_url`` field has been added. The ``public_url`` field has been added.
.. versionchanged:: 5.0
The ``date_from_before``, ``date_from_after``, ``date_to_before``, and ``date_to_after`` query parameters have been
added.
.. http:get:: /api/v1/organizers/(organizer)/events/ .. http:get:: /api/v1/organizers/(organizer)/events/
Returns a list of all events within a given organizer the authenticated user/token has access to. Returns a list of all events within a given organizer the authenticated user/token has access to.
@@ -146,10 +141,6 @@ Endpoints
:query has_subevents: If set to ``true``/``false``, only events with a matching value of ``has_subevents`` are returned. :query has_subevents: If set to ``true``/``false``, only events with a matching value of ``has_subevents`` are returned.
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned. Event series are never (always) returned. :query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned. Event series are never (always) returned.
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned. Event series are never (always) returned. :query is_past: If set to ``true`` (``false``), only events that are over are (not) returned. Event series are never (always) returned.
:query date_from_after: If set to a date and time, only events that start at or after the given time are returned.
:query date_from_before: If set to a date and time, only events that start at or before the given time are returned.
:query date_to_after: If set to a date and time, only events that have an end date and end at or after the given time are returned.
:query date_to_before: If set to a date and time, only events that have an end date and end at or before the given time are returned.
:query ends_after: If set to a date and time, only events that happen during of after the given time are returned. Event series are never returned. :query ends_after: If set to a date and time, only events that happen during of after the given time are returned. Event series are never returned.
:query string ordering: Manually set the ordering of results. Valid fields to be used are ``date_from`` and :query string ordering: Manually set the ordering of results. Valid fields to be used are ``date_from`` and
``slug``. Keep in mind that ``date_from`` of event series does not really tell you anything. ``slug``. Keep in mind that ``date_from`` of event series does not really tell you anything.

View File

@@ -111,7 +111,7 @@ Listing available exporters
"input_parameters": [ "input_parameters": [
{ {
"name": "events", "name": "events",
"required": false "required": true
}, },
{ {
"name": "_format", "name": "_format",

View File

@@ -12,7 +12,6 @@ The invoice resource contains the following public fields:
Field Type Description Field Type Description
===================================== ========================== ======================================================= ===================================== ========================== =======================================================
number string Invoice number (with prefix) number string Invoice number (with prefix)
event string The slug of the parent event
order string Order code of the order this invoice belongs to order string Order code of the order this invoice belongs to
is_cancellation boolean ``true``, if this invoice is the cancellation of a is_cancellation boolean ``true``, if this invoice is the cancellation of a
different invoice. different invoice.
@@ -122,13 +121,9 @@ internal_reference string Customer's refe
The attribute ``lines.subevent`` has been added. The attribute ``lines.subevent`` has been added.
.. versionchanged:: 2023.8
The ``event`` attribute has been added. The organizer-level endpoint has been added. Endpoints
---------
List of all invoices
--------------------
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/invoices/ .. http:get:: /api/v1/organizers/(organizer)/events/(event)/invoices/
@@ -157,7 +152,6 @@ List of all invoices
"results": [ "results": [
{ {
"number": "SAMPLECONF-00001", "number": "SAMPLECONF-00001",
"event": "sampleconf",
"order": "ABC12", "order": "ABC12",
"is_cancellation": false, "is_cancellation": false,
"invoice_from_name": "Big Events LLC", "invoice_from_name": "Big Events LLC",
@@ -227,50 +221,6 @@ List of all invoices
:statuscode 401: Authentication failure :statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource. :statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
.. http:get:: /api/v1/organizers/(organizer)/invoices/
Returns a list of all invoices within all events of a given organizer (with sufficient access permissions).
Supported query parameters and output format of this endpoint are identical to the list endpoint within an event.
**Example request**:
.. sourcecode:: http
GET /api/v1/organizers/bigevents/events/sampleconf/invoices/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"count": 1,
"next": null,
"previous": null,
"results": [
{
"number": "SAMPLECONF-00001",
"event": "sampleconf",
"order": "ABC12",
...
]
}
:param organizer: The ``slug`` field of the organizer to fetch
:statuscode 200: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
Fetching individual invoices
----------------------------
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/invoices/(number)/ .. http:get:: /api/v1/organizers/(organizer)/events/(event)/invoices/(number)/
Returns information on one invoice, identified by its invoice number. Returns information on one invoice, identified by its invoice number.
@@ -293,7 +243,6 @@ Fetching individual invoices
{ {
"number": "SAMPLECONF-00001", "number": "SAMPLECONF-00001",
"event": "sampleconf",
"order": "ABC12", "order": "ABC12",
"is_cancellation": false, "is_cancellation": false,
"invoice_from_name": "Big Events LLC", "invoice_from_name": "Big Events LLC",
@@ -388,12 +337,6 @@ Fetching individual invoices
:statuscode 409: The file is not yet ready and will now be prepared. Retry the request after waiting for a few :statuscode 409: The file is not yet ready and will now be prepared. Retry the request after waiting for a few
seconds. seconds.
Modifying invoices
------------------
Invoices cannot be edited directly, but the following actions can be triggered:
.. http:post:: /api/v1/organizers/(organizer)/events/(event)/invoices/(invoice_no)/reissue/ .. http:post:: /api/v1/organizers/(organizer)/events/(event)/invoices/(invoice_no)/reissue/
Cancels the invoice and creates a new one. Cancels the invoice and creates a new one.

View File

@@ -20,7 +20,6 @@ The order resource contains the following public fields:
Field Type Description Field Type Description
===================================== ========================== ======================================================= ===================================== ========================== =======================================================
code string Order code code string Order code
event string The slug of the parent event
status string Order status, one of: status string Order status, one of:
* ``n`` pending * ``n`` pending
@@ -131,10 +130,6 @@ last_modified datetime Last modificati
The ``valid_if_pending`` attribute has been added. The ``valid_if_pending`` attribute has been added.
.. versionchanged:: 2023.8
The ``event`` attribute has been added. The organizer-level endpoint has been added.
.. _order-position-resource: .. _order-position-resource:
@@ -294,7 +289,6 @@ List of all orders
"results": [ "results": [
{ {
"code": "ABC12", "code": "ABC12",
"event": "sampleconf",
"status": "p", "status": "p",
"testmode": false, "testmode": false,
"secret": "k24fiuwvu8kxz3y1", "secret": "k24fiuwvu8kxz3y1",
@@ -447,48 +441,6 @@ List of all orders
:statuscode 401: Authentication failure :statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource. :statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
.. http:get:: /api/v1/organizers/(organizer)/orders/
Returns a list of all orders within all events of a given organizer (with sufficient access permissions).
Supported query parameters and output format of this endpoint are identical to the list endpoint within an event,
with the exception that the ``pdf_data`` parameter is not supported here.
**Example request**:
.. sourcecode:: http
GET /api/v1/organizers/bigevents/orders/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
X-Page-Generated: 2017-12-01T10:00:00Z
{
"count": 1,
"next": null,
"previous": null,
"results": [
{
"code": "ABC12",
"event": "sampleconf",
...
}
]
}
:param organizer: The ``slug`` field of the organizer to fetch
:statuscode 200: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
Fetching individual orders Fetching individual orders
-------------------------- --------------------------
@@ -514,7 +466,6 @@ Fetching individual orders
{ {
"code": "ABC12", "code": "ABC12",
"event": "sampleconf",
"status": "p", "status": "p",
"testmode": false, "testmode": false,
"secret": "k24fiuwvu8kxz3y1", "secret": "k24fiuwvu8kxz3y1",

View File

@@ -18,8 +18,7 @@ The reusable medium resource contains the following public fields:
Field Type Description Field Type Description
===================================== ========================== ======================================================= ===================================== ========================== =======================================================
id integer Internal ID of the medium id integer Internal ID of the medium
type string Type of medium, e.g. ``"barcode"``, ``"nfc_uid"`` or ``"nfc_mf0aes"``. type string Type of medium, e.g. ``"barcode"`` or ``"nfc_uid"``.
organizer string Organizer slug of the organizer who "owns" this medium.
identifier string Unique identifier of the medium. The format depends on the ``type``. identifier string Unique identifier of the medium. The format depends on the ``type``.
active boolean Whether this medium may be used. active boolean Whether this medium may be used.
created datetime Date of creation created datetime Date of creation
@@ -37,7 +36,6 @@ Existing media types are:
- ``barcode`` - ``barcode``
- ``nfc_uid`` - ``nfc_uid``
- ``nfc_mf0aes``
Endpoints Endpoints
--------- ---------
@@ -69,7 +67,6 @@ Endpoints
"results": [ "results": [
{ {
"id": 1, "id": 1,
"organizer": "bigevents",
"identifier": "ABCDEFGH", "identifier": "ABCDEFGH",
"created": "2021-04-06T13:44:22.809377Z", "created": "2021-04-06T13:44:22.809377Z",
"updated": "2021-04-06T13:44:22.809377Z", "updated": "2021-04-06T13:44:22.809377Z",
@@ -126,7 +123,6 @@ Endpoints
{ {
"id": 1, "id": 1,
"organizer": "bigevents",
"identifier": "ABCDEFGH", "identifier": "ABCDEFGH",
"created": "2021-04-06T13:44:22.809377Z", "created": "2021-04-06T13:44:22.809377Z",
"updated": "2021-04-06T13:44:22.809377Z", "updated": "2021-04-06T13:44:22.809377Z",
@@ -156,9 +152,6 @@ Endpoints
Look up a new reusable medium by its identifier. In some cases, this might lead to the automatic creation of a new Look up a new reusable medium by its identifier. In some cases, this might lead to the automatic creation of a new
medium behind the scenes. medium behind the scenes.
This endpoint, and this endpoint only, might return media from a different organizer if there is a cross-acceptance
agreement. In this case, only linked gift cards will be returned, no order position or customer records,
**Example request**: **Example request**:
.. sourcecode:: http .. sourcecode:: http
@@ -183,7 +176,6 @@ Endpoints
{ {
"id": 1, "id": 1,
"organizer": "bigevents",
"identifier": "ABCDEFGH", "identifier": "ABCDEFGH",
"created": "2021-04-06T13:44:22.809377Z", "created": "2021-04-06T13:44:22.809377Z",
"updated": "2021-04-06T13:44:22.809377Z", "updated": "2021-04-06T13:44:22.809377Z",
@@ -243,7 +235,6 @@ Endpoints
{ {
"id": 1, "id": 1,
"organizer": "bigevents",
"identifier": "ABCDEFGH", "identifier": "ABCDEFGH",
"created": "2021-04-06T13:44:22.809377Z", "created": "2021-04-06T13:44:22.809377Z",
"updated": "2021-04-06T13:44:22.809377Z", "updated": "2021-04-06T13:44:22.809377Z",
@@ -300,7 +291,6 @@ Endpoints
{ {
"id": 1, "id": 1,
"organizer": "bigevents",
"identifier": "ABCDEFGH", "identifier": "ABCDEFGH",
"created": "2021-04-06T13:44:22.809377Z", "created": "2021-04-06T13:44:22.809377Z",
"updated": "2021-04-06T13:44:22.809377Z", "updated": "2021-04-06T13:44:22.809377Z",

View File

@@ -1,10 +1,10 @@
Scheduled email rules Automated email rules
===================== =====================
Resource description Resource description
-------------------- --------------------
Scheduled email rules that specify emails that the system will send automatically at a specific point in time, e.g. Automated email rules that specify emails that the system will send automatically at a specific point in time, e.g.
the day of the event. the day of the event.
.. rst-class:: rest-resource-table .. rst-class:: rest-resource-table
@@ -18,19 +18,8 @@ subject multi-lingual string The subject of
template multi-lingual string The body of the email template multi-lingual string The body of the email
all_products boolean If ``true``, the email is sent to buyers of all products all_products boolean If ``true``, the email is sent to buyers of all products
limit_products list of integers List of product IDs, if ``all_products`` is not set limit_products list of integers List of product IDs, if ``all_products`` is not set
[**DEPRECATED**] include_pending boolean If ``true``, the email is sent to pending orders. If ``false``, include_pending boolean If ``true``, the email is sent to pending orders. If ``false``,
only paid orders are considered. only paid orders are considered.
restrict_to_status list List of order states to restrict recipients to. Valid
entries are ``p`` for paid, ``e`` for expired, ``c`` for canceled,
``n__pending_approval`` for pending approval,
``n__not_pending_approval_and_not_valid_if_pending`` for payment
pending, ``n__valid_if_pending`` for payment pending but already confirmed,
and ``n__pending_overdue`` for pending with payment overdue.
The default is ``["p", "n__valid_if_pending"]``.
checked_in_status string Check-in status to restrict recipients to. Valid strings are:
``null`` for no filtering (default), ``checked_in`` for
limiting to attendees that are or have been checked in, and
``no_checkin`` for limiting to attendees who have not checked in.
date_is_absolute boolean If ``true``, the email is set at a specific point in time. date_is_absolute boolean If ``true``, the email is set at a specific point in time.
send_date datetime If ``date_is_absolute`` is set: Date and time to send the email. send_date datetime If ``date_is_absolute`` is set: Date and time to send the email.
send_offset_days integer If ``date_is_absolute`` is not set, this is the number of days send_offset_days integer If ``date_is_absolute`` is not set, this is the number of days
@@ -48,10 +37,7 @@ send_to string Can be ``"order
or ``"both"``. or ``"both"``.
date. Otherwise it is relative to the event start date. date. Otherwise it is relative to the event start date.
===================================== ========================== ======================================================= ===================================== ========================== =======================================================
.. versionchanged:: 2023.7
The ``include_pending`` field has been deprecated.
The ``restrict_to_status`` field has been added.
Endpoints Endpoints
--------- ---------
@@ -88,12 +74,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"}, "template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true, "all_products": true,
"limit_products": [], "limit_products": [],
"restrict_to_status": [ "include_pending": false,
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": null,
"send_date": null, "send_date": null,
"send_offset_days": 1, "send_offset_days": 1,
"send_offset_time": "18:00", "send_offset_time": "18:00",
@@ -139,12 +120,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"}, "template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true, "all_products": true,
"limit_products": [], "limit_products": [],
"restrict_to_status": [ "include_pending": false,
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": null,
"send_date": null, "send_date": null,
"send_offset_days": 1, "send_offset_days": 1,
"send_offset_time": "18:00", "send_offset_time": "18:00",
@@ -181,12 +157,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"}, "template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true, "all_products": true,
"limit_products": [], "limit_products": [],
"restrict_to_status": [ "include_pending": false,
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": "checked_in",
"send_date": null, "send_date": null,
"send_offset_days": 1, "send_offset_days": 1,
"send_offset_time": "18:00", "send_offset_time": "18:00",
@@ -211,12 +182,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"}, "template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true, "all_products": true,
"limit_products": [], "limit_products": [],
"restrict_to_status": [ "include_pending": false,
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": "checked_in",
"send_date": null, "send_date": null,
"send_offset_days": 1, "send_offset_days": 1,
"send_offset_time": "18:00", "send_offset_time": "18:00",
@@ -269,12 +235,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"}, "template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true, "all_products": true,
"limit_products": [], "limit_products": [],
"restrict_to_status": [ "include_pending": false,
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": "checked_in",
"send_date": null, "send_date": null,
"send_offset_days": 1, "send_offset_days": 1,
"send_offset_time": "18:00", "send_offset_time": "18:00",

View File

@@ -63,15 +63,6 @@ last_modified datetime Last modificati
The ``search`` query parameter has been added to filter sub-events by their name or location in any language. The ``search`` query parameter has been added to filter sub-events by their name or location in any language.
.. versionchanged:: 5.0
The ``date_from_before``, ``date_from_after``, ``date_to_before``, and ``date_to_after`` query parameters have been
added.
.. versionchanged:: 2023.8.0
For the organizer-wide endpoint, the ``search`` query parameter has been modified to filter sub-events by their parent events slug too.
Endpoints Endpoints
--------- ---------
@@ -139,10 +130,6 @@ Endpoints
:query active: If set to ``true``/``false``, only events with a matching value of ``active`` are returned. :query active: If set to ``true``/``false``, only events with a matching value of ``active`` are returned.
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned. :query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned.
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned. :query is_past: If set to ``true`` (``false``), only events that are over are (not) returned.
:query date_from_after: If set to a date and time, only events that start at or after the given time are returned.
:query date_from_before: If set to a date and time, only events that start at or before the given time are returned.
:query date_to_after: If set to a date and time, only events that have an end date and end at or after the given time are returned.
:query date_to_before: If set to a date and time, only events that have an end date and end at or before the given time are returned.
:query ends_after: If set to a date and time, only events that happen during of after the given time are returned. :query ends_after: If set to a date and time, only events that happen during of after the given time are returned.
:query search: Only return events matching a given search query. :query search: Only return events matching a given search query.
:param organizer: The ``slug`` field of a valid organizer :param organizer: The ``slug`` field of a valid organizer
@@ -471,12 +458,7 @@ Endpoints
:query event__live: If set to ``true``/``false``, only events with a matching value of ``live`` on the parent event are returned. :query event__live: If set to ``true``/``false``, only events with a matching value of ``live`` on the parent event are returned.
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned. :query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned.
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned. :query is_past: If set to ``true`` (``false``), only events that are over are (not) returned.
:query date_from_after: If set to a date and time, only events that start at or after the given time are returned.
:query date_from_before: If set to a date and time, only events that start at or before the given time are returned.
:query date_to_after: If set to a date and time, only events that have an end date and end at or after the given time are returned.
:query date_to_before: If set to a date and time, only events that have an end date and end at or before the given time are returned.
:query ends_after: If set to a date and time, only events that happen during of after the given time are returned. :query ends_after: If set to a date and time, only events that happen during of after the given time are returned.
:query search: Only return events matching a given search query.
:query sales_channel: If set to a sales channel identifier, the response will only contain subevents from events available on this sales channel. :query sales_channel: If set to a sales channel identifier, the response will only contain subevents from events available on this sales channel.
:param organizer: The ``slug`` field of a valid organizer :param organizer: The ``slug`` field of a valid organizer
:param event: The ``slug`` field of the event to fetch :param event: The ``slug`` field of the event to fetch

View File

@@ -20,16 +20,11 @@ internal_name string An optional nam
rate decimal (string) Tax rate in percent rate decimal (string) Tax rate in percent
price_includes_tax boolean If ``true`` (default), tax is assumed to be included in price_includes_tax boolean If ``true`` (default), tax is assumed to be included in
the specified product price the specified product price
eu_reverse_charge boolean If ``true``, EU reverse charge rules are applied. Will eu_reverse_charge boolean If ``true``, EU reverse charge rules are applied
be ignored if custom rules are set.
home_country string Merchant country (required for reverse charge), can be home_country string Merchant country (required for reverse charge), can be
``null`` or empty string ``null`` or empty string
keep_gross_if_rate_changes boolean If ``true``, changes of the tax rate based on custom keep_gross_if_rate_changes boolean If ``true``, changes of the tax rate based on custom
rules keep the gross price constant (default is ``false``) rules keep the gross price constant (default is ``false``)
custom_rules object Dynamic rules specification. Each list element
corresponds to one rule that will be processed in order.
The current version of the schema in use can be found
`here`_.
===================================== ========================== ======================================================= ===================================== ========================== =======================================================
@@ -37,10 +32,6 @@ custom_rules object Dynamic rules s
The ``internal_name`` and ``keep_gross_if_rate_changes`` attributes have been added. The ``internal_name`` and ``keep_gross_if_rate_changes`` attributes have been added.
.. versionchanged:: 2023.6
The ``custom_rules`` attribute has been added.
Endpoints Endpoints
--------- ---------
@@ -77,7 +68,6 @@ Endpoints
"price_includes_tax": true, "price_includes_tax": true,
"eu_reverse_charge": false, "eu_reverse_charge": false,
"keep_gross_if_rate_changes": false, "keep_gross_if_rate_changes": false,
"custom_rules": null,
"home_country": "DE" "home_country": "DE"
} }
] ]
@@ -118,7 +108,6 @@ Endpoints
"price_includes_tax": true, "price_includes_tax": true,
"eu_reverse_charge": false, "eu_reverse_charge": false,
"keep_gross_if_rate_changes": false, "keep_gross_if_rate_changes": false,
"custom_rules": null,
"home_country": "DE" "home_country": "DE"
} }
@@ -167,7 +156,6 @@ Endpoints
"price_includes_tax": true, "price_includes_tax": true,
"eu_reverse_charge": false, "eu_reverse_charge": false,
"keep_gross_if_rate_changes": false, "keep_gross_if_rate_changes": false,
"custom_rules": null,
"home_country": "DE" "home_country": "DE"
} }
@@ -215,7 +203,6 @@ Endpoints
"price_includes_tax": true, "price_includes_tax": true,
"eu_reverse_charge": false, "eu_reverse_charge": false,
"keep_gross_if_rate_changes": false, "keep_gross_if_rate_changes": false,
"custom_rules": null,
"home_country": "DE" "home_country": "DE"
} }
@@ -255,5 +242,3 @@ Endpoints
:statuscode 204: no error :statuscode 204: no error
:statuscode 401: Authentication failure :statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event/rule does not exist **or** you have no permission to change it **or** this tax rule cannot be deleted since it is currently in use. :statuscode 403: The requested organizer/event/rule does not exist **or** you have no permission to change it **or** this tax rule cannot be deleted since it is currently in use.
.. _here: https://github.com/pretix/pretix/blob/master/src/pretix/static/schema/tax-rules-custom.schema.json

View File

@@ -50,10 +50,6 @@ The following values for ``action_types`` are valid with pretix core:
* ``pretix.event.order.payment.confirmed`` * ``pretix.event.order.payment.confirmed``
* ``pretix.event.order.approved`` * ``pretix.event.order.approved``
* ``pretix.event.order.denied`` * ``pretix.event.order.denied``
* ``pretix.event.orders.waitinglist.added``
* ``pretix.event.orders.waitinglist.changed``
* ``pretix.event.orders.waitinglist.deleted``
* ``pretix.event.orders.waitinglist.voucher_assigned``
* ``pretix.event.checkin`` * ``pretix.event.checkin``
* ``pretix.event.checkin.reverted`` * ``pretix.event.checkin.reverted``
* ``pretix.event.added`` * ``pretix.event.added``
@@ -67,9 +63,6 @@ The following values for ``action_types`` are valid with pretix core:
* ``pretix.event.live.deactivated`` * ``pretix.event.live.deactivated``
* ``pretix.event.testmode.activated`` * ``pretix.event.testmode.activated``
* ``pretix.event.testmode.deactivated`` * ``pretix.event.testmode.deactivated``
* ``pretix.customer.created``
* ``pretix.customer.changed``
* ``pretix.customer.anonymized``
Installed plugins might register more valid values. Installed plugins might register more valid values.

View File

@@ -18,13 +18,13 @@ If you want to add a custom view to the control area of an event, just register
.. code-block:: python .. code-block:: python
from django.urls import re_path from django.conf.urls import url
from . import views from . import views
urlpatterns = [ urlpatterns = [
re_path(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/', url(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
views.admin_view, name='backend'), views.admin_view, name='backend'),
] ]
It is required that your URL parameters are called ``organizer`` and ``event``. If you want to It is required that your URL parameters are called ``organizer`` and ``event``. If you want to

View File

@@ -61,7 +61,7 @@ Backend
item_formsets, order_search_filter_q, order_search_forms item_formsets, order_search_filter_q, order_search_forms
.. automodule:: pretix.base.signals .. automodule:: pretix.base.signals
:members: logentry_display, logentry_object_link, requiredaction_display, timeline_events, orderposition_blocked_display, customer_created, customer_signed_in :members: logentry_display, logentry_object_link, requiredaction_display, timeline_events, orderposition_blocked_display
Vouchers Vouchers
"""""""" """"""""

View File

@@ -70,8 +70,6 @@ The provider class
.. autoattribute:: settings_form_fields .. autoattribute:: settings_form_fields
.. autoattribute:: walletqueries
.. automethod:: settings_form_clean .. automethod:: settings_form_clean
.. automethod:: settings_content_render .. automethod:: settings_content_render

View File

@@ -37,7 +37,7 @@ you to execute a piece of code with a different locale:
This is very useful e.g. when sending an email to a user that has a different language than the user performing the This is very useful e.g. when sending an email to a user that has a different language than the user performing the
action that causes the mail to be sent. action that causes the mail to be sent.
.. _translation features: https://docs.djangoproject.com/en/4.2/topics/i18n/translation/ .. _translation features: https://docs.djangoproject.com/en/1.9/topics/i18n/translation/
.. _GNU gettext: https://www.gnu.org/software/gettext/ .. _GNU gettext: https://www.gnu.org/software/gettext/
.. _strings: https://django-i18nfield.readthedocs.io/en/latest/strings.html .. _strings: https://django-i18nfield.readthedocs.io/en/latest/strings.html
.. _database fields: https://django-i18nfield.readthedocs.io/en/latest/quickstart.html .. _database fields: https://django-i18nfield.readthedocs.io/en/latest/quickstart.html

View File

@@ -18,4 +18,3 @@ Contents:
email email
permissions permissions
logging logging
locking

View File

@@ -1,69 +0,0 @@
.. highlight:: python
Resource locking
================
.. versionchanged:: 2023.8
Our locking mechanism changed heavily in version 2023.8. Read `this PR`_ for background information.
One of pretix's core objectives as a ticketing system could be described as the management of scarce resources.
Specifically, the following types of scarce-ness exist in pretix:
- Quotas can limit the number of tickets available
- Seats can only be booked once
- Vouchers can only be used a limited number of times
- Some memberships can only be used a limited number of times
For all of these, it is critical that we prevent race conditions.
While for some events it wouldn't be a big deal to sell a ticket more or less, for some it would be problematic and selling the same seat twice would always be catastrophic.
We therefore implement a standardized locking approach across the system to limit concurrency in cases where it could
be problematic.
To acquire a lock on a set of quotas to create a new order that uses that quota, you should follow the following pattern::
with transaction.atomic(durable=True):
quotas = Quota.objects.filter(...)
lock_objects(quotas, shared_lock_objects=[event])
check_quota(quotas)
create_ticket()
The lock will automatically be released at the end of your database transaction.
Generally, follow the following guidelines during your development:
- **Always** acquire a lock on every **quota**, **voucher** or **seat** that you "use" during your transaction. "Use"
here means any action after which the quota, voucher or seat will be **less available**, such as creating a cart
position, creating an order, creating a blocking voucher, etc.
- There is **no need** to acquire a lock if you **free up** capacity, e.g. by canceling an order, deleting a voucher, etc.
- **Always** acquire a shared lock on the ``event`` you are working in whenever you acquire a lock on a quota, voucher,
or seat.
- Only call ``lock_objects`` **once** per transaction. If you violate this rule, `deadlocks`_ become possible.
- For best performance, call ``lock_objects`` as **late** in your transaction as possible, but always before you check
if the desired resource is still available in sufficient quantity.
Behind the scenes, the locking is implemented through `PostgreSQL advisory locks`_. You should also be aware of the following
properties of our system:
- In some situations, an exclusive lock on the ``event`` is used, such as when the system can't determine for sure which
seats will become unavailable after the transaction.
- An exclusive lock on the event is also used if you pass more than 20 objects to ``lock_objects``. This is a performance
trade-off because it would take long to acquire all of the individual locks.
- If ``lock_objects`` is unable to acquire a lock within 3 seconds, a ``LockTimeoutException`` will be thrown.
.. note::
We currently do not use ``lock_objects`` for memberships. Instead, we use ``select_for_update()`` on the membership
model. This might change in the future, but you should usually not be concerned about it since
``validate_memberships_in_order(lock=True)`` will handle it for you.
.. _this PR: https://github.com/pretix/pretix/pull/2408
.. _deadlocks: https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-DEADLOCKS
.. _PostgreSQL advisory locks: https://www.postgresql.org/docs/11/explicit-locking.html#ADVISORY-LOCKS

View File

@@ -15,41 +15,33 @@ and the admin panel is available at ``https://pretix.eu/control/event/bigorg/awe
If the organizer now configures a custom domain like ``tickets.bigorg.com``, his event will If the organizer now configures a custom domain like ``tickets.bigorg.com``, his event will
from now on be available on ``https://tickets.bigorg.com/awesomecon/``. The former URL at from now on be available on ``https://tickets.bigorg.com/awesomecon/``. The former URL at
``pretix.eu`` will redirect there. It's also possible to do this for just an event, in which ``pretix.eu`` will redirect there. However, the admin panel will still only be available
case the event will be available on ``https://tickets.awesomecon.org/``. on ``pretix.eu`` for convenience and security reasons.
However, the admin panel will still only be available on ``pretix.eu`` for convenience and security reasons.
URL routing URL routing
----------- -----------
The hard part about implementing this URL routing in Django is that The hard part about implementing this URL routing in Django is that
``https://pretix.eu/bigorg/awesomecon/`` contains two parameters of nearly arbitrary content ``https://pretix.eu/bigorg/awesomecon/`` contains two parameters of nearly arbitrary content
and ``https://tickets.bigorg.com/awesomecon/`` contains only one and ``https://tickets.awesomecon.org/`` does not contain any. and ``https://tickets.bigorg.com/awesomecon/`` contains only one. The only robust way to do
The only robust way to do this is by having *separate* URL configuration for those three cases. this is by having *separate* URL configuration for those two cases. In pretix, we call the
former our ``maindomain`` config and the latter our ``subdomain`` config. For pretix's core
modules we do some magic to avoid duplicate configuration, but for a fairly simple plugin with
only a handful of routes, we recommend just configuring the two URL sets separately.
In pretix, we therefore do not have a global URL configuration, but three, living in the following modules:
- ``pretix.multidomain.maindomain_urlconf``
- ``pretix.multidomain.organizer_domain_urlconf``
- ``pretix.multidomain.event_domain_urlconf``
We provide some helper utilities to work with these to avoid duplicate configuration of the individual URLs.
The file ``urls.py`` inside your plugin package will be loaded and scanned for URL configuration The file ``urls.py`` inside your plugin package will be loaded and scanned for URL configuration
automatically and should be provided by any plugin that provides any view. automatically and should be provided by any plugin that provides any view.
However, unlike plain Django, we look not only for a ``urlpatterns`` attribute on the module but support other
attributes like ``event_patterns`` and ``organizer_patterns`` as well.
For example, for a simple plugin that adds one URL to the backend and one event-level URL to the frontend, you can A very basic example that provides one view in the admin panel and one view in the frontend
create the following configuration in your ``urls.py``:: could look like this::
from django.urls import re_path from django.conf.urls import url
from . import views from . import views
urlpatterns = [ urlpatterns = [
re_path(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/', url(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
views.AdminView.as_view(), name='backend'), views.AdminView.as_view(), name='backend'),
] ]
event_patterns = [ event_patterns = [
@@ -60,7 +52,7 @@ create the following configuration in your ``urls.py``::
As you can see, the view in the frontend is not included in the standard Django ``urlpatterns`` As you can see, the view in the frontend is not included in the standard Django ``urlpatterns``
setting but in a separate list with the name ``event_patterns``. This will automatically prepend setting but in a separate list with the name ``event_patterns``. This will automatically prepend
the appropriate parameters to the regex (e.g. the event or the event and the organizer, depending the appropriate parameters to the regex (e.g. the event or the event and the organizer, depending
on the called domain). For organizer-level views, ``organizer_patterns`` works the same way. on the called domain).
If you only provide URLs in the admin area, you do not need to provide a ``event_patterns`` attribute. If you only provide URLs in the admin area, you do not need to provide a ``event_patterns`` attribute.
@@ -79,16 +71,11 @@ is a python method that emulates a behavior similar to ``reverse``:
.. autofunction:: pretix.multidomain.urlreverse.eventreverse .. autofunction:: pretix.multidomain.urlreverse.eventreverse
If you need to communicate the URL externally, you can use a different method to ensure that it is always an absolute URL:
.. autofunction:: pretix.multidomain.urlreverse.build_absolute_uri
In addition, there is a template tag that works similar to ``url`` but takes an event or organizer object In addition, there is a template tag that works similar to ``url`` but takes an event or organizer object
as its first argument and can be used like this:: as its first argument and can be used like this::
{% load eventurl %} {% load eventurl %}
<a href="{% eventurl request.event "presale:event.checkout" step="payment" %}">Pay</a> <a href="{% eventurl request.event "presale:event.checkout" step="payment" %}">Pay</a>
<a href="{% abseventurl request.event "presale:event.checkout" step="payment" %}">Pay</a>
Implementation details Implementation details

View File

@@ -12,4 +12,3 @@ Developer documentation
api/index api/index
structure structure
translation/index translation/index
nfc/index

View File

@@ -1,15 +0,0 @@
NFC media
=========
pretix supports using NFC chips as "reusable media", for example to store gift cards or tickets.
Most of this implementation currently lives in our proprietary app pretixPOS, but in the future might also become part of our open-source pretixSCAN solution.
Either way, we want this to be an open ecosystem and therefore document the exact mechanisms in use on the following pages.
We support multiple implementations of NFC media, each documented on its own page:
.. toctree::
:maxdepth: 2
uid
mf0aes

View File

@@ -1,113 +0,0 @@
Mifare Ultralight AES
=====================
We offer an implementation that provides a higher security level than the UID-based approach and uses the `Mifare Ultralight AES`_ chip sold by NXP.
We believe the security model of this approach is adequate to the situation where this will usually be used and we'll outline known risks below.
If you want to dive deeper into the properties of the Mifare Ultralight AES chip, we recommend reading the `data sheet`_.
Random UIDs
-----------
Mifare Ultralight AES supports a feature that returns a randomized UID every time a non-authenticated user tries to
read the UID. This has a strong privacy benefit, since no unauthorized entity can use the NFC chips to track users.
On the other hand, this reduces interoperability of the system. For example, this prevents you from using the same NFC
chips for a different purpose where you only need the UID. This will also prevent your guests from reading their UID
themselves with their phones, which might be useful e.g. in debugging situations.
Since there's no one-size-fits-all choice here, you can enable or disable this feature in the pretix organizer
settings. If you change it, the change will apply to all newly encoded chips after the change.
Key management
--------------
For every organizer, the server will generate a "key set", which consists of a publicly known ID (random 32-bit integer) and two 16-byte keys ("diversification key" and "UID key").
Using our :ref:`Device authentication mechanism <rest-deviceauth>`, an authorized device can submit a locally generated RSA public key to the server.
This key can no longer be changed on the server once it is set, thus protecting against the attack scenario of a leaked device API token.
The server will then include key sets in the response to ``/api/v1/device/info``, encrypted with the device's RSA key.
This includes all key sets generated for the organizer the device belongs to, as well as all keys of organizers that have granted sufficient access to this organizer.
The device will decrypt the key sets using its RSA key and store the key sets locally.
.. warning:: The device **will** have access to the raw key sets. Therefore, there is a risk of leaked master keys if an
authorized device is stolen or abused. Our implementation in pretixPOS attempts to make this very hard on
modern, non-rooted Android devices by keeping them encrypted with the RSA key and only storing the RSA key
in the hardware-backed keystore of the device. A sufficiently motivated attacker, however, will likely still
be able to extract the keys from a stolen device.
Encoding a chip
---------------
When a new chip is encoded, the following steps will be taken:
- The UID of the chip is retrieved.
- A chip-specific key is generated using the mechanism documented in `AN10922`_ using the "diversification key" from the
organizer's key set as the CMAC key and the diversification input concatenated in the form of ``0x01 + UID + APPID + SYSTEMID``
with the following values:
- The UID of the chip as ``UID``
- ``"eu.pretix"`` (``0x65 0x75 0x2e 0x70 0x72 0x65 0x74 0x69 0x78``) as ``APPID``
- The ``public_id`` from the organizer's key set as a 4-byte big-endian value as ``SYSTEMID``
- The chip-specific key is written to the chip as the "data protection key" (config pages 0x30 to 0x33)
- The UID key from the organizer's key set is written to the chip as the "UID retrieval key" (config pages 0x34 to 0x37)
- The config page 0x29 is set like this:
- ``RID_ACT`` (random UID) to ``1`` or ``0`` based on the organizer's configuration
- ``SEC_MSG_ACT`` (secure messaging) to ``1``
- ``AUTH0`` (first page that needs authentication) to 0x04 (first non-UID page)
- The config page 0x2A is set like this:
- ``PROT`` to ``0`` (only write access restricted, not read access)
- ``AUTHLIM`` to ``256`` (maximum number of wrong authentications before "self-destruction")
- Everything else to its default value (no lock bits are set)
- The ``public_id`` of the key set will be written to page 0x04 as a big-endian value
- The UID of the chip will be registered as a reusable medium on the server.
.. warning:: During encoding, the chip-specific key and the UID key are transmitted in plain text over the air. The
security model therefore relies on the encoding of chips being performed in a trusted physical environment
to prevent a nearby attacker from sniffing the keys with a strong antenna.
.. note:: If an attacker tries to authenticate with the chip 256 times using the wrong key, the chip will become
unusable. A chip may also become unusable if it is detached from the reader in the middle of the encoding
process (even though we've tried to implement it in a way that makes this unlikely).
Usage
-----
When a chip is presented to the NFC reader, the following steps will be taken:
- Command ``GET_VERSION`` is used to determine if it is a Mifare Ultralight AES chip (if not, abort).
- Page 0x04 is read. If it is all zeroes, the chip is considered un-encoded (abort). If it contains a value that
corresponds to the ``public_id`` of a known key set, this key set is used for all further operations. If it contains
a different value, we consider this chip to belong to a different organizer or not to a pretix system at all (abort).
- An authentication with the chip using the UID key is performed.
- The UID of the chip will be read.
- The chip-specific key will be derived using the mechanism described above in the encoding step.
- An authentication with the chip using the chip-specific key is performed. If this is fully successful, this step
proves that the chip knows the same chip-specific key as we do and is therefore an authentic chip encoded by us and
we can trust its UID value.
- The UID is transmitted to the server to fetch the correct medium.
During these steps, the keys are never transmitted in plain text and can thus not be sniffed by a nearby attacker
with a strong antenna.
.. _Mifare Ultralight AES: https://www.nxp.com/products/rfid-nfc/mifare-hf/mifare-ultralight/mifare-ultralight-aes-enhanced-security-for-limited-use-contactless-applications:MF0AESx20
.. _data sheet: https://www.nxp.com/docs/en/data-sheet/MF0AES(H)20.pdf
.. _AN10922: https://www.nxp.com/docs/en/application-note/AN10922.pdf

View File

@@ -1,10 +0,0 @@
UID-based
=========
With UID-based NFC, only the unique ID (UID) of the NFC chip is used for identification purposes.
This can be used with virtually all NFC chips that provide compatibility with the NFC reader in use, typically at least all chips that comply with ISO/IEC 14443-3A.
We make only one restriction: The UID may not start with ``08``, since that usually signifies a randomized UID that changes on every read (which would not be very useful).
.. warning:: The UID-based approach provides only a very low level of security. It is easy to clone a chip with the same
UID and impersonate someone else.

View File

@@ -96,20 +96,6 @@ http://localhost:8000/control/ for the admin view.
port (for example because you develop on `pretixdroid`_), you can check port (for example because you develop on `pretixdroid`_), you can check
`Django's documentation`_ for more options. `Django's documentation`_ for more options.
When running the local development webserver, ensure Celery is not configured
in ``pretix.cfg``. i.e., you should remove anything such as::
[celery]
backend=redis://redis:6379/2
broker=redis://redis:6379/2
If you choose to use Celery for development, you must also start a Celery worker
process::
celery -A pretix.celery_app worker -l info
However, beware that code changes will not auto-reload within Celery.
.. _`checksandtests`: .. _`checksandtests`:
Code checks and unit tests Code checks and unit tests

View File

@@ -1,143 +0,0 @@
ePayBL
======
.. note::
Since ePayBL is only available to german federal, provincial and communal entities, the following page is also
only provided in german. Should you require assistance with ePayBL and do not speak this language, please feel free
reach out to support@pretix.eu.
Einführung
----------
.. note::
Sollten Sie lediglich schnell entscheiden wollen, welcher Kontierungsmodus in den Einstellungen des pretix
ePayBL-plugins gewählt werden soll, so springen Sie direkt zur Sektion :ref:`Kontierungsmodus`.
`ePayBL`_ - das ePayment-System von Bund und Länder - ist das am weitesten verbreitete Zahlungssystem für Bundes-, Länder-
sowie kommunale Aufgabenträger. Während es nur wie eines von vielen anderen Zahlungssystemen scheint, so bietet es
seinen Nutzern besondere Vorteile, wie die automatische Erfassung von Zahlungsbelegen, dem Übertragen von Buchungen in
Haushaltskassen/-systeme sowie die automatische Erfassung von Kontierungen und Steuermerkmalen.
Rein technisch gesehen ist ePayBL hierbei nicht ein eigenständiger Zahlungsdienstleister sondern nur ein eine Komponente
im komplexen System, dass die Zahlungsabwicklung für Kommunen und Behörden ist.
Im folgenden der schematische Aufbau einer Umgebung, in welcher ePayBL zum Einsatz kommt:
.. figure:: img/epaybl_flowchart.png
:class: screenshot
Quelle: Integrationshandbuch ePayBL-Konnektor, DResearch Digital Media Systems GmbH
In diesem Schaubild stellt pretix, bzw. die von Ihnen als Veranstalter angelegten Ticketshops, das Fachverfahren dar.
ePayBL stellt das Bindeglied zwischen den Fachverfahren, Haushaltssystemen und dem eigentlichen Zahlungsdienstleister,
dem sog. ZV-Provider dar. Dieser ZV-Provider ist die Stelle, welche die eigentlichen Kundengelder einzieht und an den
Händler auszahlt. Das Gros der Zahlungsdienstleister unterstützt pretix hierbei auch direkt; sprich: Sollten Sie die
Anbindung an Ihre Haushaltssysteme nicht benötigen, kann eine direkte Anbindung in der Regel ebenso - und dies bei meist
vermindertem Aufwand - vorgenommen werden.
In der Vergangenheit zeigte sich jedoch schnell, dass nicht jeder IT-Dienstleister immer sofort die neueste Version von
ePayBL seinen Nutzern angeboten hat. Die Gründe hierfür sind mannigfaltig: Von fest vorgegebenen Update-Zyklen bis hin
zu Systemen mit speziellen Anpassungen, kann leider nicht davon ausgegangen werden, dass alle ePayBL-Systeme exakt gleich
ansprechbar sind - auch wenn es sich dabei eigentlich um einen standardisierten Dienst handelt.
Aus diesem Grund gibt es mit dem ePayBL-Konnektor eine weitere Abstraktionsschicht welche optional zwischen den
Fachverfahren und dem ePayBL-Server sitzt. Dieser Konnektor wird so gepflegt, dass er zum einen eine dauerhaft
gleichartige Schnittstelle den Fachverfahren bietet aber gleichzeitig auch mit jeder Version des ePayBL-Servers
kommunizieren kann - egal wie neu oder alt, wie regulär oder angepasst diese ist.
Im Grunde müsste daher eigentlich immer gesagt werden, dass pretix eine Anbindung an den ePayBL-Konnektor bietet; nicht
an "ePayBL" oder den "ePayBL-Server". Diese Unterscheidung kann bei der Ersteinrichtung und Anforderung von Zugangsdaten
von Relevanz sein. Da in der Praxis jedoch beide Begriffe gleichbedeutend genutzt werden, wird im Folgenden auch nur von
einer ePayBL-Anbindung die Rede sein - auch wenn explizit der Konnektor gemeint ist.
.. _`Kontierungsmodus`:
Kontierungsmodus
----------------
ePayBL ist ein Produkt, welches für die Abwicklung von Online-Zahlungsvorgängen in der Verwaltung geschaffen wurde. Ein
Umfeld, in dem klar definiert ist, was ein Kunde gerade bezahlt und wohin das Geld genau fließt. Diese Annahmen lassen
sich in einem Ticketshop wie pretix jedoch nur teilweise genauso abbilden.
Die ePayBL-Integration für pretix bietet daher zwei unterschiedliche Modi an, wie Buchungen erfasst und an ePayBL und
damit auch an die dahinterliegenden Haushaltssysteme gemeldet werden können.
Kontierung pro Position/Artikel
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Dieser Modus versucht den klassischen, behördentypischen ePayBL-Zahlungsvorgang abzubilden: Jede einzelne Position, die
ein Kunde in den Warenkorb legt, wird auch genauso 1:1 an ePayBL und die Hintergrundsysteme übermittelt.
Hierbei muss zwingend auch für jede Position ein Kennzeichen für Haushaltsstelle und Objektnummer, sowie optional ein
Kontierungsobjekt (``HREF``; bspw. ``stsl=Steuerschlüssel;psp=gsb:Geschäftsbereich,auft:Innenauftrag,kst:Kostenstelle;``
) übermittelt werden.
Diese Daten sind vom Veranstalter entsprechend für jeden in der Veranstaltung angelegten Artikel innerhalb des Tabs
"Zusätzliche Einstellungen" der Produkteinstellungen zu hinterlegen.
Während diese Einstellung eine größtmögliche Menge an Kontierungsdaten überträgt und auch ein separates Verbuchen von
Leistungen auf unterschiedliche Haushaltsstellen erlaubt, so hat diese Option auch einen großen Nachteil: Der Kunde kann
nur eine Zahlung für seine Bestellung leisten.
Während sich dies nicht nach einem großen Problem anhört, so kann dies beim Kunden zu Frust führen. pretix bietet die
Option an, dass ein Veranstalter eine Bestellung jederzeit verändern kann: Ändern von Preisen von Positionen in einer
aufgegebenen Bestellung, Zubuchen und Entfernen von Bestellpositionen, etc. Hat der Kunde seine ursprüngliche Bestellung
jedoch schon bezahlt, kann pretix nicht mehr die komplette Bestellung mit den passenden Kontierungen übertragen - es
müsste nur ein Differenz-Abbild zwischen Ursprungsbestellung und aktueller Bestellung übertragen werden. Aber auch wenn
eine "Nachmeldung" möglich wäre, so wäre ein konkretes Auflösen für was jetzt genau gezahlt wird, nicht mehr möglich.
Daher gilt bei der Nutzung der Kontierung pro Position/Artikel: Der Kunde kann nur eine (erfolgreiche) Zahlung auf seine
Bestellung leisten.
Eine weitere Einschränkung dieses Modus ist, dass aktuell keine Gebühren-Positionen (Versandkosten, Zahlungs-, Storno-
oder Servicegebühren) in diesem Modus übertragen werden können. Bitte wenden Sie sich an uns, wenn Sie diese
Funktionalität benötigen.
Kontierung pro Zahlvorgang
^^^^^^^^^^^^^^^^^^^^^^^^^^
Dieser Modus verabschiedet sich vom behördlichen "Jede Position gehört genau zu einem Haushaltskonto und muss genau
zugeordnet werden". Stattdessen werden alle Bestellpositionen - inklusive eventuell definierter Gebühren - vermengt und
nur als ein großer Warenkorb, genauer gesagt: eine einzige Position an ePayBL sowie die Hintergrundsysteme gemeldet.
Während im "pro Position/Artikel"-Modus jeder Artikel einzeln übermittelt wird und damit auch korrekt pro Artikel der
jeweilige Brutto- und Nettopreis, sowie der anfallende Steuerbetrag und ein Steuerkennzeichen (mit Hilfe des optionalen
``HREF``-Attributs) übermittelt werden, ist dies im "pro Zahlvorgang"-Modus nicht möglich.
Stattdessen übermittelt pretix nur einen Betrag für den gesamten Warenkorb: Bruttopreis == Nettopreis. Der Steuerbetrag
wird hierbei als 0 übermittelt.
Die Angabe einer Haushaltsstelle und Objektnummer, sowie optional der ``HREF``-Kontierungsinformationen ist jedoch
weiterhin notwendig - allerdings nicht mehr individuell für jeden Artikel/jede Position sondern nur für die gesamte
Bestellung. Diese Daten sind direkt in den ePayBL-Einstellungen der Veranstaltung unter Einstellungen -> Zahlung ->
ePayBL vorzunehmen
In der Praxis bedeutet dies, dass in einem angeschlossenen Haushaltssystem nicht nachvollzogen werden kann, welche Positionen
konkret erworben und bezahlt wurden - stattdessen kann nur der Fakt, dass etwas verkauft wurde erfasst werden.
Je nach Aufbau und Vorgaben der Finanzbuchhaltung kann dies jedoch ausreichend sein - wenn bspw. eine Ferienfahrt
angeboten wird und seitens der Haushaltssysteme nicht erfasst werden muss, wie viel vom Gesamtbetrag einer Bestellung
auf die Ferienfahrt an sich, auf einen Zubringerbus und einen Satz Bettwäsche entfallen ist, sondern (vereinfacht
gesagt) es ausreichend ist, dass "Eine Summe X für die Haushaltsstelle/Objektnummer geflossen ist".
Dieser Modus der Kontierung bietet Ihnen auch als Vorteil gegenüber dem vorhergehenden an, dass die Bestellungen der
Kunden jederzeit erweitert und verändert werden können - auch wenn die Ursprungsbestellung schon bezahlt wurde und nur
noch eine Differenz gezahlt wird.
Einschränkungen
---------------
Zum aktuellen Zeitpunkt erlaubt die pretix-Anbindung an ePayBL nicht das Durchführen von Erstattungen von bereits
geleisteten Zahlungen. Der Prozess hierfür unterscheidet sich von Behörde zu Behörde und muss daher händisch
durchgeführt werden.
.. _ePayBL: https://www.epaybl.de/

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

View File

@@ -18,7 +18,6 @@ If you want to **create** a plugin, please go to the
campaigns campaigns
certificates certificates
digital digital
epaybl
exhibitors exhibitors
shipping shipping
imported_secrets imported_secrets

View File

@@ -293,16 +293,6 @@ with that information::
</pretix-widget> </pretix-widget>
This works for the pretix Button as well, if you also specify a product. This works for the pretix Button as well, if you also specify a product.
As data-attributes are reactive, you can change them with JavaScript as well. Please note, that once the user
started the checkout process, we do not update the data-attributes in the existing checkout process to not
interrupt the checkout UX.
When updating data-attributes through JavaScript, make sure you do not have a stale reference to the HTMLNode of the
widget. When the widget is created, the original HTMLNode can happen to be replaced. So make sure to always have a
fresh reference like so
``document.querySelectorAll("pretix-widget, pretix-button, .pretix-widget-wrapper")``
Currently, the following attributes are understood by pretix itself: Currently, the following attributes are understood by pretix itself:
* ``data-email`` will pre-fill the order email field as well as the attendee email field (if enabled). * ``data-email`` will pre-fill the order email field as well as the attendee email field (if enabled).
@@ -339,72 +329,125 @@ Hosted or pretix Enterprise are active, you can pass the following fields:
* If you use the campaigns plugin, you can pass a campaign ID as a value to ``data-campaign``. This way, all orders * If you use the campaigns plugin, you can pass a campaign ID as a value to ``data-campaign``. This way, all orders
made through this widget will be counted towards this campaign. made through this widget will be counted towards this campaign.
* If you use the tracking plugin, you can enable cross-domain tracking. Please note: when you run your pretix-shop on a * If you use the tracking plugin, you can enable cross-domain tracking. To do so, you need to initialize the
subdomain of your main tracking domain, then you do not need cross-domain tracking as tracking automatically works pretix-widget manually. Use the html code to embed the widget and add one of the following code snippets. Make sure to
across subdomains. See :ref:`custom_domain` for how to set this up. replace all occurrences of <MEASUREMENT_ID> with your Google Analytics MEASUREMENT_ID (UA-XXXXXXX-X or G-XXXXXXXX)
Please make sure to add the embedding website to your `Referral exclusions Please also make sure to add the embedding website to your `Referral exclusions
<https://support.google.com/analytics/answer/2795830>`_ in your Google Analytics settings. <https://support.google.com/analytics/answer/2795830>`_ in your Google Analytics settings.
Add Google Analytics as you normally would with all your `window.dataLayer` and `gtag` configurations. Also add the If you use Google Analytics 4 (GA4 G-XXXXXXXX)::
widget code normally. Then you have two options:
* Block loading of the widget at most 2 seconds or until Googles client- and session-ID are loaded. This method <script async src="https://www.googletagmanager.com/gtag/js?id=<MEASUREMENT_ID>"></script>
uses `window.pretixWidgetCallback`. Note that if it takes longer than 2 seconds to load, client- and session-ID <script type="text/javascript">
are never passed to the widget. Make sure to replace all occurrences of <MEASUREMENT_ID> with your Google window.dataLayer = window.dataLayer || [];
Analytics MEASUREMENT_ID (G-XXXXXXXX):: function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', '<MEASUREMENT_ID>');
<script type="text/javascript"> window.pretixWidgetCallback = function () {
window.pretixWidgetCallback = function () { window.PretixWidget.build_widgets = false;
window.PretixWidget.build_widgets = false; window.addEventListener('load', function() { // Wait for GA to be loaded
window.addEventListener('load', function() { // Wait for GA to be loaded if (!window['google_tag_manager']) {
if (!window['google_tag_manager']) { window.PretixWidget.buildWidgets();
window.PretixWidget.buildWidgets(); return;
return; }
}
var clientId; var clientId;
var sessionId; var sessionId;
var loadingTimeout; var loadingTimeout;
function build() { function build() {
// use loadingTimeout to make sure build() is only called once // use loadingTimeout to make sure build() is only called once
if (!loadingTimeout) return; if (!loadingTimeout) return;
window.clearTimeout(loadingTimeout); window.clearTimeout(loadingTimeout);
loadingTimeout = null; loadingTimeout = null;
if (clientId) window.PretixWidget.widget_data["tracking-ga-id"] = clientId; if (clientId) window.PretixWidget.widget_data["tracking-ga-id"] = clientId;
if (sessionId) window.PretixWidget.widget_data["tracking-ga-sessid"] = sessionId; if (sessionId) window.PretixWidget.widget_data["tracking-ga-sessid"] = sessionId;
window.PretixWidget.buildWidgets(); window.PretixWidget.buildWidgets();
}; };
// make sure to build pretix-widgets if gtag fails to load either client_id or session_id // make sure to build pretix-widgets if gtag fails to load either client_id or session_id
loadingTimeout = window.setTimeout(build, 2000); loadingTimeout = window.setTimeout(build, 2000);
gtag('get', '<MEASUREMENT_ID>', 'client_id', function(id) {
clientId = id;
if (sessionId !== undefined) build();
});
gtag('get', '<MEASUREMENT_ID>', 'session_id', function(id) {
sessionId = id;
if (clientId !== undefined) build();
});
});
};
</script>
* Or asynchronously set data-attributes: the widgets are shown immediately, but once the user has started checkout,
  data-attributes are not updated. Make sure to replace all occurrences of <MEASUREMENT_ID> with your Google
  Analytics MEASUREMENT_ID (G-XXXXXXXX)::
<script type="text/javascript">
window.addEventListener('load', function() {
gtag('get', '<MEASUREMENT_ID>', 'client_id', function(id) { gtag('get', '<MEASUREMENT_ID>', 'client_id', function(id) {
const widgets = document.querySelectorAll("pretix-widget, pretix-button, .pretix-widget-wrapper"); clientId = id;
widgets.forEach(widget => widget.setAttribute("data-tracking-ga-id", id)) if (sessionId !== undefined) build();
}); });
gtag('get', '<MEASUREMENT_ID>', 'session_id', function(id) { gtag('get', '<MEASUREMENT_ID>', 'session_id', function(id) {
const widgets = document.querySelectorAll("pretix-widget, pretix-button, .pretix-widget-wrapper"); sessionId = id;
widgets.forEach(widget => widget.setAttribute("data-tracking-ga-sessid", id)) if (clientId !== undefined) build();
}); });
}); });
</script> };
</script>
If you use Universal Analytics with ``gtag.js`` (UA-XXXXXXX-X)::
<script async src="https://www.googletagmanager.com/gtag/js?id=<MEASUREMENT_ID>"></script>
<script type="text/javascript">
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', '<MEASUREMENT_ID>');
window.pretixWidgetCallback = function () {
window.PretixWidget.build_widgets = false;
window.addEventListener('load', function() { // Wait for GA to be loaded
if (!window['google_tag_manager']) {
window.PretixWidget.buildWidgets();
return;
}
// make sure to build pretix-widgets if gtag fails to load client_id
var loadingTimeout = window.setTimeout(function() {
loadingTimeout = null;
window.PretixWidget.buildWidgets();
}, 1000);
gtag('get', '<MEASUREMENT_ID>', 'client_id', function(id) {
if (loadingTimeout) {
window.clearTimeout(loadingTimeout);
window.PretixWidget.widget_data["tracking-ga-id"] = id;
window.PretixWidget.buildWidgets();
}
});
});
};
</script>
If you use ``analytics.js`` (Universal Analytics)::
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
ga('create', '<MEASUREMENT_ID>', 'auto');
ga('send', 'pageview');
window.pretixWidgetCallback = function () {
window.PretixWidget.build_widgets = false;
window.addEventListener('load', function() { // Wait for GA to be loaded
if (!window['ga'] || !ga.create) {
// Tracking is probably blocked
window.PretixWidget.buildWidgets()
return;
}
var loadingTimeout = window.setTimeout(function() {
loadingTimeout = null;
window.PretixWidget.buildWidgets();
}, 1000);
ga(function(tracker) {
if (loadingTimeout) {
window.clearTimeout(loadingTimeout);
window.PretixWidget.widget_data["tracking-ga-id"] = tracker.get('clientId');
window.PretixWidget.buildWidgets();
}
});
});
};
</script>
.. _Let's Encrypt: https://letsencrypt.org/ .. _Let's Encrypt: https://letsencrypt.org/

View File

@@ -22,7 +22,7 @@ classifiers = [
"Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.11",
"Framework :: Django :: 4.1", "Framework :: Django :: 3.2",
] ]
dependencies = [ dependencies = [
@@ -30,13 +30,13 @@ dependencies = [
"babel", "babel",
"BeautifulSoup4==4.12.*", "BeautifulSoup4==4.12.*",
"bleach==5.0.*", "bleach==5.0.*",
"celery==5.3.*", "celery==5.2.*",
"chardet==5.1.*", "chardet==5.1.*",
"cryptography>=3.4.2", "cryptography>=3.4.2",
"css-inline==0.8.*", "css-inline==0.8.*",
"defusedcsv>=1.1.0", "defusedcsv>=1.1.0",
"dj-static", "dj-static",
"Django==4.2.*", "Django==3.2.*,>=3.2.18",
"django-bootstrap3==23.1.*", "django-bootstrap3==23.1.*",
"django-compressor==4.3.*", "django-compressor==4.3.*",
"django-countries==7.5.*", "django-countries==7.5.*",
@@ -49,6 +49,7 @@ dependencies = [
"django-libsass==0.9", "django-libsass==0.9",
"django-localflavor==4.0", "django-localflavor==4.0",
"django-markup", "django-markup",
"django-mysql",
"django-oauth-toolkit==2.2.*", "django-oauth-toolkit==2.2.*",
"django-otp==1.2.*", "django-otp==1.2.*",
"django-phonenumber-field==7.1.*", "django-phonenumber-field==7.1.*",
@@ -59,10 +60,10 @@ dependencies = [
"dnspython==2.3.*", "dnspython==2.3.*",
"drf_ujson2==1.7.*", "drf_ujson2==1.7.*",
"geoip2==4.*", "geoip2==4.*",
"importlib_metadata==6.*", # Polyfill, we can probably drop this once we require Python 3.10+ "importlib_metadata==6.6.*", # Polyfill, we can probably drop this once we require Python 3.10+
"isoweek", "isoweek",
"jsonschema", "jsonschema",
"kombu==5.3.*", "kombu==5.2.*",
"libsass==0.22.*", "libsass==0.22.*",
"lxml", "lxml",
"markdown==3.4.3", # 3.3.5 requires importlib-metadata>=4.4, but django-bootstrap3 requires importlib-metadata<3. "markdown==3.4.3", # 3.3.5 requires importlib-metadata>=4.4, but django-bootstrap3 requires importlib-metadata<3.
@@ -73,10 +74,9 @@ dependencies = [
"packaging", "packaging",
"paypalrestsdk==1.13.*", "paypalrestsdk==1.13.*",
"paypal-checkout-serversdk==1.0.*", "paypal-checkout-serversdk==1.0.*",
"PyJWT==2.7.*", "PyJWT==2.6.*",
"phonenumberslite==8.13.*", "phonenumberslite==8.13.*",
"Pillow==9.5.*", "Pillow==9.5.*",
"pretix-plugin-build",
"protobuf==4.23.*", "protobuf==4.23.*",
"psycopg2-binary", "psycopg2-binary",
"pycountry", "pycountry",
@@ -87,12 +87,11 @@ dependencies = [
"python-dateutil==2.8.*", "python-dateutil==2.8.*",
"python-u2flib-server==4.*", "python-u2flib-server==4.*",
"pytz", "pytz",
"pytz-deprecation-shim==0.1.*",
"pyuca", "pyuca",
"qrcode==7.4.*", "qrcode==7.4.*",
"redis==4.6.*", "redis==4.5.*,>=4.5.4",
"reportlab==4.0.*", "reportlab==4.0.*",
"requests==2.31.*", "requests==2.30.*",
"sentry-sdk==1.15.*", "sentry-sdk==1.15.*",
"sepaxml==2.6.*", "sepaxml==2.6.*",
"slimit", "slimit",
@@ -109,11 +108,10 @@ dependencies = [
[project.optional-dependencies] [project.optional-dependencies]
memcached = ["pylibmc"] memcached = ["pylibmc"]
mysql = ["mysqlclient"]
dev = [ dev = [
"aiohttp==3.8.*",
"coverage", "coverage",
"coveralls", "coveralls",
"fakeredis==2.18.*",
"flake8==6.0.*", "flake8==6.0.*",
"freezegun", "freezegun",
"isort==5.12.*", "isort==5.12.*",
@@ -121,14 +119,13 @@ dev = [
"potypo", "potypo",
"pycodestyle==2.10.*", "pycodestyle==2.10.*",
"pyflakes==3.0.*", "pyflakes==3.0.*",
"pytest-asyncio",
"pytest-cache", "pytest-cache",
"pytest-cov", "pytest-cov",
"pytest-django==4.*", "pytest-django==4.*",
"pytest-mock==3.10.*", "pytest-mock==3.10.*",
"pytest-rerunfailures==11.*", "pytest-rerunfailures==11.*",
"pytest-sugar", "pytest-sugar",
"pytest-xdist==3.3.*", "pytest-xdist==3.2.*",
"pytest==7.3.*", "pytest==7.3.*",
"responses", "responses",
] ]

View File

@@ -29,6 +29,7 @@ sys.path.append(str(Path.cwd() / 'src'))
def _CustomBuild(*args, **kwargs): def _CustomBuild(*args, **kwargs):
print(sys.path)
from pretix._build import CustomBuild from pretix._build import CustomBuild
return CustomBuild(*args, **kwargs) return CustomBuild(*args, **kwargs)

View File

@@ -19,4 +19,4 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see # You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>. # <https://www.gnu.org/licenses/>.
# #
__version__ = "2023.8.0.dev0" __version__ = "4.21.0.dev0"

View File

@@ -30,6 +30,7 @@ from django.utils.translation import gettext_lazy as _ # NOQA
BASE_DIR = os.path.dirname(os.path.dirname(__file__)) BASE_DIR = os.path.dirname(os.path.dirname(__file__))
USE_I18N = True USE_I18N = True
USE_L10N = True
USE_TZ = True USE_TZ = True
INSTALLED_APPS = [ INSTALLED_APPS = [
@@ -67,7 +68,6 @@ INSTALLED_APPS = [
'oauth2_provider', 'oauth2_provider',
'phonenumber_field', 'phonenumber_field',
'statici18n', 'statici18n',
'django.forms', # after pretix.base for overrides
] ]
FORMAT_MODULE_PATH = [ FORMAT_MODULE_PATH = [
@@ -89,7 +89,6 @@ ALL_LANGUAGES = [
('fi', _('Finnish')), ('fi', _('Finnish')),
('gl', _('Galician')), ('gl', _('Galician')),
('el', _('Greek')), ('el', _('Greek')),
('id', _('Indonesian')),
('it', _('Italian')), ('it', _('Italian')),
('lv', _('Latvian')), ('lv', _('Latvian')),
('pl', _('Polish')), ('pl', _('Polish')),
@@ -181,8 +180,6 @@ TEMPLATES = [
}, },
] ]
FORM_RENDERER = "django.forms.renderers.TemplatesSetting"
STATIC_ROOT = os.path.join(os.path.dirname(__file__), 'static.dist') STATIC_ROOT = os.path.join(os.path.dirname(__file__), 'static.dist')
STATICFILES_FINDERS = ( STATICFILES_FINDERS = (
@@ -197,14 +194,7 @@ STATICFILES_DIRS = [
STATICI18N_ROOT = os.path.join(BASE_DIR, "pretix/static") STATICI18N_ROOT = os.path.join(BASE_DIR, "pretix/static")
STORAGES = { STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
"default": {
"BACKEND": "django.core.files.storage.FileSystemStorage",
},
"staticfiles": {
"BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
},
}
# if os.path.exists(os.path.join(DATA_DIR, 'static')): # if os.path.exists(os.path.join(DATA_DIR, 'static')):
# STATICFILES_DIRS.insert(0, os.path.join(DATA_DIR, 'static')) # STATICFILES_DIRS.insert(0, os.path.join(DATA_DIR, 'static'))
@@ -260,20 +250,3 @@ PRETIX_PRIMARY_COLOR = '#8E44B3'
# stressful for some cache setups so it is enabled by default and currently can't be enabled through pretix.cfg # stressful for some cache setups so it is enabled by default and currently can't be enabled through pretix.cfg
CACHE_LARGE_VALUES_ALLOWED = False CACHE_LARGE_VALUES_ALLOWED = False
CACHE_LARGE_VALUES_ALIAS = 'default' CACHE_LARGE_VALUES_ALIAS = 'default'
# Allowed file extensions for various places plus matching Pillow formats.
# Never allow EPS, it is full of dangerous bugs.
FILE_UPLOAD_EXTENSIONS_IMAGE = (".png", ".jpg", ".gif", ".jpeg")
PILLOW_FORMATS_IMAGE = ('PNG', 'GIF', 'JPEG')
FILE_UPLOAD_EXTENSIONS_FAVICON = (".ico", ".png", "jpg", ".gif", ".jpeg")
FILE_UPLOAD_EXTENSIONS_QUESTION_IMAGE = (".png", "jpg", ".gif", ".jpeg", ".bmp", ".tif", ".tiff", ".jfif")
PILLOW_FORMATS_QUESTIONS_IMAGE = ('PNG', 'GIF', 'JPEG', 'BMP', 'TIFF')
FILE_UPLOAD_EXTENSIONS_EMAIL_ATTACHMENT = (
".png", ".jpg", ".gif", ".jpeg", ".pdf", ".txt", ".docx", ".gif", ".svg",
".pptx", ".ppt", ".doc", ".xlsx", ".xls", ".jfif", ".heic", ".heif", ".pages",
".bmp", ".tif", ".tiff"
)
FILE_UPLOAD_EXTENSIONS_OTHER = FILE_UPLOAD_EXTENSIONS_EMAIL_ATTACHMENT

View File

@@ -45,10 +45,6 @@ def npm_install():
class CustomBuild(build): class CustomBuild(build):
def run(self): def run(self):
if "src" not in os.listdir(".") or "pretix" not in os.listdir("src"):
# Only run this command on the pretix module, not on other modules even if it's registered globally
# in some cases
return build.run(self)
if "PRETIX_DOCKER_BUILD" in os.environ: if "PRETIX_DOCKER_BUILD" in os.environ:
return # this is a hack to allow calling this file early in our docker build to make use of caching return # this is a hack to allow calling this file early in our docker build to make use of caching
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pretix._build_settings") os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pretix._build_settings")
@@ -72,10 +68,6 @@ class CustomBuild(build):
class CustomBuildExt(build_ext): class CustomBuildExt(build_ext):
def run(self): def run(self):
if "src" not in os.listdir(".") or "pretix" not in os.listdir("src"):
# Only run this command on the pretix module, not on other modules even if it's registered globally
# in some cases
return build_ext.run(self)
if "PRETIX_DOCKER_BUILD" in os.environ: if "PRETIX_DOCKER_BUILD" in os.environ:
return # this is a hack to allow calling this file early in our docker build to make use of caching return # this is a hack to allow calling this file early in our docker build to make use of caching
npm_install() npm_install()

View File

@@ -223,7 +223,6 @@ class PretixPosSecurityProfile(AllowListSecurityProfile):
('POST', 'api-v1:checkinrpc.redeem'), ('POST', 'api-v1:checkinrpc.redeem'),
('GET', 'api-v1:checkinrpc.search'), ('GET', 'api-v1:checkinrpc.search'),
('POST', 'api-v1:reusablemedium-lookup'), ('POST', 'api-v1:reusablemedium-lookup'),
('POST', 'api-v1:reusablemedium-list'),
) )

View File

@@ -59,7 +59,7 @@ class IdempotencyMiddleware:
auth_hash = sha1(auth_hash_parts.encode()).hexdigest() auth_hash = sha1(auth_hash_parts.encode()).hexdigest()
idempotency_key = request.headers.get('X-Idempotency-Key', '') idempotency_key = request.headers.get('X-Idempotency-Key', '')
with transaction.atomic(durable=True): with transaction.atomic():
call, created = ApiCall.objects.select_for_update(of=OF_SELF).get_or_create( call, created = ApiCall.objects.select_for_update(of=OF_SELF).get_or_create(
auth_hash=auth_hash, auth_hash=auth_hash,
idempotency_key=idempotency_key, idempotency_key=idempotency_key,
@@ -75,7 +75,7 @@ class IdempotencyMiddleware:
if created: if created:
resp = self.get_response(request) resp = self.get_response(request)
with transaction.atomic(durable=True): with transaction.atomic():
if resp.status_code in (409, 429, 500, 503): if resp.status_code in (409, 429, 500, 503):
# This is the exception: These calls are *meant* to be retried! # This is the exception: These calls are *meant* to be retried!
call.delete() call.delete()

View File

@@ -19,8 +19,6 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see # You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>. # <https://www.gnu.org/licenses/>.
# #
import json
from rest_framework import serializers from rest_framework import serializers
@@ -48,16 +46,3 @@ class AsymmetricField(serializers.Field):
def run_validation(self, data=serializers.empty): def run_validation(self, data=serializers.empty):
return self.write.run_validation(data) return self.write.run_validation(data)
class CompatibleJSONField(serializers.JSONField):
def to_internal_value(self, data):
try:
return json.dumps(data)
except (TypeError, ValueError):
self.fail('invalid')
def to_representation(self, value):
if value:
return json.loads(value)
return value

View File

@@ -32,13 +32,11 @@ class DiscountSerializer(I18nAwareModelSerializer):
'available_until', 'subevent_mode', 'condition_all_products', 'condition_limit_products', 'available_until', 'subevent_mode', 'condition_all_products', 'condition_limit_products',
'condition_apply_to_addons', 'condition_min_count', 'condition_min_value', 'condition_apply_to_addons', 'condition_min_count', 'condition_min_value',
'benefit_discount_matching_percent', 'benefit_only_apply_to_cheapest_n_matches', 'benefit_discount_matching_percent', 'benefit_only_apply_to_cheapest_n_matches',
'benefit_same_products', 'benefit_limit_products', 'benefit_apply_to_addons', 'condition_ignore_voucher_discounted')
'benefit_ignore_voucher_discounted', 'condition_ignore_voucher_discounted')
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self.fields['condition_limit_products'].queryset = self.context['event'].items.all() self.fields['condition_limit_products'].queryset = self.context['event'].items.all()
self.fields['benefit_limit_products'].queryset = self.context['event'].items.all()
def validate(self, data): def validate(self, data):
data = super().validate(data) data = super().validate(data)

View File

@@ -46,7 +46,6 @@ from rest_framework import serializers
from rest_framework.fields import ChoiceField, Field from rest_framework.fields import ChoiceField, Field
from rest_framework.relations import SlugRelatedField from rest_framework.relations import SlugRelatedField
from pretix.api.serializers import CompatibleJSONField
from pretix.api.serializers.i18n import I18nAwareModelSerializer from pretix.api.serializers.i18n import I18nAwareModelSerializer
from pretix.api.serializers.settings import SettingsSerializer from pretix.api.serializers.settings import SettingsSerializer
from pretix.base.models import Device, Event, TaxRule, TeamAPIToken from pretix.base.models import Device, Event, TaxRule, TeamAPIToken
@@ -54,7 +53,6 @@ from pretix.base.models.event import SubEvent
from pretix.base.models.items import ( from pretix.base.models.items import (
ItemMetaProperty, SubEventItem, SubEventItemVariation, ItemMetaProperty, SubEventItem, SubEventItemVariation,
) )
from pretix.base.models.tax import CustomRulesValidator
from pretix.base.services.seating import ( from pretix.base.services.seating import (
SeatProtected, generate_seats, validate_plan_change, SeatProtected, generate_seats, validate_plan_change,
) )
@@ -652,16 +650,9 @@ class SubEventSerializer(I18nAwareModelSerializer):
class TaxRuleSerializer(CountryFieldMixin, I18nAwareModelSerializer): class TaxRuleSerializer(CountryFieldMixin, I18nAwareModelSerializer):
custom_rules = CompatibleJSONField(
validators=[CustomRulesValidator()],
required=False,
allow_null=True,
)
class Meta: class Meta:
model = TaxRule model = TaxRule
fields = ('id', 'name', 'rate', 'price_includes_tax', 'eu_reverse_charge', 'home_country', 'internal_name', fields = ('id', 'name', 'rate', 'price_includes_tax', 'eu_reverse_charge', 'home_country', 'internal_name', 'keep_gross_if_rate_changes')
'keep_gross_if_rate_changes', 'custom_rules')
class EventSettingsSerializer(SettingsSerializer): class EventSettingsSerializer(SettingsSerializer):
@@ -728,7 +719,6 @@ class EventSettingsSerializer(SettingsSerializer):
'payment_term_minutes', 'payment_term_minutes',
'payment_term_last', 'payment_term_last',
'payment_term_expire_automatically', 'payment_term_expire_automatically',
'payment_term_expire_delay_days',
'payment_term_accept_late', 'payment_term_accept_late',
'payment_explanation', 'payment_explanation',
'payment_pending_hidden', 'payment_pending_hidden',
@@ -817,10 +807,6 @@ class EventSettingsSerializer(SettingsSerializer):
'reusable_media_type_nfc_uid', 'reusable_media_type_nfc_uid',
'reusable_media_type_nfc_uid_autocreate_giftcard', 'reusable_media_type_nfc_uid_autocreate_giftcard',
'reusable_media_type_nfc_uid_autocreate_giftcard_currency', 'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes_random_uid',
] ]
readonly_fields = [ readonly_fields = [
# These are read-only since they are currently only settable on organizers, not events # These are read-only since they are currently only settable on organizers, not events
@@ -830,10 +816,6 @@ class EventSettingsSerializer(SettingsSerializer):
'reusable_media_type_nfc_uid', 'reusable_media_type_nfc_uid',
'reusable_media_type_nfc_uid_autocreate_giftcard', 'reusable_media_type_nfc_uid_autocreate_giftcard',
'reusable_media_type_nfc_uid_autocreate_giftcard_currency', 'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes_random_uid',
] ]
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
@@ -902,8 +884,6 @@ class DeviceEventSettingsSerializer(EventSettingsSerializer):
'name_scheme', 'name_scheme',
'reusable_media_type_barcode', 'reusable_media_type_barcode',
'reusable_media_type_nfc_uid', 'reusable_media_type_nfc_uid',
'reusable_media_type_nfc_mf0aes',
'reusable_media_type_nfc_mf0aes_random_uid',
'system_question_order', 'system_question_order',
] ]

View File

@@ -93,7 +93,7 @@ class JobRunSerializer(serializers.Serializer):
if events is not None and not isinstance(ex, OrganizerLevelExportMixin): if events is not None and not isinstance(ex, OrganizerLevelExportMixin):
self.fields["events"] = serializers.SlugRelatedField( self.fields["events"] = serializers.SlugRelatedField(
queryset=events, queryset=events,
required=False, required=True,
allow_empty=False, allow_empty=False,
slug_field='slug', slug_field='slug',
many=True many=True
@@ -156,9 +156,8 @@ class JobRunSerializer(serializers.Serializer):
def to_internal_value(self, data): def to_internal_value(self, data):
if isinstance(data, QueryDict): if isinstance(data, QueryDict):
data = data.copy() data = data.copy()
for k, v in self.fields.items(): for k, v in self.fields.items():
if isinstance(v, serializers.ManyRelatedField) and k not in data and k != "events": if isinstance(v, serializers.ManyRelatedField) and k not in data:
data[k] = [] data[k] = []
for fk in self.fields.keys(): for fk in self.fields.keys():

View File

@@ -60,8 +60,6 @@ class NestedGiftCardSerializer(GiftCardSerializer):
class ReusableMediaSerializer(I18nAwareModelSerializer): class ReusableMediaSerializer(I18nAwareModelSerializer):
organizer = serializers.SlugRelatedField(slug_field='slug', read_only=True)
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
@@ -113,7 +111,6 @@ class ReusableMediaSerializer(I18nAwareModelSerializer):
model = ReusableMedium model = ReusableMedium
fields = ( fields = (
'id', 'id',
'organizer',
'created', 'created',
'updated', 'updated',
'type', 'type',

View File

@@ -19,16 +19,15 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see # You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>. # <https://www.gnu.org/licenses/>.
# #
import json
import logging import logging
import os import os
from collections import Counter, defaultdict from collections import Counter, defaultdict
from datetime import timedelta
from decimal import Decimal from decimal import Decimal
import pycountry import pycountry
from django.conf import settings from django.conf import settings
from django.core.files import File from django.core.files import File
from django.db import models
from django.db.models import F, Q from django.db.models import F, Q
from django.utils.encoding import force_str from django.utils.encoding import force_str
from django.utils.timezone import now from django.utils.timezone import now
@@ -40,7 +39,6 @@ from rest_framework.exceptions import ValidationError
from rest_framework.relations import SlugRelatedField from rest_framework.relations import SlugRelatedField
from rest_framework.reverse import reverse from rest_framework.reverse import reverse
from pretix.api.serializers import CompatibleJSONField
from pretix.api.serializers.event import SubEventSerializer from pretix.api.serializers.event import SubEventSerializer
from pretix.api.serializers.i18n import I18nAwareModelSerializer from pretix.api.serializers.i18n import I18nAwareModelSerializer
from pretix.api.serializers.item import ( from pretix.api.serializers.item import (
@@ -60,11 +58,10 @@ from pretix.base.models.orders import (
) )
from pretix.base.pdf import get_images, get_variables from pretix.base.pdf import get_images, get_variables
from pretix.base.services.cart import error_messages from pretix.base.services.cart import error_messages
from pretix.base.services.locking import LOCK_TRUST_WINDOW, lock_objects from pretix.base.services.locking import NoLockManager
from pretix.base.services.pricing import ( from pretix.base.services.pricing import (
apply_discounts, get_line_price, get_listed_price, is_included_for_free, apply_discounts, get_line_price, get_listed_price, is_included_for_free,
) )
from pretix.base.services.quotas import QuotaAvailability
from pretix.base.settings import COUNTRIES_WITH_STATE_IN_ADDRESS from pretix.base.settings import COUNTRIES_WITH_STATE_IN_ADDRESS
from pretix.base.signals import register_ticket_outputs from pretix.base.signals import register_ticket_outputs
from pretix.helpers.countries import CachedCountries from pretix.helpers.countries import CachedCountries
@@ -286,12 +283,11 @@ class FailedCheckinSerializer(I18nAwareModelSerializer):
raw_item = serializers.PrimaryKeyRelatedField(queryset=Item.objects.none(), required=False, allow_null=True) raw_item = serializers.PrimaryKeyRelatedField(queryset=Item.objects.none(), required=False, allow_null=True)
raw_variation = serializers.PrimaryKeyRelatedField(queryset=ItemVariation.objects.none(), required=False, allow_null=True) raw_variation = serializers.PrimaryKeyRelatedField(queryset=ItemVariation.objects.none(), required=False, allow_null=True)
raw_subevent = serializers.PrimaryKeyRelatedField(queryset=SubEvent.objects.none(), required=False, allow_null=True) raw_subevent = serializers.PrimaryKeyRelatedField(queryset=SubEvent.objects.none(), required=False, allow_null=True)
nonce = serializers.CharField(required=False, allow_null=True)
class Meta: class Meta:
model = Checkin model = Checkin
fields = ('error_reason', 'error_explanation', 'raw_barcode', 'raw_item', 'raw_variation', fields = ('error_reason', 'error_explanation', 'raw_barcode', 'raw_item', 'raw_variation',
'raw_subevent', 'nonce', 'datetime', 'type', 'position') 'raw_subevent', 'datetime', 'type', 'position')
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
@@ -376,15 +372,11 @@ class PdfDataSerializer(serializers.Field):
self.context['vars_images'] = get_images(self.context['event']) self.context['vars_images'] = get_images(self.context['event'])
for k, f in self.context['vars'].items(): for k, f in self.context['vars'].items():
if 'evaluate_bulk' in f: try:
# Will be evaluated later by our list serializers res[k] = f['evaluate'](instance, instance.order, ev)
res[k] = (f['evaluate_bulk'], instance) except:
else: logger.exception('Evaluating PDF variable failed')
try: res[k] = '(error)'
res[k] = f['evaluate'](instance, instance.order, ev)
except:
logger.exception('Evaluating PDF variable failed')
res[k] = '(error)'
if not hasattr(ev, '_cached_meta_data'): if not hasattr(ev, '_cached_meta_data'):
ev._cached_meta_data = ev.meta_data ev._cached_meta_data = ev.meta_data
@@ -437,38 +429,6 @@ class PdfDataSerializer(serializers.Field):
return res return res
class OrderPositionListSerializer(serializers.ListSerializer):
def to_representation(self, data):
# We have a custom implementation of this method because PdfDataSerializer() might keep some elements unevaluated
# with a (callable, input) tuple. We'll loop over these entries and evaluate them bulk-wise to save on SQL queries.
if isinstance(self.parent, OrderSerializer) and isinstance(self.parent.parent, OrderListSerializer):
# Do not execute our custom code because it will be executed by OrderListSerializer later for the
# full result set.
return super().to_representation(data)
iterable = data.all() if isinstance(data, models.Manager) else data
data = []
evaluate_queue = defaultdict(list)
for item in iterable:
entry = self.child.to_representation(item)
if "pdf_data" in entry:
for k, v in entry["pdf_data"].items():
if isinstance(v, tuple) and callable(v[0]):
evaluate_queue[v[0]].append((v[1], entry, k))
data.append(entry)
for func, entries in evaluate_queue.items():
results = func([item for (item, entry, k) in entries])
for (item, entry, k), result in zip(entries, results):
entry["pdf_data"][k] = result
return data
class OrderPositionSerializer(I18nAwareModelSerializer): class OrderPositionSerializer(I18nAwareModelSerializer):
checkins = CheckinSerializer(many=True, read_only=True) checkins = CheckinSerializer(many=True, read_only=True)
answers = AnswerSerializer(many=True) answers = AnswerSerializer(many=True)
@@ -480,7 +440,6 @@ class OrderPositionSerializer(I18nAwareModelSerializer):
attendee_name = serializers.CharField(required=False) attendee_name = serializers.CharField(required=False)
class Meta: class Meta:
list_serializer_class = OrderPositionListSerializer
model = OrderPosition model = OrderPosition
fields = ('id', 'order', 'positionid', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts', fields = ('id', 'order', 'positionid', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts',
'company', 'street', 'zipcode', 'city', 'country', 'state', 'discount', 'company', 'street', 'zipcode', 'city', 'country', 'state', 'discount',
@@ -509,20 +468,6 @@ class OrderPositionSerializer(I18nAwareModelSerializer):
def validate(self, data): def validate(self, data):
raise TypeError("this serializer is readonly") raise TypeError("this serializer is readonly")
def to_representation(self, data):
if isinstance(self.parent, (OrderListSerializer, OrderPositionListSerializer)):
# Do not execute our custom code because it will be executed by OrderListSerializer later for the
# full result set.
return super().to_representation(data)
entry = super().to_representation(data)
if "pdf_data" in entry:
for k, v in entry["pdf_data"].items():
if isinstance(v, tuple) and callable(v[0]):
entry["pdf_data"][k] = v[0]([v[1]])[0]
return entry
class RequireAttentionField(serializers.Field): class RequireAttentionField(serializers.Field):
def to_representation(self, instance: OrderPosition): def to_representation(self, instance: OrderPosition):
@@ -590,9 +535,8 @@ class OrderPaymentTypeField(serializers.Field):
# TODO: Remove after pretix 2.2 # TODO: Remove after pretix 2.2
def to_representation(self, instance: Order): def to_representation(self, instance: Order):
t = None t = None
if instance.pk: for p in instance.payments.all():
for p in instance.payments.all(): t = p.provider
t = p.provider
return t return t
@@ -600,10 +544,10 @@ class OrderPaymentDateField(serializers.DateField):
# TODO: Remove after pretix 2.2 # TODO: Remove after pretix 2.2
def to_representation(self, instance: Order): def to_representation(self, instance: Order):
t = None t = None
if instance.pk: for p in instance.payments.all():
for p in instance.payments.all(): t = p.payment_date or t
t = p.payment_date or t
if t: if t:
return super().to_representation(t.date()) return super().to_representation(t.date())
@@ -617,7 +561,7 @@ class PaymentURLField(serializers.URLField):
def to_representation(self, instance: OrderPayment): def to_representation(self, instance: OrderPayment):
if instance.state != OrderPayment.PAYMENT_STATE_CREATED: if instance.state != OrderPayment.PAYMENT_STATE_CREATED:
return None return None
return build_absolute_uri(instance.order.event, 'presale:event.order.pay', kwargs={ return build_absolute_uri(self.context['event'], 'presale:event.order.pay', kwargs={
'order': instance.order.code, 'order': instance.order.code,
'secret': instance.order.secret, 'secret': instance.order.secret,
'payment': instance.pk, 'payment': instance.pk,
@@ -662,42 +606,13 @@ class OrderRefundSerializer(I18nAwareModelSerializer):
class OrderURLField(serializers.URLField): class OrderURLField(serializers.URLField):
def to_representation(self, instance: Order): def to_representation(self, instance: Order):
return build_absolute_uri(instance.event, 'presale:event.order', kwargs={ return build_absolute_uri(self.context['event'], 'presale:event.order', kwargs={
'order': instance.code, 'order': instance.code,
'secret': instance.secret, 'secret': instance.secret,
}) })
class OrderListSerializer(serializers.ListSerializer):
def to_representation(self, data):
# We have a custom implementation of this method because PdfDataSerializer() might keep some elements
# unevaluated with a (callable, input) tuple. We'll loop over these entries and evaluate them bulk-wise to
# save on SQL queries.
iterable = data.all() if isinstance(data, models.Manager) else data
data = []
evaluate_queue = defaultdict(list)
for item in iterable:
entry = self.child.to_representation(item)
for p in entry.get("positions", []):
if "pdf_data" in p:
for k, v in p["pdf_data"].items():
if isinstance(v, tuple) and callable(v[0]):
evaluate_queue[v[0]].append((v[1], p, k))
data.append(entry)
for func, entries in evaluate_queue.items():
results = func([item for (item, entry, k) in entries])
for (item, entry, k), result in zip(entries, results):
entry["pdf_data"][k] = result
return data
class OrderSerializer(I18nAwareModelSerializer): class OrderSerializer(I18nAwareModelSerializer):
event = SlugRelatedField(slug_field='slug', read_only=True)
invoice_address = InvoiceAddressSerializer(allow_null=True) invoice_address = InvoiceAddressSerializer(allow_null=True)
positions = OrderPositionSerializer(many=True, read_only=True) positions = OrderPositionSerializer(many=True, read_only=True)
fees = OrderFeeSerializer(many=True, read_only=True) fees = OrderFeeSerializer(many=True, read_only=True)
@@ -711,9 +626,8 @@ class OrderSerializer(I18nAwareModelSerializer):
class Meta: class Meta:
model = Order model = Order
list_serializer_class = OrderListSerializer
fields = ( fields = (
'code', 'event', 'status', 'testmode', 'secret', 'email', 'phone', 'locale', 'datetime', 'expires', 'payment_date', 'code', 'status', 'testmode', 'secret', 'email', 'phone', 'locale', 'datetime', 'expires', 'payment_date',
'payment_provider', 'fees', 'total', 'comment', 'custom_followup_at', 'invoice_address', 'positions', 'downloads', 'payment_provider', 'fees', 'total', 'comment', 'custom_followup_at', 'invoice_address', 'positions', 'downloads',
'checkin_attention', 'last_modified', 'payments', 'refunds', 'require_approval', 'sales_channel', 'checkin_attention', 'last_modified', 'payments', 'refunds', 'require_approval', 'sales_channel',
'url', 'customer', 'valid_if_pending' 'url', 'customer', 'valid_if_pending'
@@ -981,6 +895,19 @@ class OrderPositionCreateSerializer(I18nAwareModelSerializer):
return data return data
class CompatibleJSONField(serializers.JSONField):
def to_internal_value(self, data):
try:
return json.dumps(data)
except (TypeError, ValueError):
self.fail('invalid')
def to_representation(self, value):
if value:
return json.loads(value)
return value
class WrappedList: class WrappedList:
def __init__(self, data): def __init__(self, data):
self._data = data self._data = data
@@ -1146,367 +1073,337 @@ class OrderCreateSerializer(I18nAwareModelSerializer):
else: else:
ia = None ia = None
quotas_by_item = {} lock_required = False
quota_diff_for_locking = Counter()
voucher_diff_for_locking = Counter()
seat_diff_for_locking = Counter()
quota_usage = Counter()
voucher_usage = Counter()
seat_usage = Counter()
v_budget = {}
now_dt = now()
delete_cps = []
consume_carts = validated_data.pop('consume_carts', [])
for pos_data in positions_data: for pos_data in positions_data:
if (pos_data.get('item'), pos_data.get('variation'), pos_data.get('subevent')) not in quotas_by_item: pos_data['_quotas'] = list(
quotas_by_item[pos_data.get('item'), pos_data.get('variation'), pos_data.get('subevent')] = list( pos_data.get('variation').quotas.filter(subevent=pos_data.get('subevent'))
pos_data.get('variation').quotas.filter(subevent=pos_data.get('subevent')) if pos_data.get('variation')
if pos_data.get('variation') else pos_data.get('item').quotas.filter(subevent=pos_data.get('subevent'))
else pos_data.get('item').quotas.filter(subevent=pos_data.get('subevent')) )
) if pos_data.get('voucher') or pos_data.get('seat') or any(q.size is not None for q in pos_data['_quotas']):
for q in quotas_by_item[pos_data.get('item'), pos_data.get('variation'), pos_data.get('subevent')]: lock_required = True
quota_diff_for_locking[q] += 1
if pos_data.get('voucher'):
voucher_diff_for_locking[pos_data['voucher']] += 1
if pos_data.get('seat'):
try:
seat = self.context['event'].seats.get(seat_guid=pos_data['seat'], subevent=pos_data.get('subevent'))
except Seat.DoesNotExist:
pos_data['seat'] = Seat.DoesNotExist
else:
pos_data['seat'] = seat
seat_diff_for_locking[pos_data['seat']] += 1
if consume_carts: lockfn = self.context['event'].lock
offset = now() + timedelta(seconds=LOCK_TRUST_WINDOW) if simulate or not lock_required:
for cp in CartPosition.objects.filter( lockfn = NoLockManager
event=self.context['event'], cart_id__in=consume_carts, expires__gt=now_dt with lockfn() as now_dt:
): free_seats = set()
quotas = (cp.variation.quotas.filter(subevent=cp.subevent) seats_seen = set()
if cp.variation else cp.item.quotas.filter(subevent=cp.subevent)) consume_carts = validated_data.pop('consume_carts', [])
for quota in quotas: delete_cps = []
if cp.expires > offset: quota_avail_cache = {}
quota_diff_for_locking[quota] -= 1 v_budget = {}
quota_usage[quota] -= 1 voucher_usage = Counter()
if cp.voucher: if consume_carts:
if cp.expires > offset: for cp in CartPosition.objects.filter(
voucher_diff_for_locking[cp.voucher] -= 1 event=self.context['event'], cart_id__in=consume_carts, expires__gt=now()
voucher_usage[cp.voucher] -= 1 ):
if cp.seat: quotas = (cp.variation.quotas.filter(subevent=cp.subevent)
if cp.expires > offset: if cp.variation else cp.item.quotas.filter(subevent=cp.subevent))
seat_diff_for_locking[cp.seat] -= 1 for quota in quotas:
seat_usage[cp.seat] -= 1 if quota not in quota_avail_cache:
delete_cps.append(cp) quota_avail_cache[quota] = list(quota.availability())
if quota_avail_cache[quota][1] is not None:
quota_avail_cache[quota][1] += 1
if cp.voucher:
voucher_usage[cp.voucher] -= 1
if cp.expires > now_dt:
if cp.seat:
free_seats.add(cp.seat)
delete_cps.append(cp)
if not simulate: errs = [{} for p in positions_data]
full_lock_required = seat_diff_for_locking and self.context['event'].settings.seating_minimal_distance > 0
if full_lock_required:
# We lock the entire event in this case since we don't want to deal with fine-granular locking
# in the case of seating distance enforcement
lock_objects([self.context['event']])
else:
lock_objects(
[q for q, d in quota_diff_for_locking.items() if d > 0 and q.size is not None and not force] +
[v for v, d in voucher_diff_for_locking.items() if d > 0 and not force] +
[s for s, d in seat_diff_for_locking.items() if d > 0],
shared_lock_objects=[self.context['event']]
)
qa = QuotaAvailability() for i, pos_data in enumerate(positions_data):
qa.queue(*[q for q, d in quota_diff_for_locking.items() if d > 0])
qa.compute()
# These are not technically correct as diff use due to the time offset applied above, so let's prevent accidental if pos_data.get('voucher'):
# use further down v = pos_data['voucher']
del quota_diff_for_locking, voucher_diff_for_locking, seat_diff_for_locking
errs = [{} for p in positions_data] if pos_data.get('addon_to'):
errs[i]['voucher'] = ['Vouchers are currently not supported for add-on products.']
continue
for i, pos_data in enumerate(positions_data): if not v.applies_to(pos_data['item'], pos_data.get('variation')):
if pos_data.get('voucher'): errs[i]['voucher'] = [error_messages['voucher_invalid_item']]
v = pos_data['voucher'] continue
if pos_data.get('addon_to'): if v.subevent_id and pos_data.get('subevent').pk != v.subevent_id:
errs[i]['voucher'] = ['Vouchers are currently not supported for add-on products.'] errs[i]['voucher'] = [error_messages['voucher_invalid_subevent']]
continue continue
if not v.applies_to(pos_data['item'], pos_data.get('variation')): if v.valid_until is not None and v.valid_until < now_dt:
errs[i]['voucher'] = [error_messages['voucher_invalid_item']] errs[i]['voucher'] = [error_messages['voucher_expired']]
continue continue
if v.subevent_id and pos_data.get('subevent').pk != v.subevent_id: voucher_usage[v] += 1
errs[i]['voucher'] = [error_messages['voucher_invalid_subevent']] if voucher_usage[v] > 0:
continue redeemed_in_carts = CartPosition.objects.filter(
Q(voucher=pos_data['voucher']) & Q(event=self.context['event']) & Q(expires__gte=now_dt)
).exclude(pk__in=[cp.pk for cp in delete_cps])
v_avail = v.max_usages - v.redeemed - redeemed_in_carts.count()
if v_avail < voucher_usage[v]:
errs[i]['voucher'] = [
'The voucher has already been used the maximum number of times.'
]
if v.valid_until is not None and v.valid_until < now_dt: if v.budget is not None:
errs[i]['voucher'] = [error_messages['voucher_expired']] price = pos_data.get('price')
continue listed_price = get_listed_price(pos_data.get('item'), pos_data.get('variation'), pos_data.get('subevent'))
voucher_usage[v] += 1 if pos_data.get('voucher'):
if voucher_usage[v] > 0: price_after_voucher = pos_data.get('voucher').calculate_price(listed_price)
redeemed_in_carts = CartPosition.objects.filter( else:
Q(voucher=pos_data['voucher']) & Q(event=self.context['event']) & Q(expires__gte=now_dt) price_after_voucher = listed_price
).exclude(pk__in=[cp.pk for cp in delete_cps]) if price is None:
v_avail = v.max_usages - v.redeemed - redeemed_in_carts.count() price = price_after_voucher
if v_avail < voucher_usage[v]:
errs[i]['voucher'] = [
'The voucher has already been used the maximum number of times.'
]
if v.budget is not None: if v not in v_budget:
price = pos_data.get('price') v_budget[v] = v.budget - v.budget_used()
listed_price = get_listed_price(pos_data.get('item'), pos_data.get('variation'), pos_data.get('subevent')) disc = max(listed_price - price, 0)
if disc > v_budget[v]:
new_disc = v_budget[v]
v_budget[v] -= new_disc
if new_disc == Decimal('0.00') or pos_data.get('price') is not None:
errs[i]['voucher'] = [
'The voucher has a remaining budget of {}, therefore a discount of {} can not be '
'given.'.format(v_budget[v] + new_disc, disc)
]
continue
pos_data['price'] = price + (disc - new_disc)
else:
v_budget[v] -= disc
seated = pos_data.get('item').seat_category_mappings.filter(subevent=pos_data.get('subevent')).exists()
if pos_data.get('seat'):
if pos_data.get('addon_to'):
errs[i]['seat'] = ['Seats are currently not supported for add-on products.']
continue
if not seated:
errs[i]['seat'] = ['The specified product does not allow to choose a seat.']
try:
seat = self.context['event'].seats.get(seat_guid=pos_data['seat'], subevent=pos_data.get('subevent'))
except Seat.DoesNotExist:
errs[i]['seat'] = ['The specified seat does not exist.']
else:
pos_data['seat'] = seat
if (seat not in free_seats and not seat.is_available(sales_channel=validated_data.get('sales_channel', 'web'))) or seat in seats_seen:
errs[i]['seat'] = [gettext_lazy('The selected seat "{seat}" is not available.').format(seat=seat.name)]
seats_seen.add(seat)
elif seated:
errs[i]['seat'] = ['The specified product requires to choose a seat.']
requested_valid_from = pos_data.pop('requested_valid_from', None)
if 'valid_from' not in pos_data and 'valid_until' not in pos_data:
valid_from, valid_until = pos_data['item'].compute_validity(
requested_start=(
max(requested_valid_from, now())
if requested_valid_from and pos_data['item'].validity_dynamic_start_choice
else now()
),
enforce_start_limit=True,
override_tz=self.context['event'].timezone,
)
pos_data['valid_from'] = valid_from
pos_data['valid_until'] = valid_until
if not force:
for i, pos_data in enumerate(positions_data):
if pos_data.get('voucher'): if pos_data.get('voucher'):
price_after_voucher = pos_data.get('voucher').calculate_price(listed_price) if pos_data['voucher'].allow_ignore_quota or pos_data['voucher'].block_quota:
continue
if pos_data.get('subevent'):
if pos_data.get('item').pk in pos_data['subevent'].item_overrides and pos_data['subevent'].item_overrides[pos_data['item'].pk].disabled:
errs[i]['item'] = [gettext_lazy('The product "{}" is not available on this date.').format(
str(pos_data.get('item'))
)]
if (
pos_data.get('variation') and pos_data['variation'].pk in pos_data['subevent'].var_overrides and
pos_data['subevent'].var_overrides[pos_data['variation'].pk].disabled
):
errs[i]['item'] = [gettext_lazy('The product "{}" is not available on this date.').format(
str(pos_data.get('item'))
)]
new_quotas = pos_data['_quotas']
if len(new_quotas) == 0:
errs[i]['item'] = [gettext_lazy('The product "{}" is not assigned to a quota.').format(
str(pos_data.get('item'))
)]
else:
for quota in new_quotas:
if quota not in quota_avail_cache:
quota_avail_cache[quota] = list(quota.availability())
if quota_avail_cache[quota][1] is not None:
quota_avail_cache[quota][1] -= 1
if quota_avail_cache[quota][1] < 0:
errs[i]['item'] = [
gettext_lazy('There is not enough quota available on quota "{}" to perform the operation.').format(
quota.name
)
]
if any(errs):
raise ValidationError({'positions': errs})
if validated_data.get('locale', None) is None:
validated_data['locale'] = self.context['event'].settings.locale
order = Order(event=self.context['event'], **validated_data)
order.set_expires(subevents=[p.get('subevent') for p in positions_data])
order.meta_info = "{}"
order.total = Decimal('0.00')
if validated_data.get('require_approval') is not None:
order.require_approval = validated_data['require_approval']
if simulate:
order = WrappedModel(order)
order.last_modified = now()
order.code = 'PREVIEW'
else:
order.save()
if ia:
if not simulate:
ia.order = order
ia.save()
else:
order.invoice_address = ia
ia.last_modified = now()
# Generate position objects
pos_map = {}
for pos_data in positions_data:
addon_to = pos_data.pop('addon_to', None)
attendee_name = pos_data.pop('attendee_name', '')
if attendee_name and not pos_data.get('attendee_name_parts'):
pos_data['attendee_name_parts'] = {
'_legacy': attendee_name
}
pos = OrderPosition(**{k: v for k, v in pos_data.items() if k != 'answers' and k != '_quotas' and k != 'use_reusable_medium'})
if simulate:
pos.order = order._wrapped
else:
pos.order = order
if addon_to:
if simulate:
pos.addon_to = pos_map[addon_to]
else:
pos.addon_to = pos_map[addon_to]
pos_map[pos.positionid] = pos
pos_data['__instance'] = pos
# Calculate prices if not set
for pos_data in positions_data:
pos = pos_data['__instance']
if pos.addon_to_id and is_included_for_free(pos.item, pos.addon_to):
listed_price = Decimal('0.00')
else:
listed_price = get_listed_price(pos.item, pos.variation, pos.subevent)
if pos.price is None:
if pos.voucher:
price_after_voucher = pos.voucher.calculate_price(listed_price)
else: else:
price_after_voucher = listed_price price_after_voucher = listed_price
if price is None:
price = price_after_voucher
if v not in v_budget: line_price = get_line_price(
v_budget[v] = v.budget - v.budget_used() price_after_voucher=price_after_voucher,
disc = max(listed_price - price, 0) custom_price_input=None,
if disc > v_budget[v]: custom_price_input_is_net=False,
new_disc = v_budget[v] tax_rule=pos.item.tax_rule,
v_budget[v] -= new_disc invoice_address=ia,
if new_disc == Decimal('0.00') or pos_data.get('price') is not None: bundled_sum=Decimal('0.00'),
errs[i]['voucher'] = [
'The voucher has a remaining budget of {}, therefore a discount of {} can not be '
'given.'.format(v_budget[v] + new_disc, disc)
]
continue
pos_data['price'] = price + (disc - new_disc)
else:
v_budget[v] -= disc
seated = pos_data.get('item').seat_category_mappings.filter(subevent=pos_data.get('subevent')).exists()
if pos_data.get('seat'):
if pos_data.get('addon_to'):
errs[i]['seat'] = ['Seats are currently not supported for add-on products.']
continue
if not seated:
errs[i]['seat'] = ['The specified product does not allow to choose a seat.']
seat = pos_data['seat']
if seat is Seat.DoesNotExist:
errs[i]['seat'] = ['The specified seat does not exist.']
else:
seat_usage[seat] += 1
if (seat_usage[seat] > 0 and not seat.is_available(sales_channel=validated_data.get('sales_channel', 'web'))) or seat_usage[seat] > 1:
errs[i]['seat'] = [gettext_lazy('The selected seat "{seat}" is not available.').format(seat=seat.name)]
elif seated:
errs[i]['seat'] = ['The specified product requires to choose a seat.']
requested_valid_from = pos_data.pop('requested_valid_from', None)
if 'valid_from' not in pos_data and 'valid_until' not in pos_data:
valid_from, valid_until = pos_data['item'].compute_validity(
requested_start=(
max(requested_valid_from, now())
if requested_valid_from and pos_data['item'].validity_dynamic_start_choice
else now()
),
enforce_start_limit=True,
override_tz=self.context['event'].timezone,
)
pos_data['valid_from'] = valid_from
pos_data['valid_until'] = valid_until
if not force:
for i, pos_data in enumerate(positions_data):
if pos_data.get('voucher'):
if pos_data['voucher'].allow_ignore_quota or pos_data['voucher'].block_quota:
continue
if pos_data.get('subevent'):
if pos_data.get('item').pk in pos_data['subevent'].item_overrides and pos_data['subevent'].item_overrides[pos_data['item'].pk].disabled:
errs[i]['item'] = [gettext_lazy('The product "{}" is not available on this date.').format(
str(pos_data.get('item'))
)]
if (
pos_data.get('variation') and pos_data['variation'].pk in pos_data['subevent'].var_overrides and
pos_data['subevent'].var_overrides[pos_data['variation'].pk].disabled
):
errs[i]['item'] = [gettext_lazy('The product "{}" is not available on this date.').format(
str(pos_data.get('item'))
)]
new_quotas = quotas_by_item[pos_data.get('item'), pos_data.get('variation'), pos_data.get('subevent')]
if len(new_quotas) == 0:
errs[i]['item'] = [gettext_lazy('The product "{}" is not assigned to a quota.').format(
str(pos_data.get('item'))
)]
else:
for quota in new_quotas:
quota_usage[quota] += 1
if quota_usage[quota] > 0 and qa.results[quota][1] is not None:
if qa.results[quota][1] < quota_usage[quota]:
errs[i]['item'] = [
gettext_lazy('There is not enough quota available on quota "{}" to perform the operation.').format(
quota.name
)
]
if any(errs):
raise ValidationError({'positions': errs})
if validated_data.get('locale', None) is None:
validated_data['locale'] = self.context['event'].settings.locale
order = Order(event=self.context['event'], **validated_data)
order.set_expires(subevents=[p.get('subevent') for p in positions_data])
order.meta_info = "{}"
order.total = Decimal('0.00')
if validated_data.get('require_approval') is not None:
order.require_approval = validated_data['require_approval']
if simulate:
order = WrappedModel(order)
order.last_modified = now()
order.code = 'PREVIEW'
else:
order.save()
if ia:
if not simulate:
ia.order = order
ia.save()
else:
order.invoice_address = ia
ia.last_modified = now()
# Generate position objects
pos_map = {}
for pos_data in positions_data:
addon_to = pos_data.pop('addon_to', None)
attendee_name = pos_data.pop('attendee_name', '')
if attendee_name and not pos_data.get('attendee_name_parts'):
pos_data['attendee_name_parts'] = {
'_legacy': attendee_name
}
pos = OrderPosition(**{k: v for k, v in pos_data.items() if k != 'answers' and k != '_quotas' and k != 'use_reusable_medium'})
if simulate:
pos.order = order._wrapped
else:
pos.order = order
if addon_to:
if simulate:
pos.addon_to = pos_map[addon_to]
else:
pos.addon_to = pos_map[addon_to]
pos_map[pos.positionid] = pos
pos_data['__instance'] = pos
# Calculate prices if not set
for pos_data in positions_data:
pos = pos_data['__instance']
if pos.addon_to_id and is_included_for_free(pos.item, pos.addon_to):
listed_price = Decimal('0.00')
else:
listed_price = get_listed_price(pos.item, pos.variation, pos.subevent)
if pos.price is None:
if pos.voucher:
price_after_voucher = pos.voucher.calculate_price(listed_price)
else:
price_after_voucher = listed_price
line_price = get_line_price(
price_after_voucher=price_after_voucher,
custom_price_input=None,
custom_price_input_is_net=False,
tax_rule=pos.item.tax_rule,
invoice_address=ia,
bundled_sum=Decimal('0.00'),
)
pos.price = line_price.gross
pos._auto_generated_price = True
else:
if pos.voucher:
if not pos.item.tax_rule or pos.item.tax_rule.price_includes_tax:
price_after_voucher = max(pos.price, pos.voucher.calculate_price(listed_price))
else:
price_after_voucher = max(pos.price - pos.tax_value, pos.voucher.calculate_price(listed_price))
else:
price_after_voucher = listed_price
pos._auto_generated_price = False
pos._voucher_discount = listed_price - price_after_voucher
if pos.voucher:
pos.voucher_budget_use = max(listed_price - price_after_voucher, Decimal('0.00'))
order_positions = [pos_data['__instance'] for pos_data in positions_data]
discount_results = apply_discounts(
self.context['event'],
order.sales_channel,
[
(cp.item_id, cp.subevent_id, cp.price, bool(cp.addon_to), cp.is_bundled, pos._voucher_discount)
for cp in order_positions
]
)
for cp, (new_price, discount) in zip(order_positions, discount_results):
if new_price != pos.price and pos._auto_generated_price:
pos.price = new_price
pos.discount = discount
# Save instances
for pos_data in positions_data:
answers_data = pos_data.pop('answers', [])
use_reusable_medium = pos_data.pop('use_reusable_medium', None)
pos = pos_data['__instance']
pos._calculate_tax()
if simulate:
pos = WrappedModel(pos)
pos.id = 0
answers = []
for answ_data in answers_data:
options = answ_data.pop('options', [])
answ = WrappedModel(QuestionAnswer(**answ_data))
answ.options = WrappedList(options)
answers.append(answ)
pos.answers = answers
pos.pseudonymization_id = "PREVIEW"
pos.checkins = []
pos_map[pos.positionid] = pos
else:
if pos.voucher:
Voucher.objects.filter(pk=pos.voucher.pk).update(redeemed=F('redeemed') + 1)
pos.save()
seen_answers = set()
for answ_data in answers_data:
# Workaround for a pretixPOS bug :-(
if answ_data.get('question') in seen_answers:
continue
seen_answers.add(answ_data.get('question'))
options = answ_data.pop('options', [])
if isinstance(answ_data['answer'], File):
an = answ_data.pop('answer')
answ = pos.answers.create(**answ_data, answer='')
answ.file.save(os.path.basename(an.name), an, save=False)
answ.answer = 'file://' + answ.file.name
answ.save()
else:
answ = pos.answers.create(**answ_data)
answ.options.add(*options)
if use_reusable_medium:
use_reusable_medium.linked_orderposition = pos
use_reusable_medium.save(update_fields=['linked_orderposition'])
use_reusable_medium.log_action(
'pretix.reusable_medium.linked_orderposition.changed',
data={
'by_order': order.code,
'linked_orderposition': pos.pk,
}
) )
pos.price = line_price.gross
pos._auto_generated_price = True
else:
if pos.voucher:
if not pos.item.tax_rule or pos.item.tax_rule.price_includes_tax:
price_after_voucher = max(pos.price, pos.voucher.calculate_price(listed_price))
else:
price_after_voucher = max(pos.price - pos.tax_value, pos.voucher.calculate_price(listed_price))
else:
price_after_voucher = listed_price
pos._auto_generated_price = False
pos._voucher_discount = listed_price - price_after_voucher
if pos.voucher:
pos.voucher_budget_use = max(listed_price - price_after_voucher, Decimal('0.00'))
if not simulate: order_positions = [pos_data['__instance'] for pos_data in positions_data]
for cp in delete_cps: discount_results = apply_discounts(
if cp.addon_to_id: self.context['event'],
continue order.sales_channel,
cp.addons.all().delete() [
cp.delete() (cp.item_id, cp.subevent_id, cp.price, bool(cp.addon_to), cp.is_bundled, pos._voucher_discount)
for cp in order_positions
]
)
for cp, (new_price, discount) in zip(order_positions, discount_results):
if new_price != pos.price and pos._auto_generated_price:
pos.price = new_price
pos.discount = discount
# Save instances
for pos_data in positions_data:
answers_data = pos_data.pop('answers', [])
use_reusable_medium = pos_data.pop('use_reusable_medium', None)
pos = pos_data['__instance']
pos._calculate_tax()
if simulate:
pos = WrappedModel(pos)
pos.id = 0
answers = []
for answ_data in answers_data:
options = answ_data.pop('options', [])
answ = WrappedModel(QuestionAnswer(**answ_data))
answ.options = WrappedList(options)
answers.append(answ)
pos.answers = answers
pos.pseudonymization_id = "PREVIEW"
pos_map[pos.positionid] = pos
else:
if pos.voucher:
Voucher.objects.filter(pk=pos.voucher.pk).update(redeemed=F('redeemed') + 1)
pos.save()
seen_answers = set()
for answ_data in answers_data:
# Workaround for a pretixPOS bug :-(
if answ_data.get('question') in seen_answers:
continue
seen_answers.add(answ_data.get('question'))
options = answ_data.pop('options', [])
if isinstance(answ_data['answer'], File):
an = answ_data.pop('answer')
answ = pos.answers.create(**answ_data, answer='')
answ.file.save(os.path.basename(an.name), an, save=False)
answ.answer = 'file://' + answ.file.name
answ.save()
else:
answ = pos.answers.create(**answ_data)
answ.options.add(*options)
if use_reusable_medium:
use_reusable_medium.linked_orderposition = pos
use_reusable_medium.save(update_fields=['linked_orderposition'])
use_reusable_medium.log_action(
'pretix.reusable_medium.linked_orderposition.changed',
data={
'by_order': order.code,
'linked_orderposition': pos.pk,
}
)
if not simulate:
for cp in delete_cps:
if cp.addon_to_id:
continue
cp.addons.all().delete()
cp.delete()
order.total = sum([p.price for p in pos_map.values()]) order.total = sum([p.price for p in pos_map.values()])
fees = [] fees = []
@@ -1562,8 +1459,6 @@ class OrderCreateSerializer(I18nAwareModelSerializer):
if simulate: if simulate:
order.fees = fees order.fees = fees
order.positions = pos_map.values() order.positions = pos_map.values()
order.payments = []
order.refunds = []
return order # ignore payments return order # ignore payments
else: else:
order.save(update_fields=['total']) order.save(update_fields=['total'])
@@ -1626,7 +1521,6 @@ class InlineInvoiceLineSerializer(I18nAwareModelSerializer):
class InvoiceSerializer(I18nAwareModelSerializer): class InvoiceSerializer(I18nAwareModelSerializer):
event = SlugRelatedField(slug_field='slug', read_only=True)
order = serializers.SlugRelatedField(slug_field='code', read_only=True) order = serializers.SlugRelatedField(slug_field='code', read_only=True)
refers = serializers.SlugRelatedField(slug_field='full_invoice_no', read_only=True) refers = serializers.SlugRelatedField(slug_field='full_invoice_no', read_only=True)
lines = InlineInvoiceLineSerializer(many=True) lines = InlineInvoiceLineSerializer(many=True)
@@ -1635,7 +1529,7 @@ class InvoiceSerializer(I18nAwareModelSerializer):
class Meta: class Meta:
model = Invoice model = Invoice
fields = ('event', 'order', 'number', 'is_cancellation', 'invoice_from', 'invoice_from_name', 'invoice_from_zipcode', fields = ('order', 'number', 'is_cancellation', 'invoice_from', 'invoice_from_name', 'invoice_from_zipcode',
'invoice_from_city', 'invoice_from_country', 'invoice_from_tax_id', 'invoice_from_vat_id', 'invoice_from_city', 'invoice_from_country', 'invoice_from_tax_id', 'invoice_from_vat_id',
'invoice_to', 'invoice_to_company', 'invoice_to_name', 'invoice_to_street', 'invoice_to_zipcode', 'invoice_to', 'invoice_to_company', 'invoice_to_name', 'invoice_to_street', 'invoice_to_zipcode',
'invoice_to_city', 'invoice_to_state', 'invoice_to_country', 'invoice_to_vat_id', 'invoice_to_beneficiary', 'invoice_to_city', 'invoice_to_state', 'invoice_to_country', 'invoice_to_vat_id', 'invoice_to_beneficiary',

View File

@@ -36,9 +36,9 @@ from pretix.api.serializers.settings import SettingsSerializer
from pretix.base.auth import get_auth_backends from pretix.base.auth import get_auth_backends
from pretix.base.i18n import get_language_without_region from pretix.base.i18n import get_language_without_region
from pretix.base.models import ( from pretix.base.models import (
Customer, Device, GiftCard, GiftCardAcceptance, GiftCardTransaction, Customer, Device, GiftCard, GiftCardTransaction, Membership,
Membership, MembershipType, OrderPosition, Organizer, ReusableMedium, MembershipType, OrderPosition, Organizer, ReusableMedium, SeatingPlan,
SeatingPlan, Team, TeamAPIToken, TeamInvite, User, Team, TeamAPIToken, TeamInvite, User,
) )
from pretix.base.models.seating import SeatingPlanLayoutValidator from pretix.base.models.seating import SeatingPlanLayoutValidator
from pretix.base.services.mail import SendMailException, mail from pretix.base.services.mail import SendMailException, mail
@@ -94,14 +94,6 @@ class CustomerSerializer(I18nAwareModelSerializer):
data['name_parts']['_scheme'] = self.context['request'].organizer.settings.name_scheme data['name_parts']['_scheme'] = self.context['request'].organizer.settings.name_scheme
return data return data
def validate_email(self, value):
qs = Customer.objects.filter(organizer=self.context['organizer'], email__iexact=value)
if self.instance and self.instance.pk:
qs = qs.exclude(pk=self.instance.pk)
if qs.exists():
raise ValidationError(_("An account with this email address is already registered."))
return value
class CustomerCreateSerializer(CustomerSerializer): class CustomerCreateSerializer(CustomerSerializer):
send_email = serializers.BooleanField(default=False, required=False, allow_null=True) send_email = serializers.BooleanField(default=False, required=False, allow_null=True)
@@ -191,11 +183,8 @@ class GiftCardSerializer(I18nAwareModelSerializer):
qs = GiftCard.objects.filter( qs = GiftCard.objects.filter(
secret=s secret=s
).filter( ).filter(
Q(issuer=self.context["organizer"]) | Q(issuer=self.context["organizer"]) | Q(
Q(issuer__in=GiftCardAcceptance.objects.filter( issuer__gift_card_collector_acceptance__collector=self.context["organizer"])
acceptor=self.context["organizer"],
active=True,
).values_list('issuer', flat=True))
) )
if self.instance: if self.instance:
qs = qs.exclude(pk=self.instance.pk) qs = qs.exclude(pk=self.instance.pk)
@@ -259,8 +248,6 @@ class DeviceSerializer(serializers.ModelSerializer):
unique_serial = serializers.CharField(read_only=True) unique_serial = serializers.CharField(read_only=True)
hardware_brand = serializers.CharField(read_only=True) hardware_brand = serializers.CharField(read_only=True)
hardware_model = serializers.CharField(read_only=True) hardware_model = serializers.CharField(read_only=True)
os_name = serializers.CharField(read_only=True)
os_version = serializers.CharField(read_only=True)
software_brand = serializers.CharField(read_only=True) software_brand = serializers.CharField(read_only=True)
software_version = serializers.CharField(read_only=True) software_version = serializers.CharField(read_only=True)
created = serializers.DateTimeField(read_only=True) created = serializers.DateTimeField(read_only=True)
@@ -273,7 +260,7 @@ class DeviceSerializer(serializers.ModelSerializer):
fields = ( fields = (
'device_id', 'unique_serial', 'initialization_token', 'all_events', 'limit_events', 'device_id', 'unique_serial', 'initialization_token', 'all_events', 'limit_events',
'revoked', 'name', 'created', 'initialized', 'hardware_brand', 'hardware_model', 'revoked', 'name', 'created', 'initialized', 'hardware_brand', 'hardware_model',
'os_name', 'os_version', 'software_brand', 'software_version', 'security_profile' 'software_brand', 'software_version', 'security_profile'
) )
@@ -400,9 +387,6 @@ class OrganizerSettingsSerializer(SettingsSerializer):
'reusable_media_type_nfc_uid', 'reusable_media_type_nfc_uid',
'reusable_media_type_nfc_uid_autocreate_giftcard', 'reusable_media_type_nfc_uid_autocreate_giftcard',
'reusable_media_type_nfc_uid_autocreate_giftcard_currency', 'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
] ]
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):

View File

@@ -94,13 +94,8 @@ class VoucherSerializer(I18nAwareModelSerializer):
) )
if check_quota: if check_quota:
Voucher.clean_quota_check( Voucher.clean_quota_check(
full_data, full_data, 1, self.instance, self.context.get('event'),
full_data.get('max_usages', 1) - (self.instance.redeemed if self.instance else 0), full_data.get('quota'), full_data.get('item'), full_data.get('variation')
self.instance,
self.context.get('event'),
full_data.get('quota'),
full_data.get('item'),
full_data.get('variation')
) )
Voucher.clean_voucher_code(full_data, self.context.get('event'), self.instance.pk if self.instance else None) Voucher.clean_voucher_code(full_data, self.context.get('event'), self.instance.pk if self.instance else None)

View File

@@ -35,7 +35,8 @@
import importlib import importlib
from django.apps import apps from django.apps import apps
from django.urls import include, re_path from django.conf.urls import re_path
from django.urls import include
from rest_framework import routers from rest_framework import routers
from pretix.api.views import cart from pretix.api.views import cart
@@ -61,8 +62,6 @@ orga_router.register(r'membershiptypes', organizer.MembershipTypeViewSet)
orga_router.register(r'reusablemedia', media.ReusableMediaViewSet) orga_router.register(r'reusablemedia', media.ReusableMediaViewSet)
orga_router.register(r'teams', organizer.TeamViewSet) orga_router.register(r'teams', organizer.TeamViewSet)
orga_router.register(r'devices', organizer.DeviceViewSet) orga_router.register(r'devices', organizer.DeviceViewSet)
orga_router.register(r'orders', order.OrganizerOrderViewSet)
orga_router.register(r'invoices', order.InvoiceViewSet)
orga_router.register(r'exporters', exporters.OrganizerExportersViewSet, basename='exporters') orga_router.register(r'exporters', exporters.OrganizerExportersViewSet, basename='exporters')
team_router = routers.DefaultRouter() team_router = routers.DefaultRouter()
@@ -79,7 +78,7 @@ event_router.register(r'questions', item.QuestionViewSet)
event_router.register(r'discounts', discount.DiscountViewSet) event_router.register(r'discounts', discount.DiscountViewSet)
event_router.register(r'quotas', item.QuotaViewSet) event_router.register(r'quotas', item.QuotaViewSet)
event_router.register(r'vouchers', voucher.VoucherViewSet) event_router.register(r'vouchers', voucher.VoucherViewSet)
event_router.register(r'orders', order.EventOrderViewSet) event_router.register(r'orders', order.OrderViewSet)
event_router.register(r'orderpositions', order.OrderPositionViewSet) event_router.register(r'orderpositions', order.OrderPositionViewSet)
event_router.register(r'invoices', order.InvoiceViewSet) event_router.register(r'invoices', order.InvoiceViewSet)
event_router.register(r'revokedsecrets', order.RevokedSecretViewSet, basename='revokedsecrets') event_router.register(r'revokedsecrets', order.RevokedSecretViewSet, basename='revokedsecrets')

View File

@@ -25,7 +25,6 @@ from typing import List
from django.db import transaction from django.db import transaction
from django.utils.crypto import get_random_string from django.utils.crypto import get_random_string
from django.utils.functional import cached_property from django.utils.functional import cached_property
from django.utils.timezone import now
from django.utils.translation import gettext as _ from django.utils.translation import gettext as _
from rest_framework import status, viewsets from rest_framework import status, viewsets
from rest_framework.decorators import action from rest_framework.decorators import action
@@ -42,7 +41,7 @@ from pretix.base.models import CartPosition
from pretix.base.services.cart import ( from pretix.base.services.cart import (
_get_quota_availability, _get_voucher_availability, error_messages, _get_quota_availability, _get_voucher_availability, error_messages,
) )
from pretix.base.services.locking import lock_objects from pretix.base.services.locking import NoLockManager
class CartPositionViewSet(CreateModelMixin, DestroyModelMixin, viewsets.ReadOnlyModelViewSet): class CartPositionViewSet(CreateModelMixin, DestroyModelMixin, viewsets.ReadOnlyModelViewSet):
@@ -151,21 +150,12 @@ class CartPositionViewSet(CreateModelMixin, DestroyModelMixin, viewsets.ReadOnly
quota_diff[q] += 1 quota_diff[q] += 1
seats_seen = set() seats_seen = set()
now_dt = now()
with transaction.atomic():
full_lock_required = seat_diff and self.request.event.settings.seating_minimal_distance > 0
if full_lock_required:
# We lock the entire event in this case since we don't want to deal with fine-granular locking
# in the case of seating distance enforcement
lock_objects([self.request.event])
else:
lock_objects(
[q for q, d in quota_diff.items() if q.size is not None and d > 0] +
[v for v, d in voucher_use_diff.items() if d > 0] +
[s for s, d in seat_diff.items() if d > 0],
shared_lock_objects=[self.request.event]
)
lockfn = NoLockManager
if self._require_locking(quota_diff, voucher_use_diff, seat_diff):
lockfn = self.request.event.lock
with lockfn() as now_dt, transaction.atomic():
vouchers_ok, vouchers_depend_on_cart = _get_voucher_availability( vouchers_ok, vouchers_depend_on_cart = _get_voucher_availability(
self.request.event, self.request.event,
voucher_use_diff, voucher_use_diff,

View File

@@ -164,21 +164,8 @@ class CheckinListViewSet(viewsets.ModelViewSet):
secret=serializer.validated_data['raw_barcode'] secret=serializer.validated_data['raw_barcode']
).first() ).first()
clist = self.get_object()
if serializer.validated_data.get('nonce'):
if kwargs.get('position'):
prev = kwargs['position'].all_checkins.filter(nonce=serializer.validated_data['nonce']).first()
else:
prev = clist.checkins.filter(
nonce=serializer.validated_data['nonce'],
raw_barcode=serializer.validated_data['raw_barcode'],
).first()
if prev:
# Ignore because nonce is already handled
return Response(serializer.data, status=201)
c = serializer.save( c = serializer.save(
list=clist, list=self.get_object(),
successful=False, successful=False,
forced=True, forced=True,
force_sent=True, force_sent=True,
@@ -278,7 +265,6 @@ with scopes_disabled():
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self.checkinlist = kwargs.pop('checkinlist') self.checkinlist = kwargs.pop('checkinlist')
self.gate = kwargs.pop('gate')
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
def has_checkin_qs(self, queryset, name, value): def has_checkin_qs(self, queryset, name, value):
@@ -288,7 +274,7 @@ with scopes_disabled():
if not self.checkinlist.rules: if not self.checkinlist.rules:
return queryset return queryset
return queryset.filter( return queryset.filter(
SQLLogic(self.checkinlist, self.gate).apply(self.checkinlist.rules) SQLLogic(self.checkinlist).apply(self.checkinlist.rules)
).filter( ).filter(
Q(valid_from__isnull=True) | Q(valid_from__lte=now()), Q(valid_from__isnull=True) | Q(valid_from__lte=now()),
Q(valid_until__isnull=True) | Q(valid_until__gte=now()), Q(valid_until__isnull=True) | Q(valid_until__gte=now()),
@@ -410,7 +396,7 @@ def _checkin_list_position_queryset(checkinlists, ignore_status=False, ignore_pr
def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force, checkin_type, ignore_unpaid, nonce, def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force, checkin_type, ignore_unpaid, nonce,
untrusted_input, user, auth, expand, pdf_data, request, questions_supported, canceled_supported, untrusted_input, user, auth, expand, pdf_data, request, questions_supported, canceled_supported,
source_type='barcode', legacy_url_support=False, simulate=False, gate=None): source_type='barcode', legacy_url_support=False):
if not checkinlists: if not checkinlists:
raise ValidationError('No check-in list passed.') raise ValidationError('No check-in list passed.')
@@ -418,7 +404,7 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
prefetch_related_objects([cl for cl in checkinlists if not cl.all_products], 'limit_products') prefetch_related_objects([cl for cl in checkinlists if not cl.all_products], 'limit_products')
device = auth if isinstance(auth, Device) else None device = auth if isinstance(auth, Device) else None
gate = gate or (auth.gate if isinstance(auth, Device) else None) gate = auth.gate if isinstance(auth, Device) else None
context = { context = {
'request': request, 'request': request,
@@ -447,8 +433,6 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
) )
raw_barcode_for_checkin = None raw_barcode_for_checkin = None
from_revoked_secret = False from_revoked_secret = False
if simulate:
common_checkin_args['__fake_arg_to_prevent_this_from_being_saved'] = True
# 1. Gather a list of positions that could be the one we looking for, either from their ID, secret or # 1. Gather a list of positions that could be the one we looking for, either from their ID, secret or
# parent secret # parent secret
@@ -488,14 +472,13 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
revoked_matches = list( revoked_matches = list(
RevokedTicketSecret.objects.filter(event_id__in=list_by_event.keys(), secret=raw_barcode)) RevokedTicketSecret.objects.filter(event_id__in=list_by_event.keys(), secret=raw_barcode))
if len(revoked_matches) == 0: if len(revoked_matches) == 0:
if not simulate: checkinlists[0].event.log_action('pretix.event.checkin.unknown', data={
checkinlists[0].event.log_action('pretix.event.checkin.unknown', data={ 'datetime': datetime,
'datetime': datetime, 'type': checkin_type,
'type': checkin_type, 'list': checkinlists[0].pk,
'list': checkinlists[0].pk, 'barcode': raw_barcode,
'barcode': raw_barcode, 'searched_lists': [cl.pk for cl in checkinlists]
'searched_lists': [cl.pk for cl in checkinlists] }, user=user, auth=auth)
}, user=user, auth=auth)
for cl in checkinlists: for cl in checkinlists:
for k, s in cl.event.ticket_secret_generators.items(): for k, s in cl.event.ticket_secret_generators.items():
@@ -509,13 +492,12 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
except: except:
pass pass
if not simulate: Checkin.objects.create(
Checkin.objects.create( position=None,
position=None, successful=False,
successful=False, error_reason=Checkin.REASON_INVALID,
error_reason=Checkin.REASON_INVALID, **common_checkin_args,
**common_checkin_args, )
)
if force and legacy_url_support and isinstance(auth, Device): if force and legacy_url_support and isinstance(auth, Device):
# There was a bug in libpretixsync: If you scanned a ticket in offline mode that was # There was a bug in libpretixsync: If you scanned a ticket in offline mode that was
@@ -557,20 +539,19 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
from_revoked_secret = True from_revoked_secret = True
else: else:
op = revoked_matches[0].position op = revoked_matches[0].position
if not simulate: op.order.log_action('pretix.event.checkin.revoked', data={
op.order.log_action('pretix.event.checkin.revoked', data={ 'datetime': datetime,
'datetime': datetime, 'type': checkin_type,
'type': checkin_type, 'list': list_by_event[revoked_matches[0].event_id].pk,
'list': list_by_event[revoked_matches[0].event_id].pk, 'barcode': raw_barcode
'barcode': raw_barcode }, user=user, auth=auth)
}, user=user, auth=auth) common_checkin_args['list'] = list_by_event[revoked_matches[0].event_id]
common_checkin_args['list'] = list_by_event[revoked_matches[0].event_id] Checkin.objects.create(
Checkin.objects.create( position=op,
position=op, successful=False,
successful=False, error_reason=Checkin.REASON_REVOKED,
error_reason=Checkin.REASON_REVOKED, **common_checkin_args
**common_checkin_args )
)
return Response({ return Response({
'status': 'error', 'status': 'error',
'reason': Checkin.REASON_REVOKED, 'reason': Checkin.REASON_REVOKED,
@@ -607,25 +588,24 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
# We choose the first match (regardless of product) for the logging since it's most likely to be the # We choose the first match (regardless of product) for the logging since it's most likely to be the
# base product according to our order_by above. # base product according to our order_by above.
op = op_candidates[0] op = op_candidates[0]
if not simulate: op.order.log_action('pretix.event.checkin.denied', data={
op.order.log_action('pretix.event.checkin.denied', data={ 'position': op.id,
'position': op.id, 'positionid': op.positionid,
'positionid': op.positionid, 'errorcode': Checkin.REASON_AMBIGUOUS,
'errorcode': Checkin.REASON_AMBIGUOUS, 'reason_explanation': None,
'reason_explanation': None, 'force': force,
'force': force, 'datetime': datetime,
'datetime': datetime, 'type': checkin_type,
'type': checkin_type, 'list': list_by_event[op.order.event_id].pk,
'list': list_by_event[op.order.event_id].pk, }, user=user, auth=auth)
}, user=user, auth=auth) common_checkin_args['list'] = list_by_event[op.order.event_id]
common_checkin_args['list'] = list_by_event[op.order.event_id] Checkin.objects.create(
Checkin.objects.create( position=op,
position=op, successful=False,
successful=False, error_reason=Checkin.REASON_AMBIGUOUS,
error_reason=Checkin.REASON_AMBIGUOUS, error_explanation=None,
error_explanation=None, **common_checkin_args,
**common_checkin_args, )
)
return Response({ return Response({
'status': 'error', 'status': 'error',
'reason': Checkin.REASON_AMBIGUOUS, 'reason': Checkin.REASON_AMBIGUOUS,
@@ -672,8 +652,6 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
raw_barcode=raw_barcode_for_checkin, raw_barcode=raw_barcode_for_checkin,
raw_source_type=source_type, raw_source_type=source_type,
from_revoked_secret=from_revoked_secret, from_revoked_secret=from_revoked_secret,
simulate=simulate,
gate=gate,
) )
except RequiredQuestionsError as e: except RequiredQuestionsError as e:
return Response({ return Response({
@@ -686,24 +664,23 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
'list': MiniCheckinListSerializer(list_by_event[op.order.event_id]).data, 'list': MiniCheckinListSerializer(list_by_event[op.order.event_id]).data,
}, status=400) }, status=400)
except CheckInError as e: except CheckInError as e:
if not simulate: op.order.log_action('pretix.event.checkin.denied', data={
op.order.log_action('pretix.event.checkin.denied', data={ 'position': op.id,
'position': op.id, 'positionid': op.positionid,
'positionid': op.positionid, 'errorcode': e.code,
'errorcode': e.code, 'reason_explanation': e.reason,
'reason_explanation': e.reason, 'force': force,
'force': force, 'datetime': datetime,
'datetime': datetime, 'type': checkin_type,
'type': checkin_type, 'list': list_by_event[op.order.event_id].pk,
'list': list_by_event[op.order.event_id].pk, }, user=user, auth=auth)
}, user=user, auth=auth) Checkin.objects.create(
Checkin.objects.create( position=op,
position=op, successful=False,
successful=False, error_reason=e.code,
error_reason=e.code, error_explanation=e.reason,
error_explanation=e.reason, **common_checkin_args,
**common_checkin_args, )
)
return Response({ return Response({
'status': 'error', 'status': 'error',
'reason': e.code, 'reason': e.code,
@@ -772,7 +749,6 @@ class CheckinListPositionViewSet(viewsets.ReadOnlyModelViewSet):
def get_filterset_kwargs(self): def get_filterset_kwargs(self):
return { return {
'checkinlist': self.checkinlist, 'checkinlist': self.checkinlist,
'gate': self.request.auth.gate if isinstance(self.request.auth, Device) else None,
} }
@cached_property @cached_property

View File

@@ -19,12 +19,8 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see # You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>. # <https://www.gnu.org/licenses/>.
# #
import base64
import logging import logging
from cryptography.hazmat.backends.openssl.backend import Backend
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.serialization import load_pem_public_key
from django.db.models import Exists, OuterRef, Q from django.db.models import Exists, OuterRef, Q
from django.db.models.functions import Coalesce from django.db.models.functions import Coalesce
from django.utils.timezone import now from django.utils.timezone import now
@@ -38,8 +34,6 @@ from pretix.api.auth.device import DeviceTokenAuthentication
from pretix.api.views.version import numeric_version from pretix.api.views.version import numeric_version
from pretix.base.models import CheckinList, Device, SubEvent from pretix.base.models import CheckinList, Device, SubEvent
from pretix.base.models.devices import Gate, generate_api_token from pretix.base.models.devices import Gate, generate_api_token
from pretix.base.models.media import MediumKeySet
from pretix.base.services.media import get_keysets_for_organizer
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -48,73 +42,17 @@ class InitializationRequestSerializer(serializers.Serializer):
token = serializers.CharField(max_length=190) token = serializers.CharField(max_length=190)
hardware_brand = serializers.CharField(max_length=190) hardware_brand = serializers.CharField(max_length=190)
hardware_model = serializers.CharField(max_length=190) hardware_model = serializers.CharField(max_length=190)
os_name = serializers.CharField(max_length=190, required=False, allow_null=True)
os_version = serializers.CharField(max_length=190, required=False, allow_null=True)
software_brand = serializers.CharField(max_length=190) software_brand = serializers.CharField(max_length=190)
software_version = serializers.CharField(max_length=190) software_version = serializers.CharField(max_length=190)
info = serializers.JSONField(required=False, allow_null=True) info = serializers.JSONField(required=False, allow_null=True)
rsa_pubkey = serializers.CharField(required=False, allow_null=True)
def validate(self, attrs):
if attrs.get('rsa_pubkey'):
try:
load_pem_public_key(
attrs['rsa_pubkey'].encode(), Backend()
)
except:
raise ValidationError({'rsa_pubkey': ['Not a valid public key.']})
return attrs
class UpdateRequestSerializer(serializers.Serializer): class UpdateRequestSerializer(serializers.Serializer):
hardware_brand = serializers.CharField(max_length=190) hardware_brand = serializers.CharField(max_length=190)
hardware_model = serializers.CharField(max_length=190) hardware_model = serializers.CharField(max_length=190)
os_name = serializers.CharField(max_length=190, required=False, allow_null=True)
os_version = serializers.CharField(max_length=190, required=False, allow_null=True)
software_brand = serializers.CharField(max_length=190) software_brand = serializers.CharField(max_length=190)
software_version = serializers.CharField(max_length=190) software_version = serializers.CharField(max_length=190)
info = serializers.JSONField(required=False, allow_null=True) info = serializers.JSONField(required=False, allow_null=True)
rsa_pubkey = serializers.CharField(required=False, allow_null=True)
def validate(self, attrs):
if attrs.get('rsa_pubkey'):
try:
load_pem_public_key(
attrs['rsa_pubkey'].encode(), Backend()
)
except:
raise ValidationError({'rsa_pubkey': ['Not a valid public key.']})
return attrs
class RSAEncryptedField(serializers.Field):
def to_representation(self, value):
public_key = load_pem_public_key(
self.context['device'].rsa_pubkey.encode(), Backend()
)
cipher_text = public_key.encrypt(
# RSA/ECB/PKCS1Padding
value,
padding.PKCS1v15()
)
return base64.b64encode(cipher_text).decode()
class MediumKeySetSerializer(serializers.ModelSerializer):
uid_key = RSAEncryptedField(read_only=True)
diversification_key = RSAEncryptedField(read_only=True)
organizer = serializers.SlugRelatedField(slug_field='slug', read_only=True)
class Meta:
model = MediumKeySet
fields = [
'public_id',
'organizer',
'active',
'media_type',
'uid_key',
'diversification_key',
]
class GateSerializer(serializers.ModelSerializer): class GateSerializer(serializers.ModelSerializer):
@@ -161,12 +99,9 @@ class InitializeView(APIView):
device.initialized = now() device.initialized = now()
device.hardware_brand = serializer.validated_data.get('hardware_brand') device.hardware_brand = serializer.validated_data.get('hardware_brand')
device.hardware_model = serializer.validated_data.get('hardware_model') device.hardware_model = serializer.validated_data.get('hardware_model')
device.os_name = serializer.validated_data.get('os_name')
device.os_version = serializer.validated_data.get('os_version')
device.software_brand = serializer.validated_data.get('software_brand') device.software_brand = serializer.validated_data.get('software_brand')
device.software_version = serializer.validated_data.get('software_version') device.software_version = serializer.validated_data.get('software_version')
device.info = serializer.validated_data.get('info') device.info = serializer.validated_data.get('info')
device.rsa_pubkey = serializer.validated_data.get('rsa_pubkey')
device.api_token = generate_api_token() device.api_token = generate_api_token()
device.save() device.save()
@@ -185,15 +120,8 @@ class UpdateView(APIView):
device = request.auth device = request.auth
device.hardware_brand = serializer.validated_data.get('hardware_brand') device.hardware_brand = serializer.validated_data.get('hardware_brand')
device.hardware_model = serializer.validated_data.get('hardware_model') device.hardware_model = serializer.validated_data.get('hardware_model')
device.os_name = serializer.validated_data.get('os_name')
device.os_version = serializer.validated_data.get('os_version')
device.software_brand = serializer.validated_data.get('software_brand') device.software_brand = serializer.validated_data.get('software_brand')
device.software_version = serializer.validated_data.get('software_version') device.software_version = serializer.validated_data.get('software_version')
if serializer.validated_data.get('rsa_pubkey') and serializer.validated_data.get('rsa_pubkey') != device.rsa_pubkey:
if device.rsa_pubkey:
raise ValidationError({'rsa_pubkey': ['You cannot change the rsa_pubkey of the device once it is set.']})
else:
device.rsa_pubkey = serializer.validated_data.get('rsa_pubkey')
device.info = serializer.validated_data.get('info') device.info = serializer.validated_data.get('info')
device.save() device.save()
device.log_action('pretix.device.updated', data=serializer.validated_data, auth=device) device.log_action('pretix.device.updated', data=serializer.validated_data, auth=device)
@@ -241,12 +169,8 @@ class InfoView(APIView):
'pretix': __version__, 'pretix': __version__,
'pretix_numeric': numeric_version(__version__), 'pretix_numeric': numeric_version(__version__),
} }
}, }
'medium_key_sets': MediumKeySetSerializer(
get_keysets_for_organizer(device.organizer),
many=True,
context={'device': request.auth}
).data if device.rsa_pubkey else []
}) })

View File

@@ -71,8 +71,6 @@ with scopes_disabled():
ends_after = django_filters.rest_framework.IsoDateTimeFilter(method='ends_after_qs') ends_after = django_filters.rest_framework.IsoDateTimeFilter(method='ends_after_qs')
sales_channel = django_filters.rest_framework.CharFilter(method='sales_channel_qs') sales_channel = django_filters.rest_framework.CharFilter(method='sales_channel_qs')
search = django_filters.rest_framework.CharFilter(method='search_qs') search = django_filters.rest_framework.CharFilter(method='search_qs')
date_from = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
date_to = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
class Meta: class Meta:
model = Event model = Event
@@ -338,8 +336,6 @@ with scopes_disabled():
modified_since = django_filters.IsoDateTimeFilter(field_name='last_modified', lookup_expr='gte') modified_since = django_filters.IsoDateTimeFilter(field_name='last_modified', lookup_expr='gte')
sales_channel = django_filters.rest_framework.CharFilter(method='sales_channel_qs') sales_channel = django_filters.rest_framework.CharFilter(method='sales_channel_qs')
search = django_filters.rest_framework.CharFilter(method='search_qs') search = django_filters.rest_framework.CharFilter(method='search_qs')
date_from = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
date_to = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
class Meta: class Meta:
model = SubEvent model = SubEvent
@@ -381,29 +377,16 @@ with scopes_disabled():
| Q(location__icontains=i18ncomp(value)) | Q(location__icontains=i18ncomp(value))
) )
class OrganizerSubEventFilter(SubEventFilter):
def search_qs(self, queryset, name, value):
return queryset.filter(
Q(name__icontains=i18ncomp(value))
| Q(event__slug__icontains=value)
| Q(location__icontains=i18ncomp(value))
)
class SubEventViewSet(ConditionalListView, viewsets.ModelViewSet): class SubEventViewSet(ConditionalListView, viewsets.ModelViewSet):
serializer_class = SubEventSerializer serializer_class = SubEventSerializer
queryset = SubEvent.objects.none() queryset = SubEvent.objects.none()
write_permission = 'can_change_event_settings' write_permission = 'can_change_event_settings'
filter_backends = (DjangoFilterBackend, TotalOrderingFilter) filter_backends = (DjangoFilterBackend, TotalOrderingFilter)
filterset_class = SubEventFilter
ordering = ('date_from',) ordering = ('date_from',)
ordering_fields = ('id', 'date_from', 'last_modified') ordering_fields = ('id', 'date_from', 'last_modified')
@property
def filterset_class(self):
if getattr(self.request, 'event', None):
return SubEventFilter
return OrganizerSubEventFilter
def get_queryset(self): def get_queryset(self):
if getattr(self.request, 'event', None): if getattr(self.request, 'event', None):
qs = self.request.event.subevents qs = self.request.event.subevents
@@ -428,7 +411,6 @@ class SubEventViewSet(ConditionalListView, viewsets.ModelViewSet):
'subeventitem_set', 'subeventitem_set',
'subeventitemvariation_set', 'subeventitemvariation_set',
'meta_values', 'meta_values',
'meta_values__property',
Prefetch( Prefetch(
'seat_category_mappings', 'seat_category_mappings',
to_attr='_seat_category_mappings', to_attr='_seat_category_mappings',

View File

@@ -133,12 +133,7 @@ class EventExportersViewSet(ExportersMixin, viewsets.ViewSet):
def exporters(self): def exporters(self):
exporters = [] exporters = []
responses = register_data_exporters.send(self.request.event) responses = register_data_exporters.send(self.request.event)
raw_exporters = [response(self.request.event, self.request.organizer) for r, response in responses if response] for ex in sorted([response(self.request.event, self.request.organizer) for r, response in responses if response], key=lambda ex: str(ex.verbose_name)):
raw_exporters = [
ex for ex in raw_exporters
if ex.available_for_user(self.request.user if self.request.user and self.request.user.is_authenticated else None)
]
for ex in sorted(raw_exporters, key=lambda ex: str(ex.verbose_name)):
ex._serializer = JobRunSerializer(exporter=ex) ex._serializer = JobRunSerializer(exporter=ex)
exporters.append(ex) exporters.append(ex)
return exporters return exporters
@@ -171,7 +166,7 @@ class OrganizerExportersViewSet(ExportersMixin, viewsets.ViewSet):
if ( if (
not isinstance(ex, OrganizerLevelExportMixin) or not isinstance(ex, OrganizerLevelExportMixin) or
perm_holder.has_organizer_permission(self.request.organizer, ex.organizer_required_permission, self.request) perm_holder.has_organizer_permission(self.request.organizer, ex.organizer_required_permission, self.request)
) and ex.available_for_user(self.request.user if self.request.user and self.request.user.is_authenticated else None) )
] ]
for ex in sorted(raw_exporters, key=lambda ex: str(ex.verbose_name)): for ex in sorted(raw_exporters, key=lambda ex: str(ex.verbose_name)):
ex._serializer = JobRunSerializer(exporter=ex, events=events) ex._serializer = JobRunSerializer(exporter=ex, events=events)

View File

@@ -39,8 +39,7 @@ from pretix.api.serializers.media import (
) )
from pretix.base.media import MEDIA_TYPES from pretix.base.media import MEDIA_TYPES
from pretix.base.models import ( from pretix.base.models import (
Checkin, GiftCard, GiftCardAcceptance, GiftCardTransaction, OrderPosition, Checkin, GiftCard, GiftCardTransaction, OrderPosition, ReusableMedium,
ReusableMedium,
) )
from pretix.helpers import OF_SELF from pretix.helpers import OF_SELF
from pretix.helpers.dicts import merge_dicts from pretix.helpers.dicts import merge_dicts
@@ -104,12 +103,6 @@ class ReusableMediaViewSet(viewsets.ModelViewSet):
auth=self.request.auth, auth=self.request.auth,
data=merge_dicts(self.request.data, {'id': inst.pk}) data=merge_dicts(self.request.data, {'id': inst.pk})
) )
mt = MEDIA_TYPES.get(serializer.validated_data["type"])
if mt:
m = mt.handle_new(self.request.organizer, inst, self.request.user, self.request.auth)
if m:
s = self.get_serializer(m)
return Response({"result": s.data})
@transaction.atomic() @transaction.atomic()
def perform_update(self, serializer): def perform_update(self, serializer):
@@ -142,28 +135,12 @@ class ReusableMediaViewSet(viewsets.ModelViewSet):
s = self.get_serializer(m) s = self.get_serializer(m)
return Response({"result": s.data}) return Response({"result": s.data})
except ReusableMedium.DoesNotExist: except ReusableMedium.DoesNotExist:
try: mt = MEDIA_TYPES.get(s.validated_data["type"])
with scopes_disabled(): if mt:
m = ReusableMedium.objects.get( m = mt.handle_unknown(request.organizer, s.validated_data["identifier"], request.user, request.auth)
organizer__in=GiftCardAcceptance.objects.filter( if m:
acceptor=request.organizer, s = self.get_serializer(m)
active=True, return Response({"result": s.data})
reusable_media=True,
).values_list('issuer', flat=True),
type=s.validated_data["type"],
identifier=s.validated_data["identifier"],
)
m.linked_orderposition = None # not relevant for cross-organizer
m.customer = None # not relevant for cross-organizer
s = self.get_serializer(m)
return Response({"result": s.data})
except ReusableMedium.DoesNotExist:
mt = MEDIA_TYPES.get(s.validated_data["type"])
if mt:
m = mt.handle_unknown(request.organizer, s.validated_data["identifier"], request.user, request.auth)
if m:
s = self.get_serializer(m)
return Response({"result": s.data})
return Response({"result": None}) return Response({"result": None})

View File

@@ -23,10 +23,9 @@ import datetime
import mimetypes import mimetypes
import os import os
from decimal import Decimal from decimal import Decimal
from zoneinfo import ZoneInfo
import django_filters import django_filters
from django.conf import settings import pytz
from django.db import transaction from django.db import transaction
from django.db.models import ( from django.db.models import (
Exists, F, OuterRef, Prefetch, Q, Subquery, prefetch_related_objects, Exists, F, OuterRef, Prefetch, Q, Subquery, prefetch_related_objects,
@@ -45,7 +44,6 @@ from rest_framework.exceptions import (
APIException, NotFound, PermissionDenied, ValidationError, APIException, NotFound, PermissionDenied, ValidationError,
) )
from rest_framework.mixins import CreateModelMixin from rest_framework.mixins import CreateModelMixin
from rest_framework.permissions import SAFE_METHODS
from rest_framework.response import Response from rest_framework.response import Response
from pretix.api.models import OAuthAccessToken from pretix.api.models import OAuthAccessToken
@@ -117,16 +115,12 @@ with scopes_disabled():
@scopes_disabled() @scopes_disabled()
def subevent_after_qs(self, qs, name, value): def subevent_after_qs(self, qs, name, value):
if getattr(self.request, 'event', None):
subevents = self.request.event.subevents
else:
subevents = SubEvent.objects.filter(event__organizer=self.request.organizer)
qs = qs.filter( qs = qs.filter(
pk__in=Subquery( pk__in=Subquery(
OrderPosition.all.filter( OrderPosition.all.filter(
subevent_id__in=subevents.filter( subevent_id__in=SubEvent.objects.filter(
Q(date_to__gt=value) | Q(date_from__gt=value, date_to__isnull=True), Q(date_to__gt=value) | Q(date_from__gt=value, date_to__isnull=True),
event=self.request.event
).values_list('id'), ).values_list('id'),
).values_list('order_id') ).values_list('order_id')
) )
@@ -134,16 +128,12 @@ with scopes_disabled():
return qs return qs
def subevent_before_qs(self, qs, name, value): def subevent_before_qs(self, qs, name, value):
if getattr(self.request, 'event', None):
subevents = self.request.event.subevents
else:
subevents = SubEvent.objects.filter(event__organizer=self.request.organizer)
qs = qs.filter( qs = qs.filter(
pk__in=Subquery( pk__in=Subquery(
OrderPosition.all.filter( OrderPosition.all.filter(
subevent_id__in=subevents.filter( subevent_id__in=SubEvent.objects.filter(
Q(date_from__lt=value), Q(date_from__lt=value),
event=self.request.event
).values_list('id'), ).values_list('id'),
).values_list('order_id') ).values_list('order_id')
) )
@@ -195,7 +185,7 @@ with scopes_disabled():
) )
class OrderViewSetMixin: class OrderViewSet(viewsets.ModelViewSet):
serializer_class = OrderSerializer serializer_class = OrderSerializer
queryset = Order.objects.none() queryset = Order.objects.none()
filter_backends = (DjangoFilterBackend, TotalOrderingFilter) filter_backends = (DjangoFilterBackend, TotalOrderingFilter)
@@ -203,12 +193,19 @@ class OrderViewSetMixin:
ordering_fields = ('datetime', 'code', 'status', 'last_modified') ordering_fields = ('datetime', 'code', 'status', 'last_modified')
filterset_class = OrderFilter filterset_class = OrderFilter
lookup_field = 'code' lookup_field = 'code'
permission = 'can_view_orders'
write_permission = 'can_change_orders'
def get_base_queryset(self): def get_serializer_context(self):
raise NotImplementedError() ctx = super().get_serializer_context()
ctx['event'] = self.request.event
ctx['pdf_data'] = self.request.query_params.get('pdf_data', 'false') == 'true'
ctx['exclude'] = self.request.query_params.getlist('exclude')
ctx['include'] = self.request.query_params.getlist('include')
return ctx
def get_queryset(self): def get_queryset(self):
qs = self.get_base_queryset() qs = self.request.event.orders
if 'fees' not in self.request.GET.getlist('exclude'): if 'fees' not in self.request.GET.getlist('exclude'):
if self.request.query_params.get('include_canceled_fees', 'false') == 'true': if self.request.query_params.get('include_canceled_fees', 'false') == 'true':
fqs = OrderFee.all fqs = OrderFee.all
@@ -230,12 +227,11 @@ class OrderViewSetMixin:
opq = OrderPosition.all opq = OrderPosition.all
else: else:
opq = OrderPosition.objects opq = OrderPosition.objects
if request.query_params.get('pdf_data', 'false') == 'true' and getattr(request, 'event', None): if request.query_params.get('pdf_data', 'false') == 'true':
prefetch_related_objects([request.organizer], 'meta_properties') prefetch_related_objects([request.organizer], 'meta_properties')
prefetch_related_objects( prefetch_related_objects(
[request.event], [request.event],
Prefetch('meta_values', queryset=EventMetaValue.objects.select_related('property'), Prefetch('meta_values', queryset=EventMetaValue.objects.select_related('property'), to_attr='meta_values_cached'),
to_attr='meta_values_cached'),
'questions', 'questions',
'item_meta_properties', 'item_meta_properties',
) )
@@ -270,12 +266,13 @@ class OrderViewSetMixin:
) )
) )
def get_serializer_context(self): def _get_output_provider(self, identifier):
ctx = super().get_serializer_context() responses = register_ticket_outputs.send(self.request.event)
ctx['exclude'] = self.request.query_params.getlist('exclude') for receiver, response in responses:
ctx['include'] = self.request.query_params.getlist('include') prov = response(self.request.event)
ctx['pdf_data'] = False if prov.identifier == identifier:
return ctx return prov
raise NotFound('Unknown output provider.')
@scopes_disabled() # we are sure enough that get_queryset() is correct, so we save some perforamnce @scopes_disabled() # we are sure enough that get_queryset() is correct, so we save some perforamnce
def list(self, request, **kwargs): def list(self, request, **kwargs):
@@ -292,45 +289,6 @@ class OrderViewSetMixin:
serializer = self.get_serializer(queryset, many=True) serializer = self.get_serializer(queryset, many=True)
return Response(serializer.data, headers={'X-Page-Generated': date}) return Response(serializer.data, headers={'X-Page-Generated': date})
class OrganizerOrderViewSet(OrderViewSetMixin, viewsets.ReadOnlyModelViewSet):
def get_base_queryset(self):
perm = "can_view_orders" if self.request.method in SAFE_METHODS else "can_change_orders"
if isinstance(self.request.auth, (TeamAPIToken, Device)):
return Order.objects.filter(
event__organizer=self.request.organizer,
event__in=self.request.auth.get_events_with_permission(perm, request=self.request)
)
elif self.request.user.is_authenticated:
return Order.objects.filter(
event__organizer=self.request.organizer,
event__in=self.request.user.get_events_with_permission(perm, request=self.request)
)
else:
raise PermissionDenied()
class EventOrderViewSet(OrderViewSetMixin, viewsets.ModelViewSet):
permission = 'can_view_orders'
write_permission = 'can_change_orders'
def get_serializer_context(self):
ctx = super().get_serializer_context()
ctx['event'] = self.request.event
ctx['pdf_data'] = self.request.query_params.get('pdf_data', 'false') == 'true'
return ctx
def get_base_queryset(self):
return self.request.event.orders
def _get_output_provider(self, identifier):
responses = register_ticket_outputs.send(self.request.event)
for receiver, response in responses:
prov = response(self.request.event)
if prov.identifier == identifier:
return prov
raise NotFound('Unknown output provider.')
@action(detail=True, url_name='download', url_path='download/(?P<output>[^/]+)') @action(detail=True, url_name='download', url_path='download/(?P<output>[^/]+)')
def download(self, request, output, **kwargs): def download(self, request, output, **kwargs):
provider = self._get_output_provider(output) provider = self._get_output_provider(output)
@@ -654,7 +612,7 @@ class EventOrderViewSet(OrderViewSetMixin, viewsets.ModelViewSet):
status=status.HTTP_400_BAD_REQUEST status=status.HTTP_400_BAD_REQUEST
) )
tz = ZoneInfo(self.request.event.settings.timezone) tz = pytz.timezone(self.request.event.settings.timezone)
new_date = make_aware(datetime.datetime.combine( new_date = make_aware(datetime.datetime.combine(
new_date, new_date,
datetime.time(hour=23, minute=59, second=59) datetime.time(hour=23, minute=59, second=59)
@@ -703,16 +661,7 @@ class EventOrderViewSet(OrderViewSetMixin, viewsets.ModelViewSet):
with language(order.locale, self.request.event.settings.region): with language(order.locale, self.request.event.settings.region):
payment = order.payments.last() payment = order.payments.last()
# OrderCreateSerializer creates at most one payment
if payment and payment.state == OrderPayment.PAYMENT_STATE_CONFIRMED:
order.log_action(
'pretix.event.order.payment.confirmed', {
'local_id': payment.local_id,
'provider': payment.provider,
},
user=request.user if request.user.is_authenticated else None,
auth=request.auth,
)
order_placed.send(self.request.event, order=order) order_placed.send(self.request.event, order=order)
if order.status == Order.STATUS_PAID: if order.status == Order.STATUS_PAID:
order_paid.send(self.request.event, order=order) order_paid.send(self.request.event, order=order)
@@ -987,7 +936,6 @@ with scopes_disabled():
| Q(addon_to__attendee_email__icontains=value) | Q(addon_to__attendee_email__icontains=value)
| Q(order__code__istartswith=value) | Q(order__code__istartswith=value)
| Q(order__invoice_address__name_cached__icontains=value) | Q(order__invoice_address__name_cached__icontains=value)
| Q(order__invoice_address__company__icontains=value)
| Q(order__email__icontains=value) | Q(order__email__icontains=value)
| Q(pk__in=matching_media) | Q(pk__in=matching_media)
) )
@@ -1233,7 +1181,7 @@ class OrderPositionViewSet(viewsets.ModelViewSet):
ftype, ignored = mimetypes.guess_type(image_file.name) ftype, ignored = mimetypes.guess_type(image_file.name)
extension = os.path.basename(image_file.name).split('.')[-1] extension = os.path.basename(image_file.name).split('.')[-1]
else: else:
img = Image.open(image_file, formats=settings.PILLOW_FORMATS_QUESTIONS_IMAGE) img = Image.open(image_file)
ftype = Image.MIME[img.format] ftype = Image.MIME[img.format]
extensions = { extensions = {
'GIF': 'gif', 'TIFF': 'tif', 'BMP': 'bmp', 'JPEG': 'jpg', 'PNG': 'png' 'GIF': 'gif', 'TIFF': 'tif', 'BMP': 'bmp', 'JPEG': 'jpg', 'PNG': 'png'
@@ -1824,24 +1772,11 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
write_permission = 'can_change_orders' write_permission = 'can_change_orders'
def get_queryset(self): def get_queryset(self):
perm = "can_view_orders" if self.request.method in SAFE_METHODS else "can_change_orders" return self.request.event.invoices.prefetch_related('lines').select_related('order', 'refers').annotate(
if getattr(self.request, 'event', None):
qs = self.request.event.invoices
elif isinstance(self.request.auth, (TeamAPIToken, Device)):
qs = Invoice.objects.filter(
event__organizer=self.request.organizer,
event__in=self.request.auth.get_events_with_permission(perm, request=self.request)
)
elif self.request.user.is_authenticated:
qs = Invoice.objects.filter(
event__organizer=self.request.organizer,
event__in=self.request.user.get_events_with_permission(perm, request=self.request)
)
return qs.prefetch_related('lines').select_related('order', 'refers').annotate(
nr=Concat('prefix', 'invoice_no') nr=Concat('prefix', 'invoice_no')
) )
@action(detail=True) @action(detail=True, )
def download(self, request, **kwargs): def download(self, request, **kwargs):
invoice = self.get_object() invoice = self.get_object()
@@ -1860,7 +1795,7 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
return resp return resp
@action(detail=True, methods=['POST']) @action(detail=True, methods=['POST'])
def regenerate(self, request, **kwargs): def regenerate(self, request, **kwarts):
inv = self.get_object() inv = self.get_object()
if inv.canceled: if inv.canceled:
raise ValidationError('The invoice has already been canceled.') raise ValidationError('The invoice has already been canceled.')
@@ -1870,7 +1805,7 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
raise PermissionDenied('The invoice file is no longer stored on the server.') raise PermissionDenied('The invoice file is no longer stored on the server.')
elif inv.sent_to_organizer: elif inv.sent_to_organizer:
raise PermissionDenied('The invoice file has already been exported.') raise PermissionDenied('The invoice file has already been exported.')
elif now().astimezone(inv.event.timezone).date() - inv.date > datetime.timedelta(days=1): elif now().astimezone(self.request.event.timezone).date() - inv.date > datetime.timedelta(days=1):
raise PermissionDenied('The invoice file is too old to be regenerated.') raise PermissionDenied('The invoice file is too old to be regenerated.')
else: else:
inv = regenerate_invoice(inv) inv = regenerate_invoice(inv)
@@ -1885,7 +1820,7 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
return Response(status=204) return Response(status=204)
@action(detail=True, methods=['POST']) @action(detail=True, methods=['POST'])
def reissue(self, request, **kwargs): def reissue(self, request, **kwarts):
inv = self.get_object() inv = self.get_object()
if inv.canceled: if inv.canceled:
raise ValidationError('The invoice has already been canceled.') raise ValidationError('The invoice has already been canceled.')

View File

@@ -19,6 +19,8 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see # You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>. # <https://www.gnu.org/licenses/>.
# #
import contextlib
from django.db import transaction from django.db import transaction
from django.db.models import F, Q from django.db.models import F, Q
from django.utils.timezone import now from django.utils.timezone import now
@@ -67,9 +69,30 @@ class VoucherViewSet(viewsets.ModelViewSet):
def get_queryset(self): def get_queryset(self):
return self.request.event.vouchers.select_related('seat').all() return self.request.event.vouchers.select_related('seat').all()
@transaction.atomic() def _predict_quota_check(self, data, instance):
# This method predicts if Voucher.clean_quota_needs_checking
# *migh* later require a quota check. It is only approximate
# and returns True a little too often. The point is to avoid
# locks when we know we won't need them.
if 'allow_ignore_quota' in data and data.get('allow_ignore_quota'):
return False
if instance and 'allow_ignore_quota' not in data and instance.allow_ignore_quota:
return False
if 'block_quota' in data and not data.get('block_quota'):
return False
if instance and 'block_quota' not in data and not instance.block_quota:
return False
return True
def create(self, request, *args, **kwargs): def create(self, request, *args, **kwargs):
return super().create(request, *args, **kwargs) if self._predict_quota_check(request.data, None):
lockfn = request.event.lock
else:
lockfn = contextlib.suppress # noop context manager
with lockfn():
return super().create(request, *args, **kwargs)
def perform_create(self, serializer): def perform_create(self, serializer):
serializer.save(event=self.request.event) serializer.save(event=self.request.event)
@@ -85,9 +108,13 @@ class VoucherViewSet(viewsets.ModelViewSet):
ctx['event'] = self.request.event ctx['event'] = self.request.event
return ctx return ctx
@transaction.atomic()
def update(self, request, *args, **kwargs): def update(self, request, *args, **kwargs):
return super().update(request, *args, **kwargs) if self._predict_quota_check(request.data, self.get_object()):
lockfn = request.event.lock
else:
lockfn = contextlib.suppress # noop context manager
with lockfn():
return super().update(request, *args, **kwargs)
def perform_update(self, serializer): def perform_update(self, serializer):
serializer.save(event=self.request.event) serializer.save(event=self.request.event)
@@ -113,18 +140,22 @@ class VoucherViewSet(viewsets.ModelViewSet):
super().perform_destroy(instance) super().perform_destroy(instance)
@action(detail=False, methods=['POST']) @action(detail=False, methods=['POST'])
@transaction.atomic()
def batch_create(self, request, *args, **kwargs): def batch_create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data, many=True) if any(self._predict_quota_check(d, None) for d in request.data):
serializer.is_valid(raise_exception=True) lockfn = request.event.lock
with transaction.atomic(): else:
serializer.save(event=self.request.event) lockfn = contextlib.suppress # noop context manager
for i, v in enumerate(serializer.instance): with lockfn():
v.log_action( serializer = self.get_serializer(data=request.data, many=True)
'pretix.voucher.added', serializer.is_valid(raise_exception=True)
user=self.request.user, with transaction.atomic():
auth=self.request.auth, serializer.save(event=self.request.event)
data=self.request.data[i] for i, v in enumerate(serializer.instance):
) v.log_action(
'pretix.voucher.added',
user=self.request.user,
auth=self.request.auth,
data=self.request.data[i]
)
headers = self.get_success_headers(serializer.data) headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers) return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)

View File

@@ -189,34 +189,6 @@ class ParametrizedOrderPositionWebhookEvent(ParametrizedOrderWebhookEvent):
return d return d
class ParametrizedWaitingListEntryWebhookEvent(ParametrizedWebhookEvent):
def build_payload(self, logentry: LogEntry):
# do not use content_object, this is also called in deletion
return {
'notification_id': logentry.pk,
'organizer': logentry.event.organizer.slug,
'event': logentry.event.slug,
'waitinglistentry': logentry.object_id,
'action': logentry.action_type,
}
class ParametrizedCustomerWebhookEvent(ParametrizedWebhookEvent):
def build_payload(self, logentry: LogEntry):
customer = logentry.content_object
if not customer:
return None
return {
'notification_id': logentry.pk,
'organizer': customer.organizer.slug,
'customer': customer.identifier,
'action': logentry.action_type,
}
@receiver(register_webhook_events, dispatch_uid="base_register_default_webhook_events") @receiver(register_webhook_events, dispatch_uid="base_register_default_webhook_events")
def register_default_webhook_events(sender, **kwargs): def register_default_webhook_events(sender, **kwargs):
return ( return (
@@ -349,34 +321,6 @@ def register_default_webhook_events(sender, **kwargs):
'pretix.event.testmode.deactivated', 'pretix.event.testmode.deactivated',
_('Test-Mode of shop has been deactivated'), _('Test-Mode of shop has been deactivated'),
), ),
ParametrizedWaitingListEntryWebhookEvent(
'pretix.event.orders.waitinglist.added',
_('Waiting list entry added'),
),
ParametrizedWaitingListEntryWebhookEvent(
'pretix.event.orders.waitinglist.changed',
_('Waiting list entry changed'),
),
ParametrizedWaitingListEntryWebhookEvent(
'pretix.event.orders.waitinglist.deleted',
_('Waiting list entry deleted'),
),
ParametrizedWaitingListEntryWebhookEvent(
'pretix.event.orders.waitinglist.voucher_assigned',
_('Waiting list entry received voucher'),
),
ParametrizedCustomerWebhookEvent(
'pretix.customer.created',
_('Customer account created'),
),
ParametrizedCustomerWebhookEvent(
'pretix.customer.changed',
_('Customer account changed'),
),
ParametrizedCustomerWebhookEvent(
'pretix.customer.anonymized',
_('Customer account anonymized'),
),
) )

View File

@@ -62,27 +62,27 @@ class NamespacedCache:
prefix = int(time.time()) prefix = int(time.time())
self.cache.set(self.prefixkey, prefix) self.cache.set(self.prefixkey, prefix)
def set(self, key: str, value: any, timeout: int=300): def set(self, key: str, value: str, timeout: int=300):
return self.cache.set(self._prefix_key(key), value, timeout) return self.cache.set(self._prefix_key(key), value, timeout)
def get(self, key: str) -> any: def get(self, key: str) -> str:
return self.cache.get(self._prefix_key(key, known_prefix=self._last_prefix)) return self.cache.get(self._prefix_key(key, known_prefix=self._last_prefix))
def get_or_set(self, key: str, default: Callable, timeout=300) -> any: def get_or_set(self, key: str, default: Callable, timeout=300) -> str:
return self.cache.get_or_set( return self.cache.get_or_set(
self._prefix_key(key, known_prefix=self._last_prefix), self._prefix_key(key, known_prefix=self._last_prefix),
default=default, default=default,
timeout=timeout timeout=timeout
) )
def get_many(self, keys: List[str]) -> Dict[str, any]: def get_many(self, keys: List[str]) -> Dict[str, str]:
values = self.cache.get_many([self._prefix_key(key) for key in keys]) values = self.cache.get_many([self._prefix_key(key) for key in keys])
newvalues = {} newvalues = {}
for k, v in values.items(): for k, v in values.items():
newvalues[self._strip_prefix(k)] = v newvalues[self._strip_prefix(k)] = v
return newvalues return newvalues
def set_many(self, values: Dict[str, any], timeout=300): def set_many(self, values: Dict[str, str], timeout=300):
newvalues = {} newvalues = {}
for k, v in values.items(): for k, v in values.items():
newvalues[self._prefix_key(k)] = v newvalues[self._prefix_key(k)] = v

View File

@@ -134,11 +134,8 @@ class TemplateBasedMailRenderer(BaseHTMLMailRenderer):
def template_name(self): def template_name(self):
raise NotImplementedError() raise NotImplementedError()
def compile_markdown(self, plaintext):
return markdown_compile_email(plaintext)
def render(self, plain_body: str, plain_signature: str, subject: str, order, position) -> str: def render(self, plain_body: str, plain_signature: str, subject: str, order, position) -> str:
body_md = self.compile_markdown(plain_body) body_md = markdown_compile_email(plain_body)
htmlctx = { htmlctx = {
'site': settings.PRETIX_INSTANCE_NAME, 'site': settings.PRETIX_INSTANCE_NAME,
'site_url': settings.SITE_URL, 'site_url': settings.SITE_URL,
@@ -156,7 +153,7 @@ class TemplateBasedMailRenderer(BaseHTMLMailRenderer):
if plain_signature: if plain_signature:
signature_md = plain_signature.replace('\n', '<br>\n') signature_md = plain_signature.replace('\n', '<br>\n')
signature_md = self.compile_markdown(signature_md) signature_md = markdown_compile_email(signature_md)
htmlctx['signature'] = signature_md htmlctx['signature'] = signature_md
if order: if order:
@@ -669,11 +666,6 @@ def base_placeholders(sender, **kwargs):
lambda waiting_list_entry: concatenation_for_salutation(waiting_list_entry.name_parts), lambda waiting_list_entry: concatenation_for_salutation(waiting_list_entry.name_parts),
_("Mr Doe"), _("Mr Doe"),
)) ))
ph.append(SimpleFunctionalMailTextPlaceholder(
"name", ["waiting_list_entry"],
lambda waiting_list_entry: waiting_list_entry.name or "",
_("Mr Doe"),
))
ph.append(SimpleFunctionalMailTextPlaceholder( ph.append(SimpleFunctionalMailTextPlaceholder(
"name_for_salutation", ["position_or_address"], "name_for_salutation", ["position_or_address"],
lambda position_or_address: concatenation_for_salutation(get_best_name(position_or_address, parts=True)), lambda position_or_address: concatenation_for_salutation(get_best_name(position_or_address, parts=True)),

View File

@@ -37,8 +37,8 @@ import tempfile
from collections import OrderedDict, namedtuple from collections import OrderedDict, namedtuple
from decimal import Decimal from decimal import Decimal
from typing import Optional, Tuple from typing import Optional, Tuple
from zoneinfo import ZoneInfo
import pytz
from defusedcsv import csv from defusedcsv import csv
from django import forms from django import forms
from django.conf import settings from django.conf import settings
@@ -68,7 +68,7 @@ class BaseExporter:
self.events = event self.events = event
self.event = None self.event = None
e = self.events.first() e = self.events.first()
self.timezone = e.timezone if e else ZoneInfo(settings.TIME_ZONE) self.timezone = e.timezone if e else pytz.timezone(settings.TIME_ZONE)
else: else:
self.events = Event.objects.filter(pk=event.pk) self.events = Event.objects.filter(pk=event.pk)
self.timezone = event.timezone self.timezone = event.timezone
@@ -140,7 +140,7 @@ class BaseExporter:
""" """
return {} return {}
def render(self, form_data: dict) -> Tuple[str, str, Optional[bytes]]: def render(self, form_data: dict) -> Tuple[str, str, bytes]:
""" """
Render the exported file and return a tuple consisting of a filename, a file type Render the exported file and return a tuple consisting of a filename, a file type
and file content. and file content.
@@ -157,13 +157,6 @@ class BaseExporter:
""" """
raise NotImplementedError() # NOQA raise NotImplementedError() # NOQA
def available_for_user(self, user) -> bool:
"""
Allows to do additional checks whether an exporter is available based on the user who calls it. Note that
``user`` may be ``None`` e.g. during API usage.
"""
return True
class OrganizerLevelExportMixin: class OrganizerLevelExportMixin:
@property @property

View File

@@ -34,8 +34,8 @@
from collections import OrderedDict from collections import OrderedDict
from decimal import Decimal from decimal import Decimal
from zoneinfo import ZoneInfo
import pytz
from django import forms from django import forms
from django.db.models import ( from django.db.models import (
Case, CharField, Count, DateTimeField, F, IntegerField, Max, Min, OuterRef, Case, CharField, Count, DateTimeField, F, IntegerField, Max, Min, OuterRef,
@@ -326,7 +326,7 @@ class OrderListExporter(MultiSheetListExporter):
yield self.ProgressSetTotal(total=qs.count()) yield self.ProgressSetTotal(total=qs.count())
for order in qs.order_by('datetime').iterator(): for order in qs.order_by('datetime').iterator():
tz = ZoneInfo(self.event_object_cache[order.event_id].settings.timezone) tz = pytz.timezone(self.event_object_cache[order.event_id].settings.timezone)
row = [ row = [
self.event_object_cache[order.event_id].slug, self.event_object_cache[order.event_id].slug,
@@ -459,7 +459,7 @@ class OrderListExporter(MultiSheetListExporter):
yield self.ProgressSetTotal(total=qs.count()) yield self.ProgressSetTotal(total=qs.count())
for op in qs.order_by('order__datetime').iterator(): for op in qs.order_by('order__datetime').iterator():
order = op.order order = op.order
tz = ZoneInfo(order.event.settings.timezone) tz = pytz.timezone(order.event.settings.timezone)
row = [ row = [
self.event_object_cache[order.event_id].slug, self.event_object_cache[order.event_id].slug,
order.code, order.code,
@@ -549,9 +549,7 @@ class OrderListExporter(MultiSheetListExporter):
headers.append(_('End date')) headers.append(_('End date'))
headers += [ headers += [
_('Product'), _('Product'),
_('Product ID'),
_('Variation'), _('Variation'),
_('Variation ID'),
_('Price'), _('Price'),
_('Tax rate'), _('Tax rate'),
_('Tax rule'), _('Tax rule'),
@@ -633,7 +631,7 @@ class OrderListExporter(MultiSheetListExporter):
for op in ops: for op in ops:
order = op.order order = op.order
tz = ZoneInfo(self.event_object_cache[order.event_id].settings.timezone) tz = pytz.timezone(self.event_object_cache[order.event_id].settings.timezone)
row = [ row = [
self.event_object_cache[order.event_id].slug, self.event_object_cache[order.event_id].slug,
order.code, order.code,
@@ -658,9 +656,7 @@ class OrderListExporter(MultiSheetListExporter):
row.append('') row.append('')
row += [ row += [
str(op.item), str(op.item),
str(op.item_id),
str(op.variation) if op.variation else '', str(op.variation) if op.variation else '',
str(op.variation_id) if op.variation_id else '',
op.price, op.price,
op.tax_rate, op.tax_rate,
str(op.tax_rule) if op.tax_rule else '', str(op.tax_rule) if op.tax_rule else '',
@@ -854,8 +850,6 @@ class TransactionListExporter(ListExporter):
_('Tax rule ID'), _('Tax rule ID'),
_('Tax rule'), _('Tax rule'),
_('Tax value'), _('Tax value'),
_('Gross total'),
_('Tax total'),
] ]
if form_data.get('_format') == 'xlsx': if form_data.get('_format') == 'xlsx':
@@ -907,8 +901,6 @@ class TransactionListExporter(ListExporter):
t.tax_rule_id or '', t.tax_rule_id or '',
str(t.tax_rule.internal_name or t.tax_rule.name) if t.tax_rule_id else '', str(t.tax_rule.internal_name or t.tax_rule.name) if t.tax_rule_id else '',
t.tax_value, t.tax_value,
t.price * t.count,
t.tax_value * t.count,
] ]
if form_data.get('_format') == 'xlsx': if form_data.get('_format') == 'xlsx':
@@ -1032,7 +1024,7 @@ class PaymentListExporter(ListExporter):
yield self.ProgressSetTotal(total=len(objs)) yield self.ProgressSetTotal(total=len(objs))
for obj in objs: for obj in objs:
tz = ZoneInfo(obj.order.event.settings.timezone) tz = pytz.timezone(obj.order.event.settings.timezone)
if isinstance(obj, OrderPayment) and obj.payment_date: if isinstance(obj, OrderPayment) and obj.payment_date:
d2 = obj.payment_date.astimezone(tz).date().strftime('%Y-%m-%d') d2 = obj.payment_date.astimezone(tz).date().strftime('%Y-%m-%d')
elif isinstance(obj, OrderRefund) and obj.execution_date: elif isinstance(obj, OrderRefund) and obj.execution_date:
@@ -1151,7 +1143,7 @@ class GiftcardTransactionListExporter(OrganizerLevelExportMixin, ListExporter):
def iterate_list(self, form_data): def iterate_list(self, form_data):
qs = GiftCardTransaction.objects.filter( qs = GiftCardTransaction.objects.filter(
card__issuer=self.organizer, card__issuer=self.organizer,
).order_by('datetime').select_related('card', 'order', 'order__event', 'acceptor') ).order_by('datetime').select_related('card', 'order', 'order__event')
if form_data.get('date_range'): if form_data.get('date_range'):
dt_start, dt_end = resolve_timeframe_to_datetime_start_inclusive_end_exclusive(now(), form_data['date_range'], self.timezone) dt_start, dt_end = resolve_timeframe_to_datetime_start_inclusive_end_exclusive(now(), form_data['date_range'], self.timezone)
@@ -1167,7 +1159,6 @@ class GiftcardTransactionListExporter(OrganizerLevelExportMixin, ListExporter):
_('Amount'), _('Amount'),
_('Currency'), _('Currency'),
_('Order'), _('Order'),
_('Organizer'),
] ]
yield headers yield headers
@@ -1179,7 +1170,6 @@ class GiftcardTransactionListExporter(OrganizerLevelExportMixin, ListExporter):
obj.value, obj.value,
obj.card.currency, obj.card.currency,
obj.order.full_code if obj.order else None, obj.order.full_code if obj.order else None,
str(obj.acceptor or ""),
] ]
yield row yield row
@@ -1213,7 +1203,7 @@ class GiftcardRedemptionListExporter(ListExporter):
yield headers yield headers
for obj in objs: for obj in objs:
tz = ZoneInfo(obj.order.event.settings.timezone) tz = pytz.timezone(obj.order.event.settings.timezone)
gc = GiftCard.objects.get(pk=obj.info_data.get('gift_card')) gc = GiftCard.objects.get(pk=obj.info_data.get('gift_card'))
row = [ row = [
obj.order.event.slug, obj.order.event.slug,

View File

@@ -20,8 +20,8 @@
# <https://www.gnu.org/licenses/>. # <https://www.gnu.org/licenses/>.
# #
from collections import OrderedDict from collections import OrderedDict
from zoneinfo import ZoneInfo
import pytz
from django import forms from django import forms
from django.db.models import F, Q from django.db.models import F, Q
from django.dispatch import receiver from django.dispatch import receiver
@@ -137,7 +137,7 @@ class WaitingListExporter(ListExporter):
# which event should be used to output dates in columns "Start date" and "End date" # which event should be used to output dates in columns "Start date" and "End date"
event_for_date_columns = entry.subevent if entry.subevent else entry.event event_for_date_columns = entry.subevent if entry.subevent else entry.event
tz = ZoneInfo(entry.event.settings.timezone) tz = pytz.timezone(entry.event.settings.timezone)
datetime_format = '%Y-%m-%d %H:%M:%S' datetime_format = '%Y-%m-%d %H:%M:%S'
row = [ row = [

View File

@@ -167,7 +167,6 @@ class SettingsForm(i18nfield.forms.I18nFormMixin, HierarkeyForm):
class PrefixForm(forms.Form): class PrefixForm(forms.Form):
prefix = forms.CharField(widget=forms.HiddenInput) prefix = forms.CharField(widget=forms.HiddenInput)
template_name = "django/forms/table.html"
class SafeSessionWizardView(SessionWizardView): class SafeSessionWizardView(SessionWizardView):

View File

@@ -38,10 +38,10 @@ import logging
from datetime import timedelta from datetime import timedelta
from decimal import Decimal from decimal import Decimal
from io import BytesIO from io import BytesIO
from zoneinfo import ZoneInfo
import dateutil.parser import dateutil.parser
import pycountry import pycountry
import pytz
from django import forms from django import forms
from django.conf import settings from django.conf import settings
from django.contrib import messages from django.contrib import messages
@@ -61,7 +61,6 @@ from django.utils.timezone import get_current_timezone, now
from django.utils.translation import gettext_lazy as _, pgettext_lazy from django.utils.translation import gettext_lazy as _, pgettext_lazy
from django_countries import countries from django_countries import countries
from django_countries.fields import Country, CountryField from django_countries.fields import Country, CountryField
from geoip2.errors import AddressNotFoundError
from phonenumber_field.formfields import PhoneNumberField from phonenumber_field.formfields import PhoneNumberField
from phonenumber_field.phonenumber import PhoneNumber from phonenumber_field.phonenumber import PhoneNumber
from phonenumber_field.widgets import PhoneNumberPrefixWidget from phonenumber_field.widgets import PhoneNumberPrefixWidget
@@ -357,12 +356,9 @@ class WrappedPhoneNumberPrefixWidget(PhoneNumberPrefixWidget):
def guess_country_from_request(request, event): def guess_country_from_request(request, event):
if settings.HAS_GEOIP: if settings.HAS_GEOIP:
g = GeoIP2() g = GeoIP2()
try: res = g.country(get_client_ip(request))
res = g.country(get_client_ip(request)) if res['country_code'] and len(res['country_code']) == 2:
if res['country_code'] and len(res['country_code']) == 2: return Country(res['country_code'])
return Country(res['country_code'])
except AddressNotFoundError:
pass
return guess_country(event) return guess_country(event)
@@ -500,14 +496,14 @@ class PortraitImageField(SizeValidationMixin, ExtValidationMixin, forms.FileFiel
file = BytesIO(data['content']) file = BytesIO(data['content'])
try: try:
image = Image.open(file, formats=settings.PILLOW_FORMATS_QUESTIONS_IMAGE) image = Image.open(file)
# verify() must be called immediately after the constructor. # verify() must be called immediately after the constructor.
image.verify() image.verify()
# We want to do more than just verify(), so we need to re-open the file # We want to do more than just verify(), so we need to re-open the file
if hasattr(file, 'seek'): if hasattr(file, 'seek'):
file.seek(0) file.seek(0)
image = Image.open(file, formats=settings.PILLOW_FORMATS_QUESTIONS_IMAGE) image = Image.open(file)
# load() is a potential DoS vector (see Django bug #18520), so we verify the size first # load() is a potential DoS vector (see Django bug #18520), so we verify the size first
if image.width > 10_000 or image.height > 10_000: if image.width > 10_000 or image.height > 10_000:
@@ -566,7 +562,7 @@ class PortraitImageField(SizeValidationMixin, ExtValidationMixin, forms.FileFiel
return f return f
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
kwargs.setdefault('ext_whitelist', settings.FILE_UPLOAD_EXTENSIONS_QUESTION_IMAGE) kwargs.setdefault('ext_whitelist', (".png", ".jpg", ".jpeg", ".jfif", ".tif", ".tiff", ".bmp"))
kwargs.setdefault('max_size', settings.FILE_UPLOAD_MAX_SIZE_IMAGE) kwargs.setdefault('max_size', settings.FILE_UPLOAD_MAX_SIZE_IMAGE)
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
@@ -737,7 +733,7 @@ class BaseQuestionsForm(forms.Form):
initial = answers[0] initial = answers[0]
else: else:
initial = None initial = None
tz = ZoneInfo(event.settings.timezone) tz = pytz.timezone(event.settings.timezone)
help_text = rich_text(q.help_text) help_text = rich_text(q.help_text)
label = escape(q.question) # django-bootstrap3 calls mark_safe label = escape(q.question) # django-bootstrap3 calls mark_safe
required = q.required and not self.all_optional required = q.required and not self.all_optional
@@ -826,7 +822,11 @@ class BaseQuestionsForm(forms.Form):
help_text=help_text, help_text=help_text,
initial=initial.file if initial else None, initial=initial.file if initial else None,
widget=UploadedFileWidget(position=pos, event=event, answer=initial), widget=UploadedFileWidget(position=pos, event=event, answer=initial),
ext_whitelist=settings.FILE_UPLOAD_EXTENSIONS_OTHER, ext_whitelist=(
".png", ".jpg", ".gif", ".jpeg", ".pdf", ".txt", ".docx", ".gif", ".svg",
".pptx", ".ppt", ".doc", ".xlsx", ".xls", ".jfif", ".heic", ".heif", ".pages",
".bmp", ".tif", ".tiff"
),
max_size=settings.FILE_UPLOAD_MAX_SIZE_OTHER, max_size=settings.FILE_UPLOAD_MAX_SIZE_OTHER,
) )
elif q.type == Question.TYPE_DATE: elif q.type == Question.TYPE_DATE:

View File

@@ -1,63 +0,0 @@
#
# This file is part of pretix (Community Edition).
#
# Copyright (C) 2014-2020 Raphael Michel and contributors
# Copyright (C) 2020-2021 rami.io GmbH and contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation in version 3 of the License.
#
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
# this file, see <https://pretix.eu/about/en/license>.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
from bootstrap3.renderers import (
FieldRenderer as BaseFieldRenderer,
InlineFieldRenderer as BaseInlineFieldRenderer,
)
from django.forms import (
CheckboxInput, CheckboxSelectMultiple, ClearableFileInput, RadioSelect,
SelectDateWidget,
)
class FieldRenderer(BaseFieldRenderer):
# Local application of https://github.com/zostera/django-bootstrap3/pull/859
def post_widget_render(self, html):
if isinstance(self.widget, CheckboxSelectMultiple):
html = self.list_to_class(html, "checkbox")
elif isinstance(self.widget, RadioSelect):
html = self.list_to_class(html, "radio")
elif isinstance(self.widget, SelectDateWidget):
html = self.fix_date_select_input(html)
elif isinstance(self.widget, ClearableFileInput):
html = self.fix_clearable_file_input(html)
elif isinstance(self.widget, CheckboxInput):
html = self.put_inside_label(html)
return html
class InlineFieldRenderer(BaseInlineFieldRenderer):
# Local application of https://github.com/zostera/django-bootstrap3/pull/859
def post_widget_render(self, html):
if isinstance(self.widget, CheckboxSelectMultiple):
html = self.list_to_class(html, "checkbox")
elif isinstance(self.widget, RadioSelect):
html = self.list_to_class(html, "radio")
elif isinstance(self.widget, SelectDateWidget):
html = self.fix_date_select_input(html)
elif isinstance(self.widget, ClearableFileInput):
html = self.fix_clearable_file_input(html)
elif isinstance(self.widget, CheckboxInput):
html = self.put_inside_label(html)
return html

View File

@@ -60,18 +60,6 @@ def replace_arabic_numbers(inp):
return inp.translate(table) return inp.translate(table)
def format_placeholders_help_text(placeholders, event=None):
placeholders = [(k, v.render_sample(event) if event else v) for k, v in placeholders.items()]
placeholders.sort(key=lambda x: x[0])
phs = [
'<button type="button" class="content-placeholder" title="%s">{%s}</button>' % (_("Sample: %s") % v if v else "", k)
for k, v in placeholders
]
return _('Available placeholders: {list}').format(
list=' '.join(phs)
)
class DatePickerWidget(forms.DateInput): class DatePickerWidget(forms.DateInput):
def __init__(self, attrs=None, date_format=None): def __init__(self, attrs=None, date_format=None):
attrs = attrs or {} attrs = attrs or {}

View File

@@ -24,7 +24,7 @@ Django, for theoretically very valid reasons, creates migrations for *every sing
we change on a model. Even the `help_text`! This makes sense, as we don't know if any we change on a model. Even the `help_text`! This makes sense, as we don't know if any
database backend unknown to us might actually use this information for its database schema. database backend unknown to us might actually use this information for its database schema.
However, pretix only supports PostgreSQL and SQLite and we can be pretty However, pretix only supports PostgreSQL, MySQL, MariaDB and SQLite and we can be pretty
certain that some changes to models will never require a change to the database. In this case, certain that some changes to models will never require a change to the database. In this case,
not creating a migration for certain changes will save us some performance while applying them not creating a migration for certain changes will save us some performance while applying them
*and* allow for a cleaner git history. Win-win! *and* allow for a cleaner git history. Win-win!

View File

@@ -22,7 +22,7 @@
import json import json
import sys import sys
import pytz_deprecation_shim import pytz
from django.core.management.base import BaseCommand from django.core.management.base import BaseCommand
from django.utils.timezone import override from django.utils.timezone import override
from django_scopes import scope from django_scopes import scope
@@ -60,7 +60,7 @@ class Command(BaseCommand):
sys.exit(1) sys.exit(1)
locale = options.get("locale", None) locale = options.get("locale", None)
timezone = pytz_deprecation_shim.timezone(options['timezone']) if options.get('timezone') else None timezone = pytz.timezone(options['timezone']) if options.get('timezone') else None
with scope(organizer=o): with scope(organizer=o):
if options['event_slug']: if options['event_slug']:

View File

@@ -49,9 +49,6 @@ class BaseMediaType:
def handle_unknown(self, organizer, identifier, user, auth): def handle_unknown(self, organizer, identifier, user, auth):
pass pass
def handle_new(self, organizer, medium, user, auth):
pass
def __str__(self): def __str__(self):
return str(self.verbose_name) return str(self.verbose_name)
@@ -111,43 +108,9 @@ class NfcUidMediaType(BaseMediaType):
return m return m
class NfcMf0aesMediaType(BaseMediaType):
identifier = 'nfc_mf0aes'
verbose_name = 'NFC Mifare Ultralight AES'
medium_created_by_server = False
supports_giftcard = True
supports_orderposition = False
def handle_new(self, organizer, medium, user, auth):
from pretix.base.models import GiftCard
if organizer.settings.get(f'reusable_media_type_{self.identifier}_autocreate_giftcard', as_type=bool):
with transaction.atomic():
gc = GiftCard.objects.create(
issuer=organizer,
expires=organizer.default_gift_card_expiry,
currency=organizer.settings.get(f'reusable_media_type_{self.identifier}_autocreate_giftcard_currency'),
)
medium.linked_giftcard = gc
medium.save()
medium.log_action(
'pretix.reusable_medium.linked_giftcard.changed',
user=user, auth=auth,
data={
'linked_giftcard': gc.pk
}
)
gc.log_action(
'pretix.giftcards.created',
user=user, auth=auth,
)
return medium
MEDIA_TYPES = { MEDIA_TYPES = {
m.identifier: m for m in [ m.identifier: m for m in [
BarcodePlainMediaType(), BarcodePlainMediaType(),
NfcUidMediaType(), NfcUidMediaType(),
NfcMf0aesMediaType(),
] ]
} }

View File

@@ -264,7 +264,7 @@ def metric_values():
# Metrics from redis # Metrics from redis
if settings.HAS_REDIS: if settings.HAS_REDIS:
for key, value in redis.hscan_iter(REDIS_KEY, count=1000): for key, value in redis.hscan_iter(REDIS_KEY):
dkey = key.decode("utf-8") dkey = key.decode("utf-8")
splitted = dkey.split("{", 2) splitted = dkey.split("{", 2)
value = float(value.decode("utf-8")) value = float(value.decode("utf-8"))

View File

@@ -21,12 +21,12 @@
# #
from collections import OrderedDict from collections import OrderedDict
from urllib.parse import urlsplit from urllib.parse import urlsplit
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
import pytz
from django.conf import settings from django.conf import settings
from django.http import Http404, HttpRequest, HttpResponse from django.http import Http404, HttpRequest, HttpResponse
from django.middleware.common import CommonMiddleware from django.middleware.common import CommonMiddleware
from django.urls import get_script_prefix, resolve from django.urls import get_script_prefix
from django.utils import timezone, translation from django.utils import timezone, translation
from django.utils.cache import patch_vary_headers from django.utils.cache import patch_vary_headers
from django.utils.deprecation import MiddlewareMixin from django.utils.deprecation import MiddlewareMixin
@@ -98,9 +98,9 @@ class LocaleMiddleware(MiddlewareMixin):
tzname = request.user.timezone tzname = request.user.timezone
if tzname: if tzname:
try: try:
timezone.activate(ZoneInfo(tzname)) timezone.activate(pytz.timezone(tzname))
request.timezone = tzname request.timezone = tzname
except ZoneInfoNotFoundError: except pytz.UnknownTimeZoneError:
pass pass
else: else:
timezone.deactivate() timezone.deactivate()
@@ -230,8 +230,6 @@ class SecurityMiddleware(MiddlewareMixin):
) )
def process_response(self, request, resp): def process_response(self, request, resp):
url = resolve(request.path_info)
if settings.DEBUG and resp.status_code >= 400: if settings.DEBUG and resp.status_code >= 400:
# Don't use CSP on debug error page as it breaks of Django's fancy error # Don't use CSP on debug error page as it breaks of Django's fancy error
# pages # pages
@@ -251,28 +249,20 @@ class SecurityMiddleware(MiddlewareMixin):
h = { h = {
'default-src': ["{static}"], 'default-src': ["{static}"],
'script-src': ['{static}'], 'script-src': ['{static}', 'https://checkout.stripe.com', 'https://js.stripe.com'],
'object-src': ["'none'"], 'object-src': ["'none'"],
'frame-src': ['{static}'], 'frame-src': ['{static}', 'https://checkout.stripe.com', 'https://js.stripe.com'],
'style-src': ["{static}", "{media}"], 'style-src': ["{static}", "{media}"],
'connect-src': ["{dynamic}", "{media}"], 'connect-src': ["{dynamic}", "{media}", "https://checkout.stripe.com"],
'img-src': ["{static}", "{media}", "data:"] + img_src, 'img-src': ["{static}", "{media}", "data:", "https://*.stripe.com"] + img_src,
'font-src': ["{static}"], 'font-src': ["{static}"],
'media-src': ["{static}", "data:"], 'media-src': ["{static}", "data:"],
# form-action is not only used to match on form actions, but also on URLs # form-action is not only used to match on form actions, but also on URLs
# form-actions redirect to. In the context of e.g. payment providers or # form-actions redirect to. In the context of e.g. payment providers or
# single-sign-on this can be nearly anything, so we cannot really restrict # single-sign-on this can be nearly anything so we cannot really restrict
# this. However, we'll restrict it to HTTPS. # this. However, we'll restrict it to HTTPS.
'form-action': ["{dynamic}", "https:"] + (['http:'] if settings.SITE_URL.startswith('http://') else []), 'form-action': ["{dynamic}", "https:"] + (['http:'] if settings.SITE_URL.startswith('http://') else []),
} }
# Only include pay.google.com for wallet detection purposes on the Payment selection page
if (
url.url_name == "event.order.pay.change" or
(url.url_name == "event.checkout" and url.kwargs['step'] == "payment")
):
h['script-src'].append('https://pay.google.com')
h['frame-src'].append('https://pay.google.com')
h['connect-src'].append('https://google.com/pay')
if settings.LOG_CSP: if settings.LOG_CSP:
h['report-uri'] = ["/csp_report/"] h['report-uri'] = ["/csp_report/"]
if 'Content-Security-Policy' in resp: if 'Content-Security-Policy' in resp:

View File

@@ -2,8 +2,6 @@
# Generated by Django 1.10.4 on 2017-02-03 14:21 # Generated by Django 1.10.4 on 2017-02-03 14:21
from __future__ import unicode_literals from __future__ import unicode_literals
from zoneinfo import ZoneInfo
import django.core.validators import django.core.validators
import django.db.migrations.operations.special import django.db.migrations.operations.special
import django.db.models.deletion import django.db.models.deletion
@@ -28,7 +26,7 @@ def forwards42(apps, schema_editor):
for s in EventSetting.objects.filter(key='timezone').values('object_id', 'value') for s in EventSetting.objects.filter(key='timezone').values('object_id', 'value')
} }
for order in Order.objects.all(): for order in Order.objects.all():
tz = ZoneInfo(etz.get(order.event_id, 'UTC')) tz = pytz.timezone(etz.get(order.event_id, 'UTC'))
order.expires = order.expires.astimezone(tz).replace(hour=23, minute=59, second=59) order.expires = order.expires.astimezone(tz).replace(hour=23, minute=59, second=59)
order.save() order.save()

View File

@@ -2,9 +2,9 @@
# Generated by Django 1.10.2 on 2016-10-19 17:57 # Generated by Django 1.10.2 on 2016-10-19 17:57
from __future__ import unicode_literals from __future__ import unicode_literals
from zoneinfo import ZoneInfo import pytz
from django.db import migrations from django.db import migrations
from django.utils import timezone
def forwards(apps, schema_editor): def forwards(apps, schema_editor):
@@ -15,7 +15,7 @@ def forwards(apps, schema_editor):
for s in EventSetting.objects.filter(key='timezone').values('object_id', 'value') for s in EventSetting.objects.filter(key='timezone').values('object_id', 'value')
} }
for order in Order.objects.all(): for order in Order.objects.all():
tz = ZoneInfo(etz.get(order.event_id, 'UTC')) tz = pytz.timezone(etz.get(order.event_id, 'UTC'))
order.expires = order.expires.astimezone(tz).replace(hour=23, minute=59, second=59) order.expires = order.expires.astimezone(tz).replace(hour=23, minute=59, second=59)
order.save() order.save()

View File

@@ -3,6 +3,7 @@
from django.core.exceptions import ImproperlyConfigured from django.core.exceptions import ImproperlyConfigured
from django.db import migrations, models from django.db import migrations, models
from django_mysql.checks import mysql_connections
def set_attendee_name_parts(apps, schema_editor): def set_attendee_name_parts(apps, schema_editor):
@@ -23,12 +24,40 @@ def set_attendee_name_parts(apps, schema_editor):
ia.save(update_fields=['name_parts']) ia.save(update_fields=['name_parts'])
def check_mysqlversion(apps, schema_editor):
errors = []
any_conn_works = False
conns = list(mysql_connections())
found = 'Unknown version'
for alias, conn in conns:
if hasattr(conn, 'mysql_is_mariadb') and conn.mysql_is_mariadb and hasattr(conn, 'mysql_version'):
if conn.mysql_version >= (10, 2, 7):
any_conn_works = True
else:
found = 'MariaDB ' + '.'.join(str(v) for v in conn.mysql_version)
elif hasattr(conn, 'mysql_version'):
if conn.mysql_version >= (5, 7):
any_conn_works = True
else:
found = 'MySQL ' + '.'.join(str(v) for v in conn.mysql_version)
if conns and not any_conn_works:
raise ImproperlyConfigured(
'As of pretix 2.2, you need MySQL 5.7+ or MariaDB 10.2.7+ to run pretix. However, we detected a '
'database connection to {}'.format(found)
)
return errors
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
('pretixbase', '0101_auto_20181025_2255'), ('pretixbase', '0101_auto_20181025_2255'),
] ]
operations = [ operations = [
migrations.RunPython(
check_mysqlversion, migrations.RunPython.noop
),
migrations.RenameField( migrations.RenameField(
model_name='cartposition', model_name='cartposition',
old_name='attendee_name', old_name='attendee_name',

View File

@@ -1,7 +1,8 @@
# Generated by Django 3.2.4 on 2021-09-30 10:25 # Generated by Django 3.2.4 on 2021-09-30 10:25
from datetime import datetime, timezone from datetime import datetime
from django.db import migrations, models from django.db import migrations, models
from pytz import UTC
class Migration(migrations.Migration): class Migration(migrations.Migration):
@@ -14,7 +15,7 @@ class Migration(migrations.Migration):
migrations.AddField( migrations.AddField(
model_name='invoice', model_name='invoice',
name='sent_to_customer', name='sent_to_customer',
field=models.DateTimeField(blank=True, null=True, default=datetime(1970, 1, 1, 0, 0, 0, 0, tzinfo=timezone.utc)), field=models.DateTimeField(blank=True, null=True, default=UTC.localize(datetime(1970, 1, 1, 0, 0, 0, 0))),
preserve_default=False, preserve_default=False,
), ),
] ]

View File

@@ -50,6 +50,6 @@ class Migration(migrations.Migration):
], ],
options={ options={
'unique_together': {('event', 'secret')}, 'unique_together': {('event', 'secret')},
} } if 'mysql' not in settings.DATABASES['default']['ENGINE'] else {}
), ),
] ]

View File

@@ -1,38 +0,0 @@
# Generated by Django 3.2.18 on 2023-05-12 10:08
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pretixbase', '0241_itemmetaproperties_required_values'),
]
operations = [
migrations.RenameField(
model_name='giftcardacceptance',
old_name='collector',
new_name='acceptor',
),
migrations.AddField(
model_name='giftcardacceptance',
name='active',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='giftcardacceptance',
name='reusable_media',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='giftcardacceptance',
name='issuer',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='gift_card_acceptor_acceptance', to='pretixbase.organizer'),
),
migrations.AlterUniqueTogether(
name='giftcardacceptance',
unique_together={('issuer', 'acceptor')},
),
]

View File

@@ -1,23 +0,0 @@
# Generated by Django 4.1.9 on 2023-06-26 10:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pretixbase', '0242_auto_20230512_1008'),
]
operations = [
migrations.AddField(
model_name='device',
name='os_name',
field=models.CharField(max_length=190, null=True),
),
migrations.AddField(
model_name='device',
name='os_version',
field=models.CharField(max_length=190, null=True),
),
]

View File

@@ -1,35 +0,0 @@
# Generated by Django 3.2.18 on 2023-05-17 11:32
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pretixbase', '0243_device_os_name_and_os_version'),
]
operations = [
migrations.AddField(
model_name='device',
name='rsa_pubkey',
field=models.TextField(null=True),
),
migrations.CreateModel(
name='MediumKeySet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
('public_id', models.BigIntegerField(unique=True)),
('media_type', models.CharField(max_length=100)),
('active', models.BooleanField(default=True)),
('uid_key', models.BinaryField()),
('diversification_key', models.BinaryField()),
('organizer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='medium_key_sets', to='pretixbase.organizer')),
],
),
migrations.AddConstraint(
model_name='mediumkeyset',
constraint=models.UniqueConstraint(condition=models.Q(('active', True)), fields=('organizer', 'media_type'), name='keyset_unique_active'),
),
]

View File

@@ -1,34 +0,0 @@
# Generated by Django 4.2.4 on 2023-08-28 12:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("pretixbase", "0244_mediumkeyset"),
]
operations = [
migrations.AddField(
model_name="discount",
name="benefit_apply_to_addons",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name="discount",
name="benefit_ignore_voucher_discounted",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="discount",
name="benefit_limit_products",
field=models.ManyToManyField(
related_name="benefit_discounts", to="pretixbase.item"
),
),
migrations.AddField(
model_name="discount",
name="benefit_same_products",
field=models.BooleanField(default=True),
),
]

View File

@@ -97,7 +97,7 @@ def _transactions_mark_order_dirty(order_id, using=None):
if getattr(dirty_transactions, 'order_ids', None) is None: if getattr(dirty_transactions, 'order_ids', None) is None:
dirty_transactions.order_ids = set() dirty_transactions.order_ids = set()
if _check_for_dirty_orders not in [func for (savepoint_id, func, *__) in conn.run_on_commit]: if _check_for_dirty_orders not in [func for savepoint_id, func in conn.run_on_commit]:
transaction.on_commit(_check_for_dirty_orders, using) transaction.on_commit(_check_for_dirty_orders, using)
dirty_transactions.order_ids.clear() # This is necessary to clean up after old threads with rollbacked transactions dirty_transactions.order_ids.clear() # This is necessary to clean up after old threads with rollbacked transactions

View File

@@ -265,16 +265,16 @@ class CheckinList(LoggedModel):
# * in pretix.helpers.jsonlogic_boolalg # * in pretix.helpers.jsonlogic_boolalg
# * in checkinrules.js # * in checkinrules.js
# * in libpretixsync # * in libpretixsync
# * in pretixscan-ios # * in pretixscan-ios (in the future)
top_level_operators = { top_level_operators = {
'<', '<=', '>', '>=', '==', '!=', 'inList', 'isBefore', 'isAfter', 'or', 'and' '<', '<=', '>', '>=', '==', '!=', 'inList', 'isBefore', 'isAfter', 'or', 'and'
} }
allowed_operators = top_level_operators | { allowed_operators = top_level_operators | {
'buildTime', 'objectList', 'lookup', 'var', 'entries_since', 'entries_before' 'buildTime', 'objectList', 'lookup', 'var',
} }
allowed_vars = { allowed_vars = {
'product', 'variation', 'now', 'now_isoweekday', 'entries_number', 'entries_today', 'entries_days', 'product', 'variation', 'now', 'now_isoweekday', 'entries_number', 'entries_today', 'entries_days',
'minutes_since_last_entry', 'minutes_since_first_entry', 'gate', 'minutes_since_last_entry', 'minutes_since_first_entry',
} }
if not rules or not isinstance(rules, dict): if not rules or not isinstance(rules, dict):
return rules return rules
@@ -299,10 +299,6 @@ class CheckinList(LoggedModel):
raise ValidationError(f'Logic variable "{values[0]}" is currently not allowed.') raise ValidationError(f'Logic variable "{values[0]}" is currently not allowed.')
return rules return rules
if operator in ('entries_since', 'entries_before'):
if len(values) != 1 or "buildTime" not in values[0]:
raise ValidationError(f'Operator "{operator}" takes exactly one "buildTime" argument.')
if operator in ('or', 'and') and seen_nonbool: if operator in ('or', 'and') and seen_nonbool:
raise ValidationError('You cannot use OR/AND logic on a level below a comparison operator.') raise ValidationError('You cannot use OR/AND logic on a level below a comparison operator.')

View File

@@ -121,23 +121,14 @@ class Customer(LoggedModel):
if self.email: if self.email:
self.email = self.email.lower() self.email = self.email.lower()
if 'update_fields' in kwargs and 'last_modified' not in kwargs['update_fields']: if 'update_fields' in kwargs and 'last_modified' not in kwargs['update_fields']:
kwargs['update_fields'] = {'last_modified'}.union(kwargs['update_fields']) kwargs['update_fields'] = list(kwargs['update_fields']) + ['last_modified']
if not self.identifier: if not self.identifier:
self.assign_identifier() self.assign_identifier()
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
if self.name_parts: if self.name_parts:
name = self.name self.name_cached = self.name
if self.name_cached != name:
self.name_cached = name
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'name_cached'}.union(kwargs['update_fields'])
else: else:
if self.name_cached != "" or self.name_parts != {}: self.name_cached = ""
self.name_cached = "" self.name_parts = {}
self.name_parts = {}
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'name_cached', 'name_parts'}.union(kwargs['update_fields'])
super().save(**kwargs) super().save(**kwargs)
def anonymize(self): def anonymize(self):

View File

@@ -98,8 +98,6 @@ class Gate(LoggedModel):
if not Gate.objects.filter(organizer=self.organizer, identifier=code).exists(): if not Gate.objects.filter(organizer=self.organizer, identifier=code).exists():
self.identifier = code self.identifier = code
break break
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
return super().save(*args, **kwargs) return super().save(*args, **kwargs)
@@ -143,14 +141,6 @@ class Device(LoggedModel):
max_length=190, max_length=190,
null=True, blank=True null=True, blank=True
) )
os_name = models.CharField(
max_length=190,
null=True, blank=True
)
os_version = models.CharField(
max_length=190,
null=True, blank=True
)
software_brand = models.CharField( software_brand = models.CharField(
max_length=190, max_length=190,
null=True, blank=True null=True, blank=True
@@ -166,10 +156,6 @@ class Device(LoggedModel):
null=True, null=True,
blank=False blank=False
) )
rsa_pubkey = models.TextField(
null=True,
blank=True,
)
info = models.JSONField( info = models.JSONField(
null=True, blank=True, null=True, blank=True,
) )
@@ -187,8 +173,6 @@ class Device(LoggedModel):
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
if not self.device_id: if not self.device_id:
self.device_id = (self.organizer.devices.aggregate(m=Max('device_id'))['m'] or 0) + 1 self.device_id = (self.organizer.devices.aggregate(m=Max('device_id'))['m'] or 0) + 1
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'device_id'}.union(kwargs['update_fields'])
super().save(*args, **kwargs) super().save(*args, **kwargs)
def permission_set(self) -> set: def permission_set(self) -> set:

View File

@@ -99,7 +99,7 @@ class Discount(LoggedModel):
) )
condition_apply_to_addons = models.BooleanField( condition_apply_to_addons = models.BooleanField(
default=True, default=True,
verbose_name=_("Count add-on products"), verbose_name=_("Apply to add-on products"),
help_text=_("Discounts never apply to bundled products"), help_text=_("Discounts never apply to bundled products"),
) )
condition_ignore_voucher_discounted = models.BooleanField( condition_ignore_voucher_discounted = models.BooleanField(
@@ -107,7 +107,7 @@ class Discount(LoggedModel):
verbose_name=_("Ignore products discounted by a voucher"), verbose_name=_("Ignore products discounted by a voucher"),
help_text=_("If this option is checked, products that already received a discount through a voucher will not " help_text=_("If this option is checked, products that already received a discount through a voucher will not "
"be considered for this discount. However, products that use a voucher only to e.g. unlock a " "be considered for this discount. However, products that use a voucher only to e.g. unlock a "
"hidden product or gain access to sold-out quota will still be considered."), "hidden product or gain access to sold-out quota will still receive the discount."),
) )
condition_min_count = models.PositiveIntegerField( condition_min_count = models.PositiveIntegerField(
verbose_name=_('Minimum number of matching products'), verbose_name=_('Minimum number of matching products'),
@@ -120,19 +120,6 @@ class Discount(LoggedModel):
default=Decimal('0.00'), default=Decimal('0.00'),
) )
benefit_same_products = models.BooleanField(
default=True,
verbose_name=_("Apply discount to same set of products"),
help_text=_("By default, the discount is applied across the same selection of products than the condition for "
"the discount given above. If you want, you can however also select a different selection of "
"products.")
)
benefit_limit_products = models.ManyToManyField(
'Item',
verbose_name=_("Apply discount to specific products"),
related_name='benefit_discounts',
blank=True
)
benefit_discount_matching_percent = models.DecimalField( benefit_discount_matching_percent = models.DecimalField(
verbose_name=_('Percentual discount on matching products'), verbose_name=_('Percentual discount on matching products'),
decimal_places=2, decimal_places=2,
@@ -152,18 +139,6 @@ class Discount(LoggedModel):
blank=True, blank=True,
validators=[MinValueValidator(1)], validators=[MinValueValidator(1)],
) )
benefit_apply_to_addons = models.BooleanField(
default=True,
verbose_name=_("Apply to add-on products"),
help_text=_("Discounts never apply to bundled products"),
)
benefit_ignore_voucher_discounted = models.BooleanField(
default=False,
verbose_name=_("Ignore products discounted by a voucher"),
help_text=_("If this option is checked, products that already received a discount through a voucher will not "
"be discounted. However, products that use a voucher only to e.g. unlock a hidden product or gain "
"access to sold-out quota will still receive the discount."),
)
# more feature ideas: # more feature ideas:
# - max_usages_per_order # - max_usages_per_order
@@ -212,14 +187,6 @@ class Discount(LoggedModel):
'on a minimum value.') 'on a minimum value.')
) )
if data.get('subevent_mode') == cls.SUBEVENT_MODE_DISTINCT and not data.get('benefit_same_products'):
raise ValidationError(
{'benefit_same_products': [
_('You cannot apply the discount to a different set of products if the discount is only valid '
'for bookings of different dates.')
]}
)
def allow_delete(self): def allow_delete(self):
return not self.orderposition_set.exists() return not self.orderposition_set.exists()
@@ -230,7 +197,6 @@ class Discount(LoggedModel):
'condition_min_value': self.condition_min_value, 'condition_min_value': self.condition_min_value,
'benefit_only_apply_to_cheapest_n_matches': self.benefit_only_apply_to_cheapest_n_matches, 'benefit_only_apply_to_cheapest_n_matches': self.benefit_only_apply_to_cheapest_n_matches,
'subevent_mode': self.subevent_mode, 'subevent_mode': self.subevent_mode,
'benefit_same_products': self.benefit_same_products,
}) })
def is_available_by_time(self, now_dt=None) -> bool: def is_available_by_time(self, now_dt=None) -> bool:
@@ -241,14 +207,14 @@ class Discount(LoggedModel):
return False return False
return True return True
def _apply_min_value(self, positions, condition_idx_group, benefit_idx_group, result): def _apply_min_value(self, positions, idx_group, result):
if self.condition_min_value and sum(positions[idx][2] for idx in condition_idx_group) < self.condition_min_value: if self.condition_min_value and sum(positions[idx][2] for idx in idx_group) < self.condition_min_value:
return return
if self.condition_min_count or self.benefit_only_apply_to_cheapest_n_matches: if self.condition_min_count or self.benefit_only_apply_to_cheapest_n_matches:
raise ValueError('Validation invariant violated.') raise ValueError('Validation invariant violated.')
for idx in benefit_idx_group: for idx in idx_group:
previous_price = positions[idx][2] previous_price = positions[idx][2]
new_price = round_decimal( new_price = round_decimal(
previous_price * (Decimal('100.00') - self.benefit_discount_matching_percent) / Decimal('100.00'), previous_price * (Decimal('100.00') - self.benefit_discount_matching_percent) / Decimal('100.00'),
@@ -256,8 +222,8 @@ class Discount(LoggedModel):
) )
result[idx] = new_price result[idx] = new_price
def _apply_min_count(self, positions, condition_idx_group, benefit_idx_group, result): def _apply_min_count(self, positions, idx_group, result):
if len(condition_idx_group) < self.condition_min_count: if len(idx_group) < self.condition_min_count:
return return
if not self.condition_min_count or self.condition_min_value: if not self.condition_min_count or self.condition_min_value:
@@ -267,17 +233,15 @@ class Discount(LoggedModel):
if not self.condition_min_count: if not self.condition_min_count:
raise ValueError('Validation invariant violated.') raise ValueError('Validation invariant violated.')
condition_idx_group = sorted(condition_idx_group, key=lambda idx: (positions[idx][2], -idx)) # sort by line_price idx_group = sorted(idx_group, key=lambda idx: (positions[idx][2], -idx)) # sort by line_price
benefit_idx_group = sorted(benefit_idx_group, key=lambda idx: (positions[idx][2], -idx)) # sort by line_price
# Prevent over-consuming of items, i.e. if our discount is "buy 2, get 1 free", we only # Prevent over-consuming of items, i.e. if our discount is "buy 2, get 1 free", we only
# want to match multiples of 3 # want to match multiples of 3
n_groups = min(len(condition_idx_group) // self.condition_min_count, len(benefit_idx_group)) consume_idx = idx_group[:len(idx_group) // self.condition_min_count * self.condition_min_count]
consume_idx = condition_idx_group[:n_groups * self.condition_min_count] benefit_idx = idx_group[:len(idx_group) // self.condition_min_count * self.benefit_only_apply_to_cheapest_n_matches]
benefit_idx = benefit_idx_group[:n_groups * self.benefit_only_apply_to_cheapest_n_matches]
else: else:
consume_idx = condition_idx_group consume_idx = idx_group
benefit_idx = benefit_idx_group benefit_idx = idx_group
for idx in benefit_idx: for idx in benefit_idx:
previous_price = positions[idx][2] previous_price = positions[idx][2]
@@ -312,7 +276,7 @@ class Discount(LoggedModel):
limit_products = {p.pk for p in self.condition_limit_products.all()} limit_products = {p.pk for p in self.condition_limit_products.all()}
# First, filter out everything not even covered by our product scope # First, filter out everything not even covered by our product scope
condition_candidates = [ initial_candidates = [
idx idx
for idx, (item_id, subevent_id, line_price_gross, is_addon_to, voucher_discount) in positions.items() for idx, (item_id, subevent_id, line_price_gross, is_addon_to, voucher_discount) in positions.items()
if ( if (
@@ -322,25 +286,11 @@ class Discount(LoggedModel):
) )
] ]
if self.benefit_same_products:
benefit_candidates = list(condition_candidates)
else:
benefit_products = {p.pk for p in self.benefit_limit_products.all()}
benefit_candidates = [
idx
for idx, (item_id, subevent_id, line_price_gross, is_addon_to, voucher_discount) in positions.items()
if (
item_id in benefit_products and
(self.benefit_apply_to_addons or not is_addon_to) and
(not self.benefit_ignore_voucher_discounted or voucher_discount is None or voucher_discount == Decimal('0.00'))
)
]
if self.subevent_mode == self.SUBEVENT_MODE_MIXED: # also applies to non-series events if self.subevent_mode == self.SUBEVENT_MODE_MIXED: # also applies to non-series events
if self.condition_min_count: if self.condition_min_count:
self._apply_min_count(positions, condition_candidates, benefit_candidates, result) self._apply_min_count(positions, initial_candidates, result)
else: else:
self._apply_min_value(positions, condition_candidates, benefit_candidates, result) self._apply_min_value(positions, initial_candidates, result)
elif self.subevent_mode == self.SUBEVENT_MODE_SAME: elif self.subevent_mode == self.SUBEVENT_MODE_SAME:
def key(idx): def key(idx):
@@ -349,18 +299,17 @@ class Discount(LoggedModel):
# Build groups of candidates with the same subevent, then apply our regular algorithm # Build groups of candidates with the same subevent, then apply our regular algorithm
# to each group # to each group
_groups = groupby(sorted(condition_candidates, key=key), key=key) _groups = groupby(sorted(initial_candidates, key=key), key=key)
candidate_groups = [(k, list(g)) for k, g in _groups] candidate_groups = [list(g) for k, g in _groups]
for subevent_id, g in candidate_groups: for g in candidate_groups:
benefit_g = [idx for idx in benefit_candidates if positions[idx][1] == subevent_id]
if self.condition_min_count: if self.condition_min_count:
self._apply_min_count(positions, g, benefit_g, result) self._apply_min_count(positions, g, result)
else: else:
self._apply_min_value(positions, g, benefit_g, result) self._apply_min_value(positions, g, result)
elif self.subevent_mode == self.SUBEVENT_MODE_DISTINCT: elif self.subevent_mode == self.SUBEVENT_MODE_DISTINCT:
if self.condition_min_value or not self.benefit_same_products: if self.condition_min_value:
raise ValueError('Validation invariant violated.') raise ValueError('Validation invariant violated.')
# Build optimal groups of candidates with distinct subevents, then apply our regular algorithm # Build optimal groups of candidates with distinct subevents, then apply our regular algorithm
@@ -387,7 +336,7 @@ class Discount(LoggedModel):
candidates = [] candidates = []
cardinality = None cardinality = None
for se, l in subevent_to_idx.items(): for se, l in subevent_to_idx.items():
l = [ll for ll in l if ll in condition_candidates and ll not in current_group] l = [ll for ll in l if ll in initial_candidates and ll not in current_group]
if cardinality and len(l) != cardinality: if cardinality and len(l) != cardinality:
continue continue
if se not in {positions[idx][1] for idx in current_group}: if se not in {positions[idx][1] for idx in current_group}:
@@ -424,5 +373,5 @@ class Discount(LoggedModel):
break break
for g in candidate_groups: for g in candidate_groups:
self._apply_min_count(positions, g, g, result) self._apply_min_count(positions, g, result)
return result return result

View File

@@ -40,9 +40,8 @@ from collections import Counter, OrderedDict, defaultdict
from datetime import datetime, time, timedelta from datetime import datetime, time, timedelta
from operator import attrgetter from operator import attrgetter
from urllib.parse import urljoin from urllib.parse import urljoin
from zoneinfo import ZoneInfo
import pytz_deprecation_shim import pytz
from django.conf import settings from django.conf import settings
from django.core.exceptions import ValidationError from django.core.exceptions import ValidationError
from django.core.files.storage import default_storage from django.core.files.storage import default_storage
@@ -215,7 +214,7 @@ class EventMixin:
@property @property
def timezone(self): def timezone(self):
return pytz_deprecation_shim.timezone(self.settings.timezone) return pytz.timezone(self.settings.timezone)
@property @property
def effective_presale_end(self): def effective_presale_end(self):
@@ -743,7 +742,12 @@ class Event(EventMixin, LoggedModel):
return ObjectRelatedCache(self) return ObjectRelatedCache(self)
def lock(self): def lock(self):
raise NotImplementedError("this method has been removed") """
Returns a contextmanager that can be used to lock an event for bookings.
"""
from pretix.base.services import locking
return locking.LockManager(self)
def get_mail_backend(self, timeout=None): def get_mail_backend(self, timeout=None):
""" """
@@ -769,7 +773,7 @@ class Event(EventMixin, LoggedModel):
""" """
The last datetime of payments for this event. The last datetime of payments for this event.
""" """
tz = ZoneInfo(self.settings.timezone) tz = pytz.timezone(self.settings.timezone)
return make_aware(datetime.combine( return make_aware(datetime.combine(
self.settings.get('payment_term_last', as_type=RelativeDateWrapper).datetime(self).date(), self.settings.get('payment_term_last', as_type=RelativeDateWrapper).datetime(self).date(),
time(hour=23, minute=59, second=59) time(hour=23, minute=59, second=59)
@@ -902,18 +906,14 @@ class Event(EventMixin, LoggedModel):
self.items.filter(hidden_if_available_id=oldid).update(hidden_if_available=q) self.items.filter(hidden_if_available_id=oldid).update(hidden_if_available=q)
for d in Discount.objects.filter(event=other).prefetch_related('condition_limit_products'): for d in Discount.objects.filter(event=other).prefetch_related('condition_limit_products'):
c_items = list(d.condition_limit_products.all()) items = list(d.condition_limit_products.all())
b_items = list(d.benefit_limit_products.all())
d.pk = None d.pk = None
d.event = self d.event = self
d.save(force_insert=True) d.save(force_insert=True)
d.log_action('pretix.object.cloned') d.log_action('pretix.object.cloned')
for i in c_items: for i in items:
if i.pk in item_map: if i.pk in item_map:
d.condition_limit_products.add(item_map[i.pk]) d.condition_limit_products.add(item_map[i.pk])
for i in b_items:
if i.pk in item_map:
d.benefit_limit_products.add(item_map[i.pk])
question_map = {} question_map = {}
for q in Question.objects.filter(event=other).prefetch_related('items', 'options'): for q in Question.objects.filter(event=other).prefetch_related('items', 'options'):
@@ -1276,9 +1276,6 @@ class Event(EventMixin, LoggedModel):
return not self.orders.exists() and not self.invoices.exists() return not self.orders.exists() and not self.invoices.exists()
def delete_sub_objects(self): def delete_sub_objects(self):
from .checkin import Checkin
Checkin.all.filter(successful=False, list__event=self).delete()
self.cartposition_set.filter(addon_to__isnull=False).delete() self.cartposition_set.filter(addon_to__isnull=False).delete()
self.cartposition_set.all().delete() self.cartposition_set.all().delete()
self.vouchers.all().delete() self.vouchers.all().delete()

View File

@@ -19,11 +19,10 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see # You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>. # <https://www.gnu.org/licenses/>.
# #
import zoneinfo
from datetime import datetime, timedelta from datetime import datetime, timedelta
import pytz
from dateutil.rrule import rrulestr from dateutil.rrule import rrulestr
from dateutil.tz import datetime_exists
from django.conf import settings from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder from django.core.serializers.json import DjangoJSONEncoder
from django.db import models from django.db import models
@@ -109,9 +108,12 @@ class AbstractScheduledExport(LoggedModel):
self.schedule_next_run = None self.schedule_next_run = None
return return
self.schedule_next_run = make_aware(datetime.combine(new_d.date(), self.schedule_rrule_time), tz) try:
if not datetime_exists(self.schedule_next_run): self.schedule_next_run = make_aware(datetime.combine(new_d.date(), self.schedule_rrule_time), tz)
self.schedule_next_run += timedelta(hours=1) except pytz.exceptions.AmbiguousTimeError:
self.schedule_next_run = make_aware(datetime.combine(new_d.date(), self.schedule_rrule_time), tz, is_dst=False)
except pytz.exceptions.NonExistentTimeError:
self.schedule_next_run = make_aware(datetime.combine(new_d.date(), self.schedule_rrule_time) + timedelta(hours=1), tz)
class ScheduledEventExport(AbstractScheduledExport): class ScheduledEventExport(AbstractScheduledExport):
@@ -134,4 +136,4 @@ class ScheduledOrganizerExport(AbstractScheduledExport):
@property @property
def tz(self): def tz(self):
return zoneinfo.ZoneInfo(self.timezone) return pytz.timezone(self.timezone)

View File

@@ -46,19 +46,14 @@ def gen_giftcard_secret(length=8):
class GiftCardAcceptance(models.Model): class GiftCardAcceptance(models.Model):
issuer = models.ForeignKey( issuer = models.ForeignKey(
'Organizer', 'Organizer',
related_name='gift_card_acceptor_acceptance', related_name='gift_card_collector_acceptance',
on_delete=models.CASCADE on_delete=models.CASCADE
) )
acceptor = models.ForeignKey( collector = models.ForeignKey(
'Organizer', 'Organizer',
related_name='gift_card_issuer_acceptance', related_name='gift_card_issuer_acceptance',
on_delete=models.CASCADE on_delete=models.CASCADE
) )
active = models.BooleanField(default=True)
reusable_media = models.BooleanField(default=False)
class Meta:
unique_together = (('issuer', 'acceptor'),)
class GiftCard(LoggedModel): class GiftCard(LoggedModel):
@@ -119,7 +114,7 @@ class GiftCard(LoggedModel):
return self.transactions.aggregate(s=Sum('value'))['s'] or Decimal('0.00') return self.transactions.aggregate(s=Sum('value'))['s'] or Decimal('0.00')
def accepted_by(self, organizer): def accepted_by(self, organizer):
return self.issuer == organizer or GiftCardAcceptance.objects.filter(issuer=self.issuer, acceptor=organizer, active=True).exists() return self.issuer == organizer or GiftCardAcceptance.objects.filter(issuer=self.issuer, collector=organizer).exists()
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
if not self.secret: if not self.secret:

View File

@@ -251,20 +251,14 @@ class Invoice(models.Model):
raise ValueError('Every invoice needs to be connected to an order') raise ValueError('Every invoice needs to be connected to an order')
if not self.event: if not self.event:
self.event = self.order.event self.event = self.order.event
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'event'}.union(kwargs['update_fields'])
if not self.organizer: if not self.organizer:
self.organizer = self.order.event.organizer self.organizer = self.order.event.organizer
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'organizer'}.union(kwargs['update_fields'])
if not self.prefix: if not self.prefix:
self.prefix = self.event.settings.invoice_numbers_prefix or (self.event.slug.upper() + '-') self.prefix = self.event.settings.invoice_numbers_prefix or (self.event.slug.upper() + '-')
if self.is_cancellation: if self.is_cancellation:
self.prefix = self.event.settings.invoice_numbers_prefix_cancellations or self.prefix self.prefix = self.event.settings.invoice_numbers_prefix_cancellations or self.prefix
if '%' in self.prefix: if '%' in self.prefix:
self.prefix = self.date.strftime(self.prefix) self.prefix = self.date.strftime(self.prefix)
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'prefix'}.union(kwargs['update_fields'])
if not self.invoice_no: if not self.invoice_no:
if self.order.testmode: if self.order.testmode:
@@ -282,13 +276,8 @@ class Invoice(models.Model):
# Suppress duplicate key errors and try again # Suppress duplicate key errors and try again
if i == 9: if i == 9:
raise raise
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'invoice_no'}.union(kwargs['update_fields'])
if self.full_invoice_no != self.prefix + self.invoice_no: self.full_invoice_no = self.prefix + self.invoice_no
self.full_invoice_no = self.prefix + self.invoice_no
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'full_invoice_no'}.union(kwargs['update_fields'])
return super().save(*args, **kwargs) return super().save(*args, **kwargs)
def delete(self, *args, **kwargs): def delete(self, *args, **kwargs):

View File

@@ -40,11 +40,9 @@ from collections import Counter, OrderedDict
from datetime import date, datetime, time, timedelta from datetime import date, datetime, time, timedelta
from decimal import Decimal, DecimalException from decimal import Decimal, DecimalException
from typing import Optional, Tuple from typing import Optional, Tuple
from zoneinfo import ZoneInfo
import dateutil.parser import dateutil.parser
import django_redis import pytz
from dateutil.tz import datetime_exists
from django.conf import settings from django.conf import settings
from django.core.exceptions import ValidationError from django.core.exceptions import ValidationError
from django.core.validators import ( from django.core.validators import (
@@ -58,6 +56,7 @@ from django.utils.functional import cached_property
from django.utils.timezone import is_naive, make_aware, now from django.utils.timezone import is_naive, make_aware, now
from django.utils.translation import gettext_lazy as _, pgettext_lazy from django.utils.translation import gettext_lazy as _, pgettext_lazy
from django_countries.fields import Country from django_countries.fields import Country
from django_redis import get_redis_connection
from django_scopes import ScopedManager from django_scopes import ScopedManager
from i18nfield.fields import I18nCharField, I18nTextField from i18nfield.fields import I18nCharField, I18nTextField
@@ -928,22 +927,22 @@ class Item(LoggedModel):
) )
if self.validity_dynamic_duration_days: if self.validity_dynamic_duration_days:
replace_date += timedelta(days=self.validity_dynamic_duration_days) replace_date += timedelta(days=self.validity_dynamic_duration_days)
valid_until = valid_until.replace( valid_until = tz.localize(valid_until.replace(
year=replace_date.year, year=replace_date.year,
month=replace_date.month, month=replace_date.month,
day=replace_date.day, day=replace_date.day,
hour=23, minute=59, second=59, microsecond=0, hour=23, minute=59, second=59, microsecond=0,
tzinfo=tz, tzinfo=None,
) ))
elif self.validity_dynamic_duration_days: elif self.validity_dynamic_duration_days:
replace_date = valid_until.date() + timedelta(days=self.validity_dynamic_duration_days - 1) replace_date = valid_until.date() + timedelta(days=self.validity_dynamic_duration_days - 1)
valid_until = valid_until.replace( valid_until = tz.localize(valid_until.replace(
year=replace_date.year, year=replace_date.year,
month=replace_date.month, month=replace_date.month,
day=replace_date.day, day=replace_date.day,
hour=23, minute=59, second=59, microsecond=0, hour=23, minute=59, second=59, microsecond=0,
tzinfo=tz tzinfo=None
) ))
if self.validity_dynamic_duration_hours: if self.validity_dynamic_duration_hours:
valid_until += timedelta(hours=self.validity_dynamic_duration_hours) valid_until += timedelta(hours=self.validity_dynamic_duration_hours)
@@ -951,9 +950,6 @@ class Item(LoggedModel):
if self.validity_dynamic_duration_minutes: if self.validity_dynamic_duration_minutes:
valid_until += timedelta(minutes=self.validity_dynamic_duration_minutes) valid_until += timedelta(minutes=self.validity_dynamic_duration_minutes)
if not datetime_exists(valid_until):
valid_until += timedelta(hours=1)
return requested_start, valid_until return requested_start, valid_until
else: else:
@@ -1463,7 +1459,7 @@ class Question(LoggedModel):
(TYPE_PHONENUMBER, _("Phone number")), (TYPE_PHONENUMBER, _("Phone number")),
) )
UNLOCALIZED_TYPES = [TYPE_DATE, TYPE_TIME, TYPE_DATETIME] UNLOCALIZED_TYPES = [TYPE_DATE, TYPE_TIME, TYPE_DATETIME]
ASK_DURING_CHECKIN_UNSUPPORTED = [] ASK_DURING_CHECKIN_UNSUPPORTED = [TYPE_PHONENUMBER]
event = models.ForeignKey( event = models.ForeignKey(
Event, Event,
@@ -1593,8 +1589,6 @@ class Question(LoggedModel):
if not Question.objects.filter(event=self.event, identifier=code).exists(): if not Question.objects.filter(event=self.event, identifier=code).exists():
self.identifier = code self.identifier = code
break break
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
super().save(*args, **kwargs) super().save(*args, **kwargs)
if self.event: if self.event:
self.event.cache.clear() self.event.cache.clear()
@@ -1684,7 +1678,7 @@ class Question(LoggedModel):
try: try:
dt = dateutil.parser.parse(answer) dt = dateutil.parser.parse(answer)
if is_naive(dt): if is_naive(dt):
dt = make_aware(dt, ZoneInfo(self.event.settings.timezone)) dt = make_aware(dt, pytz.timezone(self.event.settings.timezone))
except: except:
raise ValidationError(_('Invalid datetime input.')) raise ValidationError(_('Invalid datetime input.'))
else: else:
@@ -1742,8 +1736,6 @@ class QuestionOption(models.Model):
if not QuestionOption.objects.filter(question__event=self.question.event, identifier=code).exists(): if not QuestionOption.objects.filter(question__event=self.question.event, identifier=code).exists():
self.identifier = code self.identifier = code
break break
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
super().save(*args, **kwargs) super().save(*args, **kwargs)
@staticmethod @staticmethod
@@ -1910,13 +1902,8 @@ class Quota(LoggedModel):
def rebuild_cache(self, now_dt=None): def rebuild_cache(self, now_dt=None):
if settings.HAS_REDIS: if settings.HAS_REDIS:
rc = django_redis.get_redis_connection("redis") rc = get_redis_connection("redis")
p = rc.pipeline() rc.hdel(f'quotas:{self.event_id}:availabilitycache', str(self.pk))
p.hdel(f'quotas:{self.event_id}:availabilitycache', str(self.pk))
p.hdel(f'quotas:{self.event_id}:availabilitycache:nocw', str(self.pk))
p.hdel(f'quotas:{self.event_id}:availabilitycache:igcl', str(self.pk))
p.hdel(f'quotas:{self.event_id}:availabilitycache:nocw:igcl', str(self.pk))
p.execute()
self.availability(now_dt=now_dt) self.availability(now_dt=now_dt)
def availability( def availability(

View File

@@ -88,7 +88,9 @@ class LogEntry(models.Model):
class Meta: class Meta:
ordering = ('-datetime', '-id') ordering = ('-datetime', '-id')
indexes = [models.Index(fields=["datetime", "id"])] index_together = [
['datetime', 'id']
]
def display(self): def display(self):
from ..signals import logentry_display from ..signals import logentry_display

View File

@@ -121,30 +121,5 @@ class ReusableMedium(LoggedModel):
class Meta: class Meta:
unique_together = (("identifier", "type", "organizer"),) unique_together = (("identifier", "type", "organizer"),)
indexes = [ index_together = (("identifier", "type", "organizer"), ("updated", "id"))
models.Index(fields=("identifier", "type", "organizer")),
models.Index(fields=("updated", "id")),
]
ordering = "identifier", "type", "organizer" ordering = "identifier", "type", "organizer"
class MediumKeySet(models.Model):
organizer = models.ForeignKey('Organizer', on_delete=models.CASCADE, related_name='medium_key_sets')
public_id = models.BigIntegerField(
unique=True,
)
media_type = models.CharField(max_length=100)
active = models.BooleanField(default=True)
uid_key = models.BinaryField()
diversification_key = models.BinaryField()
objects = ScopedManager(organizer='organizer')
class Meta:
constraints = [
models.UniqueConstraint(
fields=["organizer", "media_type"],
condition=Q(active=True),
name="keyset_unique_active",
),
]

View File

@@ -37,18 +37,15 @@ import copy
import hashlib import hashlib
import json import json
import logging import logging
import operator
import string import string
from collections import Counter from collections import Counter
from datetime import datetime, time, timedelta from datetime import datetime, time, timedelta
from decimal import Decimal from decimal import Decimal
from functools import reduce
from time import sleep
from typing import Any, Dict, List, Union from typing import Any, Dict, List, Union
from zoneinfo import ZoneInfo
import dateutil import dateutil
import pycountry import pycountry
import pytz
from django.conf import settings from django.conf import settings
from django.core.exceptions import ValidationError from django.core.exceptions import ValidationError
from django.db import models, transaction from django.db import models, transaction
@@ -78,6 +75,7 @@ from pretix.base.email import get_email_context
from pretix.base.i18n import language from pretix.base.i18n import language
from pretix.base.models import Customer, User from pretix.base.models import Customer, User
from pretix.base.reldate import RelativeDateWrapper from pretix.base.reldate import RelativeDateWrapper
from pretix.base.services.locking import LOCK_TIMEOUT, NoLockManager
from pretix.base.settings import PERSON_NAME_SCHEMES from pretix.base.settings import PERSON_NAME_SCHEMES
from pretix.base.signals import order_gracefully_delete from pretix.base.signals import order_gracefully_delete
@@ -85,7 +83,6 @@ from ...helpers import OF_SELF
from ...helpers.countries import CachedCountries, FastCountryField from ...helpers.countries import CachedCountries, FastCountryField
from ...helpers.format import format_map from ...helpers.format import format_map
from ...helpers.names import build_name from ...helpers.names import build_name
from ...testutils.middleware import debugflags_var
from ._transactions import ( from ._transactions import (
_fail, _transactions_mark_order_clean, _transactions_mark_order_dirty, _fail, _transactions_mark_order_clean, _transactions_mark_order_dirty,
) )
@@ -273,9 +270,9 @@ class Order(LockModel, LoggedModel):
verbose_name = _("Order") verbose_name = _("Order")
verbose_name_plural = _("Orders") verbose_name_plural = _("Orders")
ordering = ("-datetime", "-pk") ordering = ("-datetime", "-pk")
indexes = [ index_together = [
models.Index(fields=["datetime", "id"]), ["datetime", "id"],
models.Index(fields=["last_modified", "id"]), ["last_modified", "id"],
] ]
def __str__(self): def __str__(self):
@@ -464,20 +461,14 @@ class Order(LockModel, LoggedModel):
return '{event}-{code}'.format(event=self.event.slug.upper(), code=self.code) return '{event}-{code}'.format(event=self.event.slug.upper(), code=self.code)
def save(self, **kwargs): def save(self, **kwargs):
if 'update_fields' in kwargs: if 'update_fields' in kwargs and 'last_modified' not in kwargs['update_fields']:
kwargs['update_fields'] = {'last_modified'}.union(kwargs['update_fields']) kwargs['update_fields'] = list(kwargs['update_fields']) + ['last_modified']
if not self.code: if not self.code:
self.assign_code() self.assign_code()
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'code'}.union(kwargs['update_fields'])
if not self.datetime: if not self.datetime:
self.datetime = now() self.datetime = now()
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'datetime'}.union(kwargs['update_fields'])
if not self.expires: if not self.expires:
self.set_expires() self.set_expires()
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'expires'}.union(kwargs['update_fields'])
is_new = not self.pk is_new = not self.pk
update_fields = kwargs.get('update_fields', []) update_fields = kwargs.get('update_fields', [])
@@ -505,7 +496,7 @@ class Order(LockModel, LoggedModel):
def set_expires(self, now_dt=None, subevents=None): def set_expires(self, now_dt=None, subevents=None):
now_dt = now_dt or now() now_dt = now_dt or now()
tz = ZoneInfo(self.event.settings.timezone) tz = pytz.timezone(self.event.settings.timezone)
mode = self.event.settings.get('payment_term_mode') mode = self.event.settings.get('payment_term_mode')
if mode == 'days': if mode == 'days':
exp_by_date = now_dt.astimezone(tz) + timedelta(days=self.event.settings.get('payment_term_days', as_type=int)) exp_by_date = now_dt.astimezone(tz) + timedelta(days=self.event.settings.get('payment_term_days', as_type=int))
@@ -828,7 +819,7 @@ class Order(LockModel, LoggedModel):
if cp.has_checkin: if cp.has_checkin:
return False return False
if self.event.settings.get('invoice_address_asked', as_type=bool) or self.event.settings.get('invoice_name_required', as_type=bool): if self.event.settings.get('invoice_address_asked', as_type=bool):
return True return True
ask_names = self.event.settings.get('attendee_names_asked', as_type=bool) ask_names = self.event.settings.get('attendee_names_asked', as_type=bool)
for cp in positions: for cp in positions:
@@ -879,7 +870,7 @@ class Order(LockModel, LoggedModel):
@property @property
def payment_term_last(self): def payment_term_last(self):
tz = ZoneInfo(self.event.settings.timezone) tz = pytz.timezone(self.event.settings.timezone)
term_last = self.event.settings.get('payment_term_last', as_type=RelativeDateWrapper) term_last = self.event.settings.get('payment_term_last', as_type=RelativeDateWrapper)
if term_last: if term_last:
if self.event.has_subevents: if self.event.has_subevents:
@@ -899,34 +890,7 @@ class Order(LockModel, LoggedModel):
), tz) ), tz)
return term_last return term_last
@property def _can_be_paid(self, count_waitinglist=True, ignore_date=False, force=False) -> Union[bool, str]:
def payment_term_expire_date(self):
delay = self.event.settings.get('payment_term_expire_delay_days', as_type=int)
if not delay: # performance saver + backwards compatibility
return self.expires
term_last = self.payment_term_last
if term_last and self.expires > term_last: # backwards compatibility
return self.expires
expires = self.expires.date() + timedelta(days=delay)
if self.event.settings.get('payment_term_weekdays'):
if expires.weekday() == 5:
expires += timedelta(days=2)
elif expires.weekday() == 6:
expires += timedelta(days=1)
tz = ZoneInfo(self.event.settings.timezone)
expires = make_aware(datetime.combine(
expires,
time(hour=23, minute=59, second=59)
), tz)
if term_last:
return min(expires, term_last)
else:
return expires
def _can_be_paid(self, count_waitinglist=True, ignore_date=False, force=False, lock=False) -> Union[bool, str]:
error_messages = { error_messages = {
'late_lastdate': _("The payment can not be accepted as the last date of payments configured in the " 'late_lastdate': _("The payment can not be accepted as the last date of payments configured in the "
"payment settings is over."), "payment settings is over."),
@@ -947,11 +911,10 @@ class Order(LockModel, LoggedModel):
if not self.event.settings.get('payment_term_accept_late') and not ignore_date and not force: if not self.event.settings.get('payment_term_accept_late') and not ignore_date and not force:
return error_messages['late'] return error_messages['late']
return self._is_still_available(count_waitinglist=count_waitinglist, force=force, lock=lock) return self._is_still_available(count_waitinglist=count_waitinglist, force=force)
def _is_still_available(self, now_dt: datetime=None, count_waitinglist=True, lock=False, force=False, def _is_still_available(self, now_dt: datetime=None, count_waitinglist=True, force=False,
check_voucher_usage=False, check_memberships=False) -> Union[bool, str]: check_voucher_usage=False, check_memberships=False) -> Union[bool, str]:
from pretix.base.services.locking import lock_objects
from pretix.base.services.memberships import ( from pretix.base.services.memberships import (
validate_memberships_in_order, validate_memberships_in_order,
) )
@@ -970,21 +933,10 @@ class Order(LockModel, LoggedModel):
try: try:
if check_memberships: if check_memberships:
try: try:
validate_memberships_in_order(self.customer, positions, self.event, lock=lock, testmode=self.testmode) validate_memberships_in_order(self.customer, positions, self.event, lock=False, testmode=self.testmode)
except ValidationError as e: except ValidationError as e:
raise Quota.QuotaExceededException(e.message) raise Quota.QuotaExceededException(e.message)
for cp in positions:
cp._cached_quotas = list(cp.quotas) if not force else []
if lock:
lock_objects(
[q for q in reduce(operator.or_, (set(cp._cached_quotas) for cp in positions), set()) if q.size is not None] +
[op.voucher for op in positions if op.voucher and not force] +
[op.seat for op in positions if op.seat],
shared_lock_objects=[self.event]
)
for i, op in enumerate(positions): for i, op in enumerate(positions):
if op.seat: if op.seat:
if not op.seat.is_available(ignore_orderpos=op): if not op.seat.is_available(ignore_orderpos=op):
@@ -1009,7 +961,7 @@ class Order(LockModel, LoggedModel):
voucher=op.voucher.code voucher=op.voucher.code
)) ))
quotas = op._cached_quotas quotas = list(op.quotas)
if len(quotas) == 0: if len(quotas) == 0:
raise Quota.QuotaExceededException(error_messages['unavailable'].format( raise Quota.QuotaExceededException(error_messages['unavailable'].format(
item=str(op.item) + (' - ' + str(op.variation) if op.variation else '') item=str(op.item) + (' - ' + str(op.variation) if op.variation else '')
@@ -1031,9 +983,6 @@ class Order(LockModel, LoggedModel):
)) ))
except Quota.QuotaExceededException as e: except Quota.QuotaExceededException as e:
return str(e) return str(e)
if 'sleep-after-quota-check' in debugflags_var.get():
sleep(2)
return True return True
def send_mail(self, subject: Union[str, LazyI18nString], template: Union[str, LazyI18nString], def send_mail(self, subject: Union[str, LazyI18nString], template: Union[str, LazyI18nString],
@@ -1264,7 +1213,7 @@ class QuestionAnswer(models.Model):
@property @property
def is_image(self): def is_image(self):
return any(self.file.name.lower().endswith(e) for e in settings.FILE_UPLOAD_EXTENSIONS_QUESTION_IMAGE) return any(self.file.name.lower().endswith(e) for e in ('.jpg', '.png', '.gif', '.tiff', '.bmp', '.jpeg'))
@property @property
def file_name(self): def file_name(self):
@@ -1281,7 +1230,7 @@ class QuestionAnswer(models.Model):
try: try:
d = dateutil.parser.parse(self.answer) d = dateutil.parser.parse(self.answer)
if self.orderposition: if self.orderposition:
tz = ZoneInfo(self.orderposition.order.event.settings.timezone) tz = pytz.timezone(self.orderposition.order.event.settings.timezone)
d = d.astimezone(tz) d = d.astimezone(tz)
return date_format(d, "SHORT_DATETIME_FORMAT") return date_format(d, "SHORT_DATETIME_FORMAT")
except ValueError: except ValueError:
@@ -1493,20 +1442,12 @@ class AbstractPosition(models.Model):
else self.variation.quotas.filter(subevent=self.subevent)) else self.variation.quotas.filter(subevent=self.subevent))
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
update_fields = kwargs.get('update_fields', set()) update_fields = kwargs.get('update_fields', [])
if 'attendee_name_parts' in update_fields: if 'attendee_name_parts' in update_fields:
kwargs['update_fields'] = {'attendee_name_cached'}.union(kwargs['update_fields']) update_fields.append('attendee_name_cached')
self.attendee_name_cached = self.attendee_name
name = self.attendee_name
if name != self.attendee_name_cached:
self.attendee_name_cached = name
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'attendee_name_cached'}.union(kwargs['update_fields'])
if self.attendee_name_parts is None: if self.attendee_name_parts is None:
self.attendee_name_parts = {} self.attendee_name_parts = {}
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'attendee_name_parts'}.union(kwargs['update_fields'])
super().save(*args, **kwargs) super().save(*args, **kwargs)
@property @property
@@ -1665,10 +1606,9 @@ class OrderPayment(models.Model):
return self.order.event.get_payment_providers(cached=True).get(self.provider) return self.order.event.get_payment_providers(cached=True).get(self.provider)
@transaction.atomic() @transaction.atomic()
def _mark_paid_inner(self, force, count_waitinglist, user, auth, ignore_date=False, overpaid=False, lock=False): def _mark_paid_inner(self, force, count_waitinglist, user, auth, ignore_date=False, overpaid=False):
from pretix.base.signals import order_paid from pretix.base.signals import order_paid
can_be_paid = self.order._can_be_paid(count_waitinglist=count_waitinglist, ignore_date=ignore_date, force=force, can_be_paid = self.order._can_be_paid(count_waitinglist=count_waitinglist, ignore_date=ignore_date, force=force)
lock=lock)
if can_be_paid is not True: if can_be_paid is not True:
self.order.log_action('pretix.event.order.quotaexceeded', { self.order.log_action('pretix.event.order.quotaexceeded', {
'message': can_be_paid 'message': can_be_paid
@@ -1691,13 +1631,12 @@ class OrderPayment(models.Model):
if status_change: if status_change:
self.order.create_transactions() self.order.create_transactions()
def fail(self, info=None, user=None, auth=None, log_data=None, send_mail=True): def fail(self, info=None, user=None, auth=None, log_data=None):
""" """
Marks the order as failed and sets info to ``info``, but only if the order is in ``created`` or ``pending`` Marks the order as failed and sets info to ``info``, but only if the order is in ``created`` or ``pending``
state. This is equivalent to setting ``state`` to ``OrderPayment.PAYMENT_STATE_FAILED`` and logging a failure, state. This is equivalent to setting ``state`` to ``OrderPayment.PAYMENT_STATE_FAILED`` and logging a failure,
but it adds strong database locking since we do not want to report a failure for an order that has just but it adds strong database logging since we do not want to report a failure for an order that has just
been marked as paid. been marked as paid.
:param send_mail: Whether an email should be sent to the user about this event (default: ``True``).
""" """
with transaction.atomic(): with transaction.atomic():
locked_instance = OrderPayment.objects.select_for_update(of=OF_SELF).get(pk=self.pk) locked_instance = OrderPayment.objects.select_for_update(of=OF_SELF).get(pk=self.pk)
@@ -1722,17 +1661,6 @@ class OrderPayment(models.Model):
'info': info, 'info': info,
'data': log_data, 'data': log_data,
}, user=user, auth=auth) }, user=user, auth=auth)
if send_mail:
with language(self.order.locale, self.order.event.settings.region):
email_subject = self.order.event.settings.mail_subject_order_payment_failed
email_template = self.order.event.settings.mail_text_order_payment_failed
email_context = get_email_context(event=self.order.event, order=self.order)
self.order.send_mail(
email_subject, email_template, email_context,
'pretix.event.order.email.payment_failed', user=user, auth=auth,
)
return True return True
def confirm(self, count_waitinglist=True, send_mail=True, force=False, user=None, auth=None, mail_text='', def confirm(self, count_waitinglist=True, send_mail=True, force=False, user=None, auth=None, mail_text='',
@@ -1799,24 +1727,25 @@ class OrderPayment(models.Model):
)) ))
return return
with transaction.atomic(): self._mark_order_paid(count_waitinglist, send_mail, force, user, auth, mail_text, ignore_date, lock, payment_sum - refund_sum,
self._mark_order_paid(count_waitinglist, send_mail, force, user, auth, mail_text, ignore_date, lock, payment_sum - refund_sum, generate_invoice)
generate_invoice)
def _mark_order_paid(self, count_waitinglist=True, send_mail=True, force=False, user=None, auth=None, mail_text='', def _mark_order_paid(self, count_waitinglist=True, send_mail=True, force=False, user=None, auth=None, mail_text='',
ignore_date=False, lock=True, payment_refund_sum=0, allow_generate_invoice=True): ignore_date=False, lock=True, payment_refund_sum=0, allow_generate_invoice=True):
from pretix.base.services.invoices import ( from pretix.base.services.invoices import (
generate_invoice, invoice_qualified, generate_invoice, invoice_qualified,
) )
from pretix.base.services.locking import LOCK_TRUST_WINDOW
if lock and self.order.status == Order.STATUS_PENDING and self.order.expires > now() + timedelta(seconds=LOCK_TRUST_WINDOW): if (self.order.status == Order.STATUS_PENDING and self.order.expires > now() + timedelta(seconds=LOCK_TIMEOUT * 2)) or not lock:
# Performance optimization. In this case, there's really no reason to lock everything and an atomic # Performance optimization. In this case, there's really no reason to lock everything and an atomic
# database transaction is more than enough. # database transaction is more than enough.
lock = False lockfn = NoLockManager
else:
lockfn = self.order.event.lock
self._mark_paid_inner(force, count_waitinglist, user, auth, overpaid=payment_refund_sum > self.order.total, with lockfn():
ignore_date=ignore_date, lock=lock) self._mark_paid_inner(force, count_waitinglist, user, auth, overpaid=payment_refund_sum > self.order.total,
ignore_date=ignore_date)
invoice = None invoice = None
if invoice_qualified(self.order) and allow_generate_invoice: if invoice_qualified(self.order) and allow_generate_invoice:
@@ -1898,8 +1827,6 @@ class OrderPayment(models.Model):
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
if not self.local_id: if not self.local_id:
self.local_id = (self.order.payments.aggregate(m=Max('local_id'))['m'] or 0) + 1 self.local_id = (self.order.payments.aggregate(m=Max('local_id'))['m'] or 0) + 1
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'local_id'}.union(kwargs['update_fields'])
super().save(*args, **kwargs) super().save(*args, **kwargs)
def create_external_refund(self, amount=None, execution_date=None, info='{}'): def create_external_refund(self, amount=None, execution_date=None, info='{}'):
@@ -2098,8 +2025,6 @@ class OrderRefund(models.Model):
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
if not self.local_id: if not self.local_id:
self.local_id = (self.order.refunds.aggregate(m=Max('local_id'))['m'] or 0) + 1 self.local_id = (self.order.refunds.aggregate(m=Max('local_id'))['m'] or 0) + 1
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'local_id'}.union(kwargs['update_fields'])
super().save(*args, **kwargs) super().save(*args, **kwargs)
@@ -2518,20 +2443,14 @@ class OrderPosition(AbstractPosition):
assign_ticket_secret( assign_ticket_secret(
event=self.order.event, position=self, force_invalidate=True, save=False event=self.order.event, position=self, force_invalidate=True, save=False
) )
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'secret'}.union(kwargs['update_fields'])
if not self.blocked and self.blocked is not None: if not self.blocked:
self.blocked = None self.blocked = None
if 'update_fields' in kwargs: elif not isinstance(self.blocked, list) or any(not isinstance(b, str) for b in self.blocked):
kwargs['update_fields'] = {'blocked'}.union(kwargs['update_fields'])
elif self.blocked and (not isinstance(self.blocked, list) or any(not isinstance(b, str) for b in self.blocked)):
raise TypeError("blocked needs to be a list of strings") raise TypeError("blocked needs to be a list of strings")
if not self.pseudonymization_id: if not self.pseudonymization_id:
self.assign_pseudonymization_id() self.assign_pseudonymization_id()
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'pseudonymization_id'}.union(kwargs['update_fields'])
if not self.get_deferred_fields(): if not self.get_deferred_fields():
if Transaction.key(self) != self.__initial_transaction_key or self.canceled != self.__initial_canceled or not self.pk: if Transaction.key(self) != self.__initial_transaction_key or self.canceled != self.__initial_canceled or not self.pk:
@@ -2629,7 +2548,7 @@ class OrderPosition(AbstractPosition):
with language(self.order.locale, self.order.event.settings.region): with language(self.order.locale, self.order.event.settings.region):
email_template = self.event.settings.mail_text_resend_link email_template = self.event.settings.mail_text_resend_link
email_context = get_email_context(event=self.order.event, order=self.order, position=self) email_context = get_email_context(event=self.order.event, order=self.order, position=self)
email_subject = self.event.settings.mail_subject_resend_link_attendee email_subject = self.event.settings.mail_subject_resend_link
self.send_mail( self.send_mail(
email_subject, email_template, email_context, email_subject, email_template, email_context,
'pretix.event.order.email.resend', user=user, auth=auth, 'pretix.event.order.email.resend', user=user, auth=auth,
@@ -2774,8 +2693,8 @@ class Transaction(models.Model):
class Meta: class Meta:
ordering = 'datetime', 'pk' ordering = 'datetime', 'pk'
indexes = [ index_together = [
models.Index(fields=['datetime', 'id']) ['datetime', 'id']
] ]
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
@@ -3017,17 +2936,10 @@ class InvoiceAddress(models.Model):
self.order.touch() self.order.touch()
if self.name_parts: if self.name_parts:
name = self.name self.name_cached = self.name
if self.name_cached != name:
self.name_cached = self.name
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'name_cached'}.union(kwargs['update_fields'])
else: else:
if self.name_cached != "" or self.name_parts != {}: self.name_cached = ""
self.name_cached = "" self.name_parts = {}
self.name_parts = {}
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'name_cached', 'name_parts'}.union(kwargs['update_fields'])
super().save(**kwargs) super().save(**kwargs)
def describe(self): def describe(self):
@@ -3173,7 +3085,11 @@ class BlockedTicketSecret(models.Model):
updated = models.DateTimeField(auto_now=True) updated = models.DateTimeField(auto_now=True)
class Meta: class Meta:
unique_together = (('event', 'secret'),) if 'mysql' not in settings.DATABASES['default']['ENGINE']:
# MySQL does not support indexes on TextField(). Django knows this and just ignores db_index, but it will
# not silently ignore the UNIQUE index, causing this table to fail. I'm so glad we're deprecating MySQL
# in a few months, so we'll just live without an unique index until then.
unique_together = (('event', 'secret'),)
@receiver(post_delete, sender=CachedTicket) @receiver(post_delete, sender=CachedTicket)

View File

@@ -35,12 +35,12 @@
import string import string
from datetime import date, datetime, time from datetime import date, datetime, time
import pytz_deprecation_shim import pytz
from django.conf import settings from django.conf import settings
from django.core.mail import get_connection from django.core.mail import get_connection
from django.core.validators import MinLengthValidator, RegexValidator from django.core.validators import MinLengthValidator, RegexValidator
from django.db import models from django.db import models
from django.db.models import Q from django.db.models import Exists, OuterRef, Q
from django.urls import reverse from django.urls import reverse
from django.utils.crypto import get_random_string from django.utils.crypto import get_random_string
from django.utils.functional import cached_property from django.utils.functional import cached_property
@@ -102,7 +102,6 @@ class Organizer(LoggedModel):
is_new = not self.pk is_new = not self.pk
obj = super().save(*args, **kwargs) obj = super().save(*args, **kwargs)
if is_new: if is_new:
kwargs.pop('update_fields', None) # does not make sense here
self.set_defaults() self.set_defaults()
else: else:
self.get_cache().clear() self.get_cache().clear()
@@ -141,7 +140,7 @@ class Organizer(LoggedModel):
@property @property
def timezone(self): def timezone(self):
return pytz_deprecation_shim.timezone(self.settings.timezone) return pytz.timezone(self.settings.timezone)
@cached_property @cached_property
def all_logentries_link(self): def all_logentries_link(self):
@@ -157,19 +156,17 @@ class Organizer(LoggedModel):
return self.cache.get_or_set( return self.cache.get_or_set(
key='has_gift_cards', key='has_gift_cards',
timeout=15, timeout=15,
default=lambda: self.issued_gift_cards.exists() or self.gift_card_issuer_acceptance.filter(active=True).exists() default=lambda: self.issued_gift_cards.exists() or self.gift_card_issuer_acceptance.exists()
) )
@property @property
def accepted_gift_cards(self): def accepted_gift_cards(self):
from .giftcards import GiftCard, GiftCardAcceptance from .giftcards import GiftCard, GiftCardAcceptance
return GiftCard.objects.filter( return GiftCard.objects.annotate(
Q(issuer=self) | accepted=Exists(GiftCardAcceptance.objects.filter(issuer=OuterRef('issuer'), collector=self))
Q(issuer__in=GiftCardAcceptance.objects.filter( ).filter(
acceptor=self, Q(issuer=self) | Q(accepted=True)
active=True,
).values_list('issuer', flat=True))
) )
@property @property

Some files were not shown because too many files have changed in this diff Show More