Compare commits

..

12 Commits

Author SHA1 Message Date
Raphael Michel
73775786f4 Bump to 4.20.4 2023-09-12 12:17:17 +02:00
Raphael Michel
8898b58be3 Move new settings to _base_settings 2023-09-12 12:17:08 +02:00
Raphael Michel
0376690c5a Bump to 4.20.3 2023-09-12 11:52:45 +02:00
Raphael Michel
2ce5dedfcf [SECURITY] Do not allow Pillow to parse EPS files 2023-09-12 11:52:38 +02:00
Raphael Michel
9612e678fe Bump to 4.20.2.post1 2023-09-11 10:16:57 +02:00
Raphael Michel
213dbbb847 Loosen a version requirement 2023-09-11 10:16:22 +02:00
Raphael Michel
db0786619b Bump to 4.20.2 2023-09-11 10:00:40 +02:00
Raphael Michel
91b45a8707 Fix incorrect handling of boolean configuration flags 2023-09-11 10:00:21 +02:00
Raphael Michel
bff2881573 Bump to 4.20.1 2023-06-12 10:14:13 +02:00
Raphael Michel
dc83b61071 Fix incorrect directory check 2023-06-12 10:14:00 +02:00
Raphael Michel
a0870b1429 Add dependency on pretix-plugin-build to avoid trouble 2023-06-12 09:39:09 +02:00
Raphael Michel
ff68bb9f03 Do not run custom build commands on other packages 2023-06-12 09:39:07 +02:00
452 changed files with 136138 additions and 312181 deletions

View File

@@ -35,7 +35,7 @@ jobs:
restore-keys: |
${{ runner.os }}-pip-
- name: Install Dependencies
run: pip3 install -e ".[dev]" psycopg2-binary
run: pip3 install -e ".[dev]" mysqlclient psycopg2-binary
- name: Run isort
run: isort -c .
working-directory: ./src
@@ -55,7 +55,7 @@ jobs:
restore-keys: |
${{ runner.os }}-pip-
- name: Install Dependencies
run: pip3 install -e ".[dev]" psycopg2-binary
run: pip3 install -e ".[dev]" mysqlclient psycopg2-binary
- name: Run flake8
run: flake8 .
working-directory: ./src

View File

@@ -25,17 +25,27 @@ jobs:
strategy:
matrix:
python-version: ["3.9", "3.10", "3.11"]
database: [sqlite, postgres]
database: [sqlite, postgres, mysql]
exclude:
- database: mysql
python-version: "3.9"
- database: mysql
python-version: "3.11"
- database: sqlite
python-version: "3.9"
- database: sqlite
python-version: "3.10"
steps:
- uses: actions/checkout@v2
- uses: getong/mariadb-action@v1.1
with:
mariadb version: '10.10'
mysql database: 'pretix'
mysql root password: ''
if: matrix.database == 'mysql'
- uses: harmon758/postgresql-action@v1
with:
postgresql version: '15'
postgresql version: '11'
postgresql db: 'pretix'
postgresql user: 'postgres'
postgresql password: 'postgres'
@@ -51,9 +61,9 @@ jobs:
restore-keys: |
${{ runner.os }}-pip-
- name: Install system dependencies
run: sudo apt update && sudo apt install gettext
run: sudo apt update && sudo apt install gettext mariadb-client
- name: Install Python dependencies
run: pip3 install --ignore-requires-python -e ".[dev]" psycopg2-binary # We ignore that flake8 needs newer python as we don't run flake8 during tests
run: pip3 install --ignore-requires-python -e ".[dev]" mysqlclient psycopg2-binary # We ignore that flake8 needs newer python as we don't run flake8 during tests
- name: Run checks
run: python manage.py check
working-directory: ./src

View File

@@ -3,6 +3,7 @@ FROM python:3.11-bullseye
RUN apt-get update && \
apt-get install -y --no-install-recommends \
build-essential \
libmariadb-dev \
gettext \
git \
libffi-dev \
@@ -33,7 +34,8 @@ RUN apt-get update && \
mkdir /static && \
mkdir /etc/supervisord && \
curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - && \
apt-get install -y nodejs
apt-get install -y nodejs && \
curl -qL https://www.npmjs.com/install.sh | sh
ENV LC_ALL=C.UTF-8 \
@@ -56,7 +58,7 @@ RUN pip3 install -U \
wheel && \
cd /pretix && \
PRETIX_DOCKER_BUILD=TRUE pip3 install \
-e ".[memcached]" \
-e ".[memcached,mysql]" \
gunicorn django-extensions ipython && \
rm -rf ~/.cache/pip

View File

@@ -1,4 +1,4 @@
from pretix.settings import *
LOGGING['handlers']['mail_admins']['include_html'] = True
STORAGES["staticfiles"]["BACKEND"] = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'

View File

@@ -152,26 +152,25 @@ Example::
password=abcd
host=localhost
port=3306
sslmode=require
sslrootcert=/etc/pretix/postgresql-ca.crt
sslcert=/etc/pretix/postgresql-client-crt.crt
sslkey=/etc/pretix/postgresql-client-key.key
``backend``
One of ``sqlite3`` and ``postgresql``.
One of ``mysql`` (deprecated), ``sqlite3`` and ``postgresql``.
Default: ``sqlite3``.
If you use MySQL, be sure to create your database using
``CREATE DATABASE <dbname> CHARACTER SET utf8;``. Otherwise, Unicode
support will not properly work.
``name``
The database's name. Default: ``db.sqlite3``.
``user``, ``password``, ``host``, ``port``
Connection details for the database connection. Empty by default.
``sslmode``, ``sslrootcert``
Connection TLS details for the PostgreSQL database connection. Possible values of ``sslmode`` are ``disable``, ``allow``, ``prefer``, ``require``, ``verify-ca``, and ``verify-full``. ``sslrootcert`` should be the accessible path of the ca certificate. Both values are empty by default.
``galera``
(Deprecated) Indicates if the database backend is a MySQL/MariaDB Galera cluster and
turns on some optimizations/special case handlers. Default: ``False``
``sslcert``, ``sslkey``
Connection mTLS details for the PostgreSQL database connection. It's also necessary to specify ``sslmode`` and ``sslrootcert`` parameters, please check the correct values from the TLS part. ``sslcert`` should be the accessible path of the client certificate. ``sslkey`` should be the accessible path of the client key. All values are empty by default.
.. _`config-replica`:
Database replica settings
@@ -333,10 +332,6 @@ to speed up various operations::
["sentinel_host_3", 26379]
]
password=password
ssl_cert_reqs=required
ssl_ca_certs=/etc/pretix/redis-ca.pem
ssl_keyfile=/etc/pretix/redis-client-crt.pem
ssl_certfile=/etc/pretix/redis-client-key.key
``location``
The location of redis, as a URL of the form ``redis://[:password]@localhost:6379/0``
@@ -360,22 +355,6 @@ to speed up various operations::
If your redis setup doesn't require a password or you already specified it in the location you can omit this option.
If this is set it will be passed to redis as the connection option PASSWORD.
``ssl_cert_reqs``
If this is set it will be passed to redis as the connection option ``SSL_CERT_REQS``.
Possible values are ``none``, ``optional``, and ``required``.
``ssl_ca_certs``
If your redis setup doesn't require TLS you can omit this option.
If this is set it will be passed to redis as the connection option ``SSL_CA_CERTS``. Possible value is the ca path.
``ssl_keyfile``
If your redis setup doesn't require mTLS you can omit this option.
If this is set it will be passed to redis as the connection option ``SSL_KEYFILE``. Possible value is the keyfile path.
``ssl_certfile``
If your redis setup doesn't require mTLS you can omit this option.
If this is set it will be passed to redis as the connection option ``SSL_CERTFILE``. Possible value is the certfile path.
If redis is not configured, pretix will store sessions and locks in the database. If memcached
is configured, memcached will be used for caching instead of redis.
@@ -425,8 +404,6 @@ The two ``transport_options`` entries can be omitted in most cases.
If they are present they need to be a valid JSON dictionary.
For possible entries in that dictionary see the `Celery documentation`_.
It is possible to use Redis with TLS/mTLS for the broker or the backend. To do so, it is necessary to specify the TLS identifier ``rediss``, the ssl mode ``ssl_cert_reqs`` and optionally specify the CA (TLS) ``ssl_ca_certs``, cert ``ssl_certfile`` and key ``ssl_keyfile`` (mTLS) path as an encoded string. The following URI describes the format and possible parameters ``rediss://0.0.0.0:6379/1?ssl_cert_reqs=required&ssl_ca_certs=%2Fetc%2Fpretix%2Fredis-ca.pem&ssl_certfile=%2Fetc%2Fpretix%2Fredis-client-crt.pem&ssl_keyfile=%2Fetc%2Fpretix%2Fredis-client-key.key``
To use redis with sentinels set the broker or backend to ``sentinel://sentinel_host_1:26379;sentinel_host_2:26379/0``
and the respective transport_options to ``{"master_name":"mymaster"}``.
If your redis instances behind the sentinel have a password use ``sentinel://:my_password@sentinel_host_1:26379;sentinel_host_2:26379/0``.

View File

@@ -26,7 +26,7 @@ installation guides):
* `Docker`_
* A SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for
* A HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections
* A `PostgreSQL`_ 11+ database server
* A `PostgreSQL`_ 9.6+ database server
* A `redis`_ server
We also recommend that you use a firewall, although this is not a pretix-specific recommendation. If you're new to
@@ -321,11 +321,11 @@ workers, e.g. ``docker run … taskworker -Q notifications --concurrency 32``.
.. _Docker: https://docs.docker.com/engine/installation/linux/debian/
.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-22-04
.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-16-04
.. _nginx: https://botleg.com/stories/https-with-lets-encrypt-and-nginx/
.. _Let's Encrypt: https://letsencrypt.org/
.. _pretix.eu: https://pretix.eu/
.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-22-04
.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-20-04
.. _redis: https://blog.programster.org/debian-8-install-redis-server/
.. _ufw: https://en.wikipedia.org/wiki/Uncomplicated_Firewall
.. _redis website: https://redis.io/topics/security

View File

@@ -68,7 +68,7 @@ generated key and installs the plugin from the URL we told you::
mkdir -p /etc/ssh && \
ssh-keyscan -t rsa -p 10022 code.rami.io >> /root/.ssh/known_hosts && \
echo StrictHostKeyChecking=no >> /root/.ssh/config && \
DJANGO_SETTINGS_MODULE= pip3 install -U "git+ssh://git@code.rami.io:10022/pretix/pretix-slack.git@stable#egg=pretix-slack" && \
DJANGO_SETTINGS_MODULE=pretix.settings pip3 install -U "git+ssh://git@code.rami.io:10022/pretix/pretix-slack.git@stable#egg=pretix-slack" && \
cd /pretix/src && \
sudo -u pretixuser make production
USER pretixuser

View File

@@ -16,11 +16,14 @@ To use pretix, you will need the following things:
* A periodic task runner, e.g. ``cron``
* A **database**. This needs to be a SQL-based database that is supported by Django. We highly recommend to either
go for **PostgreSQL**. If you do not provide one, pretix will run on SQLite, which is useful
go for **PostgreSQL** or **MySQL/MariaDB**. If you do not provide one, pretix will run on SQLite, which is useful
for evaluation and development purposes.
.. warning:: Do not ever use SQLite in production. It will break.
.. warning:: We recommend **PostgreSQL**. If you go for MySQL, make sure you run **MySQL 5.7 or newer** or
**MariaDB 10.2.7 or newer**.
* A **reverse proxy**. pretix needs to deliver some static content to your users (e.g. CSS, images, ...). While pretix
is capable of doing this, having this handled by a proper web server like **nginx** or **Apache** will be much
faster. Also, you need a proxying web server in front to provide SSL encryption.

View File

@@ -21,7 +21,6 @@ Requirements
Please set up the following systems beforehand, we'll not explain them here in detail (but see these links for external
installation guides):
* A python 3.9+ installation
* A SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for
* A HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections
* A `PostgreSQL`_ 11+ database server
@@ -324,11 +323,11 @@ Then, proceed like after any plugin installation::
(venv)$ python -m pretix updatestyles
# systemctl restart pretix-web pretix-worker
.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-22-04
.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-16-04
.. _nginx: https://botleg.com/stories/https-with-lets-encrypt-and-nginx/
.. _Let's Encrypt: https://letsencrypt.org/
.. _pretix.eu: https://pretix.eu/
.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-22-04
.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-20-04
.. _redis: https://blog.programster.org/debian-8-install-redis-server/
.. _ufw: https://en.wikipedia.org/wiki/Uncomplicated_Firewall
.. _strong encryption settings: https://mozilla.github.io/server-side-tls/ssl-config-generator/

View File

@@ -3,11 +3,11 @@
Migrating from MySQL/MariaDB to PostgreSQL
==========================================
Our recommended database for all production installations is PostgreSQL. Support for MySQL/MariaDB has been removed
in newer pretix releases.
Our recommended database for all production installations is PostgreSQL. Support for MySQL/MariaDB will be removed in
pretix 5.0.
In order to follow this guide, your pretix installation needs to be a version that fully supports MySQL/MariaDB. If you
already upgraded to pretix 5.0 or later, downgrade back to the last 4.x release using ``pip``.
already upgraded to pretix 5.0, downgrade back to the last 4.x release using ``pip``.
.. note:: We have tested this guide carefully, but we can't assume any liability for its correctness. The data loss
risk should be low as long as pretix is not running while you do the migration. If you are a pretix Enterprise

View File

@@ -32,16 +32,10 @@ as well as the type of underlying hardware. Example:
"token": "kpp4jn8g2ynzonp6",
"hardware_brand": "Samsung",
"hardware_model": "Galaxy S",
"os_name": "Android",
"os_version": "2.3.6",
"software_brand": "pretixdroid",
"software_version": "4.0.0",
"rsa_pubkey": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqh…nswIDAQAB\n-----END PUBLIC KEY-----\n"
"software_version": "4.0.0"
}
The ``rsa_pubkey`` is optional and only required for certain features such as working with reusable
media and NFC cryptography.
Every initialization token can only be used once. On success, you will receive a response containing
information on your device as well as your API token:
@@ -104,8 +98,6 @@ following endpoint:
{
"hardware_brand": "Samsung",
"hardware_model": "Galaxy S",
"os_name": "Android",
"os_version": "2.3.6",
"software_brand": "pretixdroid",
"software_version": "4.1.0",
"info": {"arbitrary": "data"}
@@ -141,29 +133,9 @@ The response will look like this:
"id": 3,
"name": "South entrance"
}
},
"server": {
"version": {
"pretix": "3.6.0.dev0",
"pretix_numeric": 30060001000
}
},
"medium_key_sets": [
{
"public_id": 3456349,
"organizer": "foo",
"active": true,
"media_type": "nfc_mf0aes",
"uid_key": "base64-encoded-encrypted-key",
"diversification_key": "base64-encoded-encrypted-key",
}
]
}
}
``medium_key_sets`` will always be empty if you did not set an ``rsa_pubkey``.
The individual keys in the key sets are encrypted with the device's ``rsa_pubkey``
using ``RSA/ECB/PKCS1Padding``.
Creating a new API key
----------------------

View File

@@ -24,8 +24,6 @@ all_events boolean Whether this de
limit_events list List of event slugs this device has access to
hardware_brand string Device hardware manufacturer (read-only)
hardware_model string Device hardware model (read-only)
os_name string Device operating system name (read-only)
os_version string Device operating system version (read-only)
software_brand string Device software product (read-only)
software_version string Device software version (read-only)
created datetime Creation time
@@ -78,8 +76,6 @@ Device endpoints
"security_profile": "full",
"hardware_brand": "Zebra",
"hardware_model": "TC25",
"os_name": "Android",
"os_version": "8.1.0",
"software_brand": "pretixSCAN",
"software_version": "1.5.1"
}
@@ -127,8 +123,6 @@ Device endpoints
"security_profile": "full",
"hardware_brand": "Zebra",
"hardware_model": "TC25",
"os_name": "Android",
"os_version": "8.1.0",
"software_brand": "pretixSCAN",
"software_version": "1.5.1"
}
@@ -179,8 +173,6 @@ Device endpoints
"initialized": null
"hardware_brand": null,
"hardware_model": null,
"os_name": null,
"os_version": null,
"software_brand": null,
"software_version": null
}

View File

@@ -31,9 +31,9 @@ subevent_mode strings Determines h
``"same"`` (discount is only applied for groups within
the same date), or ``"distinct"`` (discount is only applied
for groups with no two same dates).
condition_all_products boolean If ``true``, the discount condition applies to all items.
condition_all_products boolean If ``true``, the discount applies to all items.
condition_limit_products list of integers If ``condition_all_products`` is not set, this is a list
of internal item IDs that the discount condition applies to.
of internal item IDs that the discount applies to.
condition_apply_to_addons boolean If ``true``, the discount applies to add-on products as well,
otherwise it only applies to top-level items. The discount never
applies to bundled products.
@@ -48,17 +48,6 @@ benefit_discount_matching_percent decimal (string) The percenta
benefit_only_apply_to_cheapest_n_matches integer If set higher than 0, the discount will only be applied to
the cheapest matches. Useful for a "3 for 2"-style discount.
Cannot be combined with ``condition_min_value``.
benefit_same_products boolean If ``true``, the discount benefit applies to the same set of items
as the condition (see above).
benefit_limit_products list of integers If ``benefit_same_products`` is not set, this is a list
of internal item IDs that the discount benefit applies to.
benefit_apply_to_addons boolean (Only used if ``benefit_same_products`` is ``false``.)
If ``true``, the discount applies to add-on products as well,
otherwise it only applies to top-level items. The discount never
applies to bundled products.
benefit_ignore_voucher_discounted boolean (Only used if ``benefit_same_products`` is ``false``.)
If ``true``, the discount does not apply to products which have
been discounted by a voucher.
======================================== ========================== =======================================================
@@ -105,10 +94,6 @@ Endpoints
"condition_ignore_voucher_discounted": false,
"condition_min_count": 3,
"condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1
}
@@ -161,10 +146,6 @@ Endpoints
"condition_ignore_voucher_discounted": false,
"condition_min_count": 3,
"condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1
}
@@ -203,10 +184,6 @@ Endpoints
"condition_ignore_voucher_discounted": false,
"condition_min_count": 3,
"condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1
}
@@ -234,10 +211,6 @@ Endpoints
"condition_ignore_voucher_discounted": false,
"condition_min_count": 3,
"condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1
}
@@ -294,10 +267,6 @@ Endpoints
"condition_ignore_voucher_discounted": false,
"condition_min_count": 3,
"condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1
}

View File

@@ -70,11 +70,6 @@ Endpoints
The ``public_url`` field has been added.
.. versionchanged:: 5.0
The ``date_from_before``, ``date_from_after``, ``date_to_before``, and ``date_to_after`` query parameters have been
added.
.. http:get:: /api/v1/organizers/(organizer)/events/
Returns a list of all events within a given organizer the authenticated user/token has access to.
@@ -146,10 +141,6 @@ Endpoints
:query has_subevents: If set to ``true``/``false``, only events with a matching value of ``has_subevents`` are returned.
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned. Event series are never (always) returned.
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned. Event series are never (always) returned.
:query date_from_after: If set to a date and time, only events that start at or after the given time are returned.
:query date_from_before: If set to a date and time, only events that start at or before the given time are returned.
:query date_to_after: If set to a date and time, only events that have an end date and end at or after the given time are returned.
:query date_to_before: If set to a date and time, only events that have an end date and end at or before the given time are returned.
:query ends_after: If set to a date and time, only events that happen during or after the given time are returned. Event series are never returned.
:query string ordering: Manually set the ordering of results. Valid fields to be used are ``date_from`` and
``slug``. Keep in mind that ``date_from`` of event series does not really tell you anything.

View File

@@ -111,7 +111,7 @@ Listing available exporters
"input_parameters": [
{
"name": "events",
"required": false
"required": true
},
{
"name": "_format",

View File

@@ -18,7 +18,6 @@ at :ref:`plugin-docs`.
item_variations
item_bundles
item_add-ons
item_meta_properties
questions
question_options
quotas

View File

@@ -12,7 +12,6 @@ The invoice resource contains the following public fields:
Field Type Description
===================================== ========================== =======================================================
number string Invoice number (with prefix)
event string The slug of the parent event
order string Order code of the order this invoice belongs to
is_cancellation boolean ``true``, if this invoice is the cancellation of a
different invoice.
@@ -122,13 +121,9 @@ internal_reference string Customer's refe
The attribute ``lines.subevent`` has been added.
.. versionchanged:: 2023.8
The ``event`` attribute has been added. The organizer-level endpoint has been added.
List of all invoices
--------------------
Endpoints
---------
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/invoices/
@@ -157,7 +152,6 @@ List of all invoices
"results": [
{
"number": "SAMPLECONF-00001",
"event": "sampleconf",
"order": "ABC12",
"is_cancellation": false,
"invoice_from_name": "Big Events LLC",
@@ -227,50 +221,6 @@ List of all invoices
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
.. http:get:: /api/v1/organizers/(organizer)/invoices/
Returns a list of all invoices within all events of a given organizer (with sufficient access permissions).
Supported query parameters and output format of this endpoint are identical to the list endpoint within an event.
**Example request**:
.. sourcecode:: http
GET /api/v1/organizers/bigevents/events/sampleconf/invoices/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"count": 1,
"next": null,
"previous": null,
"results": [
{
"number": "SAMPLECONF-00001",
"event": "sampleconf",
"order": "ABC12",
...
]
}
:param organizer: The ``slug`` field of the organizer to fetch
:statuscode 200: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
Fetching individual invoices
----------------------------
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/invoices/(number)/
Returns information on one invoice, identified by its invoice number.
@@ -293,7 +243,6 @@ Fetching individual invoices
{
"number": "SAMPLECONF-00001",
"event": "sampleconf",
"order": "ABC12",
"is_cancellation": false,
"invoice_from_name": "Big Events LLC",
@@ -388,12 +337,6 @@ Fetching individual invoices
:statuscode 409: The file is not yet ready and will now be prepared. Retry the request after waiting for a few
seconds.
Modifying invoices
------------------
Invoices cannot be edited directly, but the following actions can be triggered:
.. http:post:: /api/v1/organizers/(organizer)/events/(event)/invoices/(invoice_no)/reissue/
Cancels the invoice and creates a new one.

View File

@@ -20,7 +20,6 @@ The order resource contains the following public fields:
Field Type Description
===================================== ========================== =======================================================
code string Order code
event string The slug of the parent event
status string Order status, one of:
* ``n`` pending
@@ -131,10 +130,6 @@ last_modified datetime Last modificati
The ``valid_if_pending`` attribute has been added.
.. versionchanged:: 2023.8
The ``event`` attribute has been added. The organizer-level endpoint has been added.
.. _order-position-resource:
@@ -294,7 +289,6 @@ List of all orders
"results": [
{
"code": "ABC12",
"event": "sampleconf",
"status": "p",
"testmode": false,
"secret": "k24fiuwvu8kxz3y1",
@@ -447,48 +441,6 @@ List of all orders
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
.. http:get:: /api/v1/organizers/(organizer)/orders/
Returns a list of all orders within all events of a given organizer (with sufficient access permissions).
Supported query parameters and output format of this endpoint are identical to the list endpoint within an event,
with the exception that the ``pdf_data`` parameter is not supported here.
**Example request**:
.. sourcecode:: http
GET /api/v1/organizers/bigevents/orders/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
X-Page-Generated: 2017-12-01T10:00:00Z
{
"count": 1,
"next": null,
"previous": null,
"results": [
{
"code": "ABC12",
"event": "sampleconf",
...
}
]
}
:param organizer: The ``slug`` field of the organizer to fetch
:statuscode 200: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
Fetching individual orders
--------------------------
@@ -514,7 +466,6 @@ Fetching individual orders
{
"code": "ABC12",
"event": "sampleconf",
"status": "p",
"testmode": false,
"secret": "k24fiuwvu8kxz3y1",

View File

@@ -18,8 +18,7 @@ The reusable medium resource contains the following public fields:
Field Type Description
===================================== ========================== =======================================================
id integer Internal ID of the medium
type string Type of medium, e.g. ``"barcode"``, ``"nfc_uid"`` or ``"nfc_mf0aes"``.
organizer string Organizer slug of the organizer who "owns" this medium.
type string Type of medium, e.g. ``"barcode"`` or ``"nfc_uid"``.
identifier string Unique identifier of the medium. The format depends on the ``type``.
active boolean Whether this medium may be used.
created datetime Date of creation
@@ -37,7 +36,6 @@ Existing media types are:
- ``barcode``
- ``nfc_uid``
- ``nfc_mf0aes``
Endpoints
---------
@@ -69,7 +67,6 @@ Endpoints
"results": [
{
"id": 1,
"organizer": "bigevents",
"identifier": "ABCDEFGH",
"created": "2021-04-06T13:44:22.809377Z",
"updated": "2021-04-06T13:44:22.809377Z",
@@ -126,7 +123,6 @@ Endpoints
{
"id": 1,
"organizer": "bigevents",
"identifier": "ABCDEFGH",
"created": "2021-04-06T13:44:22.809377Z",
"updated": "2021-04-06T13:44:22.809377Z",
@@ -156,9 +152,6 @@ Endpoints
Look up a new reusable medium by its identifier. In some cases, this might lead to the automatic creation of a new
medium behind the scenes.
This endpoint, and this endpoint only, might return media from a different organizer if there is a cross-acceptance
agreement. In this case, only linked gift cards will be returned, no order position or customer records.
**Example request**:
.. sourcecode:: http
@@ -183,7 +176,6 @@ Endpoints
{
"id": 1,
"organizer": "bigevents",
"identifier": "ABCDEFGH",
"created": "2021-04-06T13:44:22.809377Z",
"updated": "2021-04-06T13:44:22.809377Z",
@@ -243,7 +235,6 @@ Endpoints
{
"id": 1,
"organizer": "bigevents",
"identifier": "ABCDEFGH",
"created": "2021-04-06T13:44:22.809377Z",
"updated": "2021-04-06T13:44:22.809377Z",
@@ -300,7 +291,6 @@ Endpoints
{
"id": 1,
"organizer": "bigevents",
"identifier": "ABCDEFGH",
"created": "2021-04-06T13:44:22.809377Z",
"updated": "2021-04-06T13:44:22.809377Z",

View File

@@ -18,19 +18,8 @@ subject multi-lingual string The subject of
template multi-lingual string The body of the email
all_products boolean If ``true``, the email is sent to buyers of all products
limit_products list of integers List of product IDs, if ``all_products`` is not set
[**DEPRECATED**] include_pending boolean If ``true``, the email is sent to pending orders. If ``false``,
include_pending boolean If ``true``, the email is sent to pending orders. If ``false``,
only paid orders are considered.
restrict_to_status list List of order states to restrict recipients to. Valid
entries are ``p`` for paid, ``e`` for expired, ``c`` for canceled,
``n__pending_approval`` for pending approval,
``n__not_pending_approval_and_not_valid_if_pending`` for payment
pending, ``n__valid_if_pending`` for payment pending but already confirmed,
and ``n__pending_overdue`` for pending with payment overdue.
The default is ``["p", "n__valid_if_pending"]``.
checked_in_status string Check-in status to restrict recipients to. Valid strings are:
``null`` for no filtering (default), ``checked_in`` for
limiting to attendees that are or have been checked in, and
``no_checkin`` for limiting to attendees who have not checked in.
date_is_absolute boolean If ``true``, the email is set at a specific point in time.
send_date datetime If ``date_is_absolute`` is set: Date and time to send the email.
send_offset_days integer If ``date_is_absolute`` is not set, this is the number of days
@@ -48,10 +37,7 @@ send_to string Can be ``"order
or ``"both"``.
date. Otherwise it is relative to the event start date.
===================================== ========================== =======================================================
.. versionchanged:: 2023.7
The ``include_pending`` field has been deprecated.
The ``restrict_to_status`` field has been added.
Endpoints
---------
@@ -88,12 +74,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true,
"limit_products": [],
"restrict_to_status": [
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": null,
"include_pending": false,
"send_date": null,
"send_offset_days": 1,
"send_offset_time": "18:00",
@@ -139,12 +120,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true,
"limit_products": [],
"restrict_to_status": [
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": null,
"include_pending": false,
"send_date": null,
"send_offset_days": 1,
"send_offset_time": "18:00",
@@ -181,12 +157,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true,
"limit_products": [],
"restrict_to_status": [
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": "checked_in",
"include_pending": false,
"send_date": null,
"send_offset_days": 1,
"send_offset_time": "18:00",
@@ -211,12 +182,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true,
"limit_products": [],
"restrict_to_status": [
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": "checked_in",
"include_pending": false,
"send_date": null,
"send_offset_days": 1,
"send_offset_time": "18:00",
@@ -269,12 +235,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true,
"limit_products": [],
"restrict_to_status": [
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": "checked_in",
"include_pending": false,
"send_date": null,
"send_offset_days": 1,
"send_offset_time": "18:00",

View File

@@ -63,11 +63,6 @@ last_modified datetime Last modificati
The ``search`` query parameter has been added to filter sub-events by their name or location in any language.
.. versionchanged:: 5.0
The ``date_from_before``, ``date_from_after``, ``date_to_before``, and ``date_to_after`` query parameters have been
added.
Endpoints
---------
@@ -135,10 +130,6 @@ Endpoints
:query active: If set to ``true``/``false``, only events with a matching value of ``active`` are returned.
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned.
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned.
:query date_from_after: If set to a date and time, only events that start at or after the given time are returned.
:query date_from_before: If set to a date and time, only events that start at or before the given time are returned.
:query date_to_after: If set to a date and time, only events that have an end date and end at or after the given time are returned.
:query date_to_before: If set to a date and time, only events that have an end date and end at or before the given time are returned.
:query ends_after: If set to a date and time, only events that happen during or after the given time are returned.
:query search: Only return events matching a given search query.
:param organizer: The ``slug`` field of a valid organizer
@@ -467,10 +458,6 @@ Endpoints
:query event__live: If set to ``true``/``false``, only events with a matching value of ``live`` on the parent event are returned.
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned.
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned.
:query date_from_after: If set to a date and time, only events that start at or after the given time are returned.
:query date_from_before: If set to a date and time, only events that start at or before the given time are returned.
:query date_to_after: If set to a date and time, only events that have an end date and end at or after the given time are returned.
:query date_to_before: If set to a date and time, only events that have an end date and end at or before the given time are returned.
:query ends_after: If set to a date and time, only events that happen during or after the given time are returned.
:query sales_channel: If set to a sales channel identifier, the response will only contain subevents from events available on this sales channel.
:param organizer: The ``slug`` field of a valid organizer

View File

@@ -20,16 +20,11 @@ internal_name string An optional nam
rate decimal (string) Tax rate in percent
price_includes_tax boolean If ``true`` (default), tax is assumed to be included in
the specified product price
eu_reverse_charge boolean If ``true``, EU reverse charge rules are applied. Will
be ignored if custom rules are set.
eu_reverse_charge boolean If ``true``, EU reverse charge rules are applied
home_country string Merchant country (required for reverse charge), can be
``null`` or empty string
keep_gross_if_rate_changes boolean If ``true``, changes of the tax rate based on custom
rules keep the gross price constant (default is ``false``)
custom_rules object Dynamic rules specification. Each list element
corresponds to one rule that will be processed in order.
The current version of the schema in use can be found
`here`_.
===================================== ========================== =======================================================
@@ -37,10 +32,6 @@ custom_rules object Dynamic rules s
The ``internal_name`` and ``keep_gross_if_rate_changes`` attributes have been added.
.. versionchanged:: 2023.6
The ``custom_rules`` attribute has been added.
Endpoints
---------
@@ -77,7 +68,6 @@ Endpoints
"price_includes_tax": true,
"eu_reverse_charge": false,
"keep_gross_if_rate_changes": false,
"custom_rules": null,
"home_country": "DE"
}
]
@@ -118,7 +108,6 @@ Endpoints
"price_includes_tax": true,
"eu_reverse_charge": false,
"keep_gross_if_rate_changes": false,
"custom_rules": null,
"home_country": "DE"
}
@@ -167,7 +156,6 @@ Endpoints
"price_includes_tax": true,
"eu_reverse_charge": false,
"keep_gross_if_rate_changes": false,
"custom_rules": null,
"home_country": "DE"
}
@@ -215,7 +203,6 @@ Endpoints
"price_includes_tax": true,
"eu_reverse_charge": false,
"keep_gross_if_rate_changes": false,
"custom_rules": null,
"home_country": "DE"
}
@@ -255,5 +242,3 @@ Endpoints
:statuscode 204: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event/rule does not exist **or** you have no permission to change it **or** this tax rule cannot be deleted since it is currently in use.
.. _here: https://github.com/pretix/pretix/blob/master/src/pretix/static/schema/tax-rules-custom.schema.json

View File

@@ -50,10 +50,6 @@ The following values for ``action_types`` are valid with pretix core:
* ``pretix.event.order.payment.confirmed``
* ``pretix.event.order.approved``
* ``pretix.event.order.denied``
* ``pretix.event.orders.waitinglist.added``
* ``pretix.event.orders.waitinglist.changed``
* ``pretix.event.orders.waitinglist.deleted``
* ``pretix.event.orders.waitinglist.voucher_assigned``
* ``pretix.event.checkin``
* ``pretix.event.checkin.reverted``
* ``pretix.event.added``
@@ -67,9 +63,6 @@ The following values for ``action_types`` are valid with pretix core:
* ``pretix.event.live.deactivated``
* ``pretix.event.testmode.activated``
* ``pretix.event.testmode.deactivated``
* ``pretix.customer.created``
* ``pretix.customer.changed``
* ``pretix.customer.anonymized``
Installed plugins might register more valid values.

View File

@@ -18,13 +18,13 @@ If you want to add a custom view to the control area of an event, just register
.. code-block:: python
from django.urls import re_path
from django.conf.urls import url
from . import views
urlpatterns = [
re_path(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
views.admin_view, name='backend'),
url(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
views.admin_view, name='backend'),
]
It is required that your URL parameters are called ``organizer`` and ``event``. If you want to

View File

@@ -61,7 +61,7 @@ Backend
item_formsets, order_search_filter_q, order_search_forms
.. automodule:: pretix.base.signals
:members: logentry_display, logentry_object_link, requiredaction_display, timeline_events, orderposition_blocked_display, customer_created, customer_signed_in
:members: logentry_display, logentry_object_link, requiredaction_display, timeline_events, orderposition_blocked_display
Vouchers
""""""""

View File

@@ -70,8 +70,6 @@ The provider class
.. autoattribute:: settings_form_fields
.. autoattribute:: walletqueries
.. automethod:: settings_form_clean
.. automethod:: settings_content_render

View File

@@ -37,7 +37,7 @@ you to execute a piece of code with a different locale:
This is very useful e.g. when sending an email to a user that has a different language than the user performing the
action that causes the mail to be sent.
.. _translation features: https://docs.djangoproject.com/en/4.2/topics/i18n/translation/
.. _translation features: https://docs.djangoproject.com/en/1.9/topics/i18n/translation/
.. _GNU gettext: https://www.gnu.org/software/gettext/
.. _strings: https://django-i18nfield.readthedocs.io/en/latest/strings.html
.. _database fields: https://django-i18nfield.readthedocs.io/en/latest/quickstart.html

View File

@@ -15,41 +15,33 @@ and the admin panel is available at ``https://pretix.eu/control/event/bigorg/awe
If the organizer now configures a custom domain like ``tickets.bigorg.com``, his event will
from now on be available on ``https://tickets.bigorg.com/awesomecon/``. The former URL at
``pretix.eu`` will redirect there. It's also possible to do this for just an event, in which
case the event will be available on ``https://tickets.awesomecon.org/``.
However, the admin panel will still only be available on ``pretix.eu`` for convenience and security reasons.
``pretix.eu`` will redirect there. However, the admin panel will still only be available
on ``pretix.eu`` for convenience and security reasons.
URL routing
-----------
The hard part about implementing this URL routing in Django is that
``https://pretix.eu/bigorg/awesomecon/`` contains two parameters of nearly arbitrary content
and ``https://tickets.bigorg.com/awesomecon/`` contains only one and ``https://tickets.awesomecon.org/`` does not contain any.
The only robust way to do this is by having *separate* URL configuration for those three cases.
and ``https://tickets.bigorg.com/awesomecon/`` contains only one. The only robust way to do
this is by having *separate* URL configuration for those two cases. In pretix, we call the
former our ``maindomain`` config and the latter our ``subdomain`` config. For pretix's core
modules we do some magic to avoid duplicate configuration, but for a fairly simple plugin with
only a handful of routes, we recommend just configuring the two URL sets separately.
In pretix, we therefore do not have a global URL configuration, but three, living in the following modules:
- ``pretix.multidomain.maindomain_urlconf``
- ``pretix.multidomain.organizer_domain_urlconf``
- ``pretix.multidomain.event_domain_urlconf``
We provide some helper utilities to work with these to avoid duplicate configuration of the individual URLs.
The file ``urls.py`` inside your plugin package will be loaded and scanned for URL configuration
automatically and should be provided by any plugin that provides any view.
However, unlike plain Django, we look not only for a ``urlpatterns`` attribute on the module but support other
attributes like ``event_patterns`` and ``organizer_patterns`` as well.
For example, for a simple plugin that adds one URL to the backend and one event-level URL to the frontend, you can
create the following configuration in your ``urls.py``::
A very basic example that provides one view in the admin panel and one view in the frontend
could look like this::
from django.urls import re_path
from django.conf.urls import url
from . import views
urlpatterns = [
re_path(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
views.AdminView.as_view(), name='backend'),
url(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
views.AdminView.as_view(), name='backend'),
]
event_patterns = [
@@ -60,7 +52,7 @@ create the following configuration in your ``urls.py``::
As you can see, the view in the frontend is not included in the standard Django ``urlpatterns``
setting but in a separate list with the name ``event_patterns``. This will automatically prepend
the appropriate parameters to the regex (e.g. the event or the event and the organizer, depending
on the called domain). For organizer-level views, ``organizer_patterns`` works the same way.
on the called domain).
If you only provide URLs in the admin area, you do not need to provide a ``event_patterns`` attribute.
@@ -79,16 +71,11 @@ is a python method that emulates a behavior similar to ``reverse``:
.. autofunction:: pretix.multidomain.urlreverse.eventreverse
If you need to communicate the URL externally, you can use a different method to ensure that it is always an absolute URL:
.. autofunction:: pretix.multidomain.urlreverse.build_absolute_uri
In addition, there is a template tag that works similar to ``url`` but takes an event or organizer object
as its first argument and can be used like this::
{% load eventurl %}
<a href="{% eventurl request.event "presale:event.checkout" step="payment" %}">Pay</a>
<a href="{% abseventurl request.event "presale:event.checkout" step="payment" %}">Pay</a>
Implementation details

View File

@@ -12,4 +12,3 @@ Developer documentation
api/index
structure
translation/index
nfc/index

View File

@@ -1,15 +0,0 @@
NFC media
=========
pretix supports using NFC chips as "reusable media", for example to store gift cards or tickets.
Most of this implementation currently lives in our proprietary app pretixPOS, but in the future might also become part of our open-source pretixSCAN solution.
Either way, we want this to be an open ecosystem and therefore document the exact mechanisms in use on the following pages.
We support multiple implementations of NFC media, each documented on its own page:
.. toctree::
:maxdepth: 2
uid
mf0aes

View File

@@ -1,113 +0,0 @@
Mifare Ultralight AES
=====================
We offer an implementation that provides a higher security level than the UID-based approach and uses the `Mifare Ultralight AES`_ chip sold by NXP.
We believe the security model of this approach is adequate to the situation where this will usually be used and we'll outline known risks below.
If you want to dive deeper into the properties of the Mifare Ultralight AES chip, we recommend reading the `data sheet`_.
Random UIDs
-----------
Mifare Ultralight AES supports a feature that returns a randomized UID every time a non-authenticated user tries to
read the UID. This has a strong privacy benefit, since no unauthorized entity can use the NFC chips to track users.
On the other hand, this reduces interoperability of the system. For example, this prevents you from using the same NFC
chips for a different purpose where you only need the UID. This will also prevent your guests from reading their UID
themselves with their phones, which might be useful e.g. in debugging situations.
Since there's no one-size-fits-all choice here, you can enable or disable this feature in the pretix organizer
settings. If you change it, the change will apply to all newly encoded chips after the change.
Key management
--------------
For every organizer, the server will generate a "key set", which consists of a publicly known ID (random 32-bit integer) and two 16-byte keys ("diversification key" and "UID key").
Using our :ref:`Device authentication mechanism <rest-deviceauth>`, an authorized device can submit a locally generated RSA public key to the server.
This key can no longer be changed on the server once it is set, thus protecting against the attack scenario of a leaked device API token.
The server will then include key sets in the response to ``/api/v1/device/info``, encrypted with the device's RSA key.
This includes all key sets generated for the organizer the device belongs to, as well as all keys of organizers that have granted sufficient access to this organizer.
The device will decrypt the key sets using its RSA key and store the key sets locally.
.. warning:: The device **will** have access to the raw key sets. Therefore, there is a risk of leaked master keys if an
authorized device is stolen or abused. Our implementation in pretixPOS attempts to make this very hard on
modern, non-rooted Android devices by keeping them encrypted with the RSA key and only storing the RSA key
in the hardware-backed keystore of the device. A sufficiently motivated attacker, however, will likely still
be able to extract the keys from a stolen device.
Encoding a chip
---------------
When a new chip is encoded, the following steps will be taken:
- The UID of the chip is retrieved.
- A chip-specific key is generated using the mechanism documented in `AN10922`_ using the "diversification key" from the
organizer's key set as the CMAC key and the diversification input concatenated in the form of ``0x01 + UID + APPID + SYSTEMID``
with the following values:
- The UID of the chip as ``UID``
- ``"eu.pretix"`` (``0x65 0x75 0x2e 0x70 0x72 0x65 0x74 0x69 0x78``) as ``APPID``
- The ``public_id`` from the organizer's key set as a 4-byte big-endian value as ``SYSTEMID``
- The chip-specific key is written to the chip as the "data protection key" (config pages 0x30 to 0x33)
- The UID key from the organizer's key set is written to the chip as the "UID retrieval key" (config pages 0x34 to 0x37)
- The config page 0x29 is set like this:
- ``RID_ACT`` (random UID) to ``1`` or ``0`` based on the organizer's configuration
- ``SEC_MSG_ACT`` (secure messaging) to ``1``
- ``AUTH0`` (first page that needs authentication) to 0x04 (first non-UID page)
- The config page 0x2A is set like this:
- ``PROT`` to ``0`` (only write access restricted, not read access)
- ``AUTHLIM`` to ``256`` (maximum number of wrong authentications before "self-destruction")
- Everything else to its default value (no lock bits are set)
- The ``public_id`` of the key set will be written to page 0x04 as a big-endian value
- The UID of the chip will be registered as a reusable medium on the server.
.. warning:: During encoding, the chip-specific key and the UID key are transmitted in plain text over the air. The
security model therefore relies on the encoding of chips being performed in a trusted physical environment
to prevent a nearby attacker from sniffing the keys with a strong antenna.
.. note:: If an attacker tries to authenticate with the chip 256 times using the wrong key, the chip will become
unusable. A chip may also become unusable if it is detached from the reader in the middle of the encoding
process (even though we've tried to implement it in a way that makes this unlikely).
Usage
-----
When a chip is presented to the NFC reader, the following steps will be taken:
- Command ``GET_VERSION`` is used to determine if it is a Mifare Ultralight AES chip (if not, abort).
- Page 0x04 is read. If it is all zeroes, the chip is considered un-encoded (abort). If it contains a value that
corresponds to the ``public_id`` of a known key set, this key set is used for all further operations. If it contains
a different value, we consider this chip to belong to a different organizer or not to a pretix system at all (abort).
- An authentication with the chip using the UID key is performed.
- The UID of the chip will be read.
- The chip-specific key will be derived using the mechanism described above in the encoding step.
- An authentication with the chip using the chip-specific key is performed. If this is fully successful, this step
proves that the chip knows the same chip-specific key as we do and is therefore an authentic chip encoded by us and
we can trust its UID value.
- The UID is transmitted to the server to fetch the correct medium.
During these steps, the keys are never transmitted in plain text and can thus not be sniffed by a nearby attacker
with a strong antenna.
.. _Mifare Ultralight AES: https://www.nxp.com/products/rfid-nfc/mifare-hf/mifare-ultralight/mifare-ultralight-aes-enhanced-security-for-limited-use-contactless-applications:MF0AESx20
.. _data sheet: https://www.nxp.com/docs/en/data-sheet/MF0AES(H)20.pdf
.. _AN10922: https://www.nxp.com/docs/en/application-note/AN10922.pdf

View File

@@ -1,10 +0,0 @@
UID-based
=========
With UID-based NFC, only the unique ID (UID) of the NFC chip is used for identification purposes.
This can be used with virtually all NFC chips that provide compatibility with the NFC reader in use, typically at least all chips that comply with ISO/IEC 14443-3A.
We make only one restriction: The UID may not start with ``08``, since that usually signifies a randomized UID that changes on every read (which would not be very useful).
.. warning:: The UID-based approach provides only a very low level of security. It is easy to clone a chip with the same
UID and impersonate someone else.

View File

@@ -96,20 +96,6 @@ http://localhost:8000/control/ for the admin view.
port (for example because you develop on `pretixdroid`_), you can check
`Django's documentation`_ for more options.
When running the local development webserver, ensure Celery is not configured
in ``pretix.cfg``; i.e., you should remove anything such as::
[celery]
backend=redis://redis:6379/2
broker=redis://redis:6379/2
If you choose to use Celery for development, you must also start a Celery worker
process::
celery -A pretix.celery_app worker -l info
However, beware that code changes will not auto-reload within Celery.
.. _`checksandtests`:
Code checks and unit tests

View File

@@ -1,143 +0,0 @@
ePayBL
======
.. note::
Since ePayBL is only available to German federal, provincial and communal entities, the following page is also
only provided in German. Should you require assistance with ePayBL and do not speak this language, please feel free
to reach out to support@pretix.eu.
Einführung
----------
.. note::
Sollten Sie lediglich schnell entscheiden wollen, welcher Kontierungsmodus in den Einstellungen des pretix
ePayBL-plugins gewählt werden soll, so springen Sie direkt zur Sektion :ref:`Kontierungsmodus`.
`ePayBL`_ - das ePayment-System von Bund und Länder - ist das am weitesten verbreitete Zahlungssystem für Bundes-, Länder-
sowie kommunale Aufgabenträger. Während es nur wie eines von vielen anderen Zahlungssystemen scheint, so bietet es
seinen Nutzern besondere Vorteile, wie die automatische Erfassung von Zahlungsbelegen, dem Übertragen von Buchungen in
Haushaltskassen/-systeme sowie die automatische Erfassung von Kontierungen und Steuermerkmalen.
Rein technisch gesehen ist ePayBL hierbei nicht ein eigenständiger Zahlungsdienstleister, sondern nur eine Komponente
im komplexen System, das die Zahlungsabwicklung für Kommunen und Behörden ist.
Im folgenden der schematische Aufbau einer Umgebung, in welcher ePayBL zum Einsatz kommt:
.. figure:: img/epaybl_flowchart.png
:class: screenshot
Quelle: Integrationshandbuch ePayBL-Konnektor, DResearch Digital Media Systems GmbH
In diesem Schaubild stellt pretix, bzw. die von Ihnen als Veranstalter angelegten Ticketshops, das Fachverfahren dar.
ePayBL stellt das Bindeglied zwischen den Fachverfahren, Haushaltssystemen und dem eigentlichen Zahlungsdienstleister,
dem sog. ZV-Provider dar. Dieser ZV-Provider ist die Stelle, welche die eigentlichen Kundengelder einzieht und an den
Händler auszahlt. Das Gros der Zahlungsdienstleister unterstützt pretix hierbei auch direkt; sprich: Sollten Sie die
Anbindung an Ihre Haushaltssysteme nicht benötigen, kann eine direkte Anbindung in der Regel ebenso - und dies bei meist
vermindertem Aufwand - vorgenommen werden.
In der Vergangenheit zeigte sich jedoch schnell, dass nicht jeder IT-Dienstleister immer sofort die neueste Version von
ePayBL seinen Nutzern angeboten hat. Die Gründe hierfür sind mannigfaltig: Von fest vorgegebenen Update-Zyklen bis hin
zu Systemen mit speziellen Anpassungen, kann leider nicht davon ausgegangen werden, dass alle ePayBL-Systeme exakt gleich
ansprechbar sind - auch wenn es sich dabei eigentlich um einen standardisierten Dienst handelt.
Aus diesem Grund gibt es mit dem ePayBL-Konnektor eine weitere Abstraktionsschicht welche optional zwischen den
Fachverfahren und dem ePayBL-Server sitzt. Dieser Konnektor wird so gepflegt, dass er zum einen eine dauerhaft
gleichartige Schnittstelle den Fachverfahren bietet aber gleichzeitig auch mit jeder Version des ePayBL-Servers
kommunizieren kann - egal wie neu oder alt, wie regulär oder angepasst diese ist.
Im Grunde müsste daher eigentlich immer gesagt werden, dass pretix eine Anbindung an den ePayBL-Konnektor bietet; nicht
an "ePayBL" oder den "ePayBL-Server". Diese Unterscheidung kann bei der Ersteinrichtung und Anforderung von Zugangsdaten
von Relevanz sein. Da in der Praxis jedoch beide Begriffe gleichbedeutend genutzt werden, wird im Folgenden auch nur von
einer ePayBL-Anbindung die Rede sein - auch wenn explizit der Konnektor gemeint ist.
.. _`Kontierungsmodus`:
Kontierungsmodus
----------------
ePayBL ist ein Produkt, welches für die Abwicklung von Online-Zahlungsvorgängen in der Verwaltung geschaffen wurde. Ein
Umfeld, in dem klar definiert ist, was ein Kunde gerade bezahlt und wohin das Geld genau fließt. Diese Annahmen lassen
sich in einem Ticketshop wie pretix jedoch nur teilweise genauso abbilden.
Die ePayBL-Integration für pretix bietet daher zwei unterschiedliche Modi an, wie Buchungen erfasst und an ePayBL und
damit auch an die dahinterliegenden Haushaltssysteme gemeldet werden können.
Kontierung pro Position/Artikel
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Dieser Modus versucht den klassischen, behördentypischen ePayBL-Zahlungsvorgang abzubilden: Jede einzelne Position, die
ein Kunde in den Warenkorb legt, wird auch genauso 1:1 an ePayBL und die Hintergrundsysteme übermittelt.
Hierbei muss zwingend auch für jede Position ein Kennzeichen für Haushaltsstelle und Objektnummer, sowie optional ein
Kontierungsobjekt (``HREF``; bspw. ``stsl=Steuerschlüssel;psp=gsb:Geschäftsbereich,auft:Innenauftrag,kst:Kostenstelle;``
) übermittelt werden.
Diese Daten sind vom Veranstalter entsprechend für jeden in der Veranstaltung angelegten Artikel innerhalb des Tabs
"Zusätzliche Einstellungen" der Produkteinstellungen zu hinterlegen.
Während diese Einstellung eine größtmögliche Menge an Kontierungsdaten überträgt und auch ein separates Verbuchen von
Leistungen auf unterschiedliche Haushaltsstellen erlaubt, so hat diese Option auch einen großen Nachteil: Der Kunde kann
nur eine Zahlung für seine Bestellung leisten.
Während sich dies nicht nach einem großen Problem anhört, so kann dies beim Kunden zu Frust führen. pretix bietet die
Option an, dass ein Veranstalter eine Bestellung jederzeit verändern kann: Ändern von Preisen von Positionen in einer
aufgegebenen Bestellung, Zubuchen und Entfernen von Bestellpositionen, etc. Hat der Kunde seine ursprüngliche Bestellung
jedoch schon bezahlt, kann pretix nicht mehr die komplette Bestellung mit den passenden Kontierungen übertragen - es
müsste nur ein Differenz-Abbild zwischen Ursprungsbestellung und aktueller Bestellung übertragen werden. Aber auch wenn
eine "Nachmeldung" möglich wäre, so wäre ein konkretes Auflösen für was jetzt genau gezahlt wird, nicht mehr möglich.
Daher gilt bei der Nutzung der Kontierung pro Position/Artikel: Der Kunde kann nur eine (erfolgreiche) Zahlung auf seine
Bestellung leisten.
Eine weitere Einschränkung dieses Modus ist, dass aktuell keine Gebühren-Positionen (Versandkosten, Zahlungs-, Storno-
oder Servicegebühren) in diesem Modus übertragen werden können. Bitte wenden Sie sich an uns, wenn Sie diese
Funktionalität benötigen.
Kontierung pro Zahlvorgang
^^^^^^^^^^^^^^^^^^^^^^^^^^
Dieser Modus verabschiedet sich vom behördlichen "Jede Position gehört genau zu einem Haushaltskonto und muss genau
zugeordnet werden". Stattdessen werden alle Bestellpositionen - inklusive eventuell definierter Gebühren - vermengt und
nur als ein großer Warenkorb, genauer gesagt: eine einzige Position an ePayBL sowie die Hintergrundsysteme gemeldet.
Während im "pro Position/Artikel"-Modus jeder Artikel einzeln übermittelt wird und damit auch korrekt pro Artikel der
jeweilige Brutto- und Nettopreis, sowie der anfallende Steuerbetrag und ein Steuerkennzeichen (mit Hilfe des optionalen
``HREF``-Attributs) übermittelt werden, ist dies im "pro Zahlvorgang"-Modus nicht möglich.
Stattdessen übermittelt pretix nur einen Betrag für den gesamten Warenkorb: Bruttopreis == Nettopreis. Der Steuerbetrag
wird hierbei als 0 übermittelt.
Die Angabe einer Haushaltsstelle und Objektnummer, sowie optional der ``HREF``-Kontierungsinformationen ist jedoch
weiterhin notwendig - allerdings nicht mehr individuell für jeden Artikel/jede Position sondern nur für die gesamte
Bestellung. Diese Daten sind direkt in den ePayBL-Einstellungen der Veranstaltung unter Einstellungen -> Zahlung ->
ePayBL vorzunehmen
In der Praxis bedeutet dies, dass in einem angeschlossenen Haushaltssystem nicht nachvollzogen werden kann, welche Positionen
konkret erworben und bezahlt wurden - stattdessen kann nur der Fakt, dass etwas verkauft wurde erfasst werden.
Je nach Aufbau und Vorgaben der Finanzbuchhaltung kann dies jedoch ausreichend sein - wenn bspw. eine Ferienfahrt
angeboten wird und seitens der Haushaltssysteme nicht erfasst werden muss, wie viel vom Gesamtbetrag einer Bestellung
auf die Ferienfahrt an sich, auf einen Zubringerbus und einen Satz Bettwäsche entfallen ist, sondern (vereinfacht
gesagt) es ausreichend ist, dass "Eine Summe X für die Haushaltsstelle/Objektnummer geflossen ist".
Dieser Modus der Kontierung bietet Ihnen auch als Vorteil gegenüber dem vorhergehenden an, dass die Bestellungen der
Kunden jederzeit erweitert und verändert werden können - auch wenn die Ursprungsbestellung schon bezahlt wurde und nur
noch eine Differenz gezahlt wird.
Einschränkungen
---------------
Zum aktuellen Zeitpunkt erlaubt die pretix-Anbindung an ePayBL nicht das Durchführen von Erstattungen von bereits
geleisteten Zahlungen. Der Prozess hierfür unterscheidet sich von Behörde zu Behörde und muss daher händisch
durchgeführt werden.
.. _ePayBL: https://www.epaybl.de/

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

View File

@@ -18,7 +18,6 @@ If you want to **create** a plugin, please go to the
campaigns
certificates
digital
epaybl
exhibitors
shipping
imported_secrets

View File

@@ -22,7 +22,7 @@ classifiers = [
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Framework :: Django :: 4.1",
"Framework :: Django :: 3.2",
]
dependencies = [
@@ -30,13 +30,13 @@ dependencies = [
"babel",
"BeautifulSoup4==4.12.*",
"bleach==5.0.*",
"celery==5.3.*",
"celery==5.2.*",
"chardet==5.1.*",
"cryptography>=3.4.2",
"css-inline==0.8.*",
"defusedcsv>=1.1.0",
"dj-static",
"Django==4.2.*",
"Django==3.2.*,>=3.2.18",
"django-bootstrap3==23.1.*",
"django-compressor==4.3.*",
"django-countries==7.5.*",
@@ -49,8 +49,9 @@ dependencies = [
"django-libsass==0.9",
"django-localflavor==4.0",
"django-markup",
"django-mysql",
"django-oauth-toolkit==2.2.*",
"django-otp==1.2.*",
"django-otp==1.1.*",
"django-phonenumber-field==7.1.*",
"django-redis==5.2.*",
"django-scopes==2.0.*",
@@ -62,18 +63,18 @@ dependencies = [
"importlib_metadata==6.*", # Polyfill, we can probably drop this once we require Python 3.10+
"isoweek",
"jsonschema",
"kombu==5.3.*",
"kombu==5.2.*",
"libsass==0.22.*",
"lxml",
"markdown==3.4.3", # 3.3.5 requires importlib-metadata>=4.4, but django-bootstrap3 requires importlib-metadata<3.
# We can upgrade markdown again once django-bootstrap3 upgrades or once we drop Python 3.6 and 3.7
"mt-940==4.30.*",
"mt-940==4.23.*",
"oauthlib==3.2.*",
"openpyxl==3.1.*",
"packaging",
"paypalrestsdk==1.13.*",
"paypal-checkout-serversdk==1.0.*",
"PyJWT==2.7.*",
"PyJWT==2.6.*",
"phonenumberslite==8.13.*",
"Pillow==9.5.*",
"pretix-plugin-build",
@@ -82,17 +83,16 @@ dependencies = [
"pycountry",
"pycparser==2.21",
"pycryptodome==3.18.*",
"pypdf==3.9.*",
"pypdf==3.8.*",
"python-bidi==0.4.*", # Support for Arabic in reportlab
"python-dateutil==2.8.*",
"python-u2flib-server==4.*",
"pytz",
"pytz-deprecation-shim==0.1.*",
"pyuca",
"qrcode==7.4.*",
"redis==4.6.*",
"redis==4.5.*,>=4.5.4",
"reportlab==4.0.*",
"requests==2.31.*",
"requests==2.30.*",
"sentry-sdk==1.15.*",
"sepaxml==2.6.*",
"slimit",
@@ -109,10 +109,10 @@ dependencies = [
[project.optional-dependencies]
memcached = ["pylibmc"]
mysql = ["mysqlclient"]
dev = [
"coverage",
"coveralls",
"fakeredis==2.18.*",
"flake8==6.0.*",
"freezegun",
"isort==5.12.*",
@@ -126,7 +126,7 @@ dev = [
"pytest-mock==3.10.*",
"pytest-rerunfailures==11.*",
"pytest-sugar",
"pytest-xdist==3.3.*",
"pytest-xdist==3.2.*",
"pytest==7.3.*",
"responses",
]

View File

@@ -19,4 +19,4 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
__version__ = "2023.8.0.dev0"
__version__ = "4.20.4"

View File

@@ -30,6 +30,7 @@ from django.utils.translation import gettext_lazy as _ # NOQA
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
USE_I18N = True
USE_L10N = True
USE_TZ = True
INSTALLED_APPS = [
@@ -67,7 +68,6 @@ INSTALLED_APPS = [
'oauth2_provider',
'phonenumber_field',
'statici18n',
'django.forms', # after pretix.base for overrides
]
FORMAT_MODULE_PATH = [
@@ -80,7 +80,6 @@ ALL_LANGUAGES = [
('de-informal', _('German (informal)')),
('ar', _('Arabic')),
('zh-hans', _('Chinese (simplified)')),
('zh-hant', _('Chinese (traditional)')),
('cs', _('Czech')),
('da', _('Danish')),
('nl', _('Dutch')),
@@ -180,8 +179,6 @@ TEMPLATES = [
},
]
FORM_RENDERER = "django.forms.renderers.TemplatesSetting"
STATIC_ROOT = os.path.join(os.path.dirname(__file__), 'static.dist')
STATICFILES_FINDERS = (
@@ -196,14 +193,7 @@ STATICFILES_DIRS = [
STATICI18N_ROOT = os.path.join(BASE_DIR, "pretix/static")
STORAGES = {
"default": {
"BACKEND": "django.core.files.storage.FileSystemStorage",
},
"staticfiles": {
"BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
},
}
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
# if os.path.exists(os.path.join(DATA_DIR, 'static')):
# STATICFILES_DIRS.insert(0, os.path.join(DATA_DIR, 'static'))
@@ -259,3 +249,20 @@ PRETIX_PRIMARY_COLOR = '#8E44B3'
# stressful for some cache setups so it is enabled by default and currently can't be enabled through pretix.cfg
CACHE_LARGE_VALUES_ALLOWED = False
CACHE_LARGE_VALUES_ALIAS = 'default'
# Allowed file extensions for various places plus matching Pillow formats.
# Never allow EPS, it is full of dangerous bugs.
FILE_UPLOAD_EXTENSIONS_IMAGE = (".png", ".jpg", ".gif", ".jpeg")
PILLOW_FORMATS_IMAGE = ('PNG', 'GIF', 'JPEG')
FILE_UPLOAD_EXTENSIONS_FAVICON = (".ico", ".png", "jpg", ".gif", ".jpeg")
FILE_UPLOAD_EXTENSIONS_QUESTION_IMAGE = (".png", "jpg", ".gif", ".jpeg", ".bmp", ".tif", ".tiff", ".jfif")
PILLOW_FORMATS_QUESTIONS_IMAGE = ('PNG', 'GIF', 'JPEG', 'BMP', 'TIFF')
FILE_UPLOAD_EXTENSIONS_EMAIL_ATTACHMENT = (
".png", ".jpg", ".gif", ".jpeg", ".pdf", ".txt", ".docx", ".gif", ".svg",
".pptx", ".ppt", ".doc", ".xlsx", ".xls", ".jfif", ".heic", ".heif", ".pages",
".bmp", ".tif", ".tiff"
)
FILE_UPLOAD_EXTENSIONS_OTHER = FILE_UPLOAD_EXTENSIONS_EMAIL_ATTACHMENT

View File

@@ -223,7 +223,6 @@ class PretixPosSecurityProfile(AllowListSecurityProfile):
('POST', 'api-v1:checkinrpc.redeem'),
('GET', 'api-v1:checkinrpc.search'),
('POST', 'api-v1:reusablemedium-lookup'),
('POST', 'api-v1:reusablemedium-list'),
)

View File

@@ -59,7 +59,7 @@ class IdempotencyMiddleware:
auth_hash = sha1(auth_hash_parts.encode()).hexdigest()
idempotency_key = request.headers.get('X-Idempotency-Key', '')
with transaction.atomic(durable=True):
with transaction.atomic():
call, created = ApiCall.objects.select_for_update(of=OF_SELF).get_or_create(
auth_hash=auth_hash,
idempotency_key=idempotency_key,
@@ -75,7 +75,7 @@ class IdempotencyMiddleware:
if created:
resp = self.get_response(request)
with transaction.atomic(durable=True):
with transaction.atomic():
if resp.status_code in (409, 429, 500, 503):
# This is the exception: These calls are *meant* to be retried!
call.delete()

View File

@@ -19,8 +19,6 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
import json
from rest_framework import serializers
@@ -48,16 +46,3 @@ class AsymmetricField(serializers.Field):
def run_validation(self, data=serializers.empty):
return self.write.run_validation(data)
class CompatibleJSONField(serializers.JSONField):
def to_internal_value(self, data):
try:
return json.dumps(data)
except (TypeError, ValueError):
self.fail('invalid')
def to_representation(self, value):
if value:
return json.loads(value)
return value

View File

@@ -32,13 +32,11 @@ class DiscountSerializer(I18nAwareModelSerializer):
'available_until', 'subevent_mode', 'condition_all_products', 'condition_limit_products',
'condition_apply_to_addons', 'condition_min_count', 'condition_min_value',
'benefit_discount_matching_percent', 'benefit_only_apply_to_cheapest_n_matches',
'benefit_same_products', 'benefit_limit_products', 'benefit_apply_to_addons',
'benefit_ignore_voucher_discounted', 'condition_ignore_voucher_discounted')
'condition_ignore_voucher_discounted')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['condition_limit_products'].queryset = self.context['event'].items.all()
self.fields['benefit_limit_products'].queryset = self.context['event'].items.all()
def validate(self, data):
data = super().validate(data)

View File

@@ -46,7 +46,6 @@ from rest_framework import serializers
from rest_framework.fields import ChoiceField, Field
from rest_framework.relations import SlugRelatedField
from pretix.api.serializers import CompatibleJSONField
from pretix.api.serializers.i18n import I18nAwareModelSerializer
from pretix.api.serializers.settings import SettingsSerializer
from pretix.base.models import Device, Event, TaxRule, TeamAPIToken
@@ -54,7 +53,6 @@ from pretix.base.models.event import SubEvent
from pretix.base.models.items import (
ItemMetaProperty, SubEventItem, SubEventItemVariation,
)
from pretix.base.models.tax import CustomRulesValidator
from pretix.base.services.seating import (
SeatProtected, generate_seats, validate_plan_change,
)
@@ -652,16 +650,9 @@ class SubEventSerializer(I18nAwareModelSerializer):
class TaxRuleSerializer(CountryFieldMixin, I18nAwareModelSerializer):
custom_rules = CompatibleJSONField(
validators=[CustomRulesValidator()],
required=False,
allow_null=True,
)
class Meta:
model = TaxRule
fields = ('id', 'name', 'rate', 'price_includes_tax', 'eu_reverse_charge', 'home_country', 'internal_name',
'keep_gross_if_rate_changes', 'custom_rules')
fields = ('id', 'name', 'rate', 'price_includes_tax', 'eu_reverse_charge', 'home_country', 'internal_name', 'keep_gross_if_rate_changes')
class EventSettingsSerializer(SettingsSerializer):
@@ -728,7 +719,6 @@ class EventSettingsSerializer(SettingsSerializer):
'payment_term_minutes',
'payment_term_last',
'payment_term_expire_automatically',
'payment_term_expire_delay_days',
'payment_term_accept_late',
'payment_explanation',
'payment_pending_hidden',
@@ -817,10 +807,6 @@ class EventSettingsSerializer(SettingsSerializer):
'reusable_media_type_nfc_uid',
'reusable_media_type_nfc_uid_autocreate_giftcard',
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes_random_uid',
]
readonly_fields = [
# These are read-only since they are currently only settable on organizers, not events
@@ -830,10 +816,6 @@ class EventSettingsSerializer(SettingsSerializer):
'reusable_media_type_nfc_uid',
'reusable_media_type_nfc_uid_autocreate_giftcard',
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes_random_uid',
]
def __init__(self, *args, **kwargs):
@@ -902,8 +884,6 @@ class DeviceEventSettingsSerializer(EventSettingsSerializer):
'name_scheme',
'reusable_media_type_barcode',
'reusable_media_type_nfc_uid',
'reusable_media_type_nfc_mf0aes',
'reusable_media_type_nfc_mf0aes_random_uid',
'system_question_order',
]

View File

@@ -93,7 +93,7 @@ class JobRunSerializer(serializers.Serializer):
if events is not None and not isinstance(ex, OrganizerLevelExportMixin):
self.fields["events"] = serializers.SlugRelatedField(
queryset=events,
required=False,
required=True,
allow_empty=False,
slug_field='slug',
many=True
@@ -156,9 +156,8 @@ class JobRunSerializer(serializers.Serializer):
def to_internal_value(self, data):
if isinstance(data, QueryDict):
data = data.copy()
for k, v in self.fields.items():
if isinstance(v, serializers.ManyRelatedField) and k not in data and k != "events":
if isinstance(v, serializers.ManyRelatedField) and k not in data:
data[k] = []
for fk in self.fields.keys():

View File

@@ -60,8 +60,6 @@ class NestedGiftCardSerializer(GiftCardSerializer):
class ReusableMediaSerializer(I18nAwareModelSerializer):
organizer = serializers.SlugRelatedField(slug_field='slug', read_only=True)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -113,7 +111,6 @@ class ReusableMediaSerializer(I18nAwareModelSerializer):
model = ReusableMedium
fields = (
'id',
'organizer',
'created',
'updated',
'type',

View File

@@ -19,6 +19,7 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
import json
import logging
import os
from collections import Counter, defaultdict
@@ -27,7 +28,6 @@ from decimal import Decimal
import pycountry
from django.conf import settings
from django.core.files import File
from django.db import models
from django.db.models import F, Q
from django.utils.encoding import force_str
from django.utils.timezone import now
@@ -39,7 +39,6 @@ from rest_framework.exceptions import ValidationError
from rest_framework.relations import SlugRelatedField
from rest_framework.reverse import reverse
from pretix.api.serializers import CompatibleJSONField
from pretix.api.serializers.event import SubEventSerializer
from pretix.api.serializers.i18n import I18nAwareModelSerializer
from pretix.api.serializers.item import (
@@ -284,12 +283,11 @@ class FailedCheckinSerializer(I18nAwareModelSerializer):
raw_item = serializers.PrimaryKeyRelatedField(queryset=Item.objects.none(), required=False, allow_null=True)
raw_variation = serializers.PrimaryKeyRelatedField(queryset=ItemVariation.objects.none(), required=False, allow_null=True)
raw_subevent = serializers.PrimaryKeyRelatedField(queryset=SubEvent.objects.none(), required=False, allow_null=True)
nonce = serializers.CharField(required=False, allow_null=True)
class Meta:
model = Checkin
fields = ('error_reason', 'error_explanation', 'raw_barcode', 'raw_item', 'raw_variation',
'raw_subevent', 'nonce', 'datetime', 'type', 'position')
'raw_subevent', 'datetime', 'type', 'position')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -374,15 +372,11 @@ class PdfDataSerializer(serializers.Field):
self.context['vars_images'] = get_images(self.context['event'])
for k, f in self.context['vars'].items():
if 'evaluate_bulk' in f:
# Will be evaluated later by our list serializers
res[k] = (f['evaluate_bulk'], instance)
else:
try:
res[k] = f['evaluate'](instance, instance.order, ev)
except:
logger.exception('Evaluating PDF variable failed')
res[k] = '(error)'
try:
res[k] = f['evaluate'](instance, instance.order, ev)
except:
logger.exception('Evaluating PDF variable failed')
res[k] = '(error)'
if not hasattr(ev, '_cached_meta_data'):
ev._cached_meta_data = ev.meta_data
@@ -435,38 +429,6 @@ class PdfDataSerializer(serializers.Field):
return res
class OrderPositionListSerializer(serializers.ListSerializer):
def to_representation(self, data):
# We have a custom implementation of this method because PdfDataSerializer() might keep some elements unevaluated
# with a (callable, input) tuple. We'll loop over these entries and evaluate them bulk-wise to save on SQL queries.
if isinstance(self.parent, OrderSerializer) and isinstance(self.parent.parent, OrderListSerializer):
# Do not execute our custom code because it will be executed by OrderListSerializer later for the
# full result set.
return super().to_representation(data)
iterable = data.all() if isinstance(data, models.Manager) else data
data = []
evaluate_queue = defaultdict(list)
for item in iterable:
entry = self.child.to_representation(item)
if "pdf_data" in entry:
for k, v in entry["pdf_data"].items():
if isinstance(v, tuple) and callable(v[0]):
evaluate_queue[v[0]].append((v[1], entry, k))
data.append(entry)
for func, entries in evaluate_queue.items():
results = func([item for (item, entry, k) in entries])
for (item, entry, k), result in zip(entries, results):
entry["pdf_data"][k] = result
return data
class OrderPositionSerializer(I18nAwareModelSerializer):
checkins = CheckinSerializer(many=True, read_only=True)
answers = AnswerSerializer(many=True)
@@ -478,7 +440,6 @@ class OrderPositionSerializer(I18nAwareModelSerializer):
attendee_name = serializers.CharField(required=False)
class Meta:
list_serializer_class = OrderPositionListSerializer
model = OrderPosition
fields = ('id', 'order', 'positionid', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts',
'company', 'street', 'zipcode', 'city', 'country', 'state', 'discount',
@@ -507,20 +468,6 @@ class OrderPositionSerializer(I18nAwareModelSerializer):
def validate(self, data):
raise TypeError("this serializer is readonly")
def to_representation(self, data):
if isinstance(self.parent, (OrderListSerializer, OrderPositionListSerializer)):
# Do not execute our custom code because it will be executed by OrderListSerializer later for the
# full result set.
return super().to_representation(data)
entry = super().to_representation(data)
if "pdf_data" in entry:
for k, v in entry["pdf_data"].items():
if isinstance(v, tuple) and callable(v[0]):
entry["pdf_data"][k] = v[0]([v[1]])[0]
return entry
class RequireAttentionField(serializers.Field):
def to_representation(self, instance: OrderPosition):
@@ -588,9 +535,8 @@ class OrderPaymentTypeField(serializers.Field):
# TODO: Remove after pretix 2.2
def to_representation(self, instance: Order):
t = None
if instance.pk:
for p in instance.payments.all():
t = p.provider
for p in instance.payments.all():
t = p.provider
return t
@@ -598,10 +544,10 @@ class OrderPaymentDateField(serializers.DateField):
# TODO: Remove after pretix 2.2
def to_representation(self, instance: Order):
t = None
if instance.pk:
for p in instance.payments.all():
t = p.payment_date or t
for p in instance.payments.all():
t = p.payment_date or t
if t:
return super().to_representation(t.date())
@@ -615,7 +561,7 @@ class PaymentURLField(serializers.URLField):
def to_representation(self, instance: OrderPayment):
if instance.state != OrderPayment.PAYMENT_STATE_CREATED:
return None
return build_absolute_uri(instance.order.event, 'presale:event.order.pay', kwargs={
return build_absolute_uri(self.context['event'], 'presale:event.order.pay', kwargs={
'order': instance.order.code,
'secret': instance.order.secret,
'payment': instance.pk,
@@ -660,42 +606,13 @@ class OrderRefundSerializer(I18nAwareModelSerializer):
class OrderURLField(serializers.URLField):
def to_representation(self, instance: Order):
return build_absolute_uri(instance.event, 'presale:event.order', kwargs={
return build_absolute_uri(self.context['event'], 'presale:event.order', kwargs={
'order': instance.code,
'secret': instance.secret,
})
class OrderListSerializer(serializers.ListSerializer):
def to_representation(self, data):
# We have a custom implementation of this method because PdfDataSerializer() might keep some elements
# unevaluated with a (callable, input) tuple. We'll loop over these entries and evaluate them bulk-wise to
# save on SQL queries.
iterable = data.all() if isinstance(data, models.Manager) else data
data = []
evaluate_queue = defaultdict(list)
for item in iterable:
entry = self.child.to_representation(item)
for p in entry.get("positions", []):
if "pdf_data" in p:
for k, v in p["pdf_data"].items():
if isinstance(v, tuple) and callable(v[0]):
evaluate_queue[v[0]].append((v[1], p, k))
data.append(entry)
for func, entries in evaluate_queue.items():
results = func([item for (item, entry, k) in entries])
for (item, entry, k), result in zip(entries, results):
entry["pdf_data"][k] = result
return data
class OrderSerializer(I18nAwareModelSerializer):
event = SlugRelatedField(slug_field='slug', read_only=True)
invoice_address = InvoiceAddressSerializer(allow_null=True)
positions = OrderPositionSerializer(many=True, read_only=True)
fees = OrderFeeSerializer(many=True, read_only=True)
@@ -709,9 +626,8 @@ class OrderSerializer(I18nAwareModelSerializer):
class Meta:
model = Order
list_serializer_class = OrderListSerializer
fields = (
'code', 'event', 'status', 'testmode', 'secret', 'email', 'phone', 'locale', 'datetime', 'expires', 'payment_date',
'code', 'status', 'testmode', 'secret', 'email', 'phone', 'locale', 'datetime', 'expires', 'payment_date',
'payment_provider', 'fees', 'total', 'comment', 'custom_followup_at', 'invoice_address', 'positions', 'downloads',
'checkin_attention', 'last_modified', 'payments', 'refunds', 'require_approval', 'sales_channel',
'url', 'customer', 'valid_if_pending'
@@ -979,6 +895,19 @@ class OrderPositionCreateSerializer(I18nAwareModelSerializer):
return data
class CompatibleJSONField(serializers.JSONField):
def to_internal_value(self, data):
try:
return json.dumps(data)
except (TypeError, ValueError):
self.fail('invalid')
def to_representation(self, value):
if value:
return json.loads(value)
return value
class WrappedList:
def __init__(self, data):
self._data = data
@@ -1434,7 +1363,6 @@ class OrderCreateSerializer(I18nAwareModelSerializer):
answers.append(answ)
pos.answers = answers
pos.pseudonymization_id = "PREVIEW"
pos.checkins = []
pos_map[pos.positionid] = pos
else:
if pos.voucher:
@@ -1531,8 +1459,6 @@ class OrderCreateSerializer(I18nAwareModelSerializer):
if simulate:
order.fees = fees
order.positions = pos_map.values()
order.payments = []
order.refunds = []
return order # ignore payments
else:
order.save(update_fields=['total'])
@@ -1595,7 +1521,6 @@ class InlineInvoiceLineSerializer(I18nAwareModelSerializer):
class InvoiceSerializer(I18nAwareModelSerializer):
event = SlugRelatedField(slug_field='slug', read_only=True)
order = serializers.SlugRelatedField(slug_field='code', read_only=True)
refers = serializers.SlugRelatedField(slug_field='full_invoice_no', read_only=True)
lines = InlineInvoiceLineSerializer(many=True)
@@ -1604,7 +1529,7 @@ class InvoiceSerializer(I18nAwareModelSerializer):
class Meta:
model = Invoice
fields = ('event', 'order', 'number', 'is_cancellation', 'invoice_from', 'invoice_from_name', 'invoice_from_zipcode',
fields = ('order', 'number', 'is_cancellation', 'invoice_from', 'invoice_from_name', 'invoice_from_zipcode',
'invoice_from_city', 'invoice_from_country', 'invoice_from_tax_id', 'invoice_from_vat_id',
'invoice_to', 'invoice_to_company', 'invoice_to_name', 'invoice_to_street', 'invoice_to_zipcode',
'invoice_to_city', 'invoice_to_state', 'invoice_to_country', 'invoice_to_vat_id', 'invoice_to_beneficiary',

View File

@@ -70,8 +70,6 @@ class OrderPositionCreateForExistingOrderSerializer(OrderPositionCreateSerialize
def validate(self, data):
data = super().validate(data)
if 'order' in self.context:
data['order'] = self.context['order']
if data.get('addon_to'):
try:
data['addon_to'] = data['order'].positions.get(positionid=data['addon_to'])

View File

@@ -36,9 +36,9 @@ from pretix.api.serializers.settings import SettingsSerializer
from pretix.base.auth import get_auth_backends
from pretix.base.i18n import get_language_without_region
from pretix.base.models import (
Customer, Device, GiftCard, GiftCardAcceptance, GiftCardTransaction,
Membership, MembershipType, OrderPosition, Organizer, ReusableMedium,
SeatingPlan, Team, TeamAPIToken, TeamInvite, User,
Customer, Device, GiftCard, GiftCardTransaction, Membership,
MembershipType, OrderPosition, Organizer, ReusableMedium, SeatingPlan,
Team, TeamAPIToken, TeamInvite, User,
)
from pretix.base.models.seating import SeatingPlanLayoutValidator
from pretix.base.services.mail import SendMailException, mail
@@ -94,14 +94,6 @@ class CustomerSerializer(I18nAwareModelSerializer):
data['name_parts']['_scheme'] = self.context['request'].organizer.settings.name_scheme
return data
def validate_email(self, value):
qs = Customer.objects.filter(organizer=self.context['organizer'], email__iexact=value)
if self.instance and self.instance.pk:
qs = qs.exclude(pk=self.instance.pk)
if qs.exists():
raise ValidationError(_("An account with this email address is already registered."))
return value
class CustomerCreateSerializer(CustomerSerializer):
send_email = serializers.BooleanField(default=False, required=False, allow_null=True)
@@ -191,11 +183,8 @@ class GiftCardSerializer(I18nAwareModelSerializer):
qs = GiftCard.objects.filter(
secret=s
).filter(
Q(issuer=self.context["organizer"]) |
Q(issuer__in=GiftCardAcceptance.objects.filter(
acceptor=self.context["organizer"],
active=True,
).values_list('issuer', flat=True))
Q(issuer=self.context["organizer"]) | Q(
issuer__gift_card_collector_acceptance__collector=self.context["organizer"])
)
if self.instance:
qs = qs.exclude(pk=self.instance.pk)
@@ -259,8 +248,6 @@ class DeviceSerializer(serializers.ModelSerializer):
unique_serial = serializers.CharField(read_only=True)
hardware_brand = serializers.CharField(read_only=True)
hardware_model = serializers.CharField(read_only=True)
os_name = serializers.CharField(read_only=True)
os_version = serializers.CharField(read_only=True)
software_brand = serializers.CharField(read_only=True)
software_version = serializers.CharField(read_only=True)
created = serializers.DateTimeField(read_only=True)
@@ -273,7 +260,7 @@ class DeviceSerializer(serializers.ModelSerializer):
fields = (
'device_id', 'unique_serial', 'initialization_token', 'all_events', 'limit_events',
'revoked', 'name', 'created', 'initialized', 'hardware_brand', 'hardware_model',
'os_name', 'os_version', 'software_brand', 'software_version', 'security_profile'
'software_brand', 'software_version', 'security_profile'
)
@@ -400,9 +387,6 @@ class OrganizerSettingsSerializer(SettingsSerializer):
'reusable_media_type_nfc_uid',
'reusable_media_type_nfc_uid_autocreate_giftcard',
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
]
def __init__(self, *args, **kwargs):

View File

@@ -35,7 +35,8 @@
import importlib
from django.apps import apps
from django.urls import include, re_path
from django.conf.urls import re_path
from django.urls import include
from rest_framework import routers
from pretix.api.views import cart
@@ -61,8 +62,6 @@ orga_router.register(r'membershiptypes', organizer.MembershipTypeViewSet)
orga_router.register(r'reusablemedia', media.ReusableMediaViewSet)
orga_router.register(r'teams', organizer.TeamViewSet)
orga_router.register(r'devices', organizer.DeviceViewSet)
orga_router.register(r'orders', order.OrganizerOrderViewSet)
orga_router.register(r'invoices', order.InvoiceViewSet)
orga_router.register(r'exporters', exporters.OrganizerExportersViewSet, basename='exporters')
team_router = routers.DefaultRouter()
@@ -79,7 +78,7 @@ event_router.register(r'questions', item.QuestionViewSet)
event_router.register(r'discounts', discount.DiscountViewSet)
event_router.register(r'quotas', item.QuotaViewSet)
event_router.register(r'vouchers', voucher.VoucherViewSet)
event_router.register(r'orders', order.EventOrderViewSet)
event_router.register(r'orders', order.OrderViewSet)
event_router.register(r'orderpositions', order.OrderPositionViewSet)
event_router.register(r'invoices', order.InvoiceViewSet)
event_router.register(r'revokedsecrets', order.RevokedSecretViewSet, basename='revokedsecrets')

View File

@@ -164,21 +164,8 @@ class CheckinListViewSet(viewsets.ModelViewSet):
secret=serializer.validated_data['raw_barcode']
).first()
clist = self.get_object()
if serializer.validated_data.get('nonce'):
if kwargs.get('position'):
prev = kwargs['position'].all_checkins.filter(nonce=serializer.validated_data['nonce']).first()
else:
prev = clist.checkins.filter(
nonce=serializer.validated_data['nonce'],
raw_barcode=serializer.validated_data['raw_barcode'],
).first()
if prev:
# Ignore because nonce is already handled
return Response(serializer.data, status=201)
c = serializer.save(
list=clist,
list=self.get_object(),
successful=False,
forced=True,
force_sent=True,
@@ -409,7 +396,7 @@ def _checkin_list_position_queryset(checkinlists, ignore_status=False, ignore_pr
def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force, checkin_type, ignore_unpaid, nonce,
untrusted_input, user, auth, expand, pdf_data, request, questions_supported, canceled_supported,
source_type='barcode', legacy_url_support=False, simulate=False):
source_type='barcode', legacy_url_support=False):
if not checkinlists:
raise ValidationError('No check-in list passed.')
@@ -446,8 +433,6 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
)
raw_barcode_for_checkin = None
from_revoked_secret = False
if simulate:
common_checkin_args['__fake_arg_to_prevent_this_from_being_saved'] = True
# 1. Gather a list of positions that could be the one we looking for, either from their ID, secret or
# parent secret
@@ -487,14 +472,13 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
revoked_matches = list(
RevokedTicketSecret.objects.filter(event_id__in=list_by_event.keys(), secret=raw_barcode))
if len(revoked_matches) == 0:
if not simulate:
checkinlists[0].event.log_action('pretix.event.checkin.unknown', data={
'datetime': datetime,
'type': checkin_type,
'list': checkinlists[0].pk,
'barcode': raw_barcode,
'searched_lists': [cl.pk for cl in checkinlists]
}, user=user, auth=auth)
checkinlists[0].event.log_action('pretix.event.checkin.unknown', data={
'datetime': datetime,
'type': checkin_type,
'list': checkinlists[0].pk,
'barcode': raw_barcode,
'searched_lists': [cl.pk for cl in checkinlists]
}, user=user, auth=auth)
for cl in checkinlists:
for k, s in cl.event.ticket_secret_generators.items():
@@ -508,13 +492,12 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
except:
pass
if not simulate:
Checkin.objects.create(
position=None,
successful=False,
error_reason=Checkin.REASON_INVALID,
**common_checkin_args,
)
Checkin.objects.create(
position=None,
successful=False,
error_reason=Checkin.REASON_INVALID,
**common_checkin_args,
)
if force and legacy_url_support and isinstance(auth, Device):
# There was a bug in libpretixsync: If you scanned a ticket in offline mode that was
@@ -556,20 +539,19 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
from_revoked_secret = True
else:
op = revoked_matches[0].position
if not simulate:
op.order.log_action('pretix.event.checkin.revoked', data={
'datetime': datetime,
'type': checkin_type,
'list': list_by_event[revoked_matches[0].event_id].pk,
'barcode': raw_barcode
}, user=user, auth=auth)
common_checkin_args['list'] = list_by_event[revoked_matches[0].event_id]
Checkin.objects.create(
position=op,
successful=False,
error_reason=Checkin.REASON_REVOKED,
**common_checkin_args
)
op.order.log_action('pretix.event.checkin.revoked', data={
'datetime': datetime,
'type': checkin_type,
'list': list_by_event[revoked_matches[0].event_id].pk,
'barcode': raw_barcode
}, user=user, auth=auth)
common_checkin_args['list'] = list_by_event[revoked_matches[0].event_id]
Checkin.objects.create(
position=op,
successful=False,
error_reason=Checkin.REASON_REVOKED,
**common_checkin_args
)
return Response({
'status': 'error',
'reason': Checkin.REASON_REVOKED,
@@ -606,25 +588,24 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
# We choose the first match (regardless of product) for the logging since it's most likely to be the
# base product according to our order_by above.
op = op_candidates[0]
if not simulate:
op.order.log_action('pretix.event.checkin.denied', data={
'position': op.id,
'positionid': op.positionid,
'errorcode': Checkin.REASON_AMBIGUOUS,
'reason_explanation': None,
'force': force,
'datetime': datetime,
'type': checkin_type,
'list': list_by_event[op.order.event_id].pk,
}, user=user, auth=auth)
common_checkin_args['list'] = list_by_event[op.order.event_id]
Checkin.objects.create(
position=op,
successful=False,
error_reason=Checkin.REASON_AMBIGUOUS,
error_explanation=None,
**common_checkin_args,
)
op.order.log_action('pretix.event.checkin.denied', data={
'position': op.id,
'positionid': op.positionid,
'errorcode': Checkin.REASON_AMBIGUOUS,
'reason_explanation': None,
'force': force,
'datetime': datetime,
'type': checkin_type,
'list': list_by_event[op.order.event_id].pk,
}, user=user, auth=auth)
common_checkin_args['list'] = list_by_event[op.order.event_id]
Checkin.objects.create(
position=op,
successful=False,
error_reason=Checkin.REASON_AMBIGUOUS,
error_explanation=None,
**common_checkin_args,
)
return Response({
'status': 'error',
'reason': Checkin.REASON_AMBIGUOUS,
@@ -671,7 +652,6 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
raw_barcode=raw_barcode_for_checkin,
raw_source_type=source_type,
from_revoked_secret=from_revoked_secret,
simulate=simulate,
)
except RequiredQuestionsError as e:
return Response({
@@ -684,24 +664,23 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
'list': MiniCheckinListSerializer(list_by_event[op.order.event_id]).data,
}, status=400)
except CheckInError as e:
if not simulate:
op.order.log_action('pretix.event.checkin.denied', data={
'position': op.id,
'positionid': op.positionid,
'errorcode': e.code,
'reason_explanation': e.reason,
'force': force,
'datetime': datetime,
'type': checkin_type,
'list': list_by_event[op.order.event_id].pk,
}, user=user, auth=auth)
Checkin.objects.create(
position=op,
successful=False,
error_reason=e.code,
error_explanation=e.reason,
**common_checkin_args,
)
op.order.log_action('pretix.event.checkin.denied', data={
'position': op.id,
'positionid': op.positionid,
'errorcode': e.code,
'reason_explanation': e.reason,
'force': force,
'datetime': datetime,
'type': checkin_type,
'list': list_by_event[op.order.event_id].pk,
}, user=user, auth=auth)
Checkin.objects.create(
position=op,
successful=False,
error_reason=e.code,
error_explanation=e.reason,
**common_checkin_args,
)
return Response({
'status': 'error',
'reason': e.code,

View File

@@ -19,12 +19,8 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
import base64
import logging
from cryptography.hazmat.backends.openssl.backend import Backend
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.serialization import load_pem_public_key
from django.db.models import Exists, OuterRef, Q
from django.db.models.functions import Coalesce
from django.utils.timezone import now
@@ -38,8 +34,6 @@ from pretix.api.auth.device import DeviceTokenAuthentication
from pretix.api.views.version import numeric_version
from pretix.base.models import CheckinList, Device, SubEvent
from pretix.base.models.devices import Gate, generate_api_token
from pretix.base.models.media import MediumKeySet
from pretix.base.services.media import get_keysets_for_organizer
logger = logging.getLogger(__name__)
@@ -48,73 +42,17 @@ class InitializationRequestSerializer(serializers.Serializer):
token = serializers.CharField(max_length=190)
hardware_brand = serializers.CharField(max_length=190)
hardware_model = serializers.CharField(max_length=190)
os_name = serializers.CharField(max_length=190, required=False, allow_null=True)
os_version = serializers.CharField(max_length=190, required=False, allow_null=True)
software_brand = serializers.CharField(max_length=190)
software_version = serializers.CharField(max_length=190)
info = serializers.JSONField(required=False, allow_null=True)
rsa_pubkey = serializers.CharField(required=False, allow_null=True)
def validate(self, attrs):
if attrs.get('rsa_pubkey'):
try:
load_pem_public_key(
attrs['rsa_pubkey'].encode(), Backend()
)
except:
raise ValidationError({'rsa_pubkey': ['Not a valid public key.']})
return attrs
class UpdateRequestSerializer(serializers.Serializer):
hardware_brand = serializers.CharField(max_length=190)
hardware_model = serializers.CharField(max_length=190)
os_name = serializers.CharField(max_length=190, required=False, allow_null=True)
os_version = serializers.CharField(max_length=190, required=False, allow_null=True)
software_brand = serializers.CharField(max_length=190)
software_version = serializers.CharField(max_length=190)
info = serializers.JSONField(required=False, allow_null=True)
rsa_pubkey = serializers.CharField(required=False, allow_null=True)
def validate(self, attrs):
if attrs.get('rsa_pubkey'):
try:
load_pem_public_key(
attrs['rsa_pubkey'].encode(), Backend()
)
except:
raise ValidationError({'rsa_pubkey': ['Not a valid public key.']})
return attrs
class RSAEncryptedField(serializers.Field):
def to_representation(self, value):
public_key = load_pem_public_key(
self.context['device'].rsa_pubkey.encode(), Backend()
)
cipher_text = public_key.encrypt(
# RSA/ECB/PKCS1Padding
value,
padding.PKCS1v15()
)
return base64.b64encode(cipher_text).decode()
class MediumKeySetSerializer(serializers.ModelSerializer):
uid_key = RSAEncryptedField(read_only=True)
diversification_key = RSAEncryptedField(read_only=True)
organizer = serializers.SlugRelatedField(slug_field='slug', read_only=True)
class Meta:
model = MediumKeySet
fields = [
'public_id',
'organizer',
'active',
'media_type',
'uid_key',
'diversification_key',
]
class GateSerializer(serializers.ModelSerializer):
@@ -155,18 +93,12 @@ class InitializeView(APIView):
if device.initialized:
raise ValidationError({'token': ['This initialization token has already been used.']})
if device.revoked:
raise ValidationError({'token': ['This initialization token has been revoked.']})
device.initialized = now()
device.hardware_brand = serializer.validated_data.get('hardware_brand')
device.hardware_model = serializer.validated_data.get('hardware_model')
device.os_name = serializer.validated_data.get('os_name')
device.os_version = serializer.validated_data.get('os_version')
device.software_brand = serializer.validated_data.get('software_brand')
device.software_version = serializer.validated_data.get('software_version')
device.info = serializer.validated_data.get('info')
device.rsa_pubkey = serializer.validated_data.get('rsa_pubkey')
device.api_token = generate_api_token()
device.save()
@@ -185,15 +117,8 @@ class UpdateView(APIView):
device = request.auth
device.hardware_brand = serializer.validated_data.get('hardware_brand')
device.hardware_model = serializer.validated_data.get('hardware_model')
device.os_name = serializer.validated_data.get('os_name')
device.os_version = serializer.validated_data.get('os_version')
device.software_brand = serializer.validated_data.get('software_brand')
device.software_version = serializer.validated_data.get('software_version')
if serializer.validated_data.get('rsa_pubkey') and serializer.validated_data.get('rsa_pubkey') != device.rsa_pubkey:
if device.rsa_pubkey:
raise ValidationError({'rsa_pubkey': ['You cannot change the rsa_pubkey of the device once it is set.']})
else:
device.rsa_pubkey = serializer.validated_data.get('rsa_pubkey')
device.info = serializer.validated_data.get('info')
device.save()
device.log_action('pretix.device.updated', data=serializer.validated_data, auth=device)
@@ -241,12 +166,8 @@ class InfoView(APIView):
'pretix': __version__,
'pretix_numeric': numeric_version(__version__),
}
},
'medium_key_sets': MediumKeySetSerializer(
get_keysets_for_organizer(device.organizer),
many=True,
context={'device': request.auth}
).data if device.rsa_pubkey else []
}
})

View File

@@ -71,8 +71,6 @@ with scopes_disabled():
ends_after = django_filters.rest_framework.IsoDateTimeFilter(method='ends_after_qs')
sales_channel = django_filters.rest_framework.CharFilter(method='sales_channel_qs')
search = django_filters.rest_framework.CharFilter(method='search_qs')
date_from = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
date_to = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
class Meta:
model = Event
@@ -338,8 +336,6 @@ with scopes_disabled():
modified_since = django_filters.IsoDateTimeFilter(field_name='last_modified', lookup_expr='gte')
sales_channel = django_filters.rest_framework.CharFilter(method='sales_channel_qs')
search = django_filters.rest_framework.CharFilter(method='search_qs')
date_from = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
date_to = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
class Meta:
model = SubEvent
@@ -415,7 +411,6 @@ class SubEventViewSet(ConditionalListView, viewsets.ModelViewSet):
'subeventitem_set',
'subeventitemvariation_set',
'meta_values',
'meta_values__property',
Prefetch(
'seat_category_mappings',
to_attr='_seat_category_mappings',

View File

@@ -133,12 +133,7 @@ class EventExportersViewSet(ExportersMixin, viewsets.ViewSet):
def exporters(self):
exporters = []
responses = register_data_exporters.send(self.request.event)
raw_exporters = [response(self.request.event, self.request.organizer) for r, response in responses if response]
raw_exporters = [
ex for ex in raw_exporters
if ex.available_for_user(self.request.user if self.request.user and self.request.user.is_authenticated else None)
]
for ex in sorted(raw_exporters, key=lambda ex: str(ex.verbose_name)):
for ex in sorted([response(self.request.event, self.request.organizer) for r, response in responses if response], key=lambda ex: str(ex.verbose_name)):
ex._serializer = JobRunSerializer(exporter=ex)
exporters.append(ex)
return exporters
@@ -171,7 +166,7 @@ class OrganizerExportersViewSet(ExportersMixin, viewsets.ViewSet):
if (
not isinstance(ex, OrganizerLevelExportMixin) or
perm_holder.has_organizer_permission(self.request.organizer, ex.organizer_required_permission, self.request)
) and ex.available_for_user(self.request.user if self.request.user and self.request.user.is_authenticated else None)
)
]
for ex in sorted(raw_exporters, key=lambda ex: str(ex.verbose_name)):
ex._serializer = JobRunSerializer(exporter=ex, events=events)

View File

@@ -39,8 +39,7 @@ from pretix.api.serializers.media import (
)
from pretix.base.media import MEDIA_TYPES
from pretix.base.models import (
Checkin, GiftCard, GiftCardAcceptance, GiftCardTransaction, OrderPosition,
ReusableMedium,
Checkin, GiftCard, GiftCardTransaction, OrderPosition, ReusableMedium,
)
from pretix.helpers import OF_SELF
from pretix.helpers.dicts import merge_dicts
@@ -104,12 +103,6 @@ class ReusableMediaViewSet(viewsets.ModelViewSet):
auth=self.request.auth,
data=merge_dicts(self.request.data, {'id': inst.pk})
)
mt = MEDIA_TYPES.get(serializer.validated_data["type"])
if mt:
m = mt.handle_new(self.request.organizer, inst, self.request.user, self.request.auth)
if m:
s = self.get_serializer(m)
return Response({"result": s.data})
@transaction.atomic()
def perform_update(self, serializer):
@@ -142,28 +135,12 @@ class ReusableMediaViewSet(viewsets.ModelViewSet):
s = self.get_serializer(m)
return Response({"result": s.data})
except ReusableMedium.DoesNotExist:
try:
with scopes_disabled():
m = ReusableMedium.objects.get(
organizer__in=GiftCardAcceptance.objects.filter(
acceptor=request.organizer,
active=True,
reusable_media=True,
).values_list('issuer', flat=True),
type=s.validated_data["type"],
identifier=s.validated_data["identifier"],
)
m.linked_orderposition = None # not relevant for cross-organizer
m.customer = None # not relevant for cross-organizer
s = self.get_serializer(m)
return Response({"result": s.data})
except ReusableMedium.DoesNotExist:
mt = MEDIA_TYPES.get(s.validated_data["type"])
if mt:
m = mt.handle_unknown(request.organizer, s.validated_data["identifier"], request.user, request.auth)
if m:
s = self.get_serializer(m)
return Response({"result": s.data})
mt = MEDIA_TYPES.get(s.validated_data["type"])
if mt:
m = mt.handle_unknown(request.organizer, s.validated_data["identifier"], request.user, request.auth)
if m:
s = self.get_serializer(m)
return Response({"result": s.data})
return Response({"result": None})

View File

@@ -23,9 +23,10 @@ import datetime
import mimetypes
import os
from decimal import Decimal
from zoneinfo import ZoneInfo
import django_filters
import pytz
from django.conf import settings
from django.db import transaction
from django.db.models import (
Exists, F, OuterRef, Prefetch, Q, Subquery, prefetch_related_objects,
@@ -44,7 +45,6 @@ from rest_framework.exceptions import (
APIException, NotFound, PermissionDenied, ValidationError,
)
from rest_framework.mixins import CreateModelMixin
from rest_framework.permissions import SAFE_METHODS
from rest_framework.response import Response
from pretix.api.models import OAuthAccessToken
@@ -186,7 +186,7 @@ with scopes_disabled():
)
class OrderViewSetMixin:
class OrderViewSet(viewsets.ModelViewSet):
serializer_class = OrderSerializer
queryset = Order.objects.none()
filter_backends = (DjangoFilterBackend, TotalOrderingFilter)
@@ -194,12 +194,19 @@ class OrderViewSetMixin:
ordering_fields = ('datetime', 'code', 'status', 'last_modified')
filterset_class = OrderFilter
lookup_field = 'code'
permission = 'can_view_orders'
write_permission = 'can_change_orders'
def get_base_queryset(self):
raise NotImplementedError()
def get_serializer_context(self):
ctx = super().get_serializer_context()
ctx['event'] = self.request.event
ctx['pdf_data'] = self.request.query_params.get('pdf_data', 'false') == 'true'
ctx['exclude'] = self.request.query_params.getlist('exclude')
ctx['include'] = self.request.query_params.getlist('include')
return ctx
def get_queryset(self):
qs = self.get_base_queryset()
qs = self.request.event.orders
if 'fees' not in self.request.GET.getlist('exclude'):
if self.request.query_params.get('include_canceled_fees', 'false') == 'true':
fqs = OrderFee.all
@@ -221,12 +228,11 @@ class OrderViewSetMixin:
opq = OrderPosition.all
else:
opq = OrderPosition.objects
if request.query_params.get('pdf_data', 'false') == 'true' and getattr(request, 'event', None):
if request.query_params.get('pdf_data', 'false') == 'true':
prefetch_related_objects([request.organizer], 'meta_properties')
prefetch_related_objects(
[request.event],
Prefetch('meta_values', queryset=EventMetaValue.objects.select_related('property'),
to_attr='meta_values_cached'),
Prefetch('meta_values', queryset=EventMetaValue.objects.select_related('property'), to_attr='meta_values_cached'),
'questions',
'item_meta_properties',
)
@@ -261,12 +267,13 @@ class OrderViewSetMixin:
)
)
def get_serializer_context(self):
ctx = super().get_serializer_context()
ctx['exclude'] = self.request.query_params.getlist('exclude')
ctx['include'] = self.request.query_params.getlist('include')
ctx['pdf_data'] = False
return ctx
def _get_output_provider(self, identifier):
responses = register_ticket_outputs.send(self.request.event)
for receiver, response in responses:
prov = response(self.request.event)
if prov.identifier == identifier:
return prov
raise NotFound('Unknown output provider.')
@scopes_disabled() # we are sure enough that get_queryset() is correct, so we save some perforamnce
def list(self, request, **kwargs):
@@ -283,45 +290,6 @@ class OrderViewSetMixin:
serializer = self.get_serializer(queryset, many=True)
return Response(serializer.data, headers={'X-Page-Generated': date})
class OrganizerOrderViewSet(OrderViewSetMixin, viewsets.ReadOnlyModelViewSet):
def get_base_queryset(self):
perm = "can_view_orders" if self.request.method in SAFE_METHODS else "can_change_orders"
if isinstance(self.request.auth, (TeamAPIToken, Device)):
return Order.objects.filter(
event__organizer=self.request.organizer,
event__in=self.request.auth.get_events_with_permission(perm)
)
elif self.request.user.is_authenticated:
return Order.objects.filter(
event__organizer=self.request.organizer,
event__in=self.request.user.get_events_with_permission(perm)
)
else:
raise PermissionDenied()
class EventOrderViewSet(OrderViewSetMixin, viewsets.ModelViewSet):
permission = 'can_view_orders'
write_permission = 'can_change_orders'
def get_serializer_context(self):
ctx = super().get_serializer_context()
ctx['event'] = self.request.event
ctx['pdf_data'] = self.request.query_params.get('pdf_data', 'false') == 'true'
return ctx
def get_base_queryset(self):
return self.request.event.orders
def _get_output_provider(self, identifier):
responses = register_ticket_outputs.send(self.request.event)
for receiver, response in responses:
prov = response(self.request.event)
if prov.identifier == identifier:
return prov
raise NotFound('Unknown output provider.')
@action(detail=True, url_name='download', url_path='download/(?P<output>[^/]+)')
def download(self, request, output, **kwargs):
provider = self._get_output_provider(output)
@@ -645,7 +613,7 @@ class EventOrderViewSet(OrderViewSetMixin, viewsets.ModelViewSet):
status=status.HTTP_400_BAD_REQUEST
)
tz = ZoneInfo(self.request.event.settings.timezone)
tz = pytz.timezone(self.request.event.settings.timezone)
new_date = make_aware(datetime.datetime.combine(
new_date,
datetime.time(hour=23, minute=59, second=59)
@@ -694,16 +662,7 @@ class EventOrderViewSet(OrderViewSetMixin, viewsets.ModelViewSet):
with language(order.locale, self.request.event.settings.region):
payment = order.payments.last()
# OrderCreateSerializer creates at most one payment
if payment and payment.state == OrderPayment.PAYMENT_STATE_CONFIRMED:
order.log_action(
'pretix.event.order.payment.confirmed', {
'local_id': payment.local_id,
'provider': payment.provider,
},
user=request.user if request.user.is_authenticated else None,
auth=request.auth,
)
order_placed.send(self.request.event, order=order)
if order.status == Order.STATUS_PAID:
order_paid.send(self.request.event, order=order)
@@ -978,7 +937,6 @@ with scopes_disabled():
| Q(addon_to__attendee_email__icontains=value)
| Q(order__code__istartswith=value)
| Q(order__invoice_address__name_cached__icontains=value)
| Q(order__invoice_address__company__icontains=value)
| Q(order__email__icontains=value)
| Q(pk__in=matching_media)
)
@@ -1224,7 +1182,7 @@ class OrderPositionViewSet(viewsets.ModelViewSet):
ftype, ignored = mimetypes.guess_type(image_file.name)
extension = os.path.basename(image_file.name).split('.')[-1]
else:
img = Image.open(image_file)
img = Image.open(image_file, formats=settings.PILLOW_FORMATS_QUESTIONS_IMAGE)
ftype = Image.MIME[img.format]
extensions = {
'GIF': 'gif', 'TIFF': 'tif', 'BMP': 'bmp', 'JPEG': 'jpg', 'PNG': 'png'
@@ -1815,24 +1773,11 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
write_permission = 'can_change_orders'
def get_queryset(self):
perm = "can_view_orders" if self.request.method in SAFE_METHODS else "can_change_orders"
if getattr(self.request, 'event', None):
qs = self.request.event.invoices
elif isinstance(self.request.auth, (TeamAPIToken, Device)):
qs = Invoice.objects.filter(
event__organizer=self.request.organizer,
event__in=self.request.auth.get_events_with_permission(perm)
)
elif self.request.user.is_authenticated:
qs = Invoice.objects.filter(
event__organizer=self.request.organizer,
event__in=self.request.user.get_events_with_permission(perm)
)
return qs.prefetch_related('lines').select_related('order', 'refers').annotate(
return self.request.event.invoices.prefetch_related('lines').select_related('order', 'refers').annotate(
nr=Concat('prefix', 'invoice_no')
)
@action(detail=True)
@action(detail=True, )
def download(self, request, **kwargs):
invoice = self.get_object()
@@ -1851,7 +1796,7 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
return resp
@action(detail=True, methods=['POST'])
def regenerate(self, request, **kwargs):
def regenerate(self, request, **kwarts):
inv = self.get_object()
if inv.canceled:
raise ValidationError('The invoice has already been canceled.')
@@ -1861,7 +1806,7 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
raise PermissionDenied('The invoice file is no longer stored on the server.')
elif inv.sent_to_organizer:
raise PermissionDenied('The invoice file has already been exported.')
elif now().astimezone(inv.event.timezone).date() - inv.date > datetime.timedelta(days=1):
elif now().astimezone(self.request.event.timezone).date() - inv.date > datetime.timedelta(days=1):
raise PermissionDenied('The invoice file is too old to be regenerated.')
else:
inv = regenerate_invoice(inv)
@@ -1876,7 +1821,7 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
return Response(status=204)
@action(detail=True, methods=['POST'])
def reissue(self, request, **kwargs):
def reissue(self, request, **kwarts):
inv = self.get_object()
if inv.canceled:
raise ValidationError('The invoice has already been canceled.')

View File

@@ -189,34 +189,6 @@ class ParametrizedOrderPositionWebhookEvent(ParametrizedOrderWebhookEvent):
return d
class ParametrizedWaitingListEntryWebhookEvent(ParametrizedWebhookEvent):
def build_payload(self, logentry: LogEntry):
# do not use content_object, this is also called in deletion
return {
'notification_id': logentry.pk,
'organizer': logentry.event.organizer.slug,
'event': logentry.event.slug,
'waitinglistentry': logentry.object_id,
'action': logentry.action_type,
}
class ParametrizedCustomerWebhookEvent(ParametrizedWebhookEvent):
def build_payload(self, logentry: LogEntry):
customer = logentry.content_object
if not customer:
return None
return {
'notification_id': logentry.pk,
'organizer': customer.organizer.slug,
'customer': customer.identifier,
'action': logentry.action_type,
}
@receiver(register_webhook_events, dispatch_uid="base_register_default_webhook_events")
def register_default_webhook_events(sender, **kwargs):
return (
@@ -349,34 +321,6 @@ def register_default_webhook_events(sender, **kwargs):
'pretix.event.testmode.deactivated',
_('Test-Mode of shop has been deactivated'),
),
ParametrizedWaitingListEntryWebhookEvent(
'pretix.event.orders.waitinglist.added',
_('Waiting list entry added'),
),
ParametrizedWaitingListEntryWebhookEvent(
'pretix.event.orders.waitinglist.changed',
_('Waiting list entry changed'),
),
ParametrizedWaitingListEntryWebhookEvent(
'pretix.event.orders.waitinglist.deleted',
_('Waiting list entry deleted'),
),
ParametrizedWaitingListEntryWebhookEvent(
'pretix.event.orders.waitinglist.voucher_assigned',
_('Waiting list entry received voucher'),
),
ParametrizedCustomerWebhookEvent(
'pretix.customer.created',
_('Customer account created'),
),
ParametrizedCustomerWebhookEvent(
'pretix.customer.changed',
_('Customer account changed'),
),
ParametrizedCustomerWebhookEvent(
'pretix.customer.anonymized',
_('Customer account anonymized'),
),
)

View File

@@ -62,27 +62,27 @@ class NamespacedCache:
prefix = int(time.time())
self.cache.set(self.prefixkey, prefix)
def set(self, key: str, value: any, timeout: int=300):
def set(self, key: str, value: str, timeout: int=300):
return self.cache.set(self._prefix_key(key), value, timeout)
def get(self, key: str) -> any:
def get(self, key: str) -> str:
return self.cache.get(self._prefix_key(key, known_prefix=self._last_prefix))
def get_or_set(self, key: str, default: Callable, timeout=300) -> any:
def get_or_set(self, key: str, default: Callable, timeout=300) -> str:
return self.cache.get_or_set(
self._prefix_key(key, known_prefix=self._last_prefix),
default=default,
timeout=timeout
)
def get_many(self, keys: List[str]) -> Dict[str, any]:
def get_many(self, keys: List[str]) -> Dict[str, str]:
values = self.cache.get_many([self._prefix_key(key) for key in keys])
newvalues = {}
for k, v in values.items():
newvalues[self._strip_prefix(k)] = v
return newvalues
def set_many(self, values: Dict[str, any], timeout=300):
def set_many(self, values: Dict[str, str], timeout=300):
newvalues = {}
for k, v in values.items():
newvalues[self._prefix_key(k)] = v

View File

@@ -134,11 +134,8 @@ class TemplateBasedMailRenderer(BaseHTMLMailRenderer):
def template_name(self):
raise NotImplementedError()
def compile_markdown(self, plaintext):
return markdown_compile_email(plaintext)
def render(self, plain_body: str, plain_signature: str, subject: str, order, position) -> str:
body_md = self.compile_markdown(plain_body)
body_md = markdown_compile_email(plain_body)
htmlctx = {
'site': settings.PRETIX_INSTANCE_NAME,
'site_url': settings.SITE_URL,
@@ -156,7 +153,7 @@ class TemplateBasedMailRenderer(BaseHTMLMailRenderer):
if plain_signature:
signature_md = plain_signature.replace('\n', '<br>\n')
signature_md = self.compile_markdown(signature_md)
signature_md = markdown_compile_email(signature_md)
htmlctx['signature'] = signature_md
if order:

View File

@@ -37,8 +37,8 @@ import tempfile
from collections import OrderedDict, namedtuple
from decimal import Decimal
from typing import Optional, Tuple
from zoneinfo import ZoneInfo
import pytz
from defusedcsv import csv
from django import forms
from django.conf import settings
@@ -68,7 +68,7 @@ class BaseExporter:
self.events = event
self.event = None
e = self.events.first()
self.timezone = e.timezone if e else ZoneInfo(settings.TIME_ZONE)
self.timezone = e.timezone if e else pytz.timezone(settings.TIME_ZONE)
else:
self.events = Event.objects.filter(pk=event.pk)
self.timezone = event.timezone
@@ -140,7 +140,7 @@ class BaseExporter:
"""
return {}
def render(self, form_data: dict) -> Tuple[str, str, Optional[bytes]]:
def render(self, form_data: dict) -> Tuple[str, str, bytes]:
"""
Render the exported file and return a tuple consisting of a filename, a file type
and file content.
@@ -157,13 +157,6 @@ class BaseExporter:
"""
raise NotImplementedError() # NOQA
def available_for_user(self, user) -> bool:
"""
Allows to do additional checks whether an exporter is available based on the user who calls it. Note that
``user`` may be ``None`` e.g. during API usage.
"""
return True
class OrganizerLevelExportMixin:
@property

View File

@@ -34,8 +34,8 @@
from collections import OrderedDict
from decimal import Decimal
from zoneinfo import ZoneInfo
import pytz
from django import forms
from django.db.models import (
Case, CharField, Count, DateTimeField, F, IntegerField, Max, Min, OuterRef,
@@ -326,7 +326,7 @@ class OrderListExporter(MultiSheetListExporter):
yield self.ProgressSetTotal(total=qs.count())
for order in qs.order_by('datetime').iterator():
tz = ZoneInfo(self.event_object_cache[order.event_id].settings.timezone)
tz = pytz.timezone(self.event_object_cache[order.event_id].settings.timezone)
row = [
self.event_object_cache[order.event_id].slug,
@@ -459,7 +459,7 @@ class OrderListExporter(MultiSheetListExporter):
yield self.ProgressSetTotal(total=qs.count())
for op in qs.order_by('order__datetime').iterator():
order = op.order
tz = ZoneInfo(order.event.settings.timezone)
tz = pytz.timezone(order.event.settings.timezone)
row = [
self.event_object_cache[order.event_id].slug,
order.code,
@@ -549,9 +549,7 @@ class OrderListExporter(MultiSheetListExporter):
headers.append(_('End date'))
headers += [
_('Product'),
_('Product ID'),
_('Variation'),
_('Variation ID'),
_('Price'),
_('Tax rate'),
_('Tax rule'),
@@ -633,7 +631,7 @@ class OrderListExporter(MultiSheetListExporter):
for op in ops:
order = op.order
tz = ZoneInfo(self.event_object_cache[order.event_id].settings.timezone)
tz = pytz.timezone(self.event_object_cache[order.event_id].settings.timezone)
row = [
self.event_object_cache[order.event_id].slug,
order.code,
@@ -658,9 +656,7 @@ class OrderListExporter(MultiSheetListExporter):
row.append('')
row += [
str(op.item),
str(op.item_id),
str(op.variation) if op.variation else '',
str(op.variation_id) if op.variation_id else '',
op.price,
op.tax_rate,
str(op.tax_rule) if op.tax_rule else '',
@@ -854,8 +850,6 @@ class TransactionListExporter(ListExporter):
_('Tax rule ID'),
_('Tax rule'),
_('Tax value'),
_('Gross total'),
_('Tax total'),
]
if form_data.get('_format') == 'xlsx':
@@ -907,8 +901,6 @@ class TransactionListExporter(ListExporter):
t.tax_rule_id or '',
str(t.tax_rule.internal_name or t.tax_rule.name) if t.tax_rule_id else '',
t.tax_value,
t.price * t.count,
t.tax_value * t.count,
]
if form_data.get('_format') == 'xlsx':
@@ -1032,7 +1024,7 @@ class PaymentListExporter(ListExporter):
yield self.ProgressSetTotal(total=len(objs))
for obj in objs:
tz = ZoneInfo(obj.order.event.settings.timezone)
tz = pytz.timezone(obj.order.event.settings.timezone)
if isinstance(obj, OrderPayment) and obj.payment_date:
d2 = obj.payment_date.astimezone(tz).date().strftime('%Y-%m-%d')
elif isinstance(obj, OrderRefund) and obj.execution_date:
@@ -1151,7 +1143,7 @@ class GiftcardTransactionListExporter(OrganizerLevelExportMixin, ListExporter):
def iterate_list(self, form_data):
qs = GiftCardTransaction.objects.filter(
card__issuer=self.organizer,
).order_by('datetime').select_related('card', 'order', 'order__event', 'acceptor')
).order_by('datetime').select_related('card', 'order', 'order__event')
if form_data.get('date_range'):
dt_start, dt_end = resolve_timeframe_to_datetime_start_inclusive_end_exclusive(now(), form_data['date_range'], self.timezone)
@@ -1167,7 +1159,6 @@ class GiftcardTransactionListExporter(OrganizerLevelExportMixin, ListExporter):
_('Amount'),
_('Currency'),
_('Order'),
_('Organizer'),
]
yield headers
@@ -1179,7 +1170,6 @@ class GiftcardTransactionListExporter(OrganizerLevelExportMixin, ListExporter):
obj.value,
obj.card.currency,
obj.order.full_code if obj.order else None,
str(obj.acceptor or ""),
]
yield row
@@ -1213,7 +1203,7 @@ class GiftcardRedemptionListExporter(ListExporter):
yield headers
for obj in objs:
tz = ZoneInfo(obj.order.event.settings.timezone)
tz = pytz.timezone(obj.order.event.settings.timezone)
gc = GiftCard.objects.get(pk=obj.info_data.get('gift_card'))
row = [
obj.order.event.slug,

View File

@@ -20,8 +20,8 @@
# <https://www.gnu.org/licenses/>.
#
from collections import OrderedDict
from zoneinfo import ZoneInfo
import pytz
from django import forms
from django.db.models import F, Q
from django.dispatch import receiver
@@ -137,7 +137,7 @@ class WaitingListExporter(ListExporter):
# which event should be used to output dates in columns "Start date" and "End date"
event_for_date_columns = entry.subevent if entry.subevent else entry.event
tz = ZoneInfo(entry.event.settings.timezone)
tz = pytz.timezone(entry.event.settings.timezone)
datetime_format = '%Y-%m-%d %H:%M:%S'
row = [

View File

@@ -167,7 +167,6 @@ class SettingsForm(i18nfield.forms.I18nFormMixin, HierarkeyForm):
class PrefixForm(forms.Form):
prefix = forms.CharField(widget=forms.HiddenInput)
template_name = "django/forms/table.html"
class SafeSessionWizardView(SessionWizardView):

View File

@@ -38,10 +38,10 @@ import logging
from datetime import timedelta
from decimal import Decimal
from io import BytesIO
from zoneinfo import ZoneInfo
import dateutil.parser
import pycountry
import pytz
from django import forms
from django.conf import settings
from django.contrib import messages
@@ -61,7 +61,6 @@ from django.utils.timezone import get_current_timezone, now
from django.utils.translation import gettext_lazy as _, pgettext_lazy
from django_countries import countries
from django_countries.fields import Country, CountryField
from geoip2.errors import AddressNotFoundError
from phonenumber_field.formfields import PhoneNumberField
from phonenumber_field.phonenumber import PhoneNumber
from phonenumber_field.widgets import PhoneNumberPrefixWidget
@@ -357,12 +356,9 @@ class WrappedPhoneNumberPrefixWidget(PhoneNumberPrefixWidget):
def guess_country_from_request(request, event):
if settings.HAS_GEOIP:
g = GeoIP2()
try:
res = g.country(get_client_ip(request))
if res['country_code'] and len(res['country_code']) == 2:
return Country(res['country_code'])
except AddressNotFoundError:
pass
res = g.country(get_client_ip(request))
if res['country_code'] and len(res['country_code']) == 2:
return Country(res['country_code'])
return guess_country(event)
@@ -500,14 +496,14 @@ class PortraitImageField(SizeValidationMixin, ExtValidationMixin, forms.FileFiel
file = BytesIO(data['content'])
try:
image = Image.open(file)
image = Image.open(file, formats=settings.PILLOW_FORMATS_QUESTIONS_IMAGE)
# verify() must be called immediately after the constructor.
image.verify()
# We want to do more than just verify(), so we need to re-open the file
if hasattr(file, 'seek'):
file.seek(0)
image = Image.open(file)
image = Image.open(file, formats=settings.PILLOW_FORMATS_QUESTIONS_IMAGE)
# load() is a potential DoS vector (see Django bug #18520), so we verify the size first
if image.width > 10_000 or image.height > 10_000:
@@ -566,7 +562,7 @@ class PortraitImageField(SizeValidationMixin, ExtValidationMixin, forms.FileFiel
return f
def __init__(self, *args, **kwargs):
kwargs.setdefault('ext_whitelist', (".png", ".jpg", ".jpeg", ".jfif", ".tif", ".tiff", ".bmp"))
kwargs.setdefault('ext_whitelist', settings.FILE_UPLOAD_EXTENSIONS_QUESTION_IMAGE)
kwargs.setdefault('max_size', settings.FILE_UPLOAD_MAX_SIZE_IMAGE)
super().__init__(*args, **kwargs)
@@ -737,7 +733,7 @@ class BaseQuestionsForm(forms.Form):
initial = answers[0]
else:
initial = None
tz = ZoneInfo(event.settings.timezone)
tz = pytz.timezone(event.settings.timezone)
help_text = rich_text(q.help_text)
label = escape(q.question) # django-bootstrap3 calls mark_safe
required = q.required and not self.all_optional
@@ -826,11 +822,7 @@ class BaseQuestionsForm(forms.Form):
help_text=help_text,
initial=initial.file if initial else None,
widget=UploadedFileWidget(position=pos, event=event, answer=initial),
ext_whitelist=(
".png", ".jpg", ".gif", ".jpeg", ".pdf", ".txt", ".docx", ".gif", ".svg",
".pptx", ".ppt", ".doc", ".xlsx", ".xls", ".jfif", ".heic", ".heif", ".pages",
".bmp", ".tif", ".tiff"
),
ext_whitelist=settings.FILE_UPLOAD_EXTENSIONS_OTHER,
max_size=settings.FILE_UPLOAD_MAX_SIZE_OTHER,
)
elif q.type == Question.TYPE_DATE:

View File

@@ -1,63 +0,0 @@
#
# This file is part of pretix (Community Edition).
#
# Copyright (C) 2014-2020 Raphael Michel and contributors
# Copyright (C) 2020-2021 rami.io GmbH and contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation in version 3 of the License.
#
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
# this file, see <https://pretix.eu/about/en/license>.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
from bootstrap3.renderers import (
FieldRenderer as BaseFieldRenderer,
InlineFieldRenderer as BaseInlineFieldRenderer,
)
from django.forms import (
CheckboxInput, CheckboxSelectMultiple, ClearableFileInput, RadioSelect,
SelectDateWidget,
)
class FieldRenderer(BaseFieldRenderer):
    # Local application of https://github.com/zostera/django-bootstrap3/pull/859
    def post_widget_render(self, html):
        """Post-process rendered widget HTML according to the widget type.

        Returns the (possibly transformed) HTML; widgets of any other type
        pass through unchanged.
        """
        widget = self.widget
        if isinstance(widget, CheckboxSelectMultiple):
            return self.list_to_class(html, "checkbox")
        if isinstance(widget, RadioSelect):
            return self.list_to_class(html, "radio")
        if isinstance(widget, SelectDateWidget):
            return self.fix_date_select_input(html)
        if isinstance(widget, ClearableFileInput):
            return self.fix_clearable_file_input(html)
        if isinstance(widget, CheckboxInput):
            return self.put_inside_label(html)
        return html
class InlineFieldRenderer(BaseInlineFieldRenderer):
    # Local application of https://github.com/zostera/django-bootstrap3/pull/859
    def post_widget_render(self, html):
        """Post-process rendered widget HTML according to the widget type.

        The first matching widget class wins; unmatched widgets pass
        through unchanged.
        """
        handlers = (
            (CheckboxSelectMultiple, lambda h: self.list_to_class(h, "checkbox")),
            (RadioSelect, lambda h: self.list_to_class(h, "radio")),
            (SelectDateWidget, self.fix_date_select_input),
            (ClearableFileInput, self.fix_clearable_file_input),
            (CheckboxInput, self.put_inside_label),
        )
        for widget_cls, handler in handlers:
            if isinstance(self.widget, widget_cls):
                return handler(html)
        return html

View File

@@ -24,7 +24,7 @@ Django, for theoretically very valid reasons, creates migrations for *every sing
we change on a model. Even the `help_text`! This makes sense, as we don't know if any
database backend unknown to us might actually use this information for its database schema.
However, pretix only supports PostgreSQL and SQLite and we can be pretty
However, pretix only supports PostgreSQL, MySQL, MariaDB and SQLite and we can be pretty
certain that some changes to models will never require a change to the database. In this case,
not creating a migration for certain changes will save us some performance while applying them
*and* allow for a cleaner git history. Win-win!

View File

@@ -22,7 +22,7 @@
import json
import sys
import pytz_deprecation_shim
import pytz
from django.core.management.base import BaseCommand
from django.utils.timezone import override
from django_scopes import scope
@@ -60,7 +60,7 @@ class Command(BaseCommand):
sys.exit(1)
locale = options.get("locale", None)
timezone = pytz_deprecation_shim.timezone(options['timezone']) if options.get('timezone') else None
timezone = pytz.timezone(options['timezone']) if options.get('timezone') else None
with scope(organizer=o):
if options['event_slug']:

View File

@@ -49,9 +49,6 @@ class BaseMediaType:
def handle_unknown(self, organizer, identifier, user, auth):
pass
def handle_new(self, organizer, medium, user, auth):
pass
def __str__(self):
return str(self.verbose_name)
@@ -111,43 +108,9 @@ class NfcUidMediaType(BaseMediaType):
return m
class NfcMf0aesMediaType(BaseMediaType):
identifier = 'nfc_mf0aes'
verbose_name = 'NFC Mifare Ultralight AES'
medium_created_by_server = False
supports_giftcard = True
supports_orderposition = False
def handle_new(self, organizer, medium, user, auth):
from pretix.base.models import GiftCard
if organizer.settings.get(f'reusable_media_type_{self.identifier}_autocreate_giftcard', as_type=bool):
with transaction.atomic():
gc = GiftCard.objects.create(
issuer=organizer,
expires=organizer.default_gift_card_expiry,
currency=organizer.settings.get(f'reusable_media_type_{self.identifier}_autocreate_giftcard_currency'),
)
medium.linked_giftcard = gc
medium.save()
medium.log_action(
'pretix.reusable_medium.linked_giftcard.changed',
user=user, auth=auth,
data={
'linked_giftcard': gc.pk
}
)
gc.log_action(
'pretix.giftcards.created',
user=user, auth=auth,
)
return medium
MEDIA_TYPES = {
m.identifier: m for m in [
BarcodePlainMediaType(),
NfcUidMediaType(),
NfcMf0aesMediaType(),
]
}

View File

@@ -21,12 +21,12 @@
#
from collections import OrderedDict
from urllib.parse import urlsplit
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
import pytz
from django.conf import settings
from django.http import Http404, HttpRequest, HttpResponse
from django.middleware.common import CommonMiddleware
from django.urls import get_script_prefix, resolve
from django.urls import get_script_prefix
from django.utils import timezone, translation
from django.utils.cache import patch_vary_headers
from django.utils.deprecation import MiddlewareMixin
@@ -98,9 +98,9 @@ class LocaleMiddleware(MiddlewareMixin):
tzname = request.user.timezone
if tzname:
try:
timezone.activate(ZoneInfo(tzname))
timezone.activate(pytz.timezone(tzname))
request.timezone = tzname
except ZoneInfoNotFoundError:
except pytz.UnknownTimeZoneError:
pass
else:
timezone.deactivate()
@@ -230,8 +230,6 @@ class SecurityMiddleware(MiddlewareMixin):
)
def process_response(self, request, resp):
url = resolve(request.path_info)
if settings.DEBUG and resp.status_code >= 400:
# Don't use CSP on debug error page as it breaks of Django's fancy error
# pages
@@ -251,28 +249,20 @@ class SecurityMiddleware(MiddlewareMixin):
h = {
'default-src': ["{static}"],
'script-src': ['{static}'],
'script-src': ['{static}', 'https://checkout.stripe.com', 'https://js.stripe.com'],
'object-src': ["'none'"],
'frame-src': ['{static}'],
'frame-src': ['{static}', 'https://checkout.stripe.com', 'https://js.stripe.com'],
'style-src': ["{static}", "{media}"],
'connect-src': ["{dynamic}", "{media}"],
'img-src': ["{static}", "{media}", "data:"] + img_src,
'connect-src': ["{dynamic}", "{media}", "https://checkout.stripe.com"],
'img-src': ["{static}", "{media}", "data:", "https://*.stripe.com"] + img_src,
'font-src': ["{static}"],
'media-src': ["{static}", "data:"],
# form-action is not only used to match on form actions, but also on URLs
# form-actions redirect to. In the context of e.g. payment providers or
# single-sign-on this can be nearly anything, so we cannot really restrict
# single-sign-on this can be nearly anything so we cannot really restrict
# this. However, we'll restrict it to HTTPS.
'form-action': ["{dynamic}", "https:"] + (['http:'] if settings.SITE_URL.startswith('http://') else []),
}
# Only include pay.google.com for wallet detection purposes on the Payment selection page
if (
url.url_name == "event.order.pay.change" or
(url.url_name == "event.checkout" and url.kwargs['step'] == "payment")
):
h['script-src'].append('https://pay.google.com')
h['frame-src'].append('https://pay.google.com')
h['connect-src'].append('https://google.com/pay')
if settings.LOG_CSP:
h['report-uri'] = ["/csp_report/"]
if 'Content-Security-Policy' in resp:

View File

@@ -2,8 +2,6 @@
# Generated by Django 1.10.4 on 2017-02-03 14:21
from __future__ import unicode_literals
from zoneinfo import ZoneInfo
import django.core.validators
import django.db.migrations.operations.special
import django.db.models.deletion
@@ -28,7 +26,7 @@ def forwards42(apps, schema_editor):
for s in EventSetting.objects.filter(key='timezone').values('object_id', 'value')
}
for order in Order.objects.all():
tz = ZoneInfo(etz.get(order.event_id, 'UTC'))
tz = pytz.timezone(etz.get(order.event_id, 'UTC'))
order.expires = order.expires.astimezone(tz).replace(hour=23, minute=59, second=59)
order.save()

View File

@@ -2,9 +2,9 @@
# Generated by Django 1.10.2 on 2016-10-19 17:57
from __future__ import unicode_literals
from zoneinfo import ZoneInfo
import pytz
from django.db import migrations
from django.utils import timezone
def forwards(apps, schema_editor):
@@ -15,7 +15,7 @@ def forwards(apps, schema_editor):
for s in EventSetting.objects.filter(key='timezone').values('object_id', 'value')
}
for order in Order.objects.all():
tz = ZoneInfo(etz.get(order.event_id, 'UTC'))
tz = pytz.timezone(etz.get(order.event_id, 'UTC'))
order.expires = order.expires.astimezone(tz).replace(hour=23, minute=59, second=59)
order.save()

View File

@@ -3,6 +3,7 @@
from django.core.exceptions import ImproperlyConfigured
from django.db import migrations, models
from django_mysql.checks import mysql_connections
def set_attendee_name_parts(apps, schema_editor):
@@ -23,12 +24,40 @@ def set_attendee_name_parts(apps, schema_editor):
ia.save(update_fields=['name_parts'])
def check_mysqlversion(apps, schema_editor):
    """Abort this migration if a MySQL/MariaDB connection is too old.

    Inspects all configured MySQL-family connections and raises
    ImproperlyConfigured when at least one connection exists but none of
    them meets the minimum version (MySQL 5.7+ or MariaDB 10.2.7+).
    Connections without version information are ignored. Always returns
    an empty list on success (kept for RunPython-callback symmetry).
    """
    connections = list(mysql_connections())
    supported = False
    detected = 'Unknown version'
    for _alias, connection in connections:
        if not hasattr(connection, 'mysql_version'):
            # No version information available; cannot judge this connection.
            continue
        version = connection.mysql_version
        if getattr(connection, 'mysql_is_mariadb', False):
            if version >= (10, 2, 7):
                supported = True
            else:
                detected = 'MariaDB ' + '.'.join(str(v) for v in version)
        else:
            if version >= (5, 7):
                supported = True
            else:
                detected = 'MySQL ' + '.'.join(str(v) for v in version)
    if connections and not supported:
        raise ImproperlyConfigured(
            'As of pretix 2.2, you need MySQL 5.7+ or MariaDB 10.2.7+ to run pretix. However, we detected a '
            'database connection to {}'.format(detected)
        )
    return []
class Migration(migrations.Migration):
dependencies = [
('pretixbase', '0101_auto_20181025_2255'),
]
operations = [
migrations.RunPython(
check_mysqlversion, migrations.RunPython.noop
),
migrations.RenameField(
model_name='cartposition',
old_name='attendee_name',

View File

@@ -1,7 +1,8 @@
# Generated by Django 3.2.4 on 2021-09-30 10:25
from datetime import datetime, timezone
from datetime import datetime
from django.db import migrations, models
from pytz import UTC
class Migration(migrations.Migration):
@@ -14,7 +15,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='invoice',
name='sent_to_customer',
field=models.DateTimeField(blank=True, null=True, default=datetime(1970, 1, 1, 0, 0, 0, 0, tzinfo=timezone.utc)),
field=models.DateTimeField(blank=True, null=True, default=UTC.localize(datetime(1970, 1, 1, 0, 0, 0, 0))),
preserve_default=False,
),
]

View File

@@ -50,6 +50,6 @@ class Migration(migrations.Migration):
],
options={
'unique_together': {('event', 'secret')},
}
} if 'mysql' not in settings.DATABASES['default']['ENGINE'] else {}
),
]

View File

@@ -1,38 +0,0 @@
# Generated by Django 3.2.18 on 2023-05-12 10:08
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Reshapes GiftCardAcceptance: renames "collector" to "acceptor" and adds
    # flags controlling whether the acceptance is active and whether reusable
    # media are honored across organizers.

    dependencies = [
        ('pretixbase', '0241_itemmetaproperties_required_values'),
    ]

    operations = [
        # Rename the accepting side of a gift card acceptance relationship.
        migrations.RenameField(
            model_name='giftcardacceptance',
            old_name='collector',
            new_name='acceptor',
        ),
        # Existing acceptance rows remain active by default.
        migrations.AddField(
            model_name='giftcardacceptance',
            name='active',
            field=models.BooleanField(default=True),
        ),
        # Cross-organizer reusable media acceptance is opt-in.
        migrations.AddField(
            model_name='giftcardacceptance',
            name='reusable_media',
            field=models.BooleanField(default=False),
        ),
        # Point the reverse accessor at the renamed "acceptor" terminology.
        migrations.AlterField(
            model_name='giftcardacceptance',
            name='issuer',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='gift_card_acceptor_acceptance', to='pretixbase.organizer'),
        ),
        # Uniqueness constraint must follow the field rename.
        migrations.AlterUniqueTogether(
            name='giftcardacceptance',
            unique_together={('issuer', 'acceptor')},
        ),
    ]

View File

@@ -1,23 +0,0 @@
# Generated by Django 4.1.9 on 2023-06-26 10:59
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds operating-system metadata fields to Device so clients can report
    # their OS alongside the existing software identification fields.

    dependencies = [
        ('pretixbase', '0242_auto_20230512_1008'),
    ]

    operations = [
        # Nullable: existing devices have no OS information yet.
        migrations.AddField(
            model_name='device',
            name='os_name',
            field=models.CharField(max_length=190, null=True),
        ),
        migrations.AddField(
            model_name='device',
            name='os_version',
            field=models.CharField(max_length=190, null=True),
        ),
    ]

View File

@@ -1,35 +0,0 @@
# Generated by Django 3.2.18 on 2023-05-17 11:32
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Introduces key material for reusable media: an RSA public key per device
    # and a per-organizer MediumKeySet model holding symmetric keys.

    dependencies = [
        ('pretixbase', '0243_device_os_name_and_os_version'),
    ]

    operations = [
        # Nullable: existing devices have not registered a public key yet.
        migrations.AddField(
            model_name='device',
            name='rsa_pubkey',
            field=models.TextField(null=True),
        ),
        migrations.CreateModel(
            name='MediumKeySet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
                ('public_id', models.BigIntegerField(unique=True)),
                ('media_type', models.CharField(max_length=100)),
                ('active', models.BooleanField(default=True)),
                # Binary key material; semantics defined by the media type.
                ('uid_key', models.BinaryField()),
                ('diversification_key', models.BinaryField()),
                ('organizer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='medium_key_sets', to='pretixbase.organizer')),
            ],
        ),
        # At most one *active* key set per (organizer, media_type); inactive
        # historical key sets may coexist.
        migrations.AddConstraint(
            model_name='mediumkeyset',
            constraint=models.UniqueConstraint(condition=models.Q(('active', True)), fields=('organizer', 'media_type'), name='keyset_unique_active'),
        ),
    ]

View File

@@ -1,34 +0,0 @@
# Generated by Django 4.2.4 on 2023-08-28 12:30
from django.db import migrations, models
class Migration(migrations.Migration):
    # Extends Discount so the benefit side can target a different product
    # selection than the condition side (add-on handling, voucher exclusion,
    # and an explicit benefit product list).

    dependencies = [
        ("pretixbase", "0244_mediumkeyset"),
    ]

    operations = [
        # Defaults chosen to preserve pre-migration behavior for existing rows.
        migrations.AddField(
            model_name="discount",
            name="benefit_apply_to_addons",
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name="discount",
            name="benefit_ignore_voucher_discounted",
            field=models.BooleanField(default=False),
        ),
        # Products the benefit applies to when it differs from the condition set.
        migrations.AddField(
            model_name="discount",
            name="benefit_limit_products",
            field=models.ManyToManyField(
                related_name="benefit_discounts", to="pretixbase.item"
            ),
        ),
        # True keeps the legacy behavior of benefiting the same products
        # that satisfied the condition.
        migrations.AddField(
            model_name="discount",
            name="benefit_same_products",
            field=models.BooleanField(default=True),
        ),
    ]

View File

@@ -97,7 +97,7 @@ def _transactions_mark_order_dirty(order_id, using=None):
if getattr(dirty_transactions, 'order_ids', None) is None:
dirty_transactions.order_ids = set()
if _check_for_dirty_orders not in [func for (savepoint_id, func, *__) in conn.run_on_commit]:
if _check_for_dirty_orders not in [func for savepoint_id, func in conn.run_on_commit]:
transaction.on_commit(_check_for_dirty_orders, using)
dirty_transactions.order_ids.clear() # This is necessary to clean up after old threads with rollbacked transactions

View File

@@ -178,7 +178,7 @@ class LoggedModel(models.Model, LoggingMixin):
return LogEntry.objects.filter(
content_type=self.logs_content_type, object_id=self.pk
).select_related('user', 'event', 'event__organizer', 'oauth_application', 'api_token', 'device')
).select_related('user', 'event', 'oauth_application', 'api_token', 'device')
class LockModel:

View File

@@ -121,23 +121,14 @@ class Customer(LoggedModel):
if self.email:
self.email = self.email.lower()
if 'update_fields' in kwargs and 'last_modified' not in kwargs['update_fields']:
kwargs['update_fields'] = {'last_modified'}.union(kwargs['update_fields'])
kwargs['update_fields'] = list(kwargs['update_fields']) + ['last_modified']
if not self.identifier:
self.assign_identifier()
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
if self.name_parts:
name = self.name
if self.name_cached != name:
self.name_cached = name
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'name_cached'}.union(kwargs['update_fields'])
self.name_cached = self.name
else:
if self.name_cached != "" or self.name_parts != {}:
self.name_cached = ""
self.name_parts = {}
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'name_cached', 'name_parts'}.union(kwargs['update_fields'])
self.name_cached = ""
self.name_parts = {}
super().save(**kwargs)
def anonymize(self):

View File

@@ -98,8 +98,6 @@ class Gate(LoggedModel):
if not Gate.objects.filter(organizer=self.organizer, identifier=code).exists():
self.identifier = code
break
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
return super().save(*args, **kwargs)
@@ -143,14 +141,6 @@ class Device(LoggedModel):
max_length=190,
null=True, blank=True
)
os_name = models.CharField(
max_length=190,
null=True, blank=True
)
os_version = models.CharField(
max_length=190,
null=True, blank=True
)
software_brand = models.CharField(
max_length=190,
null=True, blank=True
@@ -166,10 +156,6 @@ class Device(LoggedModel):
null=True,
blank=False
)
rsa_pubkey = models.TextField(
null=True,
blank=True,
)
info = models.JSONField(
null=True, blank=True,
)
@@ -187,8 +173,6 @@ class Device(LoggedModel):
def save(self, *args, **kwargs):
if not self.device_id:
self.device_id = (self.organizer.devices.aggregate(m=Max('device_id'))['m'] or 0) + 1
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'device_id'}.union(kwargs['update_fields'])
super().save(*args, **kwargs)
def permission_set(self) -> set:

View File

@@ -99,7 +99,7 @@ class Discount(LoggedModel):
)
condition_apply_to_addons = models.BooleanField(
default=True,
verbose_name=_("Count add-on products"),
verbose_name=_("Apply to add-on products"),
help_text=_("Discounts never apply to bundled products"),
)
condition_ignore_voucher_discounted = models.BooleanField(
@@ -107,7 +107,7 @@ class Discount(LoggedModel):
verbose_name=_("Ignore products discounted by a voucher"),
help_text=_("If this option is checked, products that already received a discount through a voucher will not "
"be considered for this discount. However, products that use a voucher only to e.g. unlock a "
"hidden product or gain access to sold-out quota will still be considered."),
"hidden product or gain access to sold-out quota will still receive the discount."),
)
condition_min_count = models.PositiveIntegerField(
verbose_name=_('Minimum number of matching products'),
@@ -120,19 +120,6 @@ class Discount(LoggedModel):
default=Decimal('0.00'),
)
benefit_same_products = models.BooleanField(
default=True,
verbose_name=_("Apply discount to same set of products"),
help_text=_("By default, the discount is applied across the same selection of products than the condition for "
"the discount given above. If you want, you can however also select a different selection of "
"products.")
)
benefit_limit_products = models.ManyToManyField(
'Item',
verbose_name=_("Apply discount to specific products"),
related_name='benefit_discounts',
blank=True
)
benefit_discount_matching_percent = models.DecimalField(
verbose_name=_('Percentual discount on matching products'),
decimal_places=2,
@@ -152,18 +139,6 @@ class Discount(LoggedModel):
blank=True,
validators=[MinValueValidator(1)],
)
benefit_apply_to_addons = models.BooleanField(
default=True,
verbose_name=_("Apply to add-on products"),
help_text=_("Discounts never apply to bundled products"),
)
benefit_ignore_voucher_discounted = models.BooleanField(
default=False,
verbose_name=_("Ignore products discounted by a voucher"),
help_text=_("If this option is checked, products that already received a discount through a voucher will not "
"be discounted. However, products that use a voucher only to e.g. unlock a hidden product or gain "
"access to sold-out quota will still receive the discount."),
)
# more feature ideas:
# - max_usages_per_order
@@ -212,14 +187,6 @@ class Discount(LoggedModel):
'on a minimum value.')
)
if data.get('subevent_mode') == cls.SUBEVENT_MODE_DISTINCT and not data.get('benefit_same_products'):
raise ValidationError(
{'benefit_same_products': [
_('You cannot apply the discount to a different set of products if the discount is only valid '
'for bookings of different dates.')
]}
)
def allow_delete(self):
return not self.orderposition_set.exists()
@@ -230,7 +197,6 @@ class Discount(LoggedModel):
'condition_min_value': self.condition_min_value,
'benefit_only_apply_to_cheapest_n_matches': self.benefit_only_apply_to_cheapest_n_matches,
'subevent_mode': self.subevent_mode,
'benefit_same_products': self.benefit_same_products,
})
def is_available_by_time(self, now_dt=None) -> bool:
@@ -241,14 +207,14 @@ class Discount(LoggedModel):
return False
return True
def _apply_min_value(self, positions, condition_idx_group, benefit_idx_group, result):
if self.condition_min_value and sum(positions[idx][2] for idx in condition_idx_group) < self.condition_min_value:
def _apply_min_value(self, positions, idx_group, result):
if self.condition_min_value and sum(positions[idx][2] for idx in idx_group) < self.condition_min_value:
return
if self.condition_min_count or self.benefit_only_apply_to_cheapest_n_matches:
raise ValueError('Validation invariant violated.')
for idx in benefit_idx_group:
for idx in idx_group:
previous_price = positions[idx][2]
new_price = round_decimal(
previous_price * (Decimal('100.00') - self.benefit_discount_matching_percent) / Decimal('100.00'),
@@ -256,8 +222,8 @@ class Discount(LoggedModel):
)
result[idx] = new_price
def _apply_min_count(self, positions, condition_idx_group, benefit_idx_group, result):
if len(condition_idx_group) < self.condition_min_count:
def _apply_min_count(self, positions, idx_group, result):
if len(idx_group) < self.condition_min_count:
return
if not self.condition_min_count or self.condition_min_value:
@@ -267,17 +233,15 @@ class Discount(LoggedModel):
if not self.condition_min_count:
raise ValueError('Validation invariant violated.')
condition_idx_group = sorted(condition_idx_group, key=lambda idx: (positions[idx][2], -idx)) # sort by line_price
benefit_idx_group = sorted(benefit_idx_group, key=lambda idx: (positions[idx][2], -idx)) # sort by line_price
idx_group = sorted(idx_group, key=lambda idx: (positions[idx][2], -idx)) # sort by line_price
# Prevent over-consuming of items, i.e. if our discount is "buy 2, get 1 free", we only
# want to match multiples of 3
n_groups = min(len(condition_idx_group) // self.condition_min_count, len(benefit_idx_group))
consume_idx = condition_idx_group[:n_groups * self.condition_min_count]
benefit_idx = benefit_idx_group[:n_groups * self.benefit_only_apply_to_cheapest_n_matches]
consume_idx = idx_group[:len(idx_group) // self.condition_min_count * self.condition_min_count]
benefit_idx = idx_group[:len(idx_group) // self.condition_min_count * self.benefit_only_apply_to_cheapest_n_matches]
else:
consume_idx = condition_idx_group
benefit_idx = benefit_idx_group
consume_idx = idx_group
benefit_idx = idx_group
for idx in benefit_idx:
previous_price = positions[idx][2]
@@ -312,7 +276,7 @@ class Discount(LoggedModel):
limit_products = {p.pk for p in self.condition_limit_products.all()}
# First, filter out everything not even covered by our product scope
condition_candidates = [
initial_candidates = [
idx
for idx, (item_id, subevent_id, line_price_gross, is_addon_to, voucher_discount) in positions.items()
if (
@@ -322,25 +286,11 @@ class Discount(LoggedModel):
)
]
if self.benefit_same_products:
benefit_candidates = list(condition_candidates)
else:
benefit_products = {p.pk for p in self.benefit_limit_products.all()}
benefit_candidates = [
idx
for idx, (item_id, subevent_id, line_price_gross, is_addon_to, voucher_discount) in positions.items()
if (
item_id in benefit_products and
(self.benefit_apply_to_addons or not is_addon_to) and
(not self.benefit_ignore_voucher_discounted or voucher_discount is None or voucher_discount == Decimal('0.00'))
)
]
if self.subevent_mode == self.SUBEVENT_MODE_MIXED: # also applies to non-series events
if self.condition_min_count:
self._apply_min_count(positions, condition_candidates, benefit_candidates, result)
self._apply_min_count(positions, initial_candidates, result)
else:
self._apply_min_value(positions, condition_candidates, benefit_candidates, result)
self._apply_min_value(positions, initial_candidates, result)
elif self.subevent_mode == self.SUBEVENT_MODE_SAME:
def key(idx):
@@ -349,18 +299,17 @@ class Discount(LoggedModel):
# Build groups of candidates with the same subevent, then apply our regular algorithm
# to each group
_groups = groupby(sorted(condition_candidates, key=key), key=key)
candidate_groups = [(k, list(g)) for k, g in _groups]
_groups = groupby(sorted(initial_candidates, key=key), key=key)
candidate_groups = [list(g) for k, g in _groups]
for subevent_id, g in candidate_groups:
benefit_g = [idx for idx in benefit_candidates if positions[idx][1] == subevent_id]
for g in candidate_groups:
if self.condition_min_count:
self._apply_min_count(positions, g, benefit_g, result)
self._apply_min_count(positions, g, result)
else:
self._apply_min_value(positions, g, benefit_g, result)
self._apply_min_value(positions, g, result)
elif self.subevent_mode == self.SUBEVENT_MODE_DISTINCT:
if self.condition_min_value or not self.benefit_same_products:
if self.condition_min_value:
raise ValueError('Validation invariant violated.')
# Build optimal groups of candidates with distinct subevents, then apply our regular algorithm
@@ -387,7 +336,7 @@ class Discount(LoggedModel):
candidates = []
cardinality = None
for se, l in subevent_to_idx.items():
l = [ll for ll in l if ll in condition_candidates and ll not in current_group]
l = [ll for ll in l if ll in initial_candidates and ll not in current_group]
if cardinality and len(l) != cardinality:
continue
if se not in {positions[idx][1] for idx in current_group}:
@@ -424,5 +373,5 @@ class Discount(LoggedModel):
break
for g in candidate_groups:
self._apply_min_count(positions, g, g, result)
self._apply_min_count(positions, g, result)
return result

View File

@@ -40,9 +40,8 @@ from collections import Counter, OrderedDict, defaultdict
from datetime import datetime, time, timedelta
from operator import attrgetter
from urllib.parse import urljoin
from zoneinfo import ZoneInfo
import pytz_deprecation_shim
import pytz
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.files.storage import default_storage
@@ -215,7 +214,7 @@ class EventMixin:
@property
def timezone(self):
return pytz_deprecation_shim.timezone(self.settings.timezone)
return pytz.timezone(self.settings.timezone)
@property
def effective_presale_end(self):
@@ -774,7 +773,7 @@ class Event(EventMixin, LoggedModel):
"""
The last datetime of payments for this event.
"""
tz = ZoneInfo(self.settings.timezone)
tz = pytz.timezone(self.settings.timezone)
return make_aware(datetime.combine(
self.settings.get('payment_term_last', as_type=RelativeDateWrapper).datetime(self).date(),
time(hour=23, minute=59, second=59)
@@ -907,18 +906,14 @@ class Event(EventMixin, LoggedModel):
self.items.filter(hidden_if_available_id=oldid).update(hidden_if_available=q)
for d in Discount.objects.filter(event=other).prefetch_related('condition_limit_products'):
c_items = list(d.condition_limit_products.all())
b_items = list(d.benefit_limit_products.all())
items = list(d.condition_limit_products.all())
d.pk = None
d.event = self
d.save(force_insert=True)
d.log_action('pretix.object.cloned')
for i in c_items:
for i in items:
if i.pk in item_map:
d.condition_limit_products.add(item_map[i.pk])
for i in b_items:
if i.pk in item_map:
d.benefit_limit_products.add(item_map[i.pk])
question_map = {}
for q in Question.objects.filter(event=other).prefetch_related('items', 'options'):
@@ -1281,9 +1276,6 @@ class Event(EventMixin, LoggedModel):
return not self.orders.exists() and not self.invoices.exists()
def delete_sub_objects(self):
from .checkin import Checkin
Checkin.all.filter(successful=False, list__event=self).delete()
self.cartposition_set.filter(addon_to__isnull=False).delete()
self.cartposition_set.all().delete()
self.vouchers.all().delete()

View File

@@ -19,11 +19,10 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
import zoneinfo
from datetime import datetime, timedelta
import pytz
from dateutil.rrule import rrulestr
from dateutil.tz import datetime_exists
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
@@ -109,9 +108,12 @@ class AbstractScheduledExport(LoggedModel):
self.schedule_next_run = None
return
self.schedule_next_run = make_aware(datetime.combine(new_d.date(), self.schedule_rrule_time), tz)
if not datetime_exists(self.schedule_next_run):
self.schedule_next_run += timedelta(hours=1)
try:
self.schedule_next_run = make_aware(datetime.combine(new_d.date(), self.schedule_rrule_time), tz)
except pytz.exceptions.AmbiguousTimeError:
self.schedule_next_run = make_aware(datetime.combine(new_d.date(), self.schedule_rrule_time), tz, is_dst=False)
except pytz.exceptions.NonExistentTimeError:
self.schedule_next_run = make_aware(datetime.combine(new_d.date(), self.schedule_rrule_time) + timedelta(hours=1), tz)
class ScheduledEventExport(AbstractScheduledExport):
@@ -134,4 +136,4 @@ class ScheduledOrganizerExport(AbstractScheduledExport):
@property
def tz(self):
return zoneinfo.ZoneInfo(self.timezone)
return pytz.timezone(self.timezone)

View File

@@ -46,19 +46,14 @@ def gen_giftcard_secret(length=8):
class GiftCardAcceptance(models.Model):
issuer = models.ForeignKey(
'Organizer',
related_name='gift_card_acceptor_acceptance',
related_name='gift_card_collector_acceptance',
on_delete=models.CASCADE
)
acceptor = models.ForeignKey(
collector = models.ForeignKey(
'Organizer',
related_name='gift_card_issuer_acceptance',
on_delete=models.CASCADE
)
active = models.BooleanField(default=True)
reusable_media = models.BooleanField(default=False)
class Meta:
unique_together = (('issuer', 'acceptor'),)
class GiftCard(LoggedModel):
@@ -119,7 +114,7 @@ class GiftCard(LoggedModel):
return self.transactions.aggregate(s=Sum('value'))['s'] or Decimal('0.00')
def accepted_by(self, organizer):
return self.issuer == organizer or GiftCardAcceptance.objects.filter(issuer=self.issuer, acceptor=organizer, active=True).exists()
return self.issuer == organizer or GiftCardAcceptance.objects.filter(issuer=self.issuer, collector=organizer).exists()
def save(self, *args, **kwargs):
if not self.secret:

View File

@@ -251,20 +251,14 @@ class Invoice(models.Model):
raise ValueError('Every invoice needs to be connected to an order')
if not self.event:
self.event = self.order.event
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'event'}.union(kwargs['update_fields'])
if not self.organizer:
self.organizer = self.order.event.organizer
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'organizer'}.union(kwargs['update_fields'])
if not self.prefix:
self.prefix = self.event.settings.invoice_numbers_prefix or (self.event.slug.upper() + '-')
if self.is_cancellation:
self.prefix = self.event.settings.invoice_numbers_prefix_cancellations or self.prefix
if '%' in self.prefix:
self.prefix = self.date.strftime(self.prefix)
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'prefix'}.union(kwargs['update_fields'])
if not self.invoice_no:
if self.order.testmode:
@@ -282,13 +276,8 @@ class Invoice(models.Model):
# Suppress duplicate key errors and try again
if i == 9:
raise
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'invoice_no'}.union(kwargs['update_fields'])
if self.full_invoice_no != self.prefix + self.invoice_no:
self.full_invoice_no = self.prefix + self.invoice_no
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'full_invoice_no'}.union(kwargs['update_fields'])
self.full_invoice_no = self.prefix + self.invoice_no
return super().save(*args, **kwargs)
def delete(self, *args, **kwargs):

View File

@@ -40,11 +40,9 @@ from collections import Counter, OrderedDict
from datetime import date, datetime, time, timedelta
from decimal import Decimal, DecimalException
from typing import Optional, Tuple
from zoneinfo import ZoneInfo
import dateutil.parser
import django_redis
from dateutil.tz import datetime_exists
import pytz
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import (
@@ -58,6 +56,7 @@ from django.utils.functional import cached_property
from django.utils.timezone import is_naive, make_aware, now
from django.utils.translation import gettext_lazy as _, pgettext_lazy
from django_countries.fields import Country
from django_redis import get_redis_connection
from django_scopes import ScopedManager
from i18nfield.fields import I18nCharField, I18nTextField
@@ -928,22 +927,22 @@ class Item(LoggedModel):
)
if self.validity_dynamic_duration_days:
replace_date += timedelta(days=self.validity_dynamic_duration_days)
valid_until = valid_until.replace(
valid_until = tz.localize(valid_until.replace(
year=replace_date.year,
month=replace_date.month,
day=replace_date.day,
hour=23, minute=59, second=59, microsecond=0,
tzinfo=tz,
)
tzinfo=None,
))
elif self.validity_dynamic_duration_days:
replace_date = valid_until.date() + timedelta(days=self.validity_dynamic_duration_days - 1)
valid_until = valid_until.replace(
valid_until = tz.localize(valid_until.replace(
year=replace_date.year,
month=replace_date.month,
day=replace_date.day,
hour=23, minute=59, second=59, microsecond=0,
tzinfo=tz
)
tzinfo=None
))
if self.validity_dynamic_duration_hours:
valid_until += timedelta(hours=self.validity_dynamic_duration_hours)
@@ -951,9 +950,6 @@ class Item(LoggedModel):
if self.validity_dynamic_duration_minutes:
valid_until += timedelta(minutes=self.validity_dynamic_duration_minutes)
if not datetime_exists(valid_until):
valid_until += timedelta(hours=1)
return requested_start, valid_until
else:
@@ -1593,8 +1589,6 @@ class Question(LoggedModel):
if not Question.objects.filter(event=self.event, identifier=code).exists():
self.identifier = code
break
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
super().save(*args, **kwargs)
if self.event:
self.event.cache.clear()
@@ -1684,7 +1678,7 @@ class Question(LoggedModel):
try:
dt = dateutil.parser.parse(answer)
if is_naive(dt):
dt = make_aware(dt, ZoneInfo(self.event.settings.timezone))
dt = make_aware(dt, pytz.timezone(self.event.settings.timezone))
except:
raise ValidationError(_('Invalid datetime input.'))
else:
@@ -1742,8 +1736,6 @@ class QuestionOption(models.Model):
if not QuestionOption.objects.filter(question__event=self.question.event, identifier=code).exists():
self.identifier = code
break
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
super().save(*args, **kwargs)
@staticmethod
@@ -1910,13 +1902,8 @@ class Quota(LoggedModel):
def rebuild_cache(self, now_dt=None):
if settings.HAS_REDIS:
rc = django_redis.get_redis_connection("redis")
p = rc.pipeline()
p.hdel(f'quotas:{self.event_id}:availabilitycache', str(self.pk))
p.hdel(f'quotas:{self.event_id}:availabilitycache:nocw', str(self.pk))
p.hdel(f'quotas:{self.event_id}:availabilitycache:igcl', str(self.pk))
p.hdel(f'quotas:{self.event_id}:availabilitycache:nocw:igcl', str(self.pk))
p.execute()
rc = get_redis_connection("redis")
rc.hdel(f'quotas:{self.event_id}:availabilitycache', str(self.pk))
self.availability(now_dt=now_dt)
def availability(

View File

@@ -88,7 +88,9 @@ class LogEntry(models.Model):
class Meta:
ordering = ('-datetime', '-id')
indexes = [models.Index(fields=["datetime", "id"])]
index_together = [
['datetime', 'id']
]
def display(self):
from ..signals import logentry_display

View File

@@ -121,30 +121,5 @@ class ReusableMedium(LoggedModel):
class Meta:
unique_together = (("identifier", "type", "organizer"),)
indexes = [
models.Index(fields=("identifier", "type", "organizer")),
models.Index(fields=("updated", "id")),
]
index_together = (("identifier", "type", "organizer"), ("updated", "id"))
ordering = "identifier", "type", "organizer"
class MediumKeySet(models.Model):
    """
    Per-organizer key set for a reusable-media type.

    NOTE(review): ``uid_key``/``diversification_key`` look like key material
    used to derive or verify medium identifiers — confirm against the code
    that consumes this model.
    """
    # Owning organizer; deleting the organizer cascades to its key sets.
    organizer = models.ForeignKey('Organizer', on_delete=models.CASCADE, related_name='medium_key_sets')
    # Globally unique numeric ID under which this key set is referenced.
    public_id = models.BigIntegerField(
        unique=True,
    )
    media_type = models.CharField(max_length=100)
    # At most one *active* key set per (organizer, media_type) — enforced by
    # the conditional unique constraint in Meta below.
    active = models.BooleanField(default=True)
    uid_key = models.BinaryField()
    diversification_key = models.BinaryField()

    # Scoped manager: queries must be made within an organizer scope.
    objects = ScopedManager(organizer='organizer')

    class Meta:
        constraints = [
            models.UniqueConstraint(
                fields=["organizer", "media_type"],
                condition=Q(active=True),
                name="keyset_unique_active",
            ),
        ]

View File

@@ -42,10 +42,10 @@ from collections import Counter
from datetime import datetime, time, timedelta
from decimal import Decimal
from typing import Any, Dict, List, Union
from zoneinfo import ZoneInfo
import dateutil
import pycountry
import pytz
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models, transaction
@@ -270,9 +270,9 @@ class Order(LockModel, LoggedModel):
verbose_name = _("Order")
verbose_name_plural = _("Orders")
ordering = ("-datetime", "-pk")
indexes = [
models.Index(fields=["datetime", "id"]),
models.Index(fields=["last_modified", "id"]),
index_together = [
["datetime", "id"],
["last_modified", "id"],
]
def __str__(self):
@@ -461,20 +461,14 @@ class Order(LockModel, LoggedModel):
return '{event}-{code}'.format(event=self.event.slug.upper(), code=self.code)
def save(self, **kwargs):
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'last_modified'}.union(kwargs['update_fields'])
if 'update_fields' in kwargs and 'last_modified' not in kwargs['update_fields']:
kwargs['update_fields'] = list(kwargs['update_fields']) + ['last_modified']
if not self.code:
self.assign_code()
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'code'}.union(kwargs['update_fields'])
if not self.datetime:
self.datetime = now()
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'datetime'}.union(kwargs['update_fields'])
if not self.expires:
self.set_expires()
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'expires'}.union(kwargs['update_fields'])
is_new = not self.pk
update_fields = kwargs.get('update_fields', [])
@@ -502,7 +496,7 @@ class Order(LockModel, LoggedModel):
def set_expires(self, now_dt=None, subevents=None):
now_dt = now_dt or now()
tz = ZoneInfo(self.event.settings.timezone)
tz = pytz.timezone(self.event.settings.timezone)
mode = self.event.settings.get('payment_term_mode')
if mode == 'days':
exp_by_date = now_dt.astimezone(tz) + timedelta(days=self.event.settings.get('payment_term_days', as_type=int))
@@ -876,7 +870,7 @@ class Order(LockModel, LoggedModel):
@property
def payment_term_last(self):
tz = ZoneInfo(self.event.settings.timezone)
tz = pytz.timezone(self.event.settings.timezone)
term_last = self.event.settings.get('payment_term_last', as_type=RelativeDateWrapper)
if term_last:
if self.event.has_subevents:
@@ -896,33 +890,6 @@ class Order(LockModel, LoggedModel):
), tz)
return term_last
@property
def payment_term_expire_date(self):
    """
    The date the order actually expires, i.e. ``expires`` plus the grace
    period configured in ``payment_term_expire_delay_days`` (optionally
    pushed off weekends), capped at ``payment_term_last``.
    """
    delay = self.event.settings.get('payment_term_expire_delay_days', as_type=int)
    if not delay:  # performance saver + backwards compatibility
        return self.expires

    term_last = self.payment_term_last
    if term_last and self.expires > term_last:  # backwards compatibility
        return self.expires

    # Shift the deadline by the configured number of days (date-level).
    expires = self.expires.date() + timedelta(days=delay)

    # If only weekdays count, move a Saturday (5) or Sunday (6) deadline
    # forward to the following Monday.
    if self.event.settings.get('payment_term_weekdays'):
        if expires.weekday() == 5:
            expires += timedelta(days=2)
        elif expires.weekday() == 6:
            expires += timedelta(days=1)

    # Expire at the very end of that day in the event's timezone.
    tz = ZoneInfo(self.event.settings.timezone)
    expires = make_aware(datetime.combine(
        expires,
        time(hour=23, minute=59, second=59)
    ), tz)

    # Never expire later than the last possible payment date.
    if term_last:
        return min(expires, term_last)
    else:
        return expires
def _can_be_paid(self, count_waitinglist=True, ignore_date=False, force=False) -> Union[bool, str]:
error_messages = {
'late_lastdate': _("The payment can not be accepted as the last date of payments configured in the "
@@ -1246,7 +1213,7 @@ class QuestionAnswer(models.Model):
@property
def is_image(self):
return any(self.file.name.lower().endswith(e) for e in ('.jpg', '.png', '.gif', '.tiff', '.bmp', '.jpeg'))
return any(self.file.name.lower().endswith(e) for e in settings.FILE_UPLOAD_EXTENSIONS_QUESTION_IMAGE)
@property
def file_name(self):
@@ -1263,7 +1230,7 @@ class QuestionAnswer(models.Model):
try:
d = dateutil.parser.parse(self.answer)
if self.orderposition:
tz = ZoneInfo(self.orderposition.order.event.settings.timezone)
tz = pytz.timezone(self.orderposition.order.event.settings.timezone)
d = d.astimezone(tz)
return date_format(d, "SHORT_DATETIME_FORMAT")
except ValueError:
@@ -1475,20 +1442,12 @@ class AbstractPosition(models.Model):
else self.variation.quotas.filter(subevent=self.subevent))
def save(self, *args, **kwargs):
update_fields = kwargs.get('update_fields', set())
update_fields = kwargs.get('update_fields', [])
if 'attendee_name_parts' in update_fields:
kwargs['update_fields'] = {'attendee_name_cached'}.union(kwargs['update_fields'])
name = self.attendee_name
if name != self.attendee_name_cached:
self.attendee_name_cached = name
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'attendee_name_cached'}.union(kwargs['update_fields'])
update_fields.append('attendee_name_cached')
self.attendee_name_cached = self.attendee_name
if self.attendee_name_parts is None:
self.attendee_name_parts = {}
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'attendee_name_parts'}.union(kwargs['update_fields'])
super().save(*args, **kwargs)
@property
@@ -1672,13 +1631,12 @@ class OrderPayment(models.Model):
if status_change:
self.order.create_transactions()
def fail(self, info=None, user=None, auth=None, log_data=None, send_mail=True):
def fail(self, info=None, user=None, auth=None, log_data=None):
"""
Marks the order as failed and sets info to ``info``, but only if the order is in ``created`` or ``pending``
state. This is equivalent to setting ``state`` to ``OrderPayment.PAYMENT_STATE_FAILED`` and logging a failure,
but it adds strong database locking since we do not want to report a failure for an order that has just
but it adds strong database logging since we do not want to report a failure for an order that has just
been marked as paid.
:param send_mail: Whether an email should be sent to the user about this event (default: ``True``).
"""
with transaction.atomic():
locked_instance = OrderPayment.objects.select_for_update(of=OF_SELF).get(pk=self.pk)
@@ -1703,17 +1661,6 @@ class OrderPayment(models.Model):
'info': info,
'data': log_data,
}, user=user, auth=auth)
if send_mail:
with language(self.order.locale, self.order.event.settings.region):
email_subject = self.order.event.settings.mail_subject_order_payment_failed
email_template = self.order.event.settings.mail_text_order_payment_failed
email_context = get_email_context(event=self.order.event, order=self.order)
self.order.send_mail(
email_subject, email_template, email_context,
'pretix.event.order.email.payment_failed', user=user, auth=auth,
)
return True
def confirm(self, count_waitinglist=True, send_mail=True, force=False, user=None, auth=None, mail_text='',
@@ -1880,8 +1827,6 @@ class OrderPayment(models.Model):
def save(self, *args, **kwargs):
if not self.local_id:
self.local_id = (self.order.payments.aggregate(m=Max('local_id'))['m'] or 0) + 1
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'local_id'}.union(kwargs['update_fields'])
super().save(*args, **kwargs)
def create_external_refund(self, amount=None, execution_date=None, info='{}'):
@@ -2080,8 +2025,6 @@ class OrderRefund(models.Model):
def save(self, *args, **kwargs):
if not self.local_id:
self.local_id = (self.order.refunds.aggregate(m=Max('local_id'))['m'] or 0) + 1
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'local_id'}.union(kwargs['update_fields'])
super().save(*args, **kwargs)
@@ -2500,20 +2443,14 @@ class OrderPosition(AbstractPosition):
assign_ticket_secret(
event=self.order.event, position=self, force_invalidate=True, save=False
)
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'secret'}.union(kwargs['update_fields'])
if not self.blocked and self.blocked is not None:
if not self.blocked:
self.blocked = None
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'blocked'}.union(kwargs['update_fields'])
elif self.blocked and (not isinstance(self.blocked, list) or any(not isinstance(b, str) for b in self.blocked)):
elif not isinstance(self.blocked, list) or any(not isinstance(b, str) for b in self.blocked):
raise TypeError("blocked needs to be a list of strings")
if not self.pseudonymization_id:
self.assign_pseudonymization_id()
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'pseudonymization_id'}.union(kwargs['update_fields'])
if not self.get_deferred_fields():
if Transaction.key(self) != self.__initial_transaction_key or self.canceled != self.__initial_canceled or not self.pk:
@@ -2756,8 +2693,8 @@ class Transaction(models.Model):
class Meta:
ordering = 'datetime', 'pk'
indexes = [
models.Index(fields=['datetime', 'id'])
index_together = [
['datetime', 'id']
]
def save(self, *args, **kwargs):
@@ -2999,17 +2936,10 @@ class InvoiceAddress(models.Model):
self.order.touch()
if self.name_parts:
name = self.name
if self.name_cached != name:
self.name_cached = self.name
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'name_cached'}.union(kwargs['update_fields'])
self.name_cached = self.name
else:
if self.name_cached != "" or self.name_parts != {}:
self.name_cached = ""
self.name_parts = {}
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'name_cached', 'name_parts'}.union(kwargs['update_fields'])
self.name_cached = ""
self.name_parts = {}
super().save(**kwargs)
def describe(self):
@@ -3155,7 +3085,11 @@ class BlockedTicketSecret(models.Model):
updated = models.DateTimeField(auto_now=True)
class Meta:
unique_together = (('event', 'secret'),)
if 'mysql' not in settings.DATABASES['default']['ENGINE']:
# MySQL does not support indexes on TextField(). Django knows this and just ignores db_index, but it will
# not silently ignore the UNIQUE index, causing this table to fail. I'm so glad we're deprecating MySQL
# in a few months, so we'll just live without an unique index until then.
unique_together = (('event', 'secret'),)
@receiver(post_delete, sender=CachedTicket)

View File

@@ -35,12 +35,12 @@
import string
from datetime import date, datetime, time
import pytz_deprecation_shim
import pytz
from django.conf import settings
from django.core.mail import get_connection
from django.core.validators import MinLengthValidator, RegexValidator
from django.db import models
from django.db.models import Q
from django.db.models import Exists, OuterRef, Q
from django.urls import reverse
from django.utils.crypto import get_random_string
from django.utils.functional import cached_property
@@ -102,7 +102,6 @@ class Organizer(LoggedModel):
is_new = not self.pk
obj = super().save(*args, **kwargs)
if is_new:
kwargs.pop('update_fields', None) # does not make sense here
self.set_defaults()
else:
self.get_cache().clear()
@@ -141,7 +140,7 @@ class Organizer(LoggedModel):
@property
def timezone(self):
return pytz_deprecation_shim.timezone(self.settings.timezone)
return pytz.timezone(self.settings.timezone)
@cached_property
def all_logentries_link(self):
@@ -157,19 +156,17 @@ class Organizer(LoggedModel):
return self.cache.get_or_set(
key='has_gift_cards',
timeout=15,
default=lambda: self.issued_gift_cards.exists() or self.gift_card_issuer_acceptance.filter(active=True).exists()
default=lambda: self.issued_gift_cards.exists() or self.gift_card_issuer_acceptance.exists()
)
@property
def accepted_gift_cards(self):
from .giftcards import GiftCard, GiftCardAcceptance
return GiftCard.objects.filter(
Q(issuer=self) |
Q(issuer__in=GiftCardAcceptance.objects.filter(
acceptor=self,
active=True,
).values_list('issuer', flat=True))
return GiftCard.objects.annotate(
accepted=Exists(GiftCardAcceptance.objects.filter(issuer=OuterRef('issuer'), collector=self))
).filter(
Q(issuer=self) | Q(accepted=True)
)
@property

View File

@@ -22,12 +22,9 @@
import json
from decimal import Decimal
import jsonschema
from django.contrib.staticfiles import finders
from django.core.exceptions import ValidationError
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.utils.deconstruct import deconstructible
from django.utils.formats import localize
from django.utils.translation import gettext_lazy as _, pgettext
from i18nfield.fields import I18nCharField
@@ -138,25 +135,6 @@ def cc_to_vat_prefix(country_code):
return country_code
@deconstructible
class CustomRulesValidator:
    """
    Validates a set of custom tax rules.

    The value may be either an already-parsed dict or a raw JSON string;
    it is validated against the JSON schema shipped at
    ``schema/tax-rules-custom.schema.json``.

    :raises ValidationError: if the value is not valid JSON or does not
                             match the schema.
    """

    def __call__(self, value):
        # Accept both pre-parsed dicts and raw JSON strings.
        if not isinstance(value, dict):
            try:
                val = json.loads(value)
            except ValueError:
                raise ValidationError(_('Your layout file is not a valid JSON file.'))
        else:
            val = value
        with open(finders.find('schema/tax-rules-custom.schema.json'), 'r') as f:
            # json.load reads the file object directly (was json.loads(f.read())).
            schema = json.load(f)
        try:
            jsonschema.validate(val, schema)
        except jsonschema.ValidationError as e:
            # Escape percent signs so the message survives later %-formatting.
            e = str(e).replace('%', '%%')
            raise ValidationError(_('Your set of rules is not valid. Error message: {}').format(e))
class TaxRule(LoggedModel):
event = models.ForeignKey('Event', related_name='tax_rules', on_delete=models.CASCADE)
internal_name = models.CharField(
@@ -340,17 +318,10 @@ class TaxRule(LoggedModel):
rules = self._custom_rules
if invoice_address:
for r in rules:
if r['country'] == 'ZZ': # Rule: Any country
pass
elif r['country'] == 'EU': # Rule: Any EU country
if not is_eu_country(invoice_address.country):
continue
elif '-' in r['country']: # Rule: Specific country and state
if r['country'] != str(invoice_address.country) + '-' + str(invoice_address.state):
continue
else: # Rule: Specific country
if r['country'] != str(invoice_address.country):
continue
if r['country'] == 'EU' and not is_eu_country(invoice_address.country):
continue
if r['country'] not in ('ZZ', 'EU') and r['country'] != str(invoice_address.country):
continue
if r['address_type'] == 'individual' and invoice_address.is_business:
continue
if r['address_type'] in ('business', 'business_vat_id') and not invoice_address.is_business:

View File

@@ -502,10 +502,7 @@ class Voucher(LoggedModel):
return seat
def save(self, *args, **kwargs):
if self.code != self.code.upper():
self.code = self.code.upper()
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'code'}.union(kwargs['update_fields'])
self.code = self.code.upper()
super().save(*args, **kwargs)
self.event.cache.set('vouchers_exist', True)

View File

@@ -126,19 +126,12 @@ class WaitingListEntry(LoggedModel):
raise ValidationError('Invalid input')
def save(self, *args, **kwargs):
update_fields = kwargs.get('update_fields', set())
update_fields = kwargs.get('update_fields', [])
if 'name_parts' in update_fields:
kwargs['update_fields'] = {'name_cached'}.union(kwargs['update_fields'])
name = self.name
if name != self.name_cached:
self.name_cached = name
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'name_cached'}.union(kwargs['update_fields'])
update_fields.append('name_cached')
self.name_cached = self.name
if self.name_parts is None:
self.name_parts = {}
if 'update_fields' in kwargs:
kwargs['update_fields'] = {'name_parts'}.union(kwargs['update_fields'])
super().save(*args, **kwargs)
@property
@@ -218,7 +211,7 @@ class WaitingListEntry(LoggedModel):
'waitinglistentry': self.pk,
'subevent': self.subevent.pk if self.subevent else None,
}, user=user, auth=auth)
self.log_action('pretix.event.orders.waitinglist.voucher_assigned', user=user, auth=auth)
self.log_action('pretix.waitinglist.voucher', user=user, auth=auth)
self.voucher = v
self.save()

View File

@@ -28,7 +28,6 @@ import pycountry
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import EmailValidator
from django.db.models import Q
from django.utils import formats
from django.utils.functional import cached_property
from django.utils.translation import (
@@ -43,8 +42,8 @@ from phonenumbers import SUPPORTED_REGIONS
from pretix.base.channels import get_all_sales_channels
from pretix.base.forms.questions import guess_country
from pretix.base.models import (
Customer, ItemVariation, OrderPosition, Question, QuestionAnswer,
QuestionOption, Seat, SubEvent,
ItemVariation, OrderPosition, Question, QuestionAnswer, QuestionOption,
Seat, SubEvent,
)
from pretix.base.services.pricing import get_price
from pretix.base.settings import (
@@ -211,7 +210,7 @@ class SubeventColumn(ImportColumn):
for format in input_formats:
try:
d = datetime.datetime.strptime(value, format)
d = d.replace(tzinfo=self.event.timezone)
d = self.event.timezone.localize(d)
try:
se = self.event.subevents.get(
active=True,
@@ -661,7 +660,7 @@ class ValidFrom(ImportColumn):
for format in input_formats:
try:
d = datetime.datetime.strptime(value, format)
d = d.replace(tzinfo=self.event.timezone)
d = self.event.timezone.localize(d)
return d
except (ValueError, TypeError):
pass
@@ -684,7 +683,7 @@ class ValidUntil(ImportColumn):
for format in input_formats:
try:
d = datetime.datetime.strptime(value, format)
d = d.replace(tzinfo=self.event.timezone)
d = self.event.timezone.localize(d)
return d
except (ValueError, TypeError):
pass
@@ -805,7 +804,7 @@ class QuestionColumn(ImportColumn):
return self.q.clean_answer(value)
def assign(self, value, order, position, invoice_address, **kwargs):
if value is not None:
if value:
if not hasattr(order, '_answers'):
order._answers = []
if isinstance(value, QuestionOption):
@@ -827,28 +826,6 @@ class QuestionColumn(ImportColumn):
a.options.add(*a._options)
class CustomerColumn(ImportColumn):
    """Import column that attaches an existing customer account to the order."""
    identifier = 'customer'
    verbose_name = gettext_lazy('Customer')

    def clean(self, value, previous_values):
        # Empty cells pass through unchanged.
        if not value:
            return value
        customers = self.event.organizer.customers
        lookup = Q(identifier=value) | Q(email__iexact=value) | Q(external_identifier=value)
        try:
            return customers.get(lookup)
        except Customer.MultipleObjectsReturned:
            # Ambiguous match across email/external ID: fall back to the
            # unique customer identifier.
            return customers.get(Q(identifier=value))
        except Customer.DoesNotExist:
            raise ValidationError(_('No matching customer was found.'))

    def assign(self, value, order, position, invoice_address, **kwargs):
        # Attach the matched customer (or falsy value) to the order object.
        order.customer = value
def get_all_columns(event):
default = []
if event.has_subevents:
@@ -860,10 +837,6 @@ def get_all_columns(event):
Variation(event),
InvoiceAddressCompany(event),
]
if event.settings.customer_accounts:
default += [
CustomerColumn(event),
]
scheme = PERSON_NAME_SCHEMES.get(event.settings.name_scheme)
for n, l, w in scheme['fields']:
default.append(InvoiceAddressNamePart(event, n, l))

View File

@@ -39,8 +39,8 @@ import logging
from collections import OrderedDict
from decimal import ROUND_HALF_UP, Decimal
from typing import Any, Dict, Union
from zoneinfo import ZoneInfo
import pytz
from django import forms
from django.conf import settings
from django.contrib import messages
@@ -60,7 +60,7 @@ from pretix.base.channels import get_all_sales_channels
from pretix.base.forms import PlaceholderValidator
from pretix.base.models import (
CartPosition, Event, GiftCard, InvoiceAddress, Order, OrderPayment,
OrderRefund, Quota, TaxRule,
OrderRefund, Quota,
)
from pretix.base.reldate import RelativeDateField, RelativeDateWrapper
from pretix.base.settings import SettingsSandbox
@@ -78,16 +78,6 @@ from pretix.presale.views.cart import cart_session, get_or_create_cart_id
logger = logging.getLogger(__name__)
class WalletQueries:
    """Identifiers for wallet payment methods a provider can report as queryable."""
    APPLEPAY = 'applepay'
    GOOGLEPAY = 'googlepay'

    # (identifier, human-readable lazy label) pairs for all known wallets.
    WALLETS = (
        (APPLEPAY, pgettext_lazy('payment', 'Apple Pay')),
        (GOOGLEPAY, pgettext_lazy('payment', 'Google Pay')),
    )
class PaymentProviderForm(Form):
def clean(self):
cleaned_data = super().clean()
@@ -446,19 +436,6 @@ class BasePaymentProvider:
d['_restrict_to_sales_channels']._as_type = list
return d
@property
def walletqueries(self):
    """
    .. warning:: This property is considered **experimental**. It might change or get removed at any time without
                 prior notice.

    A list of wallet payment methods that should be dynamically joined to the public name of the payment method,
    if they are available to the user.

    The detection is made on a best effort basis with no guarantees of correctness and actual availability.
    Wallets that pretix can check for are exposed through ``pretix.base.payment.WalletQueries``.
    """
    # Base implementation: providers opt in by overriding this property.
    return []
def settings_form_clean(self, cleaned_data):
"""
Overriding this method allows you to inject custom validation into the settings form.
@@ -541,7 +518,7 @@ class BasePaymentProvider:
def _is_still_available(self, now_dt=None, cart_id=None, order=None):
now_dt = now_dt or now()
tz = ZoneInfo(self.event.settings.timezone)
tz = pytz.timezone(self.event.settings.timezone)
availability_date = self.settings.get('_availability_date', as_type=RelativeDateWrapper)
if availability_date:
@@ -1038,11 +1015,7 @@ class FreeOrderProvider(BasePaymentProvider):
cart = get_cart(request)
total = get_cart_total(request)
try:
total += sum([f.value for f in get_fees(self.event, request, total, None, None, cart)])
except TaxRule.SaleNotAllowed:
# ignore for now, will fail on order creation
pass
total += sum([f.value for f in get_fees(self.event, request, total, None, None, cart)])
return total == 0
def order_change_allowed(self, order: Order) -> bool:

View File

@@ -43,18 +43,16 @@ import subprocess
import tempfile
import unicodedata
import uuid
from collections import OrderedDict, defaultdict
from collections import OrderedDict
from functools import partial
from io import BytesIO
import jsonschema
import reportlab.rl_config
from bidi.algorithm import get_display
from django.conf import settings
from django.contrib.staticfiles import finders
from django.core.exceptions import ValidationError
from django.db.models import Max, Min
from django.db.models.fields.files import FieldFile
from django.dispatch import receiver
from django.utils.deconstruct import deconstructible
from django.utils.formats import date_format
@@ -62,8 +60,8 @@ from django.utils.html import conditional_escape
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _, pgettext
from i18nfield.strings import LazyI18nString
from pypdf import PdfReader, PdfWriter, Transformation
from pypdf.generic import RectangleObject
from pypdf import PdfReader
from pytz import timezone
from reportlab.graphics import renderPDF
from reportlab.graphics.barcode.qr import QrCodeWidget
from reportlab.graphics.shapes import Drawing
@@ -88,9 +86,6 @@ from pretix.presale.style import get_fonts
logger = logging.getLogger(__name__)
if not settings.DEBUG:
reportlab.rl_config.shapeChecking = 0
DEFAULT_VARIABLES = OrderedDict((
("secret", {
@@ -108,10 +103,7 @@ DEFAULT_VARIABLES = OrderedDict((
("positionid", {
"label": _("Order position number"),
"editor_sample": "1",
"evaluate": lambda orderposition, order, event: str(orderposition.positionid),
# There is no performance gain in using evaluate_bulk here, but we want to make sure it is used somewhere
# in core to make sure we notice if the implementation of the API breaks.
"evaluate_bulk": lambda orderpositions: [str(p.positionid) for p in orderpositions],
"evaluate": lambda orderposition, order, event: str(orderposition.positionid)
}),
("order_positionid", {
"label": _("Order code and position number"),
@@ -245,7 +237,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Event begin date and time"),
"editor_sample": _("2017-05-31 20:00"),
"evaluate": lambda op, order, ev: date_format(
ev.date_from.astimezone(ev.timezone),
ev.date_from.astimezone(timezone(ev.settings.timezone)),
"SHORT_DATETIME_FORMAT"
) if ev.date_from else ""
}),
@@ -253,7 +245,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Event begin date"),
"editor_sample": _("2017-05-31"),
"evaluate": lambda op, order, ev: date_format(
ev.date_from.astimezone(ev.timezone),
ev.date_from.astimezone(timezone(ev.settings.timezone)),
"SHORT_DATE_FORMAT"
) if ev.date_from else ""
}),
@@ -271,7 +263,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Event end date and time"),
"editor_sample": _("2017-05-31 22:00"),
"evaluate": lambda op, order, ev: date_format(
ev.date_to.astimezone(ev.timezone),
ev.date_to.astimezone(timezone(ev.settings.timezone)),
"SHORT_DATETIME_FORMAT"
) if ev.date_to else ""
}),
@@ -279,7 +271,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Event end date"),
"editor_sample": _("2017-05-31"),
"evaluate": lambda op, order, ev: date_format(
ev.date_to.astimezone(ev.timezone),
ev.date_to.astimezone(timezone(ev.settings.timezone)),
"SHORT_DATE_FORMAT"
) if ev.date_to else ""
}),
@@ -287,7 +279,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Event end time"),
"editor_sample": _("22:00"),
"evaluate": lambda op, order, ev: date_format(
ev.date_to.astimezone(ev.timezone),
ev.date_to.astimezone(timezone(ev.settings.timezone)),
"TIME_FORMAT"
) if ev.date_to else ""
}),
@@ -300,7 +292,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Event admission date and time"),
"editor_sample": _("2017-05-31 19:00"),
"evaluate": lambda op, order, ev: date_format(
ev.date_admission.astimezone(ev.timezone),
ev.date_admission.astimezone(timezone(ev.settings.timezone)),
"SHORT_DATETIME_FORMAT"
) if ev.date_admission else ""
}),
@@ -308,7 +300,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Event admission time"),
"editor_sample": _("19:00"),
"evaluate": lambda op, order, ev: date_format(
ev.date_admission.astimezone(ev.timezone),
ev.date_admission.astimezone(timezone(ev.settings.timezone)),
"TIME_FORMAT"
) if ev.date_admission else ""
}),
@@ -364,9 +356,14 @@ DEFAULT_VARIABLES = OrderedDict((
}),
("addons", {
"label": _("List of Add-Ons"),
"editor_sample": _("Add-on 1\n2x Add-on 2"),
"editor_sample": _("Add-on 1\nAdd-on 2"),
"evaluate": lambda op, order, ev: "\n".join([
str(p) for p in generate_compressed_addon_list(op, order, ev)
'{} - {}'.format(p.item.name, p.variation.value) if p.variation else str(p.item.name)
for p in (
op.addons.all() if 'addons' in getattr(op, '_prefetched_objects_cache', {})
else op.addons.select_related('item', 'variation')
)
if not p.canceled
])
}),
("organizer", {
@@ -388,7 +385,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Printing date"),
"editor_sample": _("2017-05-31"),
"evaluate": lambda op, order, ev: date_format(
now().astimezone(ev.timezone),
now().astimezone(timezone(ev.settings.timezone)),
"SHORT_DATE_FORMAT"
)
}),
@@ -396,7 +393,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Printing date and time"),
"editor_sample": _("2017-05-31 19:00"),
"evaluate": lambda op, order, ev: date_format(
now().astimezone(ev.timezone),
now().astimezone(timezone(ev.settings.timezone)),
"SHORT_DATETIME_FORMAT"
)
}),
@@ -404,7 +401,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Printing time"),
"editor_sample": _("19:00"),
"evaluate": lambda op, order, ev: date_format(
now().astimezone(ev.timezone),
now().astimezone(timezone(ev.settings.timezone)),
"TIME_FORMAT"
)
}),
@@ -412,7 +409,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Validity start date"),
"editor_sample": _("2017-05-31"),
"evaluate": lambda op, order, ev: date_format(
op.valid_from.astimezone(ev.timezone),
op.valid_from.astimezone(timezone(ev.settings.timezone)),
"SHORT_DATE_FORMAT"
) if op.valid_from else ""
}),
@@ -420,7 +417,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Validity start date and time"),
"editor_sample": _("2017-05-31 19:00"),
"evaluate": lambda op, order, ev: date_format(
op.valid_from.astimezone(ev.timezone),
op.valid_from.astimezone(timezone(ev.settings.timezone)),
"SHORT_DATETIME_FORMAT"
) if op.valid_from else ""
}),
@@ -428,7 +425,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Validity start time"),
"editor_sample": _("19:00"),
"evaluate": lambda op, order, ev: date_format(
op.valid_from.astimezone(ev.timezone),
op.valid_from.astimezone(timezone(ev.settings.timezone)),
"TIME_FORMAT"
) if op.valid_from else ""
}),
@@ -436,7 +433,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Validity end date"),
"editor_sample": _("2017-05-31"),
"evaluate": lambda op, order, ev: date_format(
op.valid_until.astimezone(ev.timezone),
op.valid_until.astimezone(timezone(ev.settings.timezone)),
"SHORT_DATE_FORMAT"
) if op.valid_until else ""
}),
@@ -444,7 +441,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Validity end date and time"),
"editor_sample": _("2017-05-31 19:00"),
"evaluate": lambda op, order, ev: date_format(
op.valid_until.astimezone(ev.timezone),
op.valid_until.astimezone(timezone(ev.settings.timezone)),
"SHORT_DATETIME_FORMAT"
) if op.valid_until else ""
}),
@@ -452,7 +449,7 @@ DEFAULT_VARIABLES = OrderedDict((
"label": _("Validity end time"),
"editor_sample": _("19:00"),
"evaluate": lambda op, order, ev: date_format(
op.valid_until.astimezone(ev.timezone),
op.valid_until.astimezone(timezone(ev.settings.timezone)),
"TIME_FORMAT"
) if op.valid_until else ""
}),
@@ -524,7 +521,7 @@ def images_from_questions(sender, *args, **kwargs):
else:
a = op.answers.filter(question_id=question_id).first() or a
if not a or not a.file or not any(a.file.name.lower().endswith(e) for e in (".jpg", ".jpeg", ".png", ".gif", ".bmp", ".tif", ".tiff")):
if not a or not a.file or not any(a.file.name.lower().endswith(e) for e in settings.FILE_UPLOAD_EXTENSIONS_QUESTION_IMAGE):
return None
else:
if etag:
@@ -700,30 +697,6 @@ def get_seat(op: OrderPosition):
return None
def generate_compressed_addon_list(op, order, event):
itemcount = defaultdict(int)
addons = [p for p in (
op.addons.all() if 'addons' in getattr(op, '_prefetched_objects_cache', {})
else op.addons.select_related('item', 'variation')
) if not p.canceled]
for pos in addons:
itemcount[pos.item, pos.variation] += 1
addonlist = []
for (item, variation), count in itemcount.items():
if variation:
if count > 1:
addonlist.append('{}x {} - {}'.format(count, item.name, variation.value))
else:
addonlist.append('{} - {}'.format(item.name, variation.value))
else:
if count > 1:
addonlist.append('{}x {}'.format(count, item.name))
else:
addonlist.append(item.name)
return addonlist
class Renderer:
def __init__(self, event, layout, background_file):
@@ -888,37 +861,22 @@ class Renderer:
image_file = None
if image_file:
ir = ThumbnailingImageReader(image_file)
try:
ir = ThumbnailingImageReader(image_file)
ir.resize(float(o['width']) * mm, float(o['height']) * mm, 300)
canvas.drawImage(
image=ir,
x=float(o['left']) * mm,
y=float(o['bottom']) * mm,
width=float(o['width']) * mm,
height=float(o['height']) * mm,
preserveAspectRatio=True,
anchor='c', # centered in frame
mask='auto'
)
if isinstance(image_file, FieldFile):
# ThumbnailingImageReader "closes" the file, so it's no use to use the same file pointer
# in case we need it again. For FieldFile, fortunately, there is an easy way to make the file
# refresh itself when it is used next.
del image_file.file
except:
logger.exception("Can not load or resize image")
canvas.saveState()
canvas.setFillColorRGB(.8, .8, .8, alpha=1)
canvas.rect(
x=float(o['left']) * mm,
y=float(o['bottom']) * mm,
width=float(o['width']) * mm,
height=float(o['height']) * mm,
stroke=0,
fill=1,
)
canvas.restoreState()
logger.exception("Can not resize image")
pass
canvas.drawImage(
image=ir,
x=float(o['left']) * mm,
y=float(o['bottom']) * mm,
width=float(o['width']) * mm,
height=float(o['height']) * mm,
preserveAspectRatio=True,
anchor='c', # centered in frame
mask='auto'
)
else:
canvas.saveState()
canvas.setFillColorRGB(.8, .8, .8, alpha=1)
@@ -972,7 +930,7 @@ class Renderer:
# reportlab does not support unicode combination characters
# It's important we do this before we use ArabicReshaper
text = unicodedata.normalize("NFC", text)
text = unicodedata.normalize("NFKC", text)
# reportlab does not support RTL, ligature-heavy scripts like Arabic. Therefore, we use ArabicReshaper
# to resolve all ligatures and python-bidi to switch RTL texts.
@@ -1025,10 +983,7 @@ class Renderer:
elif o['type'] == "poweredby":
self._draw_poweredby(canvas, op, o)
if self.bg_pdf:
page_size = (
self.bg_pdf.pages[0].mediabox[2] - self.bg_pdf.pages[0].mediabox[0],
self.bg_pdf.pages[0].mediabox[3] - self.bg_pdf.pages[0].mediabox[1]
)
page_size = (self.bg_pdf.pages[0].mediabox[2], self.bg_pdf.pages[0].mediabox[3])
if self.bg_pdf.pages[0].get('/Rotate') in (90, 270):
# swap dimensions due to pdf being rotated
page_size = page_size[::-1]
@@ -1056,12 +1011,14 @@ class Renderer:
with open(os.path.join(d, 'out.pdf'), 'rb') as f:
return BytesIO(f.read())
else:
from pypdf import PdfReader, PdfWriter, Transformation
from pypdf.generic import RectangleObject
buffer.seek(0)
new_pdf = PdfReader(buffer)
output = PdfWriter()
for i, page in enumerate(new_pdf.pages):
bg_page = copy.deepcopy(self.bg_pdf.pages[i])
bg_page = copy.copy(self.bg_pdf.pages[i])
bg_rotation = bg_page.get('/Rotate')
if bg_rotation:
# /Rotate is clockwise, transformation.rotate is counter-clockwise
@@ -1098,56 +1055,6 @@ class Renderer:
return outbuffer
def merge_background(fg_pdf, bg_pdf, out_file, compress):
    """Stamp each page of ``fg_pdf`` on top of the matching page of ``bg_pdf``
    and write the merged document to ``out_file``.

    :param fg_pdf: pypdf document holding the foreground (content) pages
    :param bg_pdf: pypdf document holding the background pages; assumed to have
                   at least as many pages as ``fg_pdf`` — TODO confirm
    :param out_file: binary file object the merged PDF is written to
    :param compress: if truthy, ask pdftk to compress the output (only honoured
                     on the pdftk code path)
    """
    if settings.PDFTK:
        # External fast path: delegate the merge to the pdftk binary configured
        # in settings, streaming its stdout straight into out_file.
        with tempfile.TemporaryDirectory() as d:
            fg_filename = os.path.join(d, 'fg.pdf')
            bg_filename = os.path.join(d, 'bg.pdf')
            fg_pdf.write(fg_filename)
            bg_pdf.write(bg_filename)
            pdftk_cmd = [
                settings.PDFTK,
                fg_filename,
                'multibackground',
                bg_filename,
                'output',
                '-',  # '-' makes pdftk write the result to stdout
            ]
            if compress:
                pdftk_cmd.append('compress')
            subprocess.run(pdftk_cmd, check=True, stdout=out_file)
    else:
        # Pure-Python path using pypdf.
        output = PdfWriter()
        for i, page in enumerate(fg_pdf.pages):
            # deepcopy so merging does not mutate bg_pdf, which the caller may
            # reuse for further documents.
            bg_page = copy.deepcopy(bg_pdf.pages[i])
            bg_rotation = bg_page.get('/Rotate')
            if bg_rotation:
                # /Rotate is clockwise, transformation.rotate is counter-clockwise
                t = Transformation().rotate(bg_rotation)
                w = float(page.mediabox.getWidth())
                h = float(page.mediabox.getHeight())
                if bg_rotation in (90, 270):
                    # offset due to rotation base
                    if bg_rotation == 90:
                        t = t.translate(h, 0)
                    else:
                        t = t.translate(0, w)
                    # rotate mediabox as well
                    # NOTE(review): the (left, bottom, top, right) ordering swaps
                    # the box dimensions; presumably this assumes the box origin
                    # is at (0, 0) — confirm before touching.
                    page.mediabox = RectangleObject((
                        page.mediabox.left.as_numeric(),
                        page.mediabox.bottom.as_numeric(),
                        page.mediabox.top.as_numeric(),
                        page.mediabox.right.as_numeric(),
                    ))
                    page.trimbox = page.mediabox
                elif bg_rotation == 180:
                    t = t.translate(w, h)
                page.add_transformation(t)
            bg_page.merge_page(page)
            output.add_page(bg_page)
        output.write(out_file)
@deconstructible
class PdfLayoutValidator:
def __call__(self, value):

View File

@@ -22,8 +22,8 @@
import datetime
from collections import namedtuple
from typing import Union
from zoneinfo import ZoneInfo
import pytz
from dateutil import parser
from django import forms
from django.core.exceptions import ValidationError
@@ -67,7 +67,7 @@ class RelativeDateWrapper:
if self.data.minutes_before is not None:
raise ValueError('A minute-based relative datetime can not be used as a date')
tz = ZoneInfo(event.settings.timezone)
tz = pytz.timezone(event.settings.timezone)
if isinstance(event, SubEvent):
base_date = (
getattr(event, self.data.base_date_name)
@@ -86,7 +86,7 @@ class RelativeDateWrapper:
if isinstance(self.data, (datetime.datetime, datetime.date)):
return self.data
else:
tz = ZoneInfo(event.settings.timezone)
tz = pytz.timezone(event.settings.timezone)
if isinstance(event, SubEvent):
base_date = (
getattr(event, self.data.base_date_name)
@@ -99,7 +99,8 @@ class RelativeDateWrapper:
if self.data.minutes_before is not None:
return base_date.astimezone(tz) - datetime.timedelta(minutes=self.data.minutes_before)
else:
new_date = (base_date.astimezone(tz) - datetime.timedelta(days=self.data.days_before)).astimezone(tz)
oldoffset = base_date.astimezone(tz).utcoffset()
new_date = base_date.astimezone(tz) - datetime.timedelta(days=self.data.days_before)
if self.data.time:
new_date = new_date.replace(
hour=self.data.time.hour,
@@ -107,6 +108,8 @@ class RelativeDateWrapper:
second=self.data.time.second
)
new_date = new_date.astimezone(tz)
new_offset = new_date.utcoffset()
new_date += oldoffset - new_offset
return new_date
def to_string(self) -> str:

View File

@@ -141,10 +141,9 @@ error_messages = {
'price_not_a_number': gettext_lazy('The entered price is not a number.'),
'price_too_high': gettext_lazy('The entered price is to high.'),
'voucher_invalid': gettext_lazy('This voucher code is not known in our database.'),
'voucher_min_usages': ngettext_lazy(
'The voucher code "%(voucher)s" can only be used if you select at least %(number)s matching products.',
'The voucher code "%(voucher)s" can only be used if you select at least %(number)s matching products.',
'number'
'voucher_min_usages': gettext_lazy(
'The voucher code "%(voucher)s" can only be used if you select at least %(number)s '
'matching products.'
),
'voucher_min_usages_removed': ngettext_lazy(
'The voucher code "%(voucher)s" can only be used if you select at least %(number)s matching products. '
@@ -318,9 +317,6 @@ class CartManager:
def _delete_out_of_timeframe(self):
err = None
for cp in self.positions:
if not cp.pk:
continue
if cp.subevent and cp.subevent.presale_start and self.now_dt < cp.subevent.presale_start:
err = error_messages['some_subevent_not_started']
cp.addons.all().delete()
@@ -1078,7 +1074,6 @@ class CartManager:
quotas_ok = _get_quota_availability(self._quota_diff, self.now_dt)
err = None
new_cart_positions = []
deleted_positions = set()
err = err or self._check_min_max_per_product()
@@ -1090,10 +1085,7 @@ class CartManager:
if op.position.expires > self.now_dt:
for q in op.position.quotas:
quotas_ok[q] += 1
addons = op.position.addons.all()
deleted_positions |= {a.pk for a in addons}
addons.delete()
deleted_positions.add(op.position.pk)
op.position.addons.all().delete()
op.position.delete()
elif isinstance(op, (self.AddOperation, self.ExtendOperation)):
@@ -1243,28 +1235,20 @@ class CartManager:
if op.seat and not op.seat.is_available(ignore_cart=op.position, sales_channel=self._sales_channel,
ignore_voucher_id=op.position.voucher_id):
err = err or error_messages['seat_unavailable']
addons = op.position.addons.all()
deleted_positions |= {a.pk for a in addons}
deleted_positions.add(op.position.pk)
addons.delete()
op.position.addons.all().delete()
op.position.delete()
elif available_count == 1:
op.position.expires = self._expiry
op.position.listed_price = op.listed_price
op.position.price_after_voucher = op.price_after_voucher
# op.position.price will be updated by recompute_final_prices_and_taxes()
if op.position.pk not in deleted_positions:
try:
op.position.save(force_update=True, update_fields=['expires', 'listed_price', 'price_after_voucher'])
except DatabaseError:
# Best effort... The position might have been deleted in the meantime!
pass
try:
op.position.save(force_update=True, update_fields=['expires', 'listed_price', 'price_after_voucher'])
except DatabaseError:
# Best effort... The position might have been deleted in the meantime!
pass
elif available_count == 0:
addons = op.position.addons.all()
deleted_positions |= {a.pk for a in addons}
deleted_positions.add(op.position.pk)
addons.delete()
op.position.addons.all().delete()
op.position.delete()
else:
raise AssertionError("ExtendOperation cannot affect more than one item")

Some files were not shown because too many files have changed in this diff. Show More