Compare commits


1 Commits

Author: Richard Schreiber
SHA1: f88912adfe
Message: PDF: fix offset in mediabox of background-pdf
Date: 2023-07-04 12:03:00 +02:00
560 changed files with 202051 additions and 394123 deletions

View File

@@ -35,7 +35,7 @@ jobs:
- uses: actions/checkout@v2
- uses: harmon758/postgresql-action@v1
with:
postgresql version: '15'
postgresql version: '11'
postgresql db: 'pretix'
postgresql user: 'postgres'
postgresql password: 'postgres'
@@ -66,10 +66,6 @@ jobs:
- name: Run tests
working-directory: ./src
run: PRETIX_CONFIG_FILE=tests/travis_${{ matrix.database }}.cfg py.test -n 3 -p no:sugar --cov=./ --cov-report=xml --reruns 3 tests --maxfail=100
- name: Run concurrency tests
working-directory: ./src
run: PRETIX_CONFIG_FILE=tests/travis_${{ matrix.database }}.cfg py.test tests/concurrency_tests/ --reruns 0 --reuse-db
if: matrix.database == 'postgres'
- name: Upload coverage
uses: codecov/codecov-action@v1
with:

View File

@@ -1,4 +1,4 @@
FROM python:3.11-bookworm
FROM python:3.11-bullseye
RUN apt-get update && \
apt-get install -y --no-install-recommends \
@@ -20,20 +20,20 @@ RUN apt-get update && \
supervisor \
libmaxminddb0 \
libmaxminddb-dev \
zlib1g-dev \
nodejs \
npm && \
zlib1g-dev && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* && \
dpkg-reconfigure locales && \
locale-gen C.UTF-8 && \
/usr/sbin/update-locale LANG=C.UTF-8 && \
dpkg-reconfigure locales && \
locale-gen C.UTF-8 && \
/usr/sbin/update-locale LANG=C.UTF-8 && \
mkdir /etc/pretix && \
mkdir /data && \
useradd -ms /bin/bash -d /pretix -u 15371 pretixuser && \
echo 'pretixuser ALL=(ALL) NOPASSWD:SETENV: /usr/bin/supervisord' >> /etc/sudoers && \
mkdir /static && \
mkdir /etc/supervisord
mkdir /etc/supervisord && \
curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - && \
apt-get install -y nodejs
ENV LC_ALL=C.UTF-8 \
@@ -63,10 +63,10 @@ RUN pip3 install -U \
RUN chmod +x /usr/local/bin/pretix && \
rm /etc/nginx/sites-enabled/default && \
cd /pretix/src && \
rm -f pretix.cfg && \
mkdir -p data && \
chown -R pretixuser:pretixuser /pretix /data data && \
sudo -u pretixuser make production
rm -f pretix.cfg && \
mkdir -p data && \
chown -R pretixuser:pretixuser /pretix /data data && \
sudo -u pretixuser make production
USER pretixuser
VOLUME ["/etc/pretix", "/data"]

View File

@@ -5,7 +5,7 @@ export DATA_DIR=/data/
export HOME=/pretix
AUTOMIGRATE=${AUTOMIGRATE:-yes}
NUM_WORKERS_DEFAULT=$((2 * $(nproc)))
NUM_WORKERS_DEFAULT=$((2 * $(nproc --all)))
export NUM_WORKERS=${NUM_WORKERS:-$NUM_WORKERS_DEFAULT}
if [ ! -d /data/logs ]; then

View File

@@ -1,4 +1,4 @@
from pretix.settings import *
LOGGING['handlers']['mail_admins']['include_html'] = True
STORAGES["staticfiles"]["BACKEND"] = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'

View File

@@ -42,6 +42,7 @@ Example::
currency=EUR
datadir=/data
plugins_default=pretix.plugins.sendmail,pretix.plugins.statistics
cookie_domain=.pretix.de
``instance_name``
The name of this installation. Default: ``pretix.de``
@@ -70,8 +71,11 @@ Example::
``auth_backends``
A comma-separated list of available auth backends. Defaults to ``pretix.base.auth.NativeAuthBackend``.
``cookie_domain``
The cookie domain to be set. Defaults to ``None``.
``registration``
Enables or disables the registration of new admin users. Defaults to ``off``.
Enables or disables the registration of new admin users. Defaults to ``on``.
``password_reset``
Enables or disables password reset. Defaults to ``on``.
@@ -148,11 +152,6 @@ Example::
password=abcd
host=localhost
port=3306
advisory_lock_index=1
sslmode=require
sslrootcert=/etc/pretix/postgresql-ca.crt
sslcert=/etc/pretix/postgresql-client-crt.crt
sslkey=/etc/pretix/postgresql-client-key.key
``backend``
One of ``sqlite3`` and ``postgresql``.
@@ -164,17 +163,6 @@ Example::
``user``, ``password``, ``host``, ``port``
Connection details for the database connection. Empty by default.
``advisory_lock_index``
On PostgreSQL, pretix uses the "advisory lock" feature. However, advisory locks use a server-wide namespace
and are not scoped to a specific database. If you run multiple pretix applications on the same PostgreSQL server,
you should set separate values for this setting (integers up to 256).
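A minimal illustrative sketch (not pretix's actual locking code) of how such an index can keep the advisory-lock
keys of two installations apart; the key derivation below is an assumption for demonstration only::

    import hashlib

    import psycopg2

    ADVISORY_LOCK_INDEX = 1  # per-installation value, as configured in pretix.cfg

    def acquire_lock(conn, name: str) -> None:
        # Derive a 31-bit key from the lock name and use the per-installation
        # index as the second key, so two installations never request the same pair.
        key = int.from_bytes(hashlib.sha1(name.encode()).digest()[:4], "big") & 0x7FFFFFFF
        with conn.cursor() as cur:
            cur.execute("SELECT pg_advisory_xact_lock(%s, %s)", (ADVISORY_LOCK_INDEX, key))

    with psycopg2.connect("dbname=pretix") as conn:
        acquire_lock(conn, "quota:42")
        # ... perform the guarded work; the lock is released when the transaction ends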
``sslmode``, ``sslrootcert``
Connection TLS details for the PostgreSQL database connection. Possible values of ``sslmode`` are ``disable``, ``allow``, ``prefer``, ``require``, ``verify-ca``, and ``verify-full``. ``sslrootcert`` should be an accessible path to the CA certificate. Both values are empty by default.
``sslcert``, ``sslkey``
Connection mTLS details for the PostgreSQL database connection. It is also necessary to specify the ``sslmode`` and ``sslrootcert`` parameters; see the TLS options above for the correct values. ``sslcert`` should be an accessible path to the client certificate and ``sslkey`` an accessible path to the client key. All values are empty by default.
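For illustration, these settings map directly onto libpq/psycopg2 connection parameters; a hedged sketch using the
example paths from above::

    import psycopg2

    conn = psycopg2.connect(
        dbname="pretix",
        user="pretix",
        host="localhost",
        sslmode="verify-full",                            # require TLS and verify the server certificate
        sslrootcert="/etc/pretix/postgresql-ca.crt",      # CA certificate
        sslcert="/etc/pretix/postgresql-client-crt.crt",  # client certificate (mTLS)
        sslkey="/etc/pretix/postgresql-client-key.key",   # client key (mTLS)
    )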
.. _`config-replica`:
Database replica settings
@@ -336,10 +324,6 @@ to speed up various operations::
["sentinel_host_3", 26379]
]
password=password
ssl_cert_reqs=required
ssl_ca_certs=/etc/pretix/redis-ca.pem
ssl_keyfile=/etc/pretix/redis-client-key.key
ssl_certfile=/etc/pretix/redis-client-crt.pem
``location``
The location of redis, as a URL of the form ``redis://[:password]@localhost:6379/0``
@@ -363,22 +347,6 @@ to speed up various operations::
If your redis setup doesn't require a password or you already specified it in the location, you can omit this option.
If this is set, it will be passed to redis as the connection option ``PASSWORD``.
``ssl_cert_reqs``
If this is set, it will be passed to redis as the connection option ``SSL_CERT_REQS``.
Possible values are ``none``, ``optional``, and ``required``.
``ssl_ca_certs``
If your redis setup doesn't require TLS, you can omit this option.
If this is set, it will be passed to redis as the connection option ``SSL_CA_CERTS``. The value is the path to the CA certificate.
``ssl_keyfile``
If your redis setup doesn't require mTLS, you can omit this option.
If this is set, it will be passed to redis as the connection option ``SSL_KEYFILE``. The value is the path to the client key file.
``ssl_certfile``
If your redis setup doesn't require mTLS, you can omit this option.
If this is set, it will be passed to redis as the connection option ``SSL_CERTFILE``. The value is the path to the client certificate file.
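For illustration, the redis-py client exposes matching keyword arguments; a hedged sketch using the example paths
from above::

    import redis

    r = redis.Redis(
        host="localhost",
        port=6379,
        ssl=True,
        ssl_cert_reqs="required",
        ssl_ca_certs="/etc/pretix/redis-ca.pem",
        ssl_certfile="/etc/pretix/redis-client-crt.pem",
        ssl_keyfile="/etc/pretix/redis-client-key.key",
    )
    r.ping()  # raises if the TLS handshake or authentication fails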
If redis is not configured, pretix will store sessions and locks in the database. If memcached
is configured, memcached will be used for caching instead of redis.
@@ -428,8 +396,6 @@ The two ``transport_options`` entries can be omitted in most cases.
If they are present they need to be a valid JSON dictionary.
For possible entries in that dictionary see the `Celery documentation`_.
It is possible to use Redis with TLS/mTLS for the broker or the backend. To do so, it is necessary to specify the TLS scheme ``rediss``, the SSL mode ``ssl_cert_reqs``, and optionally the CA certificate (``ssl_ca_certs``), client certificate (``ssl_certfile``), and client key (``ssl_keyfile``, mTLS) paths as URL-encoded strings. The following URI describes the format and possible parameters: ``rediss://0.0.0.0:6379/1?ssl_cert_reqs=required&ssl_ca_certs=%2Fetc%2Fpretix%2Fredis-ca.pem&ssl_certfile=%2Fetc%2Fpretix%2Fredis-client-crt.pem&ssl_keyfile=%2Fetc%2Fpretix%2Fredis-client-key.key``
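A small sketch of how such a URI can be assembled with percent-encoded paths (purely illustrative)::

    from urllib.parse import quote, urlencode

    params = {
        "ssl_cert_reqs": "required",
        "ssl_ca_certs": "/etc/pretix/redis-ca.pem",
        "ssl_certfile": "/etc/pretix/redis-client-crt.pem",
        "ssl_keyfile": "/etc/pretix/redis-client-key.key",
    }
    # quote (instead of the default quote_plus) also percent-encodes the slashes in the paths
    broker = "rediss://0.0.0.0:6379/1?" + urlencode(params, quote_via=quote)
    print(broker)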
To use redis with sentinels set the broker or backend to ``sentinel://sentinel_host_1:26379;sentinel_host_2:26379/0``
and the respective transport_options to ``{"master_name":"mymaster"}``.
If your redis instances behind the sentinel have a password use ``sentinel://:my_password@sentinel_host_1:26379;sentinel_host_2:26379/0``.

View File

@@ -26,7 +26,7 @@ installation guides):
* `Docker`_
* An SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for
* An HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections
* A `PostgreSQL`_ 12+ database server
* A `PostgreSQL`_ 11+ database server
* A `redis`_ server
We also recommend that you use a firewall, although this is not a pretix-specific recommendation. If you're new to
@@ -276,8 +276,7 @@ Restarting the service can take a few seconds, especially if the update requires
Replace ``stable`` above with a specific version number like ``1.0`` or with ``latest`` for the development
version, if you want to.
Make sure to also read :ref:`update_notes` and the release notes of the version you are updating to. Pay special
attention to the "Runtime and server environment" section of all release notes between your current and new version.
Make sure to also read :ref:`update_notes` and the release notes of the version you are updating to.
.. _`docker_plugininstall`:

View File

@@ -68,7 +68,7 @@ generated key and installs the plugin from the URL we told you::
mkdir -p /etc/ssh && \
ssh-keyscan -t rsa -p 10022 code.rami.io >> /root/.ssh/known_hosts && \
echo StrictHostKeyChecking=no >> /root/.ssh/config && \
DJANGO_SETTINGS_MODULE= pip3 install -U "git+ssh://git@code.rami.io:10022/pretix/pretix-slack.git@stable#egg=pretix-slack" && \
DJANGO_SETTINGS_MODULE=pretix.settings pip3 install -U "git+ssh://git@code.rami.io:10022/pretix/pretix-slack.git@stable#egg=pretix-slack" && \
cd /pretix/src && \
sudo -u pretixuser make production
USER pretixuser

View File

@@ -12,7 +12,7 @@ solution with many things readily set-up, look at :ref:`dockersmallscale`.
get it right. If you're not feeling comfortable managing a Linux server, check out our hosting and service
offers at `pretix.eu`_.
We tested this guide on the Linux distribution **Debian 12** but it should work very similarly on other
We tested this guide on the Linux distribution **Debian 11.6** but it should work very similarly on other
modern distributions, especially on all systemd-based ones.
Requirements
@@ -24,7 +24,7 @@ installation guides):
* A Python 3.9+ installation
* An SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for
* An HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections
* A `PostgreSQL`_ 12+ database server
* A `PostgreSQL`_ 11+ database server
* A `redis`_ server
* A `nodejs`_ installation
@@ -64,7 +64,7 @@ Package dependencies
To build and run pretix, you will need the following debian packages::
# apt-get install git build-essential python3-dev python3-venv python3 python3-pip \
# apt-get install git build-essential python-dev python3-venv python3 python3-pip \
python3-dev libxml2-dev libxslt1-dev libffi-dev zlib1g-dev libssl-dev \
gettext libpq-dev libjpeg-dev libopenjp2-7-dev
@@ -130,10 +130,9 @@ We now install pretix, its direct dependencies and gunicorn::
Note that you need Python 3.9 or newer. You can find out your Python version using ``python -V``.
We also need to create a data directory and allow your webserver to traverse to the root directory::
We also need to create a data directory::
(venv)$ mkdir -p /var/pretix/data/media
(venv)$ chmod +x /var/pretix
Finally, we compile static files and translation data and create the database structure::
@@ -249,14 +248,14 @@ The following snippet is an example on how to configure a nginx proxy for pretix
}
location /static/ {
alias /var/pretix/venv/lib/python3.11/site-packages/pretix/static.dist/;
alias /var/pretix/venv/lib/python3.10/site-packages/pretix/static.dist/;
access_log off;
expires 365d;
add_header Cache-Control "public";
}
}
.. note:: Remember to replace the ``python3.11`` in the ``/static/`` path in the config
.. note:: Remember to replace the ``python3.10`` in the ``/static/`` path in the config
above with your python version.
We recommend reading about setting `strong encryption settings`_ for your web server.
@@ -286,8 +285,7 @@ To upgrade to a new pretix release, pull the latest code changes and run the fol
(venv)$ python -m pretix updatestyles
# systemctl restart pretix-web pretix-worker
Make sure to also read :ref:`update_notes` and the release notes of the version you are updating to. Pay special
attention to the "Runtime and server environment" section of all release notes between your current and new version.
Make sure to also read :ref:`update_notes` and the release notes of the version you are updating to.
.. _`manual_plugininstall`:

View File

@@ -16,17 +16,12 @@ already upgraded to pretix 5.0 or later, downgrade back to the last 4.x release
Update database schema
----------------------
Before you start, make sure your database schema is up to date. With a local installation::
Before you start, make sure your database schema is up to date::
# sudo -u pretix -s
$ source /var/pretix/venv/bin/activate
(venv)$ python -m pretix migrate
With a docker installation::
docker exec -it pretix.service pretix migrate
Install PostgreSQL
------------------
@@ -75,14 +70,10 @@ Of course, instead of all this you can also run a PostgreSQL docker container an
Stop pretix
-----------
To prevent any more changes to your data, stop pretix from running. With a local installation::
To prevent any more changes to your data, stop pretix from running::
# systemctl stop pretix-web pretix-worker
With docker::
# systemctl stop pretix
Change configuration
--------------------
@@ -99,16 +90,12 @@ Change the database configuration in your ``/etc/pretix/pretix.cfg`` file::
Create database schema
-----------------------
To create the schema in your new PostgreSQL database, use the following commands. With a local installation::
To create the schema in your new PostgreSQL database, use the following commands::
# sudo -u pretix -s
$ source /var/pretix/venv/bin/activate
(venv)$ python -m pretix migrate
With docker::
# docker run --rm -v /var/pretix-data:/data -v /etc/pretix:/etc/pretix -v /var/run/redis:/var/run/redis pretix/standalone:stable migrate
Migrate your data
-----------------
@@ -157,18 +144,11 @@ Afterwards, delete the file again::
Start pretix
------------
Stop your MySQL server to verify that you are no longer using it::
Now, restart pretix. You may want to stop your MySQL server first to verify that you are no longer using it::
# systemctl stop mariadb
Then, restart pretix. With a local installation::
# systemctl start pretix-web pretix-worker
With a docker installation::
# systemctl start pretix
And you're done! After you've verified everything has been copied correctly, you can delete the old MySQL database.
.. note:: Don't forget to update your backup process to back up your PostgreSQL database instead of your MySQL database now.

View File

@@ -47,30 +47,5 @@ Or, with a docker installation::
$ docker exec -it pretix.service pretix create_order_transactions
Upgrade to 2023.6.0 or newer
""""""""""""""""""""""""""""
MariaDB and MySQL are no longer supported.
Upgrade to 2023.8.0 or newer
""""""""""""""""""""""""""""
PostgreSQL 11 is now required.
Upgrade to 2023.9.0 or newer
""""""""""""""""""""""""""""
This release includes a migration that changes the `id` column of all core database tables from `integer`
to `bigint`. If you have a large database, the migration step of the upgrade might take significantly longer than
usual, so plan the update accordingly.
The default value for the `registration` setting in `pretix.cfg` has changed to `false`.
Upgrade to 2023.10.0 or newer
"""""""""""""""""""""""""""""
This release includes a migration that retroactively fills an `organizer` column in the table
`pretixbase_logentry`. If you have a large database, the migration step of the upgrade might take significantly
longer than usual, so plan the update accordingly.
.. _blog: https://pretix.eu/about/en/blog/

View File

@@ -35,13 +35,9 @@ as well as the type of underlying hardware. Example:
"os_name": "Android",
"os_version": "2.3.6",
"software_brand": "pretixdroid",
"software_version": "4.0.0",
"rsa_pubkey": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqh…nswIDAQAB\n-----END PUBLIC KEY-----\n"
"software_version": "4.0.0"
}
The ``rsa_pubkey`` is optional and only required for certain features such as working with reusable
media and NFC cryptography.
Every initialization token can only be used once. On success, you will receive a response containing
information on your device as well as your API token:
@@ -141,29 +137,9 @@ The response will look like this:
"id": 3,
"name": "South entrance"
}
},
"server": {
"version": {
"pretix": "3.6.0.dev0",
"pretix_numeric": 30060001000
}
},
"medium_key_sets": [
{
"public_id": 3456349,
"organizer": "foo",
"active": true,
"media_type": "nfc_mf0aes",
"uid_key": "base64-encoded-encrypted-key",
"diversification_key": "base64-encoded-encrypted-key",
}
]
}
}
``"medium_key_sets`` will always be empty if you did not set an ``rsa_pubkey``.
The individual keys in the key sets are encrypted with the device's ``rsa_pubkey``
using ``RSA/ECB/PKCS1Padding``.
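A hedged device-side sketch of that decryption, assuming the device keeps the private key matching its
``rsa_pubkey`` in a local PEM file (``RSA/ECB/PKCS1Padding`` corresponds to PKCS#1 v1.5 padding)::

    import base64

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import padding

    with open("device_private_key.pem", "rb") as f:  # hypothetical key location
        private_key = serialization.load_pem_private_key(f.read(), password=None)

    def decrypt_key(encrypted_b64: str) -> bytes:
        return private_key.decrypt(base64.b64decode(encrypted_b64), padding.PKCS1v15())

    # e.g. uid_key = decrypt_key(key_set["uid_key"]) for each entry of medium_key_sets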
Creating a new API key
----------------------

View File

@@ -31,7 +31,6 @@ Checking a ticket in
This endpoint supports passing multiple check-in lists to perform a multi-event scan. However, each check-in list
passed needs to be from a distinct event.
:query string expand: Expand a field inside the ``position`` object into a full object. Currently ``subevent``, ``item``, ``variation``, and ``answers.question`` are supported. Can be passed multiple times.
:<json string secret: Scanned QR code corresponding to the ``secret`` attribute of a ticket.
:<json string source_type: Type of source the ``secret`` was obtained from. Defaults to ``"barcode"``.
:<json array lists: List of check-in list IDs to search on. No two check-in lists may be from the same event.
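For illustration, a redeem request with these body parameters might look like the following sketch; the endpoint
path, organizer slug, token, secret, and list IDs are placeholders/assumptions::

    import requests

    resp = requests.post(
        "https://pretix.eu/api/v1/organizers/bigevents/checkinrpc/redeem/",  # path assumed
        headers={"Authorization": "Token YOUR_API_TOKEN"},
        json={
            "secret": "SCANNED_QR_CODE",
            "source_type": "barcode",
            "lists": [1, 44],  # at most one check-in list per event
        },
    )
    print(resp.status_code, resp.json().get("status"))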
@@ -64,7 +63,6 @@ Checking a ticket in
``checkin_attention`` flag set. (3) If ``attendee_name`` is empty, it may automatically fall
back to values from a parent product or from invoice addresses.
:>json boolean require_attention: Whether or not the ``require_attention`` flag is set on the item or order.
:>json list checkin_texts: List of additional texts to show to the user.
:>json object list: Excerpt of information about the matching :ref:`check-in list <rest-checkinlists>` (if any was found),
including the attributes ``id``, ``name``, ``event``, ``subevent``, and ``include_pending``.
:>json object questions: List of questions to be answered for check-in, only set on status ``"incomplete"``.
@@ -105,7 +103,6 @@ Checking a ticket in
},
"require_attention": false,
"checkin_texts": [],
"list": {
"id": 1,
"name": "Default check-in list",
@@ -128,7 +125,6 @@ Checking a ticket in
},
"require_attention": false,
"checkin_texts": [],
"list": {
"id": 1,
"name": "Default check-in list",
@@ -146,7 +142,6 @@ Checking a ticket in
"position": 1,
"identifier": "WY3TP9SL",
"ask_during_checkin": true,
"show_during_checkin": true,
"options": [
{
"id": 1,
@@ -183,8 +178,7 @@ Checking a ticket in
"status": "error",
"reason": "invalid",
"reason_explanation": null,
"require_attention": false,
"checkin_texts": []
"require_attention": false
}
**Example error response (known, but invalid ticket)**:
@@ -199,7 +193,6 @@ Checking a ticket in
"reason": "unpaid",
"reason_explanation": null,
"require_attention": false,
"checkin_texts": [],
"list": {
"id": 1,
"name": "Default check-in list",
@@ -224,7 +217,6 @@ Checking a ticket in
* ``rules`` - Check-in prevented by a user-defined rule.
* ``ambiguous`` - Multiple tickets match scan, rejected.
* ``revoked`` - Ticket code has been revoked.
* ``unapproved`` - Order has not yet been approved.
* ``error`` - Internal error.
In case of reason ``rules`` and ``invalid_time``, there might be an additional response field ``reason_explanation``

View File

@@ -37,18 +37,12 @@ allow_entry_after_exit boolean If ``true``, su
rules object Custom check-in logic. The contents of this field are currently not considered a stable API and modifications through the API are highly discouraged.
exit_all_at datetime Automatically check out (i.e. perform an exit scan) at this point in time. After this happened, this property will automatically be set exactly one day into the future. Note that this field is considered "internal configuration" and if you pull the list with ``If-Modified-Since``, the daily change in this field will not trigger a response.
addon_match boolean If ``true``, tickets on this list can be redeemed by scanning their parent ticket if this still leads to an unambiguous match.
ignore_in_statistics boolean If ``true``, check-ins on this list will be ignored in most reporting features.
consider_tickets_used boolean If ``true`` (default), tickets checked in on this list will be considered "used" by other functionality, i.e. when checking if they can still be canceled.
===================================== ========================== =======================================================
.. versionchanged:: 4.12
The ``addon_match`` attribute has been added.
.. versionchanged:: 2023.9
The ``ignore_in_statistics`` and ``consider_tickets_used`` attributes have been added.
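As a hedged illustration, these fields could be supplied when creating a check-in list through the API; the
endpoint path and the ``name``/``all_products`` fields are assumptions based on the usual resource layout::

    import requests

    resp = requests.post(
        "https://pretix.eu/api/v1/organizers/bigevents/events/sampleconf/checkinlists/",
        headers={"Authorization": "Token YOUR_API_TOKEN"},
        json={
            "name": "Main entrance",       # assumed required field
            "all_products": True,          # assumed field
            "addon_match": True,
            "ignore_in_statistics": False,
            "consider_tickets_used": True,
        },
    )
    print(resp.status_code, resp.json())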
Endpoints
---------
@@ -498,7 +492,7 @@ Order position endpoints
``attendee_name,positionid``
:query string order: Only return positions of the order with the given order code
:query string search: Fuzzy search matching the attendee name, order code, invoice address name as well as to the beginning of the secret.
:query string expand: Expand a field into a full object. Currently ``subevent``, ``item``, ``variation``, and ``answers.question`` are supported. Can be passed multiple times.
:query string expand: Expand a field into a full object. Currently only ``subevent``, ``item``, and ``variation`` are supported. Can be passed multiple times.
:query integer item: Only return positions with the purchased item matching the given ID.
:query integer item__in: Only return positions with the purchased item matching one of the given comma-separated IDs.
:query integer variation: Only return positions with the purchased item variation matching the given ID.
@@ -632,8 +626,7 @@ Order position endpoints
set this to ``false``. In that case, questions will just be ignored. Defaults
to ``true``.
:<json boolean canceled_supported: When this parameter is set to ``true``, the response code ``canceled`` may be
returned. Otherwise, canceled orders will return ``unpaid``. (**Deprecated**, in
the future, this will be ignored and ``canceled`` may always be returned.)
returned. Otherwise, canceled orders will return ``unpaid``.
:<json datetime datetime: Specifies the datetime of the check-in. If not supplied, the current time will be used.
:<json boolean force: Specifies that the check-in should succeed regardless of revoked barcode, previous check-ins or required
questions that have not been filled. This is usually used to upload offline scans that already happened,
@@ -707,7 +700,6 @@ Order position endpoints
"position": 1,
"identifier": "WY3TP9SL",
"ask_during_checkin": true,
"show_during_checkin": true,
"options": [
{
"id": 1,
@@ -760,7 +752,6 @@ Order position endpoints
* ``rules`` - Check-in prevented by a user-defined rule.
* ``ambiguous`` - Multiple tickets match scan, rejected.
* ``revoked`` - Ticket code has been revoked.
* ``unapproved`` - Order has not yet been approved.
In case of reason ``rules`` or ``invalid_time``, there might be an additional response field ``reason_explanation``
with a human-readable description of the violated rules. However, that field can also be missing or be ``null``.
@@ -776,4 +767,4 @@ Order position endpoints
:statuscode 404: The requested order position or check-in list does not exist.
.. _security issues: https://pretix.eu/about/de/blog/20220705-release-4111/
.. _security issues: https://pretix.eu/about/de/blog/20220705-release-4111/

View File

@@ -31,9 +31,9 @@ subevent_mode strings Determines h
``"same"`` (discount is only applied for groups within
the same date), or ``"distinct"`` (discount is only applied
for groups with no two same dates).
condition_all_products boolean If ``true``, the discount condition applies to all items.
condition_all_products boolean If ``true``, the discount applies to all items.
condition_limit_products list of integers If ``condition_all_products`` is not set, this is a list
of internal item IDs that the discount condition applies to.
of internal item IDs that the discount applies to.
condition_apply_to_addons boolean If ``true``, the discount applies to add-on products as well,
otherwise it only applies to top-level items. The discount never
applies to bundled products.
@@ -48,17 +48,6 @@ benefit_discount_matching_percent decimal (string) The percenta
benefit_only_apply_to_cheapest_n_matches integer If set higher than 0, the discount will only be applied to
the cheapest matches. Useful for a "3 for 2"-style discount.
Cannot be combined with ``condition_min_value``.
benefit_same_products boolean If ``true``, the discount benefit applies to the same set of items
as the condition (see above).
benefit_limit_products list of integers If ``benefit_same_products`` is not set, this is a list
of internal item IDs that the discount benefit applies to.
benefit_apply_to_addons boolean (Only used if ``benefit_same_products`` is ``false``.)
If ``true``, the discount applies to add-on products as well,
otherwise it only applies to top-level items. The discount never
applies to bundled products.
benefit_ignore_voucher_discounted boolean (Only used if ``benefit_same_products`` is ``false``.)
If ``true``, the discount does not apply to products which have
been discounted by a voucher.
======================================== ========================== =======================================================
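To make the "3 for 2" combination of ``condition_min_count`` and ``benefit_only_apply_to_cheapest_n_matches``
concrete, here is a rough sketch of the arithmetic (not pretix's actual implementation)::

    from decimal import Decimal

    def three_for_two(prices: list[Decimal]) -> Decimal:
        """Every full group of 3 items makes the cheapest matching item free."""
        groups = len(prices) // 3              # condition_min_count = 3
        free = sorted(prices)[:groups]         # benefit_only_apply_to_cheapest_n_matches = 1 per group
        return sum(prices, Decimal("0")) - sum(free, Decimal("0"))

    print(three_for_two([Decimal("10"), Decimal("12"), Decimal("8")]))  # 22 -> the 8.00 ticket is free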
@@ -105,10 +94,6 @@ Endpoints
"condition_ignore_voucher_discounted": false,
"condition_min_count": 3,
"condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1
}
@@ -161,10 +146,6 @@ Endpoints
"condition_ignore_voucher_discounted": false,
"condition_min_count": 3,
"condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1
}
@@ -203,10 +184,6 @@ Endpoints
"condition_ignore_voucher_discounted": false,
"condition_min_count": 3,
"condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1
}
@@ -234,10 +211,6 @@ Endpoints
"condition_ignore_voucher_discounted": false,
"condition_min_count": 3,
"condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1
}
@@ -294,10 +267,6 @@ Endpoints
"condition_ignore_voucher_discounted": false,
"condition_min_count": 3,
"condition_min_value": "0.00",
"benefit_same_products": true,
"benefit_limit_products": [],
"benefit_apply_to_addons": true,
"benefit_ignore_voucher_discounted": false,
"benefit_discount_matching_percent": "100.00",
"benefit_only_apply_to_cheapest_n_matches": 1
}

View File

@@ -36,8 +36,6 @@ geo_lon float Longitude of th
has_subevents boolean ``true`` if the event series feature is active for this
event. Cannot change after event is created.
meta_data object Values set for organizer-specific meta data parameters.
The allowed keys need to be set up as meta properties
in the organizer configuration.
plugins list A list of package names of the enabled plugins for this
event.
seating_plan integer If reserved seating is in use, the ID of a seating
@@ -345,8 +343,8 @@ Endpoints
Creates a new event with properties as set in the request body. The properties that are copied are: ``is_public``,
``testmode``, ``has_subevents``, settings, plugin settings, items, variations, add-ons, quotas, categories, tax rules, questions.
If the ``plugins``, ``has_subevents``, ``meta_data`` and/or ``is_public`` fields are present in the post body this will
determine their value. Otherwise their value will be copied from the existing event.
If the ``plugins``, ``has_subevents`` and/or ``is_public`` fields are present in the post body this will determine their
value. Otherwise their value will be copied from the existing event.
Please note that you can only copy from events under the same organizer this way. Use the ``clone_from`` parameter
when creating a new event for this instead.
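A hedged example of creating an event as a copy using ``clone_from``; the exact field set and the value format of
``clone_from`` are assumptions for illustration::

    import requests

    resp = requests.post(
        "https://pretix.eu/api/v1/organizers/bigevents/events/",
        headers={"Authorization": "Token YOUR_API_TOKEN"},
        json={
            "name": {"en": "Sample Conference 2024"},
            "slug": "sampleconf24",
            "date_from": "2024-12-27T10:00:00Z",
            "clone_from": "sampleconf",  # assumed: slug of the event to copy from
        },
    )
    print(resp.status_code)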
@@ -567,8 +565,6 @@ organizer level.
.. warning:: This API is intended for advanced users. Even though we take care to validate your input, you will be
able to break your event using this API by creating situations of conflicting settings. Please take care.
.. note:: When authenticating with :ref:`rest-deviceauth`, only a limited subset of settings is available.
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/settings/
Get current values of event settings.

View File

@@ -1,7 +1,5 @@
.. spelling:word-list:: checkin
.. _rest-exporters:
Data exporters
==============

View File

@@ -40,7 +40,6 @@ at :ref:`plugin-docs`.
webhooks
seatingplans
exporters
scheduled_exports
shredders
sendmail_rules
billing_invoices

View File

@@ -12,7 +12,6 @@ The invoice resource contains the following public fields:
Field Type Description
===================================== ========================== =======================================================
number string Invoice number (with prefix)
event string The slug of the parent event
order string Order code of the order this invoice belongs to
is_cancellation boolean ``true``, if this invoice is the cancellation of a
different invoice.
@@ -122,13 +121,9 @@ internal_reference string Customer's refe
The attribute ``lines.subevent`` has been added.
.. versionchanged:: 2023.8
The ``event`` attribute has been added. The organizer-level endpoint has been added.
List of all invoices
--------------------
Endpoints
---------
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/invoices/
@@ -157,7 +152,6 @@ List of all invoices
"results": [
{
"number": "SAMPLECONF-00001",
"event": "sampleconf",
"order": "ABC12",
"is_cancellation": false,
"invoice_from_name": "Big Events LLC",
@@ -227,50 +221,6 @@ List of all invoices
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
.. http:get:: /api/v1/organizers/(organizer)/invoices/
Returns a list of all invoices within all events of a given organizer (with sufficient access permissions).
Supported query parameters and output format of this endpoint are identical to the list endpoint within an event.
**Example request**:
.. sourcecode:: http
GET /api/v1/organizers/bigevents/invoices/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"count": 1,
"next": null,
"previous": null,
"results": [
{
"number": "SAMPLECONF-00001",
"event": "sampleconf",
"order": "ABC12",
...
]
}
:param organizer: The ``slug`` field of the organizer to fetch
:statuscode 200: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
Fetching individual invoices
----------------------------
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/invoices/(number)/
Returns information on one invoice, identified by its invoice number.
@@ -293,7 +243,6 @@ Fetching individual invoices
{
"number": "SAMPLECONF-00001",
"event": "sampleconf",
"order": "ABC12",
"is_cancellation": false,
"invoice_from_name": "Big Events LLC",
@@ -388,12 +337,6 @@ Fetching individual invoices
:statuscode 409: The file is not yet ready and will now be prepared. Retry the request after waiting for a few
seconds.
Modifying invoices
------------------
Invoices cannot be edited directly, but the following actions can be triggered:
.. http:post:: /api/v1/organizers/(organizer)/events/(event)/invoices/(invoice_no)/reissue/
Cancels the invoice and creates a new one.

View File

@@ -18,8 +18,6 @@ default_price money (string) The price set d
price money (string) The price used for this variation. This is either the
same as ``default_price`` if that value is set or equal
to the item's ``default_price`` (read-only).
free_price_suggestion money (string) A suggested price, used as a default value if
``Item.free_price`` is set (or ``null``).
original_price money (string) An original price, shown for comparison, not used
for price calculations (or ``null``).
active boolean If ``false``, this variation will not be sold or shown.
@@ -29,8 +27,6 @@ position integer An integer, use
checkin_attention boolean If ``true``, the check-in app should show a warning
that this ticket requires special attention if such
a variation is being scanned.
checkin_text string Text that will be shown if a ticket of this type is
scanned (or ``null``).
require_approval boolean If ``true``, orders with this variation will need to be
approved by the event organizer before they can be
paid.
@@ -57,12 +53,6 @@ meta_data object Values set for
The ``meta_data`` and ``checkin_attention`` attributes have been added.
.. versionchanged:: 2023.10
The ``free_price_suggestion`` attribute has been added.
The ``checkin_text`` attribute has been added.
Endpoints
---------
@@ -98,7 +88,6 @@ Endpoints
},
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_hidden": false,
@@ -114,7 +103,6 @@ Endpoints
"default_price": "223.00",
"price": 223.0,
"original_price": null,
"free_price_suggestion": null,
"meta_data": {}
},
{
@@ -124,21 +112,14 @@ Endpoints
},
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_hidden": false,
"require_membership_types": [],
"sales_channels": ["web"],
"available_from": null,
"available_until": null,
"hide_without_voucher": false,
"description": {},
"position": 1,
"default_price": "223.00",
"price": 223.0,
"original_price": null,
"free_price_suggestion": null,
"default_price": null,
"price": 15.0,
"meta_data": {}
}
]
@@ -182,10 +163,8 @@ Endpoints
"default_price": "10.00",
"price": "10.00",
"original_price": null,
"free_price_suggestion": null,
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_hidden": false,
@@ -225,7 +204,6 @@ Endpoints
"default_price": "10.00",
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_hidden": false,
@@ -253,10 +231,8 @@ Endpoints
"default_price": "10.00",
"price": "10.00",
"original_price": null,
"free_price_suggestion": null,
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_hidden": false,
@@ -315,10 +291,8 @@ Endpoints
"default_price": "10.00",
"price": "10.00",
"original_price": null,
"free_price_suggestion": null,
"active": false,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_hidden": false,

View File

@@ -29,8 +29,6 @@ free_price boolean If ``true``,
they buy the product (however, the price can't be set
lower than the price defined by ``default_price`` or
otherwise).
free_price_suggestion money (string) A suggested price, used as a default value if
``free_price`` is set (or ``null``).
tax_rate decimal (string) The VAT rate to be applied for this item (read-only,
set through ``tax_rule``).
tax_rule integer The internal ID of the applied tax rule (or ``null``).
@@ -52,12 +50,9 @@ available_from datetime The first dat
(or ``null``).
available_until datetime The last date time at which this item can be bought
(or ``null``).
hidden_if_available integer **DEPRECATED** The internal ID of a quota object, or ``null``. If
hidden_if_available integer The internal ID of a quota object, or ``null``. If
set, this item won't be shown publicly as long as this
quota is available.
hidden_if_item_available integer The internal ID of a different item, or ``null``. If
set, this item won't be shown publicly as long as this
other item is available.
require_voucher boolean If ``true``, this item can only be bought using a
voucher that is specifically assigned to this item.
hide_without_voucher boolean If ``true``, this item is only shown during the voucher
@@ -74,8 +69,6 @@ max_per_order integer This product
checkin_attention boolean If ``true``, the check-in app should show a warning
that this ticket requires special attention if such
a product is being scanned.
checkin_text string Text that will be shown if a ticket of this type is
scanned (or ``null``).
original_price money (string) An original price, shown for comparison, not used
for price calculations (or ``null``).
require_approval boolean If ``true``, orders with this product will need to be
@@ -130,8 +123,6 @@ variations list of objects A list with o
├ price money (string) The price used for this variation. This is either the
same as ``default_price`` if that value is set or equal
to the item's ``default_price``.
├ free_price_suggestion money (string) A suggested price, used as a default value if
``free_price`` is set (or ``null``).
├ original_price money (string) An original price, shown for comparison, not used
for price calculations (or ``null``).
├ active boolean If ``false``, this variation will not be sold or shown.
@@ -139,8 +130,6 @@ variations list of objects A list with o
├ checkin_attention boolean If ``true``, the check-in app should show a warning
that this ticket requires special attention if such
a variation is being scanned.
├ checkin_text string Text that will be shown if a ticket of this type is
scanned (or ``null``).
├ require_approval boolean If ``true``, orders with this variation will need to be
approved by the event organizer before they can be
paid.
@@ -207,16 +196,6 @@ meta_data object Values set fo
The ``media_policy`` and ``media_type`` attributes have been added.
.. versionchanged:: 2023.10
The ``checkin_text`` and ``variations[x].checkin_text`` attributes have been added.
The ``free_price_suggestion`` and ``variations[x].free_price_suggestion`` attributes have been added.
.. versionchanged:: 2023.10
The ``hidden_if_item_available`` attributes has been added, the ``hidden_if_available`` attribute has been
deprecated.
Notes
-----
@@ -267,7 +246,6 @@ Endpoints
"active": true,
"description": null,
"free_price": false,
"free_price_suggestion": null,
"tax_rate": "0.00",
"tax_rule": 1,
"admission": false,
@@ -281,14 +259,12 @@ Endpoints
"available_from": null,
"available_until": null,
"hidden_if_available": null,
"hidden_if_item_available": null,
"require_voucher": false,
"hide_without_voucher": false,
"allow_cancel": true,
"min_per_order": null,
"max_per_order": null,
"checkin_attention": false,
"checkin_text": null,
"has_variations": false,
"generate_tickets": null,
"allow_waitinglist": true,
@@ -315,10 +291,8 @@ Endpoints
"default_price": "10.00",
"price": "10.00",
"original_price": null,
"free_price_suggestion": null,
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_types": [],
@@ -335,10 +309,8 @@ Endpoints
"default_price": null,
"price": "23.00",
"original_price": null,
"free_price_suggestion": null,
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_types": [],
@@ -405,7 +377,6 @@ Endpoints
"active": true,
"description": null,
"free_price": false,
"free_price_suggestion": null,
"tax_rate": "0.00",
"tax_rule": 1,
"admission": false,
@@ -419,7 +390,6 @@ Endpoints
"available_from": null,
"available_until": null,
"hidden_if_available": null,
"hidden_if_item_available": null,
"require_voucher": false,
"hide_without_voucher": false,
"allow_cancel": true,
@@ -429,7 +399,6 @@ Endpoints
"min_per_order": null,
"max_per_order": null,
"checkin_attention": false,
"checkin_text": null,
"has_variations": false,
"require_approval": false,
"require_bundling": false,
@@ -453,10 +422,8 @@ Endpoints
"default_price": "10.00",
"price": "10.00",
"original_price": null,
"free_price_suggestion": null,
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_types": [],
@@ -473,10 +440,8 @@ Endpoints
"default_price": null,
"price": "23.00",
"original_price": null,
"free_price_suggestion": null,
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_types": [],
@@ -524,7 +489,6 @@ Endpoints
"active": true,
"description": null,
"free_price": false,
"free_price_suggestion": null,
"tax_rate": "0.00",
"tax_rule": 1,
"admission": false,
@@ -538,7 +502,6 @@ Endpoints
"available_from": null,
"available_until": null,
"hidden_if_available": null,
"hidden_if_item_available": null,
"require_voucher": false,
"hide_without_voucher": false,
"allow_cancel": true,
@@ -548,7 +511,6 @@ Endpoints
"min_per_order": null,
"max_per_order": null,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_bundling": false,
"require_membership": false,
@@ -571,10 +533,8 @@ Endpoints
"default_price": "10.00",
"price": "10.00",
"original_price": null,
"free_price_suggestion": null,
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_types": [],
@@ -591,10 +551,8 @@ Endpoints
"default_price": null,
"price": "23.00",
"original_price": null,
"free_price_suggestion": null,
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_types": [],
@@ -630,7 +588,6 @@ Endpoints
"active": true,
"description": null,
"free_price": false,
"free_price_suggestion": null,
"tax_rate": "0.00",
"tax_rule": 1,
"admission": false,
@@ -644,7 +601,6 @@ Endpoints
"available_from": null,
"available_until": null,
"hidden_if_available": null,
"hidden_if_item_available": null,
"require_voucher": false,
"hide_without_voucher": false,
"allow_cancel": true,
@@ -654,7 +610,6 @@ Endpoints
"allow_waitinglist": true,
"show_quota_left": null,
"checkin_attention": false,
"checkin_text": null,
"has_variations": true,
"require_approval": false,
"require_bundling": false,
@@ -678,10 +633,8 @@ Endpoints
"default_price": "10.00",
"price": "10.00",
"original_price": null,
"free_price_suggestion": null,
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_types": [],
@@ -698,10 +651,8 @@ Endpoints
"default_price": null,
"price": "23.00",
"original_price": null,
"free_price_suggestion": null,
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_types": [],
@@ -768,7 +719,6 @@ Endpoints
"active": true,
"description": null,
"free_price": false,
"free_price_suggestion": null,
"tax_rate": "0.00",
"tax_rule": 1,
"admission": false,
@@ -782,7 +732,6 @@ Endpoints
"available_from": null,
"available_until": null,
"hidden_if_available": null,
"hidden_if_item_available": null,
"require_voucher": false,
"hide_without_voucher": false,
"generate_tickets": null,
@@ -792,7 +741,6 @@ Endpoints
"min_per_order": null,
"max_per_order": null,
"checkin_attention": false,
"checkin_text": null,
"has_variations": true,
"require_approval": false,
"require_bundling": false,
@@ -816,10 +764,8 @@ Endpoints
"default_price": "10.00",
"price": "10.00",
"original_price": null,
"free_price_suggestion": null,
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_types": [],
@@ -836,10 +782,8 @@ Endpoints
"default_price": null,
"price": "23.00",
"original_price": null,
"free_price_suggestion": null,
"active": true,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"require_membership": false,
"require_membership_types": [],

View File

@@ -20,7 +20,6 @@ The order resource contains the following public fields:
Field Type Description
===================================== ========================== =======================================================
code string Order code
event string The slug of the parent event
status string Order status, one of:
* ``n`` pending
@@ -46,8 +45,6 @@ custom_followup_at date Internal date f
checkin_attention boolean If ``true``, the check-in app should show a warning
that this ticket requires special attention if a ticket
of this order is scanned.
checkin_text string Text that will be shown if a ticket of this order is
scanned (or ``null``).
invoice_address object Invoice address information (can be ``null``)
├ last_modified datetime Last modification date of the address
├ company string Customer company name
@@ -133,22 +130,6 @@ last_modified datetime Last modificati
The ``valid_if_pending`` attribute has been added.
.. versionchanged:: 2023.8
The ``event`` attribute has been added. The organizer-level endpoint has been added.
.. versionchanged:: 2023.9
The ``customer`` query parameter has been added.
.. versionchanged:: 2023.10
The ``checkin_text`` attribute has been added.
.. versionchanged:: 2024.1
The ``expires`` attribute can now be passed during order creation.
.. _order-position-resource:
@@ -308,7 +289,6 @@ List of all orders
"results": [
{
"code": "ABC12",
"event": "sampleconf",
"status": "p",
"testmode": false,
"secret": "k24fiuwvu8kxz3y1",
@@ -328,7 +308,6 @@ List of all orders
"comment": "",
"custom_followup_at": null,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"valid_if_pending": false,
"invoice_address": {
@@ -435,7 +414,6 @@ List of all orders
:query string code: Only return orders that match the given order code
:query string status: Only return orders in the given order status (see above)
:query string search: Only return orders matching a given search query (matching for names, email addresses, and company names)
:query string customer: Only show orders linked to the given customer.
:query integer item: Only return orders with a position that contains this item ID. *Warning:* Result will also include orders if they contain mixed items, and it will even return orders where the item is only contained in a canceled position.
:query integer variation: Only return orders with a position that contains this variation ID. *Warning:* Result will also include orders if they contain mixed items and variations, and it will even return orders where the variation is only contained in a canceled position.
:query boolean testmode: Only return orders with ``testmode`` set to ``true`` or ``false``
@@ -463,48 +441,6 @@ List of all orders
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
.. http:get:: /api/v1/organizers/(organizer)/orders/
Returns a list of all orders within all events of a given organizer (with sufficient access permissions).
Supported query parameters and output format of this endpoint are identical to the list endpoint within an event,
with the exception that the ``pdf_data`` parameter is not supported here.
**Example request**:
.. sourcecode:: http
GET /api/v1/organizers/bigevents/orders/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
X-Page-Generated: 2017-12-01T10:00:00Z
{
"count": 1,
"next": null,
"previous": null,
"results": [
{
"code": "ABC12",
"event": "sampleconf",
...
}
]
}
:param organizer: The ``slug`` field of the organizer to fetch
:statuscode 200: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
Fetching individual orders
--------------------------
@@ -530,7 +466,6 @@ Fetching individual orders
{
"code": "ABC12",
"event": "sampleconf",
"status": "p",
"testmode": false,
"secret": "k24fiuwvu8kxz3y1",
@@ -550,7 +485,6 @@ Fetching individual orders
"comment": "",
"custom_followup_at": null,
"checkin_attention": false,
"checkin_text": null,
"require_approval": false,
"valid_if_pending": false,
"invoice_address": {
@@ -721,8 +655,6 @@ Updating order fields
* ``checkin_attention``
* ``checkin_text``
* ``locale``
* ``comment``
@@ -733,8 +665,6 @@ Updating order fields
* ``valid_if_pending``
* ``expires``
**Example request**:
.. sourcecode:: http
@@ -940,7 +870,6 @@ Creating orders
* ``comment`` (optional)
* ``custom_followup_at`` (optional)
* ``checkin_attention`` (optional)
* ``checkin_text`` (optional)
* ``require_approval`` (optional)
* ``valid_if_pending`` (optional)
* ``invoice_address`` (optional)
@@ -1588,7 +1517,6 @@ List of all order positions
``order__datetime,positionid``
:query string order: Only return positions of the order with the given order code
:query string search: Fuzzy search matching the attendee name, order code, invoice address name as well as to the beginning of the secret.
:query string customer: Only show orders linked to the given customer.
:query integer item: Only return positions with the purchased item matching the given ID.
:query integer item__in: Only return positions with the purchased item matching one of the given comma-separated IDs.
:query integer variation: Only return positions with the purchased item variation matching the given ID.

View File

@@ -44,8 +44,6 @@ identifier string An arbitrary st
ask_during_checkin boolean If ``true``, this question will not be asked while
buying the ticket, but will show up when redeeming
the ticket instead.
show_during_checkin boolean If ``true``, the answer to the question will be shown
during check-in (if the check-in client supports it).
hidden boolean If ``true``, the question will only be shown in the
backend.
print_on_invoice boolean If ``true``, the question will only be shown on
@@ -79,10 +77,6 @@ dependency_value string An old version
for one value. **Deprecated.**
===================================== ========================== =======================================================
.. versionchanged:: 2023.8
The ``show_during_checkin`` attribute has been added.
Endpoints
---------
@@ -121,7 +115,6 @@ Endpoints
"position": 1,
"identifier": "WY3TP9SL",
"ask_during_checkin": false,
"show_during_checkin": false,
"hidden": false,
"print_on_invoice": false,
"valid_number_min": null,
@@ -201,7 +194,6 @@ Endpoints
"position": 1,
"identifier": "WY3TP9SL",
"ask_during_checkin": false,
"show_during_checkin": false,
"hidden": false,
"print_on_invoice": false,
"valid_number_min": null,
@@ -265,7 +257,6 @@ Endpoints
"items": [1, 2],
"position": 1,
"ask_during_checkin": false,
"show_during_checkin": false,
"hidden": false,
"print_on_invoice": false,
"dependency_question": null,
@@ -302,7 +293,6 @@ Endpoints
"position": 1,
"identifier": "WY3TP9SL",
"ask_during_checkin": false,
"show_during_checkin": false,
"hidden": false,
"print_on_invoice": false,
"dependency_question": null,
@@ -358,7 +348,7 @@ Endpoints
.. sourcecode:: http
PATCH /api/v1/organizers/bigevents/events/sampleconf/questions/1/ HTTP/1.1
PATCH /api/v1/organizers/bigevents/events/sampleconf/items/1/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
Content-Type: application/json
@@ -386,7 +376,6 @@ Endpoints
"position": 2,
"identifier": "WY3TP9SL",
"ask_during_checkin": false,
"show_during_checkin": false,
"hidden": false,
"print_on_invoice": false,
"dependency_question": null,
@@ -426,7 +415,7 @@ Endpoints
:param event: The ``slug`` field of the event to modify
:param id: The ``id`` field of the question to modify
:statuscode 200: no error
:statuscode 400: The question could not be modified due to invalid submitted data
:statuscode 400: The item could not be modified due to invalid submitted data
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to change this resource.
@@ -438,7 +427,7 @@ Endpoints
.. sourcecode:: http
DELETE /api/v1/organizers/bigevents/events/sampleconf/questions/1/ HTTP/1.1
DELETE /api/v1/organizers/bigevents/events/sampleconf/items/1/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
@@ -451,7 +440,7 @@ Endpoints
:param organizer: The ``slug`` field of the organizer to modify
:param event: The ``slug`` field of the event to modify
:param id: The ``id`` field of the question to delete
:param id: The ``id`` field of the item to delete
:statuscode 204: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to delete this resource.

View File

@@ -18,7 +18,7 @@ The reusable medium resource contains the following public fields:
Field Type Description
===================================== ========================== =======================================================
id integer Internal ID of the medium
type string Type of medium, e.g. ``"barcode"``, ``"nfc_uid"`` or ``"nfc_mf0aes"``.
type string Type of medium, e.g. ``"barcode"`` or ``"nfc_uid"``.
organizer string Organizer slug of the organizer who "owns" this medium.
identifier string Unique identifier of the medium. The format depends on the ``type``.
active boolean Whether this medium may be used.
@@ -37,7 +37,6 @@ Existing media types are:
- ``barcode``
- ``nfc_uid``
- ``nfc_mf0aes``
Endpoints
---------

View File

@@ -1,556 +0,0 @@
.. spelling:word-list:: checkin
Scheduled data exports
======================
pretix and its plugins include a number of data exporters that allow you to bulk download various data from pretix in
different formats. You should read :ref:`rest-exporters` first to get an understanding of the basic mechanism.
Exports can be scheduled to be sent at specific times automatically, both on organizer level and event level.
Scheduled export resource
-------------------------
The scheduled export contains the following public fields:
.. rst-class:: rest-resource-table
===================================== ========================== =======================================================
Field Type Description
===================================== ========================== =======================================================
id integer Internal ID of the schedule
owner string Email address of the user who created this schedule (read-only).
This address will always receive the export and the export
will only contain data that this user has permission
to access at the time of the export. **We consider this
field experimental; its behaviour might change in the future.
Note that the email address of a user can change at any time.**
export_identifier string Identifier of the export to run, see :ref:`rest-exporters`
export_form_data object Input data for the export, format depends on the export,
see :ref:`rest-exporters` for more details.
locale string Language to run the export in
mail_additional_recipients string Email addresses to receive the export, comma-separated (or empty string)
mail_additional_recipients_cc string Email addresses to receive the export in copy, comma-separated (or empty string)
mail_additional_recipients_bcc string Email addresses to receive the export in blind copy, comma-separated (or empty string)
mail_subject string Subject to use for the email (currently no variables supported)
mail_template string Text to use for the email (currently no variables supported)
schedule_rrule string Recurrence specification to determine the **days** this
schedule runs on in ``RRULE`` syntax following `RFC 5545`_
with some restrictions. Only one rule is allowed, only
one occurrence per day is allowed, and some features
are not supported (``BYMONTHDAY``, ``BYYEARDAY``,
``BYEASTER``, ``BYWEEKNO``).
schedule_rrule_time time Time of day to run this on the specified days.
Will be interpreted as local time of the event for event-level
exports. For organizer-level exports, the timezone is given
in the field ``timezone``. The export will never run **before**
this time but it **may** run **later**.
timezone string Time zone to interpret the schedule in (only for organizer-level exports)
schedule_next_run datetime Next planned execution (read-only, computed by server)
error_counter integer Number of consecutive times this export failed (read-only).
After a number of failures (currently 5), the schedule is no
longer executed. Changing any parameter resets the value.
===================================== ========================== =======================================================
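For illustration only, here is a hedged sketch of how a client could approximate the combination of ``schedule_rrule``
and ``schedule_rrule_time`` using ``python-dateutil``. This is not the server's implementation; timezone handling is
simplified and, as noted above, the actual run may happen later than the computed time::

    from datetime import datetime, time
    from zoneinfo import ZoneInfo

    from dateutil.rrule import rrulestr

    rule = rrulestr("DTSTART:20230118T000000\nRRULE:FREQ=WEEKLY;BYDAY=TU,WE,TH")
    tz = ZoneInfo("Europe/Berlin")  # event or organizer timezone
    run_time = time(4, 0)           # schedule_rrule_time

    next_day = rule.after(datetime.now(), inc=True)  # next day matched by the rule
    next_run = datetime.combine(next_day.date(), run_time, tzinfo=tz)
    print(next_run.isoformat())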
Special notes on permissions
----------------------------
Permission handling for scheduled exports is more complex than for most other objects. The reason for this is that
there are two levels of access control involved here: First, you need permission to access or change the configuration
of the scheduled exports at the moment you are doing it. Second, you **continuously** need permission to access the
**data** that is exported as part of the schedule. For this reason, scheduled exports always need one user account
to be their **owner**.
Therefore, scheduled exports **must** be created by an API client using :ref:`OAuth authentication <rest-oauth>`.
It is impossible to create a scheduled export using token authentication. After the export is created, it can also be
modified using token authentication.
A user or token with the "can change settings" permission for a given organizer or event can see and change
**all** scheduled exports created for the respective organizer or event, regardless of who created them.
A user without this permission can only see **their own** scheduled exports.
A token without this permission cannot see scheduled exports at all.
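Since creation requires OAuth, a minimal hedged sketch of creating a scheduled export with the ``requests`` library
could look like this (the access token and the organizer/event slugs are placeholders)::

    import requests

    resp = requests.post(
        "https://pretix.eu/api/v1/organizers/bigevents/events/sampleconf/scheduled_exports/",
        headers={"Authorization": "Bearer YOUR_OAUTH_ACCESS_TOKEN"},
        json={
            "export_identifier": "orderlist",
            "export_form_data": {"_format": "xlsx", "date_range": "week_previous"},
            "locale": "en",
            "mail_additional_recipients": "mary@example.org",
            "mail_additional_recipients_cc": "",
            "mail_additional_recipients_bcc": "",
            "mail_subject": "Order list",
            "mail_template": "Here is last week's order list",
            "schedule_rrule": "DTSTART:20230118T000000\nRRULE:FREQ=WEEKLY;BYDAY=TU,WE,TH",
            "schedule_rrule_time": "04:00:00",
        },
    )
    resp.raise_for_status()
    print(resp.json()["schedule_next_run"])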
Endpoints for event exports
---------------------------
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/scheduled_exports/
Returns a list of all scheduled exports the client has access to.
**Example request**:
.. sourcecode:: http
GET /api/v1/organizers/bigevents/events/sampleconf/scheduled_exports/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"count": 1,
"next": null,
"previous": null,
"results": [
{
"id": 1,
"owner": "john@example.com",
"export_identifier": "orderlist",
"export_form_data": {"_format": "xlsx", "date_range": "week_previous"},
"locale": "en",
"mail_additional_recipients": "mary@example.org",
"mail_additional_recipients_cc": "",
"mail_additional_recipients_bcc": "",
"mail_subject": "Order list",
"mail_template": "Here is last week's order list\n\nCheers\nJohn",
"schedule_rrule": "DTSTART:20230118T000000\nRRULE:FREQ=WEEKLY;BYDAY=TU,WE,TH",
"schedule_rrule_time": "04:00:00",
"schedule_next_run": "2023-10-26T02:00:00Z",
"error_counter": 0
}
]
}
:query integer page: The page number in case of a multi-page result set, default is 1
:query string ordering: Manually set the ordering of results. Valid fields to be used are ``id``, ``export_identifier``, and ``schedule_next_run``.
Default: ``id``
:param organizer: The ``slug`` field of the organizer to fetch
:param event: The ``slug`` field of the event to fetch
:statuscode 200: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/scheduled_exports/(id)/
Returns information on one scheduled export, identified by its ID.
**Example request**:
.. sourcecode:: http
GET /api/v1/organizers/bigevents/events/sampleconf/scheduled_exports/1/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"id": 1,
"owner": "john@example.com",
"export_identifier": "orderlist",
"export_form_data": {"_format": "xlsx", "date_range": "week_previous"},
"locale": "en",
"mail_additional_recipients": "mary@example.org",
"mail_additional_recipients_cc": "",
"mail_additional_recipients_bcc": "",
"mail_subject": "Order list",
"mail_template": "Here is last week's order list\n\nCheers\nJohn",
"schedule_rrule": "DTSTART:20230118T000000\nRRULE:FREQ=WEEKLY;BYDAY=TU,WE,TH",
"schedule_rrule_time": "04:00:00",
"schedule_next_run": "2023-10-26T02:00:00Z",
"error_counter": 0
}
:param organizer: The ``slug`` field of the organizer to fetch
:param event: The ``slug`` field of the event to fetch
:param id: The ``id`` field of the scheduled export to fetch
:statuscode 200: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
.. http:post:: /api/v1/organizers/(organizer)/events/(event)/scheduled_exports/
Schedule a new export.
.. note:: See above for special notes on permissions.
**Example request**:
.. sourcecode:: http
POST /api/v1/organizers/bigevents/events/sampleconf/scheduled_exports/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
Content-Type: application/json
{
"export_identifier": "orderlist",
"export_form_data": {"_format": "xlsx", "date_range": "week_previous"},
"locale": "en",
"mail_additional_recipients": "mary@example.org",
"mail_additional_recipients_cc": "",
"mail_additional_recipients_bcc": "",
"mail_subject": "Order list",
"mail_template": "Here is last week's order list\n\nCheers\nJohn",
"schedule_rrule": "DTSTART:20230118T000000\nRRULE:FREQ=WEEKLY;BYDAY=TU,WE,TH",
"schedule_rrule_time": "04:00:00"
}
**Example response**:
.. sourcecode:: http
HTTP/1.1 201 Created
Vary: Accept
Content-Type: application/json
{
"id": 1,
"owner": "john@example.com",
"export_identifier": "orderlist",
"export_form_data": {"_format": "xlsx", "date_range": "week_previous"},
"locale": "en",
"mail_additional_recipients": "mary@example.org",
"mail_additional_recipients_cc": "",
"mail_additional_recipients_bcc": "",
"mail_subject": "Order list",
"mail_template": "Here is last week's order list\n\nCheers\nJohn",
"schedule_rrule": "DTSTART:20230118T000000\nRRULE:FREQ=WEEKLY;BYDAY=TU,WE,TH",
"schedule_rrule_time": "04:00:00",
"schedule_next_run": "2023-10-26T02:00:00Z",
"error_counter": 0
}
:param organizer: The ``slug`` field of the organizer of the event to create an item for
:param event: The ``slug`` field of the event to create an item for
:statuscode 201: no error
:statuscode 400: The item could not be created due to invalid submitted data.
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to create this resource.
.. http:patch:: /api/v1/organizers/(organizer)/events/(event)/scheduled_exports/(id)/
Update a scheduled export. You can also use ``PUT`` instead of ``PATCH``. With ``PUT``, you have to provide all fields of
the resource, other fields will be reset to default. With ``PATCH``, you only need to provide the fields that you
want to change.
**Example request**:
.. sourcecode:: http
PATCH /api/v1/organizers/bigevents/events/sampleconf/scheduled_exports/1/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
Content-Type: application/json
Content-Length: 94
{
"export_form_data": {"_format": "xlsx", "date_range": "week_this"},
}
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"id": 1,
"owner": "john@example.com",
"export_identifier": "orderlist",
"export_form_data": {"_format": "xlsx", "date_range": "week_this"},
"locale": "en",
"mail_additional_recipients": "mary@example.org",
"mail_additional_recipients_cc": "",
"mail_additional_recipients_bcc": "",
"mail_subject": "Order list",
"mail_template": "Here is last week's order list\n\nCheers\nJohn",
"schedule_rrule": "DTSTART:20230118T000000\nRRULE:FREQ=WEEKLY;BYDAY=TU,WE,TH",
"schedule_rrule_time": "04:00:00",
"schedule_next_run": "2023-10-26T02:00:00Z",
"error_counter": 0
}
:param organizer: The ``slug`` field of the organizer to modify
:param event: The ``slug`` field of the event to modify
:param id: The ``id`` field of the export to modify
:statuscode 200: no error
:statuscode 400: The export could not be modified due to invalid submitted data
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to change this resource.
.. http:delete:: /api/v1/organizers/(organizer)/events/(event)/scheduled_exports/(id)/
Delete a scheduled export.
**Example request**:
.. sourcecode:: http
DELETE /api/v1/organizers/bigevents/events/sampleconf/scheduled_exports/1/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 204 No Content
Vary: Accept
:param organizer: The ``slug`` field of the organizer to modify
:param event: The ``slug`` field of the event to modify
:param id: The ``id`` field of the export to delete
:statuscode 204: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to delete this resource.
Endpoints for organizer exports
-------------------------------
.. http:get:: /api/v1/organizers/(organizer)/scheduled_exports/
Returns a list of all scheduled exports the client has access to.
**Example request**:
.. sourcecode:: http
GET /api/v1/organizers/bigevents/scheduled_exports/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"count": 1,
"next": null,
"previous": null,
"results": [
{
"id": 1,
"owner": "john@example.com",
"export_identifier": "orderlist",
"export_form_data": {"_format": "xlsx", "date_range": "week_previous"},
"locale": "en",
"mail_additional_recipients": "mary@example.org",
"mail_additional_recipients_cc": "",
"mail_additional_recipients_bcc": "",
"mail_subject": "Order list",
"mail_template": "Here is last week's order list\n\nCheers\nJohn",
"schedule_rrule": "DTSTART:20230118T000000\nRRULE:FREQ=WEEKLY;BYDAY=TU,WE,TH",
"schedule_rrule_time": "04:00:00",
"schedule_next_run": "2023-10-26T02:00:00Z",
"timezone": "Europe/Berlin",
"error_counter": 0
}
]
}
:query integer page: The page number in case of a multi-page result set, default is 1
:query string ordering: Manually set the ordering of results. Valid fields to be used are ``id``, ``export_identifier``, and ``schedule_next_run``.
Default: ``id``
:param organizer: The ``slug`` field of the organizer to fetch
:statuscode 200: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer does not exist **or** you have no permission to view this resource.
.. http:get:: /api/v1/organizers/(organizer)/scheduled_exports/(id)/
Returns information on one scheduled export, identified by its ID.
**Example request**:
.. sourcecode:: http
GET /api/v1/organizers/bigevents/scheduled_exports/1/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"id": 1,
"owner": "john@example.com",
"export_identifier": "orderlist",
"export_form_data": {"_format": "xlsx", "date_range": "week_previous"},
"locale": "en",
"mail_additional_recipients": "mary@example.org",
"mail_additional_recipients_cc": "",
"mail_additional_recipients_bcc": "",
"mail_subject": "Order list",
"mail_template": "Here is last week's order list\n\nCheers\nJohn",
"schedule_rrule": "DTSTART:20230118T000000\nRRULE:FREQ=WEEKLY;BYDAY=TU,WE,TH",
"schedule_rrule_time": "04:00:00",
"schedule_next_run": "2023-10-26T02:00:00Z",
"timezone": "Europe/Berlin",
"error_counter": 0
}
:param organizer: The ``slug`` field of the organizer to fetch
:param id: The ``id`` field of the scheduled export to fetch
:statuscode 200: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer does not exist **or** you have no permission to view this resource.
.. http:post:: /api/v1/organizers/(organizer)/scheduled_exports/
Schedule a new export.
.. note:: See above for special notes on permissions.
**Example request**:
.. sourcecode:: http
POST /api/v1/organizers/bigevents/scheduled_exports/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
Content-Type: application/json
{
"export_identifier": "orderlist",
"export_form_data": {"_format": "xlsx", "date_range": "week_previous"},
"locale": "en",
"mail_additional_recipients": "mary@example.org",
"mail_additional_recipients_cc": "",
"mail_additional_recipients_bcc": "",
"mail_subject": "Order list",
"mail_template": "Here is last week's order list\n\nCheers\nJohn",
"schedule_rrule": "DTSTART:20230118T000000\nRRULE:FREQ=WEEKLY;BYDAY=TU,WE,TH",
"schedule_rrule_time": "04:00:00",
"timezone": "Europe/Berlin"
}
**Example response**:
.. sourcecode:: http
HTTP/1.1 201 Created
Vary: Accept
Content-Type: application/json
{
"id": 1,
"owner": "john@example.com",
"export_identifier": "orderlist",
"export_form_data": {"_format": "xlsx", "date_range": "week_previous"},
"locale": "en",
"mail_additional_recipients": "mary@example.org",
"mail_additional_recipients_cc": "",
"mail_additional_recipients_bcc": "",
"mail_subject": "Order list",
"mail_template": "Here is last week's order list\n\nCheers\nJohn",
"schedule_rrule": "DTSTART:20230118T000000\nRRULE:FREQ=WEEKLY;BYDAY=TU,WE,TH",
"schedule_rrule_time": "04:00:00",
"schedule_next_run": "2023-10-26T02:00:00Z",
"timezone": "Europe/Berlin",
"error_counter": 0
}
:param organizer: The ``slug`` field of the organizer of the event to create an item for
:statuscode 201: no error
:statuscode 400: The item could not be created due to invalid submitted data.
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer does not exist **or** you have no permission to create this resource.
.. http:patch:: /api/v1/organizers/(organizer)/scheduled_exports/(id)/
Update a scheduled export. You can also use ``PUT`` instead of ``PATCH``. With ``PUT``, you have to provide all fields of
the resource, other fields will be reset to default. With ``PATCH``, you only need to provide the fields that you
want to change.
**Example request**:
.. sourcecode:: http
PATCH /api/v1/organizers/bigevents/scheduled_exports/1/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
Content-Type: application/json
Content-Length: 94
{
"export_form_data": {"_format": "xlsx", "date_range": "week_this"},
}
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"id": 1,
"owner": "john@example.com",
"export_identifier": "orderlist",
"export_form_data": {"_format": "xlsx", "date_range": "week_this"},
"locale": "en",
"mail_additional_recipients": "mary@example.org",
"mail_additional_recipients_cc": "",
"mail_additional_recipients_bcc": "",
"mail_subject": "Order list",
"mail_template": "Here is last week's order list\n\nCheers\nJohn",
"schedule_rrule": "DTSTART:20230118T000000\nRRULE:FREQ=WEEKLY;BYDAY=TU,WE,TH",
"schedule_rrule_time": "04:00:00",
"schedule_next_run": "2023-10-26T02:00:00Z",
"timezone": "Europe/Berlin",
"error_counter": 0
}
:param organizer: The ``slug`` field of the organizer to modify
:param id: The ``id`` field of the export to modify
:statuscode 200: no error
:statuscode 400: The export could not be modified due to invalid submitted data
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer does not exist **or** you have no permission to change this resource.
.. http:delete:: /api/v1/organizers/(organizer)/scheduled_exports/(id)/
Delete a scheduled export.
**Example request**:
.. sourcecode:: http
DELETE /api/v1/organizers/bigevents/scheduled_exports/1/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 204 No Content
Vary: Accept
:param organizer: The ``slug`` field of the organizer to modify
:param id: The ``id`` field of the export to delete
:statuscode 204: no error
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer does not exist **or** you have no permission to delete this resource.
.. _RFC 5545: https://datatracker.ietf.org/doc/html/rfc5545#section-3.8.5.3

View File

@@ -1,10 +1,10 @@
Scheduled email rules
Automated email rules
=====================
Resource description
--------------------
Scheduled email rules that specify emails that the system will send automatically at a specific point in time, e.g.
Automated email rules that specify emails that the system will send automatically at a specific point in time, e.g.
the day of the event.
.. rst-class:: rest-resource-table
@@ -18,19 +18,8 @@ subject multi-lingual string The subject of
template multi-lingual string The body of the email
all_products boolean If ``true``, the email is sent to buyers of all products
limit_products list of integers List of product IDs, if ``all_products`` is not set
[**DEPRECATED**] include_pending boolean If ``true``, the email is sent to pending orders. If ``false``,
include_pending boolean If ``true``, the email is sent to pending orders. If ``false``,
only paid orders are considered.
restrict_to_status list List of order states to restrict recipients to. Valid
entries are ``p`` for paid, ``e`` for expired, ``c`` for canceled,
``n__pending_approval`` for pending approval,
``n__not_pending_approval_and_not_valid_if_pending`` for payment
pending, ``n__valid_if_pending`` for payment pending but already confirmed,
and ``n__pending_overdue`` for pending with payment overdue.
The default is ``["p", "n__valid_if_pending"]``.
checked_in_status string Check-in status to restrict recipients to. Valid strings are:
``null`` for no filtering (default), ``checked_in`` for
limiting to attendees that are or have been checked in, and
``no_checkin`` for limiting to attendees who have not checked in.
date_is_absolute boolean If ``true``, the email is set at a specific point in time.
send_date datetime If ``date_is_absolute`` is set: Date and time to send the email.
send_offset_days integer If ``date_is_absolute`` is not set, this is the number of days
@@ -48,10 +37,7 @@ send_to string Can be ``"order
or ``"both"``.
date. Otherwise it is relative to the event start date.
===================================== ========================== =======================================================
.. versionchanged:: 2023.7
The ``include_pending`` field has been deprecated.
The ``restrict_to_status`` field has been added.
Endpoints
---------
@@ -88,12 +74,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true,
"limit_products": [],
"restrict_to_status": [
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": null,
"include_pending": false,
"send_date": null,
"send_offset_days": 1,
"send_offset_time": "18:00",
@@ -139,12 +120,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true,
"limit_products": [],
"restrict_to_status": [
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": null,
"include_pending": false,
"send_date": null,
"send_offset_days": 1,
"send_offset_time": "18:00",
@@ -181,12 +157,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true,
"limit_products": [],
"restrict_to_status": [
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": "checked_in",
"include_pending": false,
"send_date": null,
"send_offset_days": 1,
"send_offset_time": "18:00",
@@ -211,12 +182,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true,
"limit_products": [],
"restrict_to_status": [
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": "checked_in",
"include_pending": false,
"send_date": null,
"send_offset_days": 1,
"send_offset_time": "18:00",
@@ -269,12 +235,7 @@ Endpoints
"template": {"en": "Don't forget your tickets, download them at {url}"},
"all_products": true,
"limit_products": [],
"restrict_to_status": [
"p",
"n__not_pending_approval_and_not_valid_if_pending",
"n__valid_if_pending"
],
"checked_in_status": "checked_in",
"include_pending": false,
"send_date": null,
"send_offset_days": 1,
"send_offset_time": "18:00",

View File

@@ -68,10 +68,6 @@ last_modified datetime Last modificati
The ``date_from_before``, ``date_from_after``, ``date_to_before``, and ``date_to_after`` query parameters have been
added.
.. versionchanged:: 2023.8.0
For the organizer-wide endpoint, the ``search`` query parameter has been modified to also filter sub-events by their parent event's slug.
Endpoints
---------
@@ -476,7 +472,6 @@ Endpoints
:query date_to_after: If set to a date and time, only events that have an end date and end at or after the given time are returned.
:query date_to_before: If set to a date and time, only events that have an end date and end at or before the given time are returned.
:query ends_after: If set to a date and time, only events that happen during or after the given time are returned.
:query search: Only return events matching a given search query.
:query sales_channel: If set to a sales channel identifier, the response will only contain subevents from events available on this sales channel.
:param organizer: The ``slug`` field of a valid organizer
:param event: The ``slug`` field of the event to fetch

View File

@@ -67,9 +67,6 @@ The following values for ``action_types`` are valid with pretix core:
* ``pretix.event.live.deactivated``
* ``pretix.event.testmode.activated``
* ``pretix.event.testmode.deactivated``
* ``pretix.customer.created``
* ``pretix.customer.changed``
* ``pretix.customer.anonymized``
Installed plugins might register more valid values.

View File

@@ -11,7 +11,7 @@ Core
----
.. automodule:: pretix.base.signals
:members: periodic_task, event_live_issues, event_copy_data, email_filter, register_notification_types, notification,
:members: periodic_task, event_live_issues, event_copy_data, email_filter, register_notification_types,
item_copy_data, register_sales_channels, register_global_settings, quota_availability, global_email_filter,
register_ticket_secret_generators, gift_card_transaction_display
@@ -61,7 +61,7 @@ Backend
item_formsets, order_search_filter_q, order_search_forms
.. automodule:: pretix.base.signals
:members: logentry_display, logentry_object_link, requiredaction_display, timeline_events, orderposition_blocked_display, customer_created, customer_signed_in
:members: logentry_display, logentry_object_link, requiredaction_display, timeline_events, orderposition_blocked_display
Vouchers
""""""""

View File

@@ -70,8 +70,6 @@ The provider class
.. autoattribute:: settings_form_fields
.. autoattribute:: walletqueries
.. automethod:: settings_form_clean
.. automethod:: settings_content_render

View File

@@ -37,7 +37,7 @@ you to execute a piece of code with a different locale:
This is very useful e.g. when sending an email to a user that has a different language than the user performing the
action that causes the mail to be sent.
.. _translation features: https://docs.djangoproject.com/en/4.2/topics/i18n/translation/
.. _translation features: https://docs.djangoproject.com/en/1.9/topics/i18n/translation/
.. _GNU gettext: https://www.gnu.org/software/gettext/
.. _strings: https://django-i18nfield.readthedocs.io/en/latest/strings.html
.. _database fields: https://django-i18nfield.readthedocs.io/en/latest/quickstart.html

View File

@@ -18,4 +18,3 @@ Contents:
email
permissions
logging
locking

View File

@@ -1,69 +0,0 @@
.. highlight:: python
Resource locking
================
.. versionchanged:: 2023.8
Our locking mechanism changed heavily in version 2023.8. Read `this PR`_ for background information.
One of pretix's core objectives as a ticketing system could be described as the management of scarce resources.
Specifically, the following types of scarce-ness exist in pretix:
- Quotas can limit the number of tickets available
- Seats can only be booked once
- Vouchers can only be used a limited number of times
- Some memberships can only be used a limited number of times
For all of these, it is critical that we prevent race conditions.
While for some events it wouldn't be a big deal to sell one ticket more or less, for others it would be problematic, and selling the same seat twice would always be catastrophic.
We therefore implement a standardized locking approach across the system to limit concurrency in cases where it could
be problematic.
To acquire a lock on a set of quotas before creating a new order that uses them, use the following pattern::

    from django.db import transaction
    from pretix.base.models import Quota
    from pretix.base.services.locking import lock_objects  # import path assumed, check your pretix version

    with transaction.atomic(durable=True):
        quotas = Quota.objects.filter(...)  # the quotas your transaction will "use"
        lock_objects(quotas, shared_lock_objects=[event])
        check_quota(quotas)    # placeholder: verify availability after acquiring the lock
        create_ticket()        # placeholder: perform the actual booking
The lock will automatically be released at the end of your database transaction.
Generally, follow the following guidelines during your development:
- **Always** acquire a lock on every **quota**, **voucher** or **seat** that you "use" during your transaction. "Use"
here means any action after which the quota, voucher or seat will be **less available**, such as creating a cart
position, creating an order, creating a blocking voucher, etc.
- There is **no need** to acquire a lock if you **free up** capacity, e.g. by canceling an order, deleting a voucher, etc.
- **Always** acquire a shared lock on the ``event`` you are working in whenever you acquire a lock on a quota, voucher,
or seat.
- Only call ``lock_objects`` **once** per transaction. If you violate this rule, `deadlocks`_ become possible.
- For best performance, call ``lock_objects`` as **late** in your transaction as possible, but always before you check
if the desired resource is still available in sufficient quantity.
Behind the scenes, the locking is implemented through `PostgreSQL advisory locks`_. You should also be aware of the following
properties of our system:
- In some situations, an exclusive lock on the ``event`` is used, such as when the system can't determine for sure which
seats will become unavailable after the transaction.
- An exclusive lock on the event is also used if you pass more than 20 objects to ``lock_objects``. This is a performance
trade-off because it would take too long to acquire all of the individual locks.
- If ``lock_objects`` is unable to acquire a lock within 3 seconds, a ``LockTimeoutException`` will be thrown.
.. note::
We currently do not use ``lock_objects`` for memberships. Instead, we use ``select_for_update()`` on the membership
model. This might change in the future, but you should usually not be concerned about it since
``validate_memberships_in_order(lock=True)`` will handle it for you.
.. _this PR: https://github.com/pretix/pretix/pull/2408
.. _deadlocks: https://www.postgresql.org/docs/current/explicit-locking.html#LOCKING-DEADLOCKS
.. _PostgreSQL advisory locks: https://www.postgresql.org/docs/11/explicit-locking.html#ADVISORY-LOCKS

View File

@@ -15,33 +15,25 @@ and the admin panel is available at ``https://pretix.eu/control/event/bigorg/awe
If the organizer now configures a custom domain like ``tickets.bigorg.com``, their event will
from now on be available on ``https://tickets.bigorg.com/awesomecon/``. The former URL at
``pretix.eu`` will redirect there. It's also possible to do this for just an event, in which
case the event will be available on ``https://tickets.awesomecon.org/``.
However, the admin panel will still only be available on ``pretix.eu`` for convenience and security reasons.
``pretix.eu`` will redirect there. However, the admin panel will still only be available
on ``pretix.eu`` for convenience and security reasons.
URL routing
-----------
The hard part about implementing this URL routing in Django is that
``https://pretix.eu/bigorg/awesomecon/`` contains two parameters of nearly arbitrary content
and ``https://tickets.bigorg.com/awesomecon/`` contains only one, and ``https://tickets.awesomecon.org/`` does not contain any.
The only robust way to do this is by having a *separate* URL configuration for each of those three cases.
and ``https://tickets.bigorg.com/awesomecon/`` contains only one. The only robust way to do
this is by having *separate* URL configuration for those two cases. In pretix, we call the
former our ``maindomain`` config and the latter our ``subdomain`` config. For pretix's core
modules we do some magic to avoid duplicate configuration, but for a fairly simple plugin with
only a handful of routes, we recommend just configuring the two URL sets separately.
In pretix, we therefore do not have a global URL configuration, but three, living in the following modules:
- ``pretix.multidomain.maindomain_urlconf``
- ``pretix.multidomain.organizer_domain_urlconf``
- ``pretix.multidomain.event_domain_urlconf``
We provide some helper utilities to work with these to avoid duplicate configuration of the individual URLs.
The file ``urls.py`` inside your plugin package will be loaded and scanned for URL configuration
automatically and should be provided by any plugin that provides any view.
However, unlike plain Django, we look not only for a ``urlpatterns`` attribute on the module but support other
attributes like ``event_patterns`` and ``organizer_patterns`` as well.
For example, for a simple plugin that adds one URL to the backend and one event-level URL to the frontend, you can
create the following configuration in your ``urls.py``::
A very basic example that provides one view in the admin panel and one view in the frontend
could look like this::
from django.urls import re_path
@@ -60,7 +52,7 @@ create the following configuration in your ``urls.py``::
As you can see, the view in the frontend is not included in the standard Django ``urlpatterns``
setting but in a separate list with the name ``event_patterns``. This will automatically prepend
the appropriate parameters to the regex (e.g. the event or the event and the organizer, depending
on the called domain). For organizer-level views, ``organizer_patterns`` works the same way.
on the called domain).
If you only provide URLs in the admin area, you do not need to provide a ``event_patterns`` attribute.
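As a rough starting point, such a ``urls.py`` could be structured like the following sketch; the view classes, URL
names and regexes here are purely illustrative::

    from django.urls import re_path

    from . import views  # hypothetical views module of your plugin

    # Backend views live in the regular urlpatterns and are only served on the main domain.
    urlpatterns = [
        re_path(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/$',
                views.SettingsView.as_view(), name='settings'),
    ]

    # Event-level frontend views; the organizer/event prefixes are prepended automatically
    # depending on the domain the request comes in on.
    event_patterns = [
        re_path(r'^mypluginname/$', views.FrontendView.as_view(), name='frontend'),
    ]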
@@ -79,16 +71,11 @@ is a python method that emulates a behavior similar to ``reverse``:
.. autofunction:: pretix.multidomain.urlreverse.eventreverse
If you need to communicate the URL externally, you can use a different method to ensure that it is always an absolute URL:
.. autofunction:: pretix.multidomain.urlreverse.build_absolute_uri
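A hedged sketch of how these helpers might be called from plugin code; ``presale:event.index`` is the shop front page,
adapt the URL name to your needs::

    from pretix.multidomain.urlreverse import build_absolute_uri, eventreverse

    def shop_links(event):
        # Relative URL, resolved against the URL configuration matching the domain
        # this event is served on:
        relative = eventreverse(event, "presale:event.index")
        # Absolute URL, e.g. for links included in emails:
        absolute = build_absolute_uri(event, "presale:event.index")
        return relative, absolute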
In addition, there is a template tag that works similar to ``url`` but takes an event or organizer object
as its first argument and can be used like this::
{% load eventurl %}
<a href="{% eventurl request.event "presale:event.checkout" step="payment" %}">Pay</a>
<a href="{% abseventurl request.event "presale:event.checkout" step="payment" %}">Pay</a>
Implementation details

View File

@@ -12,4 +12,3 @@ Developer documentation
api/index
structure
translation/index
nfc/index

View File

@@ -1,15 +0,0 @@
NFC media
=========
pretix supports using NFC chips as "reusable media", for example to store gift cards or tickets.
Most of this implementation currently lives in our proprietary app pretixPOS, but it might in the future also become part of our open-source pretixSCAN solution.
Either way, we want this to be an open ecosystem and therefore document the exact mechanisms in use on the following pages.
We support multiple implementations of NFC media, each documented on its own page:
.. toctree::
:maxdepth: 2
uid
mf0aes

View File

@@ -1,113 +0,0 @@
Mifare Ultralight AES
=====================
We offer an implementation that provides a higher security level than the UID-based approach and uses the `Mifare Ultralight AES`_ chip sold by NXP.
We believe the security model of this approach is adequate for the situations in which it will usually be used, and we'll outline known risks below.
If you want to dive deeper into the properties of the Mifare Ultralight AES chip, we recommend reading the `data sheet`_.
Random UIDs
-----------
Mifare Ultralight AES supports a feature that returns a randomized UID every time a non-authenticated user tries to
read the UID. This has a strong privacy benefit, since no unauthorized entity can use the NFC chips to track users.
On the other hand, this reduces interoperability of the system. For example, this prevents you from using the same NFC
chips for a different purpose where you only need the UID. This will also prevent your guests from reading their UID
themselves with their phones, which might be useful e.g. in debugging situations.
Since there's no one-size-fits-all choice here, you can enable or disable this feature in the pretix organizer
settings. If you change it, the change will apply to all newly encoded chips after the change.
Key management
--------------
For every organizer, the server will generate a "key set", which consists of a publicly known ID (random 32-bit integer) and two 16-byte keys ("diversification key" and "UID key").
Using our :ref:`Device authentication mechanism <rest-deviceauth>`, an authorized device can submit a locally generated RSA public key to the server.
This key can no longer be changed on the server once it is set, thus protecting against the attack scenario of a leaked device API token.
The server will then include key sets in the response to ``/api/v1/device/info``, encrypted with the device's RSA key.
This includes all key sets generated for the organizer the device belongs to, as well as all keys of organizers that have granted sufficient access to this organizer.
The device will decrypt the key sets using its RSA key and store the key sets locally.
.. warning:: The device **will** have access to the raw key sets. Therefore, there is a risk of leaked master keys if an
authorized device is stolen or abused. Our implementation in pretixPOS attempts to make this very hard on
modern, non-rooted Android devices by keeping them encrypted with the RSA key and only storing the RSA key
in the hardware-backed keystore of the device. A sufficiently motivated attacker, however, will likely still
be able to extract the keys from a stolen device.
Encoding a chip
---------------
When a new chip is encoded, the following steps will be taken:
- The UID of the chip is retrieved.
- A chip-specific key is generated using the mechanism documented in `AN10922`_, using the "diversification key" from the
organizer's key set as the CMAC key and the diversification input concatenated in the form of ``0x01 + UID + APPID + SYSTEMID``
with the following values (see the sketch after this list):
- The UID of the chip as ``UID``
- ``"eu.pretix"`` (``0x65 0x75 0x2e 0x70 0x72 0x65 0x74 0x69 0x78``) as ``APPID``
- The ``public_id`` from the organizer's key set as a 4-byte big-endian value as ``SYSTEMID``
- The chip-specific key is written to the chip as the "data protection key" (config pages 0x30 to 0x33)
- The UID key from the organizer's key set is written to the chip as the "UID retrieval key" (config pages 0x34 to 0x37)
- The config page 0x29 is set like this:
- ``RID_ACT`` (random UID) to ``1`` or ``0`` based on the organizer's configuration
- ``SEC_MSG_ACT`` (secure messaging) to ``1``
- ``AUTH0`` (first page that needs authentication) to 0x04 (first non-UID page)
- The config page 0x2A is set like this:
- ``PROT`` to ``0`` (only write access restricted, not read access)
- ``AUTHLIM`` to ``256`` (maximum number of wrong authentications before "self-destruction")
- Everything else to its default value (no lock bits are set)
- The ``public_id`` of the key set will be written to page 0x04 as a big-endian value
- The UID of the chip will be registered as a reusable medium on the server.
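The key diversification step could be sketched with the ``cryptography`` library roughly as follows; treat this as an
illustration of the input layout rather than a reference implementation (the authoritative padding rules are specified
in `AN10922`_)::

    from cryptography.hazmat.primitives.ciphers import algorithms
    from cryptography.hazmat.primitives.cmac import CMAC

    def derive_chip_key(diversification_key: bytes, uid: bytes, public_id: int) -> bytes:
        # Diversification input: 0x01 + UID + APPID + SYSTEMID
        message = b"\x01" + uid + b"eu.pretix" + public_id.to_bytes(4, "big")
        c = CMAC(algorithms.AES(diversification_key))
        c.update(message)
        return c.finalize()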
.. warning:: During encoding, the chip-specific key and the UID key are transmitted in plain text over the air. The
security model therefore relies on the encoding of chips being performed in a trusted physical environment
to prevent a nearby attacker from sniffing the keys with a strong antenna.
.. note:: If an attacker tries to authenticate with the chip 256 times using the wrong key, the chip will become
unusable. A chip may also become unusable if it is detached from the reader in the middle of the encoding
process (even though we've tried to implement it in a way that makes this unlikely).
Usage
-----
When a chip is presented to the NFC reader, the following steps will be taken:
- Command ``GET_VERSION`` is used to determine if it is a Mifare Ultralight AES chip (if not, abort).
- Page 0x04 is read. If it is all zeroes, the chip is considered un-encoded (abort). If it contains a value that
corresponds to the ``public_id`` of a known key set, this key set is used for all further operations. If it contains
a different value, we consider this chip to belong to a different organizer or not to a pretix system at all (abort).
- An authentication with the chip using the UID key is performed.
- The UID of the chip will be read.
- The chip-specific key will be derived using the mechanism described above in the encoding step.
- An authentication with the chip using the chip-specific key is performed. If this is fully successful, this step
proves that the chip knows the same chip-specific key as we do and is therefore an authentic chip encoded by us and
we can trust its UID value.
- The UID is transmitted to the server to fetch the correct medium.
During these steps, the keys are never transmitted in plain text and can thus not be sniffed by a nearby attacker
with a strong antenna.
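As a very rough, hedged sketch of this flow (every NFC I/O call below is a hypothetical placeholder, not a real reader
API; ``derive_chip_key`` refers to the diversification sketch in the previous section)::

    def identify_medium(tag, key_sets, server):
        if not tag.is_mifare_ultralight_aes():       # hypothetical GET_VERSION check
            return None                              # not an MF0AES chip
        public_id = int.from_bytes(tag.read_page(0x04), "big")
        if public_id == 0:
            return None                              # chip not encoded yet
        key_set = key_sets.get(public_id)
        if key_set is None:
            return None                              # different organizer or foreign system
        tag.authenticate(key_set.uid_key)            # allows reading the real UID
        uid = tag.read_uid()
        chip_key = derive_chip_key(key_set.diversification_key, uid, public_id)
        tag.authenticate(chip_key)                   # proves the chip is authentic
        return server.lookup_medium(type="nfc_mf0aes", identifier=uid.hex())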
.. _Mifare Ultralight AES: https://www.nxp.com/products/rfid-nfc/mifare-hf/mifare-ultralight/mifare-ultralight-aes-enhanced-security-for-limited-use-contactless-applications:MF0AESx20
.. _data sheet: https://www.nxp.com/docs/en/data-sheet/MF0AES(H)20.pdf
.. _AN10922: https://www.nxp.com/docs/en/application-note/AN10922.pdf

View File

@@ -1,10 +0,0 @@
UID-based
=========
With UID-based NFC, only the unique ID (UID) of the NFC chip is used for identification purposes.
This can be used with virtually all NFC chips that provide compatibility with the NFC reader in use, typically at least all chips that comply with ISO/IEC 14443-3A.
We make only one restriction: The UID may not start with ``08``, since that usually signifies a randomized UID that changes on every read (which would not be very useful).
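A trivial, hedged sketch of this restriction as a check on the raw UID bytes::

    def is_usable_uid(uid: bytes) -> bool:
        # Randomized UIDs are signalled by a first byte of 0x08 and are rejected.
        return len(uid) > 0 and uid[0] != 0x08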
.. warning:: The UID-based approach provides only a very low level of security. It is easy to clone a chip with the same
UID and impersonate someone else.

View File

@@ -96,20 +96,6 @@ http://localhost:8000/control/ for the admin view.
port (for example because you develop on `pretixdroid`_), you can check
`Django's documentation`_ for more options.
When running the local development webserver, ensure Celery is not configured
in ``pretix.cfg``, i.e. you should remove anything such as::
[celery]
backend=redis://redis:6379/2
broker=redis://redis:6379/2
If you choose to use Celery for development, you must also start a Celery worker
process::
celery -A pretix.celery_app worker -l info
However, beware that code changes will not auto-reload within Celery.
.. _`checksandtests`:
Code checks and unit tests

Binary file not shown.


View File

@@ -25,27 +25,27 @@ partition "data-based check" {
else
-down->[yes] "Is one or more block set on the ticket?"
--> if "" then
-right->[yes] "Return error BLOCKED"
-right->[no] "Return error BLOCKED"
else
-down->[no] "Is the order in status PENDING and not yet approved?"
-down->[yes] "If this is not an exit, is the valid_from/valid_until\nconstraint on the ticket fulfilled?"
--> if "" then
-right->[yes] "Return error UNAPPROVED"
-right->[no] "Return error INVALID_TIME"
else
-down->[no] "If this is not an exit, is the valid_from/valid_until\nconstraint on the ticket fulfilled?"
-down->[yes] "Is the product part of the check-in list?"
--> if "" then
-right->[no] "Return error INVALID_TIME"
-right->[no] "Return error PRODUCT"
else
-down->[yes] "Is the product part of the check-in list?"
-down->[yes] "Is the subevent part of the check-in list?"
--> if "" then
-right->[no] "Return error PRODUCT"
-right->[no] "Return error INVALID"
note bottom: TODO\ninconsistent\nwith online\ncheck
else
-down->[yes] "Is the subevent part of the check-in list?"
-down->[yes] "Is the order in status PAID?"
--> if "" then
-right->[no] "Return error INVALID"
note bottom: TODO\ninconsistent\nwith online\ncheck
else
-down->[yes] "Is the order in status PAID?"
-right->[no] "Is Order.require_approval set?"
--> if "" then
-->[yes] "Return error UNPAID "
else
-right->[no] "Is Order.valid_if_pending set?"
--> if "" then
-->[yes] "Is this an entry or exit?"
@@ -62,9 +62,9 @@ partition "data-based check" {
endif
endif
endif
else
-down->[yes] "Is this an entry or exit?"
endif
else
-down->[yes] "Is this an entry or exit?"
endif
endif
endif

Binary file not shown.


View File

@@ -42,25 +42,23 @@ endif
else
-down->[yes || force] "Is one or more block set on the ticket?"
--> if "" then
-right->[yes && !force] "Return error BLOCKED"
-right->[no && !force] "Return error BLOCKED"
else
-down->[no || force] "Is the order in status PENDING and not yet approved?"
-down->[yes || force] "If this is not an exit, is the valid_from/valid_until\nconstraint on the ticket fulfilled?"
--> if "" then
-right->[yes && !force] "Return error UNAPPROVED"
-right->[no && !force] "Return error INVALID_TIME"
else
-down->[no || force] "If this is not an exit, is the valid_from/valid_until\nconstraint on the ticket fulfilled?"
-down->[yes || force] "Is the product part of the check-in list?"
--> if "" then
-right->[no && !force] "Return error INVALID_TIME"
-right->[no && !force] "Return error PRODUCT"
else
-down->[yes || force] "Is the product part of the check-in list?"
-down->[yes || force] "Is the subevent part of the check-in list?"
--> if "" then
-right->[no && !force] "Return error PRODUCT"
-right->[no && !force] "Return error PRODUCT "
else
-down->[yes || force] "Is the subevent part of the check-in list?"
-down->[yes] "Is the order in status PAID?"
--> if "" then
-right->[no && !force] "Return error PRODUCT "
else
-down->[yes] "Is the order in status PAID?"
-right->[no && !force] "Is Order.require_approval set?"
--> if "" then
-->[no] "Is Order.valid_if_pending set?"
--> if "" then
@@ -79,8 +77,10 @@ else
endif
endif
else
-down->[yes || force] "Is this an entry or exit?\nIs the upload forced?"
-->[yes] "Return error UNPAID "
endif
else
-down->[yes || force] "Is this an entry or exit?\nIs the upload forced?"
endif
endif
endif

View File

@@ -32,7 +32,6 @@ transactions list of objects Transactions in
├ checksum string Checksum computed from payer, reference, amount and
date
├ payer string Payment source
├ external_id string Unique ID of the payment from an external source
├ reference string Payment reference
├ amount string Payment amount
├ iban string Payment IBAN
@@ -86,7 +85,6 @@ Endpoints
"date": "26.06.2017",
"payer": "John Doe",
"order": null,
"external_id": null,
"iban": "",
"bic": "",
"checksum": "5de03a601644dfa63420dacfd285565f8375a8f2",
@@ -141,7 +139,6 @@ Endpoints
"iban": "",
"bic": "",
"order": null,
"external_id": null,
"checksum": "5de03a601644dfa63420dacfd285565f8375a8f2",
"reference": "GUTSCHRIFT\r\nSAMPLECONF-NAB12 EREF: SAMPLECONF-NAB12\r\nIBAN: DE1234556…",
"state": "nomatch",

View File

@@ -35,7 +35,7 @@ contact_name string Contact person
contact_name_parts object of strings Decomposition of contact name (i.e. given name, family name)
contact_email string Contact person email address (or ``null``)
booth string Booth number (or ``null``). Maximum 100 characters.
locale string Locale for communication with the exhibitor.
locale string Locale for communication with the exhibitor (or ``null``).
access_code string Access code for the exhibitor to access their data or use the lead scanning app (read-only).
allow_lead_scanning boolean Enables lead scanning app
allow_lead_access boolean Enables access to data gathered by the lead scanning app
@@ -230,8 +230,7 @@ Endpoints
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/exhibitors/(id)/vouchers/
Returns a list of all vouchers connected to an exhibitor. The response contains the same data as described in
:ref:`rest-vouchers` as well as for each voucher an additional field ``exhibitor_comment`` that is shown to the exhibitor. It can only
be modified using the ``attach`` API call below.
:ref:`rest-vouchers`.
**Example request**:
@@ -286,7 +285,7 @@ Endpoints
.. http:post:: /api/v1/organizers/(organizer)/events/(event)/exhibitors/(id)/vouchers/attach/
Attaches an **existing** voucher to an exhibitor. You need to send either the ``id`` **or** the ``code`` field of
the voucher. You can call this method multiple times to update the optional ``exhibitor_comment`` field.
the voucher.
**Example request**:
@@ -297,8 +296,7 @@ Endpoints
Accept: application/json, text/javascript
{
"id": 15,
"exhibitor_comment": "Free ticket"
"id": 15
}
**Example request**:
@@ -310,8 +308,7 @@ Endpoints
Accept: application/json, text/javascript
{
"code": "43K6LKM37FBVR2YG",
"exhibitor_comment": "Free ticket"
"code": "43K6LKM37FBVR2YG"
}
**Example response**:
@@ -359,6 +356,7 @@ Endpoints
"contact_email": "johnson@as.example.org",
"booth": "A2",
"locale": "de",
"access_code": "VKHZ2FU8",
"allow_lead_scanning": true,
"allow_lead_access": true,
"allow_voucher_access": true,
@@ -413,7 +411,7 @@ Endpoints
.. sourcecode:: http
PATCH /api/v1/organizers/bigevents/events/sampleconf/exhibitors/1/ HTTP/1.1
PATCH /api/v1/organizers/bigevents/events/sampleconf/digitalcontents/1/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
Content-Type: application/json
@@ -461,36 +459,6 @@ Endpoints
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event/exhibitor does not exist **or** you have no permission to change it.
.. http:post:: /api/v1/organizers/(organizer)/events/(event)/exhibitors/(id)/send_access_code/
Sends an email to the exhibitor with their access code.
**Example request**:
.. sourcecode:: http
POST /api/v1/organizers/bigevents/events/sampleconf/exhibitors/1/send_access_code/ HTTP/1.1
Host: pretix.eu
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 204 No Content
Vary: Accept
:param organizer: The ``slug`` field of the organizer to modify
:param event: The ``slug`` field of the event to modify
:param code: The ``id`` field of the exhibitor to send an email for
:statuscode 200: no error
:statuscode 400: The exhibitor does not have an email address associated
:statuscode 401: Authentication failure
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
:statuscode 404: The requested exhibitor does not exist.
:statuscode 503: The email could not be sent.
.. http:delete:: /api/v1/organizers/(organizer)/events/(event)/exhibitors/(id)/

View File

@@ -24,4 +24,3 @@ If you want to **create** a plugin, please go to the
imported_secrets
webinar
presale-saml
kulturpass

View File

@@ -1,193 +0,0 @@
KulturPass
==========
.. note::
Since the KulturPass is specific to event organizers within Germany, the following page is also only provided in
German. Should you require assistance with the KulturPass and do not speak this language, please feel free to reach
out to support@pretix.eu.
Einführung
----------
Der `KulturPass`_ ist ein Angebot der Bundesregierung für alle, die im laufenden Jahr ihren 18. Geburtstag feiern.
Sie erhalten ab ihrem 18. Geburtstag ein Budget von 200 Euro, das sie für Eintrittskarten, Bücher, CDs, Platten und
vieles andere einsetzen können. So wird Kultur vor Ort noch einfacher erlebbar. Gleichzeitig stärkt das die Nachfrage
bei den Anbietenden.
Da pretix ein Ticketing-System ist, stellen wir ausschließlich einen automatisierten Prozess für den Verkauf von
Eintrittskarten über den KulturPass-Marktplatz bereit.
Registrierung und Einrichtung
-----------------------------
Um als Unternehmen oder Kultureinrichtung Angebote auf dem KulturPass-Marktplatz anbieten zu können, ist zunächst eine
Registrierung und die Einrichtung eines "Shops" sowie der dazugehörigen Angebote notwendig.
1. Registrierung
Registrieren Sie sich zunächst unter https://www.kulturpass.de/anbietende/layer als Anbieter. Im Zuge der
Registrierung beantworten Sie einige Fragen zu Ihrem Unternehmen/Ihrer Kultureinrichtung, hinterlegen Ihre
E-Mail-Adresse und beantworten Fragen zu Ihren Angebotsformen sowie Finanzierung Ihrer Einrichtung.
2. Anlegen eines KulturPass Shops
Nach Ihrer Registrierung müssen Sie der Weitergabe Ihrer Daten an die technische Plattform hinter dem KulturPass,
Mirakl, zustimmen. Hier benennen Sie auch Ihren Shop.
3. Identifizierung mit ELSTER-Zertifikat
Als nächsten Schritt müssen Sie Ihr Unternehmen oder Ihre Einrichtung mit Hilfe eines sog. ELSTER-Zertifikates
identifizieren. Dieses Zertifikat nutzen Sie auch bereits jetzt schon, wenn Sie auf elektronischem Wege mit der
Finanzverwaltung kommunizieren.
4. Ersteinrichtung in pretix
Hinterlegen Sie nun die ID-Nummer Ihres KulturPass Marktplatz-Shops sowie einen API-Key in den
`Einstellungen Ihres Veranstalterkontos`_ (Veranstalter-Konto -> Einstellungen -> KulturPass). Diese Daten müssen
Sie nur einmalig für alle Ihre Veranstaltungen angeben.
Im `KulturPass-Backend`_ finden Sie die benötigten Informationen indem Sie auf das Benutzer-Symbol in der oberen,
rechten Ecke klicken, "Profil" und dann "API Schlüssel" auswählen bzw. indem Sie auf "Einstellungen" in der
Navigation links und dann "Shop" auswählen.
.. note::
Zu jedem Zeitpunkt kann nur ein Hintergrundsystem mit dem KulturPass-System verbunden sein. Werden
unterschiedliche Systeme oder gar mehrere pretix-Veranstalterkonten mit dem gleichen KulturPass-System verbunden,
können keine Bestellungen mehr verarbeitet werden und Angebote nicht automatisiert an den KulturPass-Marktplatz
übermittelt werden. Eingehende Bestellungen von Jugendlichen werden in diesem Fall automatisch abgelehnt, da diese
nicht eindeutig zugeordnet werden können. Ebenso überschreibt die Bereitstellung der Angebote eines Systems die
Angebote eines anderen Systems.
Wenn Sie mehrere Systeme haben, die den KulturPass-Marktplatz bedienen sollen, wenden Sie sich bitte an den
KulturPass-Support, um sich einen weiteren Shop einrichten zu lassen.
5. Aktivierung der KulturPass-Erweiterungen
Alle Veranstaltungen, die Sie über den KulturPass anbieten möchten, benötigen die `KulturPass-Erweiterung`_.
Aktivieren Sie diese bitte in jeder relevanten Veranstaltung über Einstellungen -> Erweiterungen -> Tab
"Integrationen" -> KulturPass.
6. Konfiguration der Artikel
Nachdem die KulturPass-Erweiterung aktiviert wurde, müssen Sie sich entscheiden, welche Produkte Sie über den
KulturPass-Marktplatz anbieten möchten. In der Bearbeitungs-Ansicht des jeweiligen Produktes finden Sie hierzu im
Tab "Zusätzliche Einstellungen" eine Checkbox "Das Produkt kann mit dem KulturPass erworben werden".
.. note::
Die Eigenschaft, dass ein Produkt durch den KulturPass-Marktplatz erworben werden kann, kann für beliebig viele
Produkte aktiviert werden. Auf Grund der Funktionsweise des KulturPasses sollten Sie jedoch gerade bei vielen
Artikeln mit unterschiedlich hohen Preisen darauf achten, dass die Preisspanne nicht zu hoch ausfällt.
Aktivieren Sie die Option für drei Produkte für 1, 10 und 100 Euro, so wird Ihr Angebot im KulturPass-Marktplatz
für 100 Euro gelistet werden. Dies bedeutet im Umkehrschluss auch, dass das KulturPass-Guthaben eines Jugendlichen
auch mindestens 100 Euro betragen muss, damit er Ihr Angebot in Anspruch nehmen kann - auch wenn die betroffene
Person lediglich das 1 Euro-Angebot wahrnehmen möchte. Erst mit dem 100 Euro KulturPass-Einlösecode wählt die
kaufende Person in Ihrem pretix-Shop aus, welches Produkt erworben werden soll. Ein Restguthaben wird nach dem Kauf
automatisch zurückerstattet und dem KulturPass-Konto wieder gutgeschrieben.
7. Konfiguration des Marktplatz-Eintrages
Je nachdem, ob es sich bei Ihrer Veranstaltung um eine Einzelveranstaltung oder eine Veranstaltungsreihe handelt,
müssen Sie die folgende Einstellung einmalig oder pro Veranstaltungstermin vornehmen.
Einzelveranstaltungen konfigurieren Sie über den Menüpunkt "KulturPass" in den Einstellungen Ihrer Veranstaltung;
Veranstaltungsreihen beim Anlegen oder Editieren eines jeden einzelnen Termins am Ende der Seite.
Um eine Veranstaltung oder einen Veranstaltungstermin im KulturPass-Marktplatz anzubieten, aktivieren Sie zunächst
die Option "Diese Veranstaltung via KulturPass anbieten". Geben Sie im folgenden die benötigten Informationen an.
Bitte beachten Sie, dass Sie bei den Angaben präzise Titel und Beschreibungen verwenden, da der KulturPass-
Marktplatz ausschließlich die Informationen aus diesem Bereich verwendet. Etwaige andere Informationen, die Sie
bspw. in die "Text auf Startseite"-Felder eingegeben haben, erreichen das KulturPass-System nicht.
.. note::
Gerade bei Veranstaltungsreihen nutzen viele pretix-Veranstalter gerne verkürzte Termin-Namen. Ein Schwimmbad würde
beispielsweise seine Veranstaltungsreihe "Freibad Musterstadt" und die einzelnen Termine nur "Schwimmen" nennen.
Während dies im pretix-Shop in einem gemeinsamen Kontext wunderbar funktioniert, würde eine Veranstaltung mit dem
Titel "Schwimmen" im KulturPass-Marktplatz Informationen vermissen lassen. Wählen Sie daher für das Eingabefeld
"Veranstaltungstitel" in der KulturPass-Konfiguration einen sprechenden Wert.
8. Übermittlung der Angebote
Sobald Sie Ihre ersten Veranstaltungen konfiguriert und live geschaltet haben, übermittelt pretix automatisch in
regelmäßigen Abständen alle von Ihnen angebotenen Veranstaltungen an das KulturPass System (Mirakl). Bitte beachten
Sie jedoch, dass der Import der Produkte und Angebote einige Zeit in Anspruch nehmen kann. Zum einen müssen
Angebote initial händisch von den Betreibern der KulturPass-Plattform freigegeben werden, zum anderen muss auch eine
Synchronisation zwischen dem Hintergrundsystem und der KulturPass-App erfolgen. Auf die Dauer dieser Prozesse hat
pretix keinen Einfluss.
9. Freischalten des Marktplatz-Shops
Nachdem pretix erstmalig Angebote an das KulturPass-System übermittelt hat, müssen Sie Ihren KulturPass-Shop
einmalig freischalten. Loggen Sie sich hierzu in das `KulturPass-Backend`_ ein.
Verwalten von KulturPass-Bestellungen
-------------------------------------
Durch die Nutzung der pretix-Integration mit dem KulturPass-System müssen Sie sich - bis auf die Kennzeichnung von
Produkten, die per KulturPass erworben werden dürfen, sowie die Bereitstellung von Veranstaltungs-Informationen für den
KulturPass-Marktplatz - um nichts kümmern: pretix übermittelt automatisch Ihre Veranstaltungen, wickelt die Einlösung
der Tickets ab und führt die Abrechnung mit dem Hintergrund-System durch.
Für Ihre Kunden verhält sich der KulturPass wie eine Zahlungsmethode im Bestellprozess und wird dort neben Ihren
anderen Zahlungsmethoden mit angeboten.
Die Gelder für mit dem KulturPass bezahlte Tickets erhalten Sie in Form einer Sammel-Überweisung von der Stiftung
Digitale Chancen auf das von Ihnen beim KulturPass-Onboarding angegebene Bankkonto.
In Ihrem `KulturPass-Backend`_ können Sie über den Menüpunkt "Buchhaltung" Ihre bereits erfolgten und kommenden
Auszahlungen betrachten.
.. note::
Es ist von äußerster Wichtigkeit, dass Sie weder die eingehenden Bestellungen noch die Produkte und Angebote im
KulturPass-Backend händisch bearbeiten - auch wenn dies möglich wäre.
Bei händischen Änderungen riskieren Sie, dass die Datenbasis zwischen pretix und dem KulturPass-System divergiert
und es zu fehlerhaften Buchungen kommt. Wann immer möglich, sollten Sie Korrekturbuchungen und Änderungen
ausschließlich über pretix vornehmen.
Sollte eine händische Änderung/Korrektur notwendig werden, wenden Sie sich bitte an den pretix-Support, damit wir
die Auswirkungen evaluieren und vorab mit Ihnen besprechen können!
Erstattungen für Stornos und Absagen können Sie wie gehabt über das pretix-Backend vornehmen. Der jeweilige Betrag wird
dem KulturPass-Konto dann automatisch gutgeschrieben.
Da nach Ausgabe eines KulturPass Einlöse-Codes dieser vom Kunden jederzeit oder vom System bei
Nicht-(Komplett)Einlösung binnen 48 Stunden storniert werden kann, kann das im KulturPass-Backend angezeigte,
auszuzahlende Guthaben fluktuieren. Da Auszahlungen in der Regel frühestens 48 Stunden nach der Aufgabe einer
KulturPass-Bestellung erfolgen, sollte Ihr Guthaben normalerweise nicht ins Negative gehen.
Ablauf für Kunden
-----------------
Ihre Kunden erhalten - nachdem sie sich ein eigenes Konto in der KulturPass-App angelegt und sich mit ihrem
elektronischen Personalausweis identifiziert haben - ein Guthaben von 200 Euro, welches für Leistungen aus dem
KulturPass-Marktplatz eingelöst werden kann.
Im Falle von Veranstaltungen, die per pretix verkauft werden, wählt der Kunde ein Angebot aus und erhält im folgenden
binnen kurzer Zeit (ca. 10-20 Minuten) einen Code und einen Link, um diesen einzulösen. Der Link bringt den Kunden direkt auf die Seite der
betreffenden pretix-Veranstaltung. Hier wird der Kunde darauf hingewiesen, für welche Produkte der Code genutzt werden
kann.
Im Bezahlschritt des Verkaufsprozesses wird dem Kunden vorgeschlagen, seinen KulturPass Einlösecode nun zu nutzen, um
die gewünschte Leistung zu erhalten.
Wurde ein Artikel gewählt, welcher günstiger als der Wert des Einlösecodes war, wird das Restguthaben automatisch auf
das KulturPass-Konto erstattet.
Wurden hingegen mehrere Artikel in den Warenkorb gelegt, so kann die Differenz mit einem anderen, regulären
Zahlungsmittel erfolgen.
Einlösecodes, die vom Kunden nicht binnen 48 Stunden eingelöst werden, werden automatisch storniert und dem
KulturPass-Konto wieder gutgeschrieben. Dieser Mechanismus greift auch, wenn eine Veranstaltung mittlerweile
ausverkauft ist und daher der Einlösecode nicht mehr Nutzbar ist.
Unterstützung
-------------
Weitergehende Informationen zum KulturPass finden Sie auch auf der `Webseite des KulturPasses`_, sowie im
`KulturPass Serviceportal`_.
.. _KulturPass: https://www.kulturpass.de/
.. _Einstellungen Ihres Veranstalterkontos: https://pretix.eu/control/organizer/-/settings/kulturpass
.. _KulturPass-Erweiterung: https://pretix.eu/control/event/-/-/settings/plugins#tab-0-2-open
.. _KulturPass-Backend: https://kulturpass-de.mirakl.net/
.. _Webseite des KulturPasses: https://www.kulturpass.de/
.. _KulturPass Serviceportal: https://service.kulturpass.de/help/

View File

@@ -194,23 +194,17 @@ A complete record could look like this::
v=spf1 a mx include:_spf.pretix.eu ~all
Make sure to read up on the `SPF specification`_.
Make sure to read up on the `SPF specification`_. If you want to authenticate your emails with DKIM, set up a DNS TXT
record for the subdomain ``pretix._domainkey`` with the following contents::
If you want to authenticate your emails with `DKIM`_, set up a ``CNAME`` record for the subdomain ``pretix._domainkey``
pointing to ``dkim.pretix.eu``::
pretix._domainkey.mydomain.com. CNAME dkim.pretix.eu.
v=DKIM1; k=rsa; p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDXrDk6lwOWX00e2MbiiJac6huI+gnzLf9N4G1FnBv3PXq8fz3i2q1szH72OF5mAlKm3zXO4cl/uxx+lfidS1ERbX6Bn9BRstBTQUKWC4JFj8Yk9+fwT7LWehDURazLdTzfsIjJFudLLvxtOKSaOCtMhbPX05DIhziaqVCBqgz/NQIDAQAB
Then, please contact support@pretix.eu and we will enable DKIM for your domain on our mail servers.
For senders with larger volumes, Google Mail also requires you to have a `DMARC`_ policy (that may however be ``p=none``).
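As a quick, informal check before contacting support, you could verify both records yourself. The following is a
minimal sketch using the ``dnspython`` package; ``mydomain.com`` is a placeholder for your own sending domain::
import dns.resolver
domain = "mydomain.com"  # placeholder, replace with your sending domain
# SPF: the domain itself should have a TXT record starting with "v=spf1".
for rdata in dns.resolver.resolve(domain, "TXT"):
    txt = b"".join(rdata.strings).decode()
    if txt.startswith("v=spf1"):
        print("SPF record found:", txt)
# DKIM: pretix._domainkey should be a CNAME pointing to dkim.pretix.eu.
answer = dns.resolver.resolve("pretix._domainkey." + domain, "CNAME")
print("DKIM CNAME target:", answer[0].target)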
.. note:: Many SMTP servers impose rate limits on the sent emails, such as a maximum number of emails sent per hour.
These SMTP servers are often not suitable for use with pretix if you want to send an email to many
hundreds or thousands of ticket buyers. Depending on how the rate limit is implemented, emails might be lost
in this case, as pretix only retries email delivery for a certain time period.
.. _DKIM: https://en.wikipedia.org/wiki/DomainKeys_Identified_Mail
.. _Sender Policy Framework: https://en.wikipedia.org/wiki/Sender_Policy_Framework
.. _SPF specification: http://www.open-spf.org/SPF_Record_Syntax
.. _DMARC: https://en.wikipedia.org/wiki/DMARC

Binary file not shown (before: 3.0 KiB)

View File

@@ -124,24 +124,6 @@ If you want to disable voucher input in the widget, you can pass the ``disable-v
<pretix-widget event="https://pretix.eu/demo/democon/" disable-vouchers></pretix-widget>
Enabling the button-style single item select
--------------------------------------------
By default, the widget uses a checkbox to select items that can only be bought in quantities of one. If you want that
selection to use the same button style as in the pretix shop, you can use the ``single-item-select`` attribute::
<pretix-widget event="https://pretix.eu/demo/democon/" single-item-select="button"></pretix-widget>
.. image:: img/widget_checkbox_button.png
:align: center
:class: screenshot
.. note::
For compatibility with existing widget installations, the default value for ``single-item-select``
is ``checkbox``. This might change in the future, so make sure to set the attribute to
``single-item-select="checkbox"`` explicitly if you need it.
Filtering products
------------------
@@ -196,10 +178,6 @@ settings. For example, if you set up a meta data property called "Promoted" that
<pretix-widget event="https://pretix.eu/demo/series/" list-type="list" filter="attr[Promoted]=Yes"></pretix-widget>
If you have enabled public filters in your meta data attribute configuration, a filter form is shown. To disable it, use::
<pretix-widget event="https://pretix.eu/demo/democon/" disable-filters></pretix-widget>
pretix Button
-------------
@@ -315,16 +293,6 @@ with that information::
</pretix-widget>
This works for the pretix Button as well, if you also specify a product.
As data-attributes are reactive, you can change them with JavaScript as well. Please note that once the user has
started the checkout process, we do not update the data-attributes in the existing checkout process, so as not to
interrupt the checkout UX.
When updating data-attributes through JavaScript, make sure you do not hold a stale reference to the widget's HTML node.
When the widget is created, the original HTML node may be replaced. So make sure to always obtain a fresh reference,
like so:
``document.querySelectorAll("pretix-widget, pretix-button, .pretix-widget-wrapper")``
Currently, the following attributes are understood by pretix itself:
* ``data-email`` will pre-fill the order email field as well as the attendee email field (if enabled).
@@ -361,72 +329,125 @@ Hosted or pretix Enterprise are active, you can pass the following fields:
* If you use the campaigns plugin, you can pass a campaign ID as a value to ``data-campaign``. This way, all orders
made through this widget will be counted towards this campaign.
* If you use the tracking plugin, you can enable cross-domain tracking. Please note: if you run your pretix shop on a
subdomain of your main tracking domain, you do not need cross-domain tracking, as tracking automatically works
across subdomains. See :ref:`custom_domain` for how to set this up.
* If you use the tracking plugin, you can enable cross-domain tracking. To do so, you need to initialize the
pretix widget manually. Use the HTML code to embed the widget and add one of the following code snippets. Make sure to
replace all occurrences of <MEASUREMENT_ID> with your Google Analytics MEASUREMENT_ID (UA-XXXXXXX-X or G-XXXXXXXX).
Please make sure to add the embedding website to your `Referral exclusions
Please also make sure to add the embedding website to your `Referral exclusions
<https://support.google.com/analytics/answer/2795830>`_ in your Google Analytics settings.
Add Google Analytics as you normally would with all your `window.dataLayer` and `gtag` configurations. Also add the
widget code normally. Then you have two options:
If you use Google Analytics 4 (GA4 G-XXXXXXXX)::
* Block loading of the widget for at most 2 seconds or until Google's client and session ID are loaded. This method
uses `window.pretixWidgetCallback`. Note that if loading takes longer than 2 seconds, the client and session ID
are never passed to the widget. Make sure to replace all occurrences of <MEASUREMENT_ID> with your Google
Analytics MEASUREMENT_ID (G-XXXXXXXX)::
<script async src="https://www.googletagmanager.com/gtag/js?id=<MEASUREMENT_ID>"></script>
<script type="text/javascript">
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', '<MEASUREMENT_ID>');
<script type="text/javascript">
window.pretixWidgetCallback = function () {
window.PretixWidget.build_widgets = false;
window.addEventListener('load', function() { // Wait for GA to be loaded
if (!window['google_tag_manager']) {
window.PretixWidget.buildWidgets();
return;
}
window.pretixWidgetCallback = function () {
window.PretixWidget.build_widgets = false;
window.addEventListener('load', function() { // Wait for GA to be loaded
if (!window['google_tag_manager']) {
window.PretixWidget.buildWidgets();
return;
}
var clientId;
var sessionId;
var loadingTimeout;
function build() {
// use loadingTimeout to make sure build() is only called once
if (!loadingTimeout) return;
window.clearTimeout(loadingTimeout);
loadingTimeout = null;
if (clientId) window.PretixWidget.widget_data["tracking-ga-id"] = clientId;
if (sessionId) window.PretixWidget.widget_data["tracking-ga-sessid"] = sessionId;
window.PretixWidget.buildWidgets();
};
// make sure to build pretix-widgets if gtag fails to load either client_id or session_id
loadingTimeout = window.setTimeout(build, 2000);
var clientId;
var sessionId;
var loadingTimeout;
function build() {
// use loadingTimeout to make sure build() is only called once
if (!loadingTimeout) return;
window.clearTimeout(loadingTimeout);
loadingTimeout = null;
if (clientId) window.PretixWidget.widget_data["tracking-ga-id"] = clientId;
if (sessionId) window.PretixWidget.widget_data["tracking-ga-sessid"] = sessionId;
window.PretixWidget.buildWidgets();
};
// make sure to build pretix-widgets if gtag fails to load either client_id or session_id
loadingTimeout = window.setTimeout(build, 2000);
gtag('get', '<MEASUREMENT_ID>', 'client_id', function(id) {
clientId = id;
if (sessionId !== undefined) build();
});
gtag('get', '<MEASUREMENT_ID>', 'session_id', function(id) {
sessionId = id;
if (clientId !== undefined) build();
});
});
};
</script>
* Or asynchronously set the data-attributes: the widgets are shown immediately, but once the user has started checkout,
the data-attributes are no longer updated. Make sure to replace all occurrences of <MEASUREMENT_ID> with your Google
Analytics MEASUREMENT_ID (G-XXXXXXXX)::
<script type="text/javascript">
window.addEventListener('load', function() {
gtag('get', '<MEASUREMENT_ID>', 'client_id', function(id) {
const widgets = document.querySelectorAll("pretix-widget, pretix-button, .pretix-widget-wrapper");
widgets.forEach(widget => widget.setAttribute("data-tracking-ga-id", id))
clientId = id;
if (sessionId !== undefined) build();
});
gtag('get', '<MEASUREMENT_ID>', 'session_id', function(id) {
const widgets = document.querySelectorAll("pretix-widget, pretix-button, .pretix-widget-wrapper");
widgets.forEach(widget => widget.setAttribute("data-tracking-ga-sessid", id))
sessionId = id;
if (clientId !== undefined) build();
});
});
</script>
};
</script>
If you use Universal Analytics with ``gtag.js`` (UA-XXXXXXX-X)::
<script async src="https://www.googletagmanager.com/gtag/js?id=<MEASUREMENT_ID>"></script>
<script type="text/javascript">
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', '<MEASUREMENT_ID>');
window.pretixWidgetCallback = function () {
window.PretixWidget.build_widgets = false;
window.addEventListener('load', function() { // Wait for GA to be loaded
if (!window['google_tag_manager']) {
window.PretixWidget.buildWidgets();
return;
}
// make sure to build pretix-widgets if gtag fails to load client_id
var loadingTimeout = window.setTimeout(function() {
loadingTimeout = null;
window.PretixWidget.buildWidgets();
}, 1000);
gtag('get', '<MEASUREMENT_ID>', 'client_id', function(id) {
if (loadingTimeout) {
window.clearTimeout(loadingTimeout);
window.PretixWidget.widget_data["tracking-ga-id"] = id;
window.PretixWidget.buildWidgets();
}
});
});
};
</script>
If you use ``analytics.js`` (Universal Analytics)::
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
ga('create', '<MEASUREMENT_ID>', 'auto');
ga('send', 'pageview');
window.pretixWidgetCallback = function () {
window.PretixWidget.build_widgets = false;
window.addEventListener('load', function() { // Wait for GA to be loaded
if (!window['ga'] || !ga.create) {
// Tracking is probably blocked
window.PretixWidget.buildWidgets()
return;
}
var loadingTimeout = window.setTimeout(function() {
loadingTimeout = null;
window.PretixWidget.buildWidgets();
}, 1000);
ga(function(tracker) {
if (loadingTimeout) {
window.clearTimeout(loadingTimeout);
window.PretixWidget.widget_data["tracking-ga-id"] = tracker.get('clientId');
window.PretixWidget.buildWidgets();
}
});
});
};
</script>
.. _Let's Encrypt: https://letsencrypt.org/

View File

@@ -145,7 +145,7 @@ to get a better plain text representation of your text. Note however, that for
security reasons you can only use the following HTML elements::
a, abbr, acronym, b, br, code, div, em, h1, h2,
h3, h4, h5, h6, hr, i, li, ol, p, pre, s, span, strong,
h3, h4, h5, h6, hr, i, li, ol, p, pre, span, strong,
table, tbody, td, thead, tr, ul
Additionally, only the following attributes are allowed on them::

View File

@@ -36,7 +36,7 @@ dependencies = [
"css-inline==0.8.*",
"defusedcsv>=1.1.0",
"dj-static",
"Django==4.2.*",
"Django==4.1.*",
"django-bootstrap3==23.1.*",
"django-compressor==4.3.*",
"django-countries==7.5.*",
@@ -59,7 +59,7 @@ dependencies = [
"dnspython==2.3.*",
"drf_ujson2==1.7.*",
"geoip2==4.*",
"importlib_metadata==7.*", # Polyfill, we can probably drop this once we require Python 3.10+
"importlib_metadata==6.*", # Polyfill, we can probably drop this once we require Python 3.10+
"isoweek",
"jsonschema",
"kombu==5.3.*",
@@ -90,14 +90,14 @@ dependencies = [
"pytz-deprecation-shim==0.1.*",
"pyuca",
"qrcode==7.4.*",
"redis==4.6.*",
"redis==4.5.*,>=4.5.4",
"reportlab==4.0.*",
"requests==2.31.*",
"sentry-sdk==1.15.*",
"sepaxml==2.6.*",
"slimit",
"static3==0.7.*",
"stripe==7.9.*",
"stripe==5.4.*",
"text-unidecode==1.*",
"tlds>=2020041600",
"tqdm==4.*",
@@ -110,10 +110,8 @@ dependencies = [
[project.optional-dependencies]
memcached = ["pylibmc"]
dev = [
"aiohttp==3.8.*",
"coverage",
"coveralls",
"fakeredis==2.18.*",
"flake8==6.0.*",
"freezegun",
"isort==5.12.*",
@@ -121,7 +119,6 @@ dev = [
"potypo",
"pycodestyle==2.10.*",
"pyflakes==3.0.*",
"pytest-asyncio",
"pytest-cache",
"pytest-cov",
"pytest-django==4.*",

View File

@@ -19,4 +19,4 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
__version__ = "2024.1.1"
__version__ = "2023.7.0.dev0"

View File

@@ -89,10 +89,8 @@ ALL_LANGUAGES = [
('fi', _('Finnish')),
('gl', _('Galician')),
('el', _('Greek')),
('id', _('Indonesian')),
('it', _('Italian')),
('lv', _('Latvian')),
('nb-no', _('Norwegian Bokmål')),
('pl', _('Polish')),
('pt-pt', _('Portuguese (Portugal)')),
('pt-br', _('Portuguese (Brazil)')),
@@ -109,7 +107,7 @@ LANGUAGES_RTL = {
'ar', 'hw'
}
LANGUAGES_INCUBATING = {
'fi', 'pt-br', 'gl',
'pl', 'fi', 'pt-br', 'gl',
}
LOCALE_PATHS = [
os.path.join(os.path.dirname(__file__), 'locale'),
@@ -198,14 +196,7 @@ STATICFILES_DIRS = [
STATICI18N_ROOT = os.path.join(BASE_DIR, "pretix/static")
STORAGES = {
"default": {
"BACKEND": "django.core.files.storage.FileSystemStorage",
},
"staticfiles": {
"BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
},
}
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
# if os.path.exists(os.path.join(DATA_DIR, 'static')):
# STATICFILES_DIRS.insert(0, os.path.join(DATA_DIR, 'static'))
@@ -261,23 +252,3 @@ PRETIX_PRIMARY_COLOR = '#8E44B3'
# stressful for some cache setups so it is enabled by default and currently can't be enabled through pretix.cfg
CACHE_LARGE_VALUES_ALLOWED = False
CACHE_LARGE_VALUES_ALIAS = 'default'
# Allowed file extensions for various places plus matching Pillow formats.
# Never allow EPS, it is full of dangerous bugs.
FILE_UPLOAD_EXTENSIONS_IMAGE = (".png", ".jpg", ".gif", ".jpeg")
PILLOW_FORMATS_IMAGE = ('PNG', 'GIF', 'JPEG')
FILE_UPLOAD_EXTENSIONS_FAVICON = (".ico", ".png", ".jpg", ".gif", ".jpeg")
PILLOW_FORMATS_QUESTIONS_FAVICON = ('PNG', 'GIF', 'JPEG', 'ICO')
FILE_UPLOAD_EXTENSIONS_QUESTION_IMAGE = (".png", ".jpg", ".gif", ".jpeg", ".bmp", ".tif", ".tiff", ".jfif")
PILLOW_FORMATS_QUESTIONS_IMAGE = ('PNG', 'GIF', 'JPEG', 'BMP', 'TIFF')
FILE_UPLOAD_EXTENSIONS_EMAIL_ATTACHMENT = (
".png", ".jpg", ".gif", ".jpeg", ".pdf", ".txt", ".docx", ".gif", ".svg",
".pptx", ".ppt", ".doc", ".xlsx", ".xls", ".jfif", ".heic", ".heif", ".pages",
".bmp", ".tif", ".tiff"
)
FILE_UPLOAD_EXTENSIONS_OTHER = FILE_UPLOAD_EXTENSIONS_EMAIL_ATTACHMENT
PRETIX_MAX_ORDER_SIZE = 500

View File

@@ -38,7 +38,6 @@ MAIL_FROM_ORGANIZERS = 'invalid@invalid'
FILE_UPLOAD_MAX_SIZE_EMAIL_AUTO_ATTACHMENT = 10
FILE_UPLOAD_MAX_SIZE_EMAIL_ATTACHMENT = 10
FILE_UPLOAD_MAX_SIZE_IMAGE = 10
FILE_UPLOAD_MAX_SIZE_FAVICON = 10
DEFAULT_CURRENCY = 'EUR'
SECRET_KEY = "build-time-secret-key"
HAS_REDIS = False

View File

@@ -185,7 +185,6 @@ class PretixPosSecurityProfile(AllowListSecurityProfile):
('GET', 'api-v1:order-detail'),
('DELETE', 'api-v1:orderposition-detail'),
('PATCH', 'api-v1:orderposition-detail'),
('GET', 'api-v1:orderposition-list'),
('GET', 'api-v1:orderposition-answer'),
('GET', 'api-v1:orderposition-pdf_image'),
('POST', 'api-v1:order-mark-canceled'),
@@ -224,8 +223,6 @@ class PretixPosSecurityProfile(AllowListSecurityProfile):
('POST', 'api-v1:checkinrpc.redeem'),
('GET', 'api-v1:checkinrpc.search'),
('POST', 'api-v1:reusablemedium-lookup'),
('GET', 'api-v1:reusablemedium-list'),
('POST', 'api-v1:reusablemedium-list'),
)

View File

@@ -1,49 +0,0 @@
#
# This file is part of pretix (Community Edition).
#
# Copyright (C) 2014-2020 Raphael Michel and contributors
# Copyright (C) 2020-2021 rami.io GmbH and contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation in version 3 of the License.
#
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
# this file, see <https://pretix.eu/about/en/license>.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
from rest_framework import exceptions
from rest_framework.authentication import (
SessionAuthentication as BaseSessionAuthentication,
)
from pretix.multidomain.middlewares import CsrfViewMiddleware
class CustomCSRFCheck(CsrfViewMiddleware):
def _reject(self, request, reason):
# Return the failure reason instead of an HttpResponse
return reason
class SessionAuthentication(BaseSessionAuthentication):
# Override from DRF to user our custom CSRF middleware
def enforce_csrf(self, request):
def dummy_get_response(request): # pragma: no cover
return None
check = CustomCSRFCheck(dummy_get_response)
# populates request.META['CSRF_COOKIE'], which is used in process_view()
check.process_request(request)
reason = check.process_view(request, None, (), {})
if reason:
# CSRF failed, bail with explicit error message
raise exceptions.PermissionDenied('CSRF Failed: %s' % reason)

View File

@@ -54,7 +54,7 @@ class IdempotencyMiddleware:
auth_hash_parts = '{}:{}'.format(
request.headers.get('Authorization', ''),
request.COOKIES.get('__Host-' + settings.SESSION_COOKIE_NAME, request.COOKIES.get(settings.SESSION_COOKIE_NAME, ''))
request.COOKIES.get(settings.SESSION_COOKIE_NAME, '')
)
auth_hash = sha1(auth_hash_parts.encode()).hexdigest()
idempotency_key = request.headers.get('X-Idempotency-Key', '')

View File

@@ -1,91 +0,0 @@
# Generated by Django 4.2.4 on 2023-09-26 12:01
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("pretixapi", "0010_webhook_comment"),
]
operations = [
migrations.AlterField(
model_name="apicall",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="oauthaccesstoken",
name="user",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="%(app_label)s_%(class)s",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="oauthapplication",
name="user",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="%(app_label)s_%(class)s",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="oauthgrant",
name="user",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="%(app_label)s_%(class)s",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="oauthidtoken",
name="user",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="%(app_label)s_%(class)s",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="oauthrefreshtoken",
name="user",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="%(app_label)s_%(class)s",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="webhook",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="webhookcall",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="webhookeventlistener",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
]

View File

@@ -38,7 +38,7 @@ class CheckinListSerializer(I18nAwareModelSerializer):
model = CheckinList
fields = ('id', 'name', 'all_products', 'limit_products', 'subevent', 'checkin_count', 'position_count',
'include_pending', 'auto_checkin_sales_channels', 'allow_multiple_entries', 'allow_entry_after_exit',
'rules', 'exit_all_at', 'addon_match', 'ignore_in_statistics', 'consider_tickets_used')
'rules', 'exit_all_at', 'addon_match')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)

View File

@@ -32,13 +32,11 @@ class DiscountSerializer(I18nAwareModelSerializer):
'available_until', 'subevent_mode', 'condition_all_products', 'condition_limit_products',
'condition_apply_to_addons', 'condition_min_count', 'condition_min_value',
'benefit_discount_matching_percent', 'benefit_only_apply_to_cheapest_n_matches',
'benefit_same_products', 'benefit_limit_products', 'benefit_apply_to_addons',
'benefit_ignore_voucher_discounted', 'condition_ignore_voucher_discounted')
'condition_ignore_voucher_discounted')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['condition_limit_products'].queryset = self.context['event'].items.all()
self.fields['benefit_limit_products'].queryset = self.context['event'].items.all()
def validate(self, data):
data = super().validate(data)

View File

@@ -230,8 +230,8 @@ class EventSerializer(I18nAwareModelSerializer):
for key, v in value['meta_data'].items():
if key not in self.meta_properties:
raise ValidationError(_('Meta data property \'{name}\' does not exist.').format(name=key))
if self.meta_properties[key].choices:
if v not in self.meta_properties[key].choice_keys:
if self.meta_properties[key].allowed_values:
if v not in [_v.strip() for _v in self.meta_properties[key].allowed_values.splitlines()]:
raise ValidationError(_('Meta data property \'{name}\' does not allow value \'{value}\'.').format(name=key, value=v))
return value
@@ -424,7 +424,7 @@ class CloneEventSerializer(EventSerializer):
new_event = super().create({**validated_data, 'plugins': None})
event = Event.objects.filter(slug=self.context['event'], organizer=self.context['organizer'].pk).first()
new_event.copy_data_from(event, skip_meta_data='meta_data' in validated_data)
new_event.copy_data_from(event)
if plugins is not None:
new_event.set_active_plugins(plugins)
@@ -528,8 +528,8 @@ class SubEventSerializer(I18nAwareModelSerializer):
for key, v in value['meta_data'].items():
if key not in self.meta_properties:
raise ValidationError(_('Meta data property \'{name}\' does not exist.').format(name=key))
if self.meta_properties[key].choices:
if v not in self.meta_properties[key].choice_keys:
if self.meta_properties[key].allowed_values:
if v not in [_v.strip() for _v in self.meta_properties[key].allowed_values.splitlines()]:
raise ValidationError(_('Meta data property \'{name}\' does not allow value \'{value}\'.').format(name=key, value=v))
return value
@@ -705,7 +705,6 @@ class EventSettingsSerializer(SettingsSerializer):
'frontpage_subevent_ordering',
'event_list_type',
'event_list_available_only',
'event_list_filters',
'event_calendar_future_only',
'frontpage_text',
'event_info_text',
@@ -796,8 +795,6 @@ class EventSettingsSerializer(SettingsSerializer):
'cancel_allow_user_paid_refund_as_giftcard',
'cancel_allow_user_paid_require_approval',
'cancel_allow_user_paid_require_approval_fee_unknown',
'cancel_terms_paid',
'cancel_terms_unpaid',
'change_allow_user_variation',
'change_allow_user_addons',
'change_allow_user_until',
@@ -820,10 +817,6 @@ class EventSettingsSerializer(SettingsSerializer):
'reusable_media_type_nfc_uid',
'reusable_media_type_nfc_uid_autocreate_giftcard',
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes_random_uid',
]
readonly_fields = [
# These are read-only since they are currently only settable on organizers, not events
@@ -833,10 +826,6 @@ class EventSettingsSerializer(SettingsSerializer):
'reusable_media_type_nfc_uid',
'reusable_media_type_nfc_uid_autocreate_giftcard',
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes_random_uid',
]
def __init__(self, *args, **kwargs):
@@ -905,8 +894,6 @@ class DeviceEventSettingsSerializer(EventSettingsSerializer):
'name_scheme',
'reusable_media_type_barcode',
'reusable_media_type_nfc_uid',
'reusable_media_type_nfc_mf0aes',
'reusable_media_type_nfc_mf0aes_random_uid',
'system_question_order',
]

View File

@@ -20,14 +20,11 @@
# <https://www.gnu.org/licenses/>.
#
from django import forms
from django.conf import settings
from django.http import QueryDict
from pytz import common_timezones
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from pretix.base.exporter import OrganizerLevelExportMixin
from pretix.base.models import ScheduledEventExport, ScheduledOrganizerExport
from pretix.base.timeframes import DateFrameField, SerializerDateFrameField
@@ -200,92 +197,3 @@ class JobRunSerializer(serializers.Serializer):
raise ValidationError(self.errors)
return not bool(self._errors)
class ScheduledExportSerializer(serializers.ModelSerializer):
schedule_next_run = serializers.DateTimeField(read_only=True)
export_identifier = serializers.ChoiceField(choices=[])
locale = serializers.ChoiceField(choices=settings.LANGUAGES, default='en')
owner = serializers.SlugRelatedField(slug_field='email', read_only=True)
error_counter = serializers.IntegerField(read_only=True)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['export_identifier'].choices = [(e, e) for e in self.context['exporters']]
def validate(self, attrs):
if attrs.get("export_form_data"):
identifier = attrs.get('export_identifier', self.instance.export_identifier if self.instance else None)
exporter = self.context['exporters'].get(identifier)
if exporter:
try:
JobRunSerializer(exporter=exporter).to_internal_value(attrs["export_form_data"])
except ValidationError as e:
raise ValidationError({"export_form_data": e.detail})
else:
raise ValidationError({"export_identifier": ["Unknown exporter."]})
return attrs
def validate_mail_additional_recipients(self, value):
d = value.replace(' ', '')
if len(d.split(',')) > 25:
raise ValidationError('Please enter less than 25 recipients.')
return d
def validate_mail_additional_recipients_cc(self, value):
d = value.replace(' ', '')
if len(d.split(',')) > 25:
raise ValidationError('Please enter less than 25 recipients.')
return d
def validate_mail_additional_recipients_bcc(self, value):
d = value.replace(' ', '')
if len(d.split(',')) > 25:
raise ValidationError('Please enter less than 25 recipients.')
return d
class ScheduledEventExportSerializer(ScheduledExportSerializer):
class Meta:
model = ScheduledEventExport
fields = [
'id',
'owner',
'export_identifier',
'export_form_data',
'locale',
'mail_additional_recipients',
'mail_additional_recipients_cc',
'mail_additional_recipients_bcc',
'mail_subject',
'mail_template',
'schedule_rrule',
'schedule_rrule_time',
'schedule_next_run',
'error_counter',
]
class ScheduledOrganizerExportSerializer(ScheduledExportSerializer):
timezone = serializers.ChoiceField(default=settings.TIME_ZONE, choices=[(a, a) for a in common_timezones])
class Meta:
model = ScheduledOrganizerExport
fields = [
'id',
'owner',
'export_identifier',
'export_form_data',
'locale',
'mail_additional_recipients',
'mail_additional_recipients_cc',
'mail_additional_recipients_bcc',
'mail_subject',
'mail_template',
'schedule_rrule',
'schedule_rrule_time',
'schedule_next_run',
'timezone',
'error_counter',
]

View File

@@ -59,9 +59,9 @@ class InlineItemVariationSerializer(I18nAwareModelSerializer):
class Meta:
model = ItemVariation
fields = ('id', 'value', 'active', 'description',
'position', 'default_price', 'price', 'original_price', 'free_price_suggestion', 'require_approval',
'position', 'default_price', 'price', 'original_price', 'require_approval',
'require_membership', 'require_membership_types', 'require_membership_hidden',
'checkin_attention', 'checkin_text', 'available_from', 'available_until',
'checkin_attention', 'available_from', 'available_until',
'sales_channels', 'hide_without_voucher', 'meta_data')
def __init__(self, *args, **kwargs):
@@ -83,9 +83,9 @@ class ItemVariationSerializer(I18nAwareModelSerializer):
class Meta:
model = ItemVariation
fields = ('id', 'value', 'active', 'description',
'position', 'default_price', 'price', 'original_price', 'free_price_suggestion', 'require_approval',
'position', 'default_price', 'price', 'original_price', 'require_approval',
'require_membership', 'require_membership_types', 'require_membership_hidden',
'checkin_attention', 'checkin_text', 'available_from', 'available_until',
'checkin_attention', 'available_from', 'available_until',
'sales_channels', 'hide_without_voucher', 'meta_data')
def __init__(self, *args, **kwargs):
@@ -234,13 +234,12 @@ class ItemSerializer(I18nAwareModelSerializer):
class Meta:
model = Item
fields = ('id', 'category', 'name', 'internal_name', 'active', 'sales_channels', 'description',
'default_price', 'free_price', 'free_price_suggestion', 'tax_rate', 'tax_rule', 'admission',
'personalized', 'position', 'picture', 'available_from', 'available_until',
'default_price', 'free_price', 'tax_rate', 'tax_rule', 'admission', 'personalized',
'position', 'picture', 'available_from', 'available_until',
'require_voucher', 'hide_without_voucher', 'allow_cancel', 'require_bundling',
'min_per_order', 'max_per_order', 'checkin_attention', 'checkin_text', 'has_variations', 'variations',
'min_per_order', 'max_per_order', 'checkin_attention', 'has_variations', 'variations',
'addons', 'bundles', 'original_price', 'require_approval', 'generate_tickets',
'show_quota_left', 'hidden_if_available', 'hidden_if_item_available', 'allow_waitinglist',
'issue_giftcard', 'meta_data',
'show_quota_left', 'hidden_if_available', 'allow_waitinglist', 'issue_giftcard', 'meta_data',
'require_membership', 'require_membership_types', 'require_membership_hidden', 'grant_membership_type',
'grant_membership_duration_like_event', 'grant_membership_duration_days',
'grant_membership_duration_months', 'validity_mode', 'validity_fixed_from', 'validity_fixed_until',
@@ -440,7 +439,7 @@ class QuestionSerializer(I18nAwareModelSerializer):
class Meta:
model = Question
fields = ('id', 'question', 'type', 'required', 'items', 'options', 'position',
'ask_during_checkin', 'show_during_checkin', 'identifier', 'dependency_question', 'dependency_values',
'ask_during_checkin', 'identifier', 'dependency_question', 'dependency_values',
'hidden', 'dependency_value', 'print_on_invoice', 'help_text', 'valid_number_min',
'valid_number_max', 'valid_date_min', 'valid_date_max', 'valid_datetime_min', 'valid_datetime_max',
'valid_string_length_max', 'valid_file_portrait')
@@ -486,9 +485,6 @@ class QuestionSerializer(I18nAwareModelSerializer):
if full_data.get('ask_during_checkin') and full_data.get('type') in Question.ASK_DURING_CHECKIN_UNSUPPORTED:
raise ValidationError(_('This type of question cannot be asked during check-in.'))
if full_data.get('show_during_checkin') and full_data.get('type') in Question.SHOW_DURING_CHECKIN_UNSUPPORTED:
raise ValidationError(_('This type of question cannot be shown during check-in.'))
Question.clean_items(event, full_data.get('items'))
return data

View File

@@ -22,13 +22,11 @@
import logging
import os
from collections import Counter, defaultdict
from datetime import timedelta
from decimal import Decimal
import pycountry
from django.conf import settings
from django.core.files import File
from django.db import models
from django.db.models import F, Q
from django.utils.encoding import force_str
from django.utils.timezone import now
@@ -44,7 +42,7 @@ from pretix.api.serializers import CompatibleJSONField
from pretix.api.serializers.event import SubEventSerializer
from pretix.api.serializers.i18n import I18nAwareModelSerializer
from pretix.api.serializers.item import (
InlineItemVariationSerializer, ItemSerializer, QuestionSerializer,
InlineItemVariationSerializer, ItemSerializer,
)
from pretix.base.channels import get_all_sales_channels
from pretix.base.decimal import round_decimal
@@ -60,11 +58,10 @@ from pretix.base.models.orders import (
)
from pretix.base.pdf import get_images, get_variables
from pretix.base.services.cart import error_messages
from pretix.base.services.locking import LOCK_TRUST_WINDOW, lock_objects
from pretix.base.services.locking import NoLockManager
from pretix.base.services.pricing import (
apply_discounts, get_line_price, get_listed_price, is_included_for_free,
)
from pretix.base.services.quotas import QuotaAvailability
from pretix.base.settings import COUNTRIES_WITH_STATE_IN_ADDRESS
from pretix.base.signals import register_ticket_outputs
from pretix.helpers.countries import CachedCountries
@@ -286,12 +283,11 @@ class FailedCheckinSerializer(I18nAwareModelSerializer):
raw_item = serializers.PrimaryKeyRelatedField(queryset=Item.objects.none(), required=False, allow_null=True)
raw_variation = serializers.PrimaryKeyRelatedField(queryset=ItemVariation.objects.none(), required=False, allow_null=True)
raw_subevent = serializers.PrimaryKeyRelatedField(queryset=SubEvent.objects.none(), required=False, allow_null=True)
nonce = serializers.CharField(required=False, allow_null=True)
class Meta:
model = Checkin
fields = ('error_reason', 'error_explanation', 'raw_barcode', 'raw_item', 'raw_variation',
'raw_subevent', 'nonce', 'datetime', 'type', 'position')
'raw_subevent', 'datetime', 'type', 'position')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -376,15 +372,11 @@ class PdfDataSerializer(serializers.Field):
self.context['vars_images'] = get_images(self.context['event'])
for k, f in self.context['vars'].items():
if 'evaluate_bulk' in f:
# Will be evaluated later by our list serializers
res[k] = (f['evaluate_bulk'], instance)
else:
try:
res[k] = f['evaluate'](instance, instance.order, ev)
except:
logger.exception('Evaluating PDF variable failed')
res[k] = '(error)'
try:
res[k] = f['evaluate'](instance, instance.order, ev)
except:
logger.exception('Evaluating PDF variable failed')
res[k] = '(error)'
if not hasattr(ev, '_cached_meta_data'):
ev._cached_meta_data = ev.meta_data
@@ -437,38 +429,6 @@ class PdfDataSerializer(serializers.Field):
return res
class OrderPositionListSerializer(serializers.ListSerializer):
def to_representation(self, data):
# We have a custom implementation of this method because PdfDataSerializer() might keep some elements unevaluated
# with a (callable, input) tuple. We'll loop over these entries and evaluate them bulk-wise to save on SQL queries.
if isinstance(self.parent, OrderSerializer) and isinstance(self.parent.parent, OrderListSerializer):
# Do not execute our custom code because it will be executed by OrderListSerializer later for the
# full result set.
return super().to_representation(data)
iterable = data.all() if isinstance(data, models.Manager) else data
data = []
evaluate_queue = defaultdict(list)
for item in iterable:
entry = self.child.to_representation(item)
if "pdf_data" in entry:
for k, v in entry["pdf_data"].items():
if isinstance(v, tuple) and callable(v[0]):
evaluate_queue[v[0]].append((v[1], entry, k))
data.append(entry)
for func, entries in evaluate_queue.items():
results = func([item for (item, entry, k) in entries])
for (item, entry, k), result in zip(entries, results):
entry["pdf_data"][k] = result
return data
class OrderPositionSerializer(I18nAwareModelSerializer):
checkins = CheckinSerializer(many=True, read_only=True)
answers = AnswerSerializer(many=True)
@@ -480,7 +440,6 @@ class OrderPositionSerializer(I18nAwareModelSerializer):
attendee_name = serializers.CharField(required=False)
class Meta:
list_serializer_class = OrderPositionListSerializer
model = OrderPosition
fields = ('id', 'order', 'positionid', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts',
'company', 'street', 'zipcode', 'city', 'country', 'state', 'discount',
@@ -501,7 +460,7 @@ class OrderPositionSerializer(I18nAwareModelSerializer):
# /events/…/checkinlists/…/positions/
# We're unable to check this on this level if we're on /checkinrpc/, in which case we rely on the view
# layer to not set pdf_data=true in the first place.
request and hasattr(request, 'eventpermset') and 'can_view_orders' not in request.eventpermset
request and hasattr(request, 'event') and 'can_view_orders' not in request.eventpermset
)
if ('pdf_data' in self.context and not self.context['pdf_data']) or pdf_data_forbidden:
self.fields.pop('pdf_data', None)
@@ -509,20 +468,6 @@ class OrderPositionSerializer(I18nAwareModelSerializer):
def validate(self, data):
raise TypeError("this serializer is readonly")
def to_representation(self, data):
if isinstance(self.parent, (OrderListSerializer, OrderPositionListSerializer)):
# Do not execute our custom code because it will be executed by OrderListSerializer later for the
# full result set.
return super().to_representation(data)
entry = super().to_representation(data)
if "pdf_data" in entry:
for k, v in entry["pdf_data"].items():
if isinstance(v, tuple) and callable(v[0]):
entry["pdf_data"][k] = v[0]([v[1]])[0]
return entry
class RequireAttentionField(serializers.Field):
def to_representation(self, instance: OrderPosition):
@@ -585,9 +530,6 @@ class CheckinListOrderPositionSerializer(OrderPositionSerializer):
if 'variation' in self.context['expand']:
self.fields['variation'] = InlineItemVariationSerializer(read_only=True)
if 'answers.question' in self.context['expand']:
self.fields['answers'].child.fields['question'] = QuestionSerializer(read_only=True)
class OrderPaymentTypeField(serializers.Field):
# TODO: Remove after pretix 2.2
@@ -620,7 +562,7 @@ class PaymentURLField(serializers.URLField):
def to_representation(self, instance: OrderPayment):
if instance.state != OrderPayment.PAYMENT_STATE_CREATED:
return None
return build_absolute_uri(instance.order.event, 'presale:event.order.pay', kwargs={
return build_absolute_uri(self.context['event'], 'presale:event.order.pay', kwargs={
'order': instance.order.code,
'secret': instance.order.secret,
'payment': instance.pk,
@@ -665,42 +607,13 @@ class OrderRefundSerializer(I18nAwareModelSerializer):
class OrderURLField(serializers.URLField):
def to_representation(self, instance: Order):
return build_absolute_uri(instance.event, 'presale:event.order', kwargs={
return build_absolute_uri(self.context['event'], 'presale:event.order', kwargs={
'order': instance.code,
'secret': instance.secret,
})
class OrderListSerializer(serializers.ListSerializer):
def to_representation(self, data):
# We have a custom implementation of this method because PdfDataSerializer() might keep some elements
# unevaluated with a (callable, input) tuple. We'll loop over these entries and evaluate them bulk-wise to
# save on SQL queries.
iterable = data.all() if isinstance(data, models.Manager) else data
data = []
evaluate_queue = defaultdict(list)
for item in iterable:
entry = self.child.to_representation(item)
for p in entry.get("positions", []):
if "pdf_data" in p:
for k, v in p["pdf_data"].items():
if isinstance(v, tuple) and callable(v[0]):
evaluate_queue[v[0]].append((v[1], p, k))
data.append(entry)
for func, entries in evaluate_queue.items():
results = func([item for (item, entry, k) in entries])
for (item, entry, k), result in zip(entries, results):
entry["pdf_data"][k] = result
return data
class OrderSerializer(I18nAwareModelSerializer):
event = SlugRelatedField(slug_field='slug', read_only=True)
invoice_address = InvoiceAddressSerializer(allow_null=True)
positions = OrderPositionSerializer(many=True, read_only=True)
fees = OrderFeeSerializer(many=True, read_only=True)
@@ -714,11 +627,10 @@ class OrderSerializer(I18nAwareModelSerializer):
class Meta:
model = Order
list_serializer_class = OrderListSerializer
fields = (
'code', 'event', 'status', 'testmode', 'secret', 'email', 'phone', 'locale', 'datetime', 'expires', 'payment_date',
'code', 'status', 'testmode', 'secret', 'email', 'phone', 'locale', 'datetime', 'expires', 'payment_date',
'payment_provider', 'fees', 'total', 'comment', 'custom_followup_at', 'invoice_address', 'positions', 'downloads',
'checkin_attention', 'checkin_text', 'last_modified', 'payments', 'refunds', 'require_approval', 'sales_channel',
'checkin_attention', 'last_modified', 'payments', 'refunds', 'require_approval', 'sales_channel',
'url', 'customer', 'valid_if_pending'
)
read_only_fields = (
@@ -774,8 +686,8 @@ class OrderSerializer(I18nAwareModelSerializer):
def update(self, instance, validated_data):
# Even though all fields that shouldn't be edited are marked as read_only in the serializer
# (hopefully), we'll be extra careful here and be explicit about the model fields we update.
update_fields = ['comment', 'custom_followup_at', 'checkin_attention', 'checkin_text', 'email', 'locale',
'phone', 'valid_if_pending']
update_fields = ['comment', 'custom_followup_at', 'checkin_attention', 'email', 'locale', 'phone',
'valid_if_pending']
if 'invoice_address' in validated_data:
iadata = validated_data.pop('invoice_address')
@@ -1035,14 +947,13 @@ class OrderCreateSerializer(I18nAwareModelSerializer):
super().__init__(*args, **kwargs)
self.fields['positions'].child.fields['voucher'].queryset = self.context['event'].vouchers.all()
self.fields['customer'].queryset = self.context['event'].organizer.customers.all()
self.fields['expires'].required = False
class Meta:
model = Order
fields = ('code', 'status', 'testmode', 'email', 'phone', 'locale', 'payment_provider', 'fees', 'comment', 'sales_channel',
'invoice_address', 'positions', 'checkin_attention', 'checkin_text', 'payment_info', 'payment_date',
'consume_carts', 'force', 'send_email', 'simulate', 'customer', 'custom_followup_at',
'require_approval', 'valid_if_pending', 'expires')
'invoice_address', 'positions', 'checkin_attention', 'payment_info', 'payment_date', 'consume_carts',
'force', 'send_email', 'simulate', 'customer', 'custom_followup_at', 'require_approval',
'valid_if_pending')
def validate_payment_provider(self, pp):
if pp is None:
@@ -1051,11 +962,6 @@ class OrderCreateSerializer(I18nAwareModelSerializer):
raise ValidationError('The given payment provider is not known.')
return pp
def validate_expires(self, expires):
if expires < now():
raise ValidationError('Expiration date must be in the future.')
return expires
def validate_sales_channel(self, channel):
if channel not in get_all_sales_channels():
raise ValidationError('Unknown sales channel.')
@@ -1077,10 +983,6 @@ class OrderCreateSerializer(I18nAwareModelSerializer):
raise ValidationError(
'An order cannot be empty.'
)
if len(data) > settings.PRETIX_MAX_ORDER_SIZE:
raise ValidationError(
'Orders cannot have more than %(max)s positions.' % {'max': settings.PRETIX_MAX_ORDER_SIZE}
)
errs = [{} for p in data]
if any([p.get('positionid') for p in data]):
if not all([p.get('positionid') for p in data]):
@@ -1159,368 +1061,338 @@ class OrderCreateSerializer(I18nAwareModelSerializer):
else:
ia = None
quotas_by_item = {}
quota_diff_for_locking = Counter()
voucher_diff_for_locking = Counter()
seat_diff_for_locking = Counter()
quota_usage = Counter()
voucher_usage = Counter()
seat_usage = Counter()
v_budget = {}
now_dt = now()
delete_cps = []
consume_carts = validated_data.pop('consume_carts', [])
lock_required = False
for pos_data in positions_data:
if (pos_data.get('item'), pos_data.get('variation'), pos_data.get('subevent')) not in quotas_by_item:
quotas_by_item[pos_data.get('item'), pos_data.get('variation'), pos_data.get('subevent')] = list(
pos_data.get('variation').quotas.filter(subevent=pos_data.get('subevent'))
if pos_data.get('variation')
else pos_data.get('item').quotas.filter(subevent=pos_data.get('subevent'))
)
for q in quotas_by_item[pos_data.get('item'), pos_data.get('variation'), pos_data.get('subevent')]:
quota_diff_for_locking[q] += 1
if pos_data.get('voucher'):
voucher_diff_for_locking[pos_data['voucher']] += 1
if pos_data.get('seat'):
try:
seat = self.context['event'].seats.get(seat_guid=pos_data['seat'], subevent=pos_data.get('subevent'))
except Seat.DoesNotExist:
pos_data['seat'] = Seat.DoesNotExist
else:
pos_data['seat'] = seat
seat_diff_for_locking[pos_data['seat']] += 1
pos_data['_quotas'] = list(
pos_data.get('variation').quotas.filter(subevent=pos_data.get('subevent'))
if pos_data.get('variation')
else pos_data.get('item').quotas.filter(subevent=pos_data.get('subevent'))
)
if pos_data.get('voucher') or pos_data.get('seat') or any(q.size is not None for q in pos_data['_quotas']):
lock_required = True
if consume_carts:
offset = now() + timedelta(seconds=LOCK_TRUST_WINDOW)
for cp in CartPosition.objects.filter(
event=self.context['event'], cart_id__in=consume_carts, expires__gt=now_dt
):
quotas = (cp.variation.quotas.filter(subevent=cp.subevent)
if cp.variation else cp.item.quotas.filter(subevent=cp.subevent))
for quota in quotas:
if cp.expires > offset:
quota_diff_for_locking[quota] -= 1
quota_usage[quota] -= 1
if cp.voucher:
if cp.expires > offset:
voucher_diff_for_locking[cp.voucher] -= 1
voucher_usage[cp.voucher] -= 1
if cp.seat:
if cp.expires > offset:
seat_diff_for_locking[cp.seat] -= 1
seat_usage[cp.seat] -= 1
delete_cps.append(cp)
lockfn = self.context['event'].lock
if simulate or not lock_required:
lockfn = NoLockManager
with lockfn() as now_dt:
free_seats = set()
seats_seen = set()
consume_carts = validated_data.pop('consume_carts', [])
delete_cps = []
quota_avail_cache = {}
v_budget = {}
voucher_usage = Counter()
if consume_carts:
for cp in CartPosition.objects.filter(
event=self.context['event'], cart_id__in=consume_carts, expires__gt=now()
):
quotas = (cp.variation.quotas.filter(subevent=cp.subevent)
if cp.variation else cp.item.quotas.filter(subevent=cp.subevent))
for quota in quotas:
if quota not in quota_avail_cache:
quota_avail_cache[quota] = list(quota.availability())
if quota_avail_cache[quota][1] is not None:
quota_avail_cache[quota][1] += 1
if cp.voucher:
voucher_usage[cp.voucher] -= 1
if cp.expires > now_dt:
if cp.seat:
free_seats.add(cp.seat)
delete_cps.append(cp)
if not simulate:
full_lock_required = seat_diff_for_locking and self.context['event'].settings.seating_minimal_distance > 0
if full_lock_required:
# We lock the entire event in this case since we don't want to deal with fine-granular locking
# in the case of seating distance enforcement
lock_objects([self.context['event']])
else:
lock_objects(
[q for q, d in quota_diff_for_locking.items() if d > 0 and q.size is not None and not force] +
[v for v, d in voucher_diff_for_locking.items() if d > 0 and not force] +
[s for s, d in seat_diff_for_locking.items() if d > 0],
shared_lock_objects=[self.context['event']]
)
errs = [{} for p in positions_data]
qa = QuotaAvailability()
qa.queue(*[q for q, d in quota_diff_for_locking.items() if d > 0])
qa.compute()
for i, pos_data in enumerate(positions_data):
# These are not technically correct as diff use due to the time offset applied above, so let's prevent accidental
# use further down
del quota_diff_for_locking, voucher_diff_for_locking, seat_diff_for_locking
if pos_data.get('voucher'):
v = pos_data['voucher']
errs = [{} for p in positions_data]
if pos_data.get('addon_to'):
errs[i]['voucher'] = ['Vouchers are currently not supported for add-on products.']
continue
for i, pos_data in enumerate(positions_data):
if pos_data.get('voucher'):
v = pos_data['voucher']
if not v.applies_to(pos_data['item'], pos_data.get('variation')):
errs[i]['voucher'] = [error_messages['voucher_invalid_item']]
continue
if pos_data.get('addon_to'):
errs[i]['voucher'] = ['Vouchers are currently not supported for add-on products.']
continue
if v.subevent_id and pos_data.get('subevent').pk != v.subevent_id:
errs[i]['voucher'] = [error_messages['voucher_invalid_subevent']]
continue
if not v.applies_to(pos_data['item'], pos_data.get('variation')):
errs[i]['voucher'] = [error_messages['voucher_invalid_item']]
continue
if v.valid_until is not None and v.valid_until < now_dt:
errs[i]['voucher'] = [error_messages['voucher_expired']]
continue
if v.subevent_id and pos_data.get('subevent').pk != v.subevent_id:
errs[i]['voucher'] = [error_messages['voucher_invalid_subevent']]
continue
voucher_usage[v] += 1
if voucher_usage[v] > 0:
redeemed_in_carts = CartPosition.objects.filter(
Q(voucher=pos_data['voucher']) & Q(event=self.context['event']) & Q(expires__gte=now_dt)
).exclude(pk__in=[cp.pk for cp in delete_cps])
v_avail = v.max_usages - v.redeemed - redeemed_in_carts.count()
if v_avail < voucher_usage[v]:
errs[i]['voucher'] = [
'The voucher has already been used the maximum number of times.'
]
if v.valid_until is not None and v.valid_until < now_dt:
errs[i]['voucher'] = [error_messages['voucher_expired']]
continue
if v.budget is not None:
price = pos_data.get('price')
listed_price = get_listed_price(pos_data.get('item'), pos_data.get('variation'), pos_data.get('subevent'))
voucher_usage[v] += 1
if voucher_usage[v] > 0:
redeemed_in_carts = CartPosition.objects.filter(
Q(voucher=pos_data['voucher']) & Q(event=self.context['event']) & Q(expires__gte=now_dt)
).exclude(pk__in=[cp.pk for cp in delete_cps])
v_avail = v.max_usages - v.redeemed - redeemed_in_carts.count()
if v_avail < voucher_usage[v]:
errs[i]['voucher'] = [
'The voucher has already been used the maximum number of times.'
]
if pos_data.get('voucher'):
price_after_voucher = pos_data.get('voucher').calculate_price(listed_price)
else:
price_after_voucher = listed_price
if price is None:
price = price_after_voucher
if v.budget is not None:
price = pos_data.get('price')
listed_price = get_listed_price(pos_data.get('item'), pos_data.get('variation'), pos_data.get('subevent'))
if v not in v_budget:
v_budget[v] = v.budget - v.budget_used()
disc = max(listed_price - price, 0)
if disc > v_budget[v]:
new_disc = v_budget[v]
v_budget[v] -= new_disc
if new_disc == Decimal('0.00') or pos_data.get('price') is not None:
errs[i]['voucher'] = [
'The voucher has a remaining budget of {}, therefore a discount of {} can not be '
'given.'.format(v_budget[v] + new_disc, disc)
]
continue
pos_data['price'] = price + (disc - new_disc)
else:
v_budget[v] -= disc
seated = pos_data.get('item').seat_category_mappings.filter(subevent=pos_data.get('subevent')).exists()
if pos_data.get('seat'):
if pos_data.get('addon_to'):
errs[i]['seat'] = ['Seats are currently not supported for add-on products.']
continue
if not seated:
errs[i]['seat'] = ['The specified product does not allow to choose a seat.']
try:
seat = self.context['event'].seats.get(seat_guid=pos_data['seat'], subevent=pos_data.get('subevent'))
except Seat.DoesNotExist:
errs[i]['seat'] = ['The specified seat does not exist.']
else:
pos_data['seat'] = seat
if (seat not in free_seats and not seat.is_available(sales_channel=validated_data.get('sales_channel', 'web'))) or seat in seats_seen:
errs[i]['seat'] = [gettext_lazy('The selected seat "{seat}" is not available.').format(seat=seat.name)]
seats_seen.add(seat)
elif seated:
errs[i]['seat'] = ['The specified product requires to choose a seat.']
requested_valid_from = pos_data.pop('requested_valid_from', None)
if 'valid_from' not in pos_data and 'valid_until' not in pos_data:
valid_from, valid_until = pos_data['item'].compute_validity(
requested_start=(
max(requested_valid_from, now())
if requested_valid_from and pos_data['item'].validity_dynamic_start_choice
else now()
),
enforce_start_limit=True,
override_tz=self.context['event'].timezone,
)
pos_data['valid_from'] = valid_from
pos_data['valid_until'] = valid_until
if not force:
for i, pos_data in enumerate(positions_data):
if pos_data.get('voucher'):
price_after_voucher = pos_data.get('voucher').calculate_price(listed_price)
if pos_data['voucher'].allow_ignore_quota or pos_data['voucher'].block_quota:
continue
if pos_data.get('subevent'):
if pos_data.get('item').pk in pos_data['subevent'].item_overrides and pos_data['subevent'].item_overrides[pos_data['item'].pk].disabled:
errs[i]['item'] = [gettext_lazy('The product "{}" is not available on this date.').format(
str(pos_data.get('item'))
)]
if (
pos_data.get('variation') and pos_data['variation'].pk in pos_data['subevent'].var_overrides and
pos_data['subevent'].var_overrides[pos_data['variation'].pk].disabled
):
errs[i]['item'] = [gettext_lazy('The product "{}" is not available on this date.').format(
str(pos_data.get('item'))
)]
new_quotas = pos_data['_quotas']
if len(new_quotas) == 0:
errs[i]['item'] = [gettext_lazy('The product "{}" is not assigned to a quota.').format(
str(pos_data.get('item'))
)]
else:
for quota in new_quotas:
if quota not in quota_avail_cache:
quota_avail_cache[quota] = list(quota.availability())
if quota_avail_cache[quota][1] is not None:
quota_avail_cache[quota][1] -= 1
if quota_avail_cache[quota][1] < 0:
errs[i]['item'] = [
gettext_lazy('There is not enough quota available on quota "{}" to perform the operation.').format(
quota.name
)
]
if any(errs):
raise ValidationError({'positions': errs})
if validated_data.get('locale', None) is None:
validated_data['locale'] = self.context['event'].settings.locale
order = Order(event=self.context['event'], **validated_data)
order.set_expires(subevents=[p.get('subevent') for p in positions_data])
order.meta_info = "{}"
order.total = Decimal('0.00')
if validated_data.get('require_approval') is not None:
order.require_approval = validated_data['require_approval']
if simulate:
order = WrappedModel(order)
order.last_modified = now()
order.code = 'PREVIEW'
else:
order.save()
if ia:
if not simulate:
ia.order = order
ia.save()
else:
order.invoice_address = ia
ia.last_modified = now()
# Generate position objects
pos_map = {}
for pos_data in positions_data:
addon_to = pos_data.pop('addon_to', None)
attendee_name = pos_data.pop('attendee_name', '')
if attendee_name and not pos_data.get('attendee_name_parts'):
pos_data['attendee_name_parts'] = {
'_legacy': attendee_name
}
pos = OrderPosition(**{k: v for k, v in pos_data.items() if k != 'answers' and k != '_quotas' and k != 'use_reusable_medium'})
if simulate:
pos.order = order._wrapped
else:
pos.order = order
if addon_to:
if simulate:
pos.addon_to = pos_map[addon_to]
else:
pos.addon_to = pos_map[addon_to]
pos_map[pos.positionid] = pos
pos_data['__instance'] = pos
# Calculate prices if not set
for pos_data in positions_data:
pos = pos_data['__instance']
if pos.addon_to_id and is_included_for_free(pos.item, pos.addon_to):
listed_price = Decimal('0.00')
else:
listed_price = get_listed_price(pos.item, pos.variation, pos.subevent)
if pos.price is None:
if pos.voucher:
price_after_voucher = pos.voucher.calculate_price(listed_price)
else:
price_after_voucher = listed_price
if price is None:
price = price_after_voucher
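# Voucher budget handling (v presumably holds the position's voucher): consume the remaining
# budget; if it cannot cover the full discount, shrink the discount or reject the position
# when an explicit price was requested.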
if v not in v_budget:
v_budget[v] = v.budget - v.budget_used()
disc = max(listed_price - price, 0)
if disc > v_budget[v]:
new_disc = v_budget[v]
v_budget[v] -= new_disc
if new_disc == Decimal('0.00') or pos_data.get('price') is not None:
errs[i]['voucher'] = [
'The voucher has a remaining budget of {}, therefore a discount of {} can not be '
'given.'.format(v_budget[v] + new_disc, disc)
]
continue
pos_data['price'] = price + (disc - new_disc)
else:
v_budget[v] -= disc
seated = pos_data.get('item').seat_category_mappings.filter(subevent=pos_data.get('subevent')).exists()
if pos_data.get('seat'):
if pos_data.get('addon_to'):
errs[i]['seat'] = ['Seats are currently not supported for add-on products.']
continue
if not seated:
errs[i]['seat'] = ['The specified product does not allow to choose a seat.']
seat = pos_data['seat']
if seat is Seat.DoesNotExist:
errs[i]['seat'] = ['The specified seat does not exist.']
else:
seat_usage[seat] += 1
if (seat_usage[seat] > 0 and not seat.is_available(sales_channel=validated_data.get('sales_channel', 'web'))) or seat_usage[seat] > 1:
errs[i]['seat'] = [gettext_lazy('The selected seat "{seat}" is not available.').format(seat=seat.name)]
elif seated:
errs[i]['seat'] = ['The specified product requires to choose a seat.']
requested_valid_from = pos_data.pop('requested_valid_from', None)
if 'valid_from' not in pos_data and 'valid_until' not in pos_data:
valid_from, valid_until = pos_data['item'].compute_validity(
requested_start=(
max(requested_valid_from, now())
if requested_valid_from and pos_data['item'].validity_dynamic_start_choice
else now()
),
enforce_start_limit=True,
override_tz=self.context['event'].timezone,
)
pos_data['valid_from'] = valid_from
pos_data['valid_until'] = valid_until
if not force:
for i, pos_data in enumerate(positions_data):
if pos_data.get('voucher'):
if pos_data['voucher'].allow_ignore_quota or pos_data['voucher'].block_quota:
continue
if pos_data.get('subevent'):
if pos_data.get('item').pk in pos_data['subevent'].item_overrides and pos_data['subevent'].item_overrides[pos_data['item'].pk].disabled:
errs[i]['item'] = [gettext_lazy('The product "{}" is not available on this date.').format(
str(pos_data.get('item'))
)]
if (
pos_data.get('variation') and pos_data['variation'].pk in pos_data['subevent'].var_overrides and
pos_data['subevent'].var_overrides[pos_data['variation'].pk].disabled
):
errs[i]['item'] = [gettext_lazy('The product "{}" is not available on this date.').format(
str(pos_data.get('item'))
)]
new_quotas = quotas_by_item[pos_data.get('item'), pos_data.get('variation'), pos_data.get('subevent')]
if len(new_quotas) == 0:
errs[i]['item'] = [gettext_lazy('The product "{}" is not assigned to a quota.').format(
str(pos_data.get('item'))
)]
else:
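# quota_usage counts how many new positions draw from each quota; the operation fails once
# it exceeds the availability snapshot in qa.results.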
for quota in new_quotas:
quota_usage[quota] += 1
if quota_usage[quota] > 0 and qa.results[quota][1] is not None:
if qa.results[quota][1] < quota_usage[quota]:
errs[i]['item'] = [
gettext_lazy('There is not enough quota available on quota "{}" to perform the operation.').format(
quota.name
)
]
if any(errs):
raise ValidationError({'positions': errs})
if validated_data.get('locale', None) is None:
validated_data['locale'] = self.context['event'].settings.locale
order = Order(event=self.context['event'], **validated_data)
if not validated_data.get('expires'):
order.set_expires(subevents=[p.get('subevent') for p in positions_data])
order.meta_info = "{}"
order.total = Decimal('0.00')
if validated_data.get('require_approval') is not None:
order.require_approval = validated_data['require_approval']
if simulate:
order = WrappedModel(order)
order.last_modified = now()
order.code = 'PREVIEW'
else:
order.save()
if ia:
if not simulate:
ia.order = order
ia.save()
else:
order.invoice_address = ia
ia.last_modified = now()
# Generate position objects
pos_map = {}
for pos_data in positions_data:
addon_to = pos_data.pop('addon_to', None)
attendee_name = pos_data.pop('attendee_name', '')
if attendee_name and not pos_data.get('attendee_name_parts'):
pos_data['attendee_name_parts'] = {
'_legacy': attendee_name
}
pos = OrderPosition(**{k: v for k, v in pos_data.items() if k != 'answers' and k != '_quotas' and k != 'use_reusable_medium'})
if simulate:
pos.order = order._wrapped
else:
pos.order = order
if addon_to:
if simulate:
pos.addon_to = pos_map[addon_to]
else:
pos.addon_to = pos_map[addon_to]
pos_map[pos.positionid] = pos
pos_data['__instance'] = pos
# Calculate prices if not set
for pos_data in positions_data:
pos = pos_data['__instance']
if pos.addon_to_id and is_included_for_free(pos.item, pos.addon_to):
listed_price = Decimal('0.00')
else:
listed_price = get_listed_price(pos.item, pos.variation, pos.subevent)
if pos.price is None:
if pos.voucher:
price_after_voucher = pos.voucher.calculate_price(listed_price)
else:
price_after_voucher = listed_price
line_price = get_line_price(
price_after_voucher=price_after_voucher,
custom_price_input=None,
custom_price_input_is_net=False,
tax_rule=pos.item.tax_rule,
invoice_address=ia,
bundled_sum=Decimal('0.00'),
)
pos.price = line_price.gross
pos._auto_generated_price = True
else:
if pos.voucher:
if not pos.item.tax_rule or pos.item.tax_rule.price_includes_tax:
price_after_voucher = max(pos.price, pos.voucher.calculate_price(listed_price))
else:
price_after_voucher = max(pos.price - pos.tax_value, pos.voucher.calculate_price(listed_price))
else:
price_after_voucher = listed_price
pos._auto_generated_price = False
pos._voucher_discount = listed_price - price_after_voucher
if pos.voucher:
pos.voucher_budget_use = max(listed_price - price_after_voucher, Decimal('0.00'))
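# Apply automatic discounts across the whole order; only auto-generated prices are
# overwritten with the discounted price.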
order_positions = [pos_data['__instance'] for pos_data in positions_data]
discount_results = apply_discounts(
self.context['event'],
order.sales_channel,
[
(cp.item_id, cp.subevent_id, cp.price, bool(cp.addon_to), cp.is_bundled, pos._voucher_discount)
for cp in order_positions
]
)
for cp, (new_price, discount) in zip(order_positions, discount_results):
if new_price != pos.price and pos._auto_generated_price:
pos.price = new_price
pos.discount = discount
# Save instances
for pos_data in positions_data:
answers_data = pos_data.pop('answers', [])
use_reusable_medium = pos_data.pop('use_reusable_medium', None)
pos = pos_data['__instance']
pos._calculate_tax()
if simulate:
pos = WrappedModel(pos)
pos.id = 0
answers = []
for answ_data in answers_data:
options = answ_data.pop('options', [])
answ = WrappedModel(QuestionAnswer(**answ_data))
answ.options = WrappedList(options)
answers.append(answ)
pos.answers = answers
pos.pseudonymization_id = "PREVIEW"
pos.checkins = []
pos_map[pos.positionid] = pos
else:
if pos.voucher:
Voucher.objects.filter(pk=pos.voucher.pk).update(redeemed=F('redeemed') + 1)
pos.save()
seen_answers = set()
for answ_data in answers_data:
# Workaround for a pretixPOS bug :-(
if answ_data.get('question') in seen_answers:
continue
seen_answers.add(answ_data.get('question'))
options = answ_data.pop('options', [])
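# Answers containing an uploaded file are stored on the answer's file field and referenced
# via a file:// pseudo-URL in the answer text.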
if isinstance(answ_data['answer'], File):
an = answ_data.pop('answer')
answ = pos.answers.create(**answ_data, answer='')
answ.file.save(os.path.basename(an.name), an, save=False)
answ.answer = 'file://' + answ.file.name
answ.save()
else:
answ = pos.answers.create(**answ_data)
answ.options.add(*options)
if use_reusable_medium:
use_reusable_medium.linked_orderposition = pos
use_reusable_medium.save(update_fields=['linked_orderposition'])
use_reusable_medium.log_action(
'pretix.reusable_medium.linked_orderposition.changed',
data={
'by_order': order.code,
'linked_orderposition': pos.pk,
}
)
line_price = get_line_price(
price_after_voucher=price_after_voucher,
custom_price_input=None,
custom_price_input_is_net=False,
tax_rule=pos.item.tax_rule,
invoice_address=ia,
bundled_sum=Decimal('0.00'),
)
pos.price = line_price.gross
pos._auto_generated_price = True
else:
if pos.voucher:
if not pos.item.tax_rule or pos.item.tax_rule.price_includes_tax:
price_after_voucher = max(pos.price, pos.voucher.calculate_price(listed_price))
else:
price_after_voucher = max(pos.price - pos.tax_value, pos.voucher.calculate_price(listed_price))
else:
price_after_voucher = listed_price
pos._auto_generated_price = False
pos._voucher_discount = listed_price - price_after_voucher
if pos.voucher:
pos.voucher_budget_use = max(listed_price - price_after_voucher, Decimal('0.00'))
if not simulate:
for cp in delete_cps:
if cp.addon_to_id:
continue
cp.addons.all().delete()
cp.delete()
order_positions = [pos_data['__instance'] for pos_data in positions_data]
discount_results = apply_discounts(
self.context['event'],
order.sales_channel,
[
(cp.item_id, cp.subevent_id, cp.price, bool(cp.addon_to), cp.is_bundled, pos._voucher_discount)
for cp in order_positions
]
)
for cp, (new_price, discount) in zip(order_positions, discount_results):
if new_price != pos.price and pos._auto_generated_price:
pos.price = new_price
pos.discount = discount
# Save instances
for pos_data in positions_data:
answers_data = pos_data.pop('answers', [])
use_reusable_medium = pos_data.pop('use_reusable_medium', None)
pos = pos_data['__instance']
pos._calculate_tax()
if simulate:
pos = WrappedModel(pos)
pos.id = 0
answers = []
for answ_data in answers_data:
options = answ_data.pop('options', [])
answ = WrappedModel(QuestionAnswer(**answ_data))
answ.options = WrappedList(options)
answers.append(answ)
pos.answers = answers
pos.pseudonymization_id = "PREVIEW"
pos.checkins = []
pos_map[pos.positionid] = pos
else:
if pos.voucher:
Voucher.objects.filter(pk=pos.voucher.pk).update(redeemed=F('redeemed') + 1)
pos.save()
seen_answers = set()
for answ_data in answers_data:
# Workaround for a pretixPOS bug :-(
if answ_data.get('question') in seen_answers:
continue
seen_answers.add(answ_data.get('question'))
options = answ_data.pop('options', [])
if isinstance(answ_data['answer'], File):
an = answ_data.pop('answer')
answ = pos.answers.create(**answ_data, answer='')
answ.file.save(os.path.basename(an.name), an, save=False)
answ.answer = 'file://' + answ.file.name
answ.save()
else:
answ = pos.answers.create(**answ_data)
answ.options.add(*options)
if use_reusable_medium:
use_reusable_medium.linked_orderposition = pos
use_reusable_medium.save(update_fields=['linked_orderposition'])
use_reusable_medium.log_action(
'pretix.reusable_medium.linked_orderposition.changed',
data={
'by_order': order.code,
'linked_orderposition': pos.pk,
}
)
if not simulate:
for cp in delete_cps:
if cp.addon_to_id:
continue
cp.addons.all().delete()
cp.delete()
order.total = sum([p.price for p in pos_map.values()])
fees = []
@@ -1640,7 +1512,6 @@ class InlineInvoiceLineSerializer(I18nAwareModelSerializer):
class InvoiceSerializer(I18nAwareModelSerializer):
event = SlugRelatedField(slug_field='slug', read_only=True)
order = serializers.SlugRelatedField(slug_field='code', read_only=True)
refers = serializers.SlugRelatedField(slug_field='full_invoice_no', read_only=True)
lines = InlineInvoiceLineSerializer(many=True)
@@ -1649,7 +1520,7 @@ class InvoiceSerializer(I18nAwareModelSerializer):
class Meta:
model = Invoice
fields = ('event', 'order', 'number', 'is_cancellation', 'invoice_from', 'invoice_from_name', 'invoice_from_zipcode',
fields = ('order', 'number', 'is_cancellation', 'invoice_from', 'invoice_from_name', 'invoice_from_zipcode',
'invoice_from_city', 'invoice_from_country', 'invoice_from_tax_id', 'invoice_from_vat_id',
'invoice_to', 'invoice_to_company', 'invoice_to_name', 'invoice_to_street', 'invoice_to_zipcode',
'invoice_to_city', 'invoice_to_state', 'invoice_to_country', 'invoice_to_vat_id', 'invoice_to_beneficiary',

View File

@@ -94,14 +94,6 @@ class CustomerSerializer(I18nAwareModelSerializer):
data['name_parts']['_scheme'] = self.context['request'].organizer.settings.name_scheme
return data
def validate_email(self, value):
qs = Customer.objects.filter(organizer=self.context['organizer'], email__iexact=value)
if self.instance and self.instance.pk:
qs = qs.exclude(pk=self.instance.pk)
if qs.exists():
raise ValidationError(_("An account with this email address is already registered."))
return value
class CustomerCreateSerializer(CustomerSerializer):
send_email = serializers.BooleanField(default=False, required=False, allow_null=True)
@@ -400,9 +392,6 @@ class OrganizerSettingsSerializer(SettingsSerializer):
'reusable_media_type_nfc_uid',
'reusable_media_type_nfc_uid_autocreate_giftcard',
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
'reusable_media_type_nfc_mf0aes',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
]
def __init__(self, *args, **kwargs):

View File

@@ -24,16 +24,10 @@ from rest_framework.exceptions import ValidationError
from pretix.api.serializers.i18n import I18nAwareModelSerializer
from pretix.base.models import Seat, Voucher
from pretix.base.models.vouchers import generate_codes
class VoucherListSerializer(serializers.ListSerializer):
def create(self, validated_data):
vouchers_without_codes = [v for v in validated_data if not v.get('code')]
for voucher_data, code in zip(vouchers_without_codes, generate_codes(self.context['event'].organizer, num=len(vouchers_without_codes), prefix=None)):
voucher_data['code'] = code
codes = set()
seats = set()
errs = []
@@ -100,13 +94,8 @@ class VoucherSerializer(I18nAwareModelSerializer):
)
if check_quota:
Voucher.clean_quota_check(
full_data,
full_data.get('max_usages', 1) - (self.instance.redeemed if self.instance else 0),
self.instance,
self.context.get('event'),
full_data.get('quota'),
full_data.get('item'),
full_data.get('variation')
full_data, 1, self.instance, self.context.get('event'),
full_data.get('quota'), full_data.get('item'), full_data.get('variation')
)
Voucher.clean_voucher_code(full_data, self.context.get('event'), self.instance.pk if self.instance else None)

View File

@@ -61,9 +61,6 @@ orga_router.register(r'membershiptypes', organizer.MembershipTypeViewSet)
orga_router.register(r'reusablemedia', media.ReusableMediaViewSet)
orga_router.register(r'teams', organizer.TeamViewSet)
orga_router.register(r'devices', organizer.DeviceViewSet)
orga_router.register(r'orders', order.OrganizerOrderViewSet)
orga_router.register(r'invoices', order.InvoiceViewSet)
orga_router.register(r'scheduled_exports', exporters.ScheduledOrganizerExportViewSet)
orga_router.register(r'exporters', exporters.OrganizerExportersViewSet, basename='exporters')
team_router = routers.DefaultRouter()
@@ -80,7 +77,7 @@ event_router.register(r'questions', item.QuestionViewSet)
event_router.register(r'discounts', discount.DiscountViewSet)
event_router.register(r'quotas', item.QuotaViewSet)
event_router.register(r'vouchers', voucher.VoucherViewSet)
event_router.register(r'orders', order.EventOrderViewSet)
event_router.register(r'orders', order.OrderViewSet)
event_router.register(r'orderpositions', order.OrderPositionViewSet)
event_router.register(r'invoices', order.InvoiceViewSet)
event_router.register(r'revokedsecrets', order.RevokedSecretViewSet, basename='revokedsecrets')
@@ -89,7 +86,6 @@ event_router.register(r'taxrules', event.TaxRuleViewSet)
event_router.register(r'waitinglistentries', waitinglist.WaitingListViewSet)
event_router.register(r'checkinlists', checkin.CheckinListViewSet)
event_router.register(r'cartpositions', cart.CartPositionViewSet)
event_router.register(r'scheduled_exports', exporters.ScheduledEventExportViewSet)
event_router.register(r'exporters', exporters.EventExportersViewSet, basename='exporters')
event_router.register(r'shredders', shredders.EventShreddersViewSet, basename='shredders')
event_router.register(r'item_meta_properties', event.ItemMetaPropertiesViewSet)

View File

@@ -25,7 +25,6 @@ from typing import List
from django.db import transaction
from django.utils.crypto import get_random_string
from django.utils.functional import cached_property
from django.utils.timezone import now
from django.utils.translation import gettext as _
from rest_framework import status, viewsets
from rest_framework.decorators import action
@@ -42,7 +41,7 @@ from pretix.base.models import CartPosition
from pretix.base.services.cart import (
_get_quota_availability, _get_voucher_availability, error_messages,
)
from pretix.base.services.locking import lock_objects
from pretix.base.services.locking import NoLockManager
class CartPositionViewSet(CreateModelMixin, DestroyModelMixin, viewsets.ReadOnlyModelViewSet):
@@ -151,21 +150,12 @@ class CartPositionViewSet(CreateModelMixin, DestroyModelMixin, viewsets.ReadOnly
quota_diff[q] += 1
seats_seen = set()
now_dt = now()
with transaction.atomic():
full_lock_required = seat_diff and self.request.event.settings.seating_minimal_distance > 0
if full_lock_required:
# We lock the entire event in this case since we don't want to deal with fine-grained locking
# in the case of seating distance enforcement
lock_objects([self.request.event])
else:
lock_objects(
[q for q, d in quota_diff.items() if q.size is not None and d > 0] +
[v for v, d in voucher_use_diff.items() if d > 0] +
[s for s, d in seat_diff.items() if d > 0],
shared_lock_objects=[self.request.event]
)
lockfn = NoLockManager
if self._require_locking(quota_diff, voucher_use_diff, seat_diff):
lockfn = self.request.event.lock
with lockfn() as now_dt, transaction.atomic():
vouchers_ok, vouchers_depend_on_cart = _get_voucher_availability(
self.request.event,
voucher_use_diff,

View File

@@ -164,21 +164,8 @@ class CheckinListViewSet(viewsets.ModelViewSet):
secret=serializer.validated_data['raw_barcode']
).first()
clist = self.get_object()
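# The nonce makes redemption idempotent: if a check-in with the same nonce was already
# recorded, report success instead of storing a duplicate.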
if serializer.validated_data.get('nonce'):
if kwargs.get('position'):
prev = kwargs['position'].all_checkins.filter(nonce=serializer.validated_data['nonce']).first()
else:
prev = clist.checkins.filter(
nonce=serializer.validated_data['nonce'],
raw_barcode=serializer.validated_data['raw_barcode'],
).first()
if prev:
# Ignore because nonce is already handled
return Response(serializer.data, status=201)
c = serializer.save(
list=clist,
list=self.get_object(),
successful=False,
forced=True,
force_sent=True,
@@ -278,7 +265,6 @@ with scopes_disabled():
def __init__(self, *args, **kwargs):
self.checkinlist = kwargs.pop('checkinlist')
self.gate = kwargs.pop('gate')
super().__init__(*args, **kwargs)
def has_checkin_qs(self, queryset, name, value):
@@ -288,7 +274,7 @@ with scopes_disabled():
if not self.checkinlist.rules:
return queryset
return queryset.filter(
SQLLogic(self.checkinlist, self.gate).apply(self.checkinlist.rules)
SQLLogic(self.checkinlist).apply(self.checkinlist.rules)
).filter(
Q(valid_from__isnull=True) | Q(valid_from__lte=now()),
Q(valid_until__isnull=True) | Q(valid_until__gte=now()),
@@ -410,7 +396,7 @@ def _checkin_list_position_queryset(checkinlists, ignore_status=False, ignore_pr
def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force, checkin_type, ignore_unpaid, nonce,
untrusted_input, user, auth, expand, pdf_data, request, questions_supported, canceled_supported,
source_type='barcode', legacy_url_support=False, simulate=False, gate=None):
source_type='barcode', legacy_url_support=False, simulate=False):
if not checkinlists:
raise ValidationError('No check-in list passed.')
@@ -418,7 +404,7 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
prefetch_related_objects([cl for cl in checkinlists if not cl.all_products], 'limit_products')
device = auth if isinstance(auth, Device) else None
gate = gate or (auth.gate if isinstance(auth, Device) else None)
gate = auth.gate if isinstance(auth, Device) else None
context = {
'request': request,
@@ -536,7 +522,6 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
'reason': Checkin.REASON_ALREADY_REDEEMED,
'reason_explanation': None,
'require_attention': False,
'checkin_texts': [],
'__warning': 'Compatibility hack active due to detected old pretixSCAN version',
}, status=400)
except: # we don't care e.g. about invalid version numbers
@@ -548,7 +533,6 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
'reason': Checkin.REASON_INVALID,
'reason_explanation': None,
'require_attention': False,
'checkin_texts': [],
'list': MiniCheckinListSerializer(checkinlists[0]).data,
}, status=404)
elif revoked_matches and force:
@@ -578,7 +562,6 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
'reason': Checkin.REASON_REVOKED,
'reason_explanation': None,
'require_attention': False,
'checkin_texts': [],
'position': CheckinListOrderPositionSerializer(op, context=_make_context(context, revoked_matches[
0].event)).data,
'list': MiniCheckinListSerializer(list_by_event[revoked_matches[0].event_id]).data,
@@ -634,7 +617,6 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
'reason': Checkin.REASON_AMBIGUOUS,
'reason_explanation': None,
'require_attention': op.require_checkin_attention,
'checkin_texts': op.checkin_texts,
'position': CheckinListOrderPositionSerializer(op, context=_make_context(context, op.order.event)).data,
'list': MiniCheckinListSerializer(list_by_event[op.order.event_id]).data,
}, status=400)
@@ -677,13 +659,11 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
raw_source_type=source_type,
from_revoked_secret=from_revoked_secret,
simulate=simulate,
gate=gate,
)
except RequiredQuestionsError as e:
return Response({
'status': 'incomplete',
'require_attention': op.require_checkin_attention,
'checkin_texts': op.checkin_texts,
'position': CheckinListOrderPositionSerializer(op, context=_make_context(context, op.order.event)).data,
'questions': [
QuestionSerializer(q).data for q in e.questions
@@ -714,7 +694,6 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
'reason': e.code,
'reason_explanation': e.reason,
'require_attention': op.require_checkin_attention,
'checkin_texts': op.checkin_texts,
'position': CheckinListOrderPositionSerializer(op, context=_make_context(context, op.order.event)).data,
'list': MiniCheckinListSerializer(list_by_event[op.order.event_id]).data,
}, status=400)
@@ -722,7 +701,6 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
return Response({
'status': 'ok',
'require_attention': op.require_checkin_attention,
'checkin_texts': op.checkin_texts,
'position': CheckinListOrderPositionSerializer(op, context=_make_context(context, op.order.event)).data,
'list': MiniCheckinListSerializer(list_by_event[op.order.event_id]).data,
}, status=201)
@@ -779,7 +757,6 @@ class CheckinListPositionViewSet(viewsets.ReadOnlyModelViewSet):
def get_filterset_kwargs(self):
return {
'checkinlist': self.checkinlist,
'gate': self.request.auth.gate if isinstance(self.request.auth, Device) else None,
}
@cached_property

View File

@@ -19,12 +19,8 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
import base64
import logging
from cryptography.hazmat.backends.openssl.backend import Backend
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.serialization import load_pem_public_key
from django.db.models import Exists, OuterRef, Q
from django.db.models.functions import Coalesce
from django.utils.timezone import now
@@ -38,8 +34,6 @@ from pretix.api.auth.device import DeviceTokenAuthentication
from pretix.api.views.version import numeric_version
from pretix.base.models import CheckinList, Device, SubEvent
from pretix.base.models.devices import Gate, generate_api_token
from pretix.base.models.media import MediumKeySet
from pretix.base.services.media import get_keysets_for_organizer
logger = logging.getLogger(__name__)
@@ -53,17 +47,6 @@ class InitializationRequestSerializer(serializers.Serializer):
software_brand = serializers.CharField(max_length=190)
software_version = serializers.CharField(max_length=190)
info = serializers.JSONField(required=False, allow_null=True)
rsa_pubkey = serializers.CharField(required=False, allow_null=True)
def validate(self, attrs):
if attrs.get('rsa_pubkey'):
try:
load_pem_public_key(
attrs['rsa_pubkey'].encode(), Backend()
)
except:
raise ValidationError({'rsa_pubkey': ['Not a valid public key.']})
return attrs
class UpdateRequestSerializer(serializers.Serializer):
@@ -74,49 +57,6 @@ class UpdateRequestSerializer(serializers.Serializer):
software_brand = serializers.CharField(max_length=190)
software_version = serializers.CharField(max_length=190)
info = serializers.JSONField(required=False, allow_null=True)
rsa_pubkey = serializers.CharField(required=False, allow_null=True)
def validate(self, attrs):
if attrs.get('rsa_pubkey'):
try:
load_pem_public_key(
attrs['rsa_pubkey'].encode(), Backend()
)
except:
raise ValidationError({'rsa_pubkey': ['Not a valid public key.']})
return attrs
class RSAEncryptedField(serializers.Field):
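# Encrypts key material with the device's registered RSA public key (PKCS#1 v1.5), so
# presumably only the device holding the matching private key can read it.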
def to_representation(self, value):
if isinstance(value, memoryview):
value = value.tobytes()
public_key = load_pem_public_key(
self.context['device'].rsa_pubkey.encode(), Backend()
)
cipher_text = public_key.encrypt(
# RSA/ECB/PKCS1Padding
value,
padding.PKCS1v15()
)
return base64.b64encode(cipher_text).decode()
class MediumKeySetSerializer(serializers.ModelSerializer):
uid_key = RSAEncryptedField(read_only=True)
diversification_key = RSAEncryptedField(read_only=True)
organizer = serializers.SlugRelatedField(slug_field='slug', read_only=True)
class Meta:
model = MediumKeySet
fields = [
'public_id',
'organizer',
'active',
'media_type',
'uid_key',
'diversification_key',
]
class GateSerializer(serializers.ModelSerializer):
@@ -168,7 +108,6 @@ class InitializeView(APIView):
device.software_brand = serializer.validated_data.get('software_brand')
device.software_version = serializer.validated_data.get('software_version')
device.info = serializer.validated_data.get('info')
device.rsa_pubkey = serializer.validated_data.get('rsa_pubkey')
device.api_token = generate_api_token()
device.save()
@@ -191,11 +130,6 @@ class UpdateView(APIView):
device.os_version = serializer.validated_data.get('os_version')
device.software_brand = serializer.validated_data.get('software_brand')
device.software_version = serializer.validated_data.get('software_version')
if serializer.validated_data.get('rsa_pubkey') and serializer.validated_data.get('rsa_pubkey') != device.rsa_pubkey:
if device.rsa_pubkey:
raise ValidationError({'rsa_pubkey': ['You cannot change the rsa_pubkey of the device once it is set.']})
else:
device.rsa_pubkey = serializer.validated_data.get('rsa_pubkey')
device.info = serializer.validated_data.get('info')
device.save()
device.log_action('pretix.device.updated', data=serializer.validated_data, auth=device)
@@ -243,12 +177,8 @@ class InfoView(APIView):
'pretix': __version__,
'pretix_numeric': numeric_version(__version__),
}
},
'medium_key_sets': MediumKeySetSerializer(
get_keysets_for_organizer(device.organizer),
many=True,
context={'device': request.auth}
).data if device.rsa_pubkey else []
}
})

View File

@@ -254,7 +254,7 @@ class EventViewSet(viewsets.ModelViewSet):
new_event = serializer.save(organizer=self.request.organizer)
if copy_from:
new_event.copy_data_from(copy_from, skip_meta_data='meta_data' in serializer.validated_data)
new_event.copy_data_from(copy_from)
if plugins is not None:
new_event.set_active_plugins(plugins)
@@ -381,29 +381,16 @@ with scopes_disabled():
| Q(location__icontains=i18ncomp(value))
)
class OrganizerSubEventFilter(SubEventFilter):
def search_qs(self, queryset, name, value):
return queryset.filter(
Q(name__icontains=i18ncomp(value))
| Q(event__slug__icontains=value)
| Q(location__icontains=i18ncomp(value))
)
class SubEventViewSet(ConditionalListView, viewsets.ModelViewSet):
serializer_class = SubEventSerializer
queryset = SubEvent.objects.none()
write_permission = 'can_change_event_settings'
filter_backends = (DjangoFilterBackend, TotalOrderingFilter)
filterset_class = SubEventFilter
ordering = ('date_from',)
ordering_fields = ('id', 'date_from', 'last_modified')
@property
def filterset_class(self):
if getattr(self.request, 'event', None):
return SubEventFilter
return OrganizerSubEventFilter
def get_queryset(self):
if getattr(self.request, 'event', None):
qs = self.request.event.subevents
@@ -428,7 +415,6 @@ class SubEventViewSet(ConditionalListView, viewsets.ModelViewSet):
'subeventitem_set',
'subeventitemvariation_set',
'meta_values',
'meta_values__property',
Prefetch(
'seat_category_mappings',
to_attr='_seat_category_mappings',

View File

@@ -29,20 +29,14 @@ from django.utils.functional import cached_property
from django.utils.timezone import now
from rest_framework import status, viewsets
from rest_framework.decorators import action
from rest_framework.exceptions import PermissionDenied
from rest_framework.response import Response
from rest_framework.reverse import reverse
from pretix.api.pagination import TotalOrderingFilter
from pretix.api.serializers.exporters import (
ExporterSerializer, JobRunSerializer, ScheduledEventExportSerializer,
ScheduledOrganizerExportSerializer,
ExporterSerializer, JobRunSerializer,
)
from pretix.base.exporter import OrganizerLevelExportMixin
from pretix.base.models import (
CachedFile, Device, Event, ScheduledEventExport, ScheduledOrganizerExport,
TeamAPIToken,
)
from pretix.base.models import CachedFile, Device, Event, TeamAPIToken
from pretix.base.services.export import export, multiexport
from pretix.base.signals import (
register_data_exporters, register_multievent_data_exporters,
@@ -205,152 +199,3 @@ class OrganizerExportersViewSet(ExportersMixin, viewsets.ViewSet):
'provider': instance.identifier,
'form_data': data
})
class ScheduledExportersViewSet(viewsets.ModelViewSet):
filter_backends = (TotalOrderingFilter,)
ordering = ('id',)
ordering_fields = ('id', 'export_identifier', 'schedule_next_run')
class ScheduledEventExportViewSet(ScheduledExportersViewSet):
serializer_class = ScheduledEventExportSerializer
queryset = ScheduledEventExport.objects.none()
permission = 'can_view_orders'
def get_queryset(self):
perm_holder = self.request.auth if isinstance(self.request.auth, (TeamAPIToken, Device)) else self.request.user
if not perm_holder.has_event_permission(self.request.organizer, self.request.event, 'can_change_event_settings',
request=self.request):
if self.request.user.is_authenticated:
qs = self.request.event.scheduled_exports.filter(owner=self.request.user)
else:
raise PermissionDenied('Scheduled exports require either permission to change event settings or '
'user-specific API access.')
else:
qs = self.request.event.scheduled_exports
return qs.select_related("owner")
def perform_create(self, serializer):
if not self.request.user.is_authenticated:
raise PermissionDenied('Creation of exports requires user-specific API access.')
serializer.save(event=self.request.event, owner=self.request.user)
serializer.instance.compute_next_run()
serializer.instance.save(update_fields=["schedule_next_run"])
self.request.event.log_action(
'pretix.event.export.schedule.added',
user=self.request.user,
auth=self.request.auth,
data=self.request.data
)
def get_serializer_context(self):
ctx = super().get_serializer_context()
ctx['event'] = self.request.event
ctx['exporters'] = self.exporters
return ctx
@cached_property
def exporters(self):
responses = register_data_exporters.send(self.request.event)
exporters = [response(self.request.event, self.request.organizer) for r, response in responses if response]
return {e.identifier: e for e in exporters}
def perform_update(self, serializer):
serializer.save(event=self.request.event)
serializer.instance.compute_next_run()
serializer.instance.error_counter = 0
serializer.instance.error_last_message = None
serializer.instance.save(update_fields=["schedule_next_run", "error_counter", "error_last_message"])
self.request.event.log_action(
'pretix.event.export.schedule.changed',
user=self.request.user,
auth=self.request.auth,
data=self.request.data
)
def perform_destroy(self, instance):
self.request.event.log_action(
'pretix.event.export.schedule.deleted',
user=self.request.user,
auth=self.request.auth,
)
super().perform_destroy(instance)
class ScheduledOrganizerExportViewSet(ScheduledExportersViewSet):
serializer_class = ScheduledOrganizerExportSerializer
queryset = ScheduledOrganizerExport.objects.none()
permission = None
def get_queryset(self):
perm_holder = self.request.auth if isinstance(self.request.auth, (TeamAPIToken, Device)) else self.request.user
if not perm_holder.has_organizer_permission(self.request.organizer, 'can_change_organizer_settings',
request=self.request):
if self.request.user.is_authenticated:
qs = self.request.organizer.scheduled_exports.filter(owner=self.request.user)
else:
raise PermissionDenied('Scheduled exports require either permission to change organizer settings or '
'user-specific API access.')
else:
qs = self.request.organizer.scheduled_exports
return qs.select_related("owner")
def perform_create(self, serializer):
if not self.request.user.is_authenticated:
raise PermissionDenied('Creation of exports requires user-specific API access.')
serializer.save(organizer=self.request.organizer, owner=self.request.user)
serializer.instance.compute_next_run()
serializer.instance.save(update_fields=["schedule_next_run"])
self.request.organizer.log_action(
'pretix.organizer.export.schedule.added',
user=self.request.user,
auth=self.request.auth,
data=self.request.data
)
def get_serializer_context(self):
ctx = super().get_serializer_context()
ctx['organizer'] = self.request.organizer
ctx['exporters'] = self.exporters
return ctx
@cached_property
def events(self):
if isinstance(self.request.auth, (TeamAPIToken, Device)):
return self.request.auth.get_events_with_permission('can_view_orders')
elif self.request.user.is_authenticated:
return self.request.user.get_events_with_permission('can_view_orders', self.request).filter(
organizer=self.request.organizer
)
@cached_property
def exporters(self):
responses = register_multievent_data_exporters.send(self.request.organizer)
exporters = [
response(Event.objects.none() if issubclass(response, OrganizerLevelExportMixin) else self.events,
self.request.organizer)
for r, response in responses if response
]
return {e.identifier: e for e in exporters}
def perform_update(self, serializer):
serializer.save(organizer=self.request.organizer)
serializer.instance.compute_next_run()
serializer.instance.error_counter = 0
serializer.instance.error_last_message = None
serializer.instance.save(update_fields=["schedule_next_run", "error_counter", "error_last_message"])
self.request.organizer.log_action(
'pretix.organizer.export.schedule.changed',
user=self.request.user,
auth=self.request.auth,
data=self.request.data
)
def perform_destroy(self, instance):
self.request.organizer.log_action(
'pretix.organizer.export.schedule.deleted',
user=self.request.user,
auth=self.request.auth,
)
super().perform_destroy(instance)

View File

@@ -42,7 +42,7 @@ class IdempotencyQueryView(APIView):
idempotency_key = request.GET.get("key")
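# Idempotency keys are scoped per client by hashing the Authorization header together with
# the session cookie.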
auth_hash_parts = '{}:{}'.format(
request.headers.get('Authorization', ''),
request.COOKIES.get('__Host-' + settings.SESSION_COOKIE_NAME, request.COOKIES.get(settings.SESSION_COOKIE_NAME, ''))
request.COOKIES.get(settings.SESSION_COOKIE_NAME, '')
)
auth_hash = sha1(auth_hash_parts.encode()).hexdigest()
if not idempotency_key:

View File

@@ -104,12 +104,6 @@ class ReusableMediaViewSet(viewsets.ModelViewSet):
auth=self.request.auth,
data=merge_dicts(self.request.data, {'id': inst.pk})
)
mt = MEDIA_TYPES.get(serializer.validated_data["type"])
if mt:
m = mt.handle_new(self.request.organizer, inst, self.request.user, self.request.auth)
if m:
s = self.get_serializer(m)
return Response({"result": s.data})
@transaction.atomic()
def perform_update(self, serializer):

View File

@@ -26,7 +26,6 @@ from decimal import Decimal
from zoneinfo import ZoneInfo
import django_filters
from django.conf import settings
from django.db import transaction
from django.db.models import (
Exists, F, OuterRef, Prefetch, Q, Subquery, prefetch_related_objects,
@@ -45,7 +44,6 @@ from rest_framework.exceptions import (
APIException, NotFound, PermissionDenied, ValidationError,
)
from rest_framework.mixins import CreateModelMixin
from rest_framework.permissions import SAFE_METHODS
from rest_framework.response import Response
from pretix.api.models import OAuthAccessToken
@@ -110,24 +108,19 @@ with scopes_disabled():
item = django_filters.CharFilter(field_name='all_positions', lookup_expr='item_id', distinct=True)
variation = django_filters.CharFilter(field_name='all_positions', lookup_expr='variation_id', distinct=True)
subevent = django_filters.CharFilter(field_name='all_positions', lookup_expr='subevent_id', distinct=True)
customer = django_filters.CharFilter(field_name='customer__identifier')
class Meta:
model = Order
fields = ['code', 'status', 'email', 'locale', 'testmode', 'require_approval', 'customer']
fields = ['code', 'status', 'email', 'locale', 'testmode', 'require_approval']
@scopes_disabled()
def subevent_after_qs(self, qs, name, value):
if getattr(self.request, 'event', None):
subevents = self.request.event.subevents
else:
subevents = SubEvent.objects.filter(event__organizer=self.request.organizer)
qs = qs.filter(
pk__in=Subquery(
OrderPosition.all.filter(
subevent_id__in=subevents.filter(
subevent_id__in=SubEvent.objects.filter(
Q(date_to__gt=value) | Q(date_from__gt=value, date_to__isnull=True),
event=self.request.event
).values_list('id'),
).values_list('order_id')
)
@@ -135,16 +128,12 @@ with scopes_disabled():
return qs
def subevent_before_qs(self, qs, name, value):
if getattr(self.request, 'event', None):
subevents = self.request.event.subevents
else:
subevents = SubEvent.objects.filter(event__organizer=self.request.organizer)
qs = qs.filter(
pk__in=Subquery(
OrderPosition.all.filter(
subevent_id__in=subevents.filter(
subevent_id__in=SubEvent.objects.filter(
Q(date_from__lt=value),
event=self.request.event
).values_list('id'),
).values_list('order_id')
)
@@ -196,7 +185,7 @@ with scopes_disabled():
)
class OrderViewSetMixin:
class OrderViewSet(viewsets.ModelViewSet):
serializer_class = OrderSerializer
queryset = Order.objects.none()
filter_backends = (DjangoFilterBackend, TotalOrderingFilter)
@@ -204,12 +193,19 @@ class OrderViewSetMixin:
ordering_fields = ('datetime', 'code', 'status', 'last_modified')
filterset_class = OrderFilter
lookup_field = 'code'
permission = 'can_view_orders'
write_permission = 'can_change_orders'
def get_base_queryset(self):
raise NotImplementedError()
def get_serializer_context(self):
ctx = super().get_serializer_context()
ctx['event'] = self.request.event
ctx['pdf_data'] = self.request.query_params.get('pdf_data', 'false') == 'true'
ctx['exclude'] = self.request.query_params.getlist('exclude')
ctx['include'] = self.request.query_params.getlist('include')
return ctx
def get_queryset(self):
qs = self.get_base_queryset()
qs = self.request.event.orders
if 'fees' not in self.request.GET.getlist('exclude'):
if self.request.query_params.get('include_canceled_fees', 'false') == 'true':
fqs = OrderFee.all
@@ -222,8 +218,6 @@ class OrderViewSetMixin:
qs = qs.prefetch_related('refunds', 'refunds__payment')
if 'invoice_address' not in self.request.GET.getlist('exclude'):
qs = qs.select_related('invoice_address')
if 'customer' not in self.request.GET.getlist('exclude'):
qs = qs.select_related('customer')
qs = qs.prefetch_related(self._positions_prefetch(self.request))
return qs
@@ -233,12 +227,11 @@ class OrderViewSetMixin:
opq = OrderPosition.all
else:
opq = OrderPosition.objects
if request.query_params.get('pdf_data', 'false') == 'true' and getattr(request, 'event', None):
if request.query_params.get('pdf_data', 'false') == 'true':
prefetch_related_objects([request.organizer], 'meta_properties')
prefetch_related_objects(
[request.event],
Prefetch('meta_values', queryset=EventMetaValue.objects.select_related('property'),
to_attr='meta_values_cached'),
Prefetch('meta_values', queryset=EventMetaValue.objects.select_related('property'), to_attr='meta_values_cached'),
'questions',
'item_meta_properties',
)
@@ -273,12 +266,13 @@ class OrderViewSetMixin:
)
)
def get_serializer_context(self):
ctx = super().get_serializer_context()
ctx['exclude'] = self.request.query_params.getlist('exclude')
ctx['include'] = self.request.query_params.getlist('include')
ctx['pdf_data'] = False
return ctx
def _get_output_provider(self, identifier):
responses = register_ticket_outputs.send(self.request.event)
for receiver, response in responses:
prov = response(self.request.event)
if prov.identifier == identifier:
return prov
raise NotFound('Unknown output provider.')
@scopes_disabled() # we are sure enough that get_queryset() is correct, so we save some performance
def list(self, request, **kwargs):
@@ -295,45 +289,6 @@ class OrderViewSetMixin:
serializer = self.get_serializer(queryset, many=True)
return Response(serializer.data, headers={'X-Page-Generated': date})
class OrganizerOrderViewSet(OrderViewSetMixin, viewsets.ReadOnlyModelViewSet):
def get_base_queryset(self):
perm = "can_view_orders" if self.request.method in SAFE_METHODS else "can_change_orders"
if isinstance(self.request.auth, (TeamAPIToken, Device)):
return Order.objects.filter(
event__organizer=self.request.organizer,
event__in=self.request.auth.get_events_with_permission(perm, request=self.request)
)
elif self.request.user.is_authenticated:
return Order.objects.filter(
event__organizer=self.request.organizer,
event__in=self.request.user.get_events_with_permission(perm, request=self.request)
)
else:
raise PermissionDenied()
class EventOrderViewSet(OrderViewSetMixin, viewsets.ModelViewSet):
permission = 'can_view_orders'
write_permission = 'can_change_orders'
def get_serializer_context(self):
ctx = super().get_serializer_context()
ctx['event'] = self.request.event
ctx['pdf_data'] = self.request.query_params.get('pdf_data', 'false') == 'true'
return ctx
def get_base_queryset(self):
return self.request.event.orders
def _get_output_provider(self, identifier):
responses = register_ticket_outputs.send(self.request.event)
for receiver, response in responses:
prov = response(self.request.event)
if prov.identifier == identifier:
return prov
raise NotFound('Unknown output provider.')
@action(detail=True, url_name='download', url_path='download/(?P<output>[^/]+)')
def download(self, request, output, **kwargs):
provider = self._get_output_provider(output)
@@ -829,16 +784,6 @@ class EventOrderViewSet(OrderViewSetMixin, viewsets.ModelViewSet):
}
)
if 'checkin_text' in self.request.data and serializer.instance.checkin_text != self.request.data.get('checkin_text'):
serializer.instance.log_action(
'pretix.event.order.checkin_text',
user=self.request.user,
auth=self.request.auth,
data={
'new_value': self.request.data.get('checkin_text')
}
)
if 'valid_if_pending' in self.request.data and serializer.instance.valid_if_pending != self.request.data.get('valid_if_pending'):
serializer.instance.log_action(
'pretix.event.order.valid_if_pending',
@@ -1000,7 +945,6 @@ with scopes_disabled():
| Q(addon_to__attendee_email__icontains=value)
| Q(order__code__istartswith=value)
| Q(order__invoice_address__name_cached__icontains=value)
| Q(order__invoice_address__company__icontains=value)
| Q(order__email__icontains=value)
| Q(pk__in=matching_media)
)
@@ -1246,7 +1190,7 @@ class OrderPositionViewSet(viewsets.ModelViewSet):
ftype, ignored = mimetypes.guess_type(image_file.name)
extension = os.path.basename(image_file.name).split('.')[-1]
else:
img = Image.open(image_file, formats=settings.PILLOW_FORMATS_QUESTIONS_IMAGE)
img = Image.open(image_file)
ftype = Image.MIME[img.format]
extensions = {
'GIF': 'gif', 'TIFF': 'tif', 'BMP': 'bmp', 'JPEG': 'jpg', 'PNG': 'png'
@@ -1837,24 +1781,11 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
write_permission = 'can_change_orders'
def get_queryset(self):
perm = "can_view_orders" if self.request.method in SAFE_METHODS else "can_change_orders"
if getattr(self.request, 'event', None):
qs = self.request.event.invoices
elif isinstance(self.request.auth, (TeamAPIToken, Device)):
qs = Invoice.objects.filter(
event__organizer=self.request.organizer,
event__in=self.request.auth.get_events_with_permission(perm, request=self.request)
)
elif self.request.user.is_authenticated:
qs = Invoice.objects.filter(
event__organizer=self.request.organizer,
event__in=self.request.user.get_events_with_permission(perm, request=self.request)
)
return qs.prefetch_related('lines').select_related('order', 'refers').annotate(
return self.request.event.invoices.prefetch_related('lines').select_related('order', 'refers').annotate(
nr=Concat('prefix', 'invoice_no')
)
@action(detail=True)
@action(detail=True, )
def download(self, request, **kwargs):
invoice = self.get_object()
@@ -1873,7 +1804,7 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
return resp
@action(detail=True, methods=['POST'])
def regenerate(self, request, **kwargs):
def regenerate(self, request, **kwarts):
inv = self.get_object()
if inv.canceled:
raise ValidationError('The invoice has already been canceled.')
@@ -1883,7 +1814,7 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
raise PermissionDenied('The invoice file is no longer stored on the server.')
elif inv.sent_to_organizer:
raise PermissionDenied('The invoice file has already been exported.')
elif now().astimezone(inv.event.timezone).date() - inv.date > datetime.timedelta(days=1):
elif now().astimezone(self.request.event.timezone).date() - inv.date > datetime.timedelta(days=1):
raise PermissionDenied('The invoice file is too old to be regenerated.')
else:
inv = regenerate_invoice(inv)
@@ -1898,7 +1829,7 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
return Response(status=204)
@action(detail=True, methods=['POST'])
def reissue(self, request, **kwargs):
def reissue(self, request, **kwarts):
inv = self.get_object()
if inv.canceled:
raise ValidationError('The invoice has already been canceled.')

View File

@@ -24,8 +24,6 @@ from decimal import Decimal
import django_filters
from django.contrib.auth.hashers import make_password
from django.db import transaction
from django.db.models import OuterRef, Subquery, Sum
from django.db.models.functions import Coalesce
from django.shortcuts import get_object_or_404
from django.utils.functional import cached_property
from django_filters.rest_framework import DjangoFilterBackend, FilterSet
@@ -157,13 +155,8 @@ class GiftCardViewSet(viewsets.ModelViewSet):
qs = self.request.organizer.accepted_gift_cards
else:
qs = self.request.organizer.issued_gift_cards.all()
s = GiftCardTransaction.objects.filter(
card=OuterRef('pk')
).order_by().values('card').annotate(s=Sum('value')).values('s')
return qs.prefetch_related(
'issuer'
).annotate(
cached_value=Coalesce(Subquery(s), Decimal('0.00'))
)
def get_serializer_context(self):

View File

@@ -19,6 +19,8 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
import contextlib
from django.db import transaction
from django.db.models import F, Q
from django.utils.timezone import now
@@ -67,9 +69,30 @@ class VoucherViewSet(viewsets.ModelViewSet):
def get_queryset(self):
return self.request.event.vouchers.select_related('seat').all()
@transaction.atomic()
def _predict_quota_check(self, data, instance):
# This method predicts if Voucher.clean_quota_needs_checking
# *might* later require a quota check. It is only approximate
# and returns True a little too often. The point is to avoid
# locks when we know we won't need them.
if 'allow_ignore_quota' in data and data.get('allow_ignore_quota'):
return False
if instance and 'allow_ignore_quota' not in data and instance.allow_ignore_quota:
return False
if 'block_quota' in data and not data.get('block_quota'):
return False
if instance and 'block_quota' not in data and not instance.block_quota:
return False
return True
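# Used by create()/update()/batch_create() below to take the event lock only when the voucher
# could actually consume or block quota, avoiding unnecessary lock contention otherwise.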
def create(self, request, *args, **kwargs):
return super().create(request, *args, **kwargs)
if self._predict_quota_check(request.data, None):
lockfn = request.event.lock
else:
lockfn = contextlib.suppress # noop context manager
with lockfn():
return super().create(request, *args, **kwargs)
def perform_create(self, serializer):
serializer.save(event=self.request.event)
@@ -85,9 +108,13 @@ class VoucherViewSet(viewsets.ModelViewSet):
ctx['event'] = self.request.event
return ctx
@transaction.atomic()
def update(self, request, *args, **kwargs):
return super().update(request, *args, **kwargs)
if self._predict_quota_check(request.data, self.get_object()):
lockfn = request.event.lock
else:
lockfn = contextlib.suppress # noop context manager
with lockfn():
return super().update(request, *args, **kwargs)
def perform_update(self, serializer):
serializer.save(event=self.request.event)
@@ -113,18 +140,22 @@ class VoucherViewSet(viewsets.ModelViewSet):
super().perform_destroy(instance)
@action(detail=False, methods=['POST'])
@transaction.atomic()
def batch_create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data, many=True)
serializer.is_valid(raise_exception=True)
with transaction.atomic():
serializer.save(event=self.request.event)
for i, v in enumerate(serializer.instance):
v.log_action(
'pretix.voucher.added',
user=self.request.user,
auth=self.request.auth,
data=self.request.data[i]
)
if any(self._predict_quota_check(d, None) for d in request.data):
lockfn = request.event.lock
else:
lockfn = contextlib.suppress # noop context manager
with lockfn():
serializer = self.get_serializer(data=request.data, many=True)
serializer.is_valid(raise_exception=True)
with transaction.atomic():
serializer.save(event=self.request.event)
for i, v in enumerate(serializer.instance):
v.log_action(
'pretix.voucher.added',
user=self.request.user,
auth=self.request.auth,
data=self.request.data[i]
)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)

View File

@@ -202,21 +202,6 @@ class ParametrizedWaitingListEntryWebhookEvent(ParametrizedWebhookEvent):
}
class ParametrizedCustomerWebhookEvent(ParametrizedWebhookEvent):
def build_payload(self, logentry: LogEntry):
customer = logentry.content_object
if not customer:
return None
return {
'notification_id': logentry.pk,
'organizer': customer.organizer.slug,
'customer': customer.identifier,
'action': logentry.action_type,
}
@receiver(register_webhook_events, dispatch_uid="base_register_default_webhook_events")
def register_default_webhook_events(sender, **kwargs):
return (
@@ -365,18 +350,6 @@ def register_default_webhook_events(sender, **kwargs):
'pretix.event.orders.waitinglist.voucher_assigned',
_('Waiting list entry received voucher'),
),
ParametrizedCustomerWebhookEvent(
'pretix.customer.created',
_('Customer account created'),
),
ParametrizedCustomerWebhookEvent(
'pretix.customer.changed',
_('Customer account changed'),
),
ParametrizedCustomerWebhookEvent(
'pretix.customer.anonymized',
_('Customer account anonymized'),
),
)
@@ -384,7 +357,7 @@ def register_default_webhook_events(sender, **kwargs):
def notify_webhooks(logentry_ids: list):
if not isinstance(logentry_ids, list):
logentry_ids = [logentry_ids]
qs = LogEntry.all.select_related('event', 'event__organizer', 'organizer').filter(id__in=logentry_ids)
qs = LogEntry.all.select_related('event', 'event__organizer').filter(id__in=logentry_ids)
_org, _at, webhooks = None, None, None
for logentry in qs:
if not logentry.organizer:

View File

@@ -62,27 +62,27 @@ class NamespacedCache:
prefix = int(time.time())
self.cache.set(self.prefixkey, prefix)
def set(self, key: str, value: any, timeout: int=300):
def set(self, key: str, value: str, timeout: int=300):
return self.cache.set(self._prefix_key(key), value, timeout)
def get(self, key: str) -> any:
def get(self, key: str) -> str:
return self.cache.get(self._prefix_key(key, known_prefix=self._last_prefix))
def get_or_set(self, key: str, default: Callable, timeout=300) -> any:
def get_or_set(self, key: str, default: Callable, timeout=300) -> str:
return self.cache.get_or_set(
self._prefix_key(key, known_prefix=self._last_prefix),
default=default,
timeout=timeout
)
def get_many(self, keys: List[str]) -> Dict[str, any]:
def get_many(self, keys: List[str]) -> Dict[str, str]:
values = self.cache.get_many([self._prefix_key(key) for key in keys])
newvalues = {}
for k, v in values.items():
newvalues[self._strip_prefix(k)] = v
return newvalues
def set_many(self, values: Dict[str, any], timeout=300):
def set_many(self, values: Dict[str, str], timeout=300):
newvalues = {}
for k, v in values.items():
newvalues[self._prefix_key(k)] = v

View File

@@ -103,17 +103,15 @@ def get_all_sales_channels():
if _ALL_CHANNELS:
return _ALL_CHANNELS
channels = []
types = OrderedDict()
for recv, ret in register_sales_channels.send(None):
if isinstance(ret, (list, tuple)):
channels += ret
for r in ret:
types[r.identifier] = r
else:
channels.append(ret)
channels.sort(key=lambda c: c.identifier)
_ALL_CHANNELS = OrderedDict([(c.identifier, c) for c in channels])
if 'web' in _ALL_CHANNELS:
_ALL_CHANNELS.move_to_end('web', last=False)
return _ALL_CHANNELS
types[ret.identifier] = ret
_ALL_CHANNELS = types
return types
class WebshopSalesChannel(SalesChannel):

View File

@@ -134,11 +134,8 @@ class TemplateBasedMailRenderer(BaseHTMLMailRenderer):
def template_name(self):
raise NotImplementedError()
def compile_markdown(self, plaintext):
return markdown_compile_email(plaintext)
def render(self, plain_body: str, plain_signature: str, subject: str, order, position) -> str:
body_md = self.compile_markdown(plain_body)
body_md = markdown_compile_email(plain_body)
htmlctx = {
'site': settings.PRETIX_INSTANCE_NAME,
'site_url': settings.SITE_URL,
@@ -149,7 +146,6 @@ class TemplateBasedMailRenderer(BaseHTMLMailRenderer):
}
if self.organizer:
htmlctx['organizer'] = self.organizer
htmlctx['color'] = self.organizer.settings.primary_color
if self.event:
htmlctx['event'] = self.event
@@ -157,7 +153,7 @@ class TemplateBasedMailRenderer(BaseHTMLMailRenderer):
if plain_signature:
signature_md = plain_signature.replace('\n', '<br>\n')
signature_md = self.compile_markdown(signature_md)
signature_md = markdown_compile_email(signature_md)
htmlctx['signature'] = signature_md
if order:
@@ -670,11 +666,6 @@ def base_placeholders(sender, **kwargs):
lambda waiting_list_entry: concatenation_for_salutation(waiting_list_entry.name_parts),
_("Mr Doe"),
))
ph.append(SimpleFunctionalMailTextPlaceholder(
"name", ["waiting_list_entry"],
lambda waiting_list_entry: waiting_list_entry.name or "",
_("Mr Doe"),
))
ph.append(SimpleFunctionalMailTextPlaceholder(
"name_for_salutation", ["position_or_address"],
lambda position_or_address: concatenation_for_salutation(get_best_name(position_or_address, parts=True)),

View File

@@ -140,7 +140,7 @@ class BaseExporter:
"""
return {}
def render(self, form_data: dict) -> Tuple[str, str, Optional[bytes]]:
def render(self, form_data: dict) -> Tuple[str, str, bytes]:
"""
Render the exported file and return a tuple consisting of a filename, a file type
and file content.

View File

@@ -28,5 +28,4 @@ from .items import * # noqa
from .json import * # noqa
from .mail import * # noqa
from .orderlist import * # noqa
from .reusablemedia import * # noqa
from .waitinglist import * # noqa

View File

@@ -88,7 +88,6 @@ class ItemDataExporter(ListExporter):
_("Minimum amount per order"),
_("Maximum amount per order"),
_("Requires special attention"),
_("Check-in text"),
_("Original price"),
_("This product is a gift card"),
_("Require a valid membership"),
@@ -163,7 +162,6 @@ class ItemDataExporter(ListExporter):
i.min_per_order if i.min_per_order is not None else "",
i.max_per_order if i.max_per_order is not None else "",
_("Yes") if i.checkin_attention else "",
i.checkin_text or "",
v.original_price or i.original_price or "",
_("Yes") if i.issue_giftcard else "",
_("Yes") if i.require_membership or v.require_membership else "",
@@ -208,7 +206,6 @@ class ItemDataExporter(ListExporter):
i.min_per_order if i.min_per_order is not None else "",
i.max_per_order if i.max_per_order is not None else "",
_("Yes") if i.checkin_attention else "",
i.checkin_text or "",
i.original_price or "",
_("Yes") if i.issue_giftcard else "",
_("Yes") if i.require_membership else "",

View File

@@ -96,7 +96,6 @@ class JSONExporter(BaseExporter):
'min_per_order': item.min_per_order,
'max_per_order': item.max_per_order,
'checkin_attention': item.checkin_attention,
'checkin_text': item.checkin_text,
'original_price': item.original_price,
'issue_giftcard': item.issue_giftcard,
'meta_data': item.meta_data,
@@ -111,7 +110,6 @@ class JSONExporter(BaseExporter):
'description': str(variation.description),
'position': variation.position,
'checkin_attention': variation.checkin_attention,
'checkin_text': variation.checkin_text,
'require_approval': variation.require_approval,
'require_membership': variation.require_membership,
'sales_channels': variation.sales_channels,
@@ -166,7 +164,6 @@ class JSONExporter(BaseExporter):
'custom_followup_at': order.custom_followup_at,
'require_approval': order.require_approval,
'checkin_attention': order.checkin_attention,
'checkin_text': order.checkin_text,
'sales_channel': order.sales_channel,
'expires': order.expires,
'datetime': order.datetime,

View File

@@ -275,7 +275,6 @@ class OrderListExporter(MultiSheetListExporter):
headers.append(_('Invoice numbers'))
headers.append(_('Sales channel'))
headers.append(_('Requires special attention'))
headers.append(_('Check-in text'))
headers.append(_('Comment'))
headers.append(_('Follow-up date'))
headers.append(_('Positions'))
@@ -333,7 +332,7 @@ class OrderListExporter(MultiSheetListExporter):
self.event_object_cache[order.event_id].slug,
order.code,
order.total,
order.get_extended_status_display(),
order.get_status_display(),
order.email,
str(order.phone) if order.phone else '',
order.datetime.astimezone(tz).strftime('%Y-%m-%d'),
@@ -385,7 +384,6 @@ class OrderListExporter(MultiSheetListExporter):
row.append(order.invoice_numbers)
row.append(order.sales_channel)
row.append(_('Yes') if order.checkin_attention else _('No'))
row.append(order.checkin_text or "")
row.append(order.comment or "")
row.append(order.custom_followup_at.strftime("%Y-%m-%d") if order.custom_followup_at else "")
row.append(order.pcnt)
@@ -465,7 +463,7 @@ class OrderListExporter(MultiSheetListExporter):
row = [
self.event_object_cache[order.event_id].slug,
order.code,
_("canceled") if op.canceled else order.get_extended_status_display(),
_("canceled") if op.canceled else order.get_status_display(),
order.email,
str(order.phone) if order.phone else '',
order.datetime.astimezone(tz).strftime('%Y-%m-%d'),
@@ -551,9 +549,7 @@ class OrderListExporter(MultiSheetListExporter):
headers.append(_('End date'))
headers += [
_('Product'),
_('Product ID'),
_('Variation'),
_('Variation ID'),
_('Price'),
_('Tax rate'),
_('Tax rule'),
@@ -640,7 +636,7 @@ class OrderListExporter(MultiSheetListExporter):
self.event_object_cache[order.event_id].slug,
order.code,
op.positionid,
_("canceled") if op.canceled else order.get_extended_status_display(),
_("canceled") if op.canceled else order.get_status_display(),
order.email,
str(order.phone) if order.phone else '',
order.datetime.astimezone(tz).strftime('%Y-%m-%d'),
@@ -660,9 +656,7 @@ class OrderListExporter(MultiSheetListExporter):
row.append('')
row += [
str(op.item),
str(op.item_id),
str(op.variation) if op.variation else '',
str(op.variation_id) if op.variation_id else '',
op.price,
op.tax_rate,
str(op.tax_rule) if op.tax_rule else '',
@@ -1009,20 +1003,20 @@ class PaymentListExporter(ListExporter):
if form_data.get('end_date_range'):
dt_start, dt_end = resolve_timeframe_to_datetime_start_inclusive_end_exclusive(now(), form_data['end_date_range'], self.timezone)
if dt_start:
payments = payments.filter(payment_date__gte=dt_start)
refunds = refunds.filter(execution_date__gte=dt_start)
payments = payments.filter(created__gte=dt_start)
refunds = refunds.filter(created__gte=dt_start)
if dt_end:
payments = payments.filter(payment_date__lt=dt_end)
refunds = refunds.filter(execution_date__lt=dt_end)
payments = payments.filter(created__lt=dt_end)
refunds = refunds.filter(created__lt=dt_end)
if form_data.get('start_end_date_range'):
dt_start, dt_end = resolve_timeframe_to_datetime_start_inclusive_end_exclusive(now(), form_data['start_date_range'], self.timezone)
if dt_start:
payments = payments.filter(created__gte=dt_start)
refunds = refunds.filter(created__gte=dt_start)
payments = payments.filter(payment_date__gte=dt_start)
refunds = refunds.filter(execution_date__gte=dt_start)
if dt_end:
payments = payments.filter(created__lt=dt_end)
refunds = refunds.filter(created__lt=dt_end)
payments = payments.filter(payment_date__lt=dt_end)
refunds = refunds.filter(execution_date__lt=dt_end)
objs = sorted(list(payments) + list(refunds), key=lambda o: o.created)
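Note: the resolver used above returns an inclusive start and an exclusive end, so adjacent timeframes neither overlap nor leave gaps. A hedged helper illustrating the __gte/__lt pairing; the pretix.base.timeframes import path and the field default are assumptions:

from django.utils.timezone import now

from pretix.base.timeframes import (
    resolve_timeframe_to_datetime_start_inclusive_end_exclusive,
)


def filter_by_timeframe(qs, timeframe, tz, field="payment_date"):
    dt_start, dt_end = resolve_timeframe_to_datetime_start_inclusive_end_exclusive(
        now(), timeframe, tz
    )
    if dt_start:
        qs = qs.filter(**{f"{field}__gte": dt_start})  # start is inclusive
    if dt_end:
        qs = qs.filter(**{f"{field}__lt": dt_end})     # end is exclusive
    return qs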

View File

@@ -1,78 +0,0 @@
#
# This file is part of pretix (Community Edition).
#
# Copyright (C) 2014-2020 Raphael Michel and contributors
# Copyright (C) 2020-2021 rami.io GmbH and contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation in version 3 of the License.
#
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
# this file, see <https://pretix.eu/about/en/license>.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
from django.dispatch import receiver
from django.utils.formats import date_format
from django.utils.translation import gettext_lazy as _, pgettext, pgettext_lazy
from ..exporter import ListExporter, OrganizerLevelExportMixin
from ..models import ReusableMedium
from ..signals import register_multievent_data_exporters
class ReusableMediaExporter(OrganizerLevelExportMixin, ListExporter):
identifier = 'reusablemedia'
verbose_name = _('Reusable media')
category = pgettext_lazy('export_category', 'Reusable media')
description = _('Download a spreadsheet with the data of all reusable media on your account.')
def iterate_list(self, form_data):
media = ReusableMedium.objects.filter(
organizer=self.organizer,
).select_related(
'customer', 'linked_orderposition', 'linked_giftcard',
).order_by('created')
headers = [
pgettext('reusable_medium', 'Media type'),
pgettext('reusable_medium', 'Identifier'),
_('Active'),
_('Expiration date'),
_('Customer account'),
_('Linked ticket'),
_('Linked gift card'),
_('Notes'),
]
yield headers
yield self.ProgressSetTotal(total=media.count())
for medium in media.iterator(chunk_size=1000):
row = [
medium.type,
medium.identifier,
_('Yes') if medium.active else _('No'),
date_format(medium.expires, 'SHORT_DATETIME_FORMAT') if medium.expires else '',
medium.customer.identifier if medium.customer_id else '',
f"{medium.linked_orderposition.order.code}-{medium.linked_orderposition.positionid}" if medium.linked_orderposition_id else '',
medium.linked_giftcard.secret if medium.linked_giftcard_id else '',
medium.notes,
]
yield row
def get_filename(self):
return f'{self.organizer.slug}_media'
@receiver(register_multievent_data_exporters, dispatch_uid="multiexporter_reusablemedia")
def register_multievent_i_reusable_media_exporter(sender, **kwargs):
return ReusableMediaExporter

View File

@@ -125,7 +125,7 @@ class NamePartsWidget(forms.MultiWidget):
if fname == 'title' and self.titles:
widgets.append(Select(attrs=a, choices=[('', '')] + [(d, d) for d in self.titles[1]]))
elif fname == 'salutation':
widgets.append(Select(attrs=a, choices=[('', '---'), ('empty', '')] + PERSON_NAME_SALUTATIONS))
widgets.append(Select(attrs=a, choices=[('', '---')] + PERSON_NAME_SALUTATIONS))
else:
widgets.append(self.widget(attrs=a))
super().__init__(widgets, attrs)
@@ -136,10 +136,7 @@ class NamePartsWidget(forms.MultiWidget):
data = []
for i, field in enumerate(self.scheme['fields']):
fname, label, size = field
fval = value.get(fname, "")
if fname == "salutation" and fname in value and fval == "":
fval = "empty"
data.append(fval)
data.append(value.get(fname, ""))
if '_legacy' in value and not data[-1]:
data[-1] = value.get('_legacy', '')
elif not any(d for d in data) and '_scheme' in value:
@@ -193,8 +190,7 @@ class NamePartsFormField(forms.MultiValueField):
data = {}
data['_scheme'] = self.scheme_name
for i, value in enumerate(data_list):
key = self.scheme['fields'][i][0]
data[key] = value or ''
data[self.scheme['fields'][i][0]] = value or ''
return data
def __init__(self, *args, **kwargs):
@@ -243,7 +239,7 @@ class NamePartsFormField(forms.MultiValueField):
d.pop('validators', None)
field = forms.ChoiceField(
**d,
choices=[('', '---'), ('empty', '')] + PERSON_NAME_SALUTATIONS
choices=[('', '---')] + PERSON_NAME_SALUTATIONS
)
else:
field = forms.CharField(**defaults)
@@ -269,9 +265,6 @@ class NamePartsFormField(forms.MultiValueField):
if sum(len(v) for v in value.values() if v) > 250:
raise forms.ValidationError(_('Please enter a shorter name.'), code='max_length')
if value.get("salutation") == "empty":
value["salutation"] = ""
return value
@@ -507,14 +500,14 @@ class PortraitImageField(SizeValidationMixin, ExtValidationMixin, forms.FileFiel
file = BytesIO(data['content'])
try:
image = Image.open(file, formats=settings.PILLOW_FORMATS_QUESTIONS_IMAGE)
image = Image.open(file)
# verify() must be called immediately after the constructor.
image.verify()
# We want to do more than just verify(), so we need to re-open the file
if hasattr(file, 'seek'):
file.seek(0)
image = Image.open(file, formats=settings.PILLOW_FORMATS_QUESTIONS_IMAGE)
image = Image.open(file)
# load() is a potential DoS vector (see Django bug #18520), so we verify the size first
if image.width > 10_000 or image.height > 10_000:
@@ -573,7 +566,7 @@ class PortraitImageField(SizeValidationMixin, ExtValidationMixin, forms.FileFiel
return f
def __init__(self, *args, **kwargs):
kwargs.setdefault('ext_whitelist', settings.FILE_UPLOAD_EXTENSIONS_QUESTION_IMAGE)
kwargs.setdefault('ext_whitelist', (".png", ".jpg", ".jpeg", ".jfif", ".tif", ".tiff", ".bmp"))
kwargs.setdefault('max_size', settings.FILE_UPLOAD_MAX_SIZE_IMAGE)
super().__init__(*args, **kwargs)
@@ -833,7 +826,11 @@ class BaseQuestionsForm(forms.Form):
help_text=help_text,
initial=initial.file if initial else None,
widget=UploadedFileWidget(position=pos, event=event, answer=initial),
ext_whitelist=settings.FILE_UPLOAD_EXTENSIONS_OTHER,
ext_whitelist=(
".png", ".jpg", ".gif", ".jpeg", ".pdf", ".txt", ".docx", ".gif", ".svg",
".pptx", ".ppt", ".doc", ".xlsx", ".xls", ".jfif", ".heic", ".heif", ".pages",
".bmp", ".tif", ".tiff"
),
max_size=settings.FILE_UPLOAD_MAX_SIZE_OTHER,
)
elif q.type == Question.TYPE_DATE:
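Note: the PortraitImageField change above restricts Pillow to a format whitelist and keeps the verify-then-reopen-then-size-check order. A hedged standalone sketch of that validation pattern; the ALLOWED_FORMATS list is illustrative (pretix reads it from settings):

from io import BytesIO

from PIL import Image

ALLOWED_FORMATS = ["PNG", "JPEG"]  # assumption; configured via settings in pretix


def validate_image(data: bytes) -> Image.Image:
    buf = BytesIO(data)
    img = Image.open(buf, formats=ALLOWED_FORMATS)
    img.verify()                  # cheap structural check, must run first
    buf.seek(0)                   # verify() invalidates the parser, so re-open
    img = Image.open(buf, formats=ALLOWED_FORMATS)
    if img.width > 10_000 or img.height > 10_000:
        # check declared dimensions before decoding to avoid decompression bombs
        raise ValueError("Image dimensions too large")
    img.load()
    return img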

View File

@@ -60,18 +60,6 @@ def replace_arabic_numbers(inp):
return inp.translate(table)
def format_placeholders_help_text(placeholders, event=None):
placeholders = [(k, v.render_sample(event) if event else v) for k, v in placeholders.items()]
placeholders.sort(key=lambda x: x[0])
phs = [
'<button type="button" class="content-placeholder" title="%s">{%s}</button>' % (_("Sample: %s") % v if v else "", k)
for k, v in placeholders
]
return _('Available placeholders: {list}').format(
list=' '.join(phs)
)
class DatePickerWidget(forms.DateInput):
def __init__(self, attrs=None, date_format=None):
attrs = attrs or {}
@@ -209,10 +197,7 @@ class SplitDateTimePickerWidget(forms.SplitDateTimeWidget):
date_attrs['placeholder'] = lazy(date_placeholder, str)
time_attrs['placeholder'] = lazy(time_placeholder, str)
date_attrs['aria-label'] = _('Date')
time_attrs['aria-label'] = _('Time')
if 'aria-label' in attrs:
del attrs['aria-label']
widgets = (
forms.DateInput(attrs=date_attrs, format=date_format),
forms.TimeInput(attrs=time_attrs, format=time_format),

View File

@@ -104,7 +104,7 @@ class Command(BaseCommand):
with language(locale), override(timezone):
for receiver, response in signal_result:
if not response:
continue
return None
ex = response(e, o, report_status)
if ex.identifier == options['export_provider']:
params = json.loads(options.get('parameters') or '{}')
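Note: the hunk above replaces a premature return None with continue, so one receiver returning nothing no longer aborts the search for the requested export provider. A hedged sketch of the corrected lookup loop; function and argument names are illustrative:

def find_exporter(signal_result, event, organizer, report_status, wanted_identifier):
    for receiver_fn, response in signal_result:
        if not response:
            continue  # skip empty responses instead of stopping the search
        exporter = response(event, organizer, report_status)
        if exporter.identifier == wanted_identifier:
            return exporter
    return None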

View File

@@ -49,9 +49,6 @@ class BaseMediaType:
def handle_unknown(self, organizer, identifier, user, auth):
pass
def handle_new(self, organizer, medium, user, auth):
pass
def __str__(self):
return str(self.verbose_name)
@@ -111,43 +108,9 @@ class NfcUidMediaType(BaseMediaType):
return m
class NfcMf0aesMediaType(BaseMediaType):
identifier = 'nfc_mf0aes'
verbose_name = 'NFC Mifare Ultralight AES'
medium_created_by_server = False
supports_giftcard = True
supports_orderposition = False
def handle_new(self, organizer, medium, user, auth):
from pretix.base.models import GiftCard
if organizer.settings.get(f'reusable_media_type_{self.identifier}_autocreate_giftcard', as_type=bool):
with transaction.atomic():
gc = GiftCard.objects.create(
issuer=organizer,
expires=organizer.default_gift_card_expiry,
currency=organizer.settings.get(f'reusable_media_type_{self.identifier}_autocreate_giftcard_currency'),
)
medium.linked_giftcard = gc
medium.save()
medium.log_action(
'pretix.reusable_medium.linked_giftcard.changed',
user=user, auth=auth,
data={
'linked_giftcard': gc.pk
}
)
gc.log_action(
'pretix.giftcards.created',
user=user, auth=auth,
)
return medium
MEDIA_TYPES = {
m.identifier: m for m in [
BarcodePlainMediaType(),
NfcUidMediaType(),
NfcMf0aesMediaType(),
]
}
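Note: a hedged usage sketch of the MEDIA_TYPES registry above; the function name is illustrative, and handle_new() is the hook the removed NfcMf0aesMediaType used to auto-create gift cards:

def on_medium_created(organizer, type_identifier, medium, user=None, auth=None):
    media_type = MEDIA_TYPES.get(type_identifier)
    if media_type is None:
        raise ValueError(f"Unknown media type {type_identifier!r}")
    # handle_new() may e.g. link an auto-created gift card to the medium
    return media_type.handle_new(organizer, medium, user, auth)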

View File

@@ -264,7 +264,7 @@ def metric_values():
# Metrics from redis
if settings.HAS_REDIS:
for key, value in redis.hscan_iter(REDIS_KEY, count=1000):
for key, value in redis.hscan_iter(REDIS_KEY):
dkey = key.decode("utf-8")
splitted = dkey.split("{", 2)
value = float(value.decode("utf-8"))
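Note: keys in that Redis hash look like name{label="value",...}, and the code above splits on the first { to recover the metric name. A hedged standalone sketch; the connection setup is an assumption, and count= only sizes the HSCAN batches rather than capping the results:

import redis

r = redis.Redis()  # assumption: pretix builds this connection from settings


def read_metrics(redis_key):
    metrics = {}
    for key, value in r.hscan_iter(redis_key, count=1000):
        dkey = key.decode("utf-8")      # e.g. 'request_duration_seconds{method="GET"}'
        name = dkey.split("{", 1)[0]    # metric name without labels
        metrics.setdefault(name, {})[dkey] = float(value.decode("utf-8"))
    return metrics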

View File

@@ -26,7 +26,7 @@ from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
from django.conf import settings
from django.http import Http404, HttpRequest, HttpResponse
from django.middleware.common import CommonMiddleware
from django.urls import get_script_prefix, resolve
from django.urls import get_script_prefix
from django.utils import timezone, translation
from django.utils.cache import patch_vary_headers
from django.utils.deprecation import MiddlewareMixin
@@ -44,25 +44,6 @@ from pretix.multidomain.urlreverse import (
_supported = None
def get_supported_language(requested_language, allowed_languages, default_language):
language = requested_language
if language not in allowed_languages:
firstpart = language.split('-')[0]
if firstpart in allowed_languages:
language = firstpart
else:
language = default_language
for lang in allowed_languages:
if lang.startswith(firstpart + '-'):
language = lang
break
if language not in allowed_languages:
# This seems redundant, but can happen in the rare edge case that settings.locale is (wrongfully)
# not part of settings.locales
language = allowed_languages[0]
return language
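Note: a few worked cases of the fallback order the helper above implements (exact match, bare language code, any regional variant, configured default, first allowed locale); the locale lists are illustrative:

assert get_supported_language("de-at", ["de", "en"], "en") == "de"
assert get_supported_language("pt", ["pt-br", "en"], "en") == "pt-br"
assert get_supported_language("fr", ["de", "en"], "de") == "de"
assert get_supported_language("fr", ["de", "en"], "nl") == "de"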
class LocaleMiddleware(MiddlewareMixin):
"""
@@ -84,11 +65,20 @@ class LocaleMiddleware(MiddlewareMixin):
settings_holder = None
if settings_holder:
language = get_supported_language(
language,
settings_holder.settings.locales,
settings_holder.settings.locale,
)
if language not in settings_holder.settings.locales:
firstpart = language.split('-')[0]
if firstpart in settings_holder.settings.locales:
language = firstpart
else:
language = settings_holder.settings.locale
for lang in settings_holder.settings.locales:
if lang.startswith(firstpart + '-'):
language = lang
break
if language not in settings_holder.settings.locales:
# This seems redundant, but can happen in the rare edge case that settings.locale is (wrongfully)
# not part of settings.locales
language = settings_holder.settings.locales[0]
if '-' not in language and settings_holder.settings.region:
language += '-' + settings_holder.settings.region
else:
@@ -240,8 +230,6 @@ class SecurityMiddleware(MiddlewareMixin):
)
def process_response(self, request, resp):
url = resolve(request.path_info)
if settings.DEBUG and resp.status_code >= 400:
# Don't use CSP on debug error page as it breaks Django's fancy error
# pages
@@ -261,28 +249,20 @@ class SecurityMiddleware(MiddlewareMixin):
h = {
'default-src': ["{static}"],
'script-src': ['{static}'],
'script-src': ['{static}', 'https://checkout.stripe.com', 'https://js.stripe.com'],
'object-src': ["'none'"],
'frame-src': ['{static}'],
'frame-src': ['{static}', 'https://checkout.stripe.com', 'https://js.stripe.com'],
'style-src': ["{static}", "{media}"],
'connect-src': ["{dynamic}", "{media}"],
'img-src': ["{static}", "{media}", "data:"] + img_src,
'connect-src': ["{dynamic}", "{media}", "https://checkout.stripe.com"],
'img-src': ["{static}", "{media}", "data:", "https://*.stripe.com"] + img_src,
'font-src': ["{static}"],
'media-src': ["{static}", "data:"],
# form-action is not only used to match on form actions, but also on URLs
# form-actions redirect to. In the context of e.g. payment providers or
# single-sign-on this can be nearly anything, so we cannot really restrict
# single-sign-on this can be nearly anything so we cannot really restrict
# this. However, we'll restrict it to HTTPS.
'form-action': ["{dynamic}", "https:"] + (['http:'] if settings.SITE_URL.startswith('http://') else []),
}
# Only include pay.google.com for wallet detection purposes on the Payment selection page
if (
url.url_name == "event.order.pay.change" or
(url.url_name == "event.checkout" and url.kwargs['step'] == "payment")
):
h['script-src'].append('https://pay.google.com')
h['frame-src'].append('https://pay.google.com')
h['connect-src'].append('https://google.com/pay')
if settings.LOG_CSP:
h['report-uri'] = ["/csp_report/"]
if 'Content-Security-Policy' in resp:
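Note: a hedged sketch of how a directive dict like the one above can be serialized into a Content-Security-Policy header value; the placeholder substitution for {static}/{dynamic}/{media} is simplified to a plain format() call and the origins are illustrative:

def render_csp(directives, origins):
    parts = []
    for directive, sources in directives.items():
        rendered = " ".join(s.format(**origins) for s in sources)
        parts.append(f"{directive} {rendered}")
    return "; ".join(parts)


header = render_csp(
    {"default-src": ["{static}"], "script-src": ["{static}", "https://pay.google.com"]},
    {"static": "'self'", "dynamic": "'self'", "media": "'self'"},
)
# -> "default-src 'self'; script-src 'self' https://pay.google.com"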

View File

@@ -10,7 +10,6 @@ def initial_user(apps, schema_editor):
user = User(email='admin@localhost')
user.is_staff = True
user.is_superuser = True
user.needs_password_change = True
user.password = make_password('admin')
user.save()

View File

@@ -1,35 +0,0 @@
# Generated by Django 3.2.18 on 2023-05-17 11:32
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pretixbase', '0243_device_os_name_and_os_version'),
]
operations = [
migrations.AddField(
model_name='device',
name='rsa_pubkey',
field=models.TextField(null=True),
),
migrations.CreateModel(
name='MediumKeySet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
('public_id', models.BigIntegerField(unique=True)),
('media_type', models.CharField(max_length=100)),
('active', models.BooleanField(default=True)),
('uid_key', models.BinaryField()),
('diversification_key', models.BinaryField()),
('organizer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='medium_key_sets', to='pretixbase.organizer')),
],
),
migrations.AddConstraint(
model_name='mediumkeyset',
constraint=models.UniqueConstraint(condition=models.Q(('active', True)), fields=('organizer', 'media_type'), name='keyset_unique_active'),
),
]

View File

@@ -1,34 +0,0 @@
# Generated by Django 4.2.4 on 2023-08-28 12:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("pretixbase", "0244_mediumkeyset"),
]
operations = [
migrations.AddField(
model_name="discount",
name="benefit_apply_to_addons",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name="discount",
name="benefit_ignore_voucher_discounted",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="discount",
name="benefit_limit_products",
field=models.ManyToManyField(
related_name="benefit_discounts", to="pretixbase.item"
),
),
migrations.AddField(
model_name="discount",
name="benefit_same_products",
field=models.BooleanField(default=True),
),
]

View File

@@ -1,509 +0,0 @@
# Generated by Django 4.2.4 on 2023-09-26 12:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("pretixbase", "0245_discount_benefit_products"),
]
operations = [
migrations.RenameIndex(
model_name="logentry",
new_name="pretixbase__datetim_b1fe5a_idx",
old_fields=("datetime", "id"),
),
migrations.RenameIndex(
model_name="order",
new_name="pretixbase__datetim_66aff0_idx",
old_fields=("datetime", "id"),
),
migrations.RenameIndex(
model_name="order",
new_name="pretixbase__last_mo_4ebf8b_idx",
old_fields=("last_modified", "id"),
),
migrations.RenameIndex(
model_name="reusablemedium",
new_name="pretixbase__updated_093277_idx",
old_fields=("updated", "id"),
),
migrations.RenameIndex(
model_name="transaction",
new_name="pretixbase__datetim_b20405_idx",
old_fields=("datetime", "id"),
),
migrations.AlterField(
model_name="attendeeprofile",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="blockedticketsecret",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="cachedcombinedticket",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="cachedticket",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="cancellationrequest",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="cartposition",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="checkin",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="checkinlist",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="customer",
name="locale",
field=models.CharField(default="de", max_length=50),
),
migrations.AlterField(
model_name="device",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="discount",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="event",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="event_settingsstore",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="eventfooterlink",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="eventmetaproperty",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="eventmetavalue",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="exchangerate",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="gate",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="giftcard",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="giftcardacceptance",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="giftcardtransaction",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="globalsettingsobject_settingsstore",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="invoice",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="invoiceaddress",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="invoiceline",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="item",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="itemaddon",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="itembundle",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="itemcategory",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="itemmetaproperty",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="itemmetavalue",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="itemvariation",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="itemvariationmetavalue",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="logentry",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="mediumkeyset",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="notificationsetting",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="order",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="orderfee",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="orderpayment",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="orderposition",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="orderrefund",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="organizer",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="organizer_settingsstore",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="organizerfooterlink",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="question",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="questionanswer",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="questionoption",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="quota",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="revokedticketsecret",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="seat",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="seatcategorymapping",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="seatingplan",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="staffsession",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="staffsessionauditlog",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="subevent",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="subeventitem",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="subeventitemvariation",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="subeventmetavalue",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="taxrule",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="team",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="teamapitoken",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="teaminvite",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="u2fdevice",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="user",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="user",
name="locale",
field=models.CharField(default="de", max_length=50),
),
migrations.AlterField(
model_name="voucher",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="waitinglistentry",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
migrations.AlterField(
model_name="webauthndevice",
name="id",
field=models.BigAutoField(
auto_created=True, primary_key=True, serialize=False
),
),
]
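Note: bulk AlterField migrations to BigAutoField like the one above are what makemigrations emits once the project's default primary-key type changes; a hedged sketch of the per-app setting that triggers it (class and app name are assumptions, not taken from this diff):

from django.apps import AppConfig


class PretixBaseConfig(AppConfig):
    name = "pretix.base"
    # Newly created models get 64-bit ids; existing models show up as
    # AlterField operations the next time makemigrations runs.
    default_auto_field = "django.db.models.BigAutoField"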

View File

@@ -1,22 +0,0 @@
# Generated by Django 4.2.4 on 2023-09-06 11:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("pretixbase", "0246_bigint"),
]
operations = [
migrations.AddField(
model_name="checkinlist",
name="consider_tickets_used",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name="checkinlist",
name="ignore_in_statistics",
field=models.BooleanField(default=False),
),
]

View File

@@ -1,22 +0,0 @@
# Generated by Django 4.2.4 on 2023-10-25 12:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("pretixbase", "0247_checkinlist"),
]
operations = [
migrations.AddField(
model_name="item",
name="free_price_suggestion",
field=models.DecimalField(decimal_places=2, max_digits=13, null=True),
),
migrations.AddField(
model_name="itemvariation",
name="free_price_suggestion",
field=models.DecimalField(decimal_places=2, max_digits=13, null=True),
),
]

View File

@@ -1,22 +0,0 @@
# Generated by Django 4.2.4 on 2023-10-30 11:50
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("pretixbase", "0248_item_free_price_suggestion"),
]
operations = [
migrations.AddField(
model_name="item",
name="hidden_if_item_available",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="pretixbase.item",
),
),
]

Some files were not shown because too many files have changed in this diff.