forked from CGM_Public/pretix_original
Compare commits
310 Commits
hide-empty
...
customer-w
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ffd0612277 | ||
|
|
53e84dfb08 | ||
|
|
2e8447486c | ||
|
|
5b184bb1a0 | ||
|
|
8c6f0a5dc1 | ||
|
|
6a53091b91 | ||
|
|
be4bc9a6f3 | ||
|
|
efb1141d59 | ||
|
|
322a730eb2 | ||
|
|
8d2224e725 | ||
|
|
5b819b76f0 | ||
|
|
5d90a42acf | ||
|
|
5398671fde | ||
|
|
f7d4460deb | ||
|
|
f76576a587 | ||
|
|
cf5f0dc7f9 | ||
|
|
567984bd5e | ||
|
|
1c6bd46d21 | ||
|
|
9ba3227837 | ||
|
|
21864885cb | ||
|
|
38173e3a54 | ||
|
|
4baf317934 | ||
|
|
c2b25bad06 | ||
|
|
9e3ad6c05c | ||
|
|
f017de1a21 | ||
|
|
b56bd8541e | ||
|
|
1c9219609a | ||
|
|
0c96f758a8 | ||
|
|
9bd3444aad | ||
|
|
10a83935d9 | ||
|
|
e8ea6e0f5c | ||
|
|
e94e5be878 | ||
|
|
1073ea626e | ||
|
|
23ab8df443 | ||
|
|
d6caf01a38 | ||
|
|
1424ae78e9 | ||
|
|
827382edc3 | ||
|
|
85482bc939 | ||
|
|
42ce545f2f | ||
|
|
e49bc5d78d | ||
|
|
6e7a32ef2a | ||
|
|
37df7a6313 | ||
|
|
d5951415a4 | ||
|
|
691159ed83 | ||
|
|
18f517af44 | ||
|
|
89ba2da7e7 | ||
|
|
c1c47e50c3 | ||
|
|
f262cd632c | ||
|
|
8d58294af1 | ||
|
|
ddc94a8a16 | ||
|
|
83811c0343 | ||
|
|
b2c05a72e5 | ||
|
|
8c56a23562 | ||
|
|
53e1d9c6c4 | ||
|
|
6250ab2165 | ||
|
|
6ada83df9a | ||
|
|
cfd6376936 | ||
|
|
edb0cd0941 | ||
|
|
88ac407cf3 | ||
|
|
5ba56fb5ac | ||
|
|
b51c9f7552 | ||
|
|
0853296663 | ||
|
|
721e7549bc | ||
|
|
aee86de330 | ||
|
|
756a4355d1 | ||
|
|
5119bbd0b1 | ||
|
|
728bd74e28 | ||
|
|
015ffeecbf | ||
|
|
0365f6d9fc | ||
|
|
e208a79c32 | ||
|
|
0037d37960 | ||
|
|
50d9b1e4a3 | ||
|
|
7919d012e6 | ||
|
|
327f95a9cc | ||
|
|
98946ded4b | ||
|
|
cf47b69bd3 | ||
|
|
fa5c69ce0a | ||
|
|
39d85fc112 | ||
|
|
23e222bf13 | ||
|
|
cb068b029f | ||
|
|
9e95f3be1b | ||
|
|
401c02865b | ||
|
|
062450002d | ||
|
|
6d834762c4 | ||
|
|
4f1e9a31c6 | ||
|
|
8ed3911dfb | ||
|
|
4562879cb2 | ||
|
|
ef0024b2ef | ||
|
|
8e603410fa | ||
|
|
16691ca2f6 | ||
|
|
d7e70fd0b9 | ||
|
|
071a3e2c9b | ||
|
|
1733c383b3 | ||
|
|
65ecdc184e | ||
|
|
63ae0724cf | ||
|
|
370d1bf06b | ||
|
|
06f361cece | ||
|
|
4b706339ed | ||
|
|
26213f2ba9 | ||
|
|
c183351d50 | ||
|
|
14131a7cec | ||
|
|
dfde308010 | ||
|
|
96b8631e09 | ||
|
|
84f464885d | ||
|
|
098147ce70 | ||
|
|
08b6186d77 | ||
|
|
e9e98a7821 | ||
|
|
3150c6a3ea | ||
|
|
898d1ab6ed | ||
|
|
52ae7626b0 | ||
|
|
c652911bfb | ||
|
|
52023cde09 | ||
|
|
b134f29cf6 | ||
|
|
19e1d132c2 | ||
|
|
393a218df5 | ||
|
|
f247eb0568 | ||
|
|
b35a388685 | ||
|
|
6dbbfe3b04 | ||
|
|
b2c49461bc | ||
|
|
23dcdf1fd1 | ||
|
|
1f80e9ef82 | ||
|
|
0969abb460 | ||
|
|
7b5789b110 | ||
|
|
f3b5996b82 | ||
|
|
5dcab59174 | ||
|
|
a2e38bb415 | ||
|
|
0510814aae | ||
|
|
dee2818f5d | ||
|
|
0d7809c36b | ||
|
|
4c494b5265 | ||
|
|
9e85e8c60a | ||
|
|
ab8c71fab8 | ||
|
|
1fa8ea3a12 | ||
|
|
f584d3d5af | ||
|
|
46ae911ade | ||
|
|
85db5698a6 | ||
|
|
09a17b57ce | ||
|
|
826962d6e2 | ||
|
|
f77e79bb38 | ||
|
|
d21e832204 | ||
|
|
119d4f0e04 | ||
|
|
feab6acfbd | ||
|
|
d85a6074ec | ||
|
|
6c813ea299 | ||
|
|
8a903f21ae | ||
|
|
a7f7c64cce | ||
|
|
82969daf37 | ||
|
|
8e9d0fb723 | ||
|
|
ef3d44e581 | ||
|
|
f9055fce9f | ||
|
|
cff0e86fd9 | ||
|
|
f0913fc720 | ||
|
|
23a9f60171 | ||
|
|
faf41c805c | ||
|
|
41cded095c | ||
|
|
90fb034897 | ||
|
|
f4203b7408 | ||
|
|
8a9f14db03 | ||
|
|
a2adf2825a | ||
|
|
8f7220b574 | ||
|
|
5adbdb80a8 | ||
|
|
3717c4b553 | ||
|
|
609f45d818 | ||
|
|
1d49c98cf2 | ||
|
|
586f42557f | ||
|
|
e3f219366d | ||
|
|
c571b269ff | ||
|
|
6d57501c5c | ||
|
|
5f3e039b2e | ||
|
|
8fa7aeef78 | ||
|
|
3b5baa7701 | ||
|
|
c6bb3e71bf | ||
|
|
104607d34e | ||
|
|
714ef0d3b6 | ||
|
|
db7c52ca93 | ||
|
|
fc94fbd9c8 | ||
|
|
61b3207ea2 | ||
|
|
ccf17db972 | ||
|
|
456bee7efa | ||
|
|
ccfdd364a3 | ||
|
|
cf92988eae | ||
|
|
6c561b1908 | ||
|
|
5634a16a85 | ||
|
|
6883ae268f | ||
|
|
f75f8dead6 | ||
|
|
0b28df8b83 | ||
|
|
0ffffc6a51 | ||
|
|
3f95f06845 | ||
|
|
22bb4a9ac4 | ||
|
|
ee50ee8e99 | ||
|
|
63a6b17229 | ||
|
|
f33153ef01 | ||
|
|
09517837ba | ||
|
|
0f9ec8beca | ||
|
|
6d604889f2 | ||
|
|
f9da500c06 | ||
|
|
8f3b92a5b4 | ||
|
|
c82aa891e6 | ||
|
|
591ff61d1b | ||
|
|
af3ba16631 | ||
|
|
dce0bba707 | ||
|
|
0a942a670f | ||
|
|
310b1f50bc | ||
|
|
0cef7029e1 | ||
|
|
fbc2a4cdc2 | ||
|
|
2daf6f6d97 | ||
|
|
1fe80fa8c5 | ||
|
|
fa0b31b19f | ||
|
|
3a77eeaa91 | ||
|
|
a1faa66ecd | ||
|
|
1e458d21f9 | ||
|
|
d1a051544f | ||
|
|
8bd4ddcd0d | ||
|
|
59a16789ea | ||
|
|
f4ce3654bb | ||
|
|
3ad99d8239 | ||
|
|
b415393ccf | ||
|
|
84dbd93d9e | ||
|
|
5a4f990ab9 | ||
|
|
35f3d95a46 | ||
|
|
c729b71320 | ||
|
|
8eb7c8db9e | ||
|
|
d5609f6ab0 | ||
|
|
5d8fa31bdf | ||
|
|
9360b1fd90 | ||
|
|
51da6570bf | ||
|
|
fbdbddd555 | ||
|
|
eb3edd83b8 | ||
|
|
25f5fe54a9 | ||
|
|
7bf153bb3b | ||
|
|
48e64071a1 | ||
|
|
95ea4fd4c9 | ||
|
|
206b57adfd | ||
|
|
b7f3f7a7a1 | ||
|
|
34e7a0fc31 | ||
|
|
cc7f249cb8 | ||
|
|
147061eaa4 | ||
|
|
c16491889b | ||
|
|
1eb1d8df5f | ||
|
|
3f47cf785c | ||
|
|
e8859cb2e2 | ||
|
|
61ab6f729d | ||
|
|
79c9ba3cf3 | ||
|
|
1d86f7a0c3 | ||
|
|
e259b3994a | ||
|
|
18e97624fd | ||
|
|
1c9a245231 | ||
|
|
b51ca58820 | ||
|
|
7a48cac862 | ||
|
|
1bdcc4580e | ||
|
|
dd10bdd433 | ||
|
|
f7a74c2e74 | ||
|
|
4037e1886d | ||
|
|
c4ae363fdb | ||
|
|
3df64a46e7 | ||
|
|
69502986ad | ||
|
|
51ea63335c | ||
|
|
dc76b554f8 | ||
|
|
f8be8296dd | ||
|
|
b3c917925c | ||
|
|
4954373a04 | ||
|
|
5571ec3858 | ||
|
|
9ef3139905 | ||
|
|
3139b9fe6f | ||
|
|
437d33ba79 | ||
|
|
0a9890b1b0 | ||
|
|
1420ad43db | ||
|
|
30da7a6429 | ||
|
|
a2f3dcce02 | ||
|
|
41f5ca3f9d | ||
|
|
817f1e0371 | ||
|
|
35fc001768 | ||
|
|
002416e435 | ||
|
|
4917249bab | ||
|
|
afd2468375 | ||
|
|
54d06dd7f8 | ||
|
|
5e59844cf5 | ||
|
|
0d2a981674 | ||
|
|
943aeaa31f | ||
|
|
cfe0f67f0d | ||
|
|
635bb94cc4 | ||
|
|
cf732ce173 | ||
|
|
74e9a4ad2d | ||
|
|
570357e9be | ||
|
|
473375d4ae | ||
|
|
a78b698520 | ||
|
|
332c968294 | ||
|
|
ad12c344c5 | ||
|
|
91c0db1ac0 | ||
|
|
4d231b70aa | ||
|
|
ab2f6f6bed | ||
|
|
28458f7b85 | ||
|
|
50ff968c17 | ||
|
|
0b4064f14f | ||
|
|
1897bd4b26 | ||
|
|
fd6843822b | ||
|
|
ee1644e037 | ||
|
|
a6c1486650 | ||
|
|
f4b437e92b | ||
|
|
446c55dc89 | ||
|
|
0990eeeea0 | ||
|
|
591fe23a99 | ||
|
|
ad70765287 | ||
|
|
c59d29493c | ||
|
|
bd32b33ba9 | ||
|
|
3a8556bb78 | ||
|
|
c972d24ce7 | ||
|
|
647e68ef01 | ||
|
|
f439a591df | ||
|
|
8f17b338d1 |
4
.github/workflows/style.yml
vendored
4
.github/workflows/style.yml
vendored
@@ -35,7 +35,7 @@ jobs:
|
|||||||
restore-keys: |
|
restore-keys: |
|
||||||
${{ runner.os }}-pip-
|
${{ runner.os }}-pip-
|
||||||
- name: Install Dependencies
|
- name: Install Dependencies
|
||||||
run: pip3 install -e ".[dev]" mysqlclient psycopg2-binary
|
run: pip3 install -e ".[dev]" psycopg2-binary
|
||||||
- name: Run isort
|
- name: Run isort
|
||||||
run: isort -c .
|
run: isort -c .
|
||||||
working-directory: ./src
|
working-directory: ./src
|
||||||
@@ -55,7 +55,7 @@ jobs:
|
|||||||
restore-keys: |
|
restore-keys: |
|
||||||
${{ runner.os }}-pip-
|
${{ runner.os }}-pip-
|
||||||
- name: Install Dependencies
|
- name: Install Dependencies
|
||||||
run: pip3 install -e ".[dev]" mysqlclient psycopg2-binary
|
run: pip3 install -e ".[dev]" psycopg2-binary
|
||||||
- name: Run flake8
|
- name: Run flake8
|
||||||
run: flake8 .
|
run: flake8 .
|
||||||
working-directory: ./src
|
working-directory: ./src
|
||||||
|
|||||||
18
.github/workflows/tests.yml
vendored
18
.github/workflows/tests.yml
vendored
@@ -25,27 +25,17 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
python-version: ["3.9", "3.10", "3.11"]
|
python-version: ["3.9", "3.10", "3.11"]
|
||||||
database: [sqlite, postgres, mysql]
|
database: [sqlite, postgres]
|
||||||
exclude:
|
exclude:
|
||||||
- database: mysql
|
|
||||||
python-version: "3.9"
|
|
||||||
- database: mysql
|
|
||||||
python-version: "3.11"
|
|
||||||
- database: sqlite
|
- database: sqlite
|
||||||
python-version: "3.9"
|
python-version: "3.9"
|
||||||
- database: sqlite
|
- database: sqlite
|
||||||
python-version: "3.10"
|
python-version: "3.10"
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
- uses: getong/mariadb-action@v1.1
|
|
||||||
with:
|
|
||||||
mariadb version: '10.10'
|
|
||||||
mysql database: 'pretix'
|
|
||||||
mysql root password: ''
|
|
||||||
if: matrix.database == 'mysql'
|
|
||||||
- uses: harmon758/postgresql-action@v1
|
- uses: harmon758/postgresql-action@v1
|
||||||
with:
|
with:
|
||||||
postgresql version: '11'
|
postgresql version: '15'
|
||||||
postgresql db: 'pretix'
|
postgresql db: 'pretix'
|
||||||
postgresql user: 'postgres'
|
postgresql user: 'postgres'
|
||||||
postgresql password: 'postgres'
|
postgresql password: 'postgres'
|
||||||
@@ -61,9 +51,9 @@ jobs:
|
|||||||
restore-keys: |
|
restore-keys: |
|
||||||
${{ runner.os }}-pip-
|
${{ runner.os }}-pip-
|
||||||
- name: Install system dependencies
|
- name: Install system dependencies
|
||||||
run: sudo apt update && sudo apt install gettext mariadb-client
|
run: sudo apt update && sudo apt install gettext
|
||||||
- name: Install Python dependencies
|
- name: Install Python dependencies
|
||||||
run: pip3 install --ignore-requires-python -e ".[dev]" mysqlclient psycopg2-binary # We ignore that flake8 needs newer python as we don't run flake8 during tests
|
run: pip3 install --ignore-requires-python -e ".[dev]" psycopg2-binary # We ignore that flake8 needs newer python as we don't run flake8 during tests
|
||||||
- name: Run checks
|
- name: Run checks
|
||||||
run: python manage.py check
|
run: python manage.py check
|
||||||
working-directory: ./src
|
working-directory: ./src
|
||||||
|
|||||||
@@ -3,7 +3,6 @@ FROM python:3.11-bullseye
|
|||||||
RUN apt-get update && \
|
RUN apt-get update && \
|
||||||
apt-get install -y --no-install-recommends \
|
apt-get install -y --no-install-recommends \
|
||||||
build-essential \
|
build-essential \
|
||||||
libmariadb-dev \
|
|
||||||
gettext \
|
gettext \
|
||||||
git \
|
git \
|
||||||
libffi-dev \
|
libffi-dev \
|
||||||
@@ -34,8 +33,7 @@ RUN apt-get update && \
|
|||||||
mkdir /static && \
|
mkdir /static && \
|
||||||
mkdir /etc/supervisord && \
|
mkdir /etc/supervisord && \
|
||||||
curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - && \
|
curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - && \
|
||||||
apt-get install -y nodejs && \
|
apt-get install -y nodejs
|
||||||
curl -qL https://www.npmjs.com/install.sh | sh
|
|
||||||
|
|
||||||
|
|
||||||
ENV LC_ALL=C.UTF-8 \
|
ENV LC_ALL=C.UTF-8 \
|
||||||
@@ -58,7 +56,7 @@ RUN pip3 install -U \
|
|||||||
wheel && \
|
wheel && \
|
||||||
cd /pretix && \
|
cd /pretix && \
|
||||||
PRETIX_DOCKER_BUILD=TRUE pip3 install \
|
PRETIX_DOCKER_BUILD=TRUE pip3 install \
|
||||||
-e ".[memcached,mysql]" \
|
-e ".[memcached]" \
|
||||||
gunicorn django-extensions ipython && \
|
gunicorn django-extensions ipython && \
|
||||||
rm -rf ~/.cache/pip
|
rm -rf ~/.cache/pip
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
from pretix.settings import *
|
from pretix.settings import *
|
||||||
|
|
||||||
LOGGING['handlers']['mail_admins']['include_html'] = True
|
LOGGING['handlers']['mail_admins']['include_html'] = True
|
||||||
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
|
STORAGES["staticfiles"]["BACKEND"] = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
|
||||||
|
|||||||
@@ -152,25 +152,26 @@ Example::
|
|||||||
password=abcd
|
password=abcd
|
||||||
host=localhost
|
host=localhost
|
||||||
port=3306
|
port=3306
|
||||||
|
sslmode=require
|
||||||
|
sslrootcert=/etc/pretix/postgresql-ca.crt
|
||||||
|
sslcert=/etc/pretix/postgresql-client-crt.crt
|
||||||
|
sslkey=/etc/pretix/postgresql-client-key.key
|
||||||
|
|
||||||
``backend``
|
``backend``
|
||||||
One of ``mysql`` (deprecated), ``sqlite3`` and ``postgresql``.
|
One of ``sqlite3`` and ``postgresql``.
|
||||||
Default: ``sqlite3``.
|
Default: ``sqlite3``.
|
||||||
|
|
||||||
If you use MySQL, be sure to create your database using
|
|
||||||
``CREATE DATABASE <dbname> CHARACTER SET utf8;``. Otherwise, Unicode
|
|
||||||
support will not properly work.
|
|
||||||
|
|
||||||
``name``
|
``name``
|
||||||
The database's name. Default: ``db.sqlite3``.
|
The database's name. Default: ``db.sqlite3``.
|
||||||
|
|
||||||
``user``, ``password``, ``host``, ``port``
|
``user``, ``password``, ``host``, ``port``
|
||||||
Connection details for the database connection. Empty by default.
|
Connection details for the database connection. Empty by default.
|
||||||
|
|
||||||
``galera``
|
``sslmode``, ``sslrootcert``
|
||||||
(Deprecated) Indicates if the database backend is a MySQL/MariaDB Galera cluster and
|
Connection TLS details for the PostgreSQL database connection. Possible values of ``sslmode`` are ``disable``, ``allow``, ``prefer``, ``require``, ``verify-ca``, and ``verify-full``. ``sslrootcert`` should be the accessible path of the ca certificate. Both values are empty by default.
|
||||||
turns on some optimizations/special case handlers. Default: ``False``
|
|
||||||
|
|
||||||
|
``sslcert``, ``sslkey``
|
||||||
|
Connection mTLS details for the PostgreSQL database connection. It's also necessary to specify ``sslmode`` and ``sslrootcert`` parameters, please check the correct values from the TLS part. ``sslcert`` should be the accessible path of the client certificate. ``sslkey`` should be the accessible path of the client key. All values are empty by default.
|
||||||
.. _`config-replica`:
|
.. _`config-replica`:
|
||||||
|
|
||||||
Database replica settings
|
Database replica settings
|
||||||
@@ -332,6 +333,10 @@ to speed up various operations::
|
|||||||
["sentinel_host_3", 26379]
|
["sentinel_host_3", 26379]
|
||||||
]
|
]
|
||||||
password=password
|
password=password
|
||||||
|
ssl_cert_reqs=required
|
||||||
|
ssl_ca_certs=/etc/pretix/redis-ca.pem
|
||||||
|
ssl_keyfile=/etc/pretix/redis-client-crt.pem
|
||||||
|
ssl_certfile=/etc/pretix/redis-client-key.key
|
||||||
|
|
||||||
``location``
|
``location``
|
||||||
The location of redis, as a URL of the form ``redis://[:password]@localhost:6379/0``
|
The location of redis, as a URL of the form ``redis://[:password]@localhost:6379/0``
|
||||||
@@ -355,6 +360,22 @@ to speed up various operations::
|
|||||||
If your redis setup doesn't require a password or you already specified it in the location you can omit this option.
|
If your redis setup doesn't require a password or you already specified it in the location you can omit this option.
|
||||||
If this is set it will be passed to redis as the connection option PASSWORD.
|
If this is set it will be passed to redis as the connection option PASSWORD.
|
||||||
|
|
||||||
|
``ssl_cert_reqs``
|
||||||
|
If this is set it will be passed to redis as the connection option ``SSL_CERT_REQS``.
|
||||||
|
Possible values are ``none``, ``optional``, and ``required``.
|
||||||
|
|
||||||
|
``ssl_ca_certs``
|
||||||
|
If your redis setup doesn't require TLS you can omit this option.
|
||||||
|
If this is set it will be passed to redis as the connection option ``SSL_CA_CERTS``. Possible value is the ca path.
|
||||||
|
|
||||||
|
``ssl_keyfile``
|
||||||
|
If your redis setup doesn't require mTLS you can omit this option.
|
||||||
|
If this is set it will be passed to redis as the connection option ``SSL_KEYFILE``. Possible value is the keyfile path.
|
||||||
|
|
||||||
|
``ssl_certfile``
|
||||||
|
If your redis setup doesn't require mTLS you can omit this option.
|
||||||
|
If this is set it will be passed to redis as the connection option ``SSL_CERTFILE``. Possible value is the certfile path.
|
||||||
|
|
||||||
If redis is not configured, pretix will store sessions and locks in the database. If memcached
|
If redis is not configured, pretix will store sessions and locks in the database. If memcached
|
||||||
is configured, memcached will be used for caching instead of redis.
|
is configured, memcached will be used for caching instead of redis.
|
||||||
|
|
||||||
@@ -404,6 +425,8 @@ The two ``transport_options`` entries can be omitted in most cases.
|
|||||||
If they are present they need to be a valid JSON dictionary.
|
If they are present they need to be a valid JSON dictionary.
|
||||||
For possible entries in that dictionary see the `Celery documentation`_.
|
For possible entries in that dictionary see the `Celery documentation`_.
|
||||||
|
|
||||||
|
It is possible the use Redis with TLS/mTLS for the broker or the backend. To do so, it is necessary to specify the TLS identifier ``rediss``, the ssl mode ``ssl_cert_reqs`` and optionally specify the CA (TLS) ``ssl_ca_certs``, cert ``ssl_certfile`` and key ``ssl_keyfile`` (mTLS) path as encoded string. the following uri describes the format and possible parameters ``rediss://0.0.0.0:6379/1?ssl_cert_reqs=required&ssl_ca_certs=%2Fetc%2Fpretix%2Fredis-ca.pem&ssl_certfile=%2Fetc%2Fpretix%2Fredis-client-crt.pem&ssl_keyfile=%2Fetc%2Fpretix%2Fredis-client-key.key``
|
||||||
|
|
||||||
To use redis with sentinels set the broker or backend to ``sentinel://sentinel_host_1:26379;sentinel_host_2:26379/0``
|
To use redis with sentinels set the broker or backend to ``sentinel://sentinel_host_1:26379;sentinel_host_2:26379/0``
|
||||||
and the respective transport_options to ``{"master_name":"mymaster"}``.
|
and the respective transport_options to ``{"master_name":"mymaster"}``.
|
||||||
If your redis instances behind the sentinel have a password use ``sentinel://:my_password@sentinel_host_1:26379;sentinel_host_2:26379/0``.
|
If your redis instances behind the sentinel have a password use ``sentinel://:my_password@sentinel_host_1:26379;sentinel_host_2:26379/0``.
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ installation guides):
|
|||||||
* `Docker`_
|
* `Docker`_
|
||||||
* A SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for
|
* A SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for
|
||||||
* A HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections
|
* A HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections
|
||||||
* A `PostgreSQL`_ 9.6+ database server
|
* A `PostgreSQL`_ 11+ database server
|
||||||
* A `redis`_ server
|
* A `redis`_ server
|
||||||
|
|
||||||
We also recommend that you use a firewall, although this is not a pretix-specific recommendation. If you're new to
|
We also recommend that you use a firewall, although this is not a pretix-specific recommendation. If you're new to
|
||||||
@@ -321,11 +321,11 @@ workers, e.g. ``docker run … taskworker -Q notifications --concurrency 32``.
|
|||||||
|
|
||||||
|
|
||||||
.. _Docker: https://docs.docker.com/engine/installation/linux/debian/
|
.. _Docker: https://docs.docker.com/engine/installation/linux/debian/
|
||||||
.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-16-04
|
.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-22-04
|
||||||
.. _nginx: https://botleg.com/stories/https-with-lets-encrypt-and-nginx/
|
.. _nginx: https://botleg.com/stories/https-with-lets-encrypt-and-nginx/
|
||||||
.. _Let's Encrypt: https://letsencrypt.org/
|
.. _Let's Encrypt: https://letsencrypt.org/
|
||||||
.. _pretix.eu: https://pretix.eu/
|
.. _pretix.eu: https://pretix.eu/
|
||||||
.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-20-04
|
.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-22-04
|
||||||
.. _redis: https://blog.programster.org/debian-8-install-redis-server/
|
.. _redis: https://blog.programster.org/debian-8-install-redis-server/
|
||||||
.. _ufw: https://en.wikipedia.org/wiki/Uncomplicated_Firewall
|
.. _ufw: https://en.wikipedia.org/wiki/Uncomplicated_Firewall
|
||||||
.. _redis website: https://redis.io/topics/security
|
.. _redis website: https://redis.io/topics/security
|
||||||
|
|||||||
@@ -68,7 +68,7 @@ generated key and installs the plugin from the URL we told you::
|
|||||||
mkdir -p /etc/ssh && \
|
mkdir -p /etc/ssh && \
|
||||||
ssh-keyscan -t rsa -p 10022 code.rami.io >> /root/.ssh/known_hosts && \
|
ssh-keyscan -t rsa -p 10022 code.rami.io >> /root/.ssh/known_hosts && \
|
||||||
echo StrictHostKeyChecking=no >> /root/.ssh/config && \
|
echo StrictHostKeyChecking=no >> /root/.ssh/config && \
|
||||||
DJANGO_SETTINGS_MODULE=pretix.settings pip3 install -U "git+ssh://git@code.rami.io:10022/pretix/pretix-slack.git@stable#egg=pretix-slack" && \
|
DJANGO_SETTINGS_MODULE= pip3 install -U "git+ssh://git@code.rami.io:10022/pretix/pretix-slack.git@stable#egg=pretix-slack" && \
|
||||||
cd /pretix/src && \
|
cd /pretix/src && \
|
||||||
sudo -u pretixuser make production
|
sudo -u pretixuser make production
|
||||||
USER pretixuser
|
USER pretixuser
|
||||||
|
|||||||
@@ -16,14 +16,11 @@ To use pretix, you will need the following things:
|
|||||||
* A periodic task runner, e.g. ``cron``
|
* A periodic task runner, e.g. ``cron``
|
||||||
|
|
||||||
* **A database**. This needs to be a SQL-based that is supported by Django. We highly recommend to either
|
* **A database**. This needs to be a SQL-based that is supported by Django. We highly recommend to either
|
||||||
go for **PostgreSQL** or **MySQL/MariaDB**. If you do not provide one, pretix will run on SQLite, which is useful
|
go for **PostgreSQL**. If you do not provide one, pretix will run on SQLite, which is useful
|
||||||
for evaluation and development purposes.
|
for evaluation and development purposes.
|
||||||
|
|
||||||
.. warning:: Do not ever use SQLite in production. It will break.
|
.. warning:: Do not ever use SQLite in production. It will break.
|
||||||
|
|
||||||
.. warning:: We recommend **PostgreSQL**. If you go for MySQL, make sure you run **MySQL 5.7 or newer** or
|
|
||||||
**MariaDB 10.2.7 or newer**.
|
|
||||||
|
|
||||||
* A **reverse proxy**. pretix needs to deliver some static content to your users (e.g. CSS, images, ...). While pretix
|
* A **reverse proxy**. pretix needs to deliver some static content to your users (e.g. CSS, images, ...). While pretix
|
||||||
is capable of doing this, having this handled by a proper web server like **nginx** or **Apache** will be much
|
is capable of doing this, having this handled by a proper web server like **nginx** or **Apache** will be much
|
||||||
faster. Also, you need a proxying web server in front to provide SSL encryption.
|
faster. Also, you need a proxying web server in front to provide SSL encryption.
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ Requirements
|
|||||||
Please set up the following systems beforehand, we'll not explain them here in detail (but see these links for external
|
Please set up the following systems beforehand, we'll not explain them here in detail (but see these links for external
|
||||||
installation guides):
|
installation guides):
|
||||||
|
|
||||||
|
* A python 3.9+ installation
|
||||||
* A SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for
|
* A SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for
|
||||||
* A HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections
|
* A HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections
|
||||||
* A `PostgreSQL`_ 11+ database server
|
* A `PostgreSQL`_ 11+ database server
|
||||||
@@ -323,11 +324,11 @@ Then, proceed like after any plugin installation::
|
|||||||
(venv)$ python -m pretix updatestyles
|
(venv)$ python -m pretix updatestyles
|
||||||
# systemctl restart pretix-web pretix-worker
|
# systemctl restart pretix-web pretix-worker
|
||||||
|
|
||||||
.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-16-04
|
.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-22-04
|
||||||
.. _nginx: https://botleg.com/stories/https-with-lets-encrypt-and-nginx/
|
.. _nginx: https://botleg.com/stories/https-with-lets-encrypt-and-nginx/
|
||||||
.. _Let's Encrypt: https://letsencrypt.org/
|
.. _Let's Encrypt: https://letsencrypt.org/
|
||||||
.. _pretix.eu: https://pretix.eu/
|
.. _pretix.eu: https://pretix.eu/
|
||||||
.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-20-04
|
.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-22-04
|
||||||
.. _redis: https://blog.programster.org/debian-8-install-redis-server/
|
.. _redis: https://blog.programster.org/debian-8-install-redis-server/
|
||||||
.. _ufw: https://en.wikipedia.org/wiki/Uncomplicated_Firewall
|
.. _ufw: https://en.wikipedia.org/wiki/Uncomplicated_Firewall
|
||||||
.. _strong encryption settings: https://mozilla.github.io/server-side-tls/ssl-config-generator/
|
.. _strong encryption settings: https://mozilla.github.io/server-side-tls/ssl-config-generator/
|
||||||
|
|||||||
@@ -3,11 +3,11 @@
|
|||||||
Migrating from MySQL/MariaDB to PostgreSQL
|
Migrating from MySQL/MariaDB to PostgreSQL
|
||||||
==========================================
|
==========================================
|
||||||
|
|
||||||
Our recommended database for all production installations is PostgreSQL. Support for MySQL/MariaDB will be removed in
|
Our recommended database for all production installations is PostgreSQL. Support for MySQL/MariaDB has been removed
|
||||||
pretix 5.0.
|
in newer pretix releases.
|
||||||
|
|
||||||
In order to follow this guide, your pretix installation needs to be a version that fully supports MySQL/MariaDB. If you
|
In order to follow this guide, your pretix installation needs to be a version that fully supports MySQL/MariaDB. If you
|
||||||
already upgraded to pretix 5.0, downgrade back to the last 4.x release using ``pip``.
|
already upgraded to pretix 5.0 or later, downgrade back to the last 4.x release using ``pip``.
|
||||||
|
|
||||||
.. note:: We have tested this guide carefully, but we can't assume any liability for its correctness. The data loss
|
.. note:: We have tested this guide carefully, but we can't assume any liability for its correctness. The data loss
|
||||||
risk should be low as long as pretix is not running while you do the migration. If you are a pretix Enterprise
|
risk should be low as long as pretix is not running while you do the migration. If you are a pretix Enterprise
|
||||||
|
|||||||
@@ -32,10 +32,16 @@ as well as the type of underlying hardware. Example:
|
|||||||
"token": "kpp4jn8g2ynzonp6",
|
"token": "kpp4jn8g2ynzonp6",
|
||||||
"hardware_brand": "Samsung",
|
"hardware_brand": "Samsung",
|
||||||
"hardware_model": "Galaxy S",
|
"hardware_model": "Galaxy S",
|
||||||
|
"os_name": "Android",
|
||||||
|
"os_version": "2.3.6",
|
||||||
"software_brand": "pretixdroid",
|
"software_brand": "pretixdroid",
|
||||||
"software_version": "4.0.0"
|
"software_version": "4.0.0",
|
||||||
|
"rsa_pubkey": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqh…nswIDAQAB\n-----END PUBLIC KEY-----\n"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
The ``rsa_pubkey`` is optional any only required for certain fatures such as working with reusable
|
||||||
|
media and NFC cryptography.
|
||||||
|
|
||||||
Every initialization token can only be used once. On success, you will receive a response containing
|
Every initialization token can only be used once. On success, you will receive a response containing
|
||||||
information on your device as well as your API token:
|
information on your device as well as your API token:
|
||||||
|
|
||||||
@@ -98,6 +104,8 @@ following endpoint:
|
|||||||
{
|
{
|
||||||
"hardware_brand": "Samsung",
|
"hardware_brand": "Samsung",
|
||||||
"hardware_model": "Galaxy S",
|
"hardware_model": "Galaxy S",
|
||||||
|
"os_name": "Android",
|
||||||
|
"os_version": "2.3.6",
|
||||||
"software_brand": "pretixdroid",
|
"software_brand": "pretixdroid",
|
||||||
"software_version": "4.1.0",
|
"software_version": "4.1.0",
|
||||||
"info": {"arbitrary": "data"}
|
"info": {"arbitrary": "data"}
|
||||||
@@ -133,9 +141,29 @@ The response will look like this:
|
|||||||
"id": 3,
|
"id": 3,
|
||||||
"name": "South entrance"
|
"name": "South entrance"
|
||||||
}
|
}
|
||||||
}
|
},
|
||||||
|
"server": {
|
||||||
|
"version": {
|
||||||
|
"pretix": "3.6.0.dev0",
|
||||||
|
"pretix_numeric": 30060001000
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"medium_key_sets": [
|
||||||
|
{
|
||||||
|
"public_id": 3456349,
|
||||||
|
"organizer": "foo",
|
||||||
|
"active": true,
|
||||||
|
"media_type": "nfc_mf0aes",
|
||||||
|
"uid_key": "base64-encoded-encrypted-key",
|
||||||
|
"diversification_key": "base64-encoded-encrypted-key",
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
``"medium_key_sets`` will always be empty if you did not set an ``rsa_pubkey``.
|
||||||
|
The individual keys in the key sets are encrypted with the device's ``rsa_pubkey``
|
||||||
|
using ``RSA/ECB/PKCS1Padding``.
|
||||||
|
|
||||||
Creating a new API key
|
Creating a new API key
|
||||||
----------------------
|
----------------------
|
||||||
|
|
||||||
|
|||||||
@@ -24,6 +24,8 @@ all_events boolean Whether this de
|
|||||||
limit_events list List of event slugs this device has access to
|
limit_events list List of event slugs this device has access to
|
||||||
hardware_brand string Device hardware manufacturer (read-only)
|
hardware_brand string Device hardware manufacturer (read-only)
|
||||||
hardware_model string Device hardware model (read-only)
|
hardware_model string Device hardware model (read-only)
|
||||||
|
os_name string Device operating system name (read-only)
|
||||||
|
os_version string Device operating system version (read-only)
|
||||||
software_brand string Device software product (read-only)
|
software_brand string Device software product (read-only)
|
||||||
software_version string Device software version (read-only)
|
software_version string Device software version (read-only)
|
||||||
created datetime Creation time
|
created datetime Creation time
|
||||||
@@ -76,6 +78,8 @@ Device endpoints
|
|||||||
"security_profile": "full",
|
"security_profile": "full",
|
||||||
"hardware_brand": "Zebra",
|
"hardware_brand": "Zebra",
|
||||||
"hardware_model": "TC25",
|
"hardware_model": "TC25",
|
||||||
|
"os_name": "Android",
|
||||||
|
"os_version": "8.1.0",
|
||||||
"software_brand": "pretixSCAN",
|
"software_brand": "pretixSCAN",
|
||||||
"software_version": "1.5.1"
|
"software_version": "1.5.1"
|
||||||
}
|
}
|
||||||
@@ -123,6 +127,8 @@ Device endpoints
|
|||||||
"security_profile": "full",
|
"security_profile": "full",
|
||||||
"hardware_brand": "Zebra",
|
"hardware_brand": "Zebra",
|
||||||
"hardware_model": "TC25",
|
"hardware_model": "TC25",
|
||||||
|
"os_name": "Android",
|
||||||
|
"os_version": "8.1.0",
|
||||||
"software_brand": "pretixSCAN",
|
"software_brand": "pretixSCAN",
|
||||||
"software_version": "1.5.1"
|
"software_version": "1.5.1"
|
||||||
}
|
}
|
||||||
@@ -173,6 +179,8 @@ Device endpoints
|
|||||||
"initialized": null
|
"initialized": null
|
||||||
"hardware_brand": null,
|
"hardware_brand": null,
|
||||||
"hardware_model": null,
|
"hardware_model": null,
|
||||||
|
"os_name": null,
|
||||||
|
"os_version": null,
|
||||||
"software_brand": null,
|
"software_brand": null,
|
||||||
"software_version": null
|
"software_version": null
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -31,9 +31,9 @@ subevent_mode strings Determines h
|
|||||||
``"same"`` (discount is only applied for groups within
|
``"same"`` (discount is only applied for groups within
|
||||||
the same date), or ``"distinct"`` (discount is only applied
|
the same date), or ``"distinct"`` (discount is only applied
|
||||||
for groups with no two same dates).
|
for groups with no two same dates).
|
||||||
condition_all_products boolean If ``true``, the discount applies to all items.
|
condition_all_products boolean If ``true``, the discount condition applies to all items.
|
||||||
condition_limit_products list of integers If ``condition_all_products`` is not set, this is a list
|
condition_limit_products list of integers If ``condition_all_products`` is not set, this is a list
|
||||||
of internal item IDs that the discount applies to.
|
of internal item IDs that the discount condition applies to.
|
||||||
condition_apply_to_addons boolean If ``true``, the discount applies to add-on products as well,
|
condition_apply_to_addons boolean If ``true``, the discount applies to add-on products as well,
|
||||||
otherwise it only applies to top-level items. The discount never
|
otherwise it only applies to top-level items. The discount never
|
||||||
applies to bundled products.
|
applies to bundled products.
|
||||||
@@ -48,6 +48,17 @@ benefit_discount_matching_percent decimal (string) The percenta
|
|||||||
benefit_only_apply_to_cheapest_n_matches integer If set higher than 0, the discount will only be applied to
|
benefit_only_apply_to_cheapest_n_matches integer If set higher than 0, the discount will only be applied to
|
||||||
the cheapest matches. Useful for a "3 for 2"-style discount.
|
the cheapest matches. Useful for a "3 for 2"-style discount.
|
||||||
Cannot be combined with ``condition_min_value``.
|
Cannot be combined with ``condition_min_value``.
|
||||||
|
benefit_same_products boolean If ``true``, the discount benefit applies to the same set of items
|
||||||
|
as the condition (see above).
|
||||||
|
benefit_limit_products list of integers If ``benefit_same_products`` is not set, this is a list
|
||||||
|
of internal item IDs that the discount benefit applies to.
|
||||||
|
benefit_apply_to_addons boolean (Only used if ``benefit_same_products`` is ``false``.)
|
||||||
|
If ``true``, the discount applies to add-on products as well,
|
||||||
|
otherwise it only applies to top-level items. The discount never
|
||||||
|
applies to bundled products.
|
||||||
|
benefit_ignore_voucher_discounted boolean (Only used if ``benefit_same_products`` is ``false``.)
|
||||||
|
If ``true``, the discount does not apply to products which have
|
||||||
|
been discounted by a voucher.
|
||||||
======================================== ========================== =======================================================
|
======================================== ========================== =======================================================
|
||||||
|
|
||||||
|
|
||||||
@@ -94,6 +105,10 @@ Endpoints
|
|||||||
"condition_ignore_voucher_discounted": false,
|
"condition_ignore_voucher_discounted": false,
|
||||||
"condition_min_count": 3,
|
"condition_min_count": 3,
|
||||||
"condition_min_value": "0.00",
|
"condition_min_value": "0.00",
|
||||||
|
"benefit_same_products": true,
|
||||||
|
"benefit_limit_products": [],
|
||||||
|
"benefit_apply_to_addons": true,
|
||||||
|
"benefit_ignore_voucher_discounted": false,
|
||||||
"benefit_discount_matching_percent": "100.00",
|
"benefit_discount_matching_percent": "100.00",
|
||||||
"benefit_only_apply_to_cheapest_n_matches": 1
|
"benefit_only_apply_to_cheapest_n_matches": 1
|
||||||
}
|
}
|
||||||
@@ -146,6 +161,10 @@ Endpoints
|
|||||||
"condition_ignore_voucher_discounted": false,
|
"condition_ignore_voucher_discounted": false,
|
||||||
"condition_min_count": 3,
|
"condition_min_count": 3,
|
||||||
"condition_min_value": "0.00",
|
"condition_min_value": "0.00",
|
||||||
|
"benefit_same_products": true,
|
||||||
|
"benefit_limit_products": [],
|
||||||
|
"benefit_apply_to_addons": true,
|
||||||
|
"benefit_ignore_voucher_discounted": false,
|
||||||
"benefit_discount_matching_percent": "100.00",
|
"benefit_discount_matching_percent": "100.00",
|
||||||
"benefit_only_apply_to_cheapest_n_matches": 1
|
"benefit_only_apply_to_cheapest_n_matches": 1
|
||||||
}
|
}
|
||||||
@@ -184,6 +203,10 @@ Endpoints
|
|||||||
"condition_ignore_voucher_discounted": false,
|
"condition_ignore_voucher_discounted": false,
|
||||||
"condition_min_count": 3,
|
"condition_min_count": 3,
|
||||||
"condition_min_value": "0.00",
|
"condition_min_value": "0.00",
|
||||||
|
"benefit_same_products": true,
|
||||||
|
"benefit_limit_products": [],
|
||||||
|
"benefit_apply_to_addons": true,
|
||||||
|
"benefit_ignore_voucher_discounted": false,
|
||||||
"benefit_discount_matching_percent": "100.00",
|
"benefit_discount_matching_percent": "100.00",
|
||||||
"benefit_only_apply_to_cheapest_n_matches": 1
|
"benefit_only_apply_to_cheapest_n_matches": 1
|
||||||
}
|
}
|
||||||
@@ -211,6 +234,10 @@ Endpoints
|
|||||||
"condition_ignore_voucher_discounted": false,
|
"condition_ignore_voucher_discounted": false,
|
||||||
"condition_min_count": 3,
|
"condition_min_count": 3,
|
||||||
"condition_min_value": "0.00",
|
"condition_min_value": "0.00",
|
||||||
|
"benefit_same_products": true,
|
||||||
|
"benefit_limit_products": [],
|
||||||
|
"benefit_apply_to_addons": true,
|
||||||
|
"benefit_ignore_voucher_discounted": false,
|
||||||
"benefit_discount_matching_percent": "100.00",
|
"benefit_discount_matching_percent": "100.00",
|
||||||
"benefit_only_apply_to_cheapest_n_matches": 1
|
"benefit_only_apply_to_cheapest_n_matches": 1
|
||||||
}
|
}
|
||||||
@@ -267,6 +294,10 @@ Endpoints
|
|||||||
"condition_ignore_voucher_discounted": false,
|
"condition_ignore_voucher_discounted": false,
|
||||||
"condition_min_count": 3,
|
"condition_min_count": 3,
|
||||||
"condition_min_value": "0.00",
|
"condition_min_value": "0.00",
|
||||||
|
"benefit_same_products": true,
|
||||||
|
"benefit_limit_products": [],
|
||||||
|
"benefit_apply_to_addons": true,
|
||||||
|
"benefit_ignore_voucher_discounted": false,
|
||||||
"benefit_discount_matching_percent": "100.00",
|
"benefit_discount_matching_percent": "100.00",
|
||||||
"benefit_only_apply_to_cheapest_n_matches": 1
|
"benefit_only_apply_to_cheapest_n_matches": 1
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -70,6 +70,11 @@ Endpoints
|
|||||||
|
|
||||||
The ``public_url`` field has been added.
|
The ``public_url`` field has been added.
|
||||||
|
|
||||||
|
.. versionchanged:: 5.0
|
||||||
|
|
||||||
|
The ``date_from_before``, ``date_from_after``, ``date_to_before``, and ``date_to_after`` query parameters have been
|
||||||
|
added.
|
||||||
|
|
||||||
.. http:get:: /api/v1/organizers/(organizer)/events/
|
.. http:get:: /api/v1/organizers/(organizer)/events/
|
||||||
|
|
||||||
Returns a list of all events within a given organizer the authenticated user/token has access to.
|
Returns a list of all events within a given organizer the authenticated user/token has access to.
|
||||||
@@ -141,6 +146,10 @@ Endpoints
|
|||||||
:query has_subevents: If set to ``true``/``false``, only events with a matching value of ``has_subevents`` are returned.
|
:query has_subevents: If set to ``true``/``false``, only events with a matching value of ``has_subevents`` are returned.
|
||||||
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned. Event series are never (always) returned.
|
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned. Event series are never (always) returned.
|
||||||
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned. Event series are never (always) returned.
|
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned. Event series are never (always) returned.
|
||||||
|
:query date_from_after: If set to a date and time, only events that start at or after the given time are returned.
|
||||||
|
:query date_from_before: If set to a date and time, only events that start at or before the given time are returned.
|
||||||
|
:query date_to_after: If set to a date and time, only events that have an end date and end at or after the given time are returned.
|
||||||
|
:query date_to_before: If set to a date and time, only events that have an end date and end at or before the given time are returned.
|
||||||
:query ends_after: If set to a date and time, only events that happen during of after the given time are returned. Event series are never returned.
|
:query ends_after: If set to a date and time, only events that happen during of after the given time are returned. Event series are never returned.
|
||||||
:query string ordering: Manually set the ordering of results. Valid fields to be used are ``date_from`` and
|
:query string ordering: Manually set the ordering of results. Valid fields to be used are ``date_from`` and
|
||||||
``slug``. Keep in mind that ``date_from`` of event series does not really tell you anything.
|
``slug``. Keep in mind that ``date_from`` of event series does not really tell you anything.
|
||||||
|
|||||||
@@ -111,7 +111,7 @@ Listing available exporters
|
|||||||
"input_parameters": [
|
"input_parameters": [
|
||||||
{
|
{
|
||||||
"name": "events",
|
"name": "events",
|
||||||
"required": true
|
"required": false
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "_format",
|
"name": "_format",
|
||||||
|
|||||||
@@ -12,6 +12,7 @@ The invoice resource contains the following public fields:
|
|||||||
Field Type Description
|
Field Type Description
|
||||||
===================================== ========================== =======================================================
|
===================================== ========================== =======================================================
|
||||||
number string Invoice number (with prefix)
|
number string Invoice number (with prefix)
|
||||||
|
event string The slug of the parent event
|
||||||
order string Order code of the order this invoice belongs to
|
order string Order code of the order this invoice belongs to
|
||||||
is_cancellation boolean ``true``, if this invoice is the cancellation of a
|
is_cancellation boolean ``true``, if this invoice is the cancellation of a
|
||||||
different invoice.
|
different invoice.
|
||||||
@@ -121,9 +122,13 @@ internal_reference string Customer's refe
|
|||||||
|
|
||||||
The attribute ``lines.subevent`` has been added.
|
The attribute ``lines.subevent`` has been added.
|
||||||
|
|
||||||
|
.. versionchanged:: 2023.8
|
||||||
|
|
||||||
Endpoints
|
The ``event`` attribute has been added. The organizer-level endpoint has been added.
|
||||||
---------
|
|
||||||
|
|
||||||
|
List of all invoices
|
||||||
|
--------------------
|
||||||
|
|
||||||
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/invoices/
|
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/invoices/
|
||||||
|
|
||||||
@@ -152,6 +157,7 @@ Endpoints
|
|||||||
"results": [
|
"results": [
|
||||||
{
|
{
|
||||||
"number": "SAMPLECONF-00001",
|
"number": "SAMPLECONF-00001",
|
||||||
|
"event": "sampleconf",
|
||||||
"order": "ABC12",
|
"order": "ABC12",
|
||||||
"is_cancellation": false,
|
"is_cancellation": false,
|
||||||
"invoice_from_name": "Big Events LLC",
|
"invoice_from_name": "Big Events LLC",
|
||||||
@@ -221,6 +227,50 @@ Endpoints
|
|||||||
:statuscode 401: Authentication failure
|
:statuscode 401: Authentication failure
|
||||||
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
|
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
|
||||||
|
|
||||||
|
.. http:get:: /api/v1/organizers/(organizer)/invoices/
|
||||||
|
|
||||||
|
Returns a list of all invoices within all events of a given organizer (with sufficient access permissions).
|
||||||
|
|
||||||
|
Supported query parameters and output format of this endpoint are identical to the list endpoint within an event.
|
||||||
|
|
||||||
|
**Example request**:
|
||||||
|
|
||||||
|
.. sourcecode:: http
|
||||||
|
|
||||||
|
GET /api/v1/organizers/bigevents/events/sampleconf/invoices/ HTTP/1.1
|
||||||
|
Host: pretix.eu
|
||||||
|
Accept: application/json, text/javascript
|
||||||
|
|
||||||
|
**Example response**:
|
||||||
|
|
||||||
|
.. sourcecode:: http
|
||||||
|
|
||||||
|
HTTP/1.1 200 OK
|
||||||
|
Vary: Accept
|
||||||
|
Content-Type: application/json
|
||||||
|
|
||||||
|
{
|
||||||
|
"count": 1,
|
||||||
|
"next": null,
|
||||||
|
"previous": null,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"number": "SAMPLECONF-00001",
|
||||||
|
"event": "sampleconf",
|
||||||
|
"order": "ABC12",
|
||||||
|
...
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
:param organizer: The ``slug`` field of the organizer to fetch
|
||||||
|
:statuscode 200: no error
|
||||||
|
:statuscode 401: Authentication failure
|
||||||
|
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
|
||||||
|
|
||||||
|
|
||||||
|
Fetching individual invoices
|
||||||
|
----------------------------
|
||||||
|
|
||||||
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/invoices/(number)/
|
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/invoices/(number)/
|
||||||
|
|
||||||
Returns information on one invoice, identified by its invoice number.
|
Returns information on one invoice, identified by its invoice number.
|
||||||
@@ -243,6 +293,7 @@ Endpoints
|
|||||||
|
|
||||||
{
|
{
|
||||||
"number": "SAMPLECONF-00001",
|
"number": "SAMPLECONF-00001",
|
||||||
|
"event": "sampleconf",
|
||||||
"order": "ABC12",
|
"order": "ABC12",
|
||||||
"is_cancellation": false,
|
"is_cancellation": false,
|
||||||
"invoice_from_name": "Big Events LLC",
|
"invoice_from_name": "Big Events LLC",
|
||||||
@@ -337,6 +388,12 @@ Endpoints
|
|||||||
:statuscode 409: The file is not yet ready and will now be prepared. Retry the request after waiting for a few
|
:statuscode 409: The file is not yet ready and will now be prepared. Retry the request after waiting for a few
|
||||||
seconds.
|
seconds.
|
||||||
|
|
||||||
|
|
||||||
|
Modifying invoices
|
||||||
|
------------------
|
||||||
|
|
||||||
|
Invoices cannot be edited directly, but the following actions can be triggered:
|
||||||
|
|
||||||
.. http:post:: /api/v1/organizers/(organizer)/events/(event)/invoices/(invoice_no)/reissue/
|
.. http:post:: /api/v1/organizers/(organizer)/events/(event)/invoices/(invoice_no)/reissue/
|
||||||
|
|
||||||
Cancels the invoice and creates a new one.
|
Cancels the invoice and creates a new one.
|
||||||
|
|||||||
@@ -20,6 +20,7 @@ The order resource contains the following public fields:
|
|||||||
Field Type Description
|
Field Type Description
|
||||||
===================================== ========================== =======================================================
|
===================================== ========================== =======================================================
|
||||||
code string Order code
|
code string Order code
|
||||||
|
event string The slug of the parent event
|
||||||
status string Order status, one of:
|
status string Order status, one of:
|
||||||
|
|
||||||
* ``n`` – pending
|
* ``n`` – pending
|
||||||
@@ -130,6 +131,10 @@ last_modified datetime Last modificati
|
|||||||
|
|
||||||
The ``valid_if_pending`` attribute has been added.
|
The ``valid_if_pending`` attribute has been added.
|
||||||
|
|
||||||
|
.. versionchanged:: 2023.8
|
||||||
|
|
||||||
|
The ``event`` attribute has been added. The organizer-level endpoint has been added.
|
||||||
|
|
||||||
|
|
||||||
.. _order-position-resource:
|
.. _order-position-resource:
|
||||||
|
|
||||||
@@ -289,6 +294,7 @@ List of all orders
|
|||||||
"results": [
|
"results": [
|
||||||
{
|
{
|
||||||
"code": "ABC12",
|
"code": "ABC12",
|
||||||
|
"event": "sampleconf",
|
||||||
"status": "p",
|
"status": "p",
|
||||||
"testmode": false,
|
"testmode": false,
|
||||||
"secret": "k24fiuwvu8kxz3y1",
|
"secret": "k24fiuwvu8kxz3y1",
|
||||||
@@ -441,6 +447,48 @@ List of all orders
|
|||||||
:statuscode 401: Authentication failure
|
:statuscode 401: Authentication failure
|
||||||
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
|
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
|
||||||
|
|
||||||
|
.. http:get:: /api/v1/organizers/(organizer)/orders/
|
||||||
|
|
||||||
|
Returns a list of all orders within all events of a given organizer (with sufficient access permissions).
|
||||||
|
|
||||||
|
Supported query parameters and output format of this endpoint are identical to the list endpoint within an event,
|
||||||
|
with the exception that the ``pdf_data`` parameter is not supported here.
|
||||||
|
|
||||||
|
**Example request**:
|
||||||
|
|
||||||
|
.. sourcecode:: http
|
||||||
|
|
||||||
|
GET /api/v1/organizers/bigevents/orders/ HTTP/1.1
|
||||||
|
Host: pretix.eu
|
||||||
|
Accept: application/json, text/javascript
|
||||||
|
|
||||||
|
**Example response**:
|
||||||
|
|
||||||
|
.. sourcecode:: http
|
||||||
|
|
||||||
|
HTTP/1.1 200 OK
|
||||||
|
Vary: Accept
|
||||||
|
Content-Type: application/json
|
||||||
|
X-Page-Generated: 2017-12-01T10:00:00Z
|
||||||
|
|
||||||
|
{
|
||||||
|
"count": 1,
|
||||||
|
"next": null,
|
||||||
|
"previous": null,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"code": "ABC12",
|
||||||
|
"event": "sampleconf",
|
||||||
|
...
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
:param organizer: The ``slug`` field of the organizer to fetch
|
||||||
|
:statuscode 200: no error
|
||||||
|
:statuscode 401: Authentication failure
|
||||||
|
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
|
||||||
|
|
||||||
Fetching individual orders
|
Fetching individual orders
|
||||||
--------------------------
|
--------------------------
|
||||||
|
|
||||||
@@ -466,6 +514,7 @@ Fetching individual orders
|
|||||||
|
|
||||||
{
|
{
|
||||||
"code": "ABC12",
|
"code": "ABC12",
|
||||||
|
"event": "sampleconf",
|
||||||
"status": "p",
|
"status": "p",
|
||||||
"testmode": false,
|
"testmode": false,
|
||||||
"secret": "k24fiuwvu8kxz3y1",
|
"secret": "k24fiuwvu8kxz3y1",
|
||||||
|
|||||||
@@ -18,7 +18,8 @@ The reusable medium resource contains the following public fields:
|
|||||||
Field Type Description
|
Field Type Description
|
||||||
===================================== ========================== =======================================================
|
===================================== ========================== =======================================================
|
||||||
id integer Internal ID of the medium
|
id integer Internal ID of the medium
|
||||||
type string Type of medium, e.g. ``"barcode"`` or ``"nfc_uid"``.
|
type string Type of medium, e.g. ``"barcode"``, ``"nfc_uid"`` or ``"nfc_mf0aes"``.
|
||||||
|
organizer string Organizer slug of the organizer who "owns" this medium.
|
||||||
identifier string Unique identifier of the medium. The format depends on the ``type``.
|
identifier string Unique identifier of the medium. The format depends on the ``type``.
|
||||||
active boolean Whether this medium may be used.
|
active boolean Whether this medium may be used.
|
||||||
created datetime Date of creation
|
created datetime Date of creation
|
||||||
@@ -36,6 +37,7 @@ Existing media types are:
|
|||||||
|
|
||||||
- ``barcode``
|
- ``barcode``
|
||||||
- ``nfc_uid``
|
- ``nfc_uid``
|
||||||
|
- ``nfc_mf0aes``
|
||||||
|
|
||||||
Endpoints
|
Endpoints
|
||||||
---------
|
---------
|
||||||
@@ -67,6 +69,7 @@ Endpoints
|
|||||||
"results": [
|
"results": [
|
||||||
{
|
{
|
||||||
"id": 1,
|
"id": 1,
|
||||||
|
"organizer": "bigevents",
|
||||||
"identifier": "ABCDEFGH",
|
"identifier": "ABCDEFGH",
|
||||||
"created": "2021-04-06T13:44:22.809377Z",
|
"created": "2021-04-06T13:44:22.809377Z",
|
||||||
"updated": "2021-04-06T13:44:22.809377Z",
|
"updated": "2021-04-06T13:44:22.809377Z",
|
||||||
@@ -123,6 +126,7 @@ Endpoints
|
|||||||
|
|
||||||
{
|
{
|
||||||
"id": 1,
|
"id": 1,
|
||||||
|
"organizer": "bigevents",
|
||||||
"identifier": "ABCDEFGH",
|
"identifier": "ABCDEFGH",
|
||||||
"created": "2021-04-06T13:44:22.809377Z",
|
"created": "2021-04-06T13:44:22.809377Z",
|
||||||
"updated": "2021-04-06T13:44:22.809377Z",
|
"updated": "2021-04-06T13:44:22.809377Z",
|
||||||
@@ -152,6 +156,9 @@ Endpoints
|
|||||||
Look up a new reusable medium by its identifier. In some cases, this might lead to the automatic creation of a new
|
Look up a new reusable medium by its identifier. In some cases, this might lead to the automatic creation of a new
|
||||||
medium behind the scenes.
|
medium behind the scenes.
|
||||||
|
|
||||||
|
This endpoint, and this endpoint only, might return media from a different organizer if there is a cross-acceptance
|
||||||
|
agreement. In this case, only linked gift cards will be returned, no order position or customer records,
|
||||||
|
|
||||||
**Example request**:
|
**Example request**:
|
||||||
|
|
||||||
.. sourcecode:: http
|
.. sourcecode:: http
|
||||||
@@ -176,6 +183,7 @@ Endpoints
|
|||||||
|
|
||||||
{
|
{
|
||||||
"id": 1,
|
"id": 1,
|
||||||
|
"organizer": "bigevents",
|
||||||
"identifier": "ABCDEFGH",
|
"identifier": "ABCDEFGH",
|
||||||
"created": "2021-04-06T13:44:22.809377Z",
|
"created": "2021-04-06T13:44:22.809377Z",
|
||||||
"updated": "2021-04-06T13:44:22.809377Z",
|
"updated": "2021-04-06T13:44:22.809377Z",
|
||||||
@@ -235,6 +243,7 @@ Endpoints
|
|||||||
|
|
||||||
{
|
{
|
||||||
"id": 1,
|
"id": 1,
|
||||||
|
"organizer": "bigevents",
|
||||||
"identifier": "ABCDEFGH",
|
"identifier": "ABCDEFGH",
|
||||||
"created": "2021-04-06T13:44:22.809377Z",
|
"created": "2021-04-06T13:44:22.809377Z",
|
||||||
"updated": "2021-04-06T13:44:22.809377Z",
|
"updated": "2021-04-06T13:44:22.809377Z",
|
||||||
@@ -291,6 +300,7 @@ Endpoints
|
|||||||
|
|
||||||
{
|
{
|
||||||
"id": 1,
|
"id": 1,
|
||||||
|
"organizer": "bigevents",
|
||||||
"identifier": "ABCDEFGH",
|
"identifier": "ABCDEFGH",
|
||||||
"created": "2021-04-06T13:44:22.809377Z",
|
"created": "2021-04-06T13:44:22.809377Z",
|
||||||
"updated": "2021-04-06T13:44:22.809377Z",
|
"updated": "2021-04-06T13:44:22.809377Z",
|
||||||
|
|||||||
@@ -18,8 +18,19 @@ subject multi-lingual string The subject of
|
|||||||
template multi-lingual string The body of the email
|
template multi-lingual string The body of the email
|
||||||
all_products boolean If ``true``, the email is sent to buyers of all products
|
all_products boolean If ``true``, the email is sent to buyers of all products
|
||||||
limit_products list of integers List of product IDs, if ``all_products`` is not set
|
limit_products list of integers List of product IDs, if ``all_products`` is not set
|
||||||
include_pending boolean If ``true``, the email is sent to pending orders. If ``false``,
|
[**DEPRECATED**] include_pending boolean If ``true``, the email is sent to pending orders. If ``false``,
|
||||||
only paid orders are considered.
|
only paid orders are considered.
|
||||||
|
restrict_to_status list List of order states to restrict recipients to. Valid
|
||||||
|
entries are ``p`` for paid, ``e`` for expired, ``c`` for canceled,
|
||||||
|
``n__pending_approval`` for pending approval,
|
||||||
|
``n__not_pending_approval_and_not_valid_if_pending`` for payment
|
||||||
|
pending, ``n__valid_if_pending`` for payment pending but already confirmed,
|
||||||
|
and ``n__pending_overdue`` for pending with payment overdue.
|
||||||
|
The default is ``["p", "n__valid_if_pending"]``.
|
||||||
|
checked_in_status string Check-in status to restrict recipients to. Valid strings are:
|
||||||
|
``null`` for no filtering (default), ``checked_in`` for
|
||||||
|
limiting to attendees that are or have been checked in, and
|
||||||
|
``no_checkin`` for limiting to attendees who have not checked in.
|
||||||
date_is_absolute boolean If ``true``, the email is set at a specific point in time.
|
date_is_absolute boolean If ``true``, the email is set at a specific point in time.
|
||||||
send_date datetime If ``date_is_absolute`` is set: Date and time to send the email.
|
send_date datetime If ``date_is_absolute`` is set: Date and time to send the email.
|
||||||
send_offset_days integer If ``date_is_absolute`` is not set, this is the number of days
|
send_offset_days integer If ``date_is_absolute`` is not set, this is the number of days
|
||||||
@@ -37,7 +48,10 @@ send_to string Can be ``"order
|
|||||||
or ``"both"``.
|
or ``"both"``.
|
||||||
date. Otherwise it is relative to the event start date.
|
date. Otherwise it is relative to the event start date.
|
||||||
===================================== ========================== =======================================================
|
===================================== ========================== =======================================================
|
||||||
|
.. versionchanged:: 2023.7
|
||||||
|
|
||||||
|
The ``include_pending`` field has been deprecated.
|
||||||
|
The ``restrict_to_status`` field has been added.
|
||||||
|
|
||||||
Endpoints
|
Endpoints
|
||||||
---------
|
---------
|
||||||
@@ -74,7 +88,12 @@ Endpoints
|
|||||||
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
||||||
"all_products": true,
|
"all_products": true,
|
||||||
"limit_products": [],
|
"limit_products": [],
|
||||||
"include_pending": false,
|
"restrict_to_status": [
|
||||||
|
"p",
|
||||||
|
"n__not_pending_approval_and_not_valid_if_pending",
|
||||||
|
"n__valid_if_pending"
|
||||||
|
],
|
||||||
|
"checked_in_status": null,
|
||||||
"send_date": null,
|
"send_date": null,
|
||||||
"send_offset_days": 1,
|
"send_offset_days": 1,
|
||||||
"send_offset_time": "18:00",
|
"send_offset_time": "18:00",
|
||||||
@@ -120,7 +139,12 @@ Endpoints
|
|||||||
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
||||||
"all_products": true,
|
"all_products": true,
|
||||||
"limit_products": [],
|
"limit_products": [],
|
||||||
"include_pending": false,
|
"restrict_to_status": [
|
||||||
|
"p",
|
||||||
|
"n__not_pending_approval_and_not_valid_if_pending",
|
||||||
|
"n__valid_if_pending"
|
||||||
|
],
|
||||||
|
"checked_in_status": null,
|
||||||
"send_date": null,
|
"send_date": null,
|
||||||
"send_offset_days": 1,
|
"send_offset_days": 1,
|
||||||
"send_offset_time": "18:00",
|
"send_offset_time": "18:00",
|
||||||
@@ -157,7 +181,12 @@ Endpoints
|
|||||||
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
||||||
"all_products": true,
|
"all_products": true,
|
||||||
"limit_products": [],
|
"limit_products": [],
|
||||||
"include_pending": false,
|
"restrict_to_status": [
|
||||||
|
"p",
|
||||||
|
"n__not_pending_approval_and_not_valid_if_pending",
|
||||||
|
"n__valid_if_pending"
|
||||||
|
],
|
||||||
|
"checked_in_status": "checked_in",
|
||||||
"send_date": null,
|
"send_date": null,
|
||||||
"send_offset_days": 1,
|
"send_offset_days": 1,
|
||||||
"send_offset_time": "18:00",
|
"send_offset_time": "18:00",
|
||||||
@@ -182,7 +211,12 @@ Endpoints
|
|||||||
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
||||||
"all_products": true,
|
"all_products": true,
|
||||||
"limit_products": [],
|
"limit_products": [],
|
||||||
"include_pending": false,
|
"restrict_to_status": [
|
||||||
|
"p",
|
||||||
|
"n__not_pending_approval_and_not_valid_if_pending",
|
||||||
|
"n__valid_if_pending"
|
||||||
|
],
|
||||||
|
"checked_in_status": "checked_in",
|
||||||
"send_date": null,
|
"send_date": null,
|
||||||
"send_offset_days": 1,
|
"send_offset_days": 1,
|
||||||
"send_offset_time": "18:00",
|
"send_offset_time": "18:00",
|
||||||
@@ -235,7 +269,12 @@ Endpoints
|
|||||||
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
||||||
"all_products": true,
|
"all_products": true,
|
||||||
"limit_products": [],
|
"limit_products": [],
|
||||||
"include_pending": false,
|
"restrict_to_status": [
|
||||||
|
"p",
|
||||||
|
"n__not_pending_approval_and_not_valid_if_pending",
|
||||||
|
"n__valid_if_pending"
|
||||||
|
],
|
||||||
|
"checked_in_status": "checked_in",
|
||||||
"send_date": null,
|
"send_date": null,
|
||||||
"send_offset_days": 1,
|
"send_offset_days": 1,
|
||||||
"send_offset_time": "18:00",
|
"send_offset_time": "18:00",
|
||||||
|
|||||||
@@ -63,6 +63,11 @@ last_modified datetime Last modificati
|
|||||||
|
|
||||||
The ``search`` query parameter has been added to filter sub-events by their name or location in any language.
|
The ``search`` query parameter has been added to filter sub-events by their name or location in any language.
|
||||||
|
|
||||||
|
.. versionchanged:: 5.0
|
||||||
|
|
||||||
|
The ``date_from_before``, ``date_from_after``, ``date_to_before``, and ``date_to_after`` query parameters have been
|
||||||
|
added.
|
||||||
|
|
||||||
Endpoints
|
Endpoints
|
||||||
---------
|
---------
|
||||||
|
|
||||||
@@ -130,6 +135,10 @@ Endpoints
|
|||||||
:query active: If set to ``true``/``false``, only events with a matching value of ``active`` are returned.
|
:query active: If set to ``true``/``false``, only events with a matching value of ``active`` are returned.
|
||||||
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned.
|
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned.
|
||||||
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned.
|
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned.
|
||||||
|
:query date_from_after: If set to a date and time, only events that start at or after the given time are returned.
|
||||||
|
:query date_from_before: If set to a date and time, only events that start at or before the given time are returned.
|
||||||
|
:query date_to_after: If set to a date and time, only events that have an end date and end at or after the given time are returned.
|
||||||
|
:query date_to_before: If set to a date and time, only events that have an end date and end at or before the given time are returned.
|
||||||
:query ends_after: If set to a date and time, only events that happen during of after the given time are returned.
|
:query ends_after: If set to a date and time, only events that happen during of after the given time are returned.
|
||||||
:query search: Only return events matching a given search query.
|
:query search: Only return events matching a given search query.
|
||||||
:param organizer: The ``slug`` field of a valid organizer
|
:param organizer: The ``slug`` field of a valid organizer
|
||||||
@@ -458,6 +467,10 @@ Endpoints
|
|||||||
:query event__live: If set to ``true``/``false``, only events with a matching value of ``live`` on the parent event are returned.
|
:query event__live: If set to ``true``/``false``, only events with a matching value of ``live`` on the parent event are returned.
|
||||||
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned.
|
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned.
|
||||||
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned.
|
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned.
|
||||||
|
:query date_from_after: If set to a date and time, only events that start at or after the given time are returned.
|
||||||
|
:query date_from_before: If set to a date and time, only events that start at or before the given time are returned.
|
||||||
|
:query date_to_after: If set to a date and time, only events that have an end date and end at or after the given time are returned.
|
||||||
|
:query date_to_before: If set to a date and time, only events that have an end date and end at or before the given time are returned.
|
||||||
:query ends_after: If set to a date and time, only events that happen during or after the given time are returned.
|
:query ends_after: If set to a date and time, only events that happen during or after the given time are returned.
|
||||||
:query sales_channel: If set to a sales channel identifier, the response will only contain subevents from events available on this sales channel.
|
:query sales_channel: If set to a sales channel identifier, the response will only contain subevents from events available on this sales channel.
|
||||||
:param organizer: The ``slug`` field of a valid organizer
|
:param organizer: The ``slug`` field of a valid organizer
|
||||||
|
|||||||
@@ -20,11 +20,16 @@ internal_name string An optional nam
|
|||||||
rate decimal (string) Tax rate in percent
|
rate decimal (string) Tax rate in percent
|
||||||
price_includes_tax boolean If ``true`` (default), tax is assumed to be included in
|
price_includes_tax boolean If ``true`` (default), tax is assumed to be included in
|
||||||
the specified product price
|
the specified product price
|
||||||
eu_reverse_charge boolean If ``true``, EU reverse charge rules are applied
|
eu_reverse_charge boolean If ``true``, EU reverse charge rules are applied. Will
|
||||||
|
be ignored if custom rules are set.
|
||||||
home_country string Merchant country (required for reverse charge), can be
|
home_country string Merchant country (required for reverse charge), can be
|
||||||
``null`` or empty string
|
``null`` or empty string
|
||||||
keep_gross_if_rate_changes boolean If ``true``, changes of the tax rate based on custom
|
keep_gross_if_rate_changes boolean If ``true``, changes of the tax rate based on custom
|
||||||
rules keep the gross price constant (default is ``false``)
|
rules keep the gross price constant (default is ``false``)
|
||||||
|
custom_rules object Dynamic rules specification. Each list element
|
||||||
|
corresponds to one rule that will be processed in order.
|
||||||
|
The current version of the schema in use can be found
|
||||||
|
`here`_.
|
||||||
===================================== ========================== =======================================================
|
===================================== ========================== =======================================================
|
||||||
|
|
||||||
|
|
||||||
@@ -32,6 +37,10 @@ keep_gross_if_rate_changes boolean If ``true``, ch
|
|||||||
|
|
||||||
The ``internal_name`` and ``keep_gross_if_rate_changes`` attributes have been added.
|
The ``internal_name`` and ``keep_gross_if_rate_changes`` attributes have been added.
|
||||||
|
|
||||||
|
.. versionchanged:: 2023.6
|
||||||
|
|
||||||
|
The ``custom_rules`` attribute has been added.
|
||||||
|
|
||||||
Endpoints
|
Endpoints
|
||||||
---------
|
---------
|
||||||
|
|
||||||
@@ -68,6 +77,7 @@ Endpoints
|
|||||||
"price_includes_tax": true,
|
"price_includes_tax": true,
|
||||||
"eu_reverse_charge": false,
|
"eu_reverse_charge": false,
|
||||||
"keep_gross_if_rate_changes": false,
|
"keep_gross_if_rate_changes": false,
|
||||||
|
"custom_rules": null,
|
||||||
"home_country": "DE"
|
"home_country": "DE"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
@@ -108,6 +118,7 @@ Endpoints
|
|||||||
"price_includes_tax": true,
|
"price_includes_tax": true,
|
||||||
"eu_reverse_charge": false,
|
"eu_reverse_charge": false,
|
||||||
"keep_gross_if_rate_changes": false,
|
"keep_gross_if_rate_changes": false,
|
||||||
|
"custom_rules": null,
|
||||||
"home_country": "DE"
|
"home_country": "DE"
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -156,6 +167,7 @@ Endpoints
|
|||||||
"price_includes_tax": true,
|
"price_includes_tax": true,
|
||||||
"eu_reverse_charge": false,
|
"eu_reverse_charge": false,
|
||||||
"keep_gross_if_rate_changes": false,
|
"keep_gross_if_rate_changes": false,
|
||||||
|
"custom_rules": null,
|
||||||
"home_country": "DE"
|
"home_country": "DE"
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -203,6 +215,7 @@ Endpoints
|
|||||||
"price_includes_tax": true,
|
"price_includes_tax": true,
|
||||||
"eu_reverse_charge": false,
|
"eu_reverse_charge": false,
|
||||||
"keep_gross_if_rate_changes": false,
|
"keep_gross_if_rate_changes": false,
|
||||||
|
"custom_rules": null,
|
||||||
"home_country": "DE"
|
"home_country": "DE"
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -242,3 +255,5 @@ Endpoints
|
|||||||
:statuscode 204: no error
|
:statuscode 204: no error
|
||||||
:statuscode 401: Authentication failure
|
:statuscode 401: Authentication failure
|
||||||
:statuscode 403: The requested organizer/event/rule does not exist **or** you have no permission to change it **or** this tax rule cannot be deleted since it is currently in use.
|
:statuscode 403: The requested organizer/event/rule does not exist **or** you have no permission to change it **or** this tax rule cannot be deleted since it is currently in use.
|
||||||
|
|
||||||
|
.. _here: https://github.com/pretix/pretix/blob/master/src/pretix/static/schema/tax-rules-custom.schema.json
|
||||||
|
|||||||
@@ -50,6 +50,10 @@ The following values for ``action_types`` are valid with pretix core:
|
|||||||
* ``pretix.event.order.payment.confirmed``
|
* ``pretix.event.order.payment.confirmed``
|
||||||
* ``pretix.event.order.approved``
|
* ``pretix.event.order.approved``
|
||||||
* ``pretix.event.order.denied``
|
* ``pretix.event.order.denied``
|
||||||
|
* ``pretix.event.orders.waitinglist.added``
|
||||||
|
* ``pretix.event.orders.waitinglist.changed``
|
||||||
|
* ``pretix.event.orders.waitinglist.deleted``
|
||||||
|
* ``pretix.event.orders.waitinglist.voucher_assigned``
|
||||||
* ``pretix.event.checkin``
|
* ``pretix.event.checkin``
|
||||||
* ``pretix.event.checkin.reverted``
|
* ``pretix.event.checkin.reverted``
|
||||||
* ``pretix.event.added``
|
* ``pretix.event.added``
|
||||||
@@ -63,6 +67,9 @@ The following values for ``action_types`` are valid with pretix core:
|
|||||||
* ``pretix.event.live.deactivated``
|
* ``pretix.event.live.deactivated``
|
||||||
* ``pretix.event.testmode.activated``
|
* ``pretix.event.testmode.activated``
|
||||||
* ``pretix.event.testmode.deactivated``
|
* ``pretix.event.testmode.deactivated``
|
||||||
|
* ``pretix.customer.created``
|
||||||
|
* ``pretix.customer.changed``
|
||||||
|
* ``pretix.customer.anonymized``
|
||||||
|
|
||||||
Installed plugins might register more valid values.
|
Installed plugins might register more valid values.
|
||||||
|
|
||||||
|
|||||||
@@ -18,13 +18,13 @@ If you want to add a custom view to the control area of an event, just register
|
|||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
from django.conf.urls import url
|
from django.urls import re_path
|
||||||
|
|
||||||
from . import views
|
from . import views
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
url(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
|
re_path(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
|
||||||
views.admin_view, name='backend'),
|
views.admin_view, name='backend'),
|
||||||
]
|
]
|
||||||
|
|
||||||
It is required that your URL parameters are called ``organizer`` and ``event``. If you want to
|
It is required that your URL parameters are called ``organizer`` and ``event``. If you want to
|
||||||
|
|||||||
@@ -61,7 +61,7 @@ Backend
|
|||||||
item_formsets, order_search_filter_q, order_search_forms
|
item_formsets, order_search_filter_q, order_search_forms
|
||||||
|
|
||||||
.. automodule:: pretix.base.signals
|
.. automodule:: pretix.base.signals
|
||||||
:members: logentry_display, logentry_object_link, requiredaction_display, timeline_events, orderposition_blocked_display
|
:members: logentry_display, logentry_object_link, requiredaction_display, timeline_events, orderposition_blocked_display, customer_created, customer_signed_in
|
||||||
|
|
||||||
Vouchers
|
Vouchers
|
||||||
""""""""
|
""""""""
|
||||||
|
|||||||
@@ -70,6 +70,8 @@ The provider class
|
|||||||
|
|
||||||
.. autoattribute:: settings_form_fields
|
.. autoattribute:: settings_form_fields
|
||||||
|
|
||||||
|
.. autoattribute:: walletqueries
|
||||||
|
|
||||||
.. automethod:: settings_form_clean
|
.. automethod:: settings_form_clean
|
||||||
|
|
||||||
.. automethod:: settings_content_render
|
.. automethod:: settings_content_render
|
||||||
|
|||||||
@@ -37,7 +37,7 @@ you to execute a piece of code with a different locale:
|
|||||||
This is very useful e.g. when sending an email to a user that has a different language than the user performing the
|
This is very useful e.g. when sending an email to a user that has a different language than the user performing the
|
||||||
action that causes the mail to be sent.
|
action that causes the mail to be sent.
|
||||||
|
|
||||||
.. _translation features: https://docs.djangoproject.com/en/1.9/topics/i18n/translation/
|
.. _translation features: https://docs.djangoproject.com/en/4.2/topics/i18n/translation/
|
||||||
.. _GNU gettext: https://www.gnu.org/software/gettext/
|
.. _GNU gettext: https://www.gnu.org/software/gettext/
|
||||||
.. _strings: https://django-i18nfield.readthedocs.io/en/latest/strings.html
|
.. _strings: https://django-i18nfield.readthedocs.io/en/latest/strings.html
|
||||||
.. _database fields: https://django-i18nfield.readthedocs.io/en/latest/quickstart.html
|
.. _database fields: https://django-i18nfield.readthedocs.io/en/latest/quickstart.html
|
||||||
|
|||||||
@@ -15,33 +15,41 @@ and the admin panel is available at ``https://pretix.eu/control/event/bigorg/awe
|
|||||||
|
|
||||||
If the organizer now configures a custom domain like ``tickets.bigorg.com``, his event will
|
If the organizer now configures a custom domain like ``tickets.bigorg.com``, his event will
|
||||||
from now on be available on ``https://tickets.bigorg.com/awesomecon/``. The former URL at
|
from now on be available on ``https://tickets.bigorg.com/awesomecon/``. The former URL at
|
||||||
``pretix.eu`` will redirect there. However, the admin panel will still only be available
|
``pretix.eu`` will redirect there. It's also possible to do this for just an event, in which
|
||||||
on ``pretix.eu`` for convenience and security reasons.
|
case the event will be available on ``https://tickets.awesomecon.org/``.
|
||||||
|
|
||||||
|
However, the admin panel will still only be available on ``pretix.eu`` for convenience and security reasons.
|
||||||
|
|
||||||
URL routing
|
URL routing
|
||||||
-----------
|
-----------
|
||||||
|
|
||||||
The hard part about implementing this URL routing in Django is that
|
The hard part about implementing this URL routing in Django is that
|
||||||
``https://pretix.eu/bigorg/awesomecon/`` contains two parameters of nearly arbitrary content
|
``https://pretix.eu/bigorg/awesomecon/`` contains two parameters of nearly arbitrary content
|
||||||
and ``https://tickets.bigorg.com/awesomecon/`` contains only one. The only robust way to do
|
and ``https://tickets.bigorg.com/awesomecon/`` contains only one and ``https://tickets.awesomecon.org/`` does not contain any.
|
||||||
this is by having *separate* URL configuration for those two cases. In pretix, we call the
|
The only robust way to do this is by having *separate* URL configuration for those three cases.
|
||||||
former our ``maindomain`` config and the latter our ``subdomain`` config. For pretix's core
|
|
||||||
modules we do some magic to avoid duplicate configuration, but for a fairly simple plugin with
|
|
||||||
only a handful of routes, we recommend just configuring the two URL sets separately.
|
|
||||||
|
|
||||||
|
In pretix, we therefore do not have a global URL configuration, but three, living in the following modules:
|
||||||
|
|
||||||
|
- ``pretix.multidomain.maindomain_urlconf``
|
||||||
|
- ``pretix.multidomain.organizer_domain_urlconf``
|
||||||
|
- ``pretix.multidomain.event_domain_urlconf``
|
||||||
|
|
||||||
|
We provide some helper utilities to work with these to avoid duplicate configuration of the individual URLs.
|
||||||
The file ``urls.py`` inside your plugin package will be loaded and scanned for URL configuration
|
The file ``urls.py`` inside your plugin package will be loaded and scanned for URL configuration
|
||||||
automatically and should be provided by any plugin that provides any view.
|
automatically and should be provided by any plugin that provides any view.
|
||||||
|
However, unlike plain Django, we look not only for a ``urlpatterns`` attribute on the module but support other
|
||||||
|
attributes like ``event_patterns`` and ``organizer_patterns`` as well.
|
||||||
|
|
||||||
A very basic example that provides one view in the admin panel and one view in the frontend
|
For example, for a simple plugin that adds one URL to the backend and one event-level URL to the frontend, you can
|
||||||
could look like this::
|
create the following configuration in your ``urls.py``::
|
||||||
|
|
||||||
from django.conf.urls import url
|
from django.urls import re_path
|
||||||
|
|
||||||
from . import views
|
from . import views
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
url(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
|
re_path(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
|
||||||
views.AdminView.as_view(), name='backend'),
|
views.AdminView.as_view(), name='backend'),
|
||||||
]
|
]
|
||||||
|
|
||||||
event_patterns = [
|
event_patterns = [
|
||||||
@@ -52,7 +60,7 @@ could look like this::
|
|||||||
As you can see, the view in the frontend is not included in the standard Django ``urlpatterns``
|
As you can see, the view in the frontend is not included in the standard Django ``urlpatterns``
|
||||||
setting but in a separate list with the name ``event_patterns``. This will automatically prepend
|
setting but in a separate list with the name ``event_patterns``. This will automatically prepend
|
||||||
the appropriate parameters to the regex (e.g. the event or the event and the organizer, depending
|
the appropriate parameters to the regex (e.g. the event or the event and the organizer, depending
|
||||||
on the called domain).
|
on the called domain). For organizer-level views, ``organizer_patterns`` works the same way.
|
||||||
|
|
||||||
If you only provide URLs in the admin area, you do not need to provide a ``event_patterns`` attribute.
|
If you only provide URLs in the admin area, you do not need to provide a ``event_patterns`` attribute.
|
||||||
|
|
||||||
@@ -71,11 +79,16 @@ is a python method that emulates a behavior similar to ``reverse``:
|
|||||||
|
|
||||||
.. autofunction:: pretix.multidomain.urlreverse.eventreverse
|
.. autofunction:: pretix.multidomain.urlreverse.eventreverse
|
||||||
|
|
||||||
|
If you need to communicate the URL externally, you can use a different method to ensure that it is always an absolute URL:
|
||||||
|
|
||||||
|
.. autofunction:: pretix.multidomain.urlreverse.build_absolute_uri
|
||||||
|
|
||||||
In addition, there is a template tag that works similar to ``url`` but takes an event or organizer object
|
In addition, there is a template tag that works similar to ``url`` but takes an event or organizer object
|
||||||
as its first argument and can be used like this::
|
as its first argument and can be used like this::
|
||||||
|
|
||||||
{% load eventurl %}
|
{% load eventurl %}
|
||||||
<a href="{% eventurl request.event "presale:event.checkout" step="payment" %}">Pay</a>
|
<a href="{% eventurl request.event "presale:event.checkout" step="payment" %}">Pay</a>
|
||||||
|
<a href="{% abseventurl request.event "presale:event.checkout" step="payment" %}">Pay</a>
|
||||||
|
|
||||||
|
|
||||||
Implementation details
|
Implementation details
|
||||||
|
|||||||
@@ -12,3 +12,4 @@ Developer documentation
|
|||||||
api/index
|
api/index
|
||||||
structure
|
structure
|
||||||
translation/index
|
translation/index
|
||||||
|
nfc/index
|
||||||
|
|||||||
15
doc/development/nfc/index.rst
Normal file
15
doc/development/nfc/index.rst
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
NFC media
|
||||||
|
=========
|
||||||
|
|
||||||
|
pretix supports using NFC chips as "reusable media", for example to store gift cards or tickets.
|
||||||
|
|
||||||
|
Most of this implementation currently lives in our proprietary app pretixPOS, but in the future might also become part of our open-source pretixSCAN solution.
|
||||||
|
Either way, we want this to be an open ecosystem and therefore document the exact mechanisms in use on the following pages.
|
||||||
|
|
||||||
|
We support multiple implementations of NFC media, each documented on its own page:
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 2
|
||||||
|
|
||||||
|
uid
|
||||||
|
mf0aes
|
||||||
113
doc/development/nfc/mf0aes.rst
Normal file
113
doc/development/nfc/mf0aes.rst
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
Mifare Ultralight AES
|
||||||
|
=====================
|
||||||
|
|
||||||
|
We offer an implementation that provides a higher security level than the UID-based approach and uses the `Mifare Ultralight AES`_ chip sold by NXP.
|
||||||
|
We believe the security model of this approach is adequate to the situation where this will usually be used and we'll outline known risks below.
|
||||||
|
|
||||||
|
If you want to dive deeper into the properties of the Mifare Ultralight AES chip, we recommend reading the `data sheet`_.
|
||||||
|
|
||||||
|
Random UIDs
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Mifare Ultralight AES supports a feature that returns a randomized UID every time a non-authenticated user tries to
|
||||||
|
read the UID. This has a strong privacy benefit, since no unauthorized entity can use the NFC chips to track users.
|
||||||
|
On the other hand, this reduces interoperability of the system. For example, this prevents you from using the same NFC
|
||||||
|
chips for a different purpose where you only need the UID. This will also prevent your guests from reading their UID
|
||||||
|
themselves with their phones, which might be useful e.g. in debugging situations.
|
||||||
|
|
||||||
|
Since there's no one-size-fits-all choice here, you can enable or disable this feature in the pretix organizer
|
||||||
|
settings. If you change it, the change will apply to all newly encoded chips after the change.
|
||||||
|
|
||||||
|
Key management
|
||||||
|
--------------
|
||||||
|
|
||||||
|
For every organizer, the server will generate a "key set", which consists of a publicly known ID (random 32-bit integer) and two 16-byte keys ("diversification key" and "UID key").
|
||||||
|
|
||||||
|
Using our :ref:`Device authentication mechanism <rest-deviceauth>`, an authorized device can submit a locally generated RSA public key to the server.
|
||||||
|
This key can no longer be changed on the server once it is set, thus protecting against the attack scenario of a leaked device API token.
|
||||||
|
|
||||||
|
The server will then include key sets in the response to ``/api/v1/device/info``, encrypted with the device's RSA key.
|
||||||
|
This includes all key sets generated for the organizer the device belongs to, as well as all keys of organizers that have granted sufficient access to this organizer.
|
||||||
|
|
||||||
|
The device will decrypt the key sets using its RSA key and store the key sets locally.
|
||||||
|
|
||||||
|
.. warning:: The device **will** have access to the raw key sets. Therefore, there is a risk of leaked master keys if an
|
||||||
|
authorized device is stolen or abused. Our implementation in pretixPOS attempts to make this very hard on
|
||||||
|
modern, non-rooted Android devices by keeping them encrypted with the RSA key and only storing the RSA key
|
||||||
|
in the hardware-backed keystore of the device. A sufficiently motivated attacker, however, will likely still
|
||||||
|
be able to extract the keys from a stolen device.
|
||||||
|
|
||||||
|
Encoding a chip
|
||||||
|
---------------
|
||||||
|
|
||||||
|
When a new chip is encoded, the following steps will be taken:
|
||||||
|
|
||||||
|
- The UID of the chip is retrieved.
|
||||||
|
|
||||||
|
- A chip-specific key is generated using the mechanism documented in `AN10922`_ using the "diversification key" from the
|
||||||
|
organizer's key set as the CMAC key and the diversification input concatenated in the form of ``0x01 + UID + APPID + SYSTEMID``
|
||||||
|
with the following values:
|
||||||
|
|
||||||
|
- The UID of the chip as ``UID``
|
||||||
|
|
||||||
|
- ``"eu.pretix"`` (``0x65 0x75 0x2e 0x70 0x72 0x65 0x74 0x69 0x78``) as ``APPID``
|
||||||
|
|
||||||
|
- The ``public_id`` from the organizer's key set as a 4-byte big-endian value as ``SYSTEMID``
|
||||||
|
|
||||||
|
- The chip-specific key is written to the chip as the "data protection key" (config pages 0x30 to 0x33)
|
||||||
|
|
||||||
|
- The UID key from the organizer's key set is written to the chip as the "UID retrieval key" (config pages 0x34 to 0x37)
|
||||||
|
|
||||||
|
- The config page 0x29 is set like this:
|
||||||
|
|
||||||
|
- ``RID_ACT`` (random UID) to ``1`` or ``0`` based on the organizer's configuration
|
||||||
|
- ``SEC_MSG_ACT`` (secure messaging) to ``1``
|
||||||
|
- ``AUTH0`` (first page that needs authentication) to 0x04 (first non-UID page)
|
||||||
|
|
||||||
|
- The config page 0x2A is set like this:
|
||||||
|
|
||||||
|
- ``PROT`` to ``0`` (only write access restricted, not read access)
|
||||||
|
- ``AUTHLIM`` to ``256`` (maximum number of wrong authentications before "self-destruction")
|
||||||
|
- Everything else to its default value (no lock bits are set)
|
||||||
|
|
||||||
|
- The ``public_id`` of the key set will be written to page 0x04 as a big-endian value
|
||||||
|
|
||||||
|
- The UID of the chip will be registered as a reusable medium on the server.
|
||||||
|
|
||||||
|
.. warning:: During encoding, the chip-specific key and the UID key are transmitted in plain text over the air. The
|
||||||
|
security model therefore relies on the encoding of chips being performed in a trusted physical environment
|
||||||
|
to prevent a nearby attacker from sniffing the keys with a strong antenna.
|
||||||
|
|
||||||
|
.. note:: If an attacker tries to authenticate with the chip 256 times using the wrong key, the chip will become
|
||||||
|
unusable. A chip may also become unusable if it is detached from the reader in the middle of the encoding
|
||||||
|
process (even though we've tried to implement it in a way that makes this unlikely).
|
||||||
|
|
||||||
|
Usage
|
||||||
|
-----
|
||||||
|
|
||||||
|
When a chip is presented to the NFC reader, the following steps will be taken:
|
||||||
|
|
||||||
|
- Command ``GET_VERSION`` is used to determine if it is a Mifare Ultralight AES chip (if not, abort).
|
||||||
|
|
||||||
|
- Page 0x04 is read. If it is all zeroes, the chip is considered un-encoded (abort). If it contains a value that
|
||||||
|
corresponds to the ``public_id`` of a known key set, this key set is used for all further operations. If it contains
|
||||||
|
a different value, we consider this chip to belong to a different organizer or not to a pretix system at all (abort).
|
||||||
|
|
||||||
|
- An authentication with the chip using the UID key is performed.
|
||||||
|
|
||||||
|
- The UID of the chip will be read.
|
||||||
|
|
||||||
|
- The chip-specific key will be derived using the mechanism described above in the encoding step.
|
||||||
|
|
||||||
|
- An authentication with the chip using the chip-specific key is performed. If this is fully successful, this step
|
||||||
|
proves that the chip knows the same chip-specific key as we do and is therefore an authentic chip encoded by us and
|
||||||
|
we can trust its UID value.
|
||||||
|
|
||||||
|
- The UID is transmitted to the server to fetch the correct medium.
|
||||||
|
|
||||||
|
During these steps, the keys are never transmitted in plain text and can thus not be sniffed by a nearby attacker
|
||||||
|
with a strong antenna.
|
||||||
|
|
||||||
|
.. _Mifare Ultralight AES: https://www.nxp.com/products/rfid-nfc/mifare-hf/mifare-ultralight/mifare-ultralight-aes-enhanced-security-for-limited-use-contactless-applications:MF0AESx20
|
||||||
|
.. _data sheet: https://www.nxp.com/docs/en/data-sheet/MF0AES(H)20.pdf
|
||||||
|
.. _AN10922: https://www.nxp.com/docs/en/application-note/AN10922.pdf
|
||||||
10
doc/development/nfc/uid.rst
Normal file
10
doc/development/nfc/uid.rst
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
UID-based
|
||||||
|
=========
|
||||||
|
|
||||||
|
With UID-based NFC, only the unique ID (UID) of the NFC chip is used for identification purposes.
|
||||||
|
This can be used with virtually all NFC chips that provide compatibility with the NFC reader in use, typically at least all chips that comply with ISO/IEC 14443-3A.
|
||||||
|
|
||||||
|
We make only one restriction: The UID may not start with ``08``, since that usually signifies a randomized UID that changes on every read (which would not be very useful).
|
||||||
|
|
||||||
|
.. warning:: The UID-based approach provides only a very low level of security. It is easy to clone a chip with the same
|
||||||
|
UID and impersonate someone else.
|
||||||
@@ -96,6 +96,20 @@ http://localhost:8000/control/ for the admin view.
|
|||||||
port (for example because you develop on `pretixdroid`_), you can check
|
port (for example because you develop on `pretixdroid`_), you can check
|
||||||
`Django's documentation`_ for more options.
|
`Django's documentation`_ for more options.
|
||||||
|
|
||||||
|
When running the local development webserver, ensure Celery is not configured
|
||||||
|
in ``pretix.cfg``. i.e., you should remove anything such as::
|
||||||
|
|
||||||
|
[celery]
|
||||||
|
backend=redis://redis:6379/2
|
||||||
|
broker=redis://redis:6379/2
|
||||||
|
|
||||||
|
If you choose to use Celery for development, you must also start a Celery worker
|
||||||
|
process::
|
||||||
|
|
||||||
|
celery -A pretix.celery_app worker -l info
|
||||||
|
|
||||||
|
However, beware that code changes will not auto-reload within Celery.
|
||||||
|
|
||||||
.. _`checksandtests`:
|
.. _`checksandtests`:
|
||||||
|
|
||||||
Code checks and unit tests
|
Code checks and unit tests
|
||||||
|
|||||||
143
doc/plugins/epaybl.rst
Normal file
143
doc/plugins/epaybl.rst
Normal file
@@ -0,0 +1,143 @@
|
|||||||
|
ePayBL
|
||||||
|
======
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
Since ePayBL is only available to german federal, provincial and communal entities, the following page is also
|
||||||
|
only provided in german. Should you require assistance with ePayBL and do not speak this language, please feel free
|
||||||
|
reach out to support@pretix.eu.
|
||||||
|
|
||||||
|
|
||||||
|
Einführung
|
||||||
|
----------
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
Sollten Sie lediglich schnell entscheiden wollen, welcher Kontierungsmodus in den Einstellungen des pretix
|
||||||
|
ePayBL-plugins gewählt werden soll, so springen Sie direkt zur Sektion :ref:`Kontierungsmodus`.
|
||||||
|
|
||||||
|
|
||||||
|
`ePayBL`_ - das ePayment-System von Bund und Länder - ist das am weitesten verbreitete Zahlungssystem für Bundes-, Länder-
|
||||||
|
sowie kommunale Aufgabenträger. Während es nur wie eines von vielen anderen Zahlungssystemen scheint, so bietet es
|
||||||
|
seinen Nutzern besondere Vorteile, wie die automatische Erfassung von Zahlungsbelegen, dem Übertragen von Buchungen in
|
||||||
|
Haushaltskassen/-systeme sowie die automatische Erfassung von Kontierungen und Steuermerkmalen.
|
||||||
|
|
||||||
|
Rein technisch gesehen ist ePayBL hierbei nicht ein eigenständiger Zahlungsdienstleister sondern nur ein eine Komponente
|
||||||
|
im komplexen System, dass die Zahlungsabwicklung für Kommunen und Behörden ist.
|
||||||
|
|
||||||
|
Im folgenden der schematische Aufbau einer Umgebung, in welcher ePayBL zum Einsatz kommt:
|
||||||
|
|
||||||
|
.. figure:: img/epaybl_flowchart.png
|
||||||
|
:class: screenshot
|
||||||
|
|
||||||
|
Quelle: Integrationshandbuch ePayBL-Konnektor, DResearch Digital Media Systems GmbH
|
||||||
|
|
||||||
|
|
||||||
|
In diesem Schaubild stellt pretix, bzw. die von Ihnen als Veranstalter angelegten Ticketshops, das Fachverfahren dar.
|
||||||
|
|
||||||
|
ePayBL stellt das Bindeglied zwischen den Fachverfahren, Haushaltssystemen und dem eigentlichen Zahlungsdienstleister,
|
||||||
|
dem sog. ZV-Provider dar. Dieser ZV-Provider ist die Stelle, welche die eigentlichen Kundengelder einzieht und an den
|
||||||
|
Händler auszahlt. Das Gros der Zahlungsdienstleister unterstützt pretix hierbei auch direkt; sprich: Sollten Sie die
|
||||||
|
Anbindung an Ihre Haushaltssysteme nicht benötigen, kann eine direkte Anbindung in der Regel ebenso - und dies bei meist
|
||||||
|
vermindertem Aufwand - vorgenommen werden.
|
||||||
|
|
||||||
|
In der Vergangenheit zeigte sich jedoch schnell, dass nicht jeder IT-Dienstleister immer sofort die neueste Version von
|
||||||
|
ePayBL seinen Nutzern angeboten hat. Die Gründe hierfür sind mannigfaltig: Von fest vorgegebenen Update-Zyklen bis hin
|
||||||
|
zu Systeme mit speziellen Anpassungen, kann leider nicht davon ausgegangen werden, dass alle ePayBL-Systeme exakt gleich
|
||||||
|
ansprechbar sind - auch wenn es sich dabei eigentlich um einen standardisierten Dienst handelt.
|
||||||
|
|
||||||
|
Aus diesem Grund gibt es mit dem ePayBL-Konnektor eine weitere Abstraktionsschicht welche optional zwischen den
|
||||||
|
Fachverfahren und dem ePayBL-Server sitzt. Dieser Konnektor wird so gepflegt, dass er zum einen eine dauerhaft
|
||||||
|
gleichartige Schnittstelle den Fachverfahren bietet aber gleichzeitig auch mit jeder Version des ePayBL-Servers
|
||||||
|
kommunizieren kann - egal wie neu oder alt, wie regulär oder angepasst diese ist.
|
||||||
|
|
||||||
|
Im Grunde müsste daher eigentlich immer gesagt werden, dass pretix eine Anbindung an den ePayBL-Konnektor bietet; nicht
|
||||||
|
an "ePayBL" oder den "ePayBL-Server". Diese Unterscheidung kann bei der Ersteinrichtung und Anforderung von Zugangsdaten
|
||||||
|
von Relevanz sein. Da in der Praxis jedoch beide Begriffe gleichbedeutend genutzt werden, wird im Folgenden auch nur von
|
||||||
|
einer ePayBL-Anbindung die Rede sein - auch wenn explizit der Konnektor gemeint ist.
|
||||||
|
|
||||||
|
|
||||||
|
.. _`Kontierungsmodus`:
|
||||||
|
|
||||||
|
Kontierungsmodus
|
||||||
|
----------------
|
||||||
|
|
||||||
|
ePayBL ist ein Produkt, welches für die Abwicklung von Online-Zahlungsvorgängen in der Verwaltung geschaffen wurde. Ein
|
||||||
|
Umfeld, in dem klar definiert ist, was ein Kunde gerade bezahlt und wohin das Geld genau fließt. Diese Annahmen lassen
|
||||||
|
sich in einem Ticketshop wie pretix jedoch nur teilweise genauso abbilden.
|
||||||
|
|
||||||
|
Die ePayBL-Integration für pretix bietet daher zwei unterschiedliche Modi an, wie Buchungen erfasst und an ePayBL und
|
||||||
|
damit auch an die dahinterliegenden Haushaltssysteme gemeldet werden können.
|
||||||
|
|
||||||
|
Kontierung pro Position/Artikel
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
Dieser Modus versucht den klassischen, behördentypischen ePayBL-Zahlungsvorgang abzubilden: Jede einzelne Position, die
|
||||||
|
ein Kunde in den Warenkorb legt, wird auch genauso 1:1 an ePayBL und die Hintergrundsysteme übermittelt.
|
||||||
|
|
||||||
|
Hierbei muss zwingend auch für jede Position ein Kennzeichen für Haushaltsstelle und Objektnummer, sowie optional ein
|
||||||
|
Kontierungsobjekt (``HREF``; bspw. ``stsl=Steuerschlüssel;psp=gsb:Geschäftsbereich,auft:Innenauftrag,kst:Kostenstelle;``
|
||||||
|
) übermittelt werden.
|
||||||
|
|
||||||
|
Diese Daten sind vom Veranstalter entsprechend für jeden in der Veranstaltung angelegten Artikel innerhalb des Tabs
|
||||||
|
"Zusätzliche Einstellungen" der Produkteinstellungen zu hinterlegen.
|
||||||
|
|
||||||
|
Während diese Einstellung eine größtmögliche Menge an Kontierungsdaten überträgt und auch ein separates Verbuchen von
|
||||||
|
Leistungen auf unterschiedliche Haushaltsstellen erlaubt, so hat diese Option auch einen großen Nachteil: Der Kunde kann
|
||||||
|
nur eine Zahlung für seine Bestellung leisten.
|
||||||
|
|
||||||
|
Während sich dies nicht nach einem großen Problem anhört, so kann dies beim Kunden zu Frust führen. pretix bietet die
|
||||||
|
Option an, dass ein Veranstalter eine Bestellung jederzeit verändern kann: Ändern von Preisen von Positionen in einer
|
||||||
|
aufgegebenen Bestellung, Zubuchen und Entfernen von Bestellpositionen, etc. Hat der Kunde seine ursprüngliche Bestellung
|
||||||
|
jedoch schon bezahlt, kann pretix nicht mehr die komplette Bestellung mit den passenden Kontierungen übertragen - es
|
||||||
|
müsste nur ein Differenz-Abbild zwischen Ursprungsbestellung und aktueller Bestellung übertragen werden. Aber auch wenn
|
||||||
|
eine "Nachmeldung" möglich wäre, so wäre ein konkretes Auflösen für was jetzt genau gezahlt wird, nicht mehr möglich.
|
||||||
|
|
||||||
|
Daher gilt bei der Nutzung der Kontierung pro Position/Artikel: Der Kunde kann nur eine (erfolgreiche) Zahlung auf seine
|
||||||
|
Bestellung leisten.
|
||||||
|
|
||||||
|
Eine weitere Einschränkung dieses Modus ist, dass aktuell keine Gebühren-Positionen (Versandkosten, Zahlungs-, Storno-
|
||||||
|
oder Servicegebühren) in diesem Modus übertragen werden können. Bitte wenden Sie sich an uns, wenn Sie diese
|
||||||
|
Funktionalität benötigen.
|
||||||
|
|
||||||
|
|
||||||
|
Kontierung pro Zahlvorgang
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
Dieser Modus verabschiedet sich vom behördlichen "Jede Position gehört genau zu einem Haushaltskonto und muss genau
|
||||||
|
zugeordnet werden". Stattdessen werden alle Bestellpositionen - inklusive eventuell definierter Gebühren - vermengt und
|
||||||
|
nur als ein großer Warenkorb, genauer gesagt: eine einzige Position an ePayBL sowie die Hintergrundsysteme gemeldet.
|
||||||
|
|
||||||
|
Während im "pro Postion/Artikel"-Modus jeder Artikel einzeln übermittelt wird und damit auch korrekt pro Artikel der
|
||||||
|
jeweilige Brutto- und Nettopreis, sowie der anfallende Steuerbetrag und ein Steuerkennzeichen (mit Hilfe des optionalen
|
||||||
|
``HREF``-Attributs) übermittelt werden, ist dies im "pro Zahlvorgang"-Modus nicht möglich.
|
||||||
|
|
||||||
|
Stattdessen übermittelt pretix nur einen Betrag für den gesamten Warenkorb: Bruttopreis == Nettopreis. Der Steuerbetrag
|
||||||
|
wird hierbei als 0 übermittelt.
|
||||||
|
|
||||||
|
Die Angabe einer Haushaltsstelle und Objektnummer, sowie optional der ``HREF``-Kontierungsinformationen ist jedoch
|
||||||
|
weiterhin notwendig - allerdings nicht mehr individuell für jeden Artikel/jede Position sondern nur für die gesamte
|
||||||
|
Bestellung. Diese Daten sind direkt in den ePayBL-Einstellungen der Veranstaltung unter Einstellungen -> Zahlung ->
|
||||||
|
ePayBL vorzunehmen
|
||||||
|
|
||||||
|
In der Praxis bedeutet dies, dass in einem angeschlossenen Haushaltssystem nicht nachvollzogen kann, welche Positionen
|
||||||
|
konkret erworben und bezahlt wurden - stattdessen kann nur der Fakt, dass etwas verkauft wurde erfasst werden.
|
||||||
|
|
||||||
|
Je nach Aufbau und Vorgaben der Finanzbuchhaltung kann dies jedoch ausreichend sein - wenn bspw. eine Ferienfahrt
|
||||||
|
angeboten wird und seitens der Haushaltssysteme nicht erfasst werden muss, wie viel vom Gesamtbetrag einer Bestellung
|
||||||
|
auf die Ferienfahrt an sich, auf einen Zubringerbus und einen Satz Bettwäsche entfallen ist, sondern (vereinfacht
|
||||||
|
gesagt) es ausreichend ist, dass "Eine Summe X für die Haushaltsstelle/Objektnummer geflossen ist".
|
||||||
|
|
||||||
|
Dieser Modus der Kontierung bietet Ihnen auch als Vorteil gegenüber dem vorhergehenden an, dass die Bestellungen der
|
||||||
|
Kunden jederzeit erweitert und verändert werden können - auch wenn die Ursprungsbestellung schon bezahlt wurde und nur
|
||||||
|
noch eine Differenz gezahlt wird.
|
||||||
|
|
||||||
|
|
||||||
|
Einschränkungen
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Zum aktuellen Zeitpunkt erlaubt die pretix-Anbindung an ePayBL nicht das durchführen von Erstattungen von bereits
|
||||||
|
geleisteten Zahlungen. Der Prozess hierfür unterscheidet sich von Behörde zu Behörde und muss daher händisch
|
||||||
|
durchgeführt werden.
|
||||||
|
|
||||||
|
.. _ePayBL: https://www.epaybl.de/
|
||||||
BIN
doc/plugins/img/epaybl_flowchart.png
Normal file
BIN
doc/plugins/img/epaybl_flowchart.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 44 KiB |
@@ -18,6 +18,7 @@ If you want to **create** a plugin, please go to the
|
|||||||
campaigns
|
campaigns
|
||||||
certificates
|
certificates
|
||||||
digital
|
digital
|
||||||
|
epaybl
|
||||||
exhibitors
|
exhibitors
|
||||||
shipping
|
shipping
|
||||||
imported_secrets
|
imported_secrets
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ classifiers = [
|
|||||||
"Programming Language :: Python :: 3.9",
|
"Programming Language :: Python :: 3.9",
|
||||||
"Programming Language :: Python :: 3.10",
|
"Programming Language :: Python :: 3.10",
|
||||||
"Programming Language :: Python :: 3.11",
|
"Programming Language :: Python :: 3.11",
|
||||||
"Framework :: Django :: 3.2",
|
"Framework :: Django :: 4.1",
|
||||||
]
|
]
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
@@ -30,13 +30,13 @@ dependencies = [
|
|||||||
"babel",
|
"babel",
|
||||||
"BeautifulSoup4==4.12.*",
|
"BeautifulSoup4==4.12.*",
|
||||||
"bleach==5.0.*",
|
"bleach==5.0.*",
|
||||||
"celery==5.2.*",
|
"celery==5.3.*",
|
||||||
"chardet==5.1.*",
|
"chardet==5.1.*",
|
||||||
"cryptography>=3.4.2",
|
"cryptography>=3.4.2",
|
||||||
"css-inline==0.8.*",
|
"css-inline==0.8.*",
|
||||||
"defusedcsv>=1.1.0",
|
"defusedcsv>=1.1.0",
|
||||||
"dj-static",
|
"dj-static",
|
||||||
"Django==3.2.*,>=3.2.18",
|
"Django==4.2.*",
|
||||||
"django-bootstrap3==23.1.*",
|
"django-bootstrap3==23.1.*",
|
||||||
"django-compressor==4.3.*",
|
"django-compressor==4.3.*",
|
||||||
"django-countries==7.5.*",
|
"django-countries==7.5.*",
|
||||||
@@ -49,7 +49,6 @@ dependencies = [
|
|||||||
"django-libsass==0.9",
|
"django-libsass==0.9",
|
||||||
"django-localflavor==4.0",
|
"django-localflavor==4.0",
|
||||||
"django-markup",
|
"django-markup",
|
||||||
"django-mysql",
|
|
||||||
"django-oauth-toolkit==2.2.*",
|
"django-oauth-toolkit==2.2.*",
|
||||||
"django-otp==1.2.*",
|
"django-otp==1.2.*",
|
||||||
"django-phonenumber-field==7.1.*",
|
"django-phonenumber-field==7.1.*",
|
||||||
@@ -60,10 +59,10 @@ dependencies = [
|
|||||||
"dnspython==2.3.*",
|
"dnspython==2.3.*",
|
||||||
"drf_ujson2==1.7.*",
|
"drf_ujson2==1.7.*",
|
||||||
"geoip2==4.*",
|
"geoip2==4.*",
|
||||||
"importlib_metadata==6.6.*", # Polyfill, we can probably drop this once we require Python 3.10+
|
"importlib_metadata==6.*", # Polyfill, we can probably drop this once we require Python 3.10+
|
||||||
"isoweek",
|
"isoweek",
|
||||||
"jsonschema",
|
"jsonschema",
|
||||||
"kombu==5.2.*",
|
"kombu==5.3.*",
|
||||||
"libsass==0.22.*",
|
"libsass==0.22.*",
|
||||||
"lxml",
|
"lxml",
|
||||||
"markdown==3.4.3", # 3.3.5 requires importlib-metadata>=4.4, but django-bootstrap3 requires importlib-metadata<3.
|
"markdown==3.4.3", # 3.3.5 requires importlib-metadata>=4.4, but django-bootstrap3 requires importlib-metadata<3.
|
||||||
@@ -74,9 +73,10 @@ dependencies = [
|
|||||||
"packaging",
|
"packaging",
|
||||||
"paypalrestsdk==1.13.*",
|
"paypalrestsdk==1.13.*",
|
||||||
"paypal-checkout-serversdk==1.0.*",
|
"paypal-checkout-serversdk==1.0.*",
|
||||||
"PyJWT==2.6.*",
|
"PyJWT==2.7.*",
|
||||||
"phonenumberslite==8.13.*",
|
"phonenumberslite==8.13.*",
|
||||||
"Pillow==9.5.*",
|
"Pillow==9.5.*",
|
||||||
|
"pretix-plugin-build",
|
||||||
"protobuf==4.23.*",
|
"protobuf==4.23.*",
|
||||||
"psycopg2-binary",
|
"psycopg2-binary",
|
||||||
"pycountry",
|
"pycountry",
|
||||||
@@ -87,11 +87,12 @@ dependencies = [
|
|||||||
"python-dateutil==2.8.*",
|
"python-dateutil==2.8.*",
|
||||||
"python-u2flib-server==4.*",
|
"python-u2flib-server==4.*",
|
||||||
"pytz",
|
"pytz",
|
||||||
|
"pytz-deprecation-shim==0.1.*",
|
||||||
"pyuca",
|
"pyuca",
|
||||||
"qrcode==7.4.*",
|
"qrcode==7.4.*",
|
||||||
"redis==4.5.*,>=4.5.4",
|
"redis==4.6.*",
|
||||||
"reportlab==4.0.*",
|
"reportlab==4.0.*",
|
||||||
"requests==2.30.*",
|
"requests==2.31.*",
|
||||||
"sentry-sdk==1.15.*",
|
"sentry-sdk==1.15.*",
|
||||||
"sepaxml==2.6.*",
|
"sepaxml==2.6.*",
|
||||||
"slimit",
|
"slimit",
|
||||||
@@ -108,10 +109,10 @@ dependencies = [
|
|||||||
|
|
||||||
[project.optional-dependencies]
|
[project.optional-dependencies]
|
||||||
memcached = ["pylibmc"]
|
memcached = ["pylibmc"]
|
||||||
mysql = ["mysqlclient"]
|
|
||||||
dev = [
|
dev = [
|
||||||
"coverage",
|
"coverage",
|
||||||
"coveralls",
|
"coveralls",
|
||||||
|
"fakeredis==2.18.*",
|
||||||
"flake8==6.0.*",
|
"flake8==6.0.*",
|
||||||
"freezegun",
|
"freezegun",
|
||||||
"isort==5.12.*",
|
"isort==5.12.*",
|
||||||
@@ -125,7 +126,7 @@ dev = [
|
|||||||
"pytest-mock==3.10.*",
|
"pytest-mock==3.10.*",
|
||||||
"pytest-rerunfailures==11.*",
|
"pytest-rerunfailures==11.*",
|
||||||
"pytest-sugar",
|
"pytest-sugar",
|
||||||
"pytest-xdist==3.2.*",
|
"pytest-xdist==3.3.*",
|
||||||
"pytest==7.3.*",
|
"pytest==7.3.*",
|
||||||
"responses",
|
"responses",
|
||||||
]
|
]
|
||||||
|
|||||||
1
setup.py
1
setup.py
@@ -29,7 +29,6 @@ sys.path.append(str(Path.cwd() / 'src'))
|
|||||||
|
|
||||||
|
|
||||||
def _CustomBuild(*args, **kwargs):
|
def _CustomBuild(*args, **kwargs):
|
||||||
print(sys.path)
|
|
||||||
from pretix._build import CustomBuild
|
from pretix._build import CustomBuild
|
||||||
return CustomBuild(*args, **kwargs)
|
return CustomBuild(*args, **kwargs)
|
||||||
|
|
||||||
|
|||||||
@@ -19,4 +19,4 @@
|
|||||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||||
# <https://www.gnu.org/licenses/>.
|
# <https://www.gnu.org/licenses/>.
|
||||||
#
|
#
|
||||||
__version__ = "4.21.0.dev0"
|
__version__ = "2023.8.0.dev0"
|
||||||
|
|||||||
@@ -30,7 +30,6 @@ from django.utils.translation import gettext_lazy as _ # NOQA
|
|||||||
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
|
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
|
||||||
|
|
||||||
USE_I18N = True
|
USE_I18N = True
|
||||||
USE_L10N = True
|
|
||||||
USE_TZ = True
|
USE_TZ = True
|
||||||
|
|
||||||
INSTALLED_APPS = [
|
INSTALLED_APPS = [
|
||||||
@@ -68,6 +67,7 @@ INSTALLED_APPS = [
|
|||||||
'oauth2_provider',
|
'oauth2_provider',
|
||||||
'phonenumber_field',
|
'phonenumber_field',
|
||||||
'statici18n',
|
'statici18n',
|
||||||
|
'django.forms', # after pretix.base for overrides
|
||||||
]
|
]
|
||||||
|
|
||||||
FORMAT_MODULE_PATH = [
|
FORMAT_MODULE_PATH = [
|
||||||
@@ -180,6 +180,8 @@ TEMPLATES = [
|
|||||||
},
|
},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
FORM_RENDERER = "django.forms.renderers.TemplatesSetting"
|
||||||
|
|
||||||
STATIC_ROOT = os.path.join(os.path.dirname(__file__), 'static.dist')
|
STATIC_ROOT = os.path.join(os.path.dirname(__file__), 'static.dist')
|
||||||
|
|
||||||
STATICFILES_FINDERS = (
|
STATICFILES_FINDERS = (
|
||||||
@@ -194,7 +196,14 @@ STATICFILES_DIRS = [
|
|||||||
|
|
||||||
STATICI18N_ROOT = os.path.join(BASE_DIR, "pretix/static")
|
STATICI18N_ROOT = os.path.join(BASE_DIR, "pretix/static")
|
||||||
|
|
||||||
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
|
STORAGES = {
|
||||||
|
"default": {
|
||||||
|
"BACKEND": "django.core.files.storage.FileSystemStorage",
|
||||||
|
},
|
||||||
|
"staticfiles": {
|
||||||
|
"BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
# if os.path.exists(os.path.join(DATA_DIR, 'static')):
|
# if os.path.exists(os.path.join(DATA_DIR, 'static')):
|
||||||
# STATICFILES_DIRS.insert(0, os.path.join(DATA_DIR, 'static'))
|
# STATICFILES_DIRS.insert(0, os.path.join(DATA_DIR, 'static'))
|
||||||
|
|||||||
@@ -45,6 +45,10 @@ def npm_install():
|
|||||||
|
|
||||||
class CustomBuild(build):
|
class CustomBuild(build):
|
||||||
def run(self):
|
def run(self):
|
||||||
|
if "src" not in os.listdir(".") or "pretix" not in os.listdir("src"):
|
||||||
|
# Only run this command on the pretix module, not on other modules even if it's registered globally
|
||||||
|
# in some cases
|
||||||
|
return build.run(self)
|
||||||
if "PRETIX_DOCKER_BUILD" in os.environ:
|
if "PRETIX_DOCKER_BUILD" in os.environ:
|
||||||
return # this is a hack to allow calling this file early in our docker build to make use of caching
|
return # this is a hack to allow calling this file early in our docker build to make use of caching
|
||||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pretix._build_settings")
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pretix._build_settings")
|
||||||
@@ -68,6 +72,10 @@ class CustomBuild(build):
|
|||||||
|
|
||||||
class CustomBuildExt(build_ext):
|
class CustomBuildExt(build_ext):
|
||||||
def run(self):
|
def run(self):
|
||||||
|
if "src" not in os.listdir(".") or "pretix" not in os.listdir("src"):
|
||||||
|
# Only run this command on the pretix module, not on other modules even if it's registered globally
|
||||||
|
# in some cases
|
||||||
|
return build_ext.run(self)
|
||||||
if "PRETIX_DOCKER_BUILD" in os.environ:
|
if "PRETIX_DOCKER_BUILD" in os.environ:
|
||||||
return # this is a hack to allow calling this file early in our docker build to make use of caching
|
return # this is a hack to allow calling this file early in our docker build to make use of caching
|
||||||
npm_install()
|
npm_install()
|
||||||
|
|||||||
@@ -223,6 +223,7 @@ class PretixPosSecurityProfile(AllowListSecurityProfile):
|
|||||||
('POST', 'api-v1:checkinrpc.redeem'),
|
('POST', 'api-v1:checkinrpc.redeem'),
|
||||||
('GET', 'api-v1:checkinrpc.search'),
|
('GET', 'api-v1:checkinrpc.search'),
|
||||||
('POST', 'api-v1:reusablemedium-lookup'),
|
('POST', 'api-v1:reusablemedium-lookup'),
|
||||||
|
('POST', 'api-v1:reusablemedium-list'),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -59,7 +59,7 @@ class IdempotencyMiddleware:
|
|||||||
auth_hash = sha1(auth_hash_parts.encode()).hexdigest()
|
auth_hash = sha1(auth_hash_parts.encode()).hexdigest()
|
||||||
idempotency_key = request.headers.get('X-Idempotency-Key', '')
|
idempotency_key = request.headers.get('X-Idempotency-Key', '')
|
||||||
|
|
||||||
with transaction.atomic():
|
with transaction.atomic(durable=True):
|
||||||
call, created = ApiCall.objects.select_for_update(of=OF_SELF).get_or_create(
|
call, created = ApiCall.objects.select_for_update(of=OF_SELF).get_or_create(
|
||||||
auth_hash=auth_hash,
|
auth_hash=auth_hash,
|
||||||
idempotency_key=idempotency_key,
|
idempotency_key=idempotency_key,
|
||||||
@@ -75,7 +75,7 @@ class IdempotencyMiddleware:
|
|||||||
|
|
||||||
if created:
|
if created:
|
||||||
resp = self.get_response(request)
|
resp = self.get_response(request)
|
||||||
with transaction.atomic():
|
with transaction.atomic(durable=True):
|
||||||
if resp.status_code in (409, 429, 500, 503):
|
if resp.status_code in (409, 429, 500, 503):
|
||||||
# This is the exception: These calls are *meant* to be retried!
|
# This is the exception: These calls are *meant* to be retried!
|
||||||
call.delete()
|
call.delete()
|
||||||
|
|||||||
@@ -19,6 +19,8 @@
|
|||||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||||
# <https://www.gnu.org/licenses/>.
|
# <https://www.gnu.org/licenses/>.
|
||||||
#
|
#
|
||||||
|
import json
|
||||||
|
|
||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
|
|
||||||
@@ -46,3 +48,16 @@ class AsymmetricField(serializers.Field):
|
|||||||
|
|
||||||
def run_validation(self, data=serializers.empty):
|
def run_validation(self, data=serializers.empty):
|
||||||
return self.write.run_validation(data)
|
return self.write.run_validation(data)
|
||||||
|
|
||||||
|
|
||||||
|
class CompatibleJSONField(serializers.JSONField):
|
||||||
|
def to_internal_value(self, data):
|
||||||
|
try:
|
||||||
|
return json.dumps(data)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
self.fail('invalid')
|
||||||
|
|
||||||
|
def to_representation(self, value):
|
||||||
|
if value:
|
||||||
|
return json.loads(value)
|
||||||
|
return value
|
||||||
|
|||||||
@@ -32,11 +32,13 @@ class DiscountSerializer(I18nAwareModelSerializer):
|
|||||||
'available_until', 'subevent_mode', 'condition_all_products', 'condition_limit_products',
|
'available_until', 'subevent_mode', 'condition_all_products', 'condition_limit_products',
|
||||||
'condition_apply_to_addons', 'condition_min_count', 'condition_min_value',
|
'condition_apply_to_addons', 'condition_min_count', 'condition_min_value',
|
||||||
'benefit_discount_matching_percent', 'benefit_only_apply_to_cheapest_n_matches',
|
'benefit_discount_matching_percent', 'benefit_only_apply_to_cheapest_n_matches',
|
||||||
'condition_ignore_voucher_discounted')
|
'benefit_same_products', 'benefit_limit_products', 'benefit_apply_to_addons',
|
||||||
|
'benefit_ignore_voucher_discounted', 'condition_ignore_voucher_discounted')
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
self.fields['condition_limit_products'].queryset = self.context['event'].items.all()
|
self.fields['condition_limit_products'].queryset = self.context['event'].items.all()
|
||||||
|
self.fields['benefit_limit_products'].queryset = self.context['event'].items.all()
|
||||||
|
|
||||||
def validate(self, data):
|
def validate(self, data):
|
||||||
data = super().validate(data)
|
data = super().validate(data)
|
||||||
|
|||||||
@@ -46,6 +46,7 @@ from rest_framework import serializers
|
|||||||
from rest_framework.fields import ChoiceField, Field
|
from rest_framework.fields import ChoiceField, Field
|
||||||
from rest_framework.relations import SlugRelatedField
|
from rest_framework.relations import SlugRelatedField
|
||||||
|
|
||||||
|
from pretix.api.serializers import CompatibleJSONField
|
||||||
from pretix.api.serializers.i18n import I18nAwareModelSerializer
|
from pretix.api.serializers.i18n import I18nAwareModelSerializer
|
||||||
from pretix.api.serializers.settings import SettingsSerializer
|
from pretix.api.serializers.settings import SettingsSerializer
|
||||||
from pretix.base.models import Device, Event, TaxRule, TeamAPIToken
|
from pretix.base.models import Device, Event, TaxRule, TeamAPIToken
|
||||||
@@ -53,6 +54,7 @@ from pretix.base.models.event import SubEvent
|
|||||||
from pretix.base.models.items import (
|
from pretix.base.models.items import (
|
||||||
ItemMetaProperty, SubEventItem, SubEventItemVariation,
|
ItemMetaProperty, SubEventItem, SubEventItemVariation,
|
||||||
)
|
)
|
||||||
|
from pretix.base.models.tax import CustomRulesValidator
|
||||||
from pretix.base.services.seating import (
|
from pretix.base.services.seating import (
|
||||||
SeatProtected, generate_seats, validate_plan_change,
|
SeatProtected, generate_seats, validate_plan_change,
|
||||||
)
|
)
|
||||||
@@ -650,9 +652,16 @@ class SubEventSerializer(I18nAwareModelSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class TaxRuleSerializer(CountryFieldMixin, I18nAwareModelSerializer):
|
class TaxRuleSerializer(CountryFieldMixin, I18nAwareModelSerializer):
|
||||||
|
custom_rules = CompatibleJSONField(
|
||||||
|
validators=[CustomRulesValidator()],
|
||||||
|
required=False,
|
||||||
|
allow_null=True,
|
||||||
|
)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = TaxRule
|
model = TaxRule
|
||||||
fields = ('id', 'name', 'rate', 'price_includes_tax', 'eu_reverse_charge', 'home_country', 'internal_name', 'keep_gross_if_rate_changes')
|
fields = ('id', 'name', 'rate', 'price_includes_tax', 'eu_reverse_charge', 'home_country', 'internal_name',
|
||||||
|
'keep_gross_if_rate_changes', 'custom_rules')
|
||||||
|
|
||||||
|
|
||||||
class EventSettingsSerializer(SettingsSerializer):
|
class EventSettingsSerializer(SettingsSerializer):
|
||||||
@@ -719,6 +728,7 @@ class EventSettingsSerializer(SettingsSerializer):
|
|||||||
'payment_term_minutes',
|
'payment_term_minutes',
|
||||||
'payment_term_last',
|
'payment_term_last',
|
||||||
'payment_term_expire_automatically',
|
'payment_term_expire_automatically',
|
||||||
|
'payment_term_expire_delay_days',
|
||||||
'payment_term_accept_late',
|
'payment_term_accept_late',
|
||||||
'payment_explanation',
|
'payment_explanation',
|
||||||
'payment_pending_hidden',
|
'payment_pending_hidden',
|
||||||
@@ -807,6 +817,10 @@ class EventSettingsSerializer(SettingsSerializer):
|
|||||||
'reusable_media_type_nfc_uid',
|
'reusable_media_type_nfc_uid',
|
||||||
'reusable_media_type_nfc_uid_autocreate_giftcard',
|
'reusable_media_type_nfc_uid_autocreate_giftcard',
|
||||||
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
|
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
|
||||||
|
'reusable_media_type_nfc_mf0aes',
|
||||||
|
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
|
||||||
|
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
|
||||||
|
'reusable_media_type_nfc_mf0aes_random_uid',
|
||||||
]
|
]
|
||||||
readonly_fields = [
|
readonly_fields = [
|
||||||
# These are read-only since they are currently only settable on organizers, not events
|
# These are read-only since they are currently only settable on organizers, not events
|
||||||
@@ -816,6 +830,10 @@ class EventSettingsSerializer(SettingsSerializer):
|
|||||||
'reusable_media_type_nfc_uid',
|
'reusable_media_type_nfc_uid',
|
||||||
'reusable_media_type_nfc_uid_autocreate_giftcard',
|
'reusable_media_type_nfc_uid_autocreate_giftcard',
|
||||||
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
|
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
|
||||||
|
'reusable_media_type_nfc_mf0aes',
|
||||||
|
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
|
||||||
|
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
|
||||||
|
'reusable_media_type_nfc_mf0aes_random_uid',
|
||||||
]
|
]
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
@@ -884,6 +902,8 @@ class DeviceEventSettingsSerializer(EventSettingsSerializer):
|
|||||||
'name_scheme',
|
'name_scheme',
|
||||||
'reusable_media_type_barcode',
|
'reusable_media_type_barcode',
|
||||||
'reusable_media_type_nfc_uid',
|
'reusable_media_type_nfc_uid',
|
||||||
|
'reusable_media_type_nfc_mf0aes',
|
||||||
|
'reusable_media_type_nfc_mf0aes_random_uid',
|
||||||
'system_question_order',
|
'system_question_order',
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|||||||
@@ -93,7 +93,7 @@ class JobRunSerializer(serializers.Serializer):
|
|||||||
if events is not None and not isinstance(ex, OrganizerLevelExportMixin):
|
if events is not None and not isinstance(ex, OrganizerLevelExportMixin):
|
||||||
self.fields["events"] = serializers.SlugRelatedField(
|
self.fields["events"] = serializers.SlugRelatedField(
|
||||||
queryset=events,
|
queryset=events,
|
||||||
required=True,
|
required=False,
|
||||||
allow_empty=False,
|
allow_empty=False,
|
||||||
slug_field='slug',
|
slug_field='slug',
|
||||||
many=True
|
many=True
|
||||||
@@ -156,8 +156,9 @@ class JobRunSerializer(serializers.Serializer):
|
|||||||
def to_internal_value(self, data):
|
def to_internal_value(self, data):
|
||||||
if isinstance(data, QueryDict):
|
if isinstance(data, QueryDict):
|
||||||
data = data.copy()
|
data = data.copy()
|
||||||
|
|
||||||
for k, v in self.fields.items():
|
for k, v in self.fields.items():
|
||||||
if isinstance(v, serializers.ManyRelatedField) and k not in data:
|
if isinstance(v, serializers.ManyRelatedField) and k not in data and k != "events":
|
||||||
data[k] = []
|
data[k] = []
|
||||||
|
|
||||||
for fk in self.fields.keys():
|
for fk in self.fields.keys():
|
||||||
|
|||||||
@@ -60,6 +60,8 @@ class NestedGiftCardSerializer(GiftCardSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class ReusableMediaSerializer(I18nAwareModelSerializer):
|
class ReusableMediaSerializer(I18nAwareModelSerializer):
|
||||||
|
organizer = serializers.SlugRelatedField(slug_field='slug', read_only=True)
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
@@ -111,6 +113,7 @@ class ReusableMediaSerializer(I18nAwareModelSerializer):
|
|||||||
model = ReusableMedium
|
model = ReusableMedium
|
||||||
fields = (
|
fields = (
|
||||||
'id',
|
'id',
|
||||||
|
'organizer',
|
||||||
'created',
|
'created',
|
||||||
'updated',
|
'updated',
|
||||||
'type',
|
'type',
|
||||||
|
|||||||
@@ -19,7 +19,6 @@
|
|||||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||||
# <https://www.gnu.org/licenses/>.
|
# <https://www.gnu.org/licenses/>.
|
||||||
#
|
#
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
from collections import Counter, defaultdict
|
from collections import Counter, defaultdict
|
||||||
@@ -28,6 +27,7 @@ from decimal import Decimal
|
|||||||
import pycountry
|
import pycountry
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.files import File
|
from django.core.files import File
|
||||||
|
from django.db import models
|
||||||
from django.db.models import F, Q
|
from django.db.models import F, Q
|
||||||
from django.utils.encoding import force_str
|
from django.utils.encoding import force_str
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
@@ -39,6 +39,7 @@ from rest_framework.exceptions import ValidationError
|
|||||||
from rest_framework.relations import SlugRelatedField
|
from rest_framework.relations import SlugRelatedField
|
||||||
from rest_framework.reverse import reverse
|
from rest_framework.reverse import reverse
|
||||||
|
|
||||||
|
from pretix.api.serializers import CompatibleJSONField
|
||||||
from pretix.api.serializers.event import SubEventSerializer
|
from pretix.api.serializers.event import SubEventSerializer
|
||||||
from pretix.api.serializers.i18n import I18nAwareModelSerializer
|
from pretix.api.serializers.i18n import I18nAwareModelSerializer
|
||||||
from pretix.api.serializers.item import (
|
from pretix.api.serializers.item import (
|
||||||
@@ -283,11 +284,12 @@ class FailedCheckinSerializer(I18nAwareModelSerializer):
|
|||||||
raw_item = serializers.PrimaryKeyRelatedField(queryset=Item.objects.none(), required=False, allow_null=True)
|
raw_item = serializers.PrimaryKeyRelatedField(queryset=Item.objects.none(), required=False, allow_null=True)
|
||||||
raw_variation = serializers.PrimaryKeyRelatedField(queryset=ItemVariation.objects.none(), required=False, allow_null=True)
|
raw_variation = serializers.PrimaryKeyRelatedField(queryset=ItemVariation.objects.none(), required=False, allow_null=True)
|
||||||
raw_subevent = serializers.PrimaryKeyRelatedField(queryset=SubEvent.objects.none(), required=False, allow_null=True)
|
raw_subevent = serializers.PrimaryKeyRelatedField(queryset=SubEvent.objects.none(), required=False, allow_null=True)
|
||||||
|
nonce = serializers.CharField(required=False, allow_null=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Checkin
|
model = Checkin
|
||||||
fields = ('error_reason', 'error_explanation', 'raw_barcode', 'raw_item', 'raw_variation',
|
fields = ('error_reason', 'error_explanation', 'raw_barcode', 'raw_item', 'raw_variation',
|
||||||
'raw_subevent', 'datetime', 'type', 'position')
|
'raw_subevent', 'nonce', 'datetime', 'type', 'position')
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
@@ -372,11 +374,15 @@ class PdfDataSerializer(serializers.Field):
|
|||||||
self.context['vars_images'] = get_images(self.context['event'])
|
self.context['vars_images'] = get_images(self.context['event'])
|
||||||
|
|
||||||
for k, f in self.context['vars'].items():
|
for k, f in self.context['vars'].items():
|
||||||
try:
|
if 'evaluate_bulk' in f:
|
||||||
res[k] = f['evaluate'](instance, instance.order, ev)
|
# Will be evaluated later by our list serializers
|
||||||
except:
|
res[k] = (f['evaluate_bulk'], instance)
|
||||||
logger.exception('Evaluating PDF variable failed')
|
else:
|
||||||
res[k] = '(error)'
|
try:
|
||||||
|
res[k] = f['evaluate'](instance, instance.order, ev)
|
||||||
|
except:
|
||||||
|
logger.exception('Evaluating PDF variable failed')
|
||||||
|
res[k] = '(error)'
|
||||||
|
|
||||||
if not hasattr(ev, '_cached_meta_data'):
|
if not hasattr(ev, '_cached_meta_data'):
|
||||||
ev._cached_meta_data = ev.meta_data
|
ev._cached_meta_data = ev.meta_data
|
||||||
@@ -429,6 +435,38 @@ class PdfDataSerializer(serializers.Field):
|
|||||||
return res
|
return res
|
||||||
|
|
||||||
|
|
||||||
|
class OrderPositionListSerializer(serializers.ListSerializer):
|
||||||
|
|
||||||
|
def to_representation(self, data):
|
||||||
|
# We have a custom implementation of this method because PdfDataSerializer() might keep some elements unevaluated
|
||||||
|
# with a (callable, input) tuple. We'll loop over these entries and evaluate them bulk-wise to save on SQL queries.
|
||||||
|
|
||||||
|
if isinstance(self.parent, OrderSerializer) and isinstance(self.parent.parent, OrderListSerializer):
|
||||||
|
# Do not execute our custom code because it will be executed by OrderListSerializer later for the
|
||||||
|
# full result set.
|
||||||
|
return super().to_representation(data)
|
||||||
|
|
||||||
|
iterable = data.all() if isinstance(data, models.Manager) else data
|
||||||
|
|
||||||
|
data = []
|
||||||
|
evaluate_queue = defaultdict(list)
|
||||||
|
|
||||||
|
for item in iterable:
|
||||||
|
entry = self.child.to_representation(item)
|
||||||
|
if "pdf_data" in entry:
|
||||||
|
for k, v in entry["pdf_data"].items():
|
||||||
|
if isinstance(v, tuple) and callable(v[0]):
|
||||||
|
evaluate_queue[v[0]].append((v[1], entry, k))
|
||||||
|
data.append(entry)
|
||||||
|
|
||||||
|
for func, entries in evaluate_queue.items():
|
||||||
|
results = func([item for (item, entry, k) in entries])
|
||||||
|
for (item, entry, k), result in zip(entries, results):
|
||||||
|
entry["pdf_data"][k] = result
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
class OrderPositionSerializer(I18nAwareModelSerializer):
|
class OrderPositionSerializer(I18nAwareModelSerializer):
|
||||||
checkins = CheckinSerializer(many=True, read_only=True)
|
checkins = CheckinSerializer(many=True, read_only=True)
|
||||||
answers = AnswerSerializer(many=True)
|
answers = AnswerSerializer(many=True)
|
||||||
@@ -440,6 +478,7 @@ class OrderPositionSerializer(I18nAwareModelSerializer):
|
|||||||
attendee_name = serializers.CharField(required=False)
|
attendee_name = serializers.CharField(required=False)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
|
list_serializer_class = OrderPositionListSerializer
|
||||||
model = OrderPosition
|
model = OrderPosition
|
||||||
fields = ('id', 'order', 'positionid', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts',
|
fields = ('id', 'order', 'positionid', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts',
|
||||||
'company', 'street', 'zipcode', 'city', 'country', 'state', 'discount',
|
'company', 'street', 'zipcode', 'city', 'country', 'state', 'discount',
|
||||||
@@ -468,6 +507,20 @@ class OrderPositionSerializer(I18nAwareModelSerializer):
|
|||||||
def validate(self, data):
|
def validate(self, data):
|
||||||
raise TypeError("this serializer is readonly")
|
raise TypeError("this serializer is readonly")
|
||||||
|
|
||||||
|
def to_representation(self, data):
|
||||||
|
if isinstance(self.parent, (OrderListSerializer, OrderPositionListSerializer)):
|
||||||
|
# Do not execute our custom code because it will be executed by OrderListSerializer later for the
|
||||||
|
# full result set.
|
||||||
|
return super().to_representation(data)
|
||||||
|
|
||||||
|
entry = super().to_representation(data)
|
||||||
|
if "pdf_data" in entry:
|
||||||
|
for k, v in entry["pdf_data"].items():
|
||||||
|
if isinstance(v, tuple) and callable(v[0]):
|
||||||
|
entry["pdf_data"][k] = v[0]([v[1]])[0]
|
||||||
|
|
||||||
|
return entry
|
||||||
|
|
||||||
|
|
||||||
class RequireAttentionField(serializers.Field):
|
class RequireAttentionField(serializers.Field):
|
||||||
def to_representation(self, instance: OrderPosition):
|
def to_representation(self, instance: OrderPosition):
|
||||||
@@ -535,8 +588,9 @@ class OrderPaymentTypeField(serializers.Field):
|
|||||||
# TODO: Remove after pretix 2.2
|
# TODO: Remove after pretix 2.2
|
||||||
def to_representation(self, instance: Order):
|
def to_representation(self, instance: Order):
|
||||||
t = None
|
t = None
|
||||||
for p in instance.payments.all():
|
if instance.pk:
|
||||||
t = p.provider
|
for p in instance.payments.all():
|
||||||
|
t = p.provider
|
||||||
return t
|
return t
|
||||||
|
|
||||||
|
|
||||||
@@ -544,10 +598,10 @@ class OrderPaymentDateField(serializers.DateField):
|
|||||||
# TODO: Remove after pretix 2.2
|
# TODO: Remove after pretix 2.2
|
||||||
def to_representation(self, instance: Order):
|
def to_representation(self, instance: Order):
|
||||||
t = None
|
t = None
|
||||||
for p in instance.payments.all():
|
if instance.pk:
|
||||||
t = p.payment_date or t
|
for p in instance.payments.all():
|
||||||
|
t = p.payment_date or t
|
||||||
if t:
|
if t:
|
||||||
|
|
||||||
return super().to_representation(t.date())
|
return super().to_representation(t.date())
|
||||||
|
|
||||||
|
|
||||||
@@ -561,7 +615,7 @@ class PaymentURLField(serializers.URLField):
|
|||||||
def to_representation(self, instance: OrderPayment):
|
def to_representation(self, instance: OrderPayment):
|
||||||
if instance.state != OrderPayment.PAYMENT_STATE_CREATED:
|
if instance.state != OrderPayment.PAYMENT_STATE_CREATED:
|
||||||
return None
|
return None
|
||||||
return build_absolute_uri(self.context['event'], 'presale:event.order.pay', kwargs={
|
return build_absolute_uri(instance.order.event, 'presale:event.order.pay', kwargs={
|
||||||
'order': instance.order.code,
|
'order': instance.order.code,
|
||||||
'secret': instance.order.secret,
|
'secret': instance.order.secret,
|
||||||
'payment': instance.pk,
|
'payment': instance.pk,
|
||||||
@@ -606,13 +660,42 @@ class OrderRefundSerializer(I18nAwareModelSerializer):
|
|||||||
|
|
||||||
class OrderURLField(serializers.URLField):
|
class OrderURLField(serializers.URLField):
|
||||||
def to_representation(self, instance: Order):
|
def to_representation(self, instance: Order):
|
||||||
return build_absolute_uri(self.context['event'], 'presale:event.order', kwargs={
|
return build_absolute_uri(instance.event, 'presale:event.order', kwargs={
|
||||||
'order': instance.code,
|
'order': instance.code,
|
||||||
'secret': instance.secret,
|
'secret': instance.secret,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
|
class OrderListSerializer(serializers.ListSerializer):
|
||||||
|
|
||||||
|
def to_representation(self, data):
|
||||||
|
# We have a custom implementation of this method because PdfDataSerializer() might keep some elements
|
||||||
|
# unevaluated with a (callable, input) tuple. We'll loop over these entries and evaluate them bulk-wise to
|
||||||
|
# save on SQL queries.
|
||||||
|
iterable = data.all() if isinstance(data, models.Manager) else data
|
||||||
|
|
||||||
|
data = []
|
||||||
|
evaluate_queue = defaultdict(list)
|
||||||
|
|
||||||
|
for item in iterable:
|
||||||
|
entry = self.child.to_representation(item)
|
||||||
|
for p in entry.get("positions", []):
|
||||||
|
if "pdf_data" in p:
|
||||||
|
for k, v in p["pdf_data"].items():
|
||||||
|
if isinstance(v, tuple) and callable(v[0]):
|
||||||
|
evaluate_queue[v[0]].append((v[1], p, k))
|
||||||
|
data.append(entry)
|
||||||
|
|
||||||
|
for func, entries in evaluate_queue.items():
|
||||||
|
results = func([item for (item, entry, k) in entries])
|
||||||
|
for (item, entry, k), result in zip(entries, results):
|
||||||
|
entry["pdf_data"][k] = result
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
class OrderSerializer(I18nAwareModelSerializer):
|
class OrderSerializer(I18nAwareModelSerializer):
|
||||||
|
event = SlugRelatedField(slug_field='slug', read_only=True)
|
||||||
invoice_address = InvoiceAddressSerializer(allow_null=True)
|
invoice_address = InvoiceAddressSerializer(allow_null=True)
|
||||||
positions = OrderPositionSerializer(many=True, read_only=True)
|
positions = OrderPositionSerializer(many=True, read_only=True)
|
||||||
fees = OrderFeeSerializer(many=True, read_only=True)
|
fees = OrderFeeSerializer(many=True, read_only=True)
|
||||||
@@ -626,8 +709,9 @@ class OrderSerializer(I18nAwareModelSerializer):
|
|||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Order
|
model = Order
|
||||||
|
list_serializer_class = OrderListSerializer
|
||||||
fields = (
|
fields = (
|
||||||
'code', 'status', 'testmode', 'secret', 'email', 'phone', 'locale', 'datetime', 'expires', 'payment_date',
|
'code', 'event', 'status', 'testmode', 'secret', 'email', 'phone', 'locale', 'datetime', 'expires', 'payment_date',
|
||||||
'payment_provider', 'fees', 'total', 'comment', 'custom_followup_at', 'invoice_address', 'positions', 'downloads',
|
'payment_provider', 'fees', 'total', 'comment', 'custom_followup_at', 'invoice_address', 'positions', 'downloads',
|
||||||
'checkin_attention', 'last_modified', 'payments', 'refunds', 'require_approval', 'sales_channel',
|
'checkin_attention', 'last_modified', 'payments', 'refunds', 'require_approval', 'sales_channel',
|
||||||
'url', 'customer', 'valid_if_pending'
|
'url', 'customer', 'valid_if_pending'
|
||||||
@@ -895,19 +979,6 @@ class OrderPositionCreateSerializer(I18nAwareModelSerializer):
|
|||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
class CompatibleJSONField(serializers.JSONField):
|
|
||||||
def to_internal_value(self, data):
|
|
||||||
try:
|
|
||||||
return json.dumps(data)
|
|
||||||
except (TypeError, ValueError):
|
|
||||||
self.fail('invalid')
|
|
||||||
|
|
||||||
def to_representation(self, value):
|
|
||||||
if value:
|
|
||||||
return json.loads(value)
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
class WrappedList:
|
class WrappedList:
|
||||||
def __init__(self, data):
|
def __init__(self, data):
|
||||||
self._data = data
|
self._data = data
|
||||||
@@ -1363,6 +1434,7 @@ class OrderCreateSerializer(I18nAwareModelSerializer):
|
|||||||
answers.append(answ)
|
answers.append(answ)
|
||||||
pos.answers = answers
|
pos.answers = answers
|
||||||
pos.pseudonymization_id = "PREVIEW"
|
pos.pseudonymization_id = "PREVIEW"
|
||||||
|
pos.checkins = []
|
||||||
pos_map[pos.positionid] = pos
|
pos_map[pos.positionid] = pos
|
||||||
else:
|
else:
|
||||||
if pos.voucher:
|
if pos.voucher:
|
||||||
@@ -1459,6 +1531,8 @@ class OrderCreateSerializer(I18nAwareModelSerializer):
|
|||||||
if simulate:
|
if simulate:
|
||||||
order.fees = fees
|
order.fees = fees
|
||||||
order.positions = pos_map.values()
|
order.positions = pos_map.values()
|
||||||
|
order.payments = []
|
||||||
|
order.refunds = []
|
||||||
return order # ignore payments
|
return order # ignore payments
|
||||||
else:
|
else:
|
||||||
order.save(update_fields=['total'])
|
order.save(update_fields=['total'])
|
||||||
@@ -1521,6 +1595,7 @@ class InlineInvoiceLineSerializer(I18nAwareModelSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class InvoiceSerializer(I18nAwareModelSerializer):
|
class InvoiceSerializer(I18nAwareModelSerializer):
|
||||||
|
event = SlugRelatedField(slug_field='slug', read_only=True)
|
||||||
order = serializers.SlugRelatedField(slug_field='code', read_only=True)
|
order = serializers.SlugRelatedField(slug_field='code', read_only=True)
|
||||||
refers = serializers.SlugRelatedField(slug_field='full_invoice_no', read_only=True)
|
refers = serializers.SlugRelatedField(slug_field='full_invoice_no', read_only=True)
|
||||||
lines = InlineInvoiceLineSerializer(many=True)
|
lines = InlineInvoiceLineSerializer(many=True)
|
||||||
@@ -1529,7 +1604,7 @@ class InvoiceSerializer(I18nAwareModelSerializer):
|
|||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Invoice
|
model = Invoice
|
||||||
fields = ('order', 'number', 'is_cancellation', 'invoice_from', 'invoice_from_name', 'invoice_from_zipcode',
|
fields = ('event', 'order', 'number', 'is_cancellation', 'invoice_from', 'invoice_from_name', 'invoice_from_zipcode',
|
||||||
'invoice_from_city', 'invoice_from_country', 'invoice_from_tax_id', 'invoice_from_vat_id',
|
'invoice_from_city', 'invoice_from_country', 'invoice_from_tax_id', 'invoice_from_vat_id',
|
||||||
'invoice_to', 'invoice_to_company', 'invoice_to_name', 'invoice_to_street', 'invoice_to_zipcode',
|
'invoice_to', 'invoice_to_company', 'invoice_to_name', 'invoice_to_street', 'invoice_to_zipcode',
|
||||||
'invoice_to_city', 'invoice_to_state', 'invoice_to_country', 'invoice_to_vat_id', 'invoice_to_beneficiary',
|
'invoice_to_city', 'invoice_to_state', 'invoice_to_country', 'invoice_to_vat_id', 'invoice_to_beneficiary',
|
||||||
|
|||||||
@@ -36,9 +36,9 @@ from pretix.api.serializers.settings import SettingsSerializer
|
|||||||
from pretix.base.auth import get_auth_backends
|
from pretix.base.auth import get_auth_backends
|
||||||
from pretix.base.i18n import get_language_without_region
|
from pretix.base.i18n import get_language_without_region
|
||||||
from pretix.base.models import (
|
from pretix.base.models import (
|
||||||
Customer, Device, GiftCard, GiftCardTransaction, Membership,
|
Customer, Device, GiftCard, GiftCardAcceptance, GiftCardTransaction,
|
||||||
MembershipType, OrderPosition, Organizer, ReusableMedium, SeatingPlan,
|
Membership, MembershipType, OrderPosition, Organizer, ReusableMedium,
|
||||||
Team, TeamAPIToken, TeamInvite, User,
|
SeatingPlan, Team, TeamAPIToken, TeamInvite, User,
|
||||||
)
|
)
|
||||||
from pretix.base.models.seating import SeatingPlanLayoutValidator
|
from pretix.base.models.seating import SeatingPlanLayoutValidator
|
||||||
from pretix.base.services.mail import SendMailException, mail
|
from pretix.base.services.mail import SendMailException, mail
|
||||||
@@ -94,6 +94,14 @@ class CustomerSerializer(I18nAwareModelSerializer):
|
|||||||
data['name_parts']['_scheme'] = self.context['request'].organizer.settings.name_scheme
|
data['name_parts']['_scheme'] = self.context['request'].organizer.settings.name_scheme
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
def validate_email(self, value):
|
||||||
|
qs = Customer.objects.filter(organizer=self.context['organizer'], email__iexact=value)
|
||||||
|
if self.instance and self.instance.pk:
|
||||||
|
qs = qs.exclude(pk=self.instance.pk)
|
||||||
|
if qs.exists():
|
||||||
|
raise ValidationError(_("An account with this email address is already registered."))
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
class CustomerCreateSerializer(CustomerSerializer):
|
class CustomerCreateSerializer(CustomerSerializer):
|
||||||
send_email = serializers.BooleanField(default=False, required=False, allow_null=True)
|
send_email = serializers.BooleanField(default=False, required=False, allow_null=True)
|
||||||
@@ -183,8 +191,11 @@ class GiftCardSerializer(I18nAwareModelSerializer):
|
|||||||
qs = GiftCard.objects.filter(
|
qs = GiftCard.objects.filter(
|
||||||
secret=s
|
secret=s
|
||||||
).filter(
|
).filter(
|
||||||
Q(issuer=self.context["organizer"]) | Q(
|
Q(issuer=self.context["organizer"]) |
|
||||||
issuer__gift_card_collector_acceptance__collector=self.context["organizer"])
|
Q(issuer__in=GiftCardAcceptance.objects.filter(
|
||||||
|
acceptor=self.context["organizer"],
|
||||||
|
active=True,
|
||||||
|
).values_list('issuer', flat=True))
|
||||||
)
|
)
|
||||||
if self.instance:
|
if self.instance:
|
||||||
qs = qs.exclude(pk=self.instance.pk)
|
qs = qs.exclude(pk=self.instance.pk)
|
||||||
@@ -248,6 +259,8 @@ class DeviceSerializer(serializers.ModelSerializer):
|
|||||||
unique_serial = serializers.CharField(read_only=True)
|
unique_serial = serializers.CharField(read_only=True)
|
||||||
hardware_brand = serializers.CharField(read_only=True)
|
hardware_brand = serializers.CharField(read_only=True)
|
||||||
hardware_model = serializers.CharField(read_only=True)
|
hardware_model = serializers.CharField(read_only=True)
|
||||||
|
os_name = serializers.CharField(read_only=True)
|
||||||
|
os_version = serializers.CharField(read_only=True)
|
||||||
software_brand = serializers.CharField(read_only=True)
|
software_brand = serializers.CharField(read_only=True)
|
||||||
software_version = serializers.CharField(read_only=True)
|
software_version = serializers.CharField(read_only=True)
|
||||||
created = serializers.DateTimeField(read_only=True)
|
created = serializers.DateTimeField(read_only=True)
|
||||||
@@ -260,7 +273,7 @@ class DeviceSerializer(serializers.ModelSerializer):
|
|||||||
fields = (
|
fields = (
|
||||||
'device_id', 'unique_serial', 'initialization_token', 'all_events', 'limit_events',
|
'device_id', 'unique_serial', 'initialization_token', 'all_events', 'limit_events',
|
||||||
'revoked', 'name', 'created', 'initialized', 'hardware_brand', 'hardware_model',
|
'revoked', 'name', 'created', 'initialized', 'hardware_brand', 'hardware_model',
|
||||||
'software_brand', 'software_version', 'security_profile'
|
'os_name', 'os_version', 'software_brand', 'software_version', 'security_profile'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -387,6 +400,9 @@ class OrganizerSettingsSerializer(SettingsSerializer):
|
|||||||
'reusable_media_type_nfc_uid',
|
'reusable_media_type_nfc_uid',
|
||||||
'reusable_media_type_nfc_uid_autocreate_giftcard',
|
'reusable_media_type_nfc_uid_autocreate_giftcard',
|
||||||
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
|
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
|
||||||
|
'reusable_media_type_nfc_mf0aes',
|
||||||
|
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
|
||||||
|
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
|
||||||
]
|
]
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
|
|||||||
@@ -35,8 +35,7 @@
|
|||||||
import importlib
|
import importlib
|
||||||
|
|
||||||
from django.apps import apps
|
from django.apps import apps
|
||||||
from django.conf.urls import re_path
|
from django.urls import include, re_path
|
||||||
from django.urls import include
|
|
||||||
from rest_framework import routers
|
from rest_framework import routers
|
||||||
|
|
||||||
from pretix.api.views import cart
|
from pretix.api.views import cart
|
||||||
@@ -62,6 +61,8 @@ orga_router.register(r'membershiptypes', organizer.MembershipTypeViewSet)
|
|||||||
orga_router.register(r'reusablemedia', media.ReusableMediaViewSet)
|
orga_router.register(r'reusablemedia', media.ReusableMediaViewSet)
|
||||||
orga_router.register(r'teams', organizer.TeamViewSet)
|
orga_router.register(r'teams', organizer.TeamViewSet)
|
||||||
orga_router.register(r'devices', organizer.DeviceViewSet)
|
orga_router.register(r'devices', organizer.DeviceViewSet)
|
||||||
|
orga_router.register(r'orders', order.OrganizerOrderViewSet)
|
||||||
|
orga_router.register(r'invoices', order.InvoiceViewSet)
|
||||||
orga_router.register(r'exporters', exporters.OrganizerExportersViewSet, basename='exporters')
|
orga_router.register(r'exporters', exporters.OrganizerExportersViewSet, basename='exporters')
|
||||||
|
|
||||||
team_router = routers.DefaultRouter()
|
team_router = routers.DefaultRouter()
|
||||||
@@ -78,7 +79,7 @@ event_router.register(r'questions', item.QuestionViewSet)
|
|||||||
event_router.register(r'discounts', discount.DiscountViewSet)
|
event_router.register(r'discounts', discount.DiscountViewSet)
|
||||||
event_router.register(r'quotas', item.QuotaViewSet)
|
event_router.register(r'quotas', item.QuotaViewSet)
|
||||||
event_router.register(r'vouchers', voucher.VoucherViewSet)
|
event_router.register(r'vouchers', voucher.VoucherViewSet)
|
||||||
event_router.register(r'orders', order.OrderViewSet)
|
event_router.register(r'orders', order.EventOrderViewSet)
|
||||||
event_router.register(r'orderpositions', order.OrderPositionViewSet)
|
event_router.register(r'orderpositions', order.OrderPositionViewSet)
|
||||||
event_router.register(r'invoices', order.InvoiceViewSet)
|
event_router.register(r'invoices', order.InvoiceViewSet)
|
||||||
event_router.register(r'revokedsecrets', order.RevokedSecretViewSet, basename='revokedsecrets')
|
event_router.register(r'revokedsecrets', order.RevokedSecretViewSet, basename='revokedsecrets')
|
||||||
|
|||||||
@@ -164,8 +164,21 @@ class CheckinListViewSet(viewsets.ModelViewSet):
|
|||||||
secret=serializer.validated_data['raw_barcode']
|
secret=serializer.validated_data['raw_barcode']
|
||||||
).first()
|
).first()
|
||||||
|
|
||||||
|
clist = self.get_object()
|
||||||
|
if serializer.validated_data.get('nonce'):
|
||||||
|
if kwargs.get('position'):
|
||||||
|
prev = kwargs['position'].all_checkins.filter(nonce=serializer.validated_data['nonce']).first()
|
||||||
|
else:
|
||||||
|
prev = clist.checkins.filter(
|
||||||
|
nonce=serializer.validated_data['nonce'],
|
||||||
|
raw_barcode=serializer.validated_data['raw_barcode'],
|
||||||
|
).first()
|
||||||
|
if prev:
|
||||||
|
# Ignore because nonce is already handled
|
||||||
|
return Response(serializer.data, status=201)
|
||||||
|
|
||||||
c = serializer.save(
|
c = serializer.save(
|
||||||
list=self.get_object(),
|
list=clist,
|
||||||
successful=False,
|
successful=False,
|
||||||
forced=True,
|
forced=True,
|
||||||
force_sent=True,
|
force_sent=True,
|
||||||
@@ -396,7 +409,7 @@ def _checkin_list_position_queryset(checkinlists, ignore_status=False, ignore_pr
|
|||||||
|
|
||||||
def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force, checkin_type, ignore_unpaid, nonce,
|
def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force, checkin_type, ignore_unpaid, nonce,
|
||||||
untrusted_input, user, auth, expand, pdf_data, request, questions_supported, canceled_supported,
|
untrusted_input, user, auth, expand, pdf_data, request, questions_supported, canceled_supported,
|
||||||
source_type='barcode', legacy_url_support=False):
|
source_type='barcode', legacy_url_support=False, simulate=False):
|
||||||
if not checkinlists:
|
if not checkinlists:
|
||||||
raise ValidationError('No check-in list passed.')
|
raise ValidationError('No check-in list passed.')
|
||||||
|
|
||||||
@@ -433,6 +446,8 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
|||||||
)
|
)
|
||||||
raw_barcode_for_checkin = None
|
raw_barcode_for_checkin = None
|
||||||
from_revoked_secret = False
|
from_revoked_secret = False
|
||||||
|
if simulate:
|
||||||
|
common_checkin_args['__fake_arg_to_prevent_this_from_being_saved'] = True
|
||||||
|
|
||||||
# 1. Gather a list of positions that could be the one we looking for, either from their ID, secret or
|
# 1. Gather a list of positions that could be the one we looking for, either from their ID, secret or
|
||||||
# parent secret
|
# parent secret
|
||||||
@@ -472,13 +487,14 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
|||||||
revoked_matches = list(
|
revoked_matches = list(
|
||||||
RevokedTicketSecret.objects.filter(event_id__in=list_by_event.keys(), secret=raw_barcode))
|
RevokedTicketSecret.objects.filter(event_id__in=list_by_event.keys(), secret=raw_barcode))
|
||||||
if len(revoked_matches) == 0:
|
if len(revoked_matches) == 0:
|
||||||
checkinlists[0].event.log_action('pretix.event.checkin.unknown', data={
|
if not simulate:
|
||||||
'datetime': datetime,
|
checkinlists[0].event.log_action('pretix.event.checkin.unknown', data={
|
||||||
'type': checkin_type,
|
'datetime': datetime,
|
||||||
'list': checkinlists[0].pk,
|
'type': checkin_type,
|
||||||
'barcode': raw_barcode,
|
'list': checkinlists[0].pk,
|
||||||
'searched_lists': [cl.pk for cl in checkinlists]
|
'barcode': raw_barcode,
|
||||||
}, user=user, auth=auth)
|
'searched_lists': [cl.pk for cl in checkinlists]
|
||||||
|
}, user=user, auth=auth)
|
||||||
|
|
||||||
for cl in checkinlists:
|
for cl in checkinlists:
|
||||||
for k, s in cl.event.ticket_secret_generators.items():
|
for k, s in cl.event.ticket_secret_generators.items():
|
||||||
@@ -492,12 +508,13 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
Checkin.objects.create(
|
if not simulate:
|
||||||
position=None,
|
Checkin.objects.create(
|
||||||
successful=False,
|
position=None,
|
||||||
error_reason=Checkin.REASON_INVALID,
|
successful=False,
|
||||||
**common_checkin_args,
|
error_reason=Checkin.REASON_INVALID,
|
||||||
)
|
**common_checkin_args,
|
||||||
|
)
|
||||||
|
|
||||||
if force and legacy_url_support and isinstance(auth, Device):
|
if force and legacy_url_support and isinstance(auth, Device):
|
||||||
# There was a bug in libpretixsync: If you scanned a ticket in offline mode that was
|
# There was a bug in libpretixsync: If you scanned a ticket in offline mode that was
|
||||||
@@ -539,19 +556,20 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
|||||||
from_revoked_secret = True
|
from_revoked_secret = True
|
||||||
else:
|
else:
|
||||||
op = revoked_matches[0].position
|
op = revoked_matches[0].position
|
||||||
op.order.log_action('pretix.event.checkin.revoked', data={
|
if not simulate:
|
||||||
'datetime': datetime,
|
op.order.log_action('pretix.event.checkin.revoked', data={
|
||||||
'type': checkin_type,
|
'datetime': datetime,
|
||||||
'list': list_by_event[revoked_matches[0].event_id].pk,
|
'type': checkin_type,
|
||||||
'barcode': raw_barcode
|
'list': list_by_event[revoked_matches[0].event_id].pk,
|
||||||
}, user=user, auth=auth)
|
'barcode': raw_barcode
|
||||||
common_checkin_args['list'] = list_by_event[revoked_matches[0].event_id]
|
}, user=user, auth=auth)
|
||||||
Checkin.objects.create(
|
common_checkin_args['list'] = list_by_event[revoked_matches[0].event_id]
|
||||||
position=op,
|
Checkin.objects.create(
|
||||||
successful=False,
|
position=op,
|
||||||
error_reason=Checkin.REASON_REVOKED,
|
successful=False,
|
||||||
**common_checkin_args
|
error_reason=Checkin.REASON_REVOKED,
|
||||||
)
|
**common_checkin_args
|
||||||
|
)
|
||||||
return Response({
|
return Response({
|
||||||
'status': 'error',
|
'status': 'error',
|
||||||
'reason': Checkin.REASON_REVOKED,
|
'reason': Checkin.REASON_REVOKED,
|
||||||
@@ -588,24 +606,25 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
|||||||
# We choose the first match (regardless of product) for the logging since it's most likely to be the
|
# We choose the first match (regardless of product) for the logging since it's most likely to be the
|
||||||
# base product according to our order_by above.
|
# base product according to our order_by above.
|
||||||
op = op_candidates[0]
|
op = op_candidates[0]
|
||||||
op.order.log_action('pretix.event.checkin.denied', data={
|
if not simulate:
|
||||||
'position': op.id,
|
op.order.log_action('pretix.event.checkin.denied', data={
|
||||||
'positionid': op.positionid,
|
'position': op.id,
|
||||||
'errorcode': Checkin.REASON_AMBIGUOUS,
|
'positionid': op.positionid,
|
||||||
'reason_explanation': None,
|
'errorcode': Checkin.REASON_AMBIGUOUS,
|
||||||
'force': force,
|
'reason_explanation': None,
|
||||||
'datetime': datetime,
|
'force': force,
|
||||||
'type': checkin_type,
|
'datetime': datetime,
|
||||||
'list': list_by_event[op.order.event_id].pk,
|
'type': checkin_type,
|
||||||
}, user=user, auth=auth)
|
'list': list_by_event[op.order.event_id].pk,
|
||||||
common_checkin_args['list'] = list_by_event[op.order.event_id]
|
}, user=user, auth=auth)
|
||||||
Checkin.objects.create(
|
common_checkin_args['list'] = list_by_event[op.order.event_id]
|
||||||
position=op,
|
Checkin.objects.create(
|
||||||
successful=False,
|
position=op,
|
||||||
error_reason=Checkin.REASON_AMBIGUOUS,
|
successful=False,
|
||||||
error_explanation=None,
|
error_reason=Checkin.REASON_AMBIGUOUS,
|
||||||
**common_checkin_args,
|
error_explanation=None,
|
||||||
)
|
**common_checkin_args,
|
||||||
|
)
|
||||||
return Response({
|
return Response({
|
||||||
'status': 'error',
|
'status': 'error',
|
||||||
'reason': Checkin.REASON_AMBIGUOUS,
|
'reason': Checkin.REASON_AMBIGUOUS,
|
||||||
@@ -652,6 +671,7 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
|||||||
raw_barcode=raw_barcode_for_checkin,
|
raw_barcode=raw_barcode_for_checkin,
|
||||||
raw_source_type=source_type,
|
raw_source_type=source_type,
|
||||||
from_revoked_secret=from_revoked_secret,
|
from_revoked_secret=from_revoked_secret,
|
||||||
|
simulate=simulate,
|
||||||
)
|
)
|
||||||
except RequiredQuestionsError as e:
|
except RequiredQuestionsError as e:
|
||||||
return Response({
|
return Response({
|
||||||
@@ -664,23 +684,24 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
|||||||
'list': MiniCheckinListSerializer(list_by_event[op.order.event_id]).data,
|
'list': MiniCheckinListSerializer(list_by_event[op.order.event_id]).data,
|
||||||
}, status=400)
|
}, status=400)
|
||||||
except CheckInError as e:
|
except CheckInError as e:
|
||||||
op.order.log_action('pretix.event.checkin.denied', data={
|
if not simulate:
|
||||||
'position': op.id,
|
op.order.log_action('pretix.event.checkin.denied', data={
|
||||||
'positionid': op.positionid,
|
'position': op.id,
|
||||||
'errorcode': e.code,
|
'positionid': op.positionid,
|
||||||
'reason_explanation': e.reason,
|
'errorcode': e.code,
|
||||||
'force': force,
|
'reason_explanation': e.reason,
|
||||||
'datetime': datetime,
|
'force': force,
|
||||||
'type': checkin_type,
|
'datetime': datetime,
|
||||||
'list': list_by_event[op.order.event_id].pk,
|
'type': checkin_type,
|
||||||
}, user=user, auth=auth)
|
'list': list_by_event[op.order.event_id].pk,
|
||||||
Checkin.objects.create(
|
}, user=user, auth=auth)
|
||||||
position=op,
|
Checkin.objects.create(
|
||||||
successful=False,
|
position=op,
|
||||||
error_reason=e.code,
|
successful=False,
|
||||||
error_explanation=e.reason,
|
error_reason=e.code,
|
||||||
**common_checkin_args,
|
error_explanation=e.reason,
|
||||||
)
|
**common_checkin_args,
|
||||||
|
)
|
||||||
return Response({
|
return Response({
|
||||||
'status': 'error',
|
'status': 'error',
|
||||||
'reason': e.code,
|
'reason': e.code,
|
||||||
|
|||||||
@@ -19,8 +19,12 @@
|
|||||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||||
# <https://www.gnu.org/licenses/>.
|
# <https://www.gnu.org/licenses/>.
|
||||||
#
|
#
|
||||||
|
import base64
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
from cryptography.hazmat.backends.openssl.backend import Backend
|
||||||
|
from cryptography.hazmat.primitives.asymmetric import padding
|
||||||
|
from cryptography.hazmat.primitives.serialization import load_pem_public_key
|
||||||
from django.db.models import Exists, OuterRef, Q
|
from django.db.models import Exists, OuterRef, Q
|
||||||
from django.db.models.functions import Coalesce
|
from django.db.models.functions import Coalesce
|
||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
@@ -34,6 +38,8 @@ from pretix.api.auth.device import DeviceTokenAuthentication
|
|||||||
from pretix.api.views.version import numeric_version
|
from pretix.api.views.version import numeric_version
|
||||||
from pretix.base.models import CheckinList, Device, SubEvent
|
from pretix.base.models import CheckinList, Device, SubEvent
|
||||||
from pretix.base.models.devices import Gate, generate_api_token
|
from pretix.base.models.devices import Gate, generate_api_token
|
||||||
|
from pretix.base.models.media import MediumKeySet
|
||||||
|
from pretix.base.services.media import get_keysets_for_organizer
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -42,17 +48,73 @@ class InitializationRequestSerializer(serializers.Serializer):
|
|||||||
token = serializers.CharField(max_length=190)
|
token = serializers.CharField(max_length=190)
|
||||||
hardware_brand = serializers.CharField(max_length=190)
|
hardware_brand = serializers.CharField(max_length=190)
|
||||||
hardware_model = serializers.CharField(max_length=190)
|
hardware_model = serializers.CharField(max_length=190)
|
||||||
|
os_name = serializers.CharField(max_length=190, required=False, allow_null=True)
|
||||||
|
os_version = serializers.CharField(max_length=190, required=False, allow_null=True)
|
||||||
software_brand = serializers.CharField(max_length=190)
|
software_brand = serializers.CharField(max_length=190)
|
||||||
software_version = serializers.CharField(max_length=190)
|
software_version = serializers.CharField(max_length=190)
|
||||||
info = serializers.JSONField(required=False, allow_null=True)
|
info = serializers.JSONField(required=False, allow_null=True)
|
||||||
|
rsa_pubkey = serializers.CharField(required=False, allow_null=True)
|
||||||
|
|
||||||
|
def validate(self, attrs):
|
||||||
|
if attrs.get('rsa_pubkey'):
|
||||||
|
try:
|
||||||
|
load_pem_public_key(
|
||||||
|
attrs['rsa_pubkey'].encode(), Backend()
|
||||||
|
)
|
||||||
|
except:
|
||||||
|
raise ValidationError({'rsa_pubkey': ['Not a valid public key.']})
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
|
||||||
class UpdateRequestSerializer(serializers.Serializer):
|
class UpdateRequestSerializer(serializers.Serializer):
|
||||||
hardware_brand = serializers.CharField(max_length=190)
|
hardware_brand = serializers.CharField(max_length=190)
|
||||||
hardware_model = serializers.CharField(max_length=190)
|
hardware_model = serializers.CharField(max_length=190)
|
||||||
|
os_name = serializers.CharField(max_length=190, required=False, allow_null=True)
|
||||||
|
os_version = serializers.CharField(max_length=190, required=False, allow_null=True)
|
||||||
software_brand = serializers.CharField(max_length=190)
|
software_brand = serializers.CharField(max_length=190)
|
||||||
software_version = serializers.CharField(max_length=190)
|
software_version = serializers.CharField(max_length=190)
|
||||||
info = serializers.JSONField(required=False, allow_null=True)
|
info = serializers.JSONField(required=False, allow_null=True)
|
||||||
|
rsa_pubkey = serializers.CharField(required=False, allow_null=True)
|
||||||
|
|
||||||
|
def validate(self, attrs):
|
||||||
|
if attrs.get('rsa_pubkey'):
|
||||||
|
try:
|
||||||
|
load_pem_public_key(
|
||||||
|
attrs['rsa_pubkey'].encode(), Backend()
|
||||||
|
)
|
||||||
|
except:
|
||||||
|
raise ValidationError({'rsa_pubkey': ['Not a valid public key.']})
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
|
||||||
|
class RSAEncryptedField(serializers.Field):
|
||||||
|
def to_representation(self, value):
|
||||||
|
public_key = load_pem_public_key(
|
||||||
|
self.context['device'].rsa_pubkey.encode(), Backend()
|
||||||
|
)
|
||||||
|
cipher_text = public_key.encrypt(
|
||||||
|
# RSA/ECB/PKCS1Padding
|
||||||
|
value,
|
||||||
|
padding.PKCS1v15()
|
||||||
|
)
|
||||||
|
return base64.b64encode(cipher_text).decode()
|
||||||
|
|
||||||
|
|
||||||
|
class MediumKeySetSerializer(serializers.ModelSerializer):
|
||||||
|
uid_key = RSAEncryptedField(read_only=True)
|
||||||
|
diversification_key = RSAEncryptedField(read_only=True)
|
||||||
|
organizer = serializers.SlugRelatedField(slug_field='slug', read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = MediumKeySet
|
||||||
|
fields = [
|
||||||
|
'public_id',
|
||||||
|
'organizer',
|
||||||
|
'active',
|
||||||
|
'media_type',
|
||||||
|
'uid_key',
|
||||||
|
'diversification_key',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class GateSerializer(serializers.ModelSerializer):
|
class GateSerializer(serializers.ModelSerializer):
|
||||||
@@ -99,9 +161,12 @@ class InitializeView(APIView):
|
|||||||
device.initialized = now()
|
device.initialized = now()
|
||||||
device.hardware_brand = serializer.validated_data.get('hardware_brand')
|
device.hardware_brand = serializer.validated_data.get('hardware_brand')
|
||||||
device.hardware_model = serializer.validated_data.get('hardware_model')
|
device.hardware_model = serializer.validated_data.get('hardware_model')
|
||||||
|
device.os_name = serializer.validated_data.get('os_name')
|
||||||
|
device.os_version = serializer.validated_data.get('os_version')
|
||||||
device.software_brand = serializer.validated_data.get('software_brand')
|
device.software_brand = serializer.validated_data.get('software_brand')
|
||||||
device.software_version = serializer.validated_data.get('software_version')
|
device.software_version = serializer.validated_data.get('software_version')
|
||||||
device.info = serializer.validated_data.get('info')
|
device.info = serializer.validated_data.get('info')
|
||||||
|
device.rsa_pubkey = serializer.validated_data.get('rsa_pubkey')
|
||||||
device.api_token = generate_api_token()
|
device.api_token = generate_api_token()
|
||||||
device.save()
|
device.save()
|
||||||
|
|
||||||
@@ -120,8 +185,15 @@ class UpdateView(APIView):
|
|||||||
device = request.auth
|
device = request.auth
|
||||||
device.hardware_brand = serializer.validated_data.get('hardware_brand')
|
device.hardware_brand = serializer.validated_data.get('hardware_brand')
|
||||||
device.hardware_model = serializer.validated_data.get('hardware_model')
|
device.hardware_model = serializer.validated_data.get('hardware_model')
|
||||||
|
device.os_name = serializer.validated_data.get('os_name')
|
||||||
|
device.os_version = serializer.validated_data.get('os_version')
|
||||||
device.software_brand = serializer.validated_data.get('software_brand')
|
device.software_brand = serializer.validated_data.get('software_brand')
|
||||||
device.software_version = serializer.validated_data.get('software_version')
|
device.software_version = serializer.validated_data.get('software_version')
|
||||||
|
if serializer.validated_data.get('rsa_pubkey') and serializer.validated_data.get('rsa_pubkey') != device.rsa_pubkey:
|
||||||
|
if device.rsa_pubkey:
|
||||||
|
raise ValidationError({'rsa_pubkey': ['You cannot change the rsa_pubkey of the device once it is set.']})
|
||||||
|
else:
|
||||||
|
device.rsa_pubkey = serializer.validated_data.get('rsa_pubkey')
|
||||||
device.info = serializer.validated_data.get('info')
|
device.info = serializer.validated_data.get('info')
|
||||||
device.save()
|
device.save()
|
||||||
device.log_action('pretix.device.updated', data=serializer.validated_data, auth=device)
|
device.log_action('pretix.device.updated', data=serializer.validated_data, auth=device)
|
||||||
@@ -169,8 +241,12 @@ class InfoView(APIView):
|
|||||||
'pretix': __version__,
|
'pretix': __version__,
|
||||||
'pretix_numeric': numeric_version(__version__),
|
'pretix_numeric': numeric_version(__version__),
|
||||||
}
|
}
|
||||||
}
|
},
|
||||||
|
'medium_key_sets': MediumKeySetSerializer(
|
||||||
|
get_keysets_for_organizer(device.organizer),
|
||||||
|
many=True,
|
||||||
|
context={'device': request.auth}
|
||||||
|
).data if device.rsa_pubkey else []
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -71,6 +71,8 @@ with scopes_disabled():
|
|||||||
ends_after = django_filters.rest_framework.IsoDateTimeFilter(method='ends_after_qs')
|
ends_after = django_filters.rest_framework.IsoDateTimeFilter(method='ends_after_qs')
|
||||||
sales_channel = django_filters.rest_framework.CharFilter(method='sales_channel_qs')
|
sales_channel = django_filters.rest_framework.CharFilter(method='sales_channel_qs')
|
||||||
search = django_filters.rest_framework.CharFilter(method='search_qs')
|
search = django_filters.rest_framework.CharFilter(method='search_qs')
|
||||||
|
date_from = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
|
||||||
|
date_to = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Event
|
model = Event
|
||||||
@@ -336,6 +338,8 @@ with scopes_disabled():
|
|||||||
modified_since = django_filters.IsoDateTimeFilter(field_name='last_modified', lookup_expr='gte')
|
modified_since = django_filters.IsoDateTimeFilter(field_name='last_modified', lookup_expr='gte')
|
||||||
sales_channel = django_filters.rest_framework.CharFilter(method='sales_channel_qs')
|
sales_channel = django_filters.rest_framework.CharFilter(method='sales_channel_qs')
|
||||||
search = django_filters.rest_framework.CharFilter(method='search_qs')
|
search = django_filters.rest_framework.CharFilter(method='search_qs')
|
||||||
|
date_from = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
|
||||||
|
date_to = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = SubEvent
|
model = SubEvent
|
||||||
@@ -411,6 +415,7 @@ class SubEventViewSet(ConditionalListView, viewsets.ModelViewSet):
|
|||||||
'subeventitem_set',
|
'subeventitem_set',
|
||||||
'subeventitemvariation_set',
|
'subeventitemvariation_set',
|
||||||
'meta_values',
|
'meta_values',
|
||||||
|
'meta_values__property',
|
||||||
Prefetch(
|
Prefetch(
|
||||||
'seat_category_mappings',
|
'seat_category_mappings',
|
||||||
to_attr='_seat_category_mappings',
|
to_attr='_seat_category_mappings',
|
||||||
|
|||||||
@@ -133,7 +133,12 @@ class EventExportersViewSet(ExportersMixin, viewsets.ViewSet):
|
|||||||
def exporters(self):
|
def exporters(self):
|
||||||
exporters = []
|
exporters = []
|
||||||
responses = register_data_exporters.send(self.request.event)
|
responses = register_data_exporters.send(self.request.event)
|
||||||
for ex in sorted([response(self.request.event, self.request.organizer) for r, response in responses if response], key=lambda ex: str(ex.verbose_name)):
|
raw_exporters = [response(self.request.event, self.request.organizer) for r, response in responses if response]
|
||||||
|
raw_exporters = [
|
||||||
|
ex for ex in raw_exporters
|
||||||
|
if ex.available_for_user(self.request.user if self.request.user and self.request.user.is_authenticated else None)
|
||||||
|
]
|
||||||
|
for ex in sorted(raw_exporters, key=lambda ex: str(ex.verbose_name)):
|
||||||
ex._serializer = JobRunSerializer(exporter=ex)
|
ex._serializer = JobRunSerializer(exporter=ex)
|
||||||
exporters.append(ex)
|
exporters.append(ex)
|
||||||
return exporters
|
return exporters
|
||||||
@@ -166,7 +171,7 @@ class OrganizerExportersViewSet(ExportersMixin, viewsets.ViewSet):
|
|||||||
if (
|
if (
|
||||||
not isinstance(ex, OrganizerLevelExportMixin) or
|
not isinstance(ex, OrganizerLevelExportMixin) or
|
||||||
perm_holder.has_organizer_permission(self.request.organizer, ex.organizer_required_permission, self.request)
|
perm_holder.has_organizer_permission(self.request.organizer, ex.organizer_required_permission, self.request)
|
||||||
)
|
) and ex.available_for_user(self.request.user if self.request.user and self.request.user.is_authenticated else None)
|
||||||
]
|
]
|
||||||
for ex in sorted(raw_exporters, key=lambda ex: str(ex.verbose_name)):
|
for ex in sorted(raw_exporters, key=lambda ex: str(ex.verbose_name)):
|
||||||
ex._serializer = JobRunSerializer(exporter=ex, events=events)
|
ex._serializer = JobRunSerializer(exporter=ex, events=events)
|
||||||
|
|||||||
@@ -39,7 +39,8 @@ from pretix.api.serializers.media import (
|
|||||||
)
|
)
|
||||||
from pretix.base.media import MEDIA_TYPES
|
from pretix.base.media import MEDIA_TYPES
|
||||||
from pretix.base.models import (
|
from pretix.base.models import (
|
||||||
Checkin, GiftCard, GiftCardTransaction, OrderPosition, ReusableMedium,
|
Checkin, GiftCard, GiftCardAcceptance, GiftCardTransaction, OrderPosition,
|
||||||
|
ReusableMedium,
|
||||||
)
|
)
|
||||||
from pretix.helpers import OF_SELF
|
from pretix.helpers import OF_SELF
|
||||||
from pretix.helpers.dicts import merge_dicts
|
from pretix.helpers.dicts import merge_dicts
|
||||||
@@ -103,6 +104,12 @@ class ReusableMediaViewSet(viewsets.ModelViewSet):
|
|||||||
auth=self.request.auth,
|
auth=self.request.auth,
|
||||||
data=merge_dicts(self.request.data, {'id': inst.pk})
|
data=merge_dicts(self.request.data, {'id': inst.pk})
|
||||||
)
|
)
|
||||||
|
mt = MEDIA_TYPES.get(serializer.validated_data["type"])
|
||||||
|
if mt:
|
||||||
|
m = mt.handle_new(self.request.organizer, inst, self.request.user, self.request.auth)
|
||||||
|
if m:
|
||||||
|
s = self.get_serializer(m)
|
||||||
|
return Response({"result": s.data})
|
||||||
|
|
||||||
@transaction.atomic()
|
@transaction.atomic()
|
||||||
def perform_update(self, serializer):
|
def perform_update(self, serializer):
|
||||||
@@ -135,12 +142,28 @@ class ReusableMediaViewSet(viewsets.ModelViewSet):
|
|||||||
s = self.get_serializer(m)
|
s = self.get_serializer(m)
|
||||||
return Response({"result": s.data})
|
return Response({"result": s.data})
|
||||||
except ReusableMedium.DoesNotExist:
|
except ReusableMedium.DoesNotExist:
|
||||||
mt = MEDIA_TYPES.get(s.validated_data["type"])
|
try:
|
||||||
if mt:
|
with scopes_disabled():
|
||||||
m = mt.handle_unknown(request.organizer, s.validated_data["identifier"], request.user, request.auth)
|
m = ReusableMedium.objects.get(
|
||||||
if m:
|
organizer__in=GiftCardAcceptance.objects.filter(
|
||||||
s = self.get_serializer(m)
|
acceptor=request.organizer,
|
||||||
return Response({"result": s.data})
|
active=True,
|
||||||
|
reusable_media=True,
|
||||||
|
).values_list('issuer', flat=True),
|
||||||
|
type=s.validated_data["type"],
|
||||||
|
identifier=s.validated_data["identifier"],
|
||||||
|
)
|
||||||
|
m.linked_orderposition = None # not relevant for cross-organizer
|
||||||
|
m.customer = None # not relevant for cross-organizer
|
||||||
|
s = self.get_serializer(m)
|
||||||
|
return Response({"result": s.data})
|
||||||
|
except ReusableMedium.DoesNotExist:
|
||||||
|
mt = MEDIA_TYPES.get(s.validated_data["type"])
|
||||||
|
if mt:
|
||||||
|
m = mt.handle_unknown(request.organizer, s.validated_data["identifier"], request.user, request.auth)
|
||||||
|
if m:
|
||||||
|
s = self.get_serializer(m)
|
||||||
|
return Response({"result": s.data})
|
||||||
|
|
||||||
return Response({"result": None})
|
return Response({"result": None})
|
||||||
|
|
||||||
|
|||||||
@@ -23,9 +23,9 @@ import datetime
|
|||||||
import mimetypes
|
import mimetypes
|
||||||
import os
|
import os
|
||||||
from decimal import Decimal
|
from decimal import Decimal
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
import django_filters
|
import django_filters
|
||||||
import pytz
|
|
||||||
from django.db import transaction
|
from django.db import transaction
|
||||||
from django.db.models import (
|
from django.db.models import (
|
||||||
Exists, F, OuterRef, Prefetch, Q, Subquery, prefetch_related_objects,
|
Exists, F, OuterRef, Prefetch, Q, Subquery, prefetch_related_objects,
|
||||||
@@ -44,6 +44,7 @@ from rest_framework.exceptions import (
|
|||||||
APIException, NotFound, PermissionDenied, ValidationError,
|
APIException, NotFound, PermissionDenied, ValidationError,
|
||||||
)
|
)
|
||||||
from rest_framework.mixins import CreateModelMixin
|
from rest_framework.mixins import CreateModelMixin
|
||||||
|
from rest_framework.permissions import SAFE_METHODS
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
|
|
||||||
from pretix.api.models import OAuthAccessToken
|
from pretix.api.models import OAuthAccessToken
|
||||||
@@ -185,7 +186,7 @@ with scopes_disabled():
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class OrderViewSet(viewsets.ModelViewSet):
|
class OrderViewSetMixin:
|
||||||
serializer_class = OrderSerializer
|
serializer_class = OrderSerializer
|
||||||
queryset = Order.objects.none()
|
queryset = Order.objects.none()
|
||||||
filter_backends = (DjangoFilterBackend, TotalOrderingFilter)
|
filter_backends = (DjangoFilterBackend, TotalOrderingFilter)
|
||||||
@@ -193,19 +194,12 @@ class OrderViewSet(viewsets.ModelViewSet):
|
|||||||
ordering_fields = ('datetime', 'code', 'status', 'last_modified')
|
ordering_fields = ('datetime', 'code', 'status', 'last_modified')
|
||||||
filterset_class = OrderFilter
|
filterset_class = OrderFilter
|
||||||
lookup_field = 'code'
|
lookup_field = 'code'
|
||||||
permission = 'can_view_orders'
|
|
||||||
write_permission = 'can_change_orders'
|
|
||||||
|
|
||||||
def get_serializer_context(self):
|
def get_base_queryset(self):
|
||||||
ctx = super().get_serializer_context()
|
raise NotImplementedError()
|
||||||
ctx['event'] = self.request.event
|
|
||||||
ctx['pdf_data'] = self.request.query_params.get('pdf_data', 'false') == 'true'
|
|
||||||
ctx['exclude'] = self.request.query_params.getlist('exclude')
|
|
||||||
ctx['include'] = self.request.query_params.getlist('include')
|
|
||||||
return ctx
|
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
qs = self.request.event.orders
|
qs = self.get_base_queryset()
|
||||||
if 'fees' not in self.request.GET.getlist('exclude'):
|
if 'fees' not in self.request.GET.getlist('exclude'):
|
||||||
if self.request.query_params.get('include_canceled_fees', 'false') == 'true':
|
if self.request.query_params.get('include_canceled_fees', 'false') == 'true':
|
||||||
fqs = OrderFee.all
|
fqs = OrderFee.all
|
||||||
@@ -227,11 +221,12 @@ class OrderViewSet(viewsets.ModelViewSet):
|
|||||||
opq = OrderPosition.all
|
opq = OrderPosition.all
|
||||||
else:
|
else:
|
||||||
opq = OrderPosition.objects
|
opq = OrderPosition.objects
|
||||||
if request.query_params.get('pdf_data', 'false') == 'true':
|
if request.query_params.get('pdf_data', 'false') == 'true' and getattr(request, 'event', None):
|
||||||
prefetch_related_objects([request.organizer], 'meta_properties')
|
prefetch_related_objects([request.organizer], 'meta_properties')
|
||||||
prefetch_related_objects(
|
prefetch_related_objects(
|
||||||
[request.event],
|
[request.event],
|
||||||
Prefetch('meta_values', queryset=EventMetaValue.objects.select_related('property'), to_attr='meta_values_cached'),
|
Prefetch('meta_values', queryset=EventMetaValue.objects.select_related('property'),
|
||||||
|
to_attr='meta_values_cached'),
|
||||||
'questions',
|
'questions',
|
||||||
'item_meta_properties',
|
'item_meta_properties',
|
||||||
)
|
)
|
||||||
@@ -266,13 +261,12 @@ class OrderViewSet(viewsets.ModelViewSet):
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
def _get_output_provider(self, identifier):
|
def get_serializer_context(self):
|
||||||
responses = register_ticket_outputs.send(self.request.event)
|
ctx = super().get_serializer_context()
|
||||||
for receiver, response in responses:
|
ctx['exclude'] = self.request.query_params.getlist('exclude')
|
||||||
prov = response(self.request.event)
|
ctx['include'] = self.request.query_params.getlist('include')
|
||||||
if prov.identifier == identifier:
|
ctx['pdf_data'] = False
|
||||||
return prov
|
return ctx
|
||||||
raise NotFound('Unknown output provider.')
|
|
||||||
|
|
||||||
@scopes_disabled() # we are sure enough that get_queryset() is correct, so we save some perforamnce
|
@scopes_disabled() # we are sure enough that get_queryset() is correct, so we save some perforamnce
|
||||||
def list(self, request, **kwargs):
|
def list(self, request, **kwargs):
|
||||||
@@ -289,6 +283,45 @@ class OrderViewSet(viewsets.ModelViewSet):
|
|||||||
serializer = self.get_serializer(queryset, many=True)
|
serializer = self.get_serializer(queryset, many=True)
|
||||||
return Response(serializer.data, headers={'X-Page-Generated': date})
|
return Response(serializer.data, headers={'X-Page-Generated': date})
|
||||||
|
|
||||||
|
|
||||||
|
class OrganizerOrderViewSet(OrderViewSetMixin, viewsets.ReadOnlyModelViewSet):
|
||||||
|
def get_base_queryset(self):
|
||||||
|
perm = "can_view_orders" if self.request.method in SAFE_METHODS else "can_change_orders"
|
||||||
|
if isinstance(self.request.auth, (TeamAPIToken, Device)):
|
||||||
|
return Order.objects.filter(
|
||||||
|
event__organizer=self.request.organizer,
|
||||||
|
event__in=self.request.auth.get_events_with_permission(perm)
|
||||||
|
)
|
||||||
|
elif self.request.user.is_authenticated:
|
||||||
|
return Order.objects.filter(
|
||||||
|
event__organizer=self.request.organizer,
|
||||||
|
event__in=self.request.user.get_events_with_permission(perm)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise PermissionDenied()
|
||||||
|
|
||||||
|
|
||||||
|
class EventOrderViewSet(OrderViewSetMixin, viewsets.ModelViewSet):
|
||||||
|
permission = 'can_view_orders'
|
||||||
|
write_permission = 'can_change_orders'
|
||||||
|
|
||||||
|
def get_serializer_context(self):
|
||||||
|
ctx = super().get_serializer_context()
|
||||||
|
ctx['event'] = self.request.event
|
||||||
|
ctx['pdf_data'] = self.request.query_params.get('pdf_data', 'false') == 'true'
|
||||||
|
return ctx
|
||||||
|
|
||||||
|
def get_base_queryset(self):
|
||||||
|
return self.request.event.orders
|
||||||
|
|
||||||
|
def _get_output_provider(self, identifier):
|
||||||
|
responses = register_ticket_outputs.send(self.request.event)
|
||||||
|
for receiver, response in responses:
|
||||||
|
prov = response(self.request.event)
|
||||||
|
if prov.identifier == identifier:
|
||||||
|
return prov
|
||||||
|
raise NotFound('Unknown output provider.')
|
||||||
|
|
||||||
@action(detail=True, url_name='download', url_path='download/(?P<output>[^/]+)')
|
@action(detail=True, url_name='download', url_path='download/(?P<output>[^/]+)')
|
||||||
def download(self, request, output, **kwargs):
|
def download(self, request, output, **kwargs):
|
||||||
provider = self._get_output_provider(output)
|
provider = self._get_output_provider(output)
|
||||||
@@ -612,7 +645,7 @@ class OrderViewSet(viewsets.ModelViewSet):
|
|||||||
status=status.HTTP_400_BAD_REQUEST
|
status=status.HTTP_400_BAD_REQUEST
|
||||||
)
|
)
|
||||||
|
|
||||||
tz = pytz.timezone(self.request.event.settings.timezone)
|
tz = ZoneInfo(self.request.event.settings.timezone)
|
||||||
new_date = make_aware(datetime.datetime.combine(
|
new_date = make_aware(datetime.datetime.combine(
|
||||||
new_date,
|
new_date,
|
||||||
datetime.time(hour=23, minute=59, second=59)
|
datetime.time(hour=23, minute=59, second=59)
|
||||||
@@ -661,7 +694,16 @@ class OrderViewSet(viewsets.ModelViewSet):
|
|||||||
|
|
||||||
with language(order.locale, self.request.event.settings.region):
|
with language(order.locale, self.request.event.settings.region):
|
||||||
payment = order.payments.last()
|
payment = order.payments.last()
|
||||||
|
# OrderCreateSerializer creates at most one payment
|
||||||
|
if payment and payment.state == OrderPayment.PAYMENT_STATE_CONFIRMED:
|
||||||
|
order.log_action(
|
||||||
|
'pretix.event.order.payment.confirmed', {
|
||||||
|
'local_id': payment.local_id,
|
||||||
|
'provider': payment.provider,
|
||||||
|
},
|
||||||
|
user=request.user if request.user.is_authenticated else None,
|
||||||
|
auth=request.auth,
|
||||||
|
)
|
||||||
order_placed.send(self.request.event, order=order)
|
order_placed.send(self.request.event, order=order)
|
||||||
if order.status == Order.STATUS_PAID:
|
if order.status == Order.STATUS_PAID:
|
||||||
order_paid.send(self.request.event, order=order)
|
order_paid.send(self.request.event, order=order)
|
||||||
@@ -936,6 +978,7 @@ with scopes_disabled():
|
|||||||
| Q(addon_to__attendee_email__icontains=value)
|
| Q(addon_to__attendee_email__icontains=value)
|
||||||
| Q(order__code__istartswith=value)
|
| Q(order__code__istartswith=value)
|
||||||
| Q(order__invoice_address__name_cached__icontains=value)
|
| Q(order__invoice_address__name_cached__icontains=value)
|
||||||
|
| Q(order__invoice_address__company__icontains=value)
|
||||||
| Q(order__email__icontains=value)
|
| Q(order__email__icontains=value)
|
||||||
| Q(pk__in=matching_media)
|
| Q(pk__in=matching_media)
|
||||||
)
|
)
|
||||||
@@ -1772,11 +1815,24 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
|
|||||||
write_permission = 'can_change_orders'
|
write_permission = 'can_change_orders'
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return self.request.event.invoices.prefetch_related('lines').select_related('order', 'refers').annotate(
|
perm = "can_view_orders" if self.request.method in SAFE_METHODS else "can_change_orders"
|
||||||
|
if getattr(self.request, 'event', None):
|
||||||
|
qs = self.request.event.invoices
|
||||||
|
elif isinstance(self.request.auth, (TeamAPIToken, Device)):
|
||||||
|
qs = Invoice.objects.filter(
|
||||||
|
event__organizer=self.request.organizer,
|
||||||
|
event__in=self.request.auth.get_events_with_permission(perm)
|
||||||
|
)
|
||||||
|
elif self.request.user.is_authenticated:
|
||||||
|
qs = Invoice.objects.filter(
|
||||||
|
event__organizer=self.request.organizer,
|
||||||
|
event__in=self.request.user.get_events_with_permission(perm)
|
||||||
|
)
|
||||||
|
return qs.prefetch_related('lines').select_related('order', 'refers').annotate(
|
||||||
nr=Concat('prefix', 'invoice_no')
|
nr=Concat('prefix', 'invoice_no')
|
||||||
)
|
)
|
||||||
|
|
||||||
@action(detail=True, )
|
@action(detail=True)
|
||||||
def download(self, request, **kwargs):
|
def download(self, request, **kwargs):
|
||||||
invoice = self.get_object()
|
invoice = self.get_object()
|
||||||
|
|
||||||
@@ -1795,7 +1851,7 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
|
|||||||
return resp
|
return resp
|
||||||
|
|
||||||
@action(detail=True, methods=['POST'])
|
@action(detail=True, methods=['POST'])
|
||||||
def regenerate(self, request, **kwarts):
|
def regenerate(self, request, **kwargs):
|
||||||
inv = self.get_object()
|
inv = self.get_object()
|
||||||
if inv.canceled:
|
if inv.canceled:
|
||||||
raise ValidationError('The invoice has already been canceled.')
|
raise ValidationError('The invoice has already been canceled.')
|
||||||
@@ -1805,7 +1861,7 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
|
|||||||
raise PermissionDenied('The invoice file is no longer stored on the server.')
|
raise PermissionDenied('The invoice file is no longer stored on the server.')
|
||||||
elif inv.sent_to_organizer:
|
elif inv.sent_to_organizer:
|
||||||
raise PermissionDenied('The invoice file has already been exported.')
|
raise PermissionDenied('The invoice file has already been exported.')
|
||||||
elif now().astimezone(self.request.event.timezone).date() - inv.date > datetime.timedelta(days=1):
|
elif now().astimezone(inv.event.timezone).date() - inv.date > datetime.timedelta(days=1):
|
||||||
raise PermissionDenied('The invoice file is too old to be regenerated.')
|
raise PermissionDenied('The invoice file is too old to be regenerated.')
|
||||||
else:
|
else:
|
||||||
inv = regenerate_invoice(inv)
|
inv = regenerate_invoice(inv)
|
||||||
@@ -1820,7 +1876,7 @@ class InvoiceViewSet(viewsets.ReadOnlyModelViewSet):
|
|||||||
return Response(status=204)
|
return Response(status=204)
|
||||||
|
|
||||||
@action(detail=True, methods=['POST'])
|
@action(detail=True, methods=['POST'])
|
||||||
def reissue(self, request, **kwarts):
|
def reissue(self, request, **kwargs):
|
||||||
inv = self.get_object()
|
inv = self.get_object()
|
||||||
if inv.canceled:
|
if inv.canceled:
|
||||||
raise ValidationError('The invoice has already been canceled.')
|
raise ValidationError('The invoice has already been canceled.')
|
||||||
|
|||||||
@@ -189,6 +189,34 @@ class ParametrizedOrderPositionWebhookEvent(ParametrizedOrderWebhookEvent):
|
|||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
|
class ParametrizedWaitingListEntryWebhookEvent(ParametrizedWebhookEvent):
|
||||||
|
|
||||||
|
def build_payload(self, logentry: LogEntry):
|
||||||
|
# do not use content_object, this is also called in deletion
|
||||||
|
return {
|
||||||
|
'notification_id': logentry.pk,
|
||||||
|
'organizer': logentry.event.organizer.slug,
|
||||||
|
'event': logentry.event.slug,
|
||||||
|
'waitinglistentry': logentry.object_id,
|
||||||
|
'action': logentry.action_type,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class ParametrizedCustomerWebhookEvent(ParametrizedWebhookEvent):
|
||||||
|
|
||||||
|
def build_payload(self, logentry: LogEntry):
|
||||||
|
customer = logentry.content_object
|
||||||
|
if not customer:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return {
|
||||||
|
'notification_id': logentry.pk,
|
||||||
|
'organizer': customer.organizer.slug,
|
||||||
|
'customer': customer.identifier,
|
||||||
|
'action': logentry.action_type,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@receiver(register_webhook_events, dispatch_uid="base_register_default_webhook_events")
|
@receiver(register_webhook_events, dispatch_uid="base_register_default_webhook_events")
|
||||||
def register_default_webhook_events(sender, **kwargs):
|
def register_default_webhook_events(sender, **kwargs):
|
||||||
return (
|
return (
|
||||||
@@ -321,6 +349,34 @@ def register_default_webhook_events(sender, **kwargs):
|
|||||||
'pretix.event.testmode.deactivated',
|
'pretix.event.testmode.deactivated',
|
||||||
_('Test-Mode of shop has been deactivated'),
|
_('Test-Mode of shop has been deactivated'),
|
||||||
),
|
),
|
||||||
|
ParametrizedWaitingListEntryWebhookEvent(
|
||||||
|
'pretix.event.orders.waitinglist.added',
|
||||||
|
_('Waiting list entry added'),
|
||||||
|
),
|
||||||
|
ParametrizedWaitingListEntryWebhookEvent(
|
||||||
|
'pretix.event.orders.waitinglist.changed',
|
||||||
|
_('Waiting list entry changed'),
|
||||||
|
),
|
||||||
|
ParametrizedWaitingListEntryWebhookEvent(
|
||||||
|
'pretix.event.orders.waitinglist.deleted',
|
||||||
|
_('Waiting list entry deleted'),
|
||||||
|
),
|
||||||
|
ParametrizedWaitingListEntryWebhookEvent(
|
||||||
|
'pretix.event.orders.waitinglist.voucher_assigned',
|
||||||
|
_('Waiting list entry received voucher'),
|
||||||
|
),
|
||||||
|
ParametrizedCustomerWebhookEvent(
|
||||||
|
'pretix.customer.created',
|
||||||
|
_('Customer account created'),
|
||||||
|
),
|
||||||
|
ParametrizedCustomerWebhookEvent(
|
||||||
|
'pretix.customer.changed',
|
||||||
|
_('Customer account changed'),
|
||||||
|
),
|
||||||
|
ParametrizedCustomerWebhookEvent(
|
||||||
|
'pretix.customer.anonymized',
|
||||||
|
_('Customer account anonymized'),
|
||||||
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -62,27 +62,27 @@ class NamespacedCache:
|
|||||||
prefix = int(time.time())
|
prefix = int(time.time())
|
||||||
self.cache.set(self.prefixkey, prefix)
|
self.cache.set(self.prefixkey, prefix)
|
||||||
|
|
||||||
def set(self, key: str, value: str, timeout: int=300):
|
def set(self, key: str, value: any, timeout: int=300):
|
||||||
return self.cache.set(self._prefix_key(key), value, timeout)
|
return self.cache.set(self._prefix_key(key), value, timeout)
|
||||||
|
|
||||||
def get(self, key: str) -> str:
|
def get(self, key: str) -> any:
|
||||||
return self.cache.get(self._prefix_key(key, known_prefix=self._last_prefix))
|
return self.cache.get(self._prefix_key(key, known_prefix=self._last_prefix))
|
||||||
|
|
||||||
def get_or_set(self, key: str, default: Callable, timeout=300) -> str:
|
def get_or_set(self, key: str, default: Callable, timeout=300) -> any:
|
||||||
return self.cache.get_or_set(
|
return self.cache.get_or_set(
|
||||||
self._prefix_key(key, known_prefix=self._last_prefix),
|
self._prefix_key(key, known_prefix=self._last_prefix),
|
||||||
default=default,
|
default=default,
|
||||||
timeout=timeout
|
timeout=timeout
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_many(self, keys: List[str]) -> Dict[str, str]:
|
def get_many(self, keys: List[str]) -> Dict[str, any]:
|
||||||
values = self.cache.get_many([self._prefix_key(key) for key in keys])
|
values = self.cache.get_many([self._prefix_key(key) for key in keys])
|
||||||
newvalues = {}
|
newvalues = {}
|
||||||
for k, v in values.items():
|
for k, v in values.items():
|
||||||
newvalues[self._strip_prefix(k)] = v
|
newvalues[self._strip_prefix(k)] = v
|
||||||
return newvalues
|
return newvalues
|
||||||
|
|
||||||
def set_many(self, values: Dict[str, str], timeout=300):
|
def set_many(self, values: Dict[str, any], timeout=300):
|
||||||
newvalues = {}
|
newvalues = {}
|
||||||
for k, v in values.items():
|
for k, v in values.items():
|
||||||
newvalues[self._prefix_key(k)] = v
|
newvalues[self._prefix_key(k)] = v
|
||||||
|
|||||||
@@ -134,8 +134,11 @@ class TemplateBasedMailRenderer(BaseHTMLMailRenderer):
|
|||||||
def template_name(self):
|
def template_name(self):
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def compile_markdown(self, plaintext):
|
||||||
|
return markdown_compile_email(plaintext)
|
||||||
|
|
||||||
def render(self, plain_body: str, plain_signature: str, subject: str, order, position) -> str:
|
def render(self, plain_body: str, plain_signature: str, subject: str, order, position) -> str:
|
||||||
body_md = markdown_compile_email(plain_body)
|
body_md = self.compile_markdown(plain_body)
|
||||||
htmlctx = {
|
htmlctx = {
|
||||||
'site': settings.PRETIX_INSTANCE_NAME,
|
'site': settings.PRETIX_INSTANCE_NAME,
|
||||||
'site_url': settings.SITE_URL,
|
'site_url': settings.SITE_URL,
|
||||||
@@ -153,7 +156,7 @@ class TemplateBasedMailRenderer(BaseHTMLMailRenderer):
|
|||||||
|
|
||||||
if plain_signature:
|
if plain_signature:
|
||||||
signature_md = plain_signature.replace('\n', '<br>\n')
|
signature_md = plain_signature.replace('\n', '<br>\n')
|
||||||
signature_md = markdown_compile_email(signature_md)
|
signature_md = self.compile_markdown(signature_md)
|
||||||
htmlctx['signature'] = signature_md
|
htmlctx['signature'] = signature_md
|
||||||
|
|
||||||
if order:
|
if order:
|
||||||
|
|||||||
@@ -37,8 +37,8 @@ import tempfile
|
|||||||
from collections import OrderedDict, namedtuple
|
from collections import OrderedDict, namedtuple
|
||||||
from decimal import Decimal
|
from decimal import Decimal
|
||||||
from typing import Optional, Tuple
|
from typing import Optional, Tuple
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
import pytz
|
|
||||||
from defusedcsv import csv
|
from defusedcsv import csv
|
||||||
from django import forms
|
from django import forms
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
@@ -68,7 +68,7 @@ class BaseExporter:
|
|||||||
self.events = event
|
self.events = event
|
||||||
self.event = None
|
self.event = None
|
||||||
e = self.events.first()
|
e = self.events.first()
|
||||||
self.timezone = e.timezone if e else pytz.timezone(settings.TIME_ZONE)
|
self.timezone = e.timezone if e else ZoneInfo(settings.TIME_ZONE)
|
||||||
else:
|
else:
|
||||||
self.events = Event.objects.filter(pk=event.pk)
|
self.events = Event.objects.filter(pk=event.pk)
|
||||||
self.timezone = event.timezone
|
self.timezone = event.timezone
|
||||||
@@ -140,7 +140,7 @@ class BaseExporter:
|
|||||||
"""
|
"""
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
def render(self, form_data: dict) -> Tuple[str, str, bytes]:
|
def render(self, form_data: dict) -> Tuple[str, str, Optional[bytes]]:
|
||||||
"""
|
"""
|
||||||
Render the exported file and return a tuple consisting of a filename, a file type
|
Render the exported file and return a tuple consisting of a filename, a file type
|
||||||
and file content.
|
and file content.
|
||||||
@@ -157,6 +157,13 @@ class BaseExporter:
|
|||||||
"""
|
"""
|
||||||
raise NotImplementedError() # NOQA
|
raise NotImplementedError() # NOQA
|
||||||
|
|
||||||
|
def available_for_user(self, user) -> bool:
|
||||||
|
"""
|
||||||
|
Allows to do additional checks whether an exporter is available based on the user who calls it. Note that
|
||||||
|
``user`` may be ``None`` e.g. during API usage.
|
||||||
|
"""
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
class OrganizerLevelExportMixin:
|
class OrganizerLevelExportMixin:
|
||||||
@property
|
@property
|
||||||
|
|||||||
@@ -34,8 +34,8 @@
|
|||||||
|
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
from decimal import Decimal
|
from decimal import Decimal
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
import pytz
|
|
||||||
from django import forms
|
from django import forms
|
||||||
from django.db.models import (
|
from django.db.models import (
|
||||||
Case, CharField, Count, DateTimeField, F, IntegerField, Max, Min, OuterRef,
|
Case, CharField, Count, DateTimeField, F, IntegerField, Max, Min, OuterRef,
|
||||||
@@ -326,7 +326,7 @@ class OrderListExporter(MultiSheetListExporter):
|
|||||||
|
|
||||||
yield self.ProgressSetTotal(total=qs.count())
|
yield self.ProgressSetTotal(total=qs.count())
|
||||||
for order in qs.order_by('datetime').iterator():
|
for order in qs.order_by('datetime').iterator():
|
||||||
tz = pytz.timezone(self.event_object_cache[order.event_id].settings.timezone)
|
tz = ZoneInfo(self.event_object_cache[order.event_id].settings.timezone)
|
||||||
|
|
||||||
row = [
|
row = [
|
||||||
self.event_object_cache[order.event_id].slug,
|
self.event_object_cache[order.event_id].slug,
|
||||||
@@ -459,7 +459,7 @@ class OrderListExporter(MultiSheetListExporter):
|
|||||||
yield self.ProgressSetTotal(total=qs.count())
|
yield self.ProgressSetTotal(total=qs.count())
|
||||||
for op in qs.order_by('order__datetime').iterator():
|
for op in qs.order_by('order__datetime').iterator():
|
||||||
order = op.order
|
order = op.order
|
||||||
tz = pytz.timezone(order.event.settings.timezone)
|
tz = ZoneInfo(order.event.settings.timezone)
|
||||||
row = [
|
row = [
|
||||||
self.event_object_cache[order.event_id].slug,
|
self.event_object_cache[order.event_id].slug,
|
||||||
order.code,
|
order.code,
|
||||||
@@ -549,7 +549,9 @@ class OrderListExporter(MultiSheetListExporter):
|
|||||||
headers.append(_('End date'))
|
headers.append(_('End date'))
|
||||||
headers += [
|
headers += [
|
||||||
_('Product'),
|
_('Product'),
|
||||||
|
_('Product ID'),
|
||||||
_('Variation'),
|
_('Variation'),
|
||||||
|
_('Variation ID'),
|
||||||
_('Price'),
|
_('Price'),
|
||||||
_('Tax rate'),
|
_('Tax rate'),
|
||||||
_('Tax rule'),
|
_('Tax rule'),
|
||||||
@@ -631,7 +633,7 @@ class OrderListExporter(MultiSheetListExporter):
|
|||||||
|
|
||||||
for op in ops:
|
for op in ops:
|
||||||
order = op.order
|
order = op.order
|
||||||
tz = pytz.timezone(self.event_object_cache[order.event_id].settings.timezone)
|
tz = ZoneInfo(self.event_object_cache[order.event_id].settings.timezone)
|
||||||
row = [
|
row = [
|
||||||
self.event_object_cache[order.event_id].slug,
|
self.event_object_cache[order.event_id].slug,
|
||||||
order.code,
|
order.code,
|
||||||
@@ -656,7 +658,9 @@ class OrderListExporter(MultiSheetListExporter):
|
|||||||
row.append('')
|
row.append('')
|
||||||
row += [
|
row += [
|
||||||
str(op.item),
|
str(op.item),
|
||||||
|
str(op.item_id),
|
||||||
str(op.variation) if op.variation else '',
|
str(op.variation) if op.variation else '',
|
||||||
|
str(op.variation_id) if op.variation_id else '',
|
||||||
op.price,
|
op.price,
|
||||||
op.tax_rate,
|
op.tax_rate,
|
||||||
str(op.tax_rule) if op.tax_rule else '',
|
str(op.tax_rule) if op.tax_rule else '',
|
||||||
@@ -850,6 +854,8 @@ class TransactionListExporter(ListExporter):
|
|||||||
_('Tax rule ID'),
|
_('Tax rule ID'),
|
||||||
_('Tax rule'),
|
_('Tax rule'),
|
||||||
_('Tax value'),
|
_('Tax value'),
|
||||||
|
_('Gross total'),
|
||||||
|
_('Tax total'),
|
||||||
]
|
]
|
||||||
|
|
||||||
if form_data.get('_format') == 'xlsx':
|
if form_data.get('_format') == 'xlsx':
|
||||||
@@ -901,6 +907,8 @@ class TransactionListExporter(ListExporter):
|
|||||||
t.tax_rule_id or '',
|
t.tax_rule_id or '',
|
||||||
str(t.tax_rule.internal_name or t.tax_rule.name) if t.tax_rule_id else '',
|
str(t.tax_rule.internal_name or t.tax_rule.name) if t.tax_rule_id else '',
|
||||||
t.tax_value,
|
t.tax_value,
|
||||||
|
t.price * t.count,
|
||||||
|
t.tax_value * t.count,
|
||||||
]
|
]
|
||||||
|
|
||||||
if form_data.get('_format') == 'xlsx':
|
if form_data.get('_format') == 'xlsx':
|
||||||
@@ -1024,7 +1032,7 @@ class PaymentListExporter(ListExporter):
|
|||||||
|
|
||||||
yield self.ProgressSetTotal(total=len(objs))
|
yield self.ProgressSetTotal(total=len(objs))
|
||||||
for obj in objs:
|
for obj in objs:
|
||||||
tz = pytz.timezone(obj.order.event.settings.timezone)
|
tz = ZoneInfo(obj.order.event.settings.timezone)
|
||||||
if isinstance(obj, OrderPayment) and obj.payment_date:
|
if isinstance(obj, OrderPayment) and obj.payment_date:
|
||||||
d2 = obj.payment_date.astimezone(tz).date().strftime('%Y-%m-%d')
|
d2 = obj.payment_date.astimezone(tz).date().strftime('%Y-%m-%d')
|
||||||
elif isinstance(obj, OrderRefund) and obj.execution_date:
|
elif isinstance(obj, OrderRefund) and obj.execution_date:
|
||||||
@@ -1143,7 +1151,7 @@ class GiftcardTransactionListExporter(OrganizerLevelExportMixin, ListExporter):
|
|||||||
def iterate_list(self, form_data):
|
def iterate_list(self, form_data):
|
||||||
qs = GiftCardTransaction.objects.filter(
|
qs = GiftCardTransaction.objects.filter(
|
||||||
card__issuer=self.organizer,
|
card__issuer=self.organizer,
|
||||||
).order_by('datetime').select_related('card', 'order', 'order__event')
|
).order_by('datetime').select_related('card', 'order', 'order__event', 'acceptor')
|
||||||
|
|
||||||
if form_data.get('date_range'):
|
if form_data.get('date_range'):
|
||||||
dt_start, dt_end = resolve_timeframe_to_datetime_start_inclusive_end_exclusive(now(), form_data['date_range'], self.timezone)
|
dt_start, dt_end = resolve_timeframe_to_datetime_start_inclusive_end_exclusive(now(), form_data['date_range'], self.timezone)
|
||||||
@@ -1159,6 +1167,7 @@ class GiftcardTransactionListExporter(OrganizerLevelExportMixin, ListExporter):
|
|||||||
_('Amount'),
|
_('Amount'),
|
||||||
_('Currency'),
|
_('Currency'),
|
||||||
_('Order'),
|
_('Order'),
|
||||||
|
_('Organizer'),
|
||||||
]
|
]
|
||||||
yield headers
|
yield headers
|
||||||
|
|
||||||
@@ -1170,6 +1179,7 @@ class GiftcardTransactionListExporter(OrganizerLevelExportMixin, ListExporter):
|
|||||||
obj.value,
|
obj.value,
|
||||||
obj.card.currency,
|
obj.card.currency,
|
||||||
obj.order.full_code if obj.order else None,
|
obj.order.full_code if obj.order else None,
|
||||||
|
str(obj.acceptor or ""),
|
||||||
]
|
]
|
||||||
yield row
|
yield row
|
||||||
|
|
||||||
@@ -1203,7 +1213,7 @@ class GiftcardRedemptionListExporter(ListExporter):
|
|||||||
yield headers
|
yield headers
|
||||||
|
|
||||||
for obj in objs:
|
for obj in objs:
|
||||||
tz = pytz.timezone(obj.order.event.settings.timezone)
|
tz = ZoneInfo(obj.order.event.settings.timezone)
|
||||||
gc = GiftCard.objects.get(pk=obj.info_data.get('gift_card'))
|
gc = GiftCard.objects.get(pk=obj.info_data.get('gift_card'))
|
||||||
row = [
|
row = [
|
||||||
obj.order.event.slug,
|
obj.order.event.slug,
|
||||||
|
|||||||
@@ -20,8 +20,8 @@
|
|||||||
# <https://www.gnu.org/licenses/>.
|
# <https://www.gnu.org/licenses/>.
|
||||||
#
|
#
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
import pytz
|
|
||||||
from django import forms
|
from django import forms
|
||||||
from django.db.models import F, Q
|
from django.db.models import F, Q
|
||||||
from django.dispatch import receiver
|
from django.dispatch import receiver
|
||||||
@@ -137,7 +137,7 @@ class WaitingListExporter(ListExporter):
|
|||||||
|
|
||||||
# which event should be used to output dates in columns "Start date" and "End date"
|
# which event should be used to output dates in columns "Start date" and "End date"
|
||||||
event_for_date_columns = entry.subevent if entry.subevent else entry.event
|
event_for_date_columns = entry.subevent if entry.subevent else entry.event
|
||||||
tz = pytz.timezone(entry.event.settings.timezone)
|
tz = ZoneInfo(entry.event.settings.timezone)
|
||||||
datetime_format = '%Y-%m-%d %H:%M:%S'
|
datetime_format = '%Y-%m-%d %H:%M:%S'
|
||||||
|
|
||||||
row = [
|
row = [
|
||||||
|
|||||||
@@ -167,6 +167,7 @@ class SettingsForm(i18nfield.forms.I18nFormMixin, HierarkeyForm):
|
|||||||
|
|
||||||
class PrefixForm(forms.Form):
|
class PrefixForm(forms.Form):
|
||||||
prefix = forms.CharField(widget=forms.HiddenInput)
|
prefix = forms.CharField(widget=forms.HiddenInput)
|
||||||
|
template_name = "django/forms/table.html"
|
||||||
|
|
||||||
|
|
||||||
class SafeSessionWizardView(SessionWizardView):
|
class SafeSessionWizardView(SessionWizardView):
|
||||||
|
|||||||
@@ -38,10 +38,10 @@ import logging
|
|||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from decimal import Decimal
|
from decimal import Decimal
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
import dateutil.parser
|
import dateutil.parser
|
||||||
import pycountry
|
import pycountry
|
||||||
import pytz
|
|
||||||
from django import forms
|
from django import forms
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.contrib import messages
|
from django.contrib import messages
|
||||||
@@ -61,6 +61,7 @@ from django.utils.timezone import get_current_timezone, now
|
|||||||
from django.utils.translation import gettext_lazy as _, pgettext_lazy
|
from django.utils.translation import gettext_lazy as _, pgettext_lazy
|
||||||
from django_countries import countries
|
from django_countries import countries
|
||||||
from django_countries.fields import Country, CountryField
|
from django_countries.fields import Country, CountryField
|
||||||
|
from geoip2.errors import AddressNotFoundError
|
||||||
from phonenumber_field.formfields import PhoneNumberField
|
from phonenumber_field.formfields import PhoneNumberField
|
||||||
from phonenumber_field.phonenumber import PhoneNumber
|
from phonenumber_field.phonenumber import PhoneNumber
|
||||||
from phonenumber_field.widgets import PhoneNumberPrefixWidget
|
from phonenumber_field.widgets import PhoneNumberPrefixWidget
|
||||||
@@ -356,9 +357,12 @@ class WrappedPhoneNumberPrefixWidget(PhoneNumberPrefixWidget):
|
|||||||
def guess_country_from_request(request, event):
|
def guess_country_from_request(request, event):
|
||||||
if settings.HAS_GEOIP:
|
if settings.HAS_GEOIP:
|
||||||
g = GeoIP2()
|
g = GeoIP2()
|
||||||
res = g.country(get_client_ip(request))
|
try:
|
||||||
if res['country_code'] and len(res['country_code']) == 2:
|
res = g.country(get_client_ip(request))
|
||||||
return Country(res['country_code'])
|
if res['country_code'] and len(res['country_code']) == 2:
|
||||||
|
return Country(res['country_code'])
|
||||||
|
except AddressNotFoundError:
|
||||||
|
pass
|
||||||
return guess_country(event)
|
return guess_country(event)
|
||||||
|
|
||||||
|
|
||||||
@@ -733,7 +737,7 @@ class BaseQuestionsForm(forms.Form):
|
|||||||
initial = answers[0]
|
initial = answers[0]
|
||||||
else:
|
else:
|
||||||
initial = None
|
initial = None
|
||||||
tz = pytz.timezone(event.settings.timezone)
|
tz = ZoneInfo(event.settings.timezone)
|
||||||
help_text = rich_text(q.help_text)
|
help_text = rich_text(q.help_text)
|
||||||
label = escape(q.question) # django-bootstrap3 calls mark_safe
|
label = escape(q.question) # django-bootstrap3 calls mark_safe
|
||||||
required = q.required and not self.all_optional
|
required = q.required and not self.all_optional
|
||||||
|
|||||||
63
src/pretix/base/forms/renderers.py
Normal file
63
src/pretix/base/forms/renderers.py
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
#
|
||||||
|
# This file is part of pretix (Community Edition).
|
||||||
|
#
|
||||||
|
# Copyright (C) 2014-2020 Raphael Michel and contributors
|
||||||
|
# Copyright (C) 2020-2021 rami.io GmbH and contributors
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
|
||||||
|
# Public License as published by the Free Software Foundation in version 3 of the License.
|
||||||
|
#
|
||||||
|
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
|
||||||
|
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
|
||||||
|
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
|
||||||
|
# this file, see <https://pretix.eu/about/en/license>.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
|
||||||
|
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
|
||||||
|
# details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||||
|
# <https://www.gnu.org/licenses/>.
|
||||||
|
#
|
||||||
|
from bootstrap3.renderers import (
|
||||||
|
FieldRenderer as BaseFieldRenderer,
|
||||||
|
InlineFieldRenderer as BaseInlineFieldRenderer,
|
||||||
|
)
|
||||||
|
from django.forms import (
|
||||||
|
CheckboxInput, CheckboxSelectMultiple, ClearableFileInput, RadioSelect,
|
||||||
|
SelectDateWidget,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class FieldRenderer(BaseFieldRenderer):
|
||||||
|
# Local application of https://github.com/zostera/django-bootstrap3/pull/859
|
||||||
|
|
||||||
|
def post_widget_render(self, html):
|
||||||
|
if isinstance(self.widget, CheckboxSelectMultiple):
|
||||||
|
html = self.list_to_class(html, "checkbox")
|
||||||
|
elif isinstance(self.widget, RadioSelect):
|
||||||
|
html = self.list_to_class(html, "radio")
|
||||||
|
elif isinstance(self.widget, SelectDateWidget):
|
||||||
|
html = self.fix_date_select_input(html)
|
||||||
|
elif isinstance(self.widget, ClearableFileInput):
|
||||||
|
html = self.fix_clearable_file_input(html)
|
||||||
|
elif isinstance(self.widget, CheckboxInput):
|
||||||
|
html = self.put_inside_label(html)
|
||||||
|
return html
|
||||||
|
|
||||||
|
|
||||||
|
class InlineFieldRenderer(BaseInlineFieldRenderer):
|
||||||
|
# Local application of https://github.com/zostera/django-bootstrap3/pull/859
|
||||||
|
|
||||||
|
def post_widget_render(self, html):
|
||||||
|
if isinstance(self.widget, CheckboxSelectMultiple):
|
||||||
|
html = self.list_to_class(html, "checkbox")
|
||||||
|
elif isinstance(self.widget, RadioSelect):
|
||||||
|
html = self.list_to_class(html, "radio")
|
||||||
|
elif isinstance(self.widget, SelectDateWidget):
|
||||||
|
html = self.fix_date_select_input(html)
|
||||||
|
elif isinstance(self.widget, ClearableFileInput):
|
||||||
|
html = self.fix_clearable_file_input(html)
|
||||||
|
elif isinstance(self.widget, CheckboxInput):
|
||||||
|
html = self.put_inside_label(html)
|
||||||
|
return html
|
||||||
@@ -24,7 +24,7 @@ Django, for theoretically very valid reasons, creates migrations for *every sing
|
|||||||
we change on a model. Even the `help_text`! This makes sense, as we don't know if any
|
we change on a model. Even the `help_text`! This makes sense, as we don't know if any
|
||||||
database backend unknown to us might actually use this information for its database schema.
|
database backend unknown to us might actually use this information for its database schema.
|
||||||
|
|
||||||
However, pretix only supports PostgreSQL, MySQL, MariaDB and SQLite and we can be pretty
|
However, pretix only supports PostgreSQL and SQLite and we can be pretty
|
||||||
certain that some changes to models will never require a change to the database. In this case,
|
certain that some changes to models will never require a change to the database. In this case,
|
||||||
not creating a migration for certain changes will save us some performance while applying them
|
not creating a migration for certain changes will save us some performance while applying them
|
||||||
*and* allow for a cleaner git history. Win-win!
|
*and* allow for a cleaner git history. Win-win!
|
||||||
|
|||||||
@@ -22,7 +22,7 @@
|
|||||||
import json
|
import json
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
import pytz
|
import pytz_deprecation_shim
|
||||||
from django.core.management.base import BaseCommand
|
from django.core.management.base import BaseCommand
|
||||||
from django.utils.timezone import override
|
from django.utils.timezone import override
|
||||||
from django_scopes import scope
|
from django_scopes import scope
|
||||||
@@ -60,7 +60,7 @@ class Command(BaseCommand):
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
locale = options.get("locale", None)
|
locale = options.get("locale", None)
|
||||||
timezone = pytz.timezone(options['timezone']) if options.get('timezone') else None
|
timezone = pytz_deprecation_shim.timezone(options['timezone']) if options.get('timezone') else None
|
||||||
|
|
||||||
with scope(organizer=o):
|
with scope(organizer=o):
|
||||||
if options['event_slug']:
|
if options['event_slug']:
|
||||||
|
|||||||
@@ -49,6 +49,9 @@ class BaseMediaType:
|
|||||||
def handle_unknown(self, organizer, identifier, user, auth):
|
def handle_unknown(self, organizer, identifier, user, auth):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
def handle_new(self, organizer, medium, user, auth):
|
||||||
|
pass
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return str(self.verbose_name)
|
return str(self.verbose_name)
|
||||||
|
|
||||||
@@ -108,9 +111,43 @@ class NfcUidMediaType(BaseMediaType):
|
|||||||
return m
|
return m
|
||||||
|
|
||||||
|
|
||||||
|
class NfcMf0aesMediaType(BaseMediaType):
|
||||||
|
identifier = 'nfc_mf0aes'
|
||||||
|
verbose_name = 'NFC Mifare Ultralight AES'
|
||||||
|
medium_created_by_server = False
|
||||||
|
supports_giftcard = True
|
||||||
|
supports_orderposition = False
|
||||||
|
|
||||||
|
def handle_new(self, organizer, medium, user, auth):
|
||||||
|
from pretix.base.models import GiftCard
|
||||||
|
|
||||||
|
if organizer.settings.get(f'reusable_media_type_{self.identifier}_autocreate_giftcard', as_type=bool):
|
||||||
|
with transaction.atomic():
|
||||||
|
gc = GiftCard.objects.create(
|
||||||
|
issuer=organizer,
|
||||||
|
expires=organizer.default_gift_card_expiry,
|
||||||
|
currency=organizer.settings.get(f'reusable_media_type_{self.identifier}_autocreate_giftcard_currency'),
|
||||||
|
)
|
||||||
|
medium.linked_giftcard = gc
|
||||||
|
medium.save()
|
||||||
|
medium.log_action(
|
||||||
|
'pretix.reusable_medium.linked_giftcard.changed',
|
||||||
|
user=user, auth=auth,
|
||||||
|
data={
|
||||||
|
'linked_giftcard': gc.pk
|
||||||
|
}
|
||||||
|
)
|
||||||
|
gc.log_action(
|
||||||
|
'pretix.giftcards.created',
|
||||||
|
user=user, auth=auth,
|
||||||
|
)
|
||||||
|
return medium
|
||||||
|
|
||||||
|
|
||||||
MEDIA_TYPES = {
|
MEDIA_TYPES = {
|
||||||
m.identifier: m for m in [
|
m.identifier: m for m in [
|
||||||
BarcodePlainMediaType(),
|
BarcodePlainMediaType(),
|
||||||
NfcUidMediaType(),
|
NfcUidMediaType(),
|
||||||
|
NfcMf0aesMediaType(),
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -21,12 +21,12 @@
|
|||||||
#
|
#
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
from urllib.parse import urlsplit
|
from urllib.parse import urlsplit
|
||||||
|
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
|
||||||
|
|
||||||
import pytz
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.http import Http404, HttpRequest, HttpResponse
|
from django.http import Http404, HttpRequest, HttpResponse
|
||||||
from django.middleware.common import CommonMiddleware
|
from django.middleware.common import CommonMiddleware
|
||||||
from django.urls import get_script_prefix
|
from django.urls import get_script_prefix, resolve
|
||||||
from django.utils import timezone, translation
|
from django.utils import timezone, translation
|
||||||
from django.utils.cache import patch_vary_headers
|
from django.utils.cache import patch_vary_headers
|
||||||
from django.utils.deprecation import MiddlewareMixin
|
from django.utils.deprecation import MiddlewareMixin
|
||||||
@@ -98,9 +98,9 @@ class LocaleMiddleware(MiddlewareMixin):
|
|||||||
tzname = request.user.timezone
|
tzname = request.user.timezone
|
||||||
if tzname:
|
if tzname:
|
||||||
try:
|
try:
|
||||||
timezone.activate(pytz.timezone(tzname))
|
timezone.activate(ZoneInfo(tzname))
|
||||||
request.timezone = tzname
|
request.timezone = tzname
|
||||||
except pytz.UnknownTimeZoneError:
|
except ZoneInfoNotFoundError:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
timezone.deactivate()
|
timezone.deactivate()
|
||||||
@@ -230,6 +230,8 @@ class SecurityMiddleware(MiddlewareMixin):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def process_response(self, request, resp):
|
def process_response(self, request, resp):
|
||||||
|
url = resolve(request.path_info)
|
||||||
|
|
||||||
if settings.DEBUG and resp.status_code >= 400:
|
if settings.DEBUG and resp.status_code >= 400:
|
||||||
# Don't use CSP on debug error page as it breaks of Django's fancy error
|
# Don't use CSP on debug error page as it breaks of Django's fancy error
|
||||||
# pages
|
# pages
|
||||||
@@ -249,20 +251,28 @@ class SecurityMiddleware(MiddlewareMixin):
|
|||||||
|
|
||||||
h = {
|
h = {
|
||||||
'default-src': ["{static}"],
|
'default-src': ["{static}"],
|
||||||
'script-src': ['{static}', 'https://checkout.stripe.com', 'https://js.stripe.com'],
|
'script-src': ['{static}'],
|
||||||
'object-src': ["'none'"],
|
'object-src': ["'none'"],
|
||||||
'frame-src': ['{static}', 'https://checkout.stripe.com', 'https://js.stripe.com'],
|
'frame-src': ['{static}'],
|
||||||
'style-src': ["{static}", "{media}"],
|
'style-src': ["{static}", "{media}"],
|
||||||
'connect-src': ["{dynamic}", "{media}", "https://checkout.stripe.com"],
|
'connect-src': ["{dynamic}", "{media}"],
|
||||||
'img-src': ["{static}", "{media}", "data:", "https://*.stripe.com"] + img_src,
|
'img-src': ["{static}", "{media}", "data:"] + img_src,
|
||||||
'font-src': ["{static}"],
|
'font-src': ["{static}"],
|
||||||
'media-src': ["{static}", "data:"],
|
'media-src': ["{static}", "data:"],
|
||||||
# form-action is not only used to match on form actions, but also on URLs
|
# form-action is not only used to match on form actions, but also on URLs
|
||||||
# form-actions redirect to. In the context of e.g. payment providers or
|
# form-actions redirect to. In the context of e.g. payment providers or
|
||||||
# single-sign-on this can be nearly anything so we cannot really restrict
|
# single-sign-on this can be nearly anything, so we cannot really restrict
|
||||||
# this. However, we'll restrict it to HTTPS.
|
# this. However, we'll restrict it to HTTPS.
|
||||||
'form-action': ["{dynamic}", "https:"] + (['http:'] if settings.SITE_URL.startswith('http://') else []),
|
'form-action': ["{dynamic}", "https:"] + (['http:'] if settings.SITE_URL.startswith('http://') else []),
|
||||||
}
|
}
|
||||||
|
# Only include pay.google.com for wallet detection purposes on the Payment selection page
|
||||||
|
if (
|
||||||
|
url.url_name == "event.order.pay.change" or
|
||||||
|
(url.url_name == "event.checkout" and url.kwargs['step'] == "payment")
|
||||||
|
):
|
||||||
|
h['script-src'].append('https://pay.google.com')
|
||||||
|
h['frame-src'].append('https://pay.google.com')
|
||||||
|
h['connect-src'].append('https://google.com/pay')
|
||||||
if settings.LOG_CSP:
|
if settings.LOG_CSP:
|
||||||
h['report-uri'] = ["/csp_report/"]
|
h['report-uri'] = ["/csp_report/"]
|
||||||
if 'Content-Security-Policy' in resp:
|
if 'Content-Security-Policy' in resp:
|
||||||
|
|||||||
@@ -2,6 +2,8 @@
|
|||||||
# Generated by Django 1.10.4 on 2017-02-03 14:21
|
# Generated by Django 1.10.4 on 2017-02-03 14:21
|
||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
import django.core.validators
|
import django.core.validators
|
||||||
import django.db.migrations.operations.special
|
import django.db.migrations.operations.special
|
||||||
import django.db.models.deletion
|
import django.db.models.deletion
|
||||||
@@ -26,7 +28,7 @@ def forwards42(apps, schema_editor):
|
|||||||
for s in EventSetting.objects.filter(key='timezone').values('object_id', 'value')
|
for s in EventSetting.objects.filter(key='timezone').values('object_id', 'value')
|
||||||
}
|
}
|
||||||
for order in Order.objects.all():
|
for order in Order.objects.all():
|
||||||
tz = pytz.timezone(etz.get(order.event_id, 'UTC'))
|
tz = ZoneInfo(etz.get(order.event_id, 'UTC'))
|
||||||
order.expires = order.expires.astimezone(tz).replace(hour=23, minute=59, second=59)
|
order.expires = order.expires.astimezone(tz).replace(hour=23, minute=59, second=59)
|
||||||
order.save()
|
order.save()
|
||||||
|
|
||||||
|
|||||||
@@ -2,9 +2,9 @@
|
|||||||
# Generated by Django 1.10.2 on 2016-10-19 17:57
|
# Generated by Django 1.10.2 on 2016-10-19 17:57
|
||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
import pytz
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
from django.db import migrations
|
from django.db import migrations
|
||||||
from django.utils import timezone
|
|
||||||
|
|
||||||
|
|
||||||
def forwards(apps, schema_editor):
|
def forwards(apps, schema_editor):
|
||||||
@@ -15,7 +15,7 @@ def forwards(apps, schema_editor):
|
|||||||
for s in EventSetting.objects.filter(key='timezone').values('object_id', 'value')
|
for s in EventSetting.objects.filter(key='timezone').values('object_id', 'value')
|
||||||
}
|
}
|
||||||
for order in Order.objects.all():
|
for order in Order.objects.all():
|
||||||
tz = pytz.timezone(etz.get(order.event_id, 'UTC'))
|
tz = ZoneInfo(etz.get(order.event_id, 'UTC'))
|
||||||
order.expires = order.expires.astimezone(tz).replace(hour=23, minute=59, second=59)
|
order.expires = order.expires.astimezone(tz).replace(hour=23, minute=59, second=59)
|
||||||
order.save()
|
order.save()
|
||||||
|
|
||||||
|
|||||||
@@ -3,7 +3,6 @@
|
|||||||
|
|
||||||
from django.core.exceptions import ImproperlyConfigured
|
from django.core.exceptions import ImproperlyConfigured
|
||||||
from django.db import migrations, models
|
from django.db import migrations, models
|
||||||
from django_mysql.checks import mysql_connections
|
|
||||||
|
|
||||||
|
|
||||||
def set_attendee_name_parts(apps, schema_editor):
|
def set_attendee_name_parts(apps, schema_editor):
|
||||||
@@ -24,40 +23,12 @@ def set_attendee_name_parts(apps, schema_editor):
|
|||||||
ia.save(update_fields=['name_parts'])
|
ia.save(update_fields=['name_parts'])
|
||||||
|
|
||||||
|
|
||||||
def check_mysqlversion(apps, schema_editor):
|
|
||||||
errors = []
|
|
||||||
any_conn_works = False
|
|
||||||
conns = list(mysql_connections())
|
|
||||||
found = 'Unknown version'
|
|
||||||
for alias, conn in conns:
|
|
||||||
if hasattr(conn, 'mysql_is_mariadb') and conn.mysql_is_mariadb and hasattr(conn, 'mysql_version'):
|
|
||||||
if conn.mysql_version >= (10, 2, 7):
|
|
||||||
any_conn_works = True
|
|
||||||
else:
|
|
||||||
found = 'MariaDB ' + '.'.join(str(v) for v in conn.mysql_version)
|
|
||||||
elif hasattr(conn, 'mysql_version'):
|
|
||||||
if conn.mysql_version >= (5, 7):
|
|
||||||
any_conn_works = True
|
|
||||||
else:
|
|
||||||
found = 'MySQL ' + '.'.join(str(v) for v in conn.mysql_version)
|
|
||||||
|
|
||||||
if conns and not any_conn_works:
|
|
||||||
raise ImproperlyConfigured(
|
|
||||||
'As of pretix 2.2, you need MySQL 5.7+ or MariaDB 10.2.7+ to run pretix. However, we detected a '
|
|
||||||
'database connection to {}'.format(found)
|
|
||||||
)
|
|
||||||
return errors
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
dependencies = [
|
dependencies = [
|
||||||
('pretixbase', '0101_auto_20181025_2255'),
|
('pretixbase', '0101_auto_20181025_2255'),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.RunPython(
|
|
||||||
check_mysqlversion, migrations.RunPython.noop
|
|
||||||
),
|
|
||||||
migrations.RenameField(
|
migrations.RenameField(
|
||||||
model_name='cartposition',
|
model_name='cartposition',
|
||||||
old_name='attendee_name',
|
old_name='attendee_name',
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
# Generated by Django 3.2.4 on 2021-09-30 10:25
|
# Generated by Django 3.2.4 on 2021-09-30 10:25
|
||||||
from datetime import datetime
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
from django.db import migrations, models
|
from django.db import migrations, models
|
||||||
from pytz import UTC
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
@@ -15,7 +14,7 @@ class Migration(migrations.Migration):
|
|||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name='invoice',
|
model_name='invoice',
|
||||||
name='sent_to_customer',
|
name='sent_to_customer',
|
||||||
field=models.DateTimeField(blank=True, null=True, default=UTC.localize(datetime(1970, 1, 1, 0, 0, 0, 0))),
|
field=models.DateTimeField(blank=True, null=True, default=datetime(1970, 1, 1, 0, 0, 0, 0, tzinfo=timezone.utc)),
|
||||||
preserve_default=False,
|
preserve_default=False,
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -50,6 +50,6 @@ class Migration(migrations.Migration):
|
|||||||
],
|
],
|
||||||
options={
|
options={
|
||||||
'unique_together': {('event', 'secret')},
|
'unique_together': {('event', 'secret')},
|
||||||
} if 'mysql' not in settings.DATABASES['default']['ENGINE'] else {}
|
}
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
38
src/pretix/base/migrations/0242_auto_20230512_1008.py
Normal file
38
src/pretix/base/migrations/0242_auto_20230512_1008.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
# Generated by Django 3.2.18 on 2023-05-12 10:08
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('pretixbase', '0241_itemmetaproperties_required_values'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RenameField(
|
||||||
|
model_name='giftcardacceptance',
|
||||||
|
old_name='collector',
|
||||||
|
new_name='acceptor',
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='giftcardacceptance',
|
||||||
|
name='active',
|
||||||
|
field=models.BooleanField(default=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='giftcardacceptance',
|
||||||
|
name='reusable_media',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='giftcardacceptance',
|
||||||
|
name='issuer',
|
||||||
|
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='gift_card_acceptor_acceptance', to='pretixbase.organizer'),
|
||||||
|
),
|
||||||
|
migrations.AlterUniqueTogether(
|
||||||
|
name='giftcardacceptance',
|
||||||
|
unique_together={('issuer', 'acceptor')},
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
# Generated by Django 4.1.9 on 2023-06-26 10:59
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('pretixbase', '0242_auto_20230512_1008'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='device',
|
||||||
|
name='os_name',
|
||||||
|
field=models.CharField(max_length=190, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='device',
|
||||||
|
name='os_version',
|
||||||
|
field=models.CharField(max_length=190, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
35
src/pretix/base/migrations/0244_mediumkeyset.py
Normal file
35
src/pretix/base/migrations/0244_mediumkeyset.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
# Generated by Django 3.2.18 on 2023-05-17 11:32
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('pretixbase', '0243_device_os_name_and_os_version'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='device',
|
||||||
|
name='rsa_pubkey',
|
||||||
|
field=models.TextField(null=True),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='MediumKeySet',
|
||||||
|
fields=[
|
||||||
|
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
|
||||||
|
('public_id', models.BigIntegerField(unique=True)),
|
||||||
|
('media_type', models.CharField(max_length=100)),
|
||||||
|
('active', models.BooleanField(default=True)),
|
||||||
|
('uid_key', models.BinaryField()),
|
||||||
|
('diversification_key', models.BinaryField()),
|
||||||
|
('organizer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='medium_key_sets', to='pretixbase.organizer')),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.AddConstraint(
|
||||||
|
model_name='mediumkeyset',
|
||||||
|
constraint=models.UniqueConstraint(condition=models.Q(('active', True)), fields=('organizer', 'media_type'), name='keyset_unique_active'),
|
||||||
|
),
|
||||||
|
]
|
||||||
34
src/pretix/base/migrations/0245_discount_benefit_products.py
Normal file
34
src/pretix/base/migrations/0245_discount_benefit_products.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
# Generated by Django 4.2.4 on 2023-08-28 12:30
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
dependencies = [
|
||||||
|
("pretixbase", "0244_mediumkeyset"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="discount",
|
||||||
|
name="benefit_apply_to_addons",
|
||||||
|
field=models.BooleanField(default=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="discount",
|
||||||
|
name="benefit_ignore_voucher_discounted",
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="discount",
|
||||||
|
name="benefit_limit_products",
|
||||||
|
field=models.ManyToManyField(
|
||||||
|
related_name="benefit_discounts", to="pretixbase.item"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="discount",
|
||||||
|
name="benefit_same_products",
|
||||||
|
field=models.BooleanField(default=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -97,7 +97,7 @@ def _transactions_mark_order_dirty(order_id, using=None):
|
|||||||
if getattr(dirty_transactions, 'order_ids', None) is None:
|
if getattr(dirty_transactions, 'order_ids', None) is None:
|
||||||
dirty_transactions.order_ids = set()
|
dirty_transactions.order_ids = set()
|
||||||
|
|
||||||
if _check_for_dirty_orders not in [func for savepoint_id, func in conn.run_on_commit]:
|
if _check_for_dirty_orders not in [func for (savepoint_id, func, *__) in conn.run_on_commit]:
|
||||||
transaction.on_commit(_check_for_dirty_orders, using)
|
transaction.on_commit(_check_for_dirty_orders, using)
|
||||||
dirty_transactions.order_ids.clear() # This is necessary to clean up after old threads with rollbacked transactions
|
dirty_transactions.order_ids.clear() # This is necessary to clean up after old threads with rollbacked transactions
|
||||||
|
|
||||||
|
|||||||
@@ -121,14 +121,23 @@ class Customer(LoggedModel):
|
|||||||
if self.email:
|
if self.email:
|
||||||
self.email = self.email.lower()
|
self.email = self.email.lower()
|
||||||
if 'update_fields' in kwargs and 'last_modified' not in kwargs['update_fields']:
|
if 'update_fields' in kwargs and 'last_modified' not in kwargs['update_fields']:
|
||||||
kwargs['update_fields'] = list(kwargs['update_fields']) + ['last_modified']
|
kwargs['update_fields'] = {'last_modified'}.union(kwargs['update_fields'])
|
||||||
if not self.identifier:
|
if not self.identifier:
|
||||||
self.assign_identifier()
|
self.assign_identifier()
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
|
||||||
if self.name_parts:
|
if self.name_parts:
|
||||||
self.name_cached = self.name
|
name = self.name
|
||||||
|
if self.name_cached != name:
|
||||||
|
self.name_cached = name
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'name_cached'}.union(kwargs['update_fields'])
|
||||||
else:
|
else:
|
||||||
self.name_cached = ""
|
if self.name_cached != "" or self.name_parts != {}:
|
||||||
self.name_parts = {}
|
self.name_cached = ""
|
||||||
|
self.name_parts = {}
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'name_cached', 'name_parts'}.union(kwargs['update_fields'])
|
||||||
super().save(**kwargs)
|
super().save(**kwargs)
|
||||||
|
|
||||||
def anonymize(self):
|
def anonymize(self):
|
||||||
|
|||||||
@@ -98,6 +98,8 @@ class Gate(LoggedModel):
|
|||||||
if not Gate.objects.filter(organizer=self.organizer, identifier=code).exists():
|
if not Gate.objects.filter(organizer=self.organizer, identifier=code).exists():
|
||||||
self.identifier = code
|
self.identifier = code
|
||||||
break
|
break
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
|
||||||
return super().save(*args, **kwargs)
|
return super().save(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
@@ -141,6 +143,14 @@ class Device(LoggedModel):
|
|||||||
max_length=190,
|
max_length=190,
|
||||||
null=True, blank=True
|
null=True, blank=True
|
||||||
)
|
)
|
||||||
|
os_name = models.CharField(
|
||||||
|
max_length=190,
|
||||||
|
null=True, blank=True
|
||||||
|
)
|
||||||
|
os_version = models.CharField(
|
||||||
|
max_length=190,
|
||||||
|
null=True, blank=True
|
||||||
|
)
|
||||||
software_brand = models.CharField(
|
software_brand = models.CharField(
|
||||||
max_length=190,
|
max_length=190,
|
||||||
null=True, blank=True
|
null=True, blank=True
|
||||||
@@ -156,6 +166,10 @@ class Device(LoggedModel):
|
|||||||
null=True,
|
null=True,
|
||||||
blank=False
|
blank=False
|
||||||
)
|
)
|
||||||
|
rsa_pubkey = models.TextField(
|
||||||
|
null=True,
|
||||||
|
blank=True,
|
||||||
|
)
|
||||||
info = models.JSONField(
|
info = models.JSONField(
|
||||||
null=True, blank=True,
|
null=True, blank=True,
|
||||||
)
|
)
|
||||||
@@ -173,6 +187,8 @@ class Device(LoggedModel):
|
|||||||
def save(self, *args, **kwargs):
|
def save(self, *args, **kwargs):
|
||||||
if not self.device_id:
|
if not self.device_id:
|
||||||
self.device_id = (self.organizer.devices.aggregate(m=Max('device_id'))['m'] or 0) + 1
|
self.device_id = (self.organizer.devices.aggregate(m=Max('device_id'))['m'] or 0) + 1
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'device_id'}.union(kwargs['update_fields'])
|
||||||
super().save(*args, **kwargs)
|
super().save(*args, **kwargs)
|
||||||
|
|
||||||
def permission_set(self) -> set:
|
def permission_set(self) -> set:
|
||||||
|
|||||||
@@ -99,7 +99,7 @@ class Discount(LoggedModel):
|
|||||||
)
|
)
|
||||||
condition_apply_to_addons = models.BooleanField(
|
condition_apply_to_addons = models.BooleanField(
|
||||||
default=True,
|
default=True,
|
||||||
verbose_name=_("Apply to add-on products"),
|
verbose_name=_("Count add-on products"),
|
||||||
help_text=_("Discounts never apply to bundled products"),
|
help_text=_("Discounts never apply to bundled products"),
|
||||||
)
|
)
|
||||||
condition_ignore_voucher_discounted = models.BooleanField(
|
condition_ignore_voucher_discounted = models.BooleanField(
|
||||||
@@ -107,7 +107,7 @@ class Discount(LoggedModel):
|
|||||||
verbose_name=_("Ignore products discounted by a voucher"),
|
verbose_name=_("Ignore products discounted by a voucher"),
|
||||||
help_text=_("If this option is checked, products that already received a discount through a voucher will not "
|
help_text=_("If this option is checked, products that already received a discount through a voucher will not "
|
||||||
"be considered for this discount. However, products that use a voucher only to e.g. unlock a "
|
"be considered for this discount. However, products that use a voucher only to e.g. unlock a "
|
||||||
"hidden product or gain access to sold-out quota will still receive the discount."),
|
"hidden product or gain access to sold-out quota will still be considered."),
|
||||||
)
|
)
|
||||||
condition_min_count = models.PositiveIntegerField(
|
condition_min_count = models.PositiveIntegerField(
|
||||||
verbose_name=_('Minimum number of matching products'),
|
verbose_name=_('Minimum number of matching products'),
|
||||||
@@ -120,6 +120,19 @@ class Discount(LoggedModel):
|
|||||||
default=Decimal('0.00'),
|
default=Decimal('0.00'),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
benefit_same_products = models.BooleanField(
|
||||||
|
default=True,
|
||||||
|
verbose_name=_("Apply discount to same set of products"),
|
||||||
|
help_text=_("By default, the discount is applied across the same selection of products than the condition for "
|
||||||
|
"the discount given above. If you want, you can however also select a different selection of "
|
||||||
|
"products.")
|
||||||
|
)
|
||||||
|
benefit_limit_products = models.ManyToManyField(
|
||||||
|
'Item',
|
||||||
|
verbose_name=_("Apply discount to specific products"),
|
||||||
|
related_name='benefit_discounts',
|
||||||
|
blank=True
|
||||||
|
)
|
||||||
benefit_discount_matching_percent = models.DecimalField(
|
benefit_discount_matching_percent = models.DecimalField(
|
||||||
verbose_name=_('Percentual discount on matching products'),
|
verbose_name=_('Percentual discount on matching products'),
|
||||||
decimal_places=2,
|
decimal_places=2,
|
||||||
@@ -139,6 +152,18 @@ class Discount(LoggedModel):
|
|||||||
blank=True,
|
blank=True,
|
||||||
validators=[MinValueValidator(1)],
|
validators=[MinValueValidator(1)],
|
||||||
)
|
)
|
||||||
|
benefit_apply_to_addons = models.BooleanField(
|
||||||
|
default=True,
|
||||||
|
verbose_name=_("Apply to add-on products"),
|
||||||
|
help_text=_("Discounts never apply to bundled products"),
|
||||||
|
)
|
||||||
|
benefit_ignore_voucher_discounted = models.BooleanField(
|
||||||
|
default=False,
|
||||||
|
verbose_name=_("Ignore products discounted by a voucher"),
|
||||||
|
help_text=_("If this option is checked, products that already received a discount through a voucher will not "
|
||||||
|
"be discounted. However, products that use a voucher only to e.g. unlock a hidden product or gain "
|
||||||
|
"access to sold-out quota will still receive the discount."),
|
||||||
|
)
|
||||||
|
|
||||||
# more feature ideas:
|
# more feature ideas:
|
||||||
# - max_usages_per_order
|
# - max_usages_per_order
|
||||||
@@ -187,6 +212,14 @@ class Discount(LoggedModel):
|
|||||||
'on a minimum value.')
|
'on a minimum value.')
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if data.get('subevent_mode') == cls.SUBEVENT_MODE_DISTINCT and not data.get('benefit_same_products'):
|
||||||
|
raise ValidationError(
|
||||||
|
{'benefit_same_products': [
|
||||||
|
_('You cannot apply the discount to a different set of products if the discount is only valid '
|
||||||
|
'for bookings of different dates.')
|
||||||
|
]}
|
||||||
|
)
|
||||||
|
|
||||||
def allow_delete(self):
|
def allow_delete(self):
|
||||||
return not self.orderposition_set.exists()
|
return not self.orderposition_set.exists()
|
||||||
|
|
||||||
@@ -197,6 +230,7 @@ class Discount(LoggedModel):
|
|||||||
'condition_min_value': self.condition_min_value,
|
'condition_min_value': self.condition_min_value,
|
||||||
'benefit_only_apply_to_cheapest_n_matches': self.benefit_only_apply_to_cheapest_n_matches,
|
'benefit_only_apply_to_cheapest_n_matches': self.benefit_only_apply_to_cheapest_n_matches,
|
||||||
'subevent_mode': self.subevent_mode,
|
'subevent_mode': self.subevent_mode,
|
||||||
|
'benefit_same_products': self.benefit_same_products,
|
||||||
})
|
})
|
||||||
|
|
||||||
def is_available_by_time(self, now_dt=None) -> bool:
|
def is_available_by_time(self, now_dt=None) -> bool:
|
||||||
@@ -207,14 +241,14 @@ class Discount(LoggedModel):
|
|||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _apply_min_value(self, positions, idx_group, result):
|
def _apply_min_value(self, positions, condition_idx_group, benefit_idx_group, result):
|
||||||
if self.condition_min_value and sum(positions[idx][2] for idx in idx_group) < self.condition_min_value:
|
if self.condition_min_value and sum(positions[idx][2] for idx in condition_idx_group) < self.condition_min_value:
|
||||||
return
|
return
|
||||||
|
|
||||||
if self.condition_min_count or self.benefit_only_apply_to_cheapest_n_matches:
|
if self.condition_min_count or self.benefit_only_apply_to_cheapest_n_matches:
|
||||||
raise ValueError('Validation invariant violated.')
|
raise ValueError('Validation invariant violated.')
|
||||||
|
|
||||||
for idx in idx_group:
|
for idx in benefit_idx_group:
|
||||||
previous_price = positions[idx][2]
|
previous_price = positions[idx][2]
|
||||||
new_price = round_decimal(
|
new_price = round_decimal(
|
||||||
previous_price * (Decimal('100.00') - self.benefit_discount_matching_percent) / Decimal('100.00'),
|
previous_price * (Decimal('100.00') - self.benefit_discount_matching_percent) / Decimal('100.00'),
|
||||||
@@ -222,8 +256,8 @@ class Discount(LoggedModel):
|
|||||||
)
|
)
|
||||||
result[idx] = new_price
|
result[idx] = new_price
|
||||||
|
|
||||||
def _apply_min_count(self, positions, idx_group, result):
|
def _apply_min_count(self, positions, condition_idx_group, benefit_idx_group, result):
|
||||||
if len(idx_group) < self.condition_min_count:
|
if len(condition_idx_group) < self.condition_min_count:
|
||||||
return
|
return
|
||||||
|
|
||||||
if not self.condition_min_count or self.condition_min_value:
|
if not self.condition_min_count or self.condition_min_value:
|
||||||
@@ -233,15 +267,17 @@ class Discount(LoggedModel):
|
|||||||
if not self.condition_min_count:
|
if not self.condition_min_count:
|
||||||
raise ValueError('Validation invariant violated.')
|
raise ValueError('Validation invariant violated.')
|
||||||
|
|
||||||
idx_group = sorted(idx_group, key=lambda idx: (positions[idx][2], -idx)) # sort by line_price
|
condition_idx_group = sorted(condition_idx_group, key=lambda idx: (positions[idx][2], -idx)) # sort by line_price
|
||||||
|
benefit_idx_group = sorted(benefit_idx_group, key=lambda idx: (positions[idx][2], -idx)) # sort by line_price
|
||||||
|
|
||||||
# Prevent over-consuming of items, i.e. if our discount is "buy 2, get 1 free", we only
|
# Prevent over-consuming of items, i.e. if our discount is "buy 2, get 1 free", we only
|
||||||
# want to match multiples of 3
|
# want to match multiples of 3
|
||||||
consume_idx = idx_group[:len(idx_group) // self.condition_min_count * self.condition_min_count]
|
n_groups = min(len(condition_idx_group) // self.condition_min_count, len(benefit_idx_group))
|
||||||
benefit_idx = idx_group[:len(idx_group) // self.condition_min_count * self.benefit_only_apply_to_cheapest_n_matches]
|
consume_idx = condition_idx_group[:n_groups * self.condition_min_count]
|
||||||
|
benefit_idx = benefit_idx_group[:n_groups * self.benefit_only_apply_to_cheapest_n_matches]
|
||||||
else:
|
else:
|
||||||
consume_idx = idx_group
|
consume_idx = condition_idx_group
|
||||||
benefit_idx = idx_group
|
benefit_idx = benefit_idx_group
|
||||||
|
|
||||||
for idx in benefit_idx:
|
for idx in benefit_idx:
|
||||||
previous_price = positions[idx][2]
|
previous_price = positions[idx][2]
|
||||||
@@ -276,7 +312,7 @@ class Discount(LoggedModel):
|
|||||||
limit_products = {p.pk for p in self.condition_limit_products.all()}
|
limit_products = {p.pk for p in self.condition_limit_products.all()}
|
||||||
|
|
||||||
# First, filter out everything not even covered by our product scope
|
# First, filter out everything not even covered by our product scope
|
||||||
initial_candidates = [
|
condition_candidates = [
|
||||||
idx
|
idx
|
||||||
for idx, (item_id, subevent_id, line_price_gross, is_addon_to, voucher_discount) in positions.items()
|
for idx, (item_id, subevent_id, line_price_gross, is_addon_to, voucher_discount) in positions.items()
|
||||||
if (
|
if (
|
||||||
@@ -286,11 +322,25 @@ class Discount(LoggedModel):
|
|||||||
)
|
)
|
||||||
]
|
]
|
||||||
|
|
||||||
|
if self.benefit_same_products:
|
||||||
|
benefit_candidates = list(condition_candidates)
|
||||||
|
else:
|
||||||
|
benefit_products = {p.pk for p in self.benefit_limit_products.all()}
|
||||||
|
benefit_candidates = [
|
||||||
|
idx
|
||||||
|
for idx, (item_id, subevent_id, line_price_gross, is_addon_to, voucher_discount) in positions.items()
|
||||||
|
if (
|
||||||
|
item_id in benefit_products and
|
||||||
|
(self.benefit_apply_to_addons or not is_addon_to) and
|
||||||
|
(not self.benefit_ignore_voucher_discounted or voucher_discount is None or voucher_discount == Decimal('0.00'))
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
if self.subevent_mode == self.SUBEVENT_MODE_MIXED: # also applies to non-series events
|
if self.subevent_mode == self.SUBEVENT_MODE_MIXED: # also applies to non-series events
|
||||||
if self.condition_min_count:
|
if self.condition_min_count:
|
||||||
self._apply_min_count(positions, initial_candidates, result)
|
self._apply_min_count(positions, condition_candidates, benefit_candidates, result)
|
||||||
else:
|
else:
|
||||||
self._apply_min_value(positions, initial_candidates, result)
|
self._apply_min_value(positions, condition_candidates, benefit_candidates, result)
|
||||||
|
|
||||||
elif self.subevent_mode == self.SUBEVENT_MODE_SAME:
|
elif self.subevent_mode == self.SUBEVENT_MODE_SAME:
|
||||||
def key(idx):
|
def key(idx):
|
||||||
@@ -299,17 +349,18 @@ class Discount(LoggedModel):
|
|||||||
# Build groups of candidates with the same subevent, then apply our regular algorithm
|
# Build groups of candidates with the same subevent, then apply our regular algorithm
|
||||||
# to each group
|
# to each group
|
||||||
|
|
||||||
_groups = groupby(sorted(initial_candidates, key=key), key=key)
|
_groups = groupby(sorted(condition_candidates, key=key), key=key)
|
||||||
candidate_groups = [list(g) for k, g in _groups]
|
candidate_groups = [(k, list(g)) for k, g in _groups]
|
||||||
|
|
||||||
for g in candidate_groups:
|
for subevent_id, g in candidate_groups:
|
||||||
|
benefit_g = [idx for idx in benefit_candidates if positions[idx][1] == subevent_id]
|
||||||
if self.condition_min_count:
|
if self.condition_min_count:
|
||||||
self._apply_min_count(positions, g, result)
|
self._apply_min_count(positions, g, benefit_g, result)
|
||||||
else:
|
else:
|
||||||
self._apply_min_value(positions, g, result)
|
self._apply_min_value(positions, g, benefit_g, result)
|
||||||
|
|
||||||
elif self.subevent_mode == self.SUBEVENT_MODE_DISTINCT:
|
elif self.subevent_mode == self.SUBEVENT_MODE_DISTINCT:
|
||||||
if self.condition_min_value:
|
if self.condition_min_value or not self.benefit_same_products:
|
||||||
raise ValueError('Validation invariant violated.')
|
raise ValueError('Validation invariant violated.')
|
||||||
|
|
||||||
# Build optimal groups of candidates with distinct subevents, then apply our regular algorithm
|
# Build optimal groups of candidates with distinct subevents, then apply our regular algorithm
|
||||||
@@ -336,7 +387,7 @@ class Discount(LoggedModel):
|
|||||||
candidates = []
|
candidates = []
|
||||||
cardinality = None
|
cardinality = None
|
||||||
for se, l in subevent_to_idx.items():
|
for se, l in subevent_to_idx.items():
|
||||||
l = [ll for ll in l if ll in initial_candidates and ll not in current_group]
|
l = [ll for ll in l if ll in condition_candidates and ll not in current_group]
|
||||||
if cardinality and len(l) != cardinality:
|
if cardinality and len(l) != cardinality:
|
||||||
continue
|
continue
|
||||||
if se not in {positions[idx][1] for idx in current_group}:
|
if se not in {positions[idx][1] for idx in current_group}:
|
||||||
@@ -373,5 +424,5 @@ class Discount(LoggedModel):
|
|||||||
break
|
break
|
||||||
|
|
||||||
for g in candidate_groups:
|
for g in candidate_groups:
|
||||||
self._apply_min_count(positions, g, result)
|
self._apply_min_count(positions, g, g, result)
|
||||||
return result
|
return result
|
||||||
|
|||||||
@@ -40,8 +40,9 @@ from collections import Counter, OrderedDict, defaultdict
|
|||||||
from datetime import datetime, time, timedelta
|
from datetime import datetime, time, timedelta
|
||||||
from operator import attrgetter
|
from operator import attrgetter
|
||||||
from urllib.parse import urljoin
|
from urllib.parse import urljoin
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
import pytz
|
import pytz_deprecation_shim
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.exceptions import ValidationError
|
from django.core.exceptions import ValidationError
|
||||||
from django.core.files.storage import default_storage
|
from django.core.files.storage import default_storage
|
||||||
@@ -214,7 +215,7 @@ class EventMixin:
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def timezone(self):
|
def timezone(self):
|
||||||
return pytz.timezone(self.settings.timezone)
|
return pytz_deprecation_shim.timezone(self.settings.timezone)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def effective_presale_end(self):
|
def effective_presale_end(self):
|
||||||
@@ -773,7 +774,7 @@ class Event(EventMixin, LoggedModel):
|
|||||||
"""
|
"""
|
||||||
The last datetime of payments for this event.
|
The last datetime of payments for this event.
|
||||||
"""
|
"""
|
||||||
tz = pytz.timezone(self.settings.timezone)
|
tz = ZoneInfo(self.settings.timezone)
|
||||||
return make_aware(datetime.combine(
|
return make_aware(datetime.combine(
|
||||||
self.settings.get('payment_term_last', as_type=RelativeDateWrapper).datetime(self).date(),
|
self.settings.get('payment_term_last', as_type=RelativeDateWrapper).datetime(self).date(),
|
||||||
time(hour=23, minute=59, second=59)
|
time(hour=23, minute=59, second=59)
|
||||||
@@ -906,14 +907,18 @@ class Event(EventMixin, LoggedModel):
|
|||||||
self.items.filter(hidden_if_available_id=oldid).update(hidden_if_available=q)
|
self.items.filter(hidden_if_available_id=oldid).update(hidden_if_available=q)
|
||||||
|
|
||||||
for d in Discount.objects.filter(event=other).prefetch_related('condition_limit_products'):
|
for d in Discount.objects.filter(event=other).prefetch_related('condition_limit_products'):
|
||||||
items = list(d.condition_limit_products.all())
|
c_items = list(d.condition_limit_products.all())
|
||||||
|
b_items = list(d.benefit_limit_products.all())
|
||||||
d.pk = None
|
d.pk = None
|
||||||
d.event = self
|
d.event = self
|
||||||
d.save(force_insert=True)
|
d.save(force_insert=True)
|
||||||
d.log_action('pretix.object.cloned')
|
d.log_action('pretix.object.cloned')
|
||||||
for i in items:
|
for i in c_items:
|
||||||
if i.pk in item_map:
|
if i.pk in item_map:
|
||||||
d.condition_limit_products.add(item_map[i.pk])
|
d.condition_limit_products.add(item_map[i.pk])
|
||||||
|
for i in b_items:
|
||||||
|
if i.pk in item_map:
|
||||||
|
d.benefit_limit_products.add(item_map[i.pk])
|
||||||
|
|
||||||
question_map = {}
|
question_map = {}
|
||||||
for q in Question.objects.filter(event=other).prefetch_related('items', 'options'):
|
for q in Question.objects.filter(event=other).prefetch_related('items', 'options'):
|
||||||
@@ -1276,6 +1281,9 @@ class Event(EventMixin, LoggedModel):
|
|||||||
return not self.orders.exists() and not self.invoices.exists()
|
return not self.orders.exists() and not self.invoices.exists()
|
||||||
|
|
||||||
def delete_sub_objects(self):
|
def delete_sub_objects(self):
|
||||||
|
from .checkin import Checkin
|
||||||
|
|
||||||
|
Checkin.all.filter(successful=False, list__event=self).delete()
|
||||||
self.cartposition_set.filter(addon_to__isnull=False).delete()
|
self.cartposition_set.filter(addon_to__isnull=False).delete()
|
||||||
self.cartposition_set.all().delete()
|
self.cartposition_set.all().delete()
|
||||||
self.vouchers.all().delete()
|
self.vouchers.all().delete()
|
||||||
|
|||||||
@@ -19,10 +19,11 @@
|
|||||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||||
# <https://www.gnu.org/licenses/>.
|
# <https://www.gnu.org/licenses/>.
|
||||||
#
|
#
|
||||||
|
import zoneinfo
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
import pytz
|
|
||||||
from dateutil.rrule import rrulestr
|
from dateutil.rrule import rrulestr
|
||||||
|
from dateutil.tz import datetime_exists
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.serializers.json import DjangoJSONEncoder
|
from django.core.serializers.json import DjangoJSONEncoder
|
||||||
from django.db import models
|
from django.db import models
|
||||||
@@ -108,12 +109,9 @@ class AbstractScheduledExport(LoggedModel):
|
|||||||
self.schedule_next_run = None
|
self.schedule_next_run = None
|
||||||
return
|
return
|
||||||
|
|
||||||
try:
|
self.schedule_next_run = make_aware(datetime.combine(new_d.date(), self.schedule_rrule_time), tz)
|
||||||
self.schedule_next_run = make_aware(datetime.combine(new_d.date(), self.schedule_rrule_time), tz)
|
if not datetime_exists(self.schedule_next_run):
|
||||||
except pytz.exceptions.AmbiguousTimeError:
|
self.schedule_next_run += timedelta(hours=1)
|
||||||
self.schedule_next_run = make_aware(datetime.combine(new_d.date(), self.schedule_rrule_time), tz, is_dst=False)
|
|
||||||
except pytz.exceptions.NonExistentTimeError:
|
|
||||||
self.schedule_next_run = make_aware(datetime.combine(new_d.date(), self.schedule_rrule_time) + timedelta(hours=1), tz)
|
|
||||||
|
|
||||||
|
|
||||||
class ScheduledEventExport(AbstractScheduledExport):
|
class ScheduledEventExport(AbstractScheduledExport):
|
||||||
@@ -136,4 +134,4 @@ class ScheduledOrganizerExport(AbstractScheduledExport):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def tz(self):
|
def tz(self):
|
||||||
return pytz.timezone(self.timezone)
|
return zoneinfo.ZoneInfo(self.timezone)
|
||||||
|
|||||||
@@ -46,14 +46,19 @@ def gen_giftcard_secret(length=8):
|
|||||||
class GiftCardAcceptance(models.Model):
|
class GiftCardAcceptance(models.Model):
|
||||||
issuer = models.ForeignKey(
|
issuer = models.ForeignKey(
|
||||||
'Organizer',
|
'Organizer',
|
||||||
related_name='gift_card_collector_acceptance',
|
related_name='gift_card_acceptor_acceptance',
|
||||||
on_delete=models.CASCADE
|
on_delete=models.CASCADE
|
||||||
)
|
)
|
||||||
collector = models.ForeignKey(
|
acceptor = models.ForeignKey(
|
||||||
'Organizer',
|
'Organizer',
|
||||||
related_name='gift_card_issuer_acceptance',
|
related_name='gift_card_issuer_acceptance',
|
||||||
on_delete=models.CASCADE
|
on_delete=models.CASCADE
|
||||||
)
|
)
|
||||||
|
active = models.BooleanField(default=True)
|
||||||
|
reusable_media = models.BooleanField(default=False)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
unique_together = (('issuer', 'acceptor'),)
|
||||||
|
|
||||||
|
|
||||||
class GiftCard(LoggedModel):
|
class GiftCard(LoggedModel):
|
||||||
@@ -114,7 +119,7 @@ class GiftCard(LoggedModel):
|
|||||||
return self.transactions.aggregate(s=Sum('value'))['s'] or Decimal('0.00')
|
return self.transactions.aggregate(s=Sum('value'))['s'] or Decimal('0.00')
|
||||||
|
|
||||||
def accepted_by(self, organizer):
|
def accepted_by(self, organizer):
|
||||||
return self.issuer == organizer or GiftCardAcceptance.objects.filter(issuer=self.issuer, collector=organizer).exists()
|
return self.issuer == organizer or GiftCardAcceptance.objects.filter(issuer=self.issuer, acceptor=organizer, active=True).exists()
|
||||||
|
|
||||||
def save(self, *args, **kwargs):
|
def save(self, *args, **kwargs):
|
||||||
if not self.secret:
|
if not self.secret:
|
||||||
|
|||||||
@@ -251,14 +251,20 @@ class Invoice(models.Model):
|
|||||||
raise ValueError('Every invoice needs to be connected to an order')
|
raise ValueError('Every invoice needs to be connected to an order')
|
||||||
if not self.event:
|
if not self.event:
|
||||||
self.event = self.order.event
|
self.event = self.order.event
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'event'}.union(kwargs['update_fields'])
|
||||||
if not self.organizer:
|
if not self.organizer:
|
||||||
self.organizer = self.order.event.organizer
|
self.organizer = self.order.event.organizer
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'organizer'}.union(kwargs['update_fields'])
|
||||||
if not self.prefix:
|
if not self.prefix:
|
||||||
self.prefix = self.event.settings.invoice_numbers_prefix or (self.event.slug.upper() + '-')
|
self.prefix = self.event.settings.invoice_numbers_prefix or (self.event.slug.upper() + '-')
|
||||||
if self.is_cancellation:
|
if self.is_cancellation:
|
||||||
self.prefix = self.event.settings.invoice_numbers_prefix_cancellations or self.prefix
|
self.prefix = self.event.settings.invoice_numbers_prefix_cancellations or self.prefix
|
||||||
if '%' in self.prefix:
|
if '%' in self.prefix:
|
||||||
self.prefix = self.date.strftime(self.prefix)
|
self.prefix = self.date.strftime(self.prefix)
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'prefix'}.union(kwargs['update_fields'])
|
||||||
|
|
||||||
if not self.invoice_no:
|
if not self.invoice_no:
|
||||||
if self.order.testmode:
|
if self.order.testmode:
|
||||||
@@ -276,8 +282,13 @@ class Invoice(models.Model):
|
|||||||
# Suppress duplicate key errors and try again
|
# Suppress duplicate key errors and try again
|
||||||
if i == 9:
|
if i == 9:
|
||||||
raise
|
raise
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'invoice_no'}.union(kwargs['update_fields'])
|
||||||
|
|
||||||
self.full_invoice_no = self.prefix + self.invoice_no
|
if self.full_invoice_no != self.prefix + self.invoice_no:
|
||||||
|
self.full_invoice_no = self.prefix + self.invoice_no
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'full_invoice_no'}.union(kwargs['update_fields'])
|
||||||
return super().save(*args, **kwargs)
|
return super().save(*args, **kwargs)
|
||||||
|
|
||||||
def delete(self, *args, **kwargs):
|
def delete(self, *args, **kwargs):
|
||||||
|
|||||||
@@ -40,9 +40,11 @@ from collections import Counter, OrderedDict
|
|||||||
from datetime import date, datetime, time, timedelta
|
from datetime import date, datetime, time, timedelta
|
||||||
from decimal import Decimal, DecimalException
|
from decimal import Decimal, DecimalException
|
||||||
from typing import Optional, Tuple
|
from typing import Optional, Tuple
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
import dateutil.parser
|
import dateutil.parser
|
||||||
import pytz
|
import django_redis
|
||||||
|
from dateutil.tz import datetime_exists
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.exceptions import ValidationError
|
from django.core.exceptions import ValidationError
|
||||||
from django.core.validators import (
|
from django.core.validators import (
|
||||||
@@ -56,7 +58,6 @@ from django.utils.functional import cached_property
|
|||||||
from django.utils.timezone import is_naive, make_aware, now
|
from django.utils.timezone import is_naive, make_aware, now
|
||||||
from django.utils.translation import gettext_lazy as _, pgettext_lazy
|
from django.utils.translation import gettext_lazy as _, pgettext_lazy
|
||||||
from django_countries.fields import Country
|
from django_countries.fields import Country
|
||||||
from django_redis import get_redis_connection
|
|
||||||
from django_scopes import ScopedManager
|
from django_scopes import ScopedManager
|
||||||
from i18nfield.fields import I18nCharField, I18nTextField
|
from i18nfield.fields import I18nCharField, I18nTextField
|
||||||
|
|
||||||
@@ -927,22 +928,22 @@ class Item(LoggedModel):
|
|||||||
)
|
)
|
||||||
if self.validity_dynamic_duration_days:
|
if self.validity_dynamic_duration_days:
|
||||||
replace_date += timedelta(days=self.validity_dynamic_duration_days)
|
replace_date += timedelta(days=self.validity_dynamic_duration_days)
|
||||||
valid_until = tz.localize(valid_until.replace(
|
valid_until = valid_until.replace(
|
||||||
year=replace_date.year,
|
year=replace_date.year,
|
||||||
month=replace_date.month,
|
month=replace_date.month,
|
||||||
day=replace_date.day,
|
day=replace_date.day,
|
||||||
hour=23, minute=59, second=59, microsecond=0,
|
hour=23, minute=59, second=59, microsecond=0,
|
||||||
tzinfo=None,
|
tzinfo=tz,
|
||||||
))
|
)
|
||||||
elif self.validity_dynamic_duration_days:
|
elif self.validity_dynamic_duration_days:
|
||||||
replace_date = valid_until.date() + timedelta(days=self.validity_dynamic_duration_days - 1)
|
replace_date = valid_until.date() + timedelta(days=self.validity_dynamic_duration_days - 1)
|
||||||
valid_until = tz.localize(valid_until.replace(
|
valid_until = valid_until.replace(
|
||||||
year=replace_date.year,
|
year=replace_date.year,
|
||||||
month=replace_date.month,
|
month=replace_date.month,
|
||||||
day=replace_date.day,
|
day=replace_date.day,
|
||||||
hour=23, minute=59, second=59, microsecond=0,
|
hour=23, minute=59, second=59, microsecond=0,
|
||||||
tzinfo=None
|
tzinfo=tz
|
||||||
))
|
)
|
||||||
|
|
||||||
if self.validity_dynamic_duration_hours:
|
if self.validity_dynamic_duration_hours:
|
||||||
valid_until += timedelta(hours=self.validity_dynamic_duration_hours)
|
valid_until += timedelta(hours=self.validity_dynamic_duration_hours)
|
||||||
@@ -950,6 +951,9 @@ class Item(LoggedModel):
|
|||||||
if self.validity_dynamic_duration_minutes:
|
if self.validity_dynamic_duration_minutes:
|
||||||
valid_until += timedelta(minutes=self.validity_dynamic_duration_minutes)
|
valid_until += timedelta(minutes=self.validity_dynamic_duration_minutes)
|
||||||
|
|
||||||
|
if not datetime_exists(valid_until):
|
||||||
|
valid_until += timedelta(hours=1)
|
||||||
|
|
||||||
return requested_start, valid_until
|
return requested_start, valid_until
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@@ -1589,6 +1593,8 @@ class Question(LoggedModel):
|
|||||||
if not Question.objects.filter(event=self.event, identifier=code).exists():
|
if not Question.objects.filter(event=self.event, identifier=code).exists():
|
||||||
self.identifier = code
|
self.identifier = code
|
||||||
break
|
break
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
|
||||||
super().save(*args, **kwargs)
|
super().save(*args, **kwargs)
|
||||||
if self.event:
|
if self.event:
|
||||||
self.event.cache.clear()
|
self.event.cache.clear()
|
||||||
@@ -1678,7 +1684,7 @@ class Question(LoggedModel):
|
|||||||
try:
|
try:
|
||||||
dt = dateutil.parser.parse(answer)
|
dt = dateutil.parser.parse(answer)
|
||||||
if is_naive(dt):
|
if is_naive(dt):
|
||||||
dt = make_aware(dt, pytz.timezone(self.event.settings.timezone))
|
dt = make_aware(dt, ZoneInfo(self.event.settings.timezone))
|
||||||
except:
|
except:
|
||||||
raise ValidationError(_('Invalid datetime input.'))
|
raise ValidationError(_('Invalid datetime input.'))
|
||||||
else:
|
else:
|
||||||
@@ -1736,6 +1742,8 @@ class QuestionOption(models.Model):
|
|||||||
if not QuestionOption.objects.filter(question__event=self.question.event, identifier=code).exists():
|
if not QuestionOption.objects.filter(question__event=self.question.event, identifier=code).exists():
|
||||||
self.identifier = code
|
self.identifier = code
|
||||||
break
|
break
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
|
||||||
super().save(*args, **kwargs)
|
super().save(*args, **kwargs)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -1902,8 +1910,13 @@ class Quota(LoggedModel):
|
|||||||
|
|
||||||
def rebuild_cache(self, now_dt=None):
|
def rebuild_cache(self, now_dt=None):
|
||||||
if settings.HAS_REDIS:
|
if settings.HAS_REDIS:
|
||||||
rc = get_redis_connection("redis")
|
rc = django_redis.get_redis_connection("redis")
|
||||||
rc.hdel(f'quotas:{self.event_id}:availabilitycache', str(self.pk))
|
p = rc.pipeline()
|
||||||
|
p.hdel(f'quotas:{self.event_id}:availabilitycache', str(self.pk))
|
||||||
|
p.hdel(f'quotas:{self.event_id}:availabilitycache:nocw', str(self.pk))
|
||||||
|
p.hdel(f'quotas:{self.event_id}:availabilitycache:igcl', str(self.pk))
|
||||||
|
p.hdel(f'quotas:{self.event_id}:availabilitycache:nocw:igcl', str(self.pk))
|
||||||
|
p.execute()
|
||||||
self.availability(now_dt=now_dt)
|
self.availability(now_dt=now_dt)
|
||||||
|
|
||||||
def availability(
|
def availability(
|
||||||
|
|||||||
@@ -88,9 +88,7 @@ class LogEntry(models.Model):
|
|||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
ordering = ('-datetime', '-id')
|
ordering = ('-datetime', '-id')
|
||||||
index_together = [
|
indexes = [models.Index(fields=["datetime", "id"])]
|
||||||
['datetime', 'id']
|
|
||||||
]
|
|
||||||
|
|
||||||
def display(self):
|
def display(self):
|
||||||
from ..signals import logentry_display
|
from ..signals import logentry_display
|
||||||
|
|||||||
@@ -121,5 +121,30 @@ class ReusableMedium(LoggedModel):
|
|||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
unique_together = (("identifier", "type", "organizer"),)
|
unique_together = (("identifier", "type", "organizer"),)
|
||||||
index_together = (("identifier", "type", "organizer"), ("updated", "id"))
|
indexes = [
|
||||||
|
models.Index(fields=("identifier", "type", "organizer")),
|
||||||
|
models.Index(fields=("updated", "id")),
|
||||||
|
]
|
||||||
ordering = "identifier", "type", "organizer"
|
ordering = "identifier", "type", "organizer"
|
||||||
|
|
||||||
|
|
||||||
|
class MediumKeySet(models.Model):
|
||||||
|
organizer = models.ForeignKey('Organizer', on_delete=models.CASCADE, related_name='medium_key_sets')
|
||||||
|
public_id = models.BigIntegerField(
|
||||||
|
unique=True,
|
||||||
|
)
|
||||||
|
media_type = models.CharField(max_length=100)
|
||||||
|
active = models.BooleanField(default=True)
|
||||||
|
uid_key = models.BinaryField()
|
||||||
|
diversification_key = models.BinaryField()
|
||||||
|
|
||||||
|
objects = ScopedManager(organizer='organizer')
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
constraints = [
|
||||||
|
models.UniqueConstraint(
|
||||||
|
fields=["organizer", "media_type"],
|
||||||
|
condition=Q(active=True),
|
||||||
|
name="keyset_unique_active",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|||||||
@@ -42,10 +42,10 @@ from collections import Counter
|
|||||||
from datetime import datetime, time, timedelta
|
from datetime import datetime, time, timedelta
|
||||||
from decimal import Decimal
|
from decimal import Decimal
|
||||||
from typing import Any, Dict, List, Union
|
from typing import Any, Dict, List, Union
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
import dateutil
|
import dateutil
|
||||||
import pycountry
|
import pycountry
|
||||||
import pytz
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.exceptions import ValidationError
|
from django.core.exceptions import ValidationError
|
||||||
from django.db import models, transaction
|
from django.db import models, transaction
|
||||||
@@ -270,9 +270,9 @@ class Order(LockModel, LoggedModel):
|
|||||||
verbose_name = _("Order")
|
verbose_name = _("Order")
|
||||||
verbose_name_plural = _("Orders")
|
verbose_name_plural = _("Orders")
|
||||||
ordering = ("-datetime", "-pk")
|
ordering = ("-datetime", "-pk")
|
||||||
index_together = [
|
indexes = [
|
||||||
["datetime", "id"],
|
models.Index(fields=["datetime", "id"]),
|
||||||
["last_modified", "id"],
|
models.Index(fields=["last_modified", "id"]),
|
||||||
]
|
]
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
@@ -461,14 +461,20 @@ class Order(LockModel, LoggedModel):
|
|||||||
return '{event}-{code}'.format(event=self.event.slug.upper(), code=self.code)
|
return '{event}-{code}'.format(event=self.event.slug.upper(), code=self.code)
|
||||||
|
|
||||||
def save(self, **kwargs):
|
def save(self, **kwargs):
|
||||||
if 'update_fields' in kwargs and 'last_modified' not in kwargs['update_fields']:
|
if 'update_fields' in kwargs:
|
||||||
kwargs['update_fields'] = list(kwargs['update_fields']) + ['last_modified']
|
kwargs['update_fields'] = {'last_modified'}.union(kwargs['update_fields'])
|
||||||
if not self.code:
|
if not self.code:
|
||||||
self.assign_code()
|
self.assign_code()
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'code'}.union(kwargs['update_fields'])
|
||||||
if not self.datetime:
|
if not self.datetime:
|
||||||
self.datetime = now()
|
self.datetime = now()
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'datetime'}.union(kwargs['update_fields'])
|
||||||
if not self.expires:
|
if not self.expires:
|
||||||
self.set_expires()
|
self.set_expires()
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'expires'}.union(kwargs['update_fields'])
|
||||||
|
|
||||||
is_new = not self.pk
|
is_new = not self.pk
|
||||||
update_fields = kwargs.get('update_fields', [])
|
update_fields = kwargs.get('update_fields', [])
|
||||||
@@ -496,7 +502,7 @@ class Order(LockModel, LoggedModel):
|
|||||||
|
|
||||||
def set_expires(self, now_dt=None, subevents=None):
|
def set_expires(self, now_dt=None, subevents=None):
|
||||||
now_dt = now_dt or now()
|
now_dt = now_dt or now()
|
||||||
tz = pytz.timezone(self.event.settings.timezone)
|
tz = ZoneInfo(self.event.settings.timezone)
|
||||||
mode = self.event.settings.get('payment_term_mode')
|
mode = self.event.settings.get('payment_term_mode')
|
||||||
if mode == 'days':
|
if mode == 'days':
|
||||||
exp_by_date = now_dt.astimezone(tz) + timedelta(days=self.event.settings.get('payment_term_days', as_type=int))
|
exp_by_date = now_dt.astimezone(tz) + timedelta(days=self.event.settings.get('payment_term_days', as_type=int))
|
||||||
@@ -870,7 +876,7 @@ class Order(LockModel, LoggedModel):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def payment_term_last(self):
|
def payment_term_last(self):
|
||||||
tz = pytz.timezone(self.event.settings.timezone)
|
tz = ZoneInfo(self.event.settings.timezone)
|
||||||
term_last = self.event.settings.get('payment_term_last', as_type=RelativeDateWrapper)
|
term_last = self.event.settings.get('payment_term_last', as_type=RelativeDateWrapper)
|
||||||
if term_last:
|
if term_last:
|
||||||
if self.event.has_subevents:
|
if self.event.has_subevents:
|
||||||
@@ -890,6 +896,33 @@ class Order(LockModel, LoggedModel):
|
|||||||
), tz)
|
), tz)
|
||||||
return term_last
|
return term_last
|
||||||
|
|
||||||
|
@property
|
||||||
|
def payment_term_expire_date(self):
|
||||||
|
delay = self.event.settings.get('payment_term_expire_delay_days', as_type=int)
|
||||||
|
if not delay: # performance saver + backwards compatibility
|
||||||
|
return self.expires
|
||||||
|
|
||||||
|
term_last = self.payment_term_last
|
||||||
|
if term_last and self.expires > term_last: # backwards compatibility
|
||||||
|
return self.expires
|
||||||
|
|
||||||
|
expires = self.expires.date() + timedelta(days=delay)
|
||||||
|
if self.event.settings.get('payment_term_weekdays'):
|
||||||
|
if expires.weekday() == 5:
|
||||||
|
expires += timedelta(days=2)
|
||||||
|
elif expires.weekday() == 6:
|
||||||
|
expires += timedelta(days=1)
|
||||||
|
|
||||||
|
tz = ZoneInfo(self.event.settings.timezone)
|
||||||
|
expires = make_aware(datetime.combine(
|
||||||
|
expires,
|
||||||
|
time(hour=23, minute=59, second=59)
|
||||||
|
), tz)
|
||||||
|
if term_last:
|
||||||
|
return min(expires, term_last)
|
||||||
|
else:
|
||||||
|
return expires
|
||||||
|
|
||||||
def _can_be_paid(self, count_waitinglist=True, ignore_date=False, force=False) -> Union[bool, str]:
|
def _can_be_paid(self, count_waitinglist=True, ignore_date=False, force=False) -> Union[bool, str]:
|
||||||
error_messages = {
|
error_messages = {
|
||||||
'late_lastdate': _("The payment can not be accepted as the last date of payments configured in the "
|
'late_lastdate': _("The payment can not be accepted as the last date of payments configured in the "
|
||||||
@@ -1230,7 +1263,7 @@ class QuestionAnswer(models.Model):
|
|||||||
try:
|
try:
|
||||||
d = dateutil.parser.parse(self.answer)
|
d = dateutil.parser.parse(self.answer)
|
||||||
if self.orderposition:
|
if self.orderposition:
|
||||||
tz = pytz.timezone(self.orderposition.order.event.settings.timezone)
|
tz = ZoneInfo(self.orderposition.order.event.settings.timezone)
|
||||||
d = d.astimezone(tz)
|
d = d.astimezone(tz)
|
||||||
return date_format(d, "SHORT_DATETIME_FORMAT")
|
return date_format(d, "SHORT_DATETIME_FORMAT")
|
||||||
except ValueError:
|
except ValueError:
|
||||||
@@ -1442,12 +1475,20 @@ class AbstractPosition(models.Model):
|
|||||||
else self.variation.quotas.filter(subevent=self.subevent))
|
else self.variation.quotas.filter(subevent=self.subevent))
|
||||||
|
|
||||||
def save(self, *args, **kwargs):
|
def save(self, *args, **kwargs):
|
||||||
update_fields = kwargs.get('update_fields', [])
|
update_fields = kwargs.get('update_fields', set())
|
||||||
if 'attendee_name_parts' in update_fields:
|
if 'attendee_name_parts' in update_fields:
|
||||||
update_fields.append('attendee_name_cached')
|
kwargs['update_fields'] = {'attendee_name_cached'}.union(kwargs['update_fields'])
|
||||||
self.attendee_name_cached = self.attendee_name
|
|
||||||
|
name = self.attendee_name
|
||||||
|
if name != self.attendee_name_cached:
|
||||||
|
self.attendee_name_cached = name
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'attendee_name_cached'}.union(kwargs['update_fields'])
|
||||||
|
|
||||||
if self.attendee_name_parts is None:
|
if self.attendee_name_parts is None:
|
||||||
self.attendee_name_parts = {}
|
self.attendee_name_parts = {}
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'attendee_name_parts'}.union(kwargs['update_fields'])
|
||||||
super().save(*args, **kwargs)
|
super().save(*args, **kwargs)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -1631,12 +1672,13 @@ class OrderPayment(models.Model):
|
|||||||
if status_change:
|
if status_change:
|
||||||
self.order.create_transactions()
|
self.order.create_transactions()
|
||||||
|
|
||||||
def fail(self, info=None, user=None, auth=None, log_data=None):
|
def fail(self, info=None, user=None, auth=None, log_data=None, send_mail=True):
|
||||||
"""
|
"""
|
||||||
Marks the order as failed and sets info to ``info``, but only if the order is in ``created`` or ``pending``
|
Marks the order as failed and sets info to ``info``, but only if the order is in ``created`` or ``pending``
|
||||||
state. This is equivalent to setting ``state`` to ``OrderPayment.PAYMENT_STATE_FAILED`` and logging a failure,
|
state. This is equivalent to setting ``state`` to ``OrderPayment.PAYMENT_STATE_FAILED`` and logging a failure,
|
||||||
but it adds strong database logging since we do not want to report a failure for an order that has just
|
but it adds strong database locking since we do not want to report a failure for an order that has just
|
||||||
been marked as paid.
|
been marked as paid.
|
||||||
|
:param send_mail: Whether an email should be sent to the user about this event (default: ``True``).
|
||||||
"""
|
"""
|
||||||
with transaction.atomic():
|
with transaction.atomic():
|
||||||
locked_instance = OrderPayment.objects.select_for_update(of=OF_SELF).get(pk=self.pk)
|
locked_instance = OrderPayment.objects.select_for_update(of=OF_SELF).get(pk=self.pk)
|
||||||
@@ -1661,6 +1703,17 @@ class OrderPayment(models.Model):
|
|||||||
'info': info,
|
'info': info,
|
||||||
'data': log_data,
|
'data': log_data,
|
||||||
}, user=user, auth=auth)
|
}, user=user, auth=auth)
|
||||||
|
|
||||||
|
if send_mail:
|
||||||
|
with language(self.order.locale, self.order.event.settings.region):
|
||||||
|
email_subject = self.order.event.settings.mail_subject_order_payment_failed
|
||||||
|
email_template = self.order.event.settings.mail_text_order_payment_failed
|
||||||
|
email_context = get_email_context(event=self.order.event, order=self.order)
|
||||||
|
self.order.send_mail(
|
||||||
|
email_subject, email_template, email_context,
|
||||||
|
'pretix.event.order.email.payment_failed', user=user, auth=auth,
|
||||||
|
)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def confirm(self, count_waitinglist=True, send_mail=True, force=False, user=None, auth=None, mail_text='',
|
def confirm(self, count_waitinglist=True, send_mail=True, force=False, user=None, auth=None, mail_text='',
|
||||||
@@ -1827,6 +1880,8 @@ class OrderPayment(models.Model):
|
|||||||
def save(self, *args, **kwargs):
|
def save(self, *args, **kwargs):
|
||||||
if not self.local_id:
|
if not self.local_id:
|
||||||
self.local_id = (self.order.payments.aggregate(m=Max('local_id'))['m'] or 0) + 1
|
self.local_id = (self.order.payments.aggregate(m=Max('local_id'))['m'] or 0) + 1
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'local_id'}.union(kwargs['update_fields'])
|
||||||
super().save(*args, **kwargs)
|
super().save(*args, **kwargs)
|
||||||
|
|
||||||
def create_external_refund(self, amount=None, execution_date=None, info='{}'):
|
def create_external_refund(self, amount=None, execution_date=None, info='{}'):
|
||||||
@@ -2025,6 +2080,8 @@ class OrderRefund(models.Model):
|
|||||||
def save(self, *args, **kwargs):
|
def save(self, *args, **kwargs):
|
||||||
if not self.local_id:
|
if not self.local_id:
|
||||||
self.local_id = (self.order.refunds.aggregate(m=Max('local_id'))['m'] or 0) + 1
|
self.local_id = (self.order.refunds.aggregate(m=Max('local_id'))['m'] or 0) + 1
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'local_id'}.union(kwargs['update_fields'])
|
||||||
super().save(*args, **kwargs)
|
super().save(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
@@ -2443,14 +2500,20 @@ class OrderPosition(AbstractPosition):
|
|||||||
assign_ticket_secret(
|
assign_ticket_secret(
|
||||||
event=self.order.event, position=self, force_invalidate=True, save=False
|
event=self.order.event, position=self, force_invalidate=True, save=False
|
||||||
)
|
)
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'secret'}.union(kwargs['update_fields'])
|
||||||
|
|
||||||
if not self.blocked:
|
if not self.blocked and self.blocked is not None:
|
||||||
self.blocked = None
|
self.blocked = None
|
||||||
elif not isinstance(self.blocked, list) or any(not isinstance(b, str) for b in self.blocked):
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'blocked'}.union(kwargs['update_fields'])
|
||||||
|
elif self.blocked and (not isinstance(self.blocked, list) or any(not isinstance(b, str) for b in self.blocked)):
|
||||||
raise TypeError("blocked needs to be a list of strings")
|
raise TypeError("blocked needs to be a list of strings")
|
||||||
|
|
||||||
if not self.pseudonymization_id:
|
if not self.pseudonymization_id:
|
||||||
self.assign_pseudonymization_id()
|
self.assign_pseudonymization_id()
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'pseudonymization_id'}.union(kwargs['update_fields'])
|
||||||
|
|
||||||
if not self.get_deferred_fields():
|
if not self.get_deferred_fields():
|
||||||
if Transaction.key(self) != self.__initial_transaction_key or self.canceled != self.__initial_canceled or not self.pk:
|
if Transaction.key(self) != self.__initial_transaction_key or self.canceled != self.__initial_canceled or not self.pk:
|
||||||
@@ -2693,8 +2756,8 @@ class Transaction(models.Model):
|
|||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
ordering = 'datetime', 'pk'
|
ordering = 'datetime', 'pk'
|
||||||
index_together = [
|
indexes = [
|
||||||
['datetime', 'id']
|
models.Index(fields=['datetime', 'id'])
|
||||||
]
|
]
|
||||||
|
|
||||||
def save(self, *args, **kwargs):
|
def save(self, *args, **kwargs):
|
||||||
@@ -2936,10 +2999,17 @@ class InvoiceAddress(models.Model):
|
|||||||
self.order.touch()
|
self.order.touch()
|
||||||
|
|
||||||
if self.name_parts:
|
if self.name_parts:
|
||||||
self.name_cached = self.name
|
name = self.name
|
||||||
|
if self.name_cached != name:
|
||||||
|
self.name_cached = self.name
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'name_cached'}.union(kwargs['update_fields'])
|
||||||
else:
|
else:
|
||||||
self.name_cached = ""
|
if self.name_cached != "" or self.name_parts != {}:
|
||||||
self.name_parts = {}
|
self.name_cached = ""
|
||||||
|
self.name_parts = {}
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'name_cached', 'name_parts'}.union(kwargs['update_fields'])
|
||||||
super().save(**kwargs)
|
super().save(**kwargs)
|
||||||
|
|
||||||
def describe(self):
|
def describe(self):
|
||||||
@@ -3085,11 +3155,7 @@ class BlockedTicketSecret(models.Model):
|
|||||||
updated = models.DateTimeField(auto_now=True)
|
updated = models.DateTimeField(auto_now=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
if 'mysql' not in settings.DATABASES['default']['ENGINE']:
|
unique_together = (('event', 'secret'),)
|
||||||
# MySQL does not support indexes on TextField(). Django knows this and just ignores db_index, but it will
|
|
||||||
# not silently ignore the UNIQUE index, causing this table to fail. I'm so glad we're deprecating MySQL
|
|
||||||
# in a few months, so we'll just live without an unique index until then.
|
|
||||||
unique_together = (('event', 'secret'),)
|
|
||||||
|
|
||||||
|
|
||||||
@receiver(post_delete, sender=CachedTicket)
|
@receiver(post_delete, sender=CachedTicket)
|
||||||
|
|||||||
@@ -35,12 +35,12 @@
|
|||||||
import string
|
import string
|
||||||
from datetime import date, datetime, time
|
from datetime import date, datetime, time
|
||||||
|
|
||||||
import pytz
|
import pytz_deprecation_shim
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.mail import get_connection
|
from django.core.mail import get_connection
|
||||||
from django.core.validators import MinLengthValidator, RegexValidator
|
from django.core.validators import MinLengthValidator, RegexValidator
|
||||||
from django.db import models
|
from django.db import models
|
||||||
from django.db.models import Exists, OuterRef, Q
|
from django.db.models import Q
|
||||||
from django.urls import reverse
|
from django.urls import reverse
|
||||||
from django.utils.crypto import get_random_string
|
from django.utils.crypto import get_random_string
|
||||||
from django.utils.functional import cached_property
|
from django.utils.functional import cached_property
|
||||||
@@ -102,6 +102,7 @@ class Organizer(LoggedModel):
|
|||||||
is_new = not self.pk
|
is_new = not self.pk
|
||||||
obj = super().save(*args, **kwargs)
|
obj = super().save(*args, **kwargs)
|
||||||
if is_new:
|
if is_new:
|
||||||
|
kwargs.pop('update_fields', None) # does not make sense here
|
||||||
self.set_defaults()
|
self.set_defaults()
|
||||||
else:
|
else:
|
||||||
self.get_cache().clear()
|
self.get_cache().clear()
|
||||||
@@ -140,7 +141,7 @@ class Organizer(LoggedModel):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def timezone(self):
|
def timezone(self):
|
||||||
return pytz.timezone(self.settings.timezone)
|
return pytz_deprecation_shim.timezone(self.settings.timezone)
|
||||||
|
|
||||||
@cached_property
|
@cached_property
|
||||||
def all_logentries_link(self):
|
def all_logentries_link(self):
|
||||||
@@ -156,17 +157,19 @@ class Organizer(LoggedModel):
|
|||||||
return self.cache.get_or_set(
|
return self.cache.get_or_set(
|
||||||
key='has_gift_cards',
|
key='has_gift_cards',
|
||||||
timeout=15,
|
timeout=15,
|
||||||
default=lambda: self.issued_gift_cards.exists() or self.gift_card_issuer_acceptance.exists()
|
default=lambda: self.issued_gift_cards.exists() or self.gift_card_issuer_acceptance.filter(active=True).exists()
|
||||||
)
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def accepted_gift_cards(self):
|
def accepted_gift_cards(self):
|
||||||
from .giftcards import GiftCard, GiftCardAcceptance
|
from .giftcards import GiftCard, GiftCardAcceptance
|
||||||
|
|
||||||
return GiftCard.objects.annotate(
|
return GiftCard.objects.filter(
|
||||||
accepted=Exists(GiftCardAcceptance.objects.filter(issuer=OuterRef('issuer'), collector=self))
|
Q(issuer=self) |
|
||||||
).filter(
|
Q(issuer__in=GiftCardAcceptance.objects.filter(
|
||||||
Q(issuer=self) | Q(accepted=True)
|
acceptor=self,
|
||||||
|
active=True,
|
||||||
|
).values_list('issuer', flat=True))
|
||||||
)
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
|||||||
@@ -22,9 +22,12 @@
|
|||||||
import json
|
import json
|
||||||
from decimal import Decimal
|
from decimal import Decimal
|
||||||
|
|
||||||
|
import jsonschema
|
||||||
|
from django.contrib.staticfiles import finders
|
||||||
from django.core.exceptions import ValidationError
|
from django.core.exceptions import ValidationError
|
||||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||||
from django.db import models
|
from django.db import models
|
||||||
|
from django.utils.deconstruct import deconstructible
|
||||||
from django.utils.formats import localize
|
from django.utils.formats import localize
|
||||||
from django.utils.translation import gettext_lazy as _, pgettext
|
from django.utils.translation import gettext_lazy as _, pgettext
|
||||||
from i18nfield.fields import I18nCharField
|
from i18nfield.fields import I18nCharField
|
||||||
@@ -135,6 +138,25 @@ def cc_to_vat_prefix(country_code):
|
|||||||
return country_code
|
return country_code
|
||||||
|
|
||||||
|
|
||||||
|
@deconstructible
|
||||||
|
class CustomRulesValidator:
|
||||||
|
def __call__(self, value):
|
||||||
|
if not isinstance(value, dict):
|
||||||
|
try:
|
||||||
|
val = json.loads(value)
|
||||||
|
except ValueError:
|
||||||
|
raise ValidationError(_('Your layout file is not a valid JSON file.'))
|
||||||
|
else:
|
||||||
|
val = value
|
||||||
|
with open(finders.find('schema/tax-rules-custom.schema.json'), 'r') as f:
|
||||||
|
schema = json.loads(f.read())
|
||||||
|
try:
|
||||||
|
jsonschema.validate(val, schema)
|
||||||
|
except jsonschema.ValidationError as e:
|
||||||
|
e = str(e).replace('%', '%%')
|
||||||
|
raise ValidationError(_('Your set of rules is not valid. Error message: {}').format(e))
|
||||||
|
|
||||||
|
|
||||||
class TaxRule(LoggedModel):
|
class TaxRule(LoggedModel):
|
||||||
event = models.ForeignKey('Event', related_name='tax_rules', on_delete=models.CASCADE)
|
event = models.ForeignKey('Event', related_name='tax_rules', on_delete=models.CASCADE)
|
||||||
internal_name = models.CharField(
|
internal_name = models.CharField(
|
||||||
@@ -318,10 +340,17 @@ class TaxRule(LoggedModel):
|
|||||||
rules = self._custom_rules
|
rules = self._custom_rules
|
||||||
if invoice_address:
|
if invoice_address:
|
||||||
for r in rules:
|
for r in rules:
|
||||||
if r['country'] == 'EU' and not is_eu_country(invoice_address.country):
|
if r['country'] == 'ZZ': # Rule: Any country
|
||||||
continue
|
pass
|
||||||
if r['country'] not in ('ZZ', 'EU') and r['country'] != str(invoice_address.country):
|
elif r['country'] == 'EU': # Rule: Any EU country
|
||||||
continue
|
if not is_eu_country(invoice_address.country):
|
||||||
|
continue
|
||||||
|
elif '-' in r['country']: # Rule: Specific country and state
|
||||||
|
if r['country'] != str(invoice_address.country) + '-' + str(invoice_address.state):
|
||||||
|
continue
|
||||||
|
else: # Rule: Specific country
|
||||||
|
if r['country'] != str(invoice_address.country):
|
||||||
|
continue
|
||||||
if r['address_type'] == 'individual' and invoice_address.is_business:
|
if r['address_type'] == 'individual' and invoice_address.is_business:
|
||||||
continue
|
continue
|
||||||
if r['address_type'] in ('business', 'business_vat_id') and not invoice_address.is_business:
|
if r['address_type'] in ('business', 'business_vat_id') and not invoice_address.is_business:
|
||||||
|
|||||||
@@ -502,7 +502,10 @@ class Voucher(LoggedModel):
|
|||||||
return seat
|
return seat
|
||||||
|
|
||||||
def save(self, *args, **kwargs):
|
def save(self, *args, **kwargs):
|
||||||
self.code = self.code.upper()
|
if self.code != self.code.upper():
|
||||||
|
self.code = self.code.upper()
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'code'}.union(kwargs['update_fields'])
|
||||||
super().save(*args, **kwargs)
|
super().save(*args, **kwargs)
|
||||||
self.event.cache.set('vouchers_exist', True)
|
self.event.cache.set('vouchers_exist', True)
|
||||||
|
|
||||||
|
|||||||
@@ -126,12 +126,19 @@ class WaitingListEntry(LoggedModel):
|
|||||||
raise ValidationError('Invalid input')
|
raise ValidationError('Invalid input')
|
||||||
|
|
||||||
def save(self, *args, **kwargs):
|
def save(self, *args, **kwargs):
|
||||||
update_fields = kwargs.get('update_fields', [])
|
update_fields = kwargs.get('update_fields', set())
|
||||||
if 'name_parts' in update_fields:
|
if 'name_parts' in update_fields:
|
||||||
update_fields.append('name_cached')
|
kwargs['update_fields'] = {'name_cached'}.union(kwargs['update_fields'])
|
||||||
self.name_cached = self.name
|
name = self.name
|
||||||
|
if name != self.name_cached:
|
||||||
|
self.name_cached = name
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'name_cached'}.union(kwargs['update_fields'])
|
||||||
|
|
||||||
if self.name_parts is None:
|
if self.name_parts is None:
|
||||||
self.name_parts = {}
|
self.name_parts = {}
|
||||||
|
if 'update_fields' in kwargs:
|
||||||
|
kwargs['update_fields'] = {'name_parts'}.union(kwargs['update_fields'])
|
||||||
super().save(*args, **kwargs)
|
super().save(*args, **kwargs)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -211,7 +218,7 @@ class WaitingListEntry(LoggedModel):
|
|||||||
'waitinglistentry': self.pk,
|
'waitinglistentry': self.pk,
|
||||||
'subevent': self.subevent.pk if self.subevent else None,
|
'subevent': self.subevent.pk if self.subevent else None,
|
||||||
}, user=user, auth=auth)
|
}, user=user, auth=auth)
|
||||||
self.log_action('pretix.waitinglist.voucher', user=user, auth=auth)
|
self.log_action('pretix.event.orders.waitinglist.voucher_assigned', user=user, auth=auth)
|
||||||
self.voucher = v
|
self.voucher = v
|
||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
|
|||||||
@@ -28,6 +28,7 @@ import pycountry
|
|||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.exceptions import ValidationError
|
from django.core.exceptions import ValidationError
|
||||||
from django.core.validators import EmailValidator
|
from django.core.validators import EmailValidator
|
||||||
|
from django.db.models import Q
|
||||||
from django.utils import formats
|
from django.utils import formats
|
||||||
from django.utils.functional import cached_property
|
from django.utils.functional import cached_property
|
||||||
from django.utils.translation import (
|
from django.utils.translation import (
|
||||||
@@ -42,8 +43,8 @@ from phonenumbers import SUPPORTED_REGIONS
|
|||||||
from pretix.base.channels import get_all_sales_channels
|
from pretix.base.channels import get_all_sales_channels
|
||||||
from pretix.base.forms.questions import guess_country
|
from pretix.base.forms.questions import guess_country
|
||||||
from pretix.base.models import (
|
from pretix.base.models import (
|
||||||
ItemVariation, OrderPosition, Question, QuestionAnswer, QuestionOption,
|
Customer, ItemVariation, OrderPosition, Question, QuestionAnswer,
|
||||||
Seat, SubEvent,
|
QuestionOption, Seat, SubEvent,
|
||||||
)
|
)
|
||||||
from pretix.base.services.pricing import get_price
|
from pretix.base.services.pricing import get_price
|
||||||
from pretix.base.settings import (
|
from pretix.base.settings import (
|
||||||
@@ -210,7 +211,7 @@ class SubeventColumn(ImportColumn):
|
|||||||
for format in input_formats:
|
for format in input_formats:
|
||||||
try:
|
try:
|
||||||
d = datetime.datetime.strptime(value, format)
|
d = datetime.datetime.strptime(value, format)
|
||||||
d = self.event.timezone.localize(d)
|
d = d.replace(tzinfo=self.event.timezone)
|
||||||
try:
|
try:
|
||||||
se = self.event.subevents.get(
|
se = self.event.subevents.get(
|
||||||
active=True,
|
active=True,
|
||||||
@@ -660,7 +661,7 @@ class ValidFrom(ImportColumn):
|
|||||||
for format in input_formats:
|
for format in input_formats:
|
||||||
try:
|
try:
|
||||||
d = datetime.datetime.strptime(value, format)
|
d = datetime.datetime.strptime(value, format)
|
||||||
d = self.event.timezone.localize(d)
|
d = d.replace(tzinfo=self.event.timezone)
|
||||||
return d
|
return d
|
||||||
except (ValueError, TypeError):
|
except (ValueError, TypeError):
|
||||||
pass
|
pass
|
||||||
@@ -683,7 +684,7 @@ class ValidUntil(ImportColumn):
|
|||||||
for format in input_formats:
|
for format in input_formats:
|
||||||
try:
|
try:
|
||||||
d = datetime.datetime.strptime(value, format)
|
d = datetime.datetime.strptime(value, format)
|
||||||
d = self.event.timezone.localize(d)
|
d = d.replace(tzinfo=self.event.timezone)
|
||||||
return d
|
return d
|
||||||
except (ValueError, TypeError):
|
except (ValueError, TypeError):
|
||||||
pass
|
pass
|
||||||
@@ -804,7 +805,7 @@ class QuestionColumn(ImportColumn):
|
|||||||
return self.q.clean_answer(value)
|
return self.q.clean_answer(value)
|
||||||
|
|
||||||
def assign(self, value, order, position, invoice_address, **kwargs):
|
def assign(self, value, order, position, invoice_address, **kwargs):
|
||||||
if value:
|
if value is not None:
|
||||||
if not hasattr(order, '_answers'):
|
if not hasattr(order, '_answers'):
|
||||||
order._answers = []
|
order._answers = []
|
||||||
if isinstance(value, QuestionOption):
|
if isinstance(value, QuestionOption):
|
||||||
@@ -826,6 +827,28 @@ class QuestionColumn(ImportColumn):
|
|||||||
a.options.add(*a._options)
|
a.options.add(*a._options)
|
||||||
|
|
||||||
|
|
||||||
|
class CustomerColumn(ImportColumn):
|
||||||
|
identifier = 'customer'
|
||||||
|
verbose_name = gettext_lazy('Customer')
|
||||||
|
|
||||||
|
def clean(self, value, previous_values):
|
||||||
|
if value:
|
||||||
|
try:
|
||||||
|
value = self.event.organizer.customers.get(
|
||||||
|
Q(identifier=value) | Q(email__iexact=value) | Q(external_identifier=value)
|
||||||
|
)
|
||||||
|
except Customer.MultipleObjectsReturned:
|
||||||
|
value = self.event.organizer.customers.get(
|
||||||
|
Q(identifier=value)
|
||||||
|
)
|
||||||
|
except Customer.DoesNotExist:
|
||||||
|
raise ValidationError(_('No matching customer was found.'))
|
||||||
|
return value
|
||||||
|
|
||||||
|
def assign(self, value, order, position, invoice_address, **kwargs):
|
||||||
|
order.customer = value
|
||||||
|
|
||||||
|
|
||||||
def get_all_columns(event):
|
def get_all_columns(event):
|
||||||
default = []
|
default = []
|
||||||
if event.has_subevents:
|
if event.has_subevents:
|
||||||
@@ -837,6 +860,10 @@ def get_all_columns(event):
|
|||||||
Variation(event),
|
Variation(event),
|
||||||
InvoiceAddressCompany(event),
|
InvoiceAddressCompany(event),
|
||||||
]
|
]
|
||||||
|
if event.settings.customer_accounts:
|
||||||
|
default += [
|
||||||
|
CustomerColumn(event),
|
||||||
|
]
|
||||||
scheme = PERSON_NAME_SCHEMES.get(event.settings.name_scheme)
|
scheme = PERSON_NAME_SCHEMES.get(event.settings.name_scheme)
|
||||||
for n, l, w in scheme['fields']:
|
for n, l, w in scheme['fields']:
|
||||||
default.append(InvoiceAddressNamePart(event, n, l))
|
default.append(InvoiceAddressNamePart(event, n, l))
|
||||||
|
|||||||
@@ -39,8 +39,8 @@ import logging
|
|||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
from decimal import ROUND_HALF_UP, Decimal
|
from decimal import ROUND_HALF_UP, Decimal
|
||||||
from typing import Any, Dict, Union
|
from typing import Any, Dict, Union
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
import pytz
|
|
||||||
from django import forms
|
from django import forms
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.contrib import messages
|
from django.contrib import messages
|
||||||
@@ -60,7 +60,7 @@ from pretix.base.channels import get_all_sales_channels
|
|||||||
from pretix.base.forms import PlaceholderValidator
|
from pretix.base.forms import PlaceholderValidator
|
||||||
from pretix.base.models import (
|
from pretix.base.models import (
|
||||||
CartPosition, Event, GiftCard, InvoiceAddress, Order, OrderPayment,
|
CartPosition, Event, GiftCard, InvoiceAddress, Order, OrderPayment,
|
||||||
OrderRefund, Quota,
|
OrderRefund, Quota, TaxRule,
|
||||||
)
|
)
|
||||||
from pretix.base.reldate import RelativeDateField, RelativeDateWrapper
|
from pretix.base.reldate import RelativeDateField, RelativeDateWrapper
|
||||||
from pretix.base.settings import SettingsSandbox
|
from pretix.base.settings import SettingsSandbox
|
||||||
@@ -78,6 +78,16 @@ from pretix.presale.views.cart import cart_session, get_or_create_cart_id
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class WalletQueries:
|
||||||
|
APPLEPAY = 'applepay'
|
||||||
|
GOOGLEPAY = 'googlepay'
|
||||||
|
|
||||||
|
WALLETS = (
|
||||||
|
(APPLEPAY, pgettext_lazy('payment', 'Apple Pay')),
|
||||||
|
(GOOGLEPAY, pgettext_lazy('payment', 'Google Pay')),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class PaymentProviderForm(Form):
|
class PaymentProviderForm(Form):
|
||||||
def clean(self):
|
def clean(self):
|
||||||
cleaned_data = super().clean()
|
cleaned_data = super().clean()
|
||||||
@@ -436,6 +446,19 @@ class BasePaymentProvider:
|
|||||||
d['_restrict_to_sales_channels']._as_type = list
|
d['_restrict_to_sales_channels']._as_type = list
|
||||||
return d
|
return d
|
||||||
|
|
||||||
|
@property
|
||||||
|
def walletqueries(self):
|
||||||
|
"""
|
||||||
|
.. warning:: This property is considered **experimental**. It might change or get removed at any time without
|
||||||
|
prior notice.
|
||||||
|
|
||||||
|
A list of wallet payment methods that should be dynamically joined to the public name of the payment method,
|
||||||
|
if they are available to the user.
|
||||||
|
The detection is made on a best effort basis with no guarantees of correctness and actual availability.
|
||||||
|
Wallets that pretix can check for are exposed through ``pretix.base.payment.WalletQueries``.
|
||||||
|
"""
|
||||||
|
return []
|
||||||
|
|
||||||
def settings_form_clean(self, cleaned_data):
|
def settings_form_clean(self, cleaned_data):
|
||||||
"""
|
"""
|
||||||
Overriding this method allows you to inject custom validation into the settings form.
|
Overriding this method allows you to inject custom validation into the settings form.
|
||||||
@@ -518,7 +541,7 @@ class BasePaymentProvider:
|
|||||||
|
|
||||||
def _is_still_available(self, now_dt=None, cart_id=None, order=None):
|
def _is_still_available(self, now_dt=None, cart_id=None, order=None):
|
||||||
now_dt = now_dt or now()
|
now_dt = now_dt or now()
|
||||||
tz = pytz.timezone(self.event.settings.timezone)
|
tz = ZoneInfo(self.event.settings.timezone)
|
||||||
|
|
||||||
availability_date = self.settings.get('_availability_date', as_type=RelativeDateWrapper)
|
availability_date = self.settings.get('_availability_date', as_type=RelativeDateWrapper)
|
||||||
if availability_date:
|
if availability_date:
|
||||||
@@ -1015,7 +1038,11 @@ class FreeOrderProvider(BasePaymentProvider):
|
|||||||
|
|
||||||
cart = get_cart(request)
|
cart = get_cart(request)
|
||||||
total = get_cart_total(request)
|
total = get_cart_total(request)
|
||||||
total += sum([f.value for f in get_fees(self.event, request, total, None, None, cart)])
|
try:
|
||||||
|
total += sum([f.value for f in get_fees(self.event, request, total, None, None, cart)])
|
||||||
|
except TaxRule.SaleNotAllowed:
|
||||||
|
# ignore for now, will fail on order creation
|
||||||
|
pass
|
||||||
return total == 0
|
return total == 0
|
||||||
|
|
||||||
def order_change_allowed(self, order: Order) -> bool:
|
def order_change_allowed(self, order: Order) -> bool:
|
||||||
|
|||||||
@@ -43,16 +43,18 @@ import subprocess
|
|||||||
import tempfile
|
import tempfile
|
||||||
import unicodedata
|
import unicodedata
|
||||||
import uuid
|
import uuid
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict, defaultdict
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
|
|
||||||
import jsonschema
|
import jsonschema
|
||||||
|
import reportlab.rl_config
|
||||||
from bidi.algorithm import get_display
|
from bidi.algorithm import get_display
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.contrib.staticfiles import finders
|
from django.contrib.staticfiles import finders
|
||||||
from django.core.exceptions import ValidationError
|
from django.core.exceptions import ValidationError
|
||||||
from django.db.models import Max, Min
|
from django.db.models import Max, Min
|
||||||
|
from django.db.models.fields.files import FieldFile
|
||||||
from django.dispatch import receiver
|
from django.dispatch import receiver
|
||||||
from django.utils.deconstruct import deconstructible
|
from django.utils.deconstruct import deconstructible
|
||||||
from django.utils.formats import date_format
|
from django.utils.formats import date_format
|
||||||
@@ -60,8 +62,8 @@ from django.utils.html import conditional_escape
|
|||||||
from django.utils.timezone import now
|
from django.utils.timezone import now
|
||||||
from django.utils.translation import gettext_lazy as _, pgettext
|
from django.utils.translation import gettext_lazy as _, pgettext
|
||||||
from i18nfield.strings import LazyI18nString
|
from i18nfield.strings import LazyI18nString
|
||||||
from pypdf import PdfReader
|
from pypdf import PdfReader, PdfWriter, Transformation
|
||||||
from pytz import timezone
|
from pypdf.generic import RectangleObject
|
||||||
from reportlab.graphics import renderPDF
|
from reportlab.graphics import renderPDF
|
||||||
from reportlab.graphics.barcode.qr import QrCodeWidget
|
from reportlab.graphics.barcode.qr import QrCodeWidget
|
||||||
from reportlab.graphics.shapes import Drawing
|
from reportlab.graphics.shapes import Drawing
|
||||||
@@ -86,6 +88,9 @@ from pretix.presale.style import get_fonts
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
if not settings.DEBUG:
|
||||||
|
reportlab.rl_config.shapeChecking = 0
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_VARIABLES = OrderedDict((
|
DEFAULT_VARIABLES = OrderedDict((
|
||||||
("secret", {
|
("secret", {
|
||||||
@@ -103,7 +108,10 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
("positionid", {
|
("positionid", {
|
||||||
"label": _("Order position number"),
|
"label": _("Order position number"),
|
||||||
"editor_sample": "1",
|
"editor_sample": "1",
|
||||||
"evaluate": lambda orderposition, order, event: str(orderposition.positionid)
|
"evaluate": lambda orderposition, order, event: str(orderposition.positionid),
|
||||||
|
# There is no performance gain in using evaluate_bulk here, but we want to make sure it is used somewhere
|
||||||
|
# in core to make sure we notice if the implementation of the API breaks.
|
||||||
|
"evaluate_bulk": lambda orderpositions: [str(p.positionid) for p in orderpositions],
|
||||||
}),
|
}),
|
||||||
("order_positionid", {
|
("order_positionid", {
|
||||||
"label": _("Order code and position number"),
|
"label": _("Order code and position number"),
|
||||||
@@ -237,7 +245,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Event begin date and time"),
|
"label": _("Event begin date and time"),
|
||||||
"editor_sample": _("2017-05-31 20:00"),
|
"editor_sample": _("2017-05-31 20:00"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
ev.date_from.astimezone(timezone(ev.settings.timezone)),
|
ev.date_from.astimezone(ev.timezone),
|
||||||
"SHORT_DATETIME_FORMAT"
|
"SHORT_DATETIME_FORMAT"
|
||||||
) if ev.date_from else ""
|
) if ev.date_from else ""
|
||||||
}),
|
}),
|
||||||
@@ -245,7 +253,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Event begin date"),
|
"label": _("Event begin date"),
|
||||||
"editor_sample": _("2017-05-31"),
|
"editor_sample": _("2017-05-31"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
ev.date_from.astimezone(timezone(ev.settings.timezone)),
|
ev.date_from.astimezone(ev.timezone),
|
||||||
"SHORT_DATE_FORMAT"
|
"SHORT_DATE_FORMAT"
|
||||||
) if ev.date_from else ""
|
) if ev.date_from else ""
|
||||||
}),
|
}),
|
||||||
@@ -263,7 +271,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Event end date and time"),
|
"label": _("Event end date and time"),
|
||||||
"editor_sample": _("2017-05-31 22:00"),
|
"editor_sample": _("2017-05-31 22:00"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
ev.date_to.astimezone(timezone(ev.settings.timezone)),
|
ev.date_to.astimezone(ev.timezone),
|
||||||
"SHORT_DATETIME_FORMAT"
|
"SHORT_DATETIME_FORMAT"
|
||||||
) if ev.date_to else ""
|
) if ev.date_to else ""
|
||||||
}),
|
}),
|
||||||
@@ -271,7 +279,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Event end date"),
|
"label": _("Event end date"),
|
||||||
"editor_sample": _("2017-05-31"),
|
"editor_sample": _("2017-05-31"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
ev.date_to.astimezone(timezone(ev.settings.timezone)),
|
ev.date_to.astimezone(ev.timezone),
|
||||||
"SHORT_DATE_FORMAT"
|
"SHORT_DATE_FORMAT"
|
||||||
) if ev.date_to else ""
|
) if ev.date_to else ""
|
||||||
}),
|
}),
|
||||||
@@ -279,7 +287,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Event end time"),
|
"label": _("Event end time"),
|
||||||
"editor_sample": _("22:00"),
|
"editor_sample": _("22:00"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
ev.date_to.astimezone(timezone(ev.settings.timezone)),
|
ev.date_to.astimezone(ev.timezone),
|
||||||
"TIME_FORMAT"
|
"TIME_FORMAT"
|
||||||
) if ev.date_to else ""
|
) if ev.date_to else ""
|
||||||
}),
|
}),
|
||||||
@@ -292,7 +300,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Event admission date and time"),
|
"label": _("Event admission date and time"),
|
||||||
"editor_sample": _("2017-05-31 19:00"),
|
"editor_sample": _("2017-05-31 19:00"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
ev.date_admission.astimezone(timezone(ev.settings.timezone)),
|
ev.date_admission.astimezone(ev.timezone),
|
||||||
"SHORT_DATETIME_FORMAT"
|
"SHORT_DATETIME_FORMAT"
|
||||||
) if ev.date_admission else ""
|
) if ev.date_admission else ""
|
||||||
}),
|
}),
|
||||||
@@ -300,7 +308,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Event admission time"),
|
"label": _("Event admission time"),
|
||||||
"editor_sample": _("19:00"),
|
"editor_sample": _("19:00"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
ev.date_admission.astimezone(timezone(ev.settings.timezone)),
|
ev.date_admission.astimezone(ev.timezone),
|
||||||
"TIME_FORMAT"
|
"TIME_FORMAT"
|
||||||
) if ev.date_admission else ""
|
) if ev.date_admission else ""
|
||||||
}),
|
}),
|
||||||
@@ -356,14 +364,9 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
}),
|
}),
|
||||||
("addons", {
|
("addons", {
|
||||||
"label": _("List of Add-Ons"),
|
"label": _("List of Add-Ons"),
|
||||||
"editor_sample": _("Add-on 1\nAdd-on 2"),
|
"editor_sample": _("Add-on 1\n2x Add-on 2"),
|
||||||
"evaluate": lambda op, order, ev: "\n".join([
|
"evaluate": lambda op, order, ev: "\n".join([
|
||||||
'{} - {}'.format(p.item.name, p.variation.value) if p.variation else str(p.item.name)
|
str(p) for p in generate_compressed_addon_list(op, order, ev)
|
||||||
for p in (
|
|
||||||
op.addons.all() if 'addons' in getattr(op, '_prefetched_objects_cache', {})
|
|
||||||
else op.addons.select_related('item', 'variation')
|
|
||||||
)
|
|
||||||
if not p.canceled
|
|
||||||
])
|
])
|
||||||
}),
|
}),
|
||||||
("organizer", {
|
("organizer", {
|
||||||
@@ -385,7 +388,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Printing date"),
|
"label": _("Printing date"),
|
||||||
"editor_sample": _("2017-05-31"),
|
"editor_sample": _("2017-05-31"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
now().astimezone(timezone(ev.settings.timezone)),
|
now().astimezone(ev.timezone),
|
||||||
"SHORT_DATE_FORMAT"
|
"SHORT_DATE_FORMAT"
|
||||||
)
|
)
|
||||||
}),
|
}),
|
||||||
@@ -393,7 +396,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Printing date and time"),
|
"label": _("Printing date and time"),
|
||||||
"editor_sample": _("2017-05-31 19:00"),
|
"editor_sample": _("2017-05-31 19:00"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
now().astimezone(timezone(ev.settings.timezone)),
|
now().astimezone(ev.timezone),
|
||||||
"SHORT_DATETIME_FORMAT"
|
"SHORT_DATETIME_FORMAT"
|
||||||
)
|
)
|
||||||
}),
|
}),
|
||||||
@@ -401,7 +404,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Printing time"),
|
"label": _("Printing time"),
|
||||||
"editor_sample": _("19:00"),
|
"editor_sample": _("19:00"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
now().astimezone(timezone(ev.settings.timezone)),
|
now().astimezone(ev.timezone),
|
||||||
"TIME_FORMAT"
|
"TIME_FORMAT"
|
||||||
)
|
)
|
||||||
}),
|
}),
|
||||||
@@ -409,7 +412,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Validity start date"),
|
"label": _("Validity start date"),
|
||||||
"editor_sample": _("2017-05-31"),
|
"editor_sample": _("2017-05-31"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
op.valid_from.astimezone(timezone(ev.settings.timezone)),
|
op.valid_from.astimezone(ev.timezone),
|
||||||
"SHORT_DATE_FORMAT"
|
"SHORT_DATE_FORMAT"
|
||||||
) if op.valid_from else ""
|
) if op.valid_from else ""
|
||||||
}),
|
}),
|
||||||
@@ -417,7 +420,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Validity start date and time"),
|
"label": _("Validity start date and time"),
|
||||||
"editor_sample": _("2017-05-31 19:00"),
|
"editor_sample": _("2017-05-31 19:00"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
op.valid_from.astimezone(timezone(ev.settings.timezone)),
|
op.valid_from.astimezone(ev.timezone),
|
||||||
"SHORT_DATETIME_FORMAT"
|
"SHORT_DATETIME_FORMAT"
|
||||||
) if op.valid_from else ""
|
) if op.valid_from else ""
|
||||||
}),
|
}),
|
||||||
@@ -425,7 +428,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Validity start time"),
|
"label": _("Validity start time"),
|
||||||
"editor_sample": _("19:00"),
|
"editor_sample": _("19:00"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
op.valid_from.astimezone(timezone(ev.settings.timezone)),
|
op.valid_from.astimezone(ev.timezone),
|
||||||
"TIME_FORMAT"
|
"TIME_FORMAT"
|
||||||
) if op.valid_from else ""
|
) if op.valid_from else ""
|
||||||
}),
|
}),
|
||||||
@@ -433,7 +436,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Validity end date"),
|
"label": _("Validity end date"),
|
||||||
"editor_sample": _("2017-05-31"),
|
"editor_sample": _("2017-05-31"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
op.valid_until.astimezone(timezone(ev.settings.timezone)),
|
op.valid_until.astimezone(ev.timezone),
|
||||||
"SHORT_DATE_FORMAT"
|
"SHORT_DATE_FORMAT"
|
||||||
) if op.valid_until else ""
|
) if op.valid_until else ""
|
||||||
}),
|
}),
|
||||||
@@ -441,7 +444,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Validity end date and time"),
|
"label": _("Validity end date and time"),
|
||||||
"editor_sample": _("2017-05-31 19:00"),
|
"editor_sample": _("2017-05-31 19:00"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
op.valid_until.astimezone(timezone(ev.settings.timezone)),
|
op.valid_until.astimezone(ev.timezone),
|
||||||
"SHORT_DATETIME_FORMAT"
|
"SHORT_DATETIME_FORMAT"
|
||||||
) if op.valid_until else ""
|
) if op.valid_until else ""
|
||||||
}),
|
}),
|
||||||
@@ -449,7 +452,7 @@ DEFAULT_VARIABLES = OrderedDict((
|
|||||||
"label": _("Validity end time"),
|
"label": _("Validity end time"),
|
||||||
"editor_sample": _("19:00"),
|
"editor_sample": _("19:00"),
|
||||||
"evaluate": lambda op, order, ev: date_format(
|
"evaluate": lambda op, order, ev: date_format(
|
||||||
op.valid_until.astimezone(timezone(ev.settings.timezone)),
|
op.valid_until.astimezone(ev.timezone),
|
||||||
"TIME_FORMAT"
|
"TIME_FORMAT"
|
||||||
) if op.valid_until else ""
|
) if op.valid_until else ""
|
||||||
}),
|
}),
|
||||||
@@ -697,6 +700,30 @@ def get_seat(op: OrderPosition):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def generate_compressed_addon_list(op, order, event):
|
||||||
|
itemcount = defaultdict(int)
|
||||||
|
addons = [p for p in (
|
||||||
|
op.addons.all() if 'addons' in getattr(op, '_prefetched_objects_cache', {})
|
||||||
|
else op.addons.select_related('item', 'variation')
|
||||||
|
) if not p.canceled]
|
||||||
|
for pos in addons:
|
||||||
|
itemcount[pos.item, pos.variation] += 1
|
||||||
|
|
||||||
|
addonlist = []
|
||||||
|
for (item, variation), count in itemcount.items():
|
||||||
|
if variation:
|
||||||
|
if count > 1:
|
||||||
|
addonlist.append('{}x {} - {}'.format(count, item.name, variation.value))
|
||||||
|
else:
|
||||||
|
addonlist.append('{} - {}'.format(item.name, variation.value))
|
||||||
|
else:
|
||||||
|
if count > 1:
|
||||||
|
addonlist.append('{}x {}'.format(count, item.name))
|
||||||
|
else:
|
||||||
|
addonlist.append(item.name)
|
||||||
|
return addonlist
|
||||||
|
|
||||||
|
|
||||||
class Renderer:
|
class Renderer:
|
||||||
|
|
||||||
def __init__(self, event, layout, background_file):
|
def __init__(self, event, layout, background_file):
|
||||||
@@ -861,22 +888,37 @@ class Renderer:
|
|||||||
image_file = None
|
image_file = None
|
||||||
|
|
||||||
if image_file:
|
if image_file:
|
||||||
ir = ThumbnailingImageReader(image_file)
|
|
||||||
try:
|
try:
|
||||||
|
ir = ThumbnailingImageReader(image_file)
|
||||||
ir.resize(float(o['width']) * mm, float(o['height']) * mm, 300)
|
ir.resize(float(o['width']) * mm, float(o['height']) * mm, 300)
|
||||||
|
canvas.drawImage(
|
||||||
|
image=ir,
|
||||||
|
x=float(o['left']) * mm,
|
||||||
|
y=float(o['bottom']) * mm,
|
||||||
|
width=float(o['width']) * mm,
|
||||||
|
height=float(o['height']) * mm,
|
||||||
|
preserveAspectRatio=True,
|
||||||
|
anchor='c', # centered in frame
|
||||||
|
mask='auto'
|
||||||
|
)
|
||||||
|
if isinstance(image_file, FieldFile):
|
||||||
|
# ThumbnailingImageReader "closes" the file, so it's no use to use the same file pointer
|
||||||
|
# in case we need it again. For FieldFile, fortunately, there is an easy way to make the file
|
||||||
|
# refresh itself when it is used next.
|
||||||
|
del image_file.file
|
||||||
except:
|
except:
|
||||||
logger.exception("Can not resize image")
|
logger.exception("Can not load or resize image")
|
||||||
pass
|
canvas.saveState()
|
||||||
canvas.drawImage(
|
canvas.setFillColorRGB(.8, .8, .8, alpha=1)
|
||||||
image=ir,
|
canvas.rect(
|
||||||
x=float(o['left']) * mm,
|
x=float(o['left']) * mm,
|
||||||
y=float(o['bottom']) * mm,
|
y=float(o['bottom']) * mm,
|
||||||
width=float(o['width']) * mm,
|
width=float(o['width']) * mm,
|
||||||
height=float(o['height']) * mm,
|
height=float(o['height']) * mm,
|
||||||
preserveAspectRatio=True,
|
stroke=0,
|
||||||
anchor='c', # centered in frame
|
fill=1,
|
||||||
mask='auto'
|
)
|
||||||
)
|
canvas.restoreState()
|
||||||
else:
|
else:
|
||||||
canvas.saveState()
|
canvas.saveState()
|
||||||
canvas.setFillColorRGB(.8, .8, .8, alpha=1)
|
canvas.setFillColorRGB(.8, .8, .8, alpha=1)
|
||||||
@@ -930,7 +972,7 @@ class Renderer:
|
|||||||
|
|
||||||
# reportlab does not support unicode combination characters
|
# reportlab does not support unicode combination characters
|
||||||
# It's important we do this before we use ArabicReshaper
|
# It's important we do this before we use ArabicReshaper
|
||||||
text = unicodedata.normalize("NFKC", text)
|
text = unicodedata.normalize("NFC", text)
|
||||||
|
|
||||||
# reportlab does not support RTL, ligature-heavy scripts like Arabic. Therefore, we use ArabicReshaper
|
# reportlab does not support RTL, ligature-heavy scripts like Arabic. Therefore, we use ArabicReshaper
|
||||||
# to resolve all ligatures and python-bidi to switch RTL texts.
|
# to resolve all ligatures and python-bidi to switch RTL texts.
|
||||||
@@ -983,7 +1025,10 @@ class Renderer:
|
|||||||
elif o['type'] == "poweredby":
|
elif o['type'] == "poweredby":
|
||||||
self._draw_poweredby(canvas, op, o)
|
self._draw_poweredby(canvas, op, o)
|
||||||
if self.bg_pdf:
|
if self.bg_pdf:
|
||||||
page_size = (self.bg_pdf.pages[0].mediabox[2], self.bg_pdf.pages[0].mediabox[3])
|
page_size = (
|
||||||
|
self.bg_pdf.pages[0].mediabox[2] - self.bg_pdf.pages[0].mediabox[0],
|
||||||
|
self.bg_pdf.pages[0].mediabox[3] - self.bg_pdf.pages[0].mediabox[1]
|
||||||
|
)
|
||||||
if self.bg_pdf.pages[0].get('/Rotate') in (90, 270):
|
if self.bg_pdf.pages[0].get('/Rotate') in (90, 270):
|
||||||
# swap dimensions due to pdf being rotated
|
# swap dimensions due to pdf being rotated
|
||||||
page_size = page_size[::-1]
|
page_size = page_size[::-1]
|
||||||
@@ -1011,14 +1056,12 @@ class Renderer:
|
|||||||
with open(os.path.join(d, 'out.pdf'), 'rb') as f:
|
with open(os.path.join(d, 'out.pdf'), 'rb') as f:
|
||||||
return BytesIO(f.read())
|
return BytesIO(f.read())
|
||||||
else:
|
else:
|
||||||
from pypdf import PdfReader, PdfWriter, Transformation
|
|
||||||
from pypdf.generic import RectangleObject
|
|
||||||
buffer.seek(0)
|
buffer.seek(0)
|
||||||
new_pdf = PdfReader(buffer)
|
new_pdf = PdfReader(buffer)
|
||||||
output = PdfWriter()
|
output = PdfWriter()
|
||||||
|
|
||||||
for i, page in enumerate(new_pdf.pages):
|
for i, page in enumerate(new_pdf.pages):
|
||||||
bg_page = copy.copy(self.bg_pdf.pages[i])
|
bg_page = copy.deepcopy(self.bg_pdf.pages[i])
|
||||||
bg_rotation = bg_page.get('/Rotate')
|
bg_rotation = bg_page.get('/Rotate')
|
||||||
if bg_rotation:
|
if bg_rotation:
|
||||||
# /Rotate is clockwise, transformation.rotate is counter-clockwise
|
# /Rotate is clockwise, transformation.rotate is counter-clockwise
|
||||||
@@ -1055,6 +1098,56 @@ class Renderer:
|
|||||||
return outbuffer
|
return outbuffer
|
||||||
|
|
||||||
|
|
||||||
|
def merge_background(fg_pdf, bg_pdf, out_file, compress):
|
||||||
|
if settings.PDFTK:
|
||||||
|
with tempfile.TemporaryDirectory() as d:
|
||||||
|
fg_filename = os.path.join(d, 'fg.pdf')
|
||||||
|
bg_filename = os.path.join(d, 'bg.pdf')
|
||||||
|
fg_pdf.write(fg_filename)
|
||||||
|
bg_pdf.write(bg_filename)
|
||||||
|
pdftk_cmd = [
|
||||||
|
settings.PDFTK,
|
||||||
|
fg_filename,
|
||||||
|
'multibackground',
|
||||||
|
bg_filename,
|
||||||
|
'output',
|
||||||
|
'-',
|
||||||
|
]
|
||||||
|
if compress:
|
||||||
|
pdftk_cmd.append('compress')
|
||||||
|
subprocess.run(pdftk_cmd, check=True, stdout=out_file)
|
||||||
|
else:
|
||||||
|
output = PdfWriter()
|
||||||
|
for i, page in enumerate(fg_pdf.pages):
|
||||||
|
bg_page = copy.deepcopy(bg_pdf.pages[i])
|
||||||
|
bg_rotation = bg_page.get('/Rotate')
|
||||||
|
if bg_rotation:
|
||||||
|
# /Rotate is clockwise, transformation.rotate is counter-clockwise
|
||||||
|
t = Transformation().rotate(bg_rotation)
|
||||||
|
w = float(page.mediabox.getWidth())
|
||||||
|
h = float(page.mediabox.getHeight())
|
||||||
|
if bg_rotation in (90, 270):
|
||||||
|
# offset due to rotation base
|
||||||
|
if bg_rotation == 90:
|
||||||
|
t = t.translate(h, 0)
|
||||||
|
else:
|
||||||
|
t = t.translate(0, w)
|
||||||
|
# rotate mediabox as well
|
||||||
|
page.mediabox = RectangleObject((
|
||||||
|
page.mediabox.left.as_numeric(),
|
||||||
|
page.mediabox.bottom.as_numeric(),
|
||||||
|
page.mediabox.top.as_numeric(),
|
||||||
|
page.mediabox.right.as_numeric(),
|
||||||
|
))
|
||||||
|
page.trimbox = page.mediabox
|
||||||
|
elif bg_rotation == 180:
|
||||||
|
t = t.translate(w, h)
|
||||||
|
page.add_transformation(t)
|
||||||
|
bg_page.merge_page(page)
|
||||||
|
output.add_page(bg_page)
|
||||||
|
output.write(out_file)
|
||||||
|
|
||||||
|
|
||||||
@deconstructible
|
@deconstructible
|
||||||
class PdfLayoutValidator:
|
class PdfLayoutValidator:
|
||||||
def __call__(self, value):
|
def __call__(self, value):
|
||||||
|
|||||||
@@ -22,8 +22,8 @@
|
|||||||
import datetime
|
import datetime
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
from typing import Union
|
from typing import Union
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
import pytz
|
|
||||||
from dateutil import parser
|
from dateutil import parser
|
||||||
from django import forms
|
from django import forms
|
||||||
from django.core.exceptions import ValidationError
|
from django.core.exceptions import ValidationError
|
||||||
@@ -67,7 +67,7 @@ class RelativeDateWrapper:
|
|||||||
if self.data.minutes_before is not None:
|
if self.data.minutes_before is not None:
|
||||||
raise ValueError('A minute-based relative datetime can not be used as a date')
|
raise ValueError('A minute-based relative datetime can not be used as a date')
|
||||||
|
|
||||||
tz = pytz.timezone(event.settings.timezone)
|
tz = ZoneInfo(event.settings.timezone)
|
||||||
if isinstance(event, SubEvent):
|
if isinstance(event, SubEvent):
|
||||||
base_date = (
|
base_date = (
|
||||||
getattr(event, self.data.base_date_name)
|
getattr(event, self.data.base_date_name)
|
||||||
@@ -86,7 +86,7 @@ class RelativeDateWrapper:
|
|||||||
if isinstance(self.data, (datetime.datetime, datetime.date)):
|
if isinstance(self.data, (datetime.datetime, datetime.date)):
|
||||||
return self.data
|
return self.data
|
||||||
else:
|
else:
|
||||||
tz = pytz.timezone(event.settings.timezone)
|
tz = ZoneInfo(event.settings.timezone)
|
||||||
if isinstance(event, SubEvent):
|
if isinstance(event, SubEvent):
|
||||||
base_date = (
|
base_date = (
|
||||||
getattr(event, self.data.base_date_name)
|
getattr(event, self.data.base_date_name)
|
||||||
@@ -99,8 +99,7 @@ class RelativeDateWrapper:
|
|||||||
if self.data.minutes_before is not None:
|
if self.data.minutes_before is not None:
|
||||||
return base_date.astimezone(tz) - datetime.timedelta(minutes=self.data.minutes_before)
|
return base_date.astimezone(tz) - datetime.timedelta(minutes=self.data.minutes_before)
|
||||||
else:
|
else:
|
||||||
oldoffset = base_date.astimezone(tz).utcoffset()
|
new_date = (base_date.astimezone(tz) - datetime.timedelta(days=self.data.days_before)).astimezone(tz)
|
||||||
new_date = base_date.astimezone(tz) - datetime.timedelta(days=self.data.days_before)
|
|
||||||
if self.data.time:
|
if self.data.time:
|
||||||
new_date = new_date.replace(
|
new_date = new_date.replace(
|
||||||
hour=self.data.time.hour,
|
hour=self.data.time.hour,
|
||||||
@@ -108,8 +107,6 @@ class RelativeDateWrapper:
|
|||||||
second=self.data.time.second
|
second=self.data.time.second
|
||||||
)
|
)
|
||||||
new_date = new_date.astimezone(tz)
|
new_date = new_date.astimezone(tz)
|
||||||
new_offset = new_date.utcoffset()
|
|
||||||
new_date += oldoffset - new_offset
|
|
||||||
return new_date
|
return new_date
|
||||||
|
|
||||||
def to_string(self) -> str:
|
def to_string(self) -> str:
|
||||||
|
|||||||
@@ -141,9 +141,10 @@ error_messages = {
|
|||||||
'price_not_a_number': gettext_lazy('The entered price is not a number.'),
|
'price_not_a_number': gettext_lazy('The entered price is not a number.'),
|
||||||
'price_too_high': gettext_lazy('The entered price is to high.'),
|
'price_too_high': gettext_lazy('The entered price is to high.'),
|
||||||
'voucher_invalid': gettext_lazy('This voucher code is not known in our database.'),
|
'voucher_invalid': gettext_lazy('This voucher code is not known in our database.'),
|
||||||
'voucher_min_usages': gettext_lazy(
|
'voucher_min_usages': ngettext_lazy(
|
||||||
'The voucher code "%(voucher)s" can only be used if you select at least %(number)s '
|
'The voucher code "%(voucher)s" can only be used if you select at least %(number)s matching products.',
|
||||||
'matching products.'
|
'The voucher code "%(voucher)s" can only be used if you select at least %(number)s matching products.',
|
||||||
|
'number'
|
||||||
),
|
),
|
||||||
'voucher_min_usages_removed': ngettext_lazy(
|
'voucher_min_usages_removed': ngettext_lazy(
|
||||||
'The voucher code "%(voucher)s" can only be used if you select at least %(number)s matching products. '
|
'The voucher code "%(voucher)s" can only be used if you select at least %(number)s matching products. '
|
||||||
@@ -317,6 +318,9 @@ class CartManager:
|
|||||||
def _delete_out_of_timeframe(self):
|
def _delete_out_of_timeframe(self):
|
||||||
err = None
|
err = None
|
||||||
for cp in self.positions:
|
for cp in self.positions:
|
||||||
|
if not cp.pk:
|
||||||
|
continue
|
||||||
|
|
||||||
if cp.subevent and cp.subevent.presale_start and self.now_dt < cp.subevent.presale_start:
|
if cp.subevent and cp.subevent.presale_start and self.now_dt < cp.subevent.presale_start:
|
||||||
err = error_messages['some_subevent_not_started']
|
err = error_messages['some_subevent_not_started']
|
||||||
cp.addons.all().delete()
|
cp.addons.all().delete()
|
||||||
@@ -1074,6 +1078,7 @@ class CartManager:
|
|||||||
quotas_ok = _get_quota_availability(self._quota_diff, self.now_dt)
|
quotas_ok = _get_quota_availability(self._quota_diff, self.now_dt)
|
||||||
err = None
|
err = None
|
||||||
new_cart_positions = []
|
new_cart_positions = []
|
||||||
|
deleted_positions = set()
|
||||||
|
|
||||||
err = err or self._check_min_max_per_product()
|
err = err or self._check_min_max_per_product()
|
||||||
|
|
||||||
@@ -1085,7 +1090,10 @@ class CartManager:
|
|||||||
if op.position.expires > self.now_dt:
|
if op.position.expires > self.now_dt:
|
||||||
for q in op.position.quotas:
|
for q in op.position.quotas:
|
||||||
quotas_ok[q] += 1
|
quotas_ok[q] += 1
|
||||||
op.position.addons.all().delete()
|
addons = op.position.addons.all()
|
||||||
|
deleted_positions |= {a.pk for a in addons}
|
||||||
|
addons.delete()
|
||||||
|
deleted_positions.add(op.position.pk)
|
||||||
op.position.delete()
|
op.position.delete()
|
||||||
|
|
||||||
elif isinstance(op, (self.AddOperation, self.ExtendOperation)):
|
elif isinstance(op, (self.AddOperation, self.ExtendOperation)):
|
||||||
@@ -1235,20 +1243,28 @@ class CartManager:
|
|||||||
if op.seat and not op.seat.is_available(ignore_cart=op.position, sales_channel=self._sales_channel,
|
if op.seat and not op.seat.is_available(ignore_cart=op.position, sales_channel=self._sales_channel,
|
||||||
ignore_voucher_id=op.position.voucher_id):
|
ignore_voucher_id=op.position.voucher_id):
|
||||||
err = err or error_messages['seat_unavailable']
|
err = err or error_messages['seat_unavailable']
|
||||||
op.position.addons.all().delete()
|
|
||||||
|
addons = op.position.addons.all()
|
||||||
|
deleted_positions |= {a.pk for a in addons}
|
||||||
|
deleted_positions.add(op.position.pk)
|
||||||
|
addons.delete()
|
||||||
op.position.delete()
|
op.position.delete()
|
||||||
elif available_count == 1:
|
elif available_count == 1:
|
||||||
op.position.expires = self._expiry
|
op.position.expires = self._expiry
|
||||||
op.position.listed_price = op.listed_price
|
op.position.listed_price = op.listed_price
|
||||||
op.position.price_after_voucher = op.price_after_voucher
|
op.position.price_after_voucher = op.price_after_voucher
|
||||||
# op.position.price will be updated by recompute_final_prices_and_taxes()
|
# op.position.price will be updated by recompute_final_prices_and_taxes()
|
||||||
try:
|
if op.position.pk not in deleted_positions:
|
||||||
op.position.save(force_update=True, update_fields=['expires', 'listed_price', 'price_after_voucher'])
|
try:
|
||||||
except DatabaseError:
|
op.position.save(force_update=True, update_fields=['expires', 'listed_price', 'price_after_voucher'])
|
||||||
# Best effort... The position might have been deleted in the meantime!
|
except DatabaseError:
|
||||||
pass
|
# Best effort... The position might have been deleted in the meantime!
|
||||||
|
pass
|
||||||
elif available_count == 0:
|
elif available_count == 0:
|
||||||
op.position.addons.all().delete()
|
addons = op.position.addons.all()
|
||||||
|
deleted_positions |= {a.pk for a in addons}
|
||||||
|
deleted_positions.add(op.position.pk)
|
||||||
|
addons.delete()
|
||||||
op.position.delete()
|
op.position.delete()
|
||||||
else:
|
else:
|
||||||
raise AssertionError("ExtendOperation cannot affect more than one item")
|
raise AssertionError("ExtendOperation cannot affect more than one item")
|
||||||
|
|||||||
@@ -32,12 +32,12 @@
|
|||||||
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
# License for the specific language governing permissions and limitations under the License.
|
# License for the specific language governing permissions and limitations under the License.
|
||||||
import os
|
import os
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta, timezone
|
||||||
from functools import partial, reduce
|
from functools import partial, reduce
|
||||||
|
|
||||||
import dateutil
|
import dateutil
|
||||||
import dateutil.parser
|
import dateutil.parser
|
||||||
import pytz
|
from dateutil.tz import datetime_exists
|
||||||
from django.core.files import File
|
from django.core.files import File
|
||||||
from django.db import IntegrityError, transaction
|
from django.db import IntegrityError, transaction
|
||||||
from django.db.models import (
|
from django.db.models import (
|
||||||
@@ -53,7 +53,8 @@ from django.utils.translation import gettext as _
|
|||||||
from django_scopes import scope, scopes_disabled
|
from django_scopes import scope, scopes_disabled
|
||||||
|
|
||||||
from pretix.base.models import (
|
from pretix.base.models import (
|
||||||
Checkin, CheckinList, Device, Order, OrderPosition, QuestionOption,
|
Checkin, CheckinList, Device, Event, ItemVariation, Order, OrderPosition,
|
||||||
|
QuestionOption,
|
||||||
)
|
)
|
||||||
from pretix.base.signals import checkin_created, order_placed, periodic_task
|
from pretix.base.signals import checkin_created, order_placed, periodic_task
|
||||||
from pretix.helpers import OF_SELF
|
from pretix.helpers import OF_SELF
|
||||||
@@ -65,12 +66,13 @@ from pretix.helpers.jsonlogic_query import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _build_time(t=None, value=None, ev=None):
|
def _build_time(t=None, value=None, ev=None, now_dt=None):
|
||||||
|
now_dt = now_dt or now()
|
||||||
if t == "custom":
|
if t == "custom":
|
||||||
return dateutil.parser.parse(value)
|
return dateutil.parser.parse(value)
|
||||||
elif t == "customtime":
|
elif t == "customtime":
|
||||||
parsed = dateutil.parser.parse(value)
|
parsed = dateutil.parser.parse(value)
|
||||||
return now().astimezone(ev.timezone).replace(
|
return now_dt.astimezone(ev.timezone).replace(
|
||||||
hour=parsed.hour,
|
hour=parsed.hour,
|
||||||
minute=parsed.minute,
|
minute=parsed.minute,
|
||||||
second=parsed.second,
|
second=parsed.second,
|
||||||
@@ -84,7 +86,42 @@ def _build_time(t=None, value=None, ev=None):
|
|||||||
return ev.date_admission or ev.date_from
|
return ev.date_admission or ev.date_from
|
||||||
|
|
||||||
|
|
||||||
def _logic_explain(rules, ev, rule_data):
|
def _logic_annotate_for_graphic_explain(rules, ev, rule_data, now_dt):
|
||||||
|
logic_environment = _get_logic_environment(ev, now_dt)
|
||||||
|
event = ev if isinstance(ev, Event) else ev.event
|
||||||
|
|
||||||
|
def _evaluate_inners(r):
|
||||||
|
if not isinstance(r, dict):
|
||||||
|
return r
|
||||||
|
operator = list(r.keys())[0]
|
||||||
|
values = r[operator]
|
||||||
|
if operator in ("and", "or"):
|
||||||
|
return {operator: [_evaluate_inners(v) for v in values]}
|
||||||
|
result = logic_environment.apply(r, rule_data)
|
||||||
|
return {**r, '__result': result}
|
||||||
|
|
||||||
|
def _add_var_values(r):
|
||||||
|
if not isinstance(r, dict):
|
||||||
|
return r
|
||||||
|
operator = [k for k in r.keys() if not k.startswith("__")][0]
|
||||||
|
values = r[operator]
|
||||||
|
if operator == "var":
|
||||||
|
var = values[0] if isinstance(values, list) else values
|
||||||
|
val = rule_data[var]
|
||||||
|
if var == "product":
|
||||||
|
val = str(event.items.get(pk=val))
|
||||||
|
elif var == "variation":
|
||||||
|
val = str(ItemVariation.objects.get(item__event=event, pk=val))
|
||||||
|
elif isinstance(val, datetime):
|
||||||
|
val = date_format(val.astimezone(ev.timezone), "SHORT_DATETIME_FORMAT")
|
||||||
|
return {"var": var, "__result": val}
|
||||||
|
else:
|
||||||
|
return {**r, operator: [_add_var_values(v) for v in values]}
|
||||||
|
|
||||||
|
return _add_var_values(_evaluate_inners(rules))
|
||||||
|
|
||||||
|
|
||||||
|
def _logic_explain(rules, ev, rule_data, now_dt=None):
|
||||||
"""
|
"""
|
||||||
Explains when the logic denied the check-in. Only works for a denied check-in.
|
Explains when the logic denied the check-in. Only works for a denied check-in.
|
||||||
|
|
||||||
@@ -114,7 +151,8 @@ def _logic_explain(rules, ev, rule_data):
|
|||||||
Additionally, we favor a "close failure". Therefore, in the above example, we'd show "You can only
|
Additionally, we favor a "close failure". Therefore, in the above example, we'd show "You can only
|
||||||
get in before 17:00". In the middle of the night it would switch to "You can only get in after 09:00".
|
get in before 17:00". In the middle of the night it would switch to "You can only get in after 09:00".
|
||||||
"""
|
"""
|
||||||
logic_environment = _get_logic_environment(ev)
|
now_dt = now_dt or now()
|
||||||
|
logic_environment = _get_logic_environment(ev, now_dt)
|
||||||
_var_values = {'False': False, 'True': True}
|
_var_values = {'False': False, 'True': True}
|
||||||
_var_explanations = {}
|
_var_explanations = {}
|
||||||
|
|
||||||
@@ -191,16 +229,16 @@ def _logic_explain(rules, ev, rule_data):
|
|||||||
for vname, data in _var_explanations.items():
|
for vname, data in _var_explanations.items():
|
||||||
var, operator, rhs = data['var'], data['operator'], data['rhs']
|
var, operator, rhs = data['var'], data['operator'], data['rhs']
|
||||||
if var == 'now':
|
if var == 'now':
|
||||||
compare_to = _build_time(*rhs[0]['buildTime'], ev=ev).astimezone(ev.timezone)
|
compare_to = _build_time(*rhs[0]['buildTime'], ev=ev, now_dt=now_dt).astimezone(ev.timezone)
|
||||||
tolerance = timedelta(minutes=float(rhs[1])) if len(rhs) > 1 and rhs[1] else timedelta(seconds=0)
|
tolerance = timedelta(minutes=float(rhs[1])) if len(rhs) > 1 and rhs[1] else timedelta(seconds=0)
|
||||||
if operator == 'isBefore':
|
if operator == 'isBefore':
|
||||||
compare_to += tolerance
|
compare_to += tolerance
|
||||||
else:
|
else:
|
||||||
compare_to -= tolerance
|
compare_to -= tolerance
|
||||||
|
|
||||||
var_weights[vname] = (200, abs(now() - compare_to).total_seconds())
|
var_weights[vname] = (200, abs(now_dt - compare_to).total_seconds())
|
||||||
|
|
||||||
if abs(now() - compare_to) < timedelta(hours=12):
|
if abs(now_dt - compare_to) < timedelta(hours=12):
|
||||||
compare_to_text = date_format(compare_to, 'TIME_FORMAT')
|
compare_to_text = date_format(compare_to, 'TIME_FORMAT')
|
||||||
else:
|
else:
|
||||||
compare_to_text = date_format(compare_to, 'SHORT_DATETIME_FORMAT')
|
compare_to_text = date_format(compare_to, 'SHORT_DATETIME_FORMAT')
|
||||||
@@ -299,7 +337,7 @@ def _logic_explain(rules, ev, rule_data):
|
|||||||
return ', '.join(var_texts[v] for v in paths_with_min_weight[0] if not _var_values[v])
|
return ', '.join(var_texts[v] for v in paths_with_min_weight[0] if not _var_values[v])
|
||||||
|
|
||||||
|
|
||||||
def _get_logic_environment(ev):
|
def _get_logic_environment(ev, now_dt):
|
||||||
# Every change to our supported JSON logic must be done
|
# Every change to our supported JSON logic must be done
|
||||||
# * in pretix.base.services.checkin
|
# * in pretix.base.services.checkin
|
||||||
# * in pretix.base.models.checkin
|
# * in pretix.base.models.checkin
|
||||||
@@ -316,7 +354,7 @@ def _get_logic_environment(ev):
|
|||||||
logic.add_operation('objectList', lambda *objs: list(objs))
|
logic.add_operation('objectList', lambda *objs: list(objs))
|
||||||
logic.add_operation('lookup', lambda model, pk, str: int(pk))
|
logic.add_operation('lookup', lambda model, pk, str: int(pk))
|
||||||
logic.add_operation('inList', lambda a, b: a in b)
|
logic.add_operation('inList', lambda a, b: a in b)
|
||||||
logic.add_operation('buildTime', partial(_build_time, ev=ev))
|
logic.add_operation('buildTime', partial(_build_time, ev=ev, now_dt=now_dt))
|
||||||
logic.add_operation('isBefore', is_before)
|
logic.add_operation('isBefore', is_before)
|
||||||
logic.add_operation('isAfter', lambda t1, t2, tol=None: is_before(t2, t1, tol))
|
logic.add_operation('isAfter', lambda t1, t2, tol=None: is_before(t2, t1, tol))
|
||||||
return logic
|
return logic
|
||||||
@@ -357,7 +395,7 @@ class LazyRuleVars:
|
|||||||
@cached_property
|
@cached_property
|
||||||
def entries_today(self):
|
def entries_today(self):
|
||||||
tz = self._clist.event.timezone
|
tz = self._clist.event.timezone
|
||||||
midnight = now().astimezone(tz).replace(hour=0, minute=0, second=0, microsecond=0)
|
midnight = self._dt.astimezone(tz).replace(hour=0, minute=0, second=0, microsecond=0)
|
||||||
return self._position.checkins.filter(type=Checkin.TYPE_ENTRY, list=self._clist, datetime__gte=midnight).count()
|
return self._position.checkins.filter(type=Checkin.TYPE_ENTRY, list=self._clist, datetime__gte=midnight).count()
|
||||||
|
|
||||||
@cached_property
|
@cached_property
|
||||||
@@ -378,7 +416,7 @@ class LazyRuleVars:
|
|||||||
# between platforms (None<1 is true on some, but not all), we rather choose something that is at least
|
# between platforms (None<1 is true on some, but not all), we rather choose something that is at least
|
||||||
# consistent.
|
# consistent.
|
||||||
return -1
|
return -1
|
||||||
return (now() - last_entry.datetime).total_seconds() // 60
|
return (self._dt - last_entry.datetime).total_seconds() // 60
|
||||||
|
|
||||||
@cached_property
|
@cached_property
|
||||||
def minutes_since_first_entry(self):
|
def minutes_since_first_entry(self):
|
||||||
@@ -390,7 +428,7 @@ class LazyRuleVars:
|
|||||||
# between platforms (None<1 is true on some, but not all), we rather choose something that is at least
|
# between platforms (None<1 is true on some, but not all), we rather choose something that is at least
|
||||||
# consistent.
|
# consistent.
|
||||||
return -1
|
return -1
|
||||||
return (now() - last_entry.datetime).total_seconds() // 60
|
return (self._dt - last_entry.datetime).total_seconds() // 60
|
||||||
|
|
||||||
|
|
||||||
class SQLLogic:
|
class SQLLogic:
|
||||||
@@ -439,7 +477,7 @@ class SQLLogic:
|
|||||||
|
|
||||||
if operator == 'buildTime':
|
if operator == 'buildTime':
|
||||||
if values[0] == "custom":
|
if values[0] == "custom":
|
||||||
return Value(dateutil.parser.parse(values[1]).astimezone(pytz.UTC))
|
return Value(dateutil.parser.parse(values[1]).astimezone(timezone.utc))
|
||||||
elif values[0] == "customtime":
|
elif values[0] == "customtime":
|
||||||
parsed = dateutil.parser.parse(values[1])
|
parsed = dateutil.parser.parse(values[1])
|
||||||
return Value(now().astimezone(self.list.event.timezone).replace(
|
return Value(now().astimezone(self.list.event.timezone).replace(
|
||||||
@@ -447,7 +485,7 @@ class SQLLogic:
|
|||||||
minute=parsed.minute,
|
minute=parsed.minute,
|
||||||
second=parsed.second,
|
second=parsed.second,
|
||||||
microsecond=parsed.microsecond,
|
microsecond=parsed.microsecond,
|
||||||
).astimezone(pytz.UTC))
|
).astimezone(timezone.utc))
|
||||||
elif values[0] == 'date_from':
|
elif values[0] == 'date_from':
|
||||||
return Coalesce(
|
return Coalesce(
|
||||||
F('subevent__date_from'),
|
F('subevent__date_from'),
|
||||||
@@ -475,7 +513,7 @@ class SQLLogic:
|
|||||||
return int(values[1])
|
return int(values[1])
|
||||||
elif operator == 'var':
|
elif operator == 'var':
|
||||||
if values[0] == 'now':
|
if values[0] == 'now':
|
||||||
return Value(now().astimezone(pytz.UTC))
|
return Value(now().astimezone(timezone.utc))
|
||||||
elif values[0] == 'now_isoweekday':
|
elif values[0] == 'now_isoweekday':
|
||||||
return Value(now().astimezone(self.list.event.timezone).isoweekday())
|
return Value(now().astimezone(self.list.event.timezone).isoweekday())
|
||||||
elif values[0] == 'product':
|
elif values[0] == 'product':
|
||||||
@@ -693,7 +731,7 @@ def _save_answers(op, answers, given_answers):
|
|||||||
def perform_checkin(op: OrderPosition, clist: CheckinList, given_answers: dict, force=False,
|
def perform_checkin(op: OrderPosition, clist: CheckinList, given_answers: dict, force=False,
|
||||||
ignore_unpaid=False, nonce=None, datetime=None, questions_supported=True,
|
ignore_unpaid=False, nonce=None, datetime=None, questions_supported=True,
|
||||||
user=None, auth=None, canceled_supported=False, type=Checkin.TYPE_ENTRY,
|
user=None, auth=None, canceled_supported=False, type=Checkin.TYPE_ENTRY,
|
||||||
raw_barcode=None, raw_source_type=None, from_revoked_secret=False):
|
raw_barcode=None, raw_source_type=None, from_revoked_secret=False, simulate=False):
|
||||||
"""
|
"""
|
||||||
Create a checkin for this particular order position and check-in list. Fails with CheckInError if the check in is
|
Create a checkin for this particular order position and check-in list. Fails with CheckInError if the check in is
|
||||||
not valid at this time.
|
not valid at this time.
|
||||||
@@ -707,6 +745,7 @@ def perform_checkin(op: OrderPosition, clist: CheckinList, given_answers: dict,
|
|||||||
:param questions_supported: When set to False, questions are ignored
|
:param questions_supported: When set to False, questions are ignored
|
||||||
:param nonce: A random nonce to prevent race conditions.
|
:param nonce: A random nonce to prevent race conditions.
|
||||||
:param datetime: The datetime of the checkin, defaults to now.
|
:param datetime: The datetime of the checkin, defaults to now.
|
||||||
|
:param simulate: If true, the check-in is not saved.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# !!!!!!!!!
|
# !!!!!!!!!
|
||||||
@@ -734,7 +773,7 @@ def perform_checkin(op: OrderPosition, clist: CheckinList, given_answers: dict,
|
|||||||
'blocked'
|
'blocked'
|
||||||
)
|
)
|
||||||
|
|
||||||
if type != Checkin.TYPE_EXIT and op.valid_from and op.valid_from > now():
|
if type != Checkin.TYPE_EXIT and op.valid_from and op.valid_from > dt:
|
||||||
if force:
|
if force:
|
||||||
force_used = True
|
force_used = True
|
||||||
else:
|
else:
|
||||||
@@ -748,7 +787,7 @@ def perform_checkin(op: OrderPosition, clist: CheckinList, given_answers: dict,
|
|||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
if type != Checkin.TYPE_EXIT and op.valid_until and op.valid_until < now():
|
if type != Checkin.TYPE_EXIT and op.valid_until and op.valid_until < dt:
|
||||||
if force:
|
if force:
|
||||||
force_used = True
|
force_used = True
|
||||||
else:
|
else:
|
||||||
@@ -773,7 +812,8 @@ def perform_checkin(op: OrderPosition, clist: CheckinList, given_answers: dict,
|
|||||||
if q not in given_answers and q not in answers:
|
if q not in given_answers and q not in answers:
|
||||||
require_answers.append(q)
|
require_answers.append(q)
|
||||||
|
|
||||||
_save_answers(op, answers, given_answers)
|
if not simulate:
|
||||||
|
_save_answers(op, answers, given_answers)
|
||||||
|
|
||||||
with transaction.atomic():
|
with transaction.atomic():
|
||||||
# Lock order positions, if it is an entry. We don't need it for exits, as a race condition wouldn't be problematic
|
# Lock order positions, if it is an entry. We don't need it for exits, as a race condition wouldn't be problematic
|
||||||
@@ -821,7 +861,7 @@ def perform_checkin(op: OrderPosition, clist: CheckinList, given_answers: dict,
|
|||||||
|
|
||||||
if type == Checkin.TYPE_ENTRY and clist.rules:
|
if type == Checkin.TYPE_ENTRY and clist.rules:
|
||||||
rule_data = LazyRuleVars(op, clist, dt)
|
rule_data = LazyRuleVars(op, clist, dt)
|
||||||
logic = _get_logic_environment(op.subevent or clist.event)
|
logic = _get_logic_environment(op.subevent or clist.event, now_dt=dt)
|
||||||
if not logic.apply(clist.rules, rule_data):
|
if not logic.apply(clist.rules, rule_data):
|
||||||
if force:
|
if force:
|
||||||
force_used = True
|
force_used = True
|
||||||
@@ -846,7 +886,7 @@ def perform_checkin(op: OrderPosition, clist: CheckinList, given_answers: dict,
|
|||||||
if isinstance(auth, Device):
|
if isinstance(auth, Device):
|
||||||
device = auth
|
device = auth
|
||||||
|
|
||||||
last_cis = list(op.checkins.order_by('-datetime').filter(list=clist).only('type', 'nonce'))
|
last_cis = list(op.checkins.order_by('-datetime').filter(list=clist).only('type', 'nonce', 'position_id'))
|
||||||
entry_allowed = (
|
entry_allowed = (
|
||||||
type == Checkin.TYPE_EXIT or
|
type == Checkin.TYPE_EXIT or
|
||||||
clist.allow_multiple_entries or
|
clist.allow_multiple_entries or
|
||||||
@@ -859,30 +899,33 @@ def perform_checkin(op: OrderPosition, clist: CheckinList, given_answers: dict,
|
|||||||
return
|
return
|
||||||
|
|
||||||
if entry_allowed or force:
|
if entry_allowed or force:
|
||||||
ci = Checkin.objects.create(
|
if simulate:
|
||||||
position=op,
|
return True
|
||||||
type=type,
|
else:
|
||||||
list=clist,
|
ci = Checkin.objects.create(
|
||||||
datetime=dt,
|
position=op,
|
||||||
device=device,
|
type=type,
|
||||||
gate=device.gate if device else None,
|
list=clist,
|
||||||
nonce=nonce,
|
datetime=dt,
|
||||||
forced=force and (not entry_allowed or from_revoked_secret or force_used),
|
device=device,
|
||||||
force_sent=force,
|
gate=device.gate if device else None,
|
||||||
raw_barcode=raw_barcode,
|
nonce=nonce,
|
||||||
raw_source_type=raw_source_type,
|
forced=force and (not entry_allowed or from_revoked_secret or force_used),
|
||||||
)
|
force_sent=force,
|
||||||
op.order.log_action('pretix.event.checkin', data={
|
raw_barcode=raw_barcode,
|
||||||
'position': op.id,
|
raw_source_type=raw_source_type,
|
||||||
'positionid': op.positionid,
|
)
|
||||||
'first': True,
|
op.order.log_action('pretix.event.checkin', data={
|
||||||
'forced': force or op.order.status != Order.STATUS_PAID,
|
'position': op.id,
|
||||||
'datetime': dt,
|
'positionid': op.positionid,
|
||||||
'type': type,
|
'first': True,
|
||||||
'answers': {k.pk: str(v) for k, v in given_answers.items()},
|
'forced': force or op.order.status != Order.STATUS_PAID,
|
||||||
'list': clist.pk
|
'datetime': dt,
|
||||||
}, user=user, auth=auth)
|
'type': type,
|
||||||
checkin_created.send(op.order.event, checkin=ci)
|
'answers': {k.pk: str(v) for k, v in given_answers.items()},
|
||||||
|
'list': clist.pk
|
||||||
|
}, user=user, auth=auth)
|
||||||
|
checkin_created.send(op.order.event, checkin=ci)
|
||||||
else:
|
else:
|
||||||
raise CheckInError(
|
raise CheckInError(
|
||||||
_('This ticket has already been redeemed.'),
|
_('This ticket has already been redeemed.'),
|
||||||
@@ -926,14 +969,11 @@ def process_exit_all(sender, **kwargs):
|
|||||||
if cl.event.settings.get(f'autocheckin_dst_hack_{cl.pk}'): # move time back if yesterday was DST switch
|
if cl.event.settings.get(f'autocheckin_dst_hack_{cl.pk}'): # move time back if yesterday was DST switch
|
||||||
d -= timedelta(hours=1)
|
d -= timedelta(hours=1)
|
||||||
cl.event.settings.delete(f'autocheckin_dst_hack_{cl.pk}')
|
cl.event.settings.delete(f'autocheckin_dst_hack_{cl.pk}')
|
||||||
try:
|
|
||||||
cl.exit_all_at = make_aware(datetime.combine(d.date() + timedelta(days=1), d.time()), cl.event.timezone)
|
cl.exit_all_at = make_aware(datetime.combine(d.date() + timedelta(days=1), d.time().replace(fold=1)), cl.event.timezone)
|
||||||
except pytz.exceptions.AmbiguousTimeError:
|
if not datetime_exists(cl.exit_all_at):
|
||||||
cl.exit_all_at = make_aware(datetime.combine(d.date() + timedelta(days=1), d.time()), cl.event.timezone,
|
|
||||||
is_dst=False)
|
|
||||||
except pytz.exceptions.NonExistentTimeError:
|
|
||||||
cl.event.settings.set(f'autocheckin_dst_hack_{cl.pk}', True)
|
cl.event.settings.set(f'autocheckin_dst_hack_{cl.pk}', True)
|
||||||
d += timedelta(hours=1)
|
d += timedelta(hours=1)
|
||||||
cl.exit_all_at = make_aware(datetime.combine(d.date() + timedelta(days=1), d.time()), cl.event.timezone)
|
cl.exit_all_at = make_aware(datetime.combine(d.date() + timedelta(days=1), d.time().replace(fold=1)), cl.event.timezone)
|
||||||
# AmbiguousTimeError shouldn't be possible since d.time() includes fold=0
|
# AmbiguousTimeError shouldn't be possible since d.time() includes fold=0
|
||||||
cl.save(update_fields=['exit_all_at'])
|
cl.save(update_fields=['exit_all_at'])
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user