forked from CGM_Public/pretix_original
Compare commits
505 Commits
v4.19.0.po ... quota-cach
49  .github/workflows/build.yml  vendored  Normal file
@@ -0,0 +1,49 @@
name: Build

on:
  push:
    branches: [ master ]
    paths-ignore:
      - 'doc/**'
      - 'src/pretix/locale/**'
  pull_request:
    branches: [ master ]
    paths-ignore:
      - 'doc/**'
      - 'src/pretix/locale/**'

permissions:
  contents: read  # to fetch code (actions/checkout)

env:
  FORCE_COLOR: 1

jobs:
  test:
    runs-on: ubuntu-22.04
    name: Packaging
    strategy:
      matrix:
        python-version: ["3.11"]
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - uses: actions/cache@v1
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install system dependencies
        run: sudo apt update && sudo apt install gettext unzip
      - name: Install Python dependencies
        run: pip3 install -U setuptools build pip check-manifest
      - name: Run check-manifest
        run: check-manifest
      - name: Run build
        run: python -m build
      - name: Check files
        run: unzip -l dist/pretix*whl | grep node_modules || exit 1
4  .github/workflows/style.yml  vendored
@@ -35,7 +35,7 @@ jobs:
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install Dependencies
-       run: pip3 install -e ".[dev]" mysqlclient psycopg2-binary
+       run: pip3 install -e ".[dev]" psycopg2-binary
      - name: Run isort
        run: isort -c .
        working-directory: ./src
@@ -55,7 +55,7 @@ jobs:
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install Dependencies
-       run: pip3 install -e ".[dev]" mysqlclient psycopg2-binary
+       run: pip3 install -e ".[dev]" psycopg2-binary
      - name: Run flake8
        run: flake8 .
        working-directory: ./src
18  .github/workflows/tests.yml  vendored
@@ -25,27 +25,17 @@ jobs:
    strategy:
      matrix:
        python-version: ["3.9", "3.10", "3.11"]
-       database: [sqlite, postgres, mysql]
+       database: [sqlite, postgres]
        exclude:
-         - database: mysql
-           python-version: "3.9"
-         - database: mysql
-           python-version: "3.11"
          - database: sqlite
            python-version: "3.9"
          - database: sqlite
            python-version: "3.10"
    steps:
      - uses: actions/checkout@v2
-     - uses: getong/mariadb-action@v1.1
-       with:
-         mariadb version: '10.10'
-         mysql database: 'pretix'
-         mysql root password: ''
-       if: matrix.database == 'mysql'
      - uses: harmon758/postgresql-action@v1
        with:
-         postgresql version: '11'
+         postgresql version: '15'
          postgresql db: 'pretix'
          postgresql user: 'postgres'
          postgresql password: 'postgres'
@@ -61,9 +51,9 @@ jobs:
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install system dependencies
-       run: sudo apt update && sudo apt install gettext mariadb-client
+       run: sudo apt update && sudo apt install gettext
      - name: Install Python dependencies
-       run: pip3 install --ignore-requires-python -e ".[dev]" mysqlclient psycopg2-binary # We ignore that flake8 needs newer python as we don't run flake8 during tests
+       run: pip3 install --ignore-requires-python -e ".[dev]" psycopg2-binary # We ignore that flake8 needs newer python as we don't run flake8 during tests
      - name: Run checks
        run: python manage.py check
        working-directory: ./src
@@ -22,14 +22,15 @@ pypi:
    - source env/bin/activate
    - pip install -U pip wheel setuptools check-manifest twine
    - XDG_CACHE_HOME=/cache pip3 install -e ".[dev]"
    - cd src
    - python setup.py sdist
    - pip install dist/pretix-*.tar.gz
    - python -m pretix migrate
    - python -m pretix check
    - check-manifest
    - cd src
    - make npminstall
    - python setup.py sdist bdist_wheel
    - cd ..
    - check-manifest
    - python -m build
    - twine check dist/*
    - twine upload dist/*
  tags:
28  Dockerfile
@@ -3,7 +3,6 @@ FROM python:3.11-bullseye
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
            build-essential \
-           libmariadb-dev \
            gettext \
            git \
            libffi-dev \
@@ -34,25 +33,12 @@ RUN apt-get update && \
    mkdir /static && \
    mkdir /etc/supervisord && \
    curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - && \
-   apt-get install -y nodejs && \
-   curl -qL https://www.npmjs.com/install.sh | sh
+   apt-get install -y nodejs

ENV LC_ALL=C.UTF-8 \
    DJANGO_SETTINGS_MODULE=production_settings

-# To copy only the requirements files needed to install from PIP
-COPY src/setup.py /pretix/src/setup.py
-RUN pip3 install -U \
-       pip \
-       setuptools \
-       wheel && \
-   cd /pretix/src && \
-   PRETIX_DOCKER_BUILD=TRUE pip3 install \
-       -e ".[memcached,mysql]" \
-       gunicorn django-extensions ipython && \
-   rm -rf ~/.cache/pip

COPY deployment/docker/pretix.bash /usr/local/bin/pretix
COPY deployment/docker/supervisord /etc/supervisord
COPY deployment/docker/supervisord.all.conf /etc/supervisord.all.conf
@@ -60,9 +46,19 @@ COPY deployment/docker/supervisord.web.conf /etc/supervisord.web.conf
COPY deployment/docker/nginx.conf /etc/nginx/nginx.conf
COPY deployment/docker/nginx-max-body-size.conf /etc/nginx/conf.d/nginx-max-body-size.conf
COPY deployment/docker/production_settings.py /pretix/src/production_settings.py
+COPY pyproject.toml /pretix/pyproject.toml
+COPY _build /pretix/_build
COPY src /pretix/src

-RUN cd /pretix/src && python setup.py install
+RUN pip3 install -U \
+       pip \
+       setuptools \
+       wheel && \
+   cd /pretix && \
+   PRETIX_DOCKER_BUILD=TRUE pip3 install \
+       -e ".[memcached]" \
+       gunicorn django-extensions ipython && \
+   rm -rf ~/.cache/pip

RUN chmod +x /usr/local/bin/pretix && \
    rm /etc/nginx/sites-enabled/default && \
15  MANIFEST.in
@@ -1,5 +1,7 @@
include LICENSE
include README.rst
+include src/Makefile
+include _build/backend.py
global-include *.proto
recursive-include src/pretix/static *
recursive-include src/pretix/static.dist *
@@ -31,3 +33,16 @@ recursive-include src/pretix/plugins/returnurl/templates *
recursive-include src/pretix/plugins/returnurl/static *
recursive-include src/pretix/plugins/webcheckin/templates *
recursive-include src/pretix/plugins/webcheckin/static *
+recursive-include src *.cfg
+recursive-include src *.csv
+recursive-include src *.gitkeep
+recursive-include src *.jpg
+recursive-include src *.json
+recursive-include src *.py
+recursive-include src *.svg
+recursive-include src *.txt
+recursive-include src Makefile
+
+recursive-exclude doc *
+recursive-exclude deployment *
+recursive-exclude res *
12  _build/backend.py  Normal file
@@ -0,0 +1,12 @@
import tomli
from setuptools import build_meta as _orig
from setuptools.build_meta import *


def get_requires_for_build_wheel(config_settings=None):
    with open("pyproject.toml", "rb") as f:
        p = tomli.load(f)
    return [
        *_orig.get_requires_for_build_wheel(config_settings),
        *p['project']['dependencies']
    ]
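A quick way to see what the new in-tree build backend does is to call its hook directly. This is a hedged sketch, not part of the diff: it assumes you run it from the repository root, and putting ``_build`` on ``sys.path`` is an assumption about how ``pyproject.toml``'s ``backend-path`` is meant to expose the module::

    import sys

    sys.path.insert(0, "_build")  # assumption: mirrors pyproject.toml's backend-path
    import backend  # the module added above

    # Returns setuptools' usual wheel-build requirements plus the project's own
    # runtime dependencies read from pyproject.toml, so that building the wheel
    # (which compiles static assets) can import Django and friends.
    print(backend.get_requires_for_build_wheel())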
@@ -1,4 +1,4 @@
from pretix.settings import *

LOGGING['handlers']['mail_admins']['include_html'] = True
-STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
+STORAGES["staticfiles"]["BACKEND"] = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
@@ -152,25 +152,26 @@ Example::
    password=abcd
    host=localhost
    port=3306
    sslmode=require
    sslrootcert=/etc/pretix/postgresql-ca.crt
    sslcert=/etc/pretix/postgresql-client-crt.crt
    sslkey=/etc/pretix/postgresql-client-key.key

``backend``
-   One of ``mysql`` (deprecated), ``sqlite3`` and ``postgresql``.
+   One of ``sqlite3`` and ``postgresql``.
    Default: ``sqlite3``.

-   If you use MySQL, be sure to create your database using
-   ``CREATE DATABASE <dbname> CHARACTER SET utf8;``. Otherwise, Unicode
-   support will not properly work.

``name``
    The database's name. Default: ``db.sqlite3``.

``user``, ``password``, ``host``, ``port``
    Connection details for the database connection. Empty by default.

-``galera``
-   (Deprecated) Indicates if the database backend is a MySQL/MariaDB Galera cluster and
-   turns on some optimizations/special case handlers. Default: ``False``
+``sslmode``, ``sslrootcert``
+   Connection TLS details for the PostgreSQL database connection. Possible values of ``sslmode`` are ``disable``, ``allow``, ``prefer``, ``require``, ``verify-ca``, and ``verify-full``. ``sslrootcert`` should be the accessible path of the ca certificate. Both values are empty by default.
+
+``sslcert``, ``sslkey``
+   Connection mTLS details for the PostgreSQL database connection. It's also necessary to specify ``sslmode`` and ``sslrootcert`` parameters, please check the correct values from the TLS part. ``sslcert`` should be the accessible path of the client certificate. ``sslkey`` should be the accessible path of the client key. All values are empty by default.

.. _`config-replica`:

Database replica settings
@@ -332,6 +333,10 @@ to speed up various operations::
        ["sentinel_host_3", 26379]
    ]
    password=password
    ssl_cert_reqs=required
    ssl_ca_certs=/etc/pretix/redis-ca.pem
    ssl_keyfile=/etc/pretix/redis-client-crt.pem
    ssl_certfile=/etc/pretix/redis-client-key.key

``location``
    The location of redis, as a URL of the form ``redis://[:password]@localhost:6379/0``
@@ -355,6 +360,22 @@ to speed up various operations::
    If your redis setup doesn't require a password or you already specified it in the location you can omit this option.
    If this is set it will be passed to redis as the connection option PASSWORD.

``ssl_cert_reqs``
    If this is set it will be passed to redis as the connection option ``SSL_CERT_REQS``.
    Possible values are ``none``, ``optional``, and ``required``.

``ssl_ca_certs``
    If your redis setup doesn't require TLS you can omit this option.
    If this is set it will be passed to redis as the connection option ``SSL_CA_CERTS``. Possible value is the ca path.

``ssl_keyfile``
    If your redis setup doesn't require mTLS you can omit this option.
    If this is set it will be passed to redis as the connection option ``SSL_KEYFILE``. Possible value is the keyfile path.

``ssl_certfile``
    If your redis setup doesn't require mTLS you can omit this option.
    If this is set it will be passed to redis as the connection option ``SSL_CERTFILE``. Possible value is the certfile path.

If redis is not configured, pretix will store sessions and locks in the database. If memcached
is configured, memcached will be used for caching instead of redis.

@@ -404,6 +425,8 @@ The two ``transport_options`` entries can be omitted in most cases.
If they are present they need to be a valid JSON dictionary.
For possible entries in that dictionary see the `Celery documentation`_.

It is possible the use Redis with TLS/mTLS for the broker or the backend. To do so, it is necessary to specify the TLS identifier ``rediss``, the ssl mode ``ssl_cert_reqs`` and optionally specify the CA (TLS) ``ssl_ca_certs``, cert ``ssl_certfile`` and key ``ssl_keyfile`` (mTLS) path as encoded string. the following uri describes the format and possible parameters ``rediss://0.0.0.0:6379/1?ssl_cert_reqs=required&ssl_ca_certs=%2Fetc%2Fpretix%2Fredis-ca.pem&ssl_certfile=%2Fetc%2Fpretix%2Fredis-client-crt.pem&ssl_keyfile=%2Fetc%2Fpretix%2Fredis-client-key.key``

To use redis with sentinels set the broker or backend to ``sentinel://sentinel_host_1:26379;sentinel_host_2:26379/0``
and the respective transport_options to ``{"master_name":"mymaster"}``.
If your redis instances behind the sentinel have a password use ``sentinel://:my_password@sentinel_host_1:26379;sentinel_host_2:26379/0``.
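The percent-encoded certificate paths in the ``rediss://`` example above can be produced with Python's standard library; a small illustrative sketch (the host, database number and file paths are placeholders, not values taken from this repository)::

    from urllib.parse import quote

    # safe="" forces "/" to be encoded as %2F, as in the documented URL
    ca = quote("/etc/pretix/redis-ca.pem", safe="")
    cert = quote("/etc/pretix/redis-client-crt.pem", safe="")
    key = quote("/etc/pretix/redis-client-key.key", safe="")

    broker = (
        "rediss://0.0.0.0:6379/1"
        f"?ssl_cert_reqs=required&ssl_ca_certs={ca}"
        f"&ssl_certfile={cert}&ssl_keyfile={key}"
    )
    print(broker)  # same format as the URL shown above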
@@ -26,7 +26,7 @@ installation guides):
* `Docker`_
* A SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for
* A HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections
-* A `PostgreSQL`_ 9.6+ database server
+* A `PostgreSQL`_ 11+ database server
* A `redis`_ server

We also recommend that you use a firewall, although this is not a pretix-specific recommendation. If you're new to
@@ -321,11 +321,11 @@ workers, e.g. ``docker run … taskworker -Q notifications --concurrency 32``.

.. _Docker: https://docs.docker.com/engine/installation/linux/debian/
-.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-16-04
+.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-22-04
.. _nginx: https://botleg.com/stories/https-with-lets-encrypt-and-nginx/
.. _Let's Encrypt: https://letsencrypt.org/
.. _pretix.eu: https://pretix.eu/
-.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-20-04
+.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-22-04
.. _redis: https://blog.programster.org/debian-8-install-redis-server/
.. _ufw: https://en.wikipedia.org/wiki/Uncomplicated_Firewall
.. _redis website: https://redis.io/topics/security
@@ -68,7 +68,7 @@ generated key and installs the plugin from the URL we told you::
    mkdir -p /etc/ssh && \
    ssh-keyscan -t rsa -p 10022 code.rami.io >> /root/.ssh/known_hosts && \
    echo StrictHostKeyChecking=no >> /root/.ssh/config && \
-   DJANGO_SETTINGS_MODULE=pretix.settings pip3 install -U "git+ssh://git@code.rami.io:10022/pretix/pretix-slack.git@stable#egg=pretix-slack" && \
+   DJANGO_SETTINGS_MODULE= pip3 install -U "git+ssh://git@code.rami.io:10022/pretix/pretix-slack.git@stable#egg=pretix-slack" && \
    cd /pretix/src && \
    sudo -u pretixuser make production
USER pretixuser
@@ -16,14 +16,11 @@ To use pretix, you will need the following things:
* A periodic task runner, e.g. ``cron``

* **A database**. This needs to be a SQL-based that is supported by Django. We highly recommend to either
-  go for **PostgreSQL** or **MySQL/MariaDB**. If you do not provide one, pretix will run on SQLite, which is useful
+  go for **PostgreSQL**. If you do not provide one, pretix will run on SQLite, which is useful
  for evaluation and development purposes.

  .. warning:: Do not ever use SQLite in production. It will break.

-  .. warning:: We recommend **PostgreSQL**. If you go for MySQL, make sure you run **MySQL 5.7 or newer** or
-               **MariaDB 10.2.7 or newer**.

* A **reverse proxy**. pretix needs to deliver some static content to your users (e.g. CSS, images, ...). While pretix
  is capable of doing this, having this handled by a proper web server like **nginx** or **Apache** will be much
  faster. Also, you need a proxying web server in front to provide SSL encryption.
@@ -21,6 +21,7 @@ Requirements
Please set up the following systems beforehand, we'll not explain them here in detail (but see these links for external
installation guides):

+* A python 3.9+ installation
* A SMTP server to send out mails, e.g. `Postfix`_ on your machine or some third-party server you have credentials for
* A HTTP reverse proxy, e.g. `nginx`_ or Apache to allow HTTPS connections
* A `PostgreSQL`_ 11+ database server
@@ -323,11 +324,11 @@ Then, proceed like after any plugin installation::
    (venv)$ python -m pretix updatestyles
    # systemctl restart pretix-web pretix-worker

-.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-16-04
+.. _Postfix: https://www.digitalocean.com/community/tutorials/how-to-install-and-configure-postfix-as-a-send-only-smtp-server-on-ubuntu-22-04
.. _nginx: https://botleg.com/stories/https-with-lets-encrypt-and-nginx/
.. _Let's Encrypt: https://letsencrypt.org/
.. _pretix.eu: https://pretix.eu/
-.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-20-04
+.. _PostgreSQL: https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-22-04
.. _redis: https://blog.programster.org/debian-8-install-redis-server/
.. _ufw: https://en.wikipedia.org/wiki/Uncomplicated_Firewall
.. _strong encryption settings: https://mozilla.github.io/server-side-tls/ssl-config-generator/
@@ -3,11 +3,11 @@
Migrating from MySQL/MariaDB to PostgreSQL
==========================================

-Our recommended database for all production installations is PostgreSQL. Support for MySQL/MariaDB will be removed in
-pretix 5.0.
+Our recommended database for all production installations is PostgreSQL. Support for MySQL/MariaDB has been removed
+in newer pretix releases.

In order to follow this guide, your pretix installation needs to be a version that fully supports MySQL/MariaDB. If you
-already upgraded to pretix 5.0, downgrade back to the last 4.x release using ``pip``.
+already upgraded to pretix 5.0 or later, downgrade back to the last 4.x release using ``pip``.

.. note:: We have tested this guide carefully, but we can't assume any liability for its correctness. The data loss
   risk should be low as long as pretix is not running while you do the migration. If you are a pretix Enterprise
@@ -51,7 +51,7 @@ For our standard docker installation, create the database and user like this::
    # sudo -u postgres createuser -P pretix
    # sudo -u postgres createdb -O pretix pretix

-Make sure that your database listens on the network. If PostgreSQL on the same same host as docker, but not inside a docker container, we recommend that you just listen on the Docker interface by changing the following line in ``/etc/postgresql/<version>/main/postgresql.conf``::
+Make sure that your database listens on the network. If PostgreSQL on the same same host as docker, but not inside a docker container, we recommend that you listen on the Docker interface by changing the following line in ``/etc/postgresql/<version>/main/postgresql.conf``::

    listen_addresses = 'localhost,172.17.0.1'

@@ -153,4 +153,89 @@ And you're done! After you've verified everything has been copied correctly, you

.. note:: Don't forget to update your backup process to back up your PostgreSQL database instead of your MySQL database now.

Troubleshooting
---------------

Peer authentication failed
""""""""""""""""""""""""""

Sometimes you might see an error message like this::

    django.db.utils.OperationalError: connection to server on socket "/var/run/postgresql/.s.PGSQL.5432" failed: FATAL: Peer authentication failed for user "pretix"

It is important to understand that PostgreSQL by default offers two types of authentication:

- **Peer authentication**, which works automatically based on the Linux user you are working as. This requires that
  the connection is made through a local socket (empty ``host=`` in ``pretix.cfg``) and the name of the PostgreSQL user
  and the Linux user are identical.

  - Typically, you might run into this error if you accidentally execute ``python -m pretix`` commands as root instead
    of the ``pretix`` user.

- **Password authentication**, which requires a username and password and works over network connections. To force
  password authentication instead of peer authentication, set ``host=127.0.0.1`` in ``pretix.cfg``.

  - You can alter the password on a PostgreSQL shell using the command ``ALTER USER pretix WITH PASSWORD '***';``.
    When creating a user with the ``createuser`` command, pass option ``-P`` to set a new password.

  - Even with password authentication, PostgreSQL by default only allows local connections. To allow remote connections,
    you need to adjust both the ``listen_address`` configuration parameter as well as the ``pg_hba.conf`` file (see above
    for an example with the docker networking setup).

Database error: relation does not exist
"""""""""""""""""""""""""""""""""""""""

If you see an error like this::

    2023-04-17T19:20:47.744023Z ERROR Database error 42P01: relation "public.pretix_foobar" does not exist
    QUERY: ALTER TABLE public.pretix_foobar DROP CONSTRAINT IF EXISTS pretix_foobar_order_id_57e2cb41_fk_pretixbas CASCADE;
    2023-04-17T19:20:47.744023Z FATAL Failed to create the schema, see above.

The reason is most likely that in the past, you installed a pretix plugin that you no longer have installed. However,
the database still contains tables of that plugin. If you want to keep the data, reinstall the plugin and re-run the
``migrate`` step from above. If you want to get rid of the data, manually drop the table mentioned in the error message
from your MySQL database::

    # mysql -u root pretix
    mysql> DROP TABLE pretix_foobar;

Then, retry. You might see a new error message with a new table, which you can handle the same way.

Cleaning out a failed attempt
"""""""""""""""""""""""""""""

You might want to clean your PostgreSQL database before you try again after an error. You can do so like this::

    # sudo -u postgres psql pretix
    pretix=# DROP SCHEMA public CASCADE;
    pretix=# CREATE SCHEMA public;
    pretix=# ALTER SCHEMA public OWNER TO pretix;

``pgloader`` crashes with heap exhaustion error
"""""""""""""""""""""""""""""""""""""""""""""""

On some larger databases, we've seen ``pgloader`` crash with error messages similar to this::

    Heap exhausted during garbage collection: 16 bytes available, 48 requested.

Or this::

    2021-01-04T21:31:17.367000Z ERROR A SB-KERNEL::HEAP-EXHAUSTED-ERROR condition without bindings for heap statistics. (If
    you did not expect to see this message, please report it.
    2021-01-04T21:31:17.382000Z ERROR The value
    NIL
    is not of type
    NUMBER
    when binding SB-KERNEL::X

The ``pgloader`` version distributed for Debian and Ubuntu is compiled with the ``SBCL`` compiler. If compiled with
``CCL``, these bugs go away. Unfortunately, it is pretty hard to compile ``pgloader`` manually with ``CCL``. If you
run into this, we therefore recommend using the docker container provided by the ``pgloader`` maintainers::

    sudo docker run --rm -v /tmp:/tmp --network host -it dimitri/pgloader:ccl.latest pgloader /tmp/pretix.load

As peer authentication is not available from inside the container, this requires you to use password-based authentication
in PostgreSQL (see above).


.. _PostgreSQL repositories: https://wiki.postgresql.org/wiki/Apt
@@ -32,10 +32,16 @@ as well as the type of underlying hardware. Example:
      "token": "kpp4jn8g2ynzonp6",
      "hardware_brand": "Samsung",
      "hardware_model": "Galaxy S",
      "os_name": "Android",
      "os_version": "2.3.6",
      "software_brand": "pretixdroid",
-     "software_version": "4.0.0"
+     "software_version": "4.0.0",
+     "rsa_pubkey": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqh…nswIDAQAB\n-----END PUBLIC KEY-----\n"
    }

The ``rsa_pubkey`` is optional any only required for certain fatures such as working with reusable
media and NFC cryptography.

Every initialization token can only be used once. On success, you will receive a response containing
information on your device as well as your API token:

@@ -98,6 +104,8 @@ following endpoint:
    {
      "hardware_brand": "Samsung",
      "hardware_model": "Galaxy S",
      "os_name": "Android",
      "os_version": "2.3.6",
      "software_brand": "pretixdroid",
      "software_version": "4.1.0",
      "info": {"arbitrary": "data"}
@@ -133,9 +141,29 @@ The response will look like this:
          "id": 3,
          "name": "South entrance"
        }
-     }
+     },
+     "server": {
+       "version": {
+         "pretix": "3.6.0.dev0",
+         "pretix_numeric": 30060001000
+       }
+     },
+     "medium_key_sets": [
+       {
+         "public_id": 3456349,
+         "organizer": "foo",
+         "active": true,
+         "media_type": "nfc_mf0aes",
+         "uid_key": "base64-encoded-encrypted-key",
+         "diversification_key": "base64-encoded-encrypted-key",
+       }
+     ]
    }

``"medium_key_sets`` will always be empty if you did not set an ``rsa_pubkey``.
The individual keys in the key sets are encrypted with the device's ``rsa_pubkey``
using ``RSA/ECB/PKCS1Padding``.

Creating a new API key
----------------------

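For device developers, a hedged illustration of consuming ``medium_key_sets``: the documentation above states that each key is encrypted with the device's ``rsa_pubkey`` using ``RSA/ECB/PKCS1Padding``, which corresponds to PKCS#1 v1.5 padding. The library choice, file name, and the ``key_set`` variable are assumptions for the sketch, not part of the diff::

    import base64

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import padding

    # Load the RSA private key whose public half was uploaded as rsa_pubkey.
    with open("device_private_key.pem", "rb") as f:
        private_key = serialization.load_pem_private_key(f.read(), password=None)

    # key_set is one entry of the "medium_key_sets" list from the response above.
    encrypted = base64.b64decode(key_set["uid_key"])
    uid_key = private_key.decrypt(encrypted, padding.PKCS1v15())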
@@ -24,6 +24,8 @@ all_events boolean Whether this de
limit_events  list  List of event slugs this device has access to
hardware_brand  string  Device hardware manufacturer (read-only)
hardware_model  string  Device hardware model (read-only)
+os_name  string  Device operating system name (read-only)
+os_version  string  Device operating system version (read-only)
software_brand  string  Device software product (read-only)
software_version  string  Device software version (read-only)
created  datetime  Creation time
@@ -76,6 +78,8 @@ Device endpoints
      "security_profile": "full",
      "hardware_brand": "Zebra",
      "hardware_model": "TC25",
+     "os_name": "Android",
+     "os_version": "8.1.0",
      "software_brand": "pretixSCAN",
      "software_version": "1.5.1"
    }
@@ -123,6 +127,8 @@ Device endpoints
      "security_profile": "full",
      "hardware_brand": "Zebra",
      "hardware_model": "TC25",
+     "os_name": "Android",
+     "os_version": "8.1.0",
      "software_brand": "pretixSCAN",
      "software_version": "1.5.1"
    }
@@ -173,6 +179,8 @@ Device endpoints
      "initialized": null
      "hardware_brand": null,
      "hardware_model": null,
+     "os_name": null,
+     "os_version": null,
      "software_brand": null,
      "software_version": null
    }
@@ -70,6 +70,11 @@ Endpoints

   The ``public_url`` field has been added.

+.. versionchanged:: 5.0
+
+   The ``date_from_before``, ``date_from_after``, ``date_to_before``, and ``date_to_after`` query parameters have been
+   added.

.. http:get:: /api/v1/organizers/(organizer)/events/

   Returns a list of all events within a given organizer the authenticated user/token has access to.
@@ -141,6 +146,10 @@ Endpoints
   :query has_subevents: If set to ``true``/``false``, only events with a matching value of ``has_subevents`` are returned.
   :query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned. Event series are never (always) returned.
   :query is_past: If set to ``true`` (``false``), only events that are over are (not) returned. Event series are never (always) returned.
+  :query date_from_after: If set to a date and time, only events that start at or after the given time are returned.
+  :query date_from_before: If set to a date and time, only events that start at or before the given time are returned.
+  :query date_to_after: If set to a date and time, only events that have an end date and end at or after the given time are returned.
+  :query date_to_before: If set to a date and time, only events that have an end date and end at or before the given time are returned.
   :query ends_after: If set to a date and time, only events that happen during of after the given time are returned. Event series are never returned.
   :query string ordering: Manually set the ordering of results. Valid fields to be used are ``date_from`` and
                           ``slug``. Keep in mind that ``date_from`` of event series does not really tell you anything.
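A hedged usage sketch for the new date filters: the endpoint and query parameters come from the documentation above, while the organizer slug and the API token value are placeholders::

    import requests

    r = requests.get(
        "https://pretix.eu/api/v1/organizers/bigevents/events/",
        headers={"Authorization": "Token <your-api-token>"},
        params={
            "date_from_after": "2023-01-01T00:00:00Z",
            "date_from_before": "2023-12-31T23:59:59Z",
        },
    )
    r.raise_for_status()
    # List endpoints return a paginated object with a "results" list.
    for event in r.json()["results"]:
        print(event["slug"])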
@@ -111,7 +111,7 @@ Listing available exporters
      "input_parameters": [
        {
          "name": "events",
-         "required": true
+         "required": false
        },
        {
          "name": "_format",
@@ -20,6 +20,12 @@ currency string Currency of the
testmode  boolean  Whether this is a test gift card
expires  datetime  Expiry date (or ``null``)
conditions  string  Special terms and conditions for this card (or ``null``)
+owner_ticket  integer  Internal ID of an order position that is the "owner" of
+                       this gift card and can view all transactions. When setting
+                       this field, you can also give the ``secret`` of an order
+                       position.
+issuer  string  Organizer slug of the organizer who created this gift
+                card and is responsible for it.
===================================== ========================== =======================================================

The gift card transaction resource contains the following public fields:

@@ -35,8 +41,17 @@ value money (string) Transaction amo
event  string  Event slug, if the gift card was used in the web shop (or ``null``)
order  string  Order code, if the gift card was used in the web shop (or ``null``)
text  string  Custom text of the transaction (or ``null``)
+info  object  Additional data about the transaction (or ``null``)
+acceptor  string  Organizer slug of the organizer who created this transaction
+                  (can be ``null`` for all transactions performed before
+                  this field was added.)
===================================== ========================== =======================================================

.. versionchanged:: 4.20

   The ``owner_ticket`` and ``issuer`` attributes of the gift card and the ``info`` and ``acceptor`` attributes of the
   gift card transaction resource have been added.

Endpoints
---------

@@ -72,6 +87,8 @@ Endpoints
        "testmode": false,
        "expires": null,
        "conditions": null,
+       "owner_ticket": null,
+       "issuer": "bigevents",
        "value": "13.37"
      }
    ]
@@ -81,6 +98,10 @@ Endpoints
   :query string secret: Only show gift cards with the given secret.
   :query boolean testmode: Filter for gift cards that are (not) in test mode.
   :query boolean include_accepted: Also show gift cards issued by other organizers that are accepted by this organizer.
+  :query string expand: If you pass ``"owner_ticket"``, the respective field will be shown as a nested value instead of just an ID.
+                        The nested objects are identical to the respective resources, except that the ``owner_ticket``
+                        will have an attribute of the format ``"order": {"code": "ABCDE", "event": "eventslug"}`` to make
+                        matching easier. The parameter can be given multiple times.
   :param organizer: The ``slug`` field of the organizer to fetch
   :statuscode 200: no error
   :statuscode 401: Authentication failure
@@ -113,6 +134,8 @@ Endpoints
      "testmode": false,
      "expires": null,
      "conditions": null,
+     "owner_ticket": null,
+     "issuer": "bigevents",
      "value": "13.37"
    }

@@ -157,10 +180,16 @@ Endpoints
      "currency": "EUR",
      "expires": null,
      "conditions": null,
+     "owner_ticket": null,
+     "issuer": "bigevents",
      "value": "13.37"
    }

   :param organizer: The ``slug`` field of the organizer to create a gift card for
+  :query string expand: If you pass ``"owner_ticket"``, the respective field will be shown as a nested value instead of just an ID.
+                        The nested objects are identical to the respective resources, except that the ``owner_ticket``
+                        will have an attribute of the format ``"order": {"code": "ABCDE", "event": "eventslug"}`` to make
+                        matching easier. The parameter can be given multiple times.
   :statuscode 201: no error
   :statuscode 400: The gift card could not be created due to invalid submitted data.
   :statuscode 401: Authentication failure
@@ -205,6 +234,8 @@ Endpoints
      "currency": "EUR",
      "expires": null,
      "conditions": null,
+     "owner_ticket": null,
+     "issuer": "bigevents",
      "value": "14.00"
    }

@@ -250,6 +281,8 @@ Endpoints
      "testmode": false,
      "expires": null,
      "conditions": null,
+     "owner_ticket": null,
+     "issuer": "bigevents",
      "value": "15.37"
    }

@@ -293,7 +326,11 @@ Endpoints
          "value": "50.00",
          "event": "democon",
          "order": "FXQYW",
-         "text": null
+         "text": null,
+         "acceptor": "bigevents",
+         "info": {
+           "created_by": "plugin1"
+         }
        }
      ]
    }
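A short hedged sketch of the new ``expand`` parameter on the gift card list endpoint; the endpoint path, organizer slug and token value are placeholders based on the documentation above, not confirmed by this diff::

    import requests

    r = requests.get(
        "https://pretix.eu/api/v1/organizers/bigevents/giftcards/",
        headers={"Authorization": "Token <your-api-token>"},
        params={"expand": "owner_ticket"},
    )
    r.raise_for_status()
    for card in r.json()["results"]:
        # With expand, owner_ticket is a nested order-position object (or null)
        # instead of a bare ID.
        print(card["issuer"], card["value"], card["owner_ticket"])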
@@ -18,6 +18,7 @@ at :ref:`plugin-docs`.
   item_variations
   item_bundles
   item_add-ons
+  item_meta_properties
   questions
   question_options
   quotas
211  doc/api/resources/item_meta_properties.rst  Normal file
@@ -0,0 +1,211 @@
Item Meta Properties
====================

Resource description
--------------------

An Item Meta Property is used to include (event internally relevant) meta information with every item (product). This
could be internal categories like booking positions.

The Item Meta Properties resource contains the following public fields:

.. rst-class:: rest-resource-table

===================================== ========================== =======================================================
Field                                 Type                       Description
===================================== ========================== =======================================================
id                                    integer                    Unique ID for this property
name                                  string                     Name of the property
default                               string                     Value of the default option
required                              boolean                    If ``true``, this property will have to be assigned a
                                                                 value in all items of the related event
allowed_values                        list                       List of all permitted values for this property,
                                                                 or ``null`` for no limitation
===================================== ========================== =======================================================

Endpoints
---------

.. http:get:: /api/v1/organizers/(organizer)/events/(event)/item_meta_properties/

   Returns a list of all Item Meta Properties within a given event.

   **Example request**:

   .. sourcecode:: http

      GET /api/v1/organizers/bigevents/events/sampleconf/item_meta_properties/ HTTP/1.1
      Host: pretix.eu
      Accept: application/json, text/javascript

   **Example response**:

   .. sourcecode:: http

      HTTP/1.1 200 OK
      Vary: Accept
      Content-Type: application/json

      {
        "count": 1,
        "next": null,
        "previous": null,
        "results": [
          {
            "id": 1,
            "name": "Color",
            "default": "red",
            "required": true,
            "allowed_values": ["red", "green", "blue"]
          }
        ]
      }

   :param organizer: The ``slug`` field of the organizer
   :param event: The ``slug`` field of the event
   :statuscode 200: no error
   :statuscode 401: Authentication failure
   :statuscode 403: The requested organizer does not exist **or** you have no permission to view this resource.

.. http:get:: /api/v1/organizers/(organizer)/events/(event)/item_meta_properties/(id)/

   Returns information on one property, identified by its id.

   **Example request**:

   .. sourcecode:: http

      GET /api/v1/organizers/bigevents/events/sampleconf/item_meta_properties/1/ HTTP/1.1
      Host: pretix.eu
      Accept: application/json, text/javascript

   **Example response**:

   .. sourcecode:: http

      {
        "id": 1,
        "name": "Color",
        "default": "red",
        "required": true,
        "allowed_values": ["red", "green", "blue"]
      }

   :param organizer: The ``slug`` field of the organizer
   :param event: The ``slug`` field of the event
   :param id: The ``id`` field of the item meta property to retrieve
   :statuscode 200: no error
   :statuscode 401: Authentication failure
   :statuscode 403: The requested organizer does not exist **or** you have no permission to view this resource.

.. http:post:: /api/v1/organizers/(organizer)/events/(event)/item_meta_properties/

   Creates a new item meta property

   **Example request**:

   .. sourcecode:: http

      POST /api/v1/organizers/bigevents/events/sampleconf/item_meta_properties/ HTTP/1.1
      Host: pretix.eu
      Accept: application/json, text/javascript
      Content-Type: application/json

      {
        "name": "ref-code",
        "default": "abcde",
        "required": true,
        "allowed_values": null
      }

   **Example response**:

   .. sourcecode:: http

      {
        "id": 2,
        "name": "ref-code",
        "default": "abcde",
        "required": true,
        "allowed_values": null
      }

   :param organizer: The ``slug`` field of the organizer
   :param event: The ``slug`` field of the event
   :statuscode 201: no error
   :statuscode 400: The item meta property could not be created due to invalid submitted data.
   :statuscode 401: Authentication failure
   :statuscode 403: The requested organizer does not exist **or** you have no permission to create this resource.

.. http:patch:: /api/v1/organizers/(organizer)/events/(event)/item_meta_properties/(id)/

   Update an item meta property. You can also use ``PUT`` instead of ``PATCH``. With ``PUT``, you have to provide
   all fields of the resource, other fields will be reset to default. With ``PATCH``, you only need to provide the
   fields that you want to change.

   You can change all fields of the resource except the ``id`` field.

   **Example request**:

   .. sourcecode:: http

      PATCH /api/v1/organizers/bigevents/events/sampleconf/item_meta_properties/2/ HTTP/1.1
      Host: pretix.eu
      Accept: application/json, text/javascript
      Content-Type: application/json
      Content-Length: 94

      {
        "required": false
      }

   **Example response**:

   .. sourcecode:: http

      HTTP/1.1 200 OK
      Vary: Accept
      Content-Type: application/json

      {
        "id": 2,
        "name": "ref-code",
        "default": "abcde",
        "required": false,
        "allowed_values": []
      }

   :param organizer: The ``slug`` field of the organizer
   :param event: The ``slug`` field of the event
   :param id: The ``id`` field of the item meta property to modify
   :statuscode 200: no error
   :statuscode 400: The property could not be modified due to invalid submitted data
   :statuscode 401: Authentication failure
   :statuscode 403: The requested organizer does not exist **or** you have no permission to change this resource.

.. http:delete:: /api/v1/organizers/(organizer)/events/(event)/item_meta_properties/(id)/

   Delete an item meta property.

   **Example request**:

   .. sourcecode:: http

      DELETE /api/v1/organizers/bigevents/events/sampleconf/item_meta_properties/1/ HTTP/1.1
      Host: pretix.eu
      Accept: application/json, text/javascript

   **Example response**:

   .. sourcecode:: http

      HTTP/1.1 204 No Content
      Vary: Accept

   :param organizer: The ``slug`` field of the organizer
   :param event: The ``slug`` field of the event
   :param id: The ``id`` field of the item meta property to delete
   :statuscode 204: no error
   :statuscode 401: Authentication failure
   :statuscode 403: The requested organizer does not exist **or** you have no permission to delete this resource.
@@ -18,7 +18,8 @@ The reusable medium resource contains the following public fields:
|
||||
Field Type Description
|
||||
===================================== ========================== =======================================================
|
||||
id integer Internal ID of the medium
|
||||
type string Type of medium, e.g. ``"barcode"`` or ``"nfc_uid"``.
|
||||
type string Type of medium, e.g. ``"barcode"``, ``"nfc_uid"`` or ``"nfc_mf0aes"``.
|
||||
organizer string Organizer slug of the organizer who "owns" this medium.
|
||||
identifier string Unique identifier of the medium. The format depends on the ``type``.
|
||||
active boolean Whether this medium may be used.
|
||||
created datetime Date of creation
|
||||
@@ -36,6 +37,7 @@ Existing media types are:
|
||||
|
||||
- ``barcode``
|
||||
- ``nfc_uid``
|
||||
- ``nfc_mf0aes``
|
||||
|
||||
Endpoints
|
||||
---------
|
||||
@@ -67,6 +69,7 @@ Endpoints
|
||||
"results": [
|
||||
{
|
||||
"id": 1,
|
||||
"organizer": "bigevents",
|
||||
"identifier": "ABCDEFGH",
|
||||
"created": "2021-04-06T13:44:22.809377Z",
|
||||
"updated": "2021-04-06T13:44:22.809377Z",
|
||||
@@ -91,11 +94,11 @@ Endpoints
|
||||
:query string updated_since: Only show media updated since a given date.
|
||||
:query integer linked_orderposition: Only show media linked to the given ticket.
|
||||
:query integer linked_giftcard: Only show media linked to the given gift card.
|
||||
:query string expand: If you pass ``"linked_giftcard"``, ``"linked_orderposition"``, oder ``"customer"``, the respective
|
||||
field will be shown as a nested value instead of just an ID. The nested objects are identical to
|
||||
the respective resources, except that the ``linked_orderposition`` will have an attribute of the
|
||||
format ``"order": {"code": "ABCDE", "event": "eventslug"}`` to make matching easier. The parameter
|
||||
can be given multiple times.
|
||||
:query string expand: If you pass ``"linked_giftcard"``, ``"linked_giftcard.owner_ticket"``, ``"linked_orderposition"``,
|
||||
or ``"customer"``, the respective field will be shown as a nested value instead of just an ID.
|
||||
The nested objects are identical to the respective resources, except that order positions
|
||||
will have an attribute of the format ``"order": {"code": "ABCDE", "event": "eventslug"}`` to make
|
||||
matching easier. The parameter can be given multiple times.
|
||||
:param organizer: The ``slug`` field of the organizer to fetch
|
||||
:statuscode 200: no error
|
||||
:statuscode 401: Authentication failure
|
||||
@@ -123,6 +126,7 @@ Endpoints
|
||||
|
||||
{
|
||||
"id": 1,
|
||||
"organizer": "bigevents",
|
||||
"identifier": "ABCDEFGH",
|
||||
"created": "2021-04-06T13:44:22.809377Z",
|
||||
"updated": "2021-04-06T13:44:22.809377Z",
|
||||
@@ -138,11 +142,11 @@ Endpoints
|
||||
|
||||
:param organizer: The ``slug`` field of the organizer to fetch
|
||||
:param id: The ``id`` field of the medium to fetch
|
||||
:query string expand: If you pass ``"linked_giftcard"``, ``"linked_orderposition"``, oder ``"customer"``, the respective
|
||||
field will be shown as a nested value instead of just an ID. The nested objects are identical to
|
||||
the respective resources, except that the ``linked_orderposition`` will have an attribute of the
|
||||
format ``"order": {"code": "ABCDE", "event": "eventslug"}`` to make matching easier. The parameter
|
||||
can be given multiple times.
|
||||
:query string expand: If you pass ``"linked_giftcard"``, ``"linked_giftcard.owner_ticket"``, ``"linked_orderposition"``,
|
||||
or ``"customer"``, the respective field will be shown as a nested value instead of just an ID.
|
||||
The nested objects are identical to the respective resources, except that order positions
|
||||
will have an attribute of the format ``"order": {"code": "ABCDE", "event": "eventslug"}`` to make
|
||||
matching easier. The parameter can be given multiple times.
|
||||
:statuscode 200: no error
|
||||
:statuscode 401: Authentication failure
|
||||
:statuscode 403: The requested organizer does not exist **or** you have no permission to view this resource.
|
||||
@@ -152,6 +156,9 @@ Endpoints
|
||||
Look up a new reusable medium by its identifier. In some cases, this might lead to the automatic creation of a new
|
||||
medium behind the scenes.
|
||||
|
||||
This endpoint, and this endpoint only, might return media from a different organizer if there is a cross-acceptance
|
||||
agreement. In this case, only linked gift cards will be returned, no order position or customer records.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
@@ -176,6 +183,7 @@ Endpoints
|
||||
|
||||
{
|
||||
"id": 1,
|
||||
"organizer": "bigevents",
|
||||
"identifier": "ABCDEFGH",
|
||||
"created": "2021-04-06T13:44:22.809377Z",
|
||||
"updated": "2021-04-06T13:44:22.809377Z",
|
||||
@@ -235,6 +243,7 @@ Endpoints
|
||||
|
||||
{
|
||||
"id": 1,
|
||||
"organizer": "bigevents",
|
||||
"identifier": "ABCDEFGH",
|
||||
"created": "2021-04-06T13:44:22.809377Z",
|
||||
"updated": "2021-04-06T13:44:22.809377Z",
|
||||
@@ -291,6 +300,7 @@ Endpoints
|
||||
|
||||
{
|
||||
"id": 1,
|
||||
"organizer": "bigevents",
|
||||
"identifier": "ABCDEFGH",
|
||||
"created": "2021-04-06T13:44:22.809377Z",
|
||||
"updated": "2021-04-06T13:44:22.809377Z",
|
||||
|
||||
@@ -18,8 +18,15 @@ subject multi-lingual string The subject of
|
||||
template multi-lingual string The body of the email
|
||||
all_products boolean If ``true``, the email is sent to buyers of all products
|
||||
limit_products list of integers List of product IDs, if ``all_products`` is not set
|
||||
include_pending boolean If ``true``, the email is sent to pending orders. If ``false``,
|
||||
[**DEPRECATED**] include_pending boolean If ``true``, the email is sent to pending orders. If ``false``,
|
||||
only paid orders are considered.
|
||||
restrict_to_status list List of order states to restrict recipients to. Valid
|
||||
entries are ``p`` for paid, ``e`` for expired, ``c`` for canceled,
|
||||
``n__pending_approval`` for pending approval,
|
||||
``n__not_pending_approval_and_not_valid_if_pending`` for payment pending,
|
||||
``n__valid_if_pending`` for payment pending but already confirmed,
|
||||
and ``n__pending_overdue`` for pending with payment overdue.
|
||||
The default is ``["p", "n__valid_if_pending"]``.
|
||||
date_is_absolute boolean If ``true``, the email is set at a specific point in time.
|
||||
send_date datetime If ``date_is_absolute`` is set: Date and time to send the email.
|
||||
send_offset_days integer If ``date_is_absolute`` is not set, this is the number of days
|
||||
@@ -37,7 +44,10 @@ send_to string Can be ``"order
|
||||
or ``"both"``.
|
||||
date. Otherwise it is relative to the event start date.
|
||||
===================================== ========================== =======================================================
|
||||
.. versionchanged:: 2023.7
|
||||
|
||||
The ``include_pending`` field has been deprecated.
|
||||
The ``restrict_to_status`` field has been added.
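As a quick orientation, a minimal sketch of a rule payload that uses ``restrict_to_status`` instead of the deprecated ``include_pending`` flag. Only fields from the table above are used; the values are examples, not defaults, and the payload would be sent to the rule-creation endpoint documented below.

.. code-block:: python

    # Sketch of a rule body: send relative to the event date (one day offset,
    # at 18:00), only to paid orders and to pending orders that are already
    # treated as confirmed.
    rule = {
        "subject": {"en": "Your tickets"},
        "template": {"en": "Don't forget your tickets, download them at {url}"},
        "all_products": True,
        "limit_products": [],
        "restrict_to_status": ["p", "n__valid_if_pending"],
        "date_is_absolute": False,
        "send_offset_days": 1,
        "send_offset_time": "18:00",
    }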
|
||||
|
||||
Endpoints
|
||||
---------
|
||||
@@ -74,7 +84,11 @@ Endpoints
|
||||
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
||||
"all_products": true,
|
||||
"limit_products": [],
|
||||
"include_pending": false,
|
||||
"restrict_to_status": [
|
||||
"p",
|
||||
"n__not_pending_approval_and_not_valid_if_pending",
|
||||
"n__valid_if_pending"
|
||||
],
|
||||
"send_date": null,
|
||||
"send_offset_days": 1,
|
||||
"send_offset_time": "18:00",
|
||||
@@ -120,7 +134,11 @@ Endpoints
|
||||
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
||||
"all_products": true,
|
||||
"limit_products": [],
|
||||
"include_pending": false,
|
||||
"restrict_to_status": [
|
||||
"p",
|
||||
"n__not_pending_approval_and_not_valid_if_pending",
|
||||
"n__valid_if_pending"
|
||||
],
|
||||
"send_date": null,
|
||||
"send_offset_days": 1,
|
||||
"send_offset_time": "18:00",
|
||||
@@ -157,7 +175,11 @@ Endpoints
|
||||
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
||||
"all_products": true,
|
||||
"limit_products": [],
|
||||
"include_pending": false,
|
||||
"restrict_to_status": [
|
||||
"p",
|
||||
"n__not_pending_approval_and_not_valid_if_pending",
|
||||
"n__valid_if_pending"
|
||||
],
|
||||
"send_date": null,
|
||||
"send_offset_days": 1,
|
||||
"send_offset_time": "18:00",
|
||||
@@ -182,7 +204,11 @@ Endpoints
|
||||
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
||||
"all_products": true,
|
||||
"limit_products": [],
|
||||
"include_pending": false,
|
||||
"restrict_to_status": [
|
||||
"p",
|
||||
"n__not_pending_approval_and_not_valid_if_pending",
|
||||
"n__valid_if_pending"
|
||||
],
|
||||
"send_date": null,
|
||||
"send_offset_days": 1,
|
||||
"send_offset_time": "18:00",
|
||||
@@ -235,7 +261,11 @@ Endpoints
|
||||
"template": {"en": "Don't forget your tickets, download them at {url}"},
|
||||
"all_products": true,
|
||||
"limit_products": [],
|
||||
"include_pending": false,
|
||||
"restrict_to_status": [
|
||||
"p",
|
||||
"n__not_pending_approval_and_not_valid_if_pending",
|
||||
"n__valid_if_pending"
|
||||
],
|
||||
"send_date": null,
|
||||
"send_offset_days": 1,
|
||||
"send_offset_time": "18:00",
|
||||
|
||||
@@ -63,6 +63,11 @@ last_modified datetime Last modificati
|
||||
|
||||
The ``search`` query parameter has been added to filter sub-events by their name or location in any language.
|
||||
|
||||
.. versionchanged:: 5.0
|
||||
|
||||
The ``date_from_before``, ``date_from_after``, ``date_to_before``, and ``date_to_after`` query parameters have been
|
||||
added.
|
||||
|
||||
Endpoints
|
||||
---------
|
||||
|
||||
@@ -130,6 +135,10 @@ Endpoints
|
||||
:query active: If set to ``true``/``false``, only events with a matching value of ``active`` are returned.
|
||||
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned.
|
||||
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned.
|
||||
:query date_from_after: If set to a date and time, only events that start at or after the given time are returned.
|
||||
:query date_from_before: If set to a date and time, only events that start at or before the given time are returned.
|
||||
:query date_to_after: If set to a date and time, only events that have an end date and end at or after the given time are returned.
|
||||
:query date_to_before: If set to a date and time, only events that have an end date and end at or before the given time are returned.
|
||||
:query ends_after: If set to a date and time, only events that happen during or after the given time are returned.
|
||||
:query search: Only return events matching a given search query.
|
||||
:param organizer: The ``slug`` field of a valid organizer
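A minimal Python sketch of the new date filters; the host, organizer slug, and token are placeholders.

.. code-block:: python

    import requests

    # List only events starting within July 2023.
    resp = requests.get(
        "https://pretix.eu/api/v1/organizers/bigevents/events/",
        headers={"Authorization": "Token YOUR_API_TOKEN"},
        params={
            "date_from_after": "2023-07-01T00:00:00Z",
            "date_from_before": "2023-07-31T23:59:59Z",
        },
    )
    resp.raise_for_status()
    print([event["slug"] for event in resp.json()["results"]])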
|
||||
@@ -458,6 +467,10 @@ Endpoints
|
||||
:query event__live: If set to ``true``/``false``, only events with a matching value of ``live`` on the parent event are returned.
|
||||
:query is_future: If set to ``true`` (``false``), only events that happen currently or in the future are (not) returned.
|
||||
:query is_past: If set to ``true`` (``false``), only events that are over are (not) returned.
|
||||
:query date_from_after: If set to a date and time, only events that start at or after the given time are returned.
|
||||
:query date_from_before: If set to a date and time, only events that start at or before the given time are returned.
|
||||
:query date_to_after: If set to a date and time, only events that have an end date and end at or after the given time are returned.
|
||||
:query date_to_before: If set to a date and time, only events that have an end date and end at or before the given time are returned.
|
||||
:query ends_after: If set to a date and time, only events that happen during or after the given time are returned.
|
||||
:query sales_channel: If set to a sales channel identifier, the response will only contain subevents from events available on this sales channel.
|
||||
:param organizer: The ``slug`` field of a valid organizer
|
||||
|
||||
@@ -20,11 +20,16 @@ internal_name string An optional nam
|
||||
rate decimal (string) Tax rate in percent
|
||||
price_includes_tax boolean If ``true`` (default), tax is assumed to be included in
|
||||
the specified product price
|
||||
eu_reverse_charge boolean If ``true``, EU reverse charge rules are applied
|
||||
eu_reverse_charge boolean If ``true``, EU reverse charge rules are applied. Will
|
||||
be ignored if custom rules are set.
|
||||
home_country string Merchant country (required for reverse charge), can be
|
||||
``null`` or empty string
|
||||
keep_gross_if_rate_changes boolean If ``true``, changes of the tax rate based on custom
|
||||
rules keep the gross price constant (default is ``false``)
|
||||
custom_rules object Dynamic rules specification. Each list element
|
||||
corresponds to one rule that will be processed in order.
|
||||
The current version of the schema in use can be found
|
||||
`here`_.
|
||||
===================================== ========================== =======================================================
|
||||
|
||||
|
||||
@@ -32,6 +37,10 @@ keep_gross_if_rate_changes boolean If ``true``, ch
|
||||
|
||||
The ``internal_name`` and ``keep_gross_if_rate_changes`` attributes have been added.
|
||||
|
||||
.. versionchanged:: 2023.6
|
||||
|
||||
The ``custom_rules`` attribute has been added.
|
||||
|
||||
Endpoints
|
||||
---------
|
||||
|
||||
@@ -68,6 +77,7 @@ Endpoints
|
||||
"price_includes_tax": true,
|
||||
"eu_reverse_charge": false,
|
||||
"keep_gross_if_rate_changes": false,
|
||||
"custom_rules": null,
|
||||
"home_country": "DE"
|
||||
}
|
||||
]
|
||||
@@ -108,6 +118,7 @@ Endpoints
|
||||
"price_includes_tax": true,
|
||||
"eu_reverse_charge": false,
|
||||
"keep_gross_if_rate_changes": false,
|
||||
"custom_rules": null,
|
||||
"home_country": "DE"
|
||||
}
|
||||
|
||||
@@ -156,6 +167,7 @@ Endpoints
|
||||
"price_includes_tax": true,
|
||||
"eu_reverse_charge": false,
|
||||
"keep_gross_if_rate_changes": false,
|
||||
"custom_rules": null,
|
||||
"home_country": "DE"
|
||||
}
|
||||
|
||||
@@ -203,6 +215,7 @@ Endpoints
|
||||
"price_includes_tax": true,
|
||||
"eu_reverse_charge": false,
|
||||
"keep_gross_if_rate_changes": false,
|
||||
"custom_rules": null,
|
||||
"home_country": "DE"
|
||||
}
|
||||
|
||||
@@ -242,3 +255,5 @@ Endpoints
|
||||
:statuscode 204: no error
|
||||
:statuscode 401: Authentication failure
|
||||
:statuscode 403: The requested organizer/event/rule does not exist **or** you have no permission to change it **or** this tax rule cannot be deleted since it is currently in use.
|
||||
|
||||
.. _here: https://github.com/pretix/pretix/blob/master/src/pretix/static/schema/tax-rules-custom.schema.json
|
||||
|
||||
@@ -47,6 +47,8 @@ tag string A string that i
|
||||
comment string An internal comment on the voucher
|
||||
subevent integer ID of the date inside an event series this voucher belongs to (or ``null``).
|
||||
show_hidden_items boolean Only if set to ``true``, this voucher allows buying products with the property ``hide_without_voucher``. Defaults to ``true``.
|
||||
all_addons_included boolean If set to ``true``, all add-on products for the product purchased with this voucher are included in the base price.
|
||||
all_bundles_included boolean If set to ``true``, all bundled products for the product purchased with this voucher are added without their designated price.
|
||||
===================================== ========================== =======================================================
|
||||
|
||||
|
||||
@@ -95,6 +97,9 @@ Endpoints
|
||||
"comment": "",
|
||||
"seat": null,
|
||||
"subevent": null,
|
||||
"show_hidden_items": false,
|
||||
"all_addons_included": false,
|
||||
"all_bundles_included": false
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -161,7 +166,10 @@ Endpoints
|
||||
"tag": "testvoucher",
|
||||
"comment": "",
|
||||
"seat": null,
|
||||
"subevent": null
|
||||
"subevent": null,
|
||||
"show_hidden_items": false,
|
||||
"all_addons_included": false,
|
||||
"all_bundles_included": false
|
||||
}
|
||||
|
||||
:param organizer: The ``slug`` field of the organizer to fetch
|
||||
@@ -198,7 +206,10 @@ Endpoints
|
||||
"quota": null,
|
||||
"tag": "testvoucher",
|
||||
"comment": "",
|
||||
"subevent": null
|
||||
"subevent": null,
|
||||
"show_hidden_items": false,
|
||||
"all_addons_included": false,
|
||||
"all_bundles_included": false
|
||||
}
|
||||
|
||||
**Example response**:
|
||||
@@ -225,7 +236,10 @@ Endpoints
|
||||
"tag": "testvoucher",
|
||||
"comment": "",
|
||||
"seat": null,
|
||||
"subevent": null
|
||||
"subevent": null,
|
||||
"show_hidden_items": false,
|
||||
"all_addons_included": false,
|
||||
"all_bundles_included": false
|
||||
}
|
||||
|
||||
:param organizer: The ``slug`` field of the organizer to create a voucher for
|
||||
@@ -264,7 +278,10 @@ Endpoints
|
||||
"quota": null,
|
||||
"tag": "testvoucher",
|
||||
"comment": "",
|
||||
"subevent": null
|
||||
"subevent": null,
|
||||
"show_hidden_items": false,
|
||||
"all_addons_included": false,
|
||||
"all_bundles_included": false
|
||||
},
|
||||
{
|
||||
"code": "ASDKLJCYXCASDASD",
|
||||
@@ -279,7 +296,10 @@ Endpoints
|
||||
"quota": null,
|
||||
"tag": "testvoucher",
|
||||
"comment": "",
|
||||
"subevent": null
|
||||
"subevent": null,
|
||||
"show_hidden_items": false,
|
||||
"all_addons_included": false,
|
||||
"all_bundles_included": false
|
||||
},
|
||||
|
||||
**Example response**:
|
||||
@@ -353,7 +373,10 @@ Endpoints
|
||||
"tag": "testvoucher",
|
||||
"comment": "",
|
||||
"seat": null,
|
||||
"subevent": null
|
||||
"subevent": null,
|
||||
"show_hidden_items": false,
|
||||
"all_addons_included": false,
|
||||
"all_bundles_included": false
|
||||
}
|
||||
|
||||
:param organizer: The ``slug`` field of the organizer to modify
|
||||
|
||||
@@ -50,6 +50,10 @@ The following values for ``action_types`` are valid with pretix core:
|
||||
* ``pretix.event.order.payment.confirmed``
|
||||
* ``pretix.event.order.approved``
|
||||
* ``pretix.event.order.denied``
|
||||
* ``pretix.event.orders.waitinglist.added``
|
||||
* ``pretix.event.orders.waitinglist.changed``
|
||||
* ``pretix.event.orders.waitinglist.deleted``
|
||||
* ``pretix.event.orders.waitinglist.voucher_assigned``
|
||||
* ``pretix.event.checkin``
|
||||
* ``pretix.event.checkin.reverted``
|
||||
* ``pretix.event.added``
|
||||
|
||||
@@ -18,13 +18,13 @@ If you want to add a custom view to the control area of an event, just register
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from django.conf.urls import url
|
||||
from django.urls import re_path
|
||||
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
url(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
|
||||
views.admin_view, name='backend'),
|
||||
re_path(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
|
||||
views.admin_view, name='backend'),
|
||||
]
|
||||
|
||||
It is required that your URL parameters are called ``organizer`` and ``event``. If you want to
|
||||
|
||||
@@ -13,7 +13,7 @@ Core
|
||||
.. automodule:: pretix.base.signals
|
||||
:members: periodic_task, event_live_issues, event_copy_data, email_filter, register_notification_types,
|
||||
item_copy_data, register_sales_channels, register_global_settings, quota_availability, global_email_filter,
|
||||
register_ticket_secret_generators
|
||||
register_ticket_secret_generators, gift_card_transaction_display
|
||||
|
||||
Order events
|
||||
""""""""""""
|
||||
@@ -21,7 +21,7 @@ Order events
|
||||
There are multiple signals that will be sent out in the ordering cycle:
|
||||
|
||||
.. automodule:: pretix.base.signals
|
||||
:members: validate_cart, validate_cart_addons, validate_order, order_fee_calculation, order_paid, order_placed, order_canceled, order_reactivated, order_expired, order_modified, order_changed, order_approved, order_denied, order_fee_type_name, allow_ticket_download, order_split, order_gracefully_delete, invoice_line_text
|
||||
:members: validate_cart, validate_cart_addons, validate_order, order_valid_if_pending, order_fee_calculation, order_paid, order_placed, order_canceled, order_reactivated, order_expired, order_modified, order_changed, order_approved, order_denied, order_fee_type_name, allow_ticket_download, order_split, order_gracefully_delete, invoice_line_text
|
||||
|
||||
Check-ins
|
||||
"""""""""
|
||||
@@ -61,7 +61,7 @@ Backend
|
||||
item_formsets, order_search_filter_q, order_search_forms
|
||||
|
||||
.. automodule:: pretix.base.signals
|
||||
:members: logentry_display, logentry_object_link, requiredaction_display, timeline_events, orderposition_blocked_display
|
||||
:members: logentry_display, logentry_object_link, requiredaction_display, timeline_events, orderposition_blocked_display, customer_created, customer_signed_in
|
||||
|
||||
Vouchers
|
||||
""""""""
|
||||
|
||||
@@ -70,6 +70,8 @@ The provider class
|
||||
|
||||
.. autoattribute:: settings_form_fields
|
||||
|
||||
.. autoattribute:: walletqueries
|
||||
|
||||
.. automethod:: settings_form_clean
|
||||
|
||||
.. automethod:: settings_content_render
|
||||
|
||||
@@ -37,7 +37,7 @@ you to execute a piece of code with a different locale:
|
||||
This is very useful e.g. when sending an email to a user that has a different language than the user performing the
|
||||
action that causes the mail to be sent.
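A minimal sketch, assuming the helper referenced above is ``pretix.base.i18n.language``; the locale value is just an example.

.. code-block:: python

    from django.utils.translation import gettext as _

    from pretix.base.i18n import language

    # Render the e-mail subject in the recipient's locale, not the locale of
    # the user who triggered the action.
    with language("de"):
        subject = _("Your order")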
|
||||
|
||||
.. _translation features: https://docs.djangoproject.com/en/1.9/topics/i18n/translation/
|
||||
.. _translation features: https://docs.djangoproject.com/en/4.2/topics/i18n/translation/
|
||||
.. _GNU gettext: https://www.gnu.org/software/gettext/
|
||||
.. _strings: https://django-i18nfield.readthedocs.io/en/latest/strings.html
|
||||
.. _database fields: https://django-i18nfield.readthedocs.io/en/latest/quickstart.html
|
||||
|
||||
@@ -15,33 +15,41 @@ and the admin panel is available at ``https://pretix.eu/control/event/bigorg/awe
|
||||
|
||||
If the organizer now configures a custom domain like ``tickets.bigorg.com``, their event will
|
||||
from now on be available on ``https://tickets.bigorg.com/awesomecon/``. The former URL at
|
||||
``pretix.eu`` will redirect there. However, the admin panel will still only be available
|
||||
on ``pretix.eu`` for convenience and security reasons.
|
||||
``pretix.eu`` will redirect there. It's also possible to do this for just an event, in which
|
||||
case the event will be available on ``https://tickets.awesomecon.org/``.
|
||||
|
||||
However, the admin panel will still only be available on ``pretix.eu`` for convenience and security reasons.
|
||||
|
||||
URL routing
|
||||
-----------
|
||||
|
||||
The hard part about implementing this URL routing in Django is that
|
||||
``https://pretix.eu/bigorg/awesomecon/`` contains two parameters of nearly arbitrary content
|
||||
and ``https://tickets.bigorg.com/awesomecon/`` contains only one. The only robust way to do
|
||||
this is by having *separate* URL configuration for those two cases. In pretix, we call the
|
||||
former our ``maindomain`` config and the latter our ``subdomain`` config. For pretix's core
|
||||
modules we do some magic to avoid duplicate configuration, but for a fairly simple plugin with
|
||||
only a handful of routes, we recommend just configuring the two URL sets separately.
|
||||
and ``https://tickets.bigorg.com/awesomecon/`` contains only one and ``https://tickets.awesomecon.org/`` does not contain any.
|
||||
The only robust way to do this is by having *separate* URL configuration for those three cases.
|
||||
|
||||
In pretix, we therefore do not have a global URL configuration, but three, living in the following modules:
|
||||
|
||||
- ``pretix.multidomain.maindomain_urlconf``
|
||||
- ``pretix.multidomain.organizer_domain_urlconf``
|
||||
- ``pretix.multidomain.event_domain_urlconf``
|
||||
|
||||
We provide some helper utilities to work with these to avoid duplicate configuration of the individual URLs.
|
||||
The file ``urls.py`` inside your plugin package will be loaded and scanned for URL configuration
|
||||
automatically and should be provided by any plugin that provides any view.
|
||||
However, unlike plain Django, we look not only for a ``urlpatterns`` attribute on the module but support other
|
||||
attributes like ``event_patterns`` and ``organizer_patterns`` as well.
|
||||
|
||||
A very basic example that provides one view in the admin panel and one view in the frontend
|
||||
could look like this::
|
||||
For example, for a simple plugin that adds one URL to the backend and one event-level URL to the frontend, you can
|
||||
create the following configuration in your ``urls.py``::
|
||||
|
||||
from django.conf.urls import url
|
||||
from django.urls import re_path
|
||||
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
url(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
|
||||
views.AdminView.as_view(), name='backend'),
|
||||
re_path(r'^control/event/(?P<organizer>[^/]+)/(?P<event>[^/]+)/mypluginname/',
|
||||
views.AdminView.as_view(), name='backend'),
|
||||
]
|
||||
|
||||
event_patterns = [
|
||||
@@ -52,7 +60,7 @@ could look like this::
|
||||
As you can see, the view in the frontend is not included in the standard Django ``urlpatterns``
|
||||
setting but in a separate list with the name ``event_patterns``. This will automatically prepend
|
||||
the appropriate parameters to the regex (e.g. the event or the event and the organizer, depending
|
||||
on the called domain).
|
||||
on the called domain). For organizer-level views, ``organizer_patterns`` works the same way.
|
||||
|
||||
If you only provide URLs in the admin area, you do not need to provide an ``event_patterns`` attribute.
|
||||
|
||||
@@ -71,11 +79,16 @@ is a python method that emulates a behavior similar to ``reverse``:
|
||||
|
||||
.. autofunction:: pretix.multidomain.urlreverse.eventreverse
|
||||
|
||||
If you need to communicate the URL externally, you can use a different method to ensure that it is always an absolute URL:
|
||||
|
||||
.. autofunction:: pretix.multidomain.urlreverse.build_absolute_uri
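A minimal sketch of both helpers; ``event`` stands for an ``Event`` instance and the URL name ``plugins:mypluginname:frontend`` is a placeholder for your own plugin's URL.

.. code-block:: python

    from pretix.multidomain.urlreverse import build_absolute_uri, eventreverse

    # Relative path, depending on which domain configuration applies to the event:
    path = eventreverse(event, "plugins:mypluginname:frontend")

    # Always a full URL, e.g. for use in emails or API payloads:
    url = build_absolute_uri(event, "plugins:mypluginname:frontend")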
|
||||
|
||||
In addition, there is a template tag that works similar to ``url`` but takes an event or organizer object
|
||||
as its first argument and can be used like this::
|
||||
|
||||
{% load eventurl %}
|
||||
<a href="{% eventurl request.event "presale:event.checkout" step="payment" %}">Pay</a>
|
||||
<a href="{% abseventurl request.event "presale:event.checkout" step="payment" %}">Pay</a>
|
||||
|
||||
|
||||
Implementation details
|
||||
|
||||
@@ -12,3 +12,4 @@ Developer documentation
|
||||
api/index
|
||||
structure
|
||||
translation/index
|
||||
nfc/index
|
||||
|
||||
15
doc/development/nfc/index.rst
Normal file
@@ -0,0 +1,15 @@
|
||||
NFC media
|
||||
=========
|
||||
|
||||
pretix supports using NFC chips as "reusable media", for example to store gift cards or tickets.
|
||||
|
||||
Most of this implementation currently lives in our proprietary app pretixPOS, but in the future might also become part of our open-source pretixSCAN solution.
|
||||
Either way, we want this to be an open ecosystem and therefore document the exact mechanisms in use on the following pages.
|
||||
|
||||
We support multiple implementations of NFC media, each documented on its own page:
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
uid
|
||||
mf0aes
|
||||
113
doc/development/nfc/mf0aes.rst
Normal file
@@ -0,0 +1,113 @@
|
||||
Mifare Ultralight AES
|
||||
=====================
|
||||
|
||||
We offer an implementation that provides a higher security level than the UID-based approach and uses the `Mifare Ultralight AES`_ chip sold by NXP.
|
||||
We believe the security model of this approach is adequate to the situation where this will usually be used and we'll outline known risks below.
|
||||
|
||||
If you want to dive deeper into the properties of the Mifare Ultralight AES chip, we recommend reading the `data sheet`_.
|
||||
|
||||
Random UIDs
|
||||
-----------
|
||||
|
||||
Mifare Ultralight AES supports a feature that returns a randomized UID every time a non-authenticated user tries to
|
||||
read the UID. This has a strong privacy benefit, since no unauthorized entity can use the NFC chips to track users.
|
||||
On the other hand, this reduces interoperability of the system. For example, this prevents you from using the same NFC
|
||||
chips for a different purpose where you only need the UID. This will also prevent your guests from reading their UID
|
||||
themselves with their phones, which might be useful e.g. in debugging situations.
|
||||
|
||||
Since there's no one-size-fits-all choice here, you can enable or disable this feature in the pretix organizer
|
||||
settings. If you change it, the change will apply to all newly encoded chips after the change.
|
||||
|
||||
Key management
|
||||
--------------
|
||||
|
||||
For every organizer, the server will generate a "key set", which consists of a publicly known ID (random 32-bit integer) and two 16-byte keys ("diversification key" and "UID key").
|
||||
|
||||
Using our :ref:`Device authentication mechanism <rest-deviceauth>`, an authorized device can submit a locally generated RSA public key to the server.
|
||||
This key can no longer be changed on the server once it is set, thus protecting against the attack scenario of a leaked device API token.
|
||||
|
||||
The server will then include key sets in the response to ``/api/v1/device/info``, encrypted with the device's RSA key.
|
||||
This includes all key sets generated for the organizer the device belongs to, as well as all keys of organizers that have granted sufficient access to this organizer.
|
||||
|
||||
The device will decrypt the key sets using its RSA key and store the key sets locally.
|
||||
|
||||
.. warning:: The device **will** have access to the raw key sets. Therefore, there is a risk of leaked master keys if an
|
||||
authorized device is stolen or abused. Our implementation in pretixPOS attempts to make this very hard on
|
||||
modern, non-rooted Android devices by keeping them encrypted with the RSA key and only storing the RSA key
|
||||
in the hardware-backed keystore of the device. A sufficiently motivated attacker, however, will likely still
|
||||
be able to extract the keys from a stolen device.
|
||||
|
||||
Encoding a chip
|
||||
---------------
|
||||
|
||||
When a new chip is encoded, the following steps will be taken:
|
||||
|
||||
- The UID of the chip is retrieved.
|
||||
|
||||
- A chip-specific key is generated using the mechanism documented in `AN10922`_ using the "diversification key" from the
|
||||
organizer's key set as the CMAC key and the diversification input concatenated in the form of ``0x01 + UID + APPID + SYSTEMID``
|
||||
with the following values (a Python sketch of this derivation follows after the list):
|
||||
|
||||
- The UID of the chip as ``UID``
|
||||
|
||||
- ``"eu.pretix"`` (``0x65 0x75 0x2e 0x70 0x72 0x65 0x74 0x69 0x78``) as ``APPID``
|
||||
|
||||
- The ``public_id`` from the organizer's key set as a 4-byte big-endian value as ``SYSTEMID``
|
||||
|
||||
- The chip-specific key is written to the chip as the "data protection key" (config pages 0x30 to 0x33)
|
||||
|
||||
- The UID key from the organizer's key set is written to the chip as the "UID retrieval key" (config pages 0x34 to 0x37)
|
||||
|
||||
- The config page 0x29 is set like this:
|
||||
|
||||
- ``RID_ACT`` (random UID) to ``1`` or ``0`` based on the organizer's configuration
|
||||
- ``SEC_MSG_ACT`` (secure messaging) to ``1``
|
||||
- ``AUTH0`` (first page that needs authentication) to 0x04 (first non-UID page)
|
||||
|
||||
- The config page 0x2A is set like this:
|
||||
|
||||
- ``PROT`` to ``0`` (only write access restricted, not read access)
|
||||
- ``AUTHLIM`` to ``256`` (maximum number of wrong authentications before "self-destruction")
|
||||
- Everything else to its default value (no lock bits are set)
|
||||
|
||||
- The ``public_id`` of the key set will be written to page 0x04 as a big-endian value
|
||||
|
||||
- The UID of the chip will be registered as a reusable medium on the server.
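To make the diversification step above more concrete, here is a minimal Python sketch using the ``cryptography`` package. It simply CMACs the documented input ``0x01 + UID + APPID + SYSTEMID`` and glosses over the padding details of AN10922, so treat it as an illustration rather than a reference implementation.

.. code-block:: python

    from cryptography.hazmat.primitives.ciphers import algorithms
    from cryptography.hazmat.primitives.cmac import CMAC

    APPID = b"eu.pretix"  # 0x65 0x75 0x2e 0x70 0x72 0x65 0x74 0x69 0x78


    def chip_specific_key(diversification_key: bytes, uid: bytes, public_id: int) -> bytes:
        # AES-128 CMAC over 0x01 + UID + APPID + SYSTEMID, as described above.
        # AN10922 padding subtleties are intentionally omitted in this sketch.
        system_id = public_id.to_bytes(4, "big")
        c = CMAC(algorithms.AES(diversification_key))
        c.update(b"\x01" + uid + APPID + system_id)
        return c.finalize()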
|
||||
|
||||
.. warning:: During encoding, the chip-specific key and the UID key are transmitted in plain text over the air. The
|
||||
security model therefore relies on the encoding of chips being performed in a trusted physical environment
|
||||
to prevent a nearby attacker from sniffing the keys with a strong antenna.
|
||||
|
||||
.. note:: If an attacker tries to authenticate with the chip 256 times using the wrong key, the chip will become
|
||||
unusable. A chip may also become unusable if it is detached from the reader in the middle of the encoding
|
||||
process (even though we've tried to implement it in a way that makes this unlikely).
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
When a chip is presented to the NFC reader, the following steps will be taken:
|
||||
|
||||
- Command ``GET_VERSION`` is used to determine if it is a Mifare Ultralight AES chip (if not, abort).
|
||||
|
||||
- Page 0x04 is read. If it is all zeroes, the chip is considered un-encoded (abort). If it contains a value that
|
||||
corresponds to the ``public_id`` of a known key set, this key set is used for all further operations. If it contains
|
||||
a different value, we consider this chip to belong to a different organizer or not to a pretix system at all (abort).
|
||||
|
||||
- An authentication with the chip using the UID key is performed.
|
||||
|
||||
- The UID of the chip will be read.
|
||||
|
||||
- The chip-specific key will be derived using the mechanism described above in the encoding step.
|
||||
|
||||
- An authentication with the chip using the chip-specific key is performed. If this is fully successful, this step
|
||||
proves that the chip knows the same chip-specific key as we do and is therefore an authentic chip encoded by us and
|
||||
we can trust its UID value.
|
||||
|
||||
- The UID is transmitted to the server to fetch the correct medium.
|
||||
|
||||
During these steps, the keys are never transmitted in plain text and can thus not be sniffed by a nearby attacker
|
||||
with a strong antenna.
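As an illustration of the key-set lookup in the second step above, a short Python sketch; ``known_key_sets`` is a hypothetical mapping from ``public_id`` to the locally stored key set.

.. code-block:: python

    def find_key_set(page_0x04: bytes, known_key_sets: dict):
        # page_0x04 is the 4-byte page read from the chip.
        if page_0x04 == b"\x00\x00\x00\x00":
            return None  # un-encoded chip, abort
        public_id = int.from_bytes(page_0x04, "big")
        # Unknown IDs belong to another organizer or another system entirely.
        return known_key_sets.get(public_id)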
|
||||
|
||||
.. _Mifare Ultralight AES: https://www.nxp.com/products/rfid-nfc/mifare-hf/mifare-ultralight/mifare-ultralight-aes-enhanced-security-for-limited-use-contactless-applications:MF0AESx20
|
||||
.. _data sheet: https://www.nxp.com/docs/en/data-sheet/MF0AES(H)20.pdf
|
||||
.. _AN10922: https://www.nxp.com/docs/en/application-note/AN10922.pdf
|
||||
10
doc/development/nfc/uid.rst
Normal file
@@ -0,0 +1,10 @@
|
||||
UID-based
|
||||
=========
|
||||
|
||||
With UID-based NFC, only the unique ID (UID) of the NFC chip is used for identification purposes.
|
||||
This can be used with virtually all NFC chips that provide compatibility with the NFC reader in use, typically at least all chips that comply with ISO/IEC 14443-3A.
|
||||
|
||||
We impose only one restriction: The UID may not start with ``08``, since that usually signifies a randomized UID that changes on every read (which would not be very useful).
|
||||
|
||||
.. warning:: The UID-based approach provides only a very low level of security. It is easy to clone a chip with the same
|
||||
UID and impersonate someone else.
|
||||
@@ -96,6 +96,20 @@ http://localhost:8000/control/ for the admin view.
|
||||
port (for example because you develop on `pretixdroid`_), you can check
|
||||
`Django's documentation`_ for more options.
|
||||
|
||||
When running the local development webserver, ensure Celery is not configured
|
||||
in ``pretix.cfg``, i.e. you should remove anything such as::
|
||||
|
||||
[celery]
|
||||
backend=redis://redis:6379/2
|
||||
broker=redis://redis:6379/2
|
||||
|
||||
If you choose to use Celery for development, you must also start a Celery worker
|
||||
process::
|
||||
|
||||
celery -A pretix.celery_app worker -l info
|
||||
|
||||
However, beware that code changes will not auto-reload within Celery.
|
||||
|
||||
.. _`checksandtests`:
|
||||
|
||||
Code checks and unit tests
|
||||
|
||||
143
doc/plugins/epaybl.rst
Normal file
@@ -0,0 +1,143 @@
|
||||
ePayBL
|
||||
======
|
||||
|
||||
.. note::
|
||||
|
||||
Since ePayBL is only available to German federal, provincial and communal entities, the following page is also
|
||||
only provided in German. Should you require assistance with ePayBL and do not speak this language, please feel free to
|
||||
reach out to support@pretix.eu.
|
||||
|
||||
|
||||
Einführung
|
||||
----------
|
||||
|
||||
.. note::
|
||||
|
||||
Sollten Sie lediglich schnell entscheiden wollen, welcher Kontierungsmodus in den Einstellungen des pretix
|
||||
ePayBL-plugins gewählt werden soll, so springen Sie direkt zur Sektion :ref:`Kontierungsmodus`.
|
||||
|
||||
|
||||
`ePayBL`_ - das ePayment-System von Bund und Ländern - ist das am weitesten verbreitete Zahlungssystem für Bundes-, Länder-
|
||||
sowie kommunale Aufgabenträger. Während es nur wie eines von vielen anderen Zahlungssystemen scheint, so bietet es
|
||||
seinen Nutzern besondere Vorteile, wie die automatische Erfassung von Zahlungsbelegen, das Übertragen von Buchungen in
|
||||
Haushaltskassen/-systeme sowie die automatische Erfassung von Kontierungen und Steuermerkmalen.
|
||||
|
||||
Rein technisch gesehen ist ePayBL hierbei kein eigenständiger Zahlungsdienstleister, sondern nur eine Komponente
|
||||
im komplexen System, das die Zahlungsabwicklung für Kommunen und Behörden ist.
|
||||
|
||||
Im Folgenden der schematische Aufbau einer Umgebung, in welcher ePayBL zum Einsatz kommt:
|
||||
|
||||
.. figure:: img/epaybl_flowchart.png
|
||||
:class: screenshot
|
||||
|
||||
Quelle: Integrationshandbuch ePayBL-Konnektor, DResearch Digital Media Systems GmbH
|
||||
|
||||
|
||||
In diesem Schaubild stellt pretix, bzw. die von Ihnen als Veranstalter angelegten Ticketshops, das Fachverfahren dar.
|
||||
|
||||
ePayBL stellt das Bindeglied zwischen den Fachverfahren, Haushaltssystemen und dem eigentlichen Zahlungsdienstleister,
|
||||
dem sog. ZV-Provider dar. Dieser ZV-Provider ist die Stelle, welche die eigentlichen Kundengelder einzieht und an den
|
||||
Händler auszahlt. Das Gros der Zahlungsdienstleister unterstützt pretix hierbei auch direkt; sprich: Sollten Sie die
|
||||
Anbindung an Ihre Haushaltssysteme nicht benötigen, kann eine direkte Anbindung in der Regel ebenso - und dies bei meist
|
||||
vermindertem Aufwand - vorgenommen werden.
|
||||
|
||||
In der Vergangenheit zeigte sich jedoch schnell, dass nicht jeder IT-Dienstleister immer sofort die neueste Version von
|
||||
ePayBL seinen Nutzern angeboten hat. Die Gründe hierfür sind mannigfaltig: Von fest vorgegebenen Update-Zyklen bis hin
|
||||
zu Systeme mit speziellen Anpassungen, kann leider nicht davon ausgegangen werden, dass alle ePayBL-Systeme exakt gleich
|
||||
ansprechbar sind - auch wenn es sich dabei eigentlich um einen standardisierten Dienst handelt.
|
||||
|
||||
Aus diesem Grund gibt es mit dem ePayBL-Konnektor eine weitere Abstraktionsschicht welche optional zwischen den
|
||||
Fachverfahren und dem ePayBL-Server sitzt. Dieser Konnektor wird so gepflegt, dass er zum einen eine dauerhaft
|
||||
gleichartige Schnittstelle den Fachverfahren bietet aber gleichzeitig auch mit jeder Version des ePayBL-Servers
|
||||
kommunizieren kann - egal wie neu oder alt, wie regulär oder angepasst diese ist.
|
||||
|
||||
Im Grunde müsste daher eigentlich immer gesagt werden, dass pretix eine Anbindung an den ePayBL-Konnektor bietet; nicht
|
||||
an "ePayBL" oder den "ePayBL-Server". Diese Unterscheidung kann bei der Ersteinrichtung und Anforderung von Zugangsdaten
|
||||
von Relevanz sein. Da in der Praxis jedoch beide Begriffe gleichbedeutend genutzt werden, wird im Folgenden auch nur von
|
||||
einer ePayBL-Anbindung die Rede sein - auch wenn explizit der Konnektor gemeint ist.
|
||||
|
||||
|
||||
.. _`Kontierungsmodus`:
|
||||
|
||||
Kontierungsmodus
|
||||
----------------
|
||||
|
||||
ePayBL ist ein Produkt, welches für die Abwicklung von Online-Zahlungsvorgängen in der Verwaltung geschaffen wurde. Ein
|
||||
Umfeld, in dem klar definiert ist, was ein Kunde gerade bezahlt und wohin das Geld genau fließt. Diese Annahmen lassen
|
||||
sich in einem Ticketshop wie pretix jedoch nur teilweise genauso abbilden.
|
||||
|
||||
Die ePayBL-Integration für pretix bietet daher zwei unterschiedliche Modi an, wie Buchungen erfasst und an ePayBL und
|
||||
damit auch an die dahinterliegenden Haushaltssysteme gemeldet werden können.
|
||||
|
||||
Kontierung pro Position/Artikel
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Dieser Modus versucht den klassischen, behördentypischen ePayBL-Zahlungsvorgang abzubilden: Jede einzelne Position, die
|
||||
ein Kunde in den Warenkorb legt, wird auch genauso 1:1 an ePayBL und die Hintergrundsysteme übermittelt.
|
||||
|
||||
Hierbei muss zwingend auch für jede Position ein Kennzeichen für Haushaltsstelle und Objektnummer, sowie optional ein
|
||||
Kontierungsobjekt (``HREF``; bspw. ``stsl=Steuerschlüssel;psp=gsb:Geschäftsbereich,auft:Innenauftrag,kst:Kostenstelle;``
|
||||
) übermittelt werden.
|
||||
|
||||
Diese Daten sind vom Veranstalter entsprechend für jeden in der Veranstaltung angelegten Artikel innerhalb des Tabs
|
||||
"Zusätzliche Einstellungen" der Produkteinstellungen zu hinterlegen.
|
||||
|
||||
Während diese Einstellung eine größtmögliche Menge an Kontierungsdaten überträgt und auch ein separates Verbuchen von
|
||||
Leistungen auf unterschiedliche Haushaltsstellen erlaubt, so hat diese Option auch einen großen Nachteil: Der Kunde kann
|
||||
nur eine Zahlung für seine Bestellung leisten.
|
||||
|
||||
Während sich dies nicht nach einem großen Problem anhört, so kann dies beim Kunden zu Frust führen. pretix bietet die
|
||||
Option an, dass ein Veranstalter eine Bestellung jederzeit verändern kann: Ändern von Preisen von Positionen in einer
|
||||
aufgegebenen Bestellung, Zubuchen und Entfernen von Bestellpositionen, etc. Hat der Kunde seine ursprüngliche Bestellung
|
||||
jedoch schon bezahlt, kann pretix nicht mehr die komplette Bestellung mit den passenden Kontierungen übertragen - es
|
||||
müsste nur ein Differenz-Abbild zwischen Ursprungsbestellung und aktueller Bestellung übertragen werden. Aber auch wenn
|
||||
eine "Nachmeldung" möglich wäre, so wäre ein konkretes Auflösen für was jetzt genau gezahlt wird, nicht mehr möglich.
|
||||
|
||||
Daher gilt bei der Nutzung der Kontierung pro Position/Artikel: Der Kunde kann nur eine (erfolgreiche) Zahlung auf seine
|
||||
Bestellung leisten.
|
||||
|
||||
Eine weitere Einschränkung dieses Modus ist, dass aktuell keine Gebühren-Positionen (Versandkosten, Zahlungs-, Storno-
|
||||
oder Servicegebühren) in diesem Modus übertragen werden können. Bitte wenden Sie sich an uns, wenn Sie diese
|
||||
Funktionalität benötigen.
|
||||
|
||||
|
||||
Kontierung pro Zahlvorgang
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Dieser Modus verabschiedet sich vom behördlichen "Jede Position gehört genau zu einem Haushaltskonto und muss genau
|
||||
zugeordnet werden". Stattdessen werden alle Bestellpositionen - inklusive eventuell definierter Gebühren - vermengt und
|
||||
nur als ein großer Warenkorb, genauer gesagt: eine einzige Position an ePayBL sowie die Hintergrundsysteme gemeldet.
|
||||
|
||||
Während im "pro Position/Artikel"-Modus jeder Artikel einzeln übermittelt wird und damit auch korrekt pro Artikel der
|
||||
jeweilige Brutto- und Nettopreis, sowie der anfallende Steuerbetrag und ein Steuerkennzeichen (mit Hilfe des optionalen
|
||||
``HREF``-Attributs) übermittelt werden, ist dies im "pro Zahlvorgang"-Modus nicht möglich.
|
||||
|
||||
Stattdessen übermittelt pretix nur einen Betrag für den gesamten Warenkorb: Bruttopreis == Nettopreis. Der Steuerbetrag
|
||||
wird hierbei als 0 übermittelt.
|
||||
|
||||
Die Angabe einer Haushaltsstelle und Objektnummer, sowie optional der ``HREF``-Kontierungsinformationen ist jedoch
|
||||
weiterhin notwendig - allerdings nicht mehr individuell für jeden Artikel/jede Position sondern nur für die gesamte
|
||||
Bestellung. Diese Daten sind direkt in den ePayBL-Einstellungen der Veranstaltung unter Einstellungen -> Zahlung ->
|
||||
ePayBL zu hinterlegen.
|
||||
|
||||
In der Praxis bedeutet dies, dass in einem angeschlossenen Haushaltssystem nicht nachvollzogen werden kann, welche Positionen
|
||||
konkret erworben und bezahlt wurden - stattdessen kann nur der Fakt, dass etwas verkauft wurde, erfasst werden.
|
||||
|
||||
Je nach Aufbau und Vorgaben der Finanzbuchhaltung kann dies jedoch ausreichend sein - wenn bspw. eine Ferienfahrt
|
||||
angeboten wird und seitens der Haushaltssysteme nicht erfasst werden muss, wie viel vom Gesamtbetrag einer Bestellung
|
||||
auf die Ferienfahrt an sich, auf einen Zubringerbus und einen Satz Bettwäsche entfallen ist, sondern (vereinfacht
|
||||
gesagt) es ausreichend ist, dass "Eine Summe X für die Haushaltsstelle/Objektnummer geflossen ist".
|
||||
|
||||
Dieser Modus der Kontierung bietet Ihnen auch als Vorteil gegenüber dem vorhergehenden an, dass die Bestellungen der
|
||||
Kunden jederzeit erweitert und verändert werden können - auch wenn die Ursprungsbestellung schon bezahlt wurde und nur
|
||||
noch eine Differenz gezahlt wird.
|
||||
|
||||
|
||||
Einschränkungen
|
||||
---------------
|
||||
|
||||
Zum aktuellen Zeitpunkt erlaubt die pretix-Anbindung an ePayBL nicht das Durchführen von Erstattungen von bereits
|
||||
geleisteten Zahlungen. Der Prozess hierfür unterscheidet sich von Behörde zu Behörde und muss daher händisch
|
||||
durchgeführt werden.
|
||||
|
||||
.. _ePayBL: https://www.epaybl.de/
|
||||
BIN
doc/plugins/img/epaybl_flowchart.png
Normal file
Binary file not shown. (added image, 44 KiB)
@@ -18,6 +18,7 @@ If you want to **create** a plugin, please go to the
|
||||
campaigns
|
||||
certificates
|
||||
digital
|
||||
epaybl
|
||||
exhibitors
|
||||
shipping
|
||||
imported_secrets
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
sphinx==6.2.*
|
||||
sphinx==7.0.*
|
||||
jinja2==3.1.*
|
||||
sphinx-rtd-theme
|
||||
sphinxcontrib-httpdomain
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
-e ../
|
||||
sphinx==6.2.*
|
||||
sphinx==7.0.*
|
||||
jinja2==3.1.*
|
||||
sphinx-rtd-theme
|
||||
sphinxcontrib-httpdomain
|
||||
|
||||
@@ -201,6 +201,10 @@ record for the subdomain ``pretix._domainkey`` with the following contents::
|
||||
|
||||
Then, please contact support@pretix.eu and we will enable DKIM for your domain on our mail servers.
|
||||
|
||||
.. note:: Many SMTP servers impose rate limits on the sent emails, such as a maximum number of emails sent per hour.
|
||||
These SMTP servers are often not suitable for use with pretix if you want to send an email to many
|
||||
hundreds or thousands of ticket buyers. Depending on how the rate limit is implemented, emails might be lost
|
||||
in this case, as pretix only retries email delivery for a certain time period.
|
||||
|
||||
.. _Sender Policy Framework: https://en.wikipedia.org/wiki/Sender_Policy_Framework
|
||||
.. _SPF specification: http://www.open-spf.org/SPF_Record_Syntax
|
||||
|
||||
@@ -22,77 +22,77 @@ classifiers = [
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Framework :: Django :: 3.2",
|
||||
"Framework :: Django :: 4.1",
|
||||
]
|
||||
|
||||
dependencies = [
|
||||
# Note that many of these are repeated as build-time dependencies down below -- change them too in case of updates!
|
||||
"arabic-reshaper==3.0.0", # Support for Arabic in reportlab
|
||||
"babel",
|
||||
"BeautifulSoup4==4.12.*",
|
||||
"bleach==5.0.*",
|
||||
"celery==5.2.*",
|
||||
"celery==5.3.*",
|
||||
"chardet==5.1.*",
|
||||
"cryptography>=3.4.2",
|
||||
"css-inline==0.8.*",
|
||||
"defusedcsv>=1.1.0",
|
||||
"dj-static",
|
||||
"Django==3.2.*,>=3.2.18",
|
||||
"Django==4.2.*",
|
||||
"django-bootstrap3==23.1.*",
|
||||
"django-compressor==4.3.*",
|
||||
"django-countries==7.5.*",
|
||||
"django-filter==23.1",
|
||||
"django-filter==23.2",
|
||||
"django-formset-js-improved==0.5.0.3",
|
||||
"django-formtools==2.4",
|
||||
"django-formtools==2.4.1",
|
||||
"django-hierarkey==1.1.*",
|
||||
"django-hijack==3.3.*",
|
||||
"django-i18nfield==1.9.*,>=1.9.4",
|
||||
"django-libsass==0.9",
|
||||
"django-localflavor==4.0",
|
||||
"django-markup",
|
||||
"django-mysql",
|
||||
"django-oauth-toolkit==2.2.*",
|
||||
"django-otp==1.1.*",
|
||||
"django-phonenumber-field==7.0.*",
|
||||
"django-otp==1.2.*",
|
||||
"django-phonenumber-field==7.1.*",
|
||||
"django-redis==5.2.*",
|
||||
"django-scopes==1.2.*",
|
||||
"django-scopes==2.0.*",
|
||||
"django-statici18n==2.3.*",
|
||||
"djangorestframework==3.14.*",
|
||||
"dnspython==2.2.*",
|
||||
"dnspython==2.3.*",
|
||||
"drf_ujson2==1.7.*",
|
||||
"geoip2==4.*",
|
||||
"importlib_metadata==6.6.*", # Polyfill, we can probably drop this once we require Python 3.10+
|
||||
"importlib_metadata==6.*", # Polyfill, we can probably drop this once we require Python 3.10+
|
||||
"isoweek",
|
||||
"jsonschema",
|
||||
"kombu==5.2.*",
|
||||
"kombu==5.3.*",
|
||||
"libsass==0.22.*",
|
||||
"lxml",
|
||||
"markdown==3.4.3", # 3.3.5 requires importlib-metadata>=4.4, but django-bootstrap3 requires importlib-metadata<3.
|
||||
# We can upgrade markdown again once django-bootstrap3 upgrades or once we drop Python 3.6 and 3.7
|
||||
"mt-940==4.23.*",
|
||||
"mt-940==4.30.*",
|
||||
"oauthlib==3.2.*",
|
||||
"openpyxl==3.1.*",
|
||||
"packaging",
|
||||
"paypalrestsdk==1.13.*",
|
||||
"paypal-checkout-serversdk==1.0.*",
|
||||
"PyJWT==2.6.*",
|
||||
"PyJWT==2.7.*",
|
||||
"phonenumberslite==8.13.*",
|
||||
"Pillow==9.5.*",
|
||||
"protobuf==4.22.*",
|
||||
"pretix-plugin-build",
|
||||
"protobuf==4.23.*",
|
||||
"psycopg2-binary",
|
||||
"pycountry",
|
||||
"pycparser==2.21",
|
||||
"pycryptodome==3.17.*",
|
||||
"pypdf==3.8.*",
|
||||
"pycryptodome==3.18.*",
|
||||
"pypdf==3.9.*",
|
||||
"python-bidi==0.4.*", # Support for Arabic in reportlab
|
||||
"python-dateutil==2.8.*",
|
||||
"python-u2flib-server==4.*",
|
||||
"pytz",
|
||||
"pytz-deprecation-shim==0.1.*",
|
||||
"pyuca",
|
||||
"qrcode==7.4.*",
|
||||
"redis==4.5.*,>=4.5.4",
|
||||
"reportlab==3.6.*",
|
||||
"requests==2.28.*",
|
||||
"redis==4.6.*",
|
||||
"reportlab==4.0.*",
|
||||
"requests==2.31.*",
|
||||
"sentry-sdk==1.15.*",
|
||||
"sepaxml==2.6.*",
|
||||
"slimit",
|
||||
@@ -109,17 +109,13 @@ dependencies = [
|
||||
|
||||
[project.optional-dependencies]
|
||||
memcached = ["pylibmc"]
|
||||
mysql = ["mysqlclient"]
|
||||
dev = [
|
||||
"coverage",
|
||||
"coveralls",
|
||||
"django-debug-toolbar==4.0.*",
|
||||
"django-formset-js-improved==0.5.0.3",
|
||||
"django-oauth-toolkit==2.2.*",
|
||||
"fakeredis==2.18.*",
|
||||
"flake8==6.0.*",
|
||||
"freezegun",
|
||||
"isort==5.12.*",
|
||||
"oauthlib==3.2.*",
|
||||
"pep8-naming==0.13.*",
|
||||
"potypo",
|
||||
"pycodestyle==2.10.*",
|
||||
@@ -130,7 +126,7 @@ dev = [
|
||||
"pytest-mock==3.10.*",
|
||||
"pytest-rerunfailures==11.*",
|
||||
"pytest-sugar",
|
||||
"pytest-xdist==3.2.*",
|
||||
"pytest-xdist==3.3.*",
|
||||
"pytest==7.3.*",
|
||||
"responses",
|
||||
]
|
||||
@@ -140,31 +136,14 @@ build = "pretix._build:CustomBuild"
|
||||
build_ext = "pretix._build:CustomBuildExt"
|
||||
|
||||
[build-system]
|
||||
build-backend = "backend"
|
||||
backend-path = ["_build"]
|
||||
requires = [
|
||||
"setuptools",
|
||||
"setuptools-rust",
|
||||
"wheel",
|
||||
"importlib_metadata",
|
||||
|
||||
# These are runtime dependencies that we unfortunately need to be import in the step that generates
|
||||
# all CSS and JS asset files. We should keep their versions in sync with the definition above.
|
||||
"babel",
|
||||
"Django==3.2.*,>=3.2.18",
|
||||
"django-bootstrap3==23.1.*",
|
||||
"django-compressor==4.3.*",
|
||||
"django-countries==7.5.*",
|
||||
"django-formtools==2.4",
|
||||
"django-hierarkey==1.1.*",
|
||||
"django-i18nfield==1.9.*,>=1.9.4",
|
||||
"django-libsass==0.9",
|
||||
"django-phonenumber-field==7.0.*",
|
||||
"django-statici18n==2.3.*",
|
||||
"djangorestframework==3.14.*",
|
||||
"libsass==0.22.*",
|
||||
"phonenumberslite==8.13.*",
|
||||
"pycountry",
|
||||
"pyuca",
|
||||
"slimit",
|
||||
"tomli",
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
@@ -182,4 +161,4 @@ version = {attr = "pretix.__version__"}
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["src"]
|
||||
include = ["pretix*"]
|
||||
namespaces = false
|
||||
namespaces = false
|
||||
|
||||
40
setup.cfg
Normal file
@@ -0,0 +1,40 @@
|
||||
[check-manifest]
|
||||
ignore =
|
||||
env/**
|
||||
doc/*
|
||||
deployment/*
|
||||
res/*
|
||||
src/.update-locales
|
||||
src/Makefile
|
||||
src/manage.py
|
||||
src/pretix/icons/*
|
||||
src/pretix/static.dist/**
|
||||
src/pretix/static/jsi18n/**
|
||||
src/requirements.txt
|
||||
src/requirements/*
|
||||
src/tests/*
|
||||
src/tests/api/*
|
||||
src/tests/base/*
|
||||
src/tests/control/*
|
||||
src/tests/testdummy/*
|
||||
src/tests/templates/*
|
||||
src/tests/presale/*
|
||||
src/tests/doc/*
|
||||
src/tests/helpers/*
|
||||
src/tests/media/*
|
||||
src/tests/multidomain/*
|
||||
src/tests/plugins/*
|
||||
src/tests/plugins/badges/*
|
||||
src/tests/plugins/banktransfer/*
|
||||
src/tests/plugins/paypal/*
|
||||
src/tests/plugins/paypal2/*
|
||||
src/tests/plugins/pretixdroid/*
|
||||
src/tests/plugins/stripe/*
|
||||
src/tests/plugins/sendmail/*
|
||||
src/tests/plugins/ticketoutputpdf/*
|
||||
.*
|
||||
CODE_OF_CONDUCT.md
|
||||
CONTRIBUTING.md
|
||||
Dockerfile
|
||||
SECURITY.md
|
||||
|
||||
25
setup.py
@@ -19,8 +19,31 @@
|
||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import setuptools
|
||||
|
||||
|
||||
sys.path.append(str(Path.cwd() / 'src'))
|
||||
|
||||
|
||||
def _CustomBuild(*args, **kwargs):
|
||||
from pretix._build import CustomBuild
|
||||
return CustomBuild(*args, **kwargs)
|
||||
|
||||
|
||||
def _CustomBuildExt(*args, **kwargs):
|
||||
from pretix._build import CustomBuildExt
|
||||
return CustomBuildExt(*args, **kwargs)
|
||||
|
||||
|
||||
cmdclass = {
|
||||
'build': _CustomBuild,
|
||||
'build_ext': _CustomBuildExt,
|
||||
}
|
||||
|
||||
if __name__ == "__main__":
|
||||
setuptools.setup()
|
||||
setuptools.setup(
|
||||
cmdclass=cmdclass,
|
||||
)
|
||||
|
||||
@@ -19,4 +19,4 @@
|
||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
__version__ = "4.19.0"
|
||||
__version__ = "2023.8.0.dev0"
|
||||
|
||||
@@ -30,7 +30,6 @@ from django.utils.translation import gettext_lazy as _ # NOQA
|
||||
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
|
||||
|
||||
USE_I18N = True
|
||||
USE_L10N = True
|
||||
USE_TZ = True
|
||||
|
||||
INSTALLED_APPS = [
|
||||
@@ -68,6 +67,7 @@ INSTALLED_APPS = [
|
||||
'oauth2_provider',
|
||||
'phonenumber_field',
|
||||
'statici18n',
|
||||
'django.forms', # after pretix.base for overrides
|
||||
]
|
||||
|
||||
FORMAT_MODULE_PATH = [
|
||||
@@ -80,6 +80,7 @@ ALL_LANGUAGES = [
|
||||
('de-informal', _('German (informal)')),
|
||||
('ar', _('Arabic')),
|
||||
('zh-hans', _('Chinese (simplified)')),
|
||||
('zh-hant', _('Chinese (traditional)')),
|
||||
('cs', _('Czech')),
|
||||
('da', _('Danish')),
|
||||
('nl', _('Dutch')),
|
||||
@@ -179,6 +180,8 @@ TEMPLATES = [
|
||||
},
|
||||
]
|
||||
|
||||
FORM_RENDERER = "django.forms.renderers.TemplatesSetting"
|
||||
|
||||
STATIC_ROOT = os.path.join(os.path.dirname(__file__), 'static.dist')
|
||||
|
||||
STATICFILES_FINDERS = (
|
||||
@@ -193,7 +196,14 @@ STATICFILES_DIRS = [
|
||||
|
||||
STATICI18N_ROOT = os.path.join(BASE_DIR, "pretix/static")
|
||||
|
||||
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
|
||||
STORAGES = {
|
||||
"default": {
|
||||
"BACKEND": "django.core.files.storage.FileSystemStorage",
|
||||
},
|
||||
"staticfiles": {
|
||||
"BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
|
||||
},
|
||||
}
|
||||
|
||||
# if os.path.exists(os.path.join(DATA_DIR, 'static')):
|
||||
# STATICFILES_DIRS.insert(0, os.path.join(DATA_DIR, 'static'))
|
||||
|
||||
@@ -45,6 +45,10 @@ def npm_install():
|
||||
|
||||
class CustomBuild(build):
|
||||
def run(self):
|
||||
if "src" not in os.listdir(".") or "pretix" not in os.listdir("src"):
|
||||
# Only run this command on the pretix module, not on other modules even if it's registered globally
|
||||
# in some cases
|
||||
return build.run(self)
|
||||
if "PRETIX_DOCKER_BUILD" in os.environ:
|
||||
return # this is a hack to allow calling this file early in our docker build to make use of caching
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pretix._build_settings")
|
||||
@@ -68,6 +72,10 @@ class CustomBuild(build):
|
||||
|
||||
class CustomBuildExt(build_ext):
|
||||
def run(self):
|
||||
if "src" not in os.listdir(".") or "pretix" not in os.listdir("src"):
|
||||
# Only run this command on the pretix module, not on other modules even if it's registered globally
|
||||
# in some cases
|
||||
return build_ext.run(self)
|
||||
if "PRETIX_DOCKER_BUILD" in os.environ:
|
||||
return # this is a hack to allow calling this file early in our docker build to make use of caching
|
||||
npm_install()
|
||||
|
||||
@@ -201,6 +201,7 @@ class PretixPosSecurityProfile(AllowListSecurityProfile):
|
||||
('DELETE', 'api-v1:cartposition-detail'),
|
||||
('GET', 'api-v1:giftcard-list'),
|
||||
('POST', 'api-v1:giftcard-transact'),
|
||||
('PATCH', 'api-v1:giftcard-detail'),
|
||||
('GET', 'plugins:pretix_posbackend:posclosing-list'),
|
||||
('POST', 'plugins:pretix_posbackend:posreceipt-list'),
|
||||
('POST', 'plugins:pretix_posbackend:posclosing-list'),
|
||||
@@ -222,6 +223,7 @@ class PretixPosSecurityProfile(AllowListSecurityProfile):
|
||||
('POST', 'api-v1:checkinrpc.redeem'),
|
||||
('GET', 'api-v1:checkinrpc.search'),
|
||||
('POST', 'api-v1:reusablemedium-lookup'),
|
||||
('POST', 'api-v1:reusablemedium-list'),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -59,7 +59,7 @@ class IdempotencyMiddleware:
|
||||
auth_hash = sha1(auth_hash_parts.encode()).hexdigest()
|
||||
idempotency_key = request.headers.get('X-Idempotency-Key', '')
|
||||
|
||||
with transaction.atomic():
|
||||
with transaction.atomic(durable=True):
|
||||
call, created = ApiCall.objects.select_for_update(of=OF_SELF).get_or_create(
|
||||
auth_hash=auth_hash,
|
||||
idempotency_key=idempotency_key,
|
||||
@@ -75,7 +75,7 @@ class IdempotencyMiddleware:
|
||||
|
||||
if created:
|
||||
resp = self.get_response(request)
|
||||
with transaction.atomic():
|
||||
with transaction.atomic(durable=True):
|
||||
if resp.status_code in (409, 429, 500, 503):
|
||||
# This is the exception: These calls are *meant* to be retried!
|
||||
call.delete()
|
||||
|
||||
@@ -19,3 +19,45 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
import json

from rest_framework import serializers


class AsymmetricField(serializers.Field):
    def __init__(self, read, write, **kwargs):
        self.read = read
        self.write = write
        super().__init__(
            required=self.write.required,
            default=self.write.default,
            initial=self.write.initial,
            source=self.write.source if self.write.source != self.write.field_name else None,
            label=self.write.label,
            allow_null=self.write.allow_null,
            error_messages=self.write.error_messages,
            validators=self.write.validators,
            **kwargs
        )

    def to_internal_value(self, data):
        return self.write.to_internal_value(data)

    def to_representation(self, value):
        return self.read.to_representation(value)

    def run_validation(self, data=serializers.empty):
        return self.write.run_validation(data)


class CompatibleJSONField(serializers.JSONField):
    def to_internal_value(self, data):
        try:
            return json.dumps(data)
        except (TypeError, ValueError):
            self.fail('invalid')

    def to_representation(self, value):
        if value:
            return json.loads(value)
        return value

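A short sketch of how the new AsymmetricField is meant to be wired into a serializer (the serializer and field names below are illustrative; the real usage in this changeset is the expandable owner_ticket field further down): validation and writes are delegated to the write field, while output rendering goes through the read field.

from rest_framework import serializers

from pretix.api.serializers import AsymmetricField

class ExampleSerializer(serializers.Serializer):
    # Accepts an integer on write, renders a string on read.
    ref = AsymmetricField(
        read=serializers.CharField(read_only=True),
        write=serializers.IntegerField(required=False, allow_null=True),
    )
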
@@ -46,11 +46,15 @@ from rest_framework import serializers
|
||||
from rest_framework.fields import ChoiceField, Field
|
||||
from rest_framework.relations import SlugRelatedField
|
||||
|
||||
from pretix.api.serializers import CompatibleJSONField
|
||||
from pretix.api.serializers.i18n import I18nAwareModelSerializer
|
||||
from pretix.api.serializers.settings import SettingsSerializer
|
||||
from pretix.base.models import Device, Event, TaxRule, TeamAPIToken
|
||||
from pretix.base.models.event import SubEvent
|
||||
from pretix.base.models.items import SubEventItem, SubEventItemVariation
|
||||
from pretix.base.models.items import (
|
||||
ItemMetaProperty, SubEventItem, SubEventItemVariation,
|
||||
)
|
||||
from pretix.base.models.tax import CustomRulesValidator
|
||||
from pretix.base.services.seating import (
|
||||
SeatProtected, generate_seats, validate_plan_change,
|
||||
)
|
||||
@@ -648,9 +652,16 @@ class SubEventSerializer(I18nAwareModelSerializer):
|
||||
|
||||
|
||||
class TaxRuleSerializer(CountryFieldMixin, I18nAwareModelSerializer):
|
||||
custom_rules = CompatibleJSONField(
|
||||
validators=[CustomRulesValidator()],
|
||||
required=False,
|
||||
allow_null=True,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = TaxRule
|
||||
fields = ('id', 'name', 'rate', 'price_includes_tax', 'eu_reverse_charge', 'home_country', 'internal_name', 'keep_gross_if_rate_changes')
|
||||
fields = ('id', 'name', 'rate', 'price_includes_tax', 'eu_reverse_charge', 'home_country', 'internal_name',
|
||||
'keep_gross_if_rate_changes', 'custom_rules')


class EventSettingsSerializer(SettingsSerializer):
|
||||
@@ -683,6 +694,7 @@ class EventSettingsSerializer(SettingsSerializer):
|
||||
'waiting_list_phones_asked',
|
||||
'waiting_list_phones_required',
|
||||
'waiting_list_phones_explanation_text',
|
||||
'waiting_list_limit_per_user',
|
||||
'max_items_per_order',
|
||||
'reservation_time',
|
||||
'contact_mail',
|
||||
@@ -716,6 +728,7 @@ class EventSettingsSerializer(SettingsSerializer):
|
||||
'payment_term_minutes',
|
||||
'payment_term_last',
|
||||
'payment_term_expire_automatically',
|
||||
'payment_term_expire_delay_days',
|
||||
'payment_term_accept_late',
|
||||
'payment_explanation',
|
||||
'payment_pending_hidden',
|
||||
@@ -765,6 +778,7 @@ class EventSettingsSerializer(SettingsSerializer):
|
||||
'invoice_footer_text',
|
||||
'invoice_eu_currencies',
|
||||
'invoice_logo_image',
|
||||
'invoice_renderer_highlight_order_code',
|
||||
'cancel_allow_user',
|
||||
'cancel_allow_user_until',
|
||||
'cancel_allow_user_unpaid_keep',
|
||||
@@ -803,6 +817,10 @@ class EventSettingsSerializer(SettingsSerializer):
|
||||
'reusable_media_type_nfc_uid',
|
||||
'reusable_media_type_nfc_uid_autocreate_giftcard',
|
||||
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
|
||||
'reusable_media_type_nfc_mf0aes',
|
||||
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
|
||||
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
|
||||
'reusable_media_type_nfc_mf0aes_random_uid',
|
||||
]
|
||||
readonly_fields = [
|
||||
# These are read-only since they are currently only settable on organizers, not events
|
||||
@@ -812,6 +830,10 @@ class EventSettingsSerializer(SettingsSerializer):
|
||||
'reusable_media_type_nfc_uid',
|
||||
'reusable_media_type_nfc_uid_autocreate_giftcard',
|
||||
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
|
||||
'reusable_media_type_nfc_mf0aes',
|
||||
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
|
||||
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
|
||||
'reusable_media_type_nfc_mf0aes_random_uid',
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
@@ -880,6 +902,8 @@ class DeviceEventSettingsSerializer(EventSettingsSerializer):
|
||||
'name_scheme',
|
||||
'reusable_media_type_barcode',
|
||||
'reusable_media_type_nfc_uid',
|
||||
'reusable_media_type_nfc_mf0aes',
|
||||
'reusable_media_type_nfc_mf0aes_random_uid',
|
||||
'system_question_order',
|
||||
]
|
||||
|
||||
@@ -902,3 +926,23 @@
                else []
            )
        )


class MultiLineStringField(serializers.Field):

    def to_representation(self, value):
        return [v.strip() for v in value.splitlines()]

    def to_internal_value(self, data):
        if isinstance(data, list) and len(data) > 0:
            return "\n".join(data)
        else:
            raise ValidationError('Invalid data type.')


class ItemMetaPropertiesSerializer(I18nAwareModelSerializer):
    allowed_values = MultiLineStringField(allow_null=True)

    class Meta:
        model = ItemMetaProperty
        fields = ('id', 'name', 'default', 'required', 'allowed_values')

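An illustrative request/response pair for the new item_meta_properties endpoint (values are made up): MultiLineStringField stores the list as a newline-joined string on the model and splits (and strips) it again on output.

# Illustrative payload for POST .../events/<event>/item_meta_properties/
request_body = {
    "name": "Language",
    "default": "en",
    "required": False,
    "allowed_values": ["en", "de", "fr"],
}
# Stored on the ItemMetaProperty instance as the string "en\nde\nfr";
# rendered back in the API response as:
response_fragment = {"allowed_values": ["en", "de", "fr"]}
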
@@ -93,7 +93,7 @@ class JobRunSerializer(serializers.Serializer):
|
||||
if events is not None and not isinstance(ex, OrganizerLevelExportMixin):
|
||||
self.fields["events"] = serializers.SlugRelatedField(
|
||||
queryset=events,
|
||||
required=True,
|
||||
required=False,
|
||||
allow_empty=False,
|
||||
slug_field='slug',
|
||||
many=True
|
||||
@@ -156,8 +156,9 @@ class JobRunSerializer(serializers.Serializer):
|
||||
def to_internal_value(self, data):
|
||||
if isinstance(data, QueryDict):
|
||||
data = data.copy()
|
||||
|
||||
for k, v in self.fields.items():
|
||||
if isinstance(v, serializers.ManyRelatedField) and k not in data:
|
||||
if isinstance(v, serializers.ManyRelatedField) and k not in data and k != "events":
|
||||
data[k] = []
|
||||
|
||||
for fk in self.fields.keys():
|
||||
|
||||
@@ -60,11 +60,15 @@ class NestedGiftCardSerializer(GiftCardSerializer):
|
||||
|
||||
|
||||
class ReusableMediaSerializer(I18nAwareModelSerializer):
|
||||
organizer = serializers.SlugRelatedField(slug_field='slug', read_only=True)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
if 'linked_giftcard' in self.context['request'].query_params.getlist('expand'):
|
||||
self.fields['linked_giftcard'] = NestedGiftCardSerializer(read_only=True)
|
||||
self.fields['linked_giftcard'] = NestedGiftCardSerializer(read_only=True, context=self.context)
|
||||
if 'linked_giftcard.owner_ticket' in self.context['request'].query_params.getlist('expand'):
|
||||
self.fields['linked_giftcard'].fields['owner_ticket'] = NestedOrderPositionSerializer(read_only=True, context=self.context)
|
||||
else:
|
||||
self.fields['linked_giftcard'] = serializers.PrimaryKeyRelatedField(
|
||||
required=False,
|
||||
@@ -109,6 +113,7 @@ class ReusableMediaSerializer(I18nAwareModelSerializer):
|
||||
model = ReusableMedium
|
||||
fields = (
|
||||
'id',
|
||||
'organizer',
|
||||
'created',
|
||||
'updated',
|
||||
'type',
|
||||
|
||||
@@ -19,7 +19,6 @@
|
||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from collections import Counter, defaultdict
|
||||
@@ -28,6 +27,7 @@ from decimal import Decimal
|
||||
import pycountry
|
||||
from django.conf import settings
|
||||
from django.core.files import File
|
||||
from django.db import models
|
||||
from django.db.models import F, Q
|
||||
from django.utils.encoding import force_str
|
||||
from django.utils.timezone import now
|
||||
@@ -39,6 +39,7 @@ from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.relations import SlugRelatedField
|
||||
from rest_framework.reverse import reverse
|
||||
|
||||
from pretix.api.serializers import CompatibleJSONField
|
||||
from pretix.api.serializers.event import SubEventSerializer
|
||||
from pretix.api.serializers.i18n import I18nAwareModelSerializer
|
||||
from pretix.api.serializers.item import (
|
||||
@@ -372,11 +373,15 @@ class PdfDataSerializer(serializers.Field):
|
||||
self.context['vars_images'] = get_images(self.context['event'])
|
||||
|
||||
for k, f in self.context['vars'].items():
|
||||
try:
|
||||
res[k] = f['evaluate'](instance, instance.order, ev)
|
||||
except:
|
||||
logger.exception('Evaluating PDF variable failed')
|
||||
res[k] = '(error)'
|
||||
if 'evaluate_bulk' in f:
|
||||
# Will be evaluated later by our list serializers
|
||||
res[k] = (f['evaluate_bulk'], instance)
|
||||
else:
|
||||
try:
|
||||
res[k] = f['evaluate'](instance, instance.order, ev)
|
||||
except:
|
||||
logger.exception('Evaluating PDF variable failed')
|
||||
res[k] = '(error)'
|
||||
|
||||
if not hasattr(ev, '_cached_meta_data'):
|
||||
ev._cached_meta_data = ev.meta_data
|
||||
@@ -429,6 +434,38 @@ class PdfDataSerializer(serializers.Field):
        return res


class OrderPositionListSerializer(serializers.ListSerializer):

    def to_representation(self, data):
        # We have a custom implementation of this method because PdfDataSerializer() might keep some elements unevaluated
        # with a (callable, input) tuple. We'll loop over these entries and evaluate them bulk-wise to save on SQL queries.

        if isinstance(self.parent, OrderSerializer) and isinstance(self.parent.parent, OrderListSerializer):
            # Do not execute our custom code because it will be executed by OrderListSerializer later for the
            # full result set.
            return super().to_representation(data)

        iterable = data.all() if isinstance(data, models.Manager) else data

        data = []
        evaluate_queue = defaultdict(list)

        for item in iterable:
            entry = self.child.to_representation(item)
            if "pdf_data" in entry:
                for k, v in entry["pdf_data"].items():
                    if isinstance(v, tuple) and callable(v[0]):
                        evaluate_queue[v[0]].append((v[1], entry, k))
            data.append(entry)

        for func, entries in evaluate_queue.items():
            results = func([item for (item, entry, k) in entries])
            for (item, entry, k), result in zip(entries, results):
                entry["pdf_data"][k] = result

        return data

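A sketch of the contract this bulk path assumes for a PDF variable (the variable and its implementation below are hypothetical): when a variable provides evaluate_bulk, PdfDataSerializer leaves a (callable, position) tuple in pdf_data, and the list serializer above calls that callable once with all collected positions, expecting results in the same order.

def _evaluate_single(position, order, event):
    return position.attendee_name or ""

def _evaluate_bulk(positions):
    # One pass (and ideally one query) for the whole result set instead of one per position.
    return [p.attendee_name or "" for p in positions]

EXAMPLE_PDF_VARIABLE = {
    "label": "Attendee name",
    "evaluate": _evaluate_single,
    "evaluate_bulk": _evaluate_bulk,
}
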
class OrderPositionSerializer(I18nAwareModelSerializer):
|
||||
checkins = CheckinSerializer(many=True, read_only=True)
|
||||
answers = AnswerSerializer(many=True)
|
||||
@@ -440,6 +477,7 @@ class OrderPositionSerializer(I18nAwareModelSerializer):
|
||||
attendee_name = serializers.CharField(required=False)
|
||||
|
||||
class Meta:
|
||||
list_serializer_class = OrderPositionListSerializer
|
||||
model = OrderPosition
|
||||
fields = ('id', 'order', 'positionid', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts',
|
||||
'company', 'street', 'zipcode', 'city', 'country', 'state', 'discount',
|
||||
@@ -468,6 +506,20 @@ class OrderPositionSerializer(I18nAwareModelSerializer):
|
||||
def validate(self, data):
|
||||
raise TypeError("this serializer is readonly")
|
||||
|
||||
def to_representation(self, data):
|
||||
if isinstance(self.parent, (OrderListSerializer, OrderPositionListSerializer)):
|
||||
# Do not execute our custom code because it will be executed by OrderListSerializer later for the
|
||||
# full result set.
|
||||
return super().to_representation(data)
|
||||
|
||||
entry = super().to_representation(data)
|
||||
if "pdf_data" in entry:
|
||||
for k, v in entry["pdf_data"].items():
|
||||
if isinstance(v, tuple) and callable(v[0]):
|
||||
entry["pdf_data"][k] = v[0]([v[1]])[0]
|
||||
|
||||
return entry
|
||||
|
||||
|
||||
class RequireAttentionField(serializers.Field):
|
||||
def to_representation(self, instance: OrderPosition):
|
||||
@@ -535,8 +587,9 @@ class OrderPaymentTypeField(serializers.Field):
    # TODO: Remove after pretix 2.2
    def to_representation(self, instance: Order):
        t = None
        for p in instance.payments.all():
            t = p.provider
        if instance.pk:
            for p in instance.payments.all():
                t = p.provider
        return t


@@ -544,10 +597,10 @@ class OrderPaymentDateField(serializers.DateField):
    # TODO: Remove after pretix 2.2
    def to_representation(self, instance: Order):
        t = None
        for p in instance.payments.all():
            t = p.payment_date or t
        if instance.pk:
            for p in instance.payments.all():
                t = p.payment_date or t
        if t:
            return super().to_representation(t.date())

@@ -612,6 +665,34 @@ class OrderURLField(serializers.URLField):
|
||||
})
|
||||
|
||||
|
||||
class OrderListSerializer(serializers.ListSerializer):
|
||||
|
||||
def to_representation(self, data):
|
||||
# We have a custom implementation of this method because PdfDataSerializer() might keep some elements
|
||||
# unevaluated with a (callable, input) tuple. We'll loop over these entries and evaluate them bulk-wise to
|
||||
# save on SQL queries.
|
||||
iterable = data.all() if isinstance(data, models.Manager) else data
|
||||
|
||||
data = []
|
||||
evaluate_queue = defaultdict(list)
|
||||
|
||||
for item in iterable:
|
||||
entry = self.child.to_representation(item)
|
||||
for p in entry.get("positions", []):
|
||||
if "pdf_data" in p:
|
||||
for k, v in p["pdf_data"].items():
|
||||
if isinstance(v, tuple) and callable(v[0]):
|
||||
evaluate_queue[v[0]].append((v[1], p, k))
|
||||
data.append(entry)
|
||||
|
||||
for func, entries in evaluate_queue.items():
|
||||
results = func([item for (item, entry, k) in entries])
|
||||
for (item, entry, k), result in zip(entries, results):
|
||||
entry["pdf_data"][k] = result
|
||||
|
||||
return data


class OrderSerializer(I18nAwareModelSerializer):
|
||||
invoice_address = InvoiceAddressSerializer(allow_null=True)
|
||||
positions = OrderPositionSerializer(many=True, read_only=True)
|
||||
@@ -626,6 +707,7 @@ class OrderSerializer(I18nAwareModelSerializer):
|
||||
|
||||
class Meta:
|
||||
model = Order
|
||||
list_serializer_class = OrderListSerializer
|
||||
fields = (
|
||||
'code', 'status', 'testmode', 'secret', 'email', 'phone', 'locale', 'datetime', 'expires', 'payment_date',
|
||||
'payment_provider', 'fees', 'total', 'comment', 'custom_followup_at', 'invoice_address', 'positions', 'downloads',
|
||||
@@ -895,19 +977,6 @@ class OrderPositionCreateSerializer(I18nAwareModelSerializer):
|
||||
return data
|
||||
|
||||
|
||||
class CompatibleJSONField(serializers.JSONField):
|
||||
def to_internal_value(self, data):
|
||||
try:
|
||||
return json.dumps(data)
|
||||
except (TypeError, ValueError):
|
||||
self.fail('invalid')
|
||||
|
||||
def to_representation(self, value):
|
||||
if value:
|
||||
return json.loads(value)
|
||||
return value
|
||||
|
||||
|
||||
class WrappedList:
|
||||
def __init__(self, data):
|
||||
self._data = data
|
||||
@@ -1363,6 +1432,7 @@ class OrderCreateSerializer(I18nAwareModelSerializer):
|
||||
answers.append(answ)
|
||||
pos.answers = answers
|
||||
pos.pseudonymization_id = "PREVIEW"
|
||||
pos.checkins = []
|
||||
pos_map[pos.positionid] = pos
|
||||
else:
|
||||
if pos.voucher:
|
||||
@@ -1459,6 +1529,8 @@ class OrderCreateSerializer(I18nAwareModelSerializer):
|
||||
if simulate:
|
||||
order.fees = fees
|
||||
order.positions = pos_map.values()
|
||||
order.payments = []
|
||||
order.refunds = []
|
||||
return order # ignore payments
|
||||
else:
|
||||
order.save(update_fields=['total'])
|
||||
|
||||
@@ -70,6 +70,8 @@ class OrderPositionCreateForExistingOrderSerializer(OrderPositionCreateSerialize
|
||||
|
||||
def validate(self, data):
|
||||
data = super().validate(data)
|
||||
if 'order' in self.context:
|
||||
data['order'] = self.context['order']
|
||||
if data.get('addon_to'):
|
||||
try:
|
||||
data['addon_to'] = data['order'].positions.get(positionid=data['addon_to'])
|
||||
|
||||
@@ -22,21 +22,23 @@
|
||||
import logging
|
||||
from decimal import Decimal
|
||||
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db.models import Q
|
||||
from django.utils.crypto import get_random_string
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
from rest_framework.exceptions import ValidationError
|
||||
|
||||
from pretix.api.serializers import AsymmetricField
|
||||
from pretix.api.serializers.i18n import I18nAwareModelSerializer
|
||||
from pretix.api.serializers.order import CompatibleJSONField
|
||||
from pretix.api.serializers.settings import SettingsSerializer
|
||||
from pretix.base.auth import get_auth_backends
|
||||
from pretix.base.i18n import get_language_without_region
|
||||
from pretix.base.models import (
|
||||
Customer, Device, GiftCard, GiftCardTransaction, Membership,
|
||||
MembershipType, Organizer, SeatingPlan, Team, TeamAPIToken, TeamInvite,
|
||||
User,
|
||||
Customer, Device, GiftCard, GiftCardAcceptance, GiftCardTransaction,
|
||||
Membership, MembershipType, OrderPosition, Organizer, ReusableMedium,
|
||||
SeatingPlan, Team, TeamAPIToken, TeamInvite, User,
|
||||
)
|
||||
from pretix.base.models.seating import SeatingPlanLayoutValidator
|
||||
from pretix.base.services.mail import SendMailException, mail
|
||||
@@ -127,8 +129,52 @@ class MembershipSerializer(I18nAwareModelSerializer):
        return super().update(instance, validated_data)


class FlexibleTicketRelatedField(serializers.PrimaryKeyRelatedField):

    def to_internal_value(self, data):
        queryset = self.get_queryset()

        if isinstance(data, int):
            try:
                return queryset.get(pk=data)
            except ObjectDoesNotExist:
                self.fail('does_not_exist', pk_value=data)

        elif isinstance(data, str):
            try:
                return queryset.get(
                    Q(secret=data)
                    | Q(pseudonymization_id=data)
                    | Q(pk__in=ReusableMedium.objects.filter(
                        organizer=self.context['organizer'],
                        type='barcode',
                        identifier=data
                    ))
                )
            except ObjectDoesNotExist:
                self.fail('does_not_exist', pk_value=data)

        self.fail('incorrect_type', data_type=type(data).__name__)

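Illustrative write payloads for the gift card owner_ticket field handled by FlexibleTicketRelatedField above (all values are made up): an OrderPosition primary key, a ticket secret or pseudonymization ID, or the identifier of a barcode-type reusable medium are all accepted.

payload_by_pk = {"owner_ticket": 1234}
payload_by_secret_or_pseudonymization_id = {"owner_ticket": "hutjztuxhkbtwgesv3hahukio5nq4sji"}
payload_by_barcode_medium = {"owner_ticket": "MEDIUM-IDENTIFIER-0001"}
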
class GiftCardSerializer(I18nAwareModelSerializer):
|
||||
value = serializers.DecimalField(max_digits=13, decimal_places=2, min_value=Decimal('0.00'))
|
||||
owner_ticket = FlexibleTicketRelatedField(required=False, allow_null=True, queryset=OrderPosition.all.none())
|
||||
issuer = serializers.SlugRelatedField(slug_field='slug', read_only=True)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['owner_ticket'].queryset = OrderPosition.objects.filter(order__event__organizer=self.context['organizer'])
|
||||
|
||||
if 'owner_ticket' in self.context['request'].query_params.getlist('expand'):
|
||||
from pretix.api.serializers.media import (
|
||||
NestedOrderPositionSerializer,
|
||||
)
|
||||
|
||||
self.fields['owner_ticket'] = AsymmetricField(
|
||||
NestedOrderPositionSerializer(read_only=True, context=self.context),
|
||||
self.fields['owner_ticket'],
|
||||
)
|
||||
|
||||
def validate(self, data):
|
||||
data = super().validate(data)
|
||||
@@ -137,8 +183,11 @@ class GiftCardSerializer(I18nAwareModelSerializer):
|
||||
qs = GiftCard.objects.filter(
|
||||
secret=s
|
||||
).filter(
|
||||
Q(issuer=self.context["organizer"]) | Q(
|
||||
issuer__gift_card_collector_acceptance__collector=self.context["organizer"])
|
||||
Q(issuer=self.context["organizer"]) |
|
||||
Q(issuer__in=GiftCardAcceptance.objects.filter(
|
||||
acceptor=self.context["organizer"],
|
||||
active=True,
|
||||
).values_list('issuer', flat=True))
|
||||
)
|
||||
if self.instance:
|
||||
qs = qs.exclude(pk=self.instance.pk)
|
||||
@@ -151,7 +200,8 @@ class GiftCardSerializer(I18nAwareModelSerializer):
|
||||
|
||||
class Meta:
|
||||
model = GiftCard
|
||||
fields = ('id', 'secret', 'issuance', 'value', 'currency', 'testmode', 'expires', 'conditions')
|
||||
fields = ('id', 'secret', 'issuance', 'value', 'currency', 'testmode', 'expires', 'conditions', 'owner_ticket',
|
||||
'issuer')
|
||||
|
||||
|
||||
class OrderEventSlugField(serializers.RelatedField):
|
||||
@@ -162,11 +212,12 @@ class OrderEventSlugField(serializers.RelatedField):
|
||||
|
||||
class GiftCardTransactionSerializer(I18nAwareModelSerializer):
|
||||
order = serializers.SlugRelatedField(slug_field='code', read_only=True)
|
||||
acceptor = serializers.SlugRelatedField(slug_field='slug', read_only=True)
|
||||
event = OrderEventSlugField(source='order', read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = GiftCardTransaction
|
||||
fields = ('id', 'datetime', 'value', 'event', 'order', 'text')
|
||||
fields = ('id', 'datetime', 'value', 'event', 'order', 'text', 'info', 'acceptor')
|
||||
|
||||
|
||||
class EventSlugField(serializers.SlugRelatedField):
|
||||
@@ -200,6 +251,8 @@ class DeviceSerializer(serializers.ModelSerializer):
|
||||
unique_serial = serializers.CharField(read_only=True)
|
||||
hardware_brand = serializers.CharField(read_only=True)
|
||||
hardware_model = serializers.CharField(read_only=True)
|
||||
os_name = serializers.CharField(read_only=True)
|
||||
os_version = serializers.CharField(read_only=True)
|
||||
software_brand = serializers.CharField(read_only=True)
|
||||
software_version = serializers.CharField(read_only=True)
|
||||
created = serializers.DateTimeField(read_only=True)
|
||||
@@ -212,7 +265,7 @@ class DeviceSerializer(serializers.ModelSerializer):
|
||||
fields = (
|
||||
'device_id', 'unique_serial', 'initialization_token', 'all_events', 'limit_events',
|
||||
'revoked', 'name', 'created', 'initialized', 'hardware_brand', 'hardware_model',
|
||||
'software_brand', 'software_version', 'security_profile'
|
||||
'os_name', 'os_version', 'software_brand', 'software_version', 'security_profile'
|
||||
)
|
||||
|
||||
|
||||
@@ -339,6 +392,9 @@ class OrganizerSettingsSerializer(SettingsSerializer):
|
||||
'reusable_media_type_nfc_uid',
|
||||
'reusable_media_type_nfc_uid_autocreate_giftcard',
|
||||
'reusable_media_type_nfc_uid_autocreate_giftcard_currency',
|
||||
'reusable_media_type_nfc_mf0aes',
|
||||
'reusable_media_type_nfc_mf0aes_autocreate_giftcard',
|
||||
'reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency',
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
||||
@@ -63,7 +63,8 @@ class VoucherSerializer(I18nAwareModelSerializer):
|
||||
model = Voucher
|
||||
fields = ('id', 'code', 'max_usages', 'redeemed', 'min_usages', 'valid_until', 'block_quota',
|
||||
'allow_ignore_quota', 'price_mode', 'value', 'item', 'variation', 'quota',
|
||||
'tag', 'comment', 'subevent', 'show_hidden_items', 'seat')
|
||||
'tag', 'comment', 'subevent', 'show_hidden_items', 'seat', 'all_addons_included',
|
||||
'all_bundles_included')
|
||||
read_only_fields = ('id', 'redeemed')
|
||||
list_serializer_class = VoucherListSerializer
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ class WaitingListSerializer(I18nAwareModelSerializer):
|
||||
full_data = self.to_internal_value(self.to_representation(self.instance)) if self.instance else {}
|
||||
full_data.update(data)
|
||||
|
||||
WaitingListEntry.clean_duplicate(full_data.get('email'), full_data.get('item'), full_data.get('variation'),
|
||||
WaitingListEntry.clean_duplicate(event, full_data.get('email'), full_data.get('item'), full_data.get('variation'),
|
||||
full_data.get('subevent'), self.instance.pk if self.instance else None)
|
||||
WaitingListEntry.clean_itemvar(event, full_data.get('item'), full_data.get('variation'))
|
||||
WaitingListEntry.clean_subevent(event, full_data.get('subevent'))
|
||||
|
||||
@@ -35,8 +35,7 @@
|
||||
import importlib
|
||||
|
||||
from django.apps import apps
|
||||
from django.conf.urls import re_path
|
||||
from django.urls import include
|
||||
from django.urls import include, re_path
|
||||
from rest_framework import routers
|
||||
|
||||
from pretix.api.views import cart
|
||||
@@ -89,6 +88,7 @@ event_router.register(r'checkinlists', checkin.CheckinListViewSet)
|
||||
event_router.register(r'cartpositions', cart.CartPositionViewSet)
|
||||
event_router.register(r'exporters', exporters.EventExportersViewSet, basename='exporters')
|
||||
event_router.register(r'shredders', shredders.EventShreddersViewSet, basename='shredders')
|
||||
event_router.register(r'item_meta_properties', event.ItemMetaPropertiesViewSet)
|
||||
|
||||
checkinlist_router = routers.DefaultRouter()
|
||||
checkinlist_router.register(r'positions', checkin.CheckinListPositionViewSet, basename='checkinlistpos')
|
||||
|
||||
@@ -396,7 +396,7 @@ def _checkin_list_position_queryset(checkinlists, ignore_status=False, ignore_pr
|
||||
|
||||
def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force, checkin_type, ignore_unpaid, nonce,
|
||||
untrusted_input, user, auth, expand, pdf_data, request, questions_supported, canceled_supported,
|
||||
source_type='barcode', legacy_url_support=False):
|
||||
source_type='barcode', legacy_url_support=False, simulate=False):
|
||||
if not checkinlists:
|
||||
raise ValidationError('No check-in list passed.')
|
||||
|
||||
@@ -433,6 +433,8 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
||||
)
|
||||
raw_barcode_for_checkin = None
|
||||
from_revoked_secret = False
|
||||
if simulate:
|
||||
common_checkin_args['__fake_arg_to_prevent_this_from_being_saved'] = True
|
||||
|
||||
    # 1. Gather a list of positions that could be the one we are looking for, either from their ID, secret or
    # parent secret
@@ -472,13 +474,14 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
||||
revoked_matches = list(
|
||||
RevokedTicketSecret.objects.filter(event_id__in=list_by_event.keys(), secret=raw_barcode))
|
||||
if len(revoked_matches) == 0:
|
||||
checkinlists[0].event.log_action('pretix.event.checkin.unknown', data={
|
||||
'datetime': datetime,
|
||||
'type': checkin_type,
|
||||
'list': checkinlists[0].pk,
|
||||
'barcode': raw_barcode,
|
||||
'searched_lists': [cl.pk for cl in checkinlists]
|
||||
}, user=user, auth=auth)
|
||||
if not simulate:
|
||||
checkinlists[0].event.log_action('pretix.event.checkin.unknown', data={
|
||||
'datetime': datetime,
|
||||
'type': checkin_type,
|
||||
'list': checkinlists[0].pk,
|
||||
'barcode': raw_barcode,
|
||||
'searched_lists': [cl.pk for cl in checkinlists]
|
||||
}, user=user, auth=auth)
|
||||
|
||||
for cl in checkinlists:
|
||||
for k, s in cl.event.ticket_secret_generators.items():
|
||||
@@ -492,12 +495,13 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
||||
except:
|
||||
pass
|
||||
|
||||
Checkin.objects.create(
|
||||
position=None,
|
||||
successful=False,
|
||||
error_reason=Checkin.REASON_INVALID,
|
||||
**common_checkin_args,
|
||||
)
|
||||
if not simulate:
|
||||
Checkin.objects.create(
|
||||
position=None,
|
||||
successful=False,
|
||||
error_reason=Checkin.REASON_INVALID,
|
||||
**common_checkin_args,
|
||||
)
|
||||
|
||||
if force and legacy_url_support and isinstance(auth, Device):
|
||||
# There was a bug in libpretixsync: If you scanned a ticket in offline mode that was
|
||||
@@ -539,19 +543,20 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
||||
from_revoked_secret = True
|
||||
else:
|
||||
op = revoked_matches[0].position
|
||||
op.order.log_action('pretix.event.checkin.revoked', data={
|
||||
'datetime': datetime,
|
||||
'type': checkin_type,
|
||||
'list': list_by_event[revoked_matches[0].event_id].pk,
|
||||
'barcode': raw_barcode
|
||||
}, user=user, auth=auth)
|
||||
common_checkin_args['list'] = list_by_event[revoked_matches[0].event_id]
|
||||
Checkin.objects.create(
|
||||
position=op,
|
||||
successful=False,
|
||||
error_reason=Checkin.REASON_REVOKED,
|
||||
**common_checkin_args
|
||||
)
|
||||
if not simulate:
|
||||
op.order.log_action('pretix.event.checkin.revoked', data={
|
||||
'datetime': datetime,
|
||||
'type': checkin_type,
|
||||
'list': list_by_event[revoked_matches[0].event_id].pk,
|
||||
'barcode': raw_barcode
|
||||
}, user=user, auth=auth)
|
||||
common_checkin_args['list'] = list_by_event[revoked_matches[0].event_id]
|
||||
Checkin.objects.create(
|
||||
position=op,
|
||||
successful=False,
|
||||
error_reason=Checkin.REASON_REVOKED,
|
||||
**common_checkin_args
|
||||
)
|
||||
return Response({
|
||||
'status': 'error',
|
||||
'reason': Checkin.REASON_REVOKED,
|
||||
@@ -562,7 +567,9 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
||||
'list': MiniCheckinListSerializer(list_by_event[revoked_matches[0].event_id]).data,
|
||||
}, status=400)
|
||||
else:
|
||||
op_candidates = [media.linked_orderposition] + list(media.linked_orderposition.addons.all())
|
||||
op_candidates = [media.linked_orderposition]
|
||||
if list_by_event[media.linked_orderposition.order.event_id].addon_match:
|
||||
op_candidates += list(media.linked_orderposition.addons.all())
|
||||
|
||||
# 3. Handle the "multiple options found" case: Except for the unlikely case of a secret being also a valid primary
|
||||
# key on the same list, we're probably dealing with the ``addon_match`` case here and need to figure out
|
||||
@@ -586,24 +593,25 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
||||
# We choose the first match (regardless of product) for the logging since it's most likely to be the
|
||||
# base product according to our order_by above.
|
||||
op = op_candidates[0]
|
||||
op.order.log_action('pretix.event.checkin.denied', data={
|
||||
'position': op.id,
|
||||
'positionid': op.positionid,
|
||||
'errorcode': Checkin.REASON_AMBIGUOUS,
|
||||
'reason_explanation': None,
|
||||
'force': force,
|
||||
'datetime': datetime,
|
||||
'type': checkin_type,
|
||||
'list': list_by_event[op.order.event_id].pk,
|
||||
}, user=user, auth=auth)
|
||||
common_checkin_args['list'] = list_by_event[op.order.event_id]
|
||||
Checkin.objects.create(
|
||||
position=op,
|
||||
successful=False,
|
||||
error_reason=Checkin.REASON_AMBIGUOUS,
|
||||
error_explanation=None,
|
||||
**common_checkin_args,
|
||||
)
|
||||
if not simulate:
|
||||
op.order.log_action('pretix.event.checkin.denied', data={
|
||||
'position': op.id,
|
||||
'positionid': op.positionid,
|
||||
'errorcode': Checkin.REASON_AMBIGUOUS,
|
||||
'reason_explanation': None,
|
||||
'force': force,
|
||||
'datetime': datetime,
|
||||
'type': checkin_type,
|
||||
'list': list_by_event[op.order.event_id].pk,
|
||||
}, user=user, auth=auth)
|
||||
common_checkin_args['list'] = list_by_event[op.order.event_id]
|
||||
Checkin.objects.create(
|
||||
position=op,
|
||||
successful=False,
|
||||
error_reason=Checkin.REASON_AMBIGUOUS,
|
||||
error_explanation=None,
|
||||
**common_checkin_args,
|
||||
)
|
||||
return Response({
|
||||
'status': 'error',
|
||||
'reason': Checkin.REASON_AMBIGUOUS,
|
||||
@@ -650,6 +658,7 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
||||
raw_barcode=raw_barcode_for_checkin,
|
||||
raw_source_type=source_type,
|
||||
from_revoked_secret=from_revoked_secret,
|
||||
simulate=simulate,
|
||||
)
|
||||
except RequiredQuestionsError as e:
|
||||
return Response({
|
||||
@@ -662,23 +671,24 @@ def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force,
|
||||
'list': MiniCheckinListSerializer(list_by_event[op.order.event_id]).data,
|
||||
}, status=400)
|
||||
except CheckInError as e:
|
||||
op.order.log_action('pretix.event.checkin.denied', data={
|
||||
'position': op.id,
|
||||
'positionid': op.positionid,
|
||||
'errorcode': e.code,
|
||||
'reason_explanation': e.reason,
|
||||
'force': force,
|
||||
'datetime': datetime,
|
||||
'type': checkin_type,
|
||||
'list': list_by_event[op.order.event_id].pk,
|
||||
}, user=user, auth=auth)
|
||||
Checkin.objects.create(
|
||||
position=op,
|
||||
successful=False,
|
||||
error_reason=e.code,
|
||||
error_explanation=e.reason,
|
||||
**common_checkin_args,
|
||||
)
|
||||
if not simulate:
|
||||
op.order.log_action('pretix.event.checkin.denied', data={
|
||||
'position': op.id,
|
||||
'positionid': op.positionid,
|
||||
'errorcode': e.code,
|
||||
'reason_explanation': e.reason,
|
||||
'force': force,
|
||||
'datetime': datetime,
|
||||
'type': checkin_type,
|
||||
'list': list_by_event[op.order.event_id].pk,
|
||||
}, user=user, auth=auth)
|
||||
Checkin.objects.create(
|
||||
position=op,
|
||||
successful=False,
|
||||
error_reason=e.code,
|
||||
error_explanation=e.reason,
|
||||
**common_checkin_args,
|
||||
)
|
||||
return Response({
|
||||
'status': 'error',
|
||||
'reason': e.code,
|
||||
|
||||
@@ -19,8 +19,12 @@
|
||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
import base64
|
||||
import logging
|
||||
|
||||
from cryptography.hazmat.backends.openssl.backend import Backend
|
||||
from cryptography.hazmat.primitives.asymmetric import padding
|
||||
from cryptography.hazmat.primitives.serialization import load_pem_public_key
|
||||
from django.db.models import Exists, OuterRef, Q
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.utils.timezone import now
|
||||
@@ -34,6 +38,8 @@ from pretix.api.auth.device import DeviceTokenAuthentication
|
||||
from pretix.api.views.version import numeric_version
|
||||
from pretix.base.models import CheckinList, Device, SubEvent
|
||||
from pretix.base.models.devices import Gate, generate_api_token
|
||||
from pretix.base.models.media import MediumKeySet
|
||||
from pretix.base.services.media import get_keysets_for_organizer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -42,17 +48,73 @@ class InitializationRequestSerializer(serializers.Serializer):
|
||||
token = serializers.CharField(max_length=190)
|
||||
hardware_brand = serializers.CharField(max_length=190)
|
||||
hardware_model = serializers.CharField(max_length=190)
|
||||
os_name = serializers.CharField(max_length=190, required=False, allow_null=True)
|
||||
os_version = serializers.CharField(max_length=190, required=False, allow_null=True)
|
||||
software_brand = serializers.CharField(max_length=190)
|
||||
software_version = serializers.CharField(max_length=190)
|
||||
info = serializers.JSONField(required=False, allow_null=True)
|
||||
rsa_pubkey = serializers.CharField(required=False, allow_null=True)
|
||||
|
||||
def validate(self, attrs):
|
||||
if attrs.get('rsa_pubkey'):
|
||||
try:
|
||||
load_pem_public_key(
|
||||
attrs['rsa_pubkey'].encode(), Backend()
|
||||
)
|
||||
except:
|
||||
raise ValidationError({'rsa_pubkey': ['Not a valid public key.']})
|
||||
return attrs
|
||||
|
||||
|
||||
class UpdateRequestSerializer(serializers.Serializer):
|
||||
hardware_brand = serializers.CharField(max_length=190)
|
||||
hardware_model = serializers.CharField(max_length=190)
|
||||
os_name = serializers.CharField(max_length=190, required=False, allow_null=True)
|
||||
os_version = serializers.CharField(max_length=190, required=False, allow_null=True)
|
||||
software_brand = serializers.CharField(max_length=190)
|
||||
software_version = serializers.CharField(max_length=190)
|
||||
info = serializers.JSONField(required=False, allow_null=True)
|
||||
rsa_pubkey = serializers.CharField(required=False, allow_null=True)
|
||||
|
||||
def validate(self, attrs):
|
||||
if attrs.get('rsa_pubkey'):
|
||||
try:
|
||||
load_pem_public_key(
|
||||
attrs['rsa_pubkey'].encode(), Backend()
|
||||
)
|
||||
except:
|
||||
raise ValidationError({'rsa_pubkey': ['Not a valid public key.']})
|
||||
return attrs
|
||||
|
||||
|
||||
class RSAEncryptedField(serializers.Field):
    def to_representation(self, value):
        public_key = load_pem_public_key(
            self.context['device'].rsa_pubkey.encode(), Backend()
        )
        cipher_text = public_key.encrypt(
            # RSA/ECB/PKCS1Padding
            value,
            padding.PKCS1v15()
        )
        return base64.b64encode(cipher_text).decode()

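A sketch of the matching client-side step (function and variable names are illustrative, and the use of the cryptography package's standard loading/decryption API is an assumption): a device that registered an rsa_pubkey is expected to base64-decode the uid_key/diversification_key values and decrypt them with its private RSA key and PKCS#1 v1.5 padding, mirroring RSAEncryptedField above.

import base64

from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.serialization import load_pem_private_key

def decrypt_keyset_value(pem_private_key: bytes, encoded: str) -> bytes:
    private_key = load_pem_private_key(pem_private_key, password=None)
    return private_key.decrypt(base64.b64decode(encoded), padding.PKCS1v15())
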
class MediumKeySetSerializer(serializers.ModelSerializer):
|
||||
uid_key = RSAEncryptedField(read_only=True)
|
||||
diversification_key = RSAEncryptedField(read_only=True)
|
||||
organizer = serializers.SlugRelatedField(slug_field='slug', read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = MediumKeySet
|
||||
fields = [
|
||||
'public_id',
|
||||
'organizer',
|
||||
'active',
|
||||
'media_type',
|
||||
'uid_key',
|
||||
'diversification_key',
|
||||
]
|
||||
|
||||
|
||||
class GateSerializer(serializers.ModelSerializer):
|
||||
@@ -93,12 +155,18 @@ class InitializeView(APIView):
|
||||
if device.initialized:
|
||||
raise ValidationError({'token': ['This initialization token has already been used.']})
|
||||
|
||||
if device.revoked:
|
||||
raise ValidationError({'token': ['This initialization token has been revoked.']})
|
||||
|
||||
device.initialized = now()
|
||||
device.hardware_brand = serializer.validated_data.get('hardware_brand')
|
||||
device.hardware_model = serializer.validated_data.get('hardware_model')
|
||||
device.os_name = serializer.validated_data.get('os_name')
|
||||
device.os_version = serializer.validated_data.get('os_version')
|
||||
device.software_brand = serializer.validated_data.get('software_brand')
|
||||
device.software_version = serializer.validated_data.get('software_version')
|
||||
device.info = serializer.validated_data.get('info')
|
||||
device.rsa_pubkey = serializer.validated_data.get('rsa_pubkey')
|
||||
device.api_token = generate_api_token()
|
||||
device.save()
|
||||
|
||||
@@ -117,8 +185,15 @@ class UpdateView(APIView):
|
||||
device = request.auth
|
||||
device.hardware_brand = serializer.validated_data.get('hardware_brand')
|
||||
device.hardware_model = serializer.validated_data.get('hardware_model')
|
||||
device.os_name = serializer.validated_data.get('os_name')
|
||||
device.os_version = serializer.validated_data.get('os_version')
|
||||
device.software_brand = serializer.validated_data.get('software_brand')
|
||||
device.software_version = serializer.validated_data.get('software_version')
|
||||
if serializer.validated_data.get('rsa_pubkey') and serializer.validated_data.get('rsa_pubkey') != device.rsa_pubkey:
|
||||
if device.rsa_pubkey:
|
||||
raise ValidationError({'rsa_pubkey': ['You cannot change the rsa_pubkey of the device once it is set.']})
|
||||
else:
|
||||
device.rsa_pubkey = serializer.validated_data.get('rsa_pubkey')
|
||||
device.info = serializer.validated_data.get('info')
|
||||
device.save()
|
||||
device.log_action('pretix.device.updated', data=serializer.validated_data, auth=device)
|
||||
@@ -166,8 +241,12 @@ class InfoView(APIView):
|
||||
'pretix': __version__,
|
||||
'pretix_numeric': numeric_version(__version__),
|
||||
}
|
||||
}
|
||||
|
||||
},
|
||||
'medium_key_sets': MediumKeySetSerializer(
|
||||
get_keysets_for_organizer(device.organizer),
|
||||
many=True,
|
||||
context={'device': request.auth}
|
||||
).data if device.rsa_pubkey else []
|
||||
})
@@ -47,11 +47,13 @@ from pretix.api.auth.permission import EventCRUDPermission
|
||||
from pretix.api.pagination import TotalOrderingFilter
|
||||
from pretix.api.serializers.event import (
|
||||
CloneEventSerializer, DeviceEventSettingsSerializer, EventSerializer,
|
||||
EventSettingsSerializer, SubEventSerializer, TaxRuleSerializer,
|
||||
EventSettingsSerializer, ItemMetaPropertiesSerializer, SubEventSerializer,
|
||||
TaxRuleSerializer,
|
||||
)
|
||||
from pretix.api.views import ConditionalListView
|
||||
from pretix.base.models import (
|
||||
CartPosition, Device, Event, SeatCategoryMapping, TaxRule, TeamAPIToken,
|
||||
CartPosition, Device, Event, ItemMetaProperty, SeatCategoryMapping,
|
||||
TaxRule, TeamAPIToken,
|
||||
)
|
||||
from pretix.base.models.event import SubEvent
|
||||
from pretix.base.services.quotas import QuotaAvailability
|
||||
@@ -69,6 +71,8 @@ with scopes_disabled():
|
||||
ends_after = django_filters.rest_framework.IsoDateTimeFilter(method='ends_after_qs')
|
||||
sales_channel = django_filters.rest_framework.CharFilter(method='sales_channel_qs')
|
||||
search = django_filters.rest_framework.CharFilter(method='search_qs')
|
||||
date_from = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
|
||||
date_to = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
|
||||
|
||||
class Meta:
|
||||
model = Event
|
||||
@@ -334,6 +338,8 @@ with scopes_disabled():
|
||||
modified_since = django_filters.IsoDateTimeFilter(field_name='last_modified', lookup_expr='gte')
|
||||
sales_channel = django_filters.rest_framework.CharFilter(method='sales_channel_qs')
|
||||
search = django_filters.rest_framework.CharFilter(method='search_qs')
|
||||
date_from = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
|
||||
date_to = django_filters.rest_framework.IsoDateTimeFromToRangeFilter()
|
||||
|
||||
class Meta:
|
||||
model = SubEvent
|
||||
@@ -522,6 +528,54 @@ class TaxRuleViewSet(ConditionalListView, viewsets.ModelViewSet):
|
||||
super().perform_destroy(instance)
|
||||
|
||||
|
||||
class ItemMetaPropertiesViewSet(viewsets.ModelViewSet):
|
||||
serializer_class = ItemMetaPropertiesSerializer
|
||||
queryset = ItemMetaProperty.objects.none()
|
||||
write_permission = 'can_change_event_settings'
|
||||
|
||||
def get_queryset(self):
|
||||
qs = self.request.event.item_meta_properties.all()
|
||||
return qs
|
||||
|
||||
def get_serializer_context(self):
|
||||
ctx = super().get_serializer_context()
|
||||
ctx['organizer'] = self.request.organizer
|
||||
ctx['event'] = self.request.event
|
||||
return ctx
|
||||
|
||||
@transaction.atomic()
|
||||
def perform_destroy(self, instance):
|
||||
instance.log_action(
|
||||
'pretix.event.item_meta_property.deleted',
|
||||
user=self.request.user,
|
||||
auth=self.request.auth,
|
||||
data={'id': instance.pk}
|
||||
)
|
||||
instance.delete()
|
||||
|
||||
@transaction.atomic()
|
||||
def perform_create(self, serializer):
|
||||
inst = serializer.save(event=self.request.event)
|
||||
serializer.instance.log_action(
|
||||
'pretix.event.item_meta_property.added',
|
||||
user=self.request.user,
|
||||
auth=self.request.auth,
|
||||
data=self.request.data,
|
||||
)
|
||||
return inst
|
||||
|
||||
@transaction.atomic()
|
||||
def perform_update(self, serializer):
|
||||
inst = serializer.save(event=self.request.event)
|
||||
serializer.instance.log_action(
|
||||
'pretix.event.item_meta_property.changed',
|
||||
user=self.request.user,
|
||||
auth=self.request.auth,
|
||||
data=self.request.data,
|
||||
)
|
||||
return inst
|
||||
|
||||
|
||||
class EventSettingsView(views.APIView):
|
||||
permission = None
|
||||
write_permission = 'can_change_event_settings'
|
||||
|
||||
@@ -133,7 +133,12 @@ class EventExportersViewSet(ExportersMixin, viewsets.ViewSet):
|
||||
def exporters(self):
|
||||
exporters = []
|
||||
responses = register_data_exporters.send(self.request.event)
|
||||
for ex in sorted([response(self.request.event, self.request.organizer) for r, response in responses if response], key=lambda ex: str(ex.verbose_name)):
|
||||
raw_exporters = [response(self.request.event, self.request.organizer) for r, response in responses if response]
|
||||
raw_exporters = [
|
||||
ex for ex in raw_exporters
|
||||
if ex.available_for_user(self.request.user if self.request.user and self.request.user.is_authenticated else None)
|
||||
]
|
||||
for ex in sorted(raw_exporters, key=lambda ex: str(ex.verbose_name)):
|
||||
ex._serializer = JobRunSerializer(exporter=ex)
|
||||
exporters.append(ex)
|
||||
return exporters
|
||||
@@ -166,7 +171,7 @@ class OrganizerExportersViewSet(ExportersMixin, viewsets.ViewSet):
|
||||
if (
|
||||
not isinstance(ex, OrganizerLevelExportMixin) or
|
||||
perm_holder.has_organizer_permission(self.request.organizer, ex.organizer_required_permission, self.request)
|
||||
)
|
||||
) and ex.available_for_user(self.request.user if self.request.user and self.request.user.is_authenticated else None)
|
||||
]
|
||||
for ex in sorted(raw_exporters, key=lambda ex: str(ex.verbose_name)):
|
||||
ex._serializer = JobRunSerializer(exporter=ex, events=events)
|
||||
|
||||
@@ -39,7 +39,8 @@ from pretix.api.serializers.media import (
|
||||
)
|
||||
from pretix.base.media import MEDIA_TYPES
|
||||
from pretix.base.models import (
|
||||
Checkin, GiftCard, GiftCardTransaction, OrderPosition, ReusableMedium,
|
||||
Checkin, GiftCard, GiftCardAcceptance, GiftCardTransaction, OrderPosition,
|
||||
ReusableMedium,
|
||||
)
|
||||
from pretix.helpers import OF_SELF
|
||||
from pretix.helpers.dicts import merge_dicts
|
||||
@@ -103,6 +104,12 @@ class ReusableMediaViewSet(viewsets.ModelViewSet):
|
||||
auth=self.request.auth,
|
||||
data=merge_dicts(self.request.data, {'id': inst.pk})
|
||||
)
|
||||
mt = MEDIA_TYPES.get(serializer.validated_data["type"])
|
||||
if mt:
|
||||
m = mt.handle_new(self.request.organizer, inst, self.request.user, self.request.auth)
|
||||
if m:
|
||||
s = self.get_serializer(m)
|
||||
return Response({"result": s.data})
|
||||
|
||||
@transaction.atomic()
|
||||
def perform_update(self, serializer):
|
||||
@@ -135,12 +142,28 @@ class ReusableMediaViewSet(viewsets.ModelViewSet):
|
||||
s = self.get_serializer(m)
|
||||
return Response({"result": s.data})
|
||||
except ReusableMedium.DoesNotExist:
|
||||
mt = MEDIA_TYPES.get(s.validated_data["type"])
|
||||
if mt:
|
||||
m = mt.handle_unknown(request.organizer, s.validated_data["identifier"], request.user, request.auth)
|
||||
if m:
|
||||
s = self.get_serializer(m)
|
||||
return Response({"result": s.data})
|
||||
try:
|
||||
with scopes_disabled():
|
||||
m = ReusableMedium.objects.get(
|
||||
organizer__in=GiftCardAcceptance.objects.filter(
|
||||
acceptor=request.organizer,
|
||||
active=True,
|
||||
reusable_media=True,
|
||||
).values_list('issuer', flat=True),
|
||||
type=s.validated_data["type"],
|
||||
identifier=s.validated_data["identifier"],
|
||||
)
|
||||
m.linked_orderposition = None # not relevant for cross-organizer
|
||||
m.customer = None # not relevant for cross-organizer
|
||||
s = self.get_serializer(m)
|
||||
return Response({"result": s.data})
|
||||
except ReusableMedium.DoesNotExist:
|
||||
mt = MEDIA_TYPES.get(s.validated_data["type"])
|
||||
if mt:
|
||||
m = mt.handle_unknown(request.organizer, s.validated_data["identifier"], request.user, request.auth)
|
||||
if m:
|
||||
s = self.get_serializer(m)
|
||||
return Response({"result": s.data})
|
||||
|
||||
return Response({"result": None})
|
||||
|
||||
|
||||
@@ -23,9 +23,9 @@ import datetime
|
||||
import mimetypes
|
||||
import os
|
||||
from decimal import Decimal
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
import django_filters
|
||||
import pytz
|
||||
from django.db import transaction
|
||||
from django.db.models import (
|
||||
Exists, F, OuterRef, Prefetch, Q, Subquery, prefetch_related_objects,
|
||||
@@ -612,7 +612,7 @@ class OrderViewSet(viewsets.ModelViewSet):
                status=status.HTTP_400_BAD_REQUEST
            )

        tz = pytz.timezone(self.request.event.settings.timezone)
        tz = ZoneInfo(self.request.event.settings.timezone)
        new_date = make_aware(datetime.datetime.combine(
            new_date,
            datetime.time(hour=23, minute=59, second=59)
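For reference, a minimal sketch (with an illustrative timezone and date) of the pytz-to-zoneinfo switch used here and in the exporter changes below: both produce an aware datetime at the end of the chosen day, but ZoneInfo works directly with make_aware and avoids pytz's localize()/normalize() pitfalls.

import datetime
from zoneinfo import ZoneInfo

from django.utils.timezone import make_aware

tz = ZoneInfo("Europe/Berlin")  # illustrative; pretix reads this from the event settings
end_of_day = make_aware(
    datetime.datetime.combine(datetime.date(2024, 1, 31),
                              datetime.time(hour=23, minute=59, second=59)),
    tz,
)
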
@@ -661,7 +661,16 @@ class OrderViewSet(viewsets.ModelViewSet):
|
||||
|
||||
with language(order.locale, self.request.event.settings.region):
|
||||
payment = order.payments.last()
|
||||
|
||||
# OrderCreateSerializer creates at most one payment
|
||||
if payment and payment.state == OrderPayment.PAYMENT_STATE_CONFIRMED:
|
||||
order.log_action(
|
||||
'pretix.event.order.payment.confirmed', {
|
||||
'local_id': payment.local_id,
|
||||
'provider': payment.provider,
|
||||
},
|
||||
user=request.user if request.user.is_authenticated else None,
|
||||
auth=request.auth,
|
||||
)
|
||||
order_placed.send(self.request.event, order=order)
|
||||
if order.status == Order.STATUS_PAID:
|
||||
order_paid.send(self.request.event, order=order)
|
||||
@@ -936,6 +945,7 @@ with scopes_disabled():
|
||||
| Q(addon_to__attendee_email__icontains=value)
|
||||
| Q(order__code__istartswith=value)
|
||||
| Q(order__invoice_address__name_cached__icontains=value)
|
||||
| Q(order__invoice_address__company__icontains=value)
|
||||
| Q(order__email__icontains=value)
|
||||
| Q(pk__in=matching_media)
|
||||
)
|
||||
|
||||
@@ -155,7 +155,9 @@ class GiftCardViewSet(viewsets.ModelViewSet):
|
||||
qs = self.request.organizer.accepted_gift_cards
|
||||
else:
|
||||
qs = self.request.organizer.issued_gift_cards.all()
|
||||
return qs
|
||||
return qs.prefetch_related(
|
||||
'issuer'
|
||||
)
|
||||
|
||||
def get_serializer_context(self):
|
||||
ctx = super().get_serializer_context()
|
||||
@@ -166,7 +168,7 @@ class GiftCardViewSet(viewsets.ModelViewSet):
|
||||
def perform_create(self, serializer):
|
||||
value = serializer.validated_data.pop('value')
|
||||
inst = serializer.save(issuer=self.request.organizer)
|
||||
inst.transactions.create(value=value)
|
||||
inst.transactions.create(value=value, acceptor=self.request.organizer)
|
||||
inst.log_action(
|
||||
'pretix.giftcards.transaction.manual',
|
||||
user=self.request.user,
|
||||
@@ -179,18 +181,32 @@ class GiftCardViewSet(viewsets.ModelViewSet):
|
||||
if 'include_accepted' in self.request.GET:
|
||||
raise PermissionDenied("Accepted gift cards cannot be updated, use transact instead.")
|
||||
GiftCard.objects.select_for_update(of=OF_SELF).get(pk=self.get_object().pk)
|
||||
old_value = serializer.instance.value
|
||||
value = serializer.validated_data.pop('value')
|
||||
inst = serializer.save(secret=serializer.instance.secret, currency=serializer.instance.currency,
|
||||
testmode=serializer.instance.testmode)
|
||||
diff = value - old_value
|
||||
inst.transactions.create(value=diff)
|
||||
inst.log_action(
|
||||
'pretix.giftcards.transaction.manual',
|
||||
user=self.request.user,
|
||||
auth=self.request.auth,
|
||||
data={'value': diff}
|
||||
)
|
||||
|
||||
value = serializer.validated_data.pop('value', None)
|
||||
|
||||
if any(k != 'value' for k in self.request.data):
|
||||
inst = serializer.save(secret=serializer.instance.secret, currency=serializer.instance.currency,
|
||||
testmode=serializer.instance.testmode)
|
||||
inst.log_action(
|
||||
'pretix.giftcards.modified',
|
||||
user=self.request.user,
|
||||
auth=self.request.auth,
|
||||
data=self.request.data,
|
||||
)
|
||||
else:
|
||||
inst = serializer.instance
|
||||
|
||||
if 'value' in self.request.data and value is not None:
|
||||
old_value = serializer.instance.value
|
||||
diff = value - old_value
|
||||
inst.transactions.create(value=diff, acceptor=self.request.organizer)
|
||||
inst.log_action(
|
||||
'pretix.giftcards.transaction.manual',
|
||||
user=self.request.user,
|
||||
auth=self.request.auth,
|
||||
data={'value': diff}
|
||||
)
|
||||
|
||||
return inst
|
||||
|
||||
@action(detail=True, methods=["POST"])
|
||||
@@ -203,18 +219,21 @@ class GiftCardViewSet(viewsets.ModelViewSet):
|
||||
text = serializers.CharField(allow_blank=True, allow_null=True).to_internal_value(
|
||||
request.data.get('text', '')
|
||||
)
|
||||
info = serializers.JSONField(required=False, allow_null=True).to_internal_value(
|
||||
request.data.get('info', {})
|
||||
)
|
||||
if gc.value + value < Decimal('0.00'):
|
||||
return Response({
|
||||
'value': ['The gift card does not have sufficient credit for this operation.']
|
||||
}, status=status.HTTP_409_CONFLICT)
|
||||
gc.transactions.create(value=value, text=text)
|
||||
gc.transactions.create(value=value, text=text, info=info, acceptor=self.request.organizer)
|
||||
gc.log_action(
|
||||
'pretix.giftcards.transaction.manual',
|
||||
user=self.request.user,
|
||||
auth=self.request.auth,
|
||||
data={'value': value, 'text': text}
|
||||
)
|
||||
return Response(GiftCardSerializer(gc).data, status=status.HTTP_200_OK)
|
||||
return Response(GiftCardSerializer(gc, context=self.get_serializer_context()).data, status=status.HTTP_200_OK)
|
||||
|
||||
def perform_destroy(self, instance):
|
||||
raise MethodNotAllowed("Gift cards cannot be deleted.")
|
||||
@@ -235,7 +254,7 @@ class GiftCardTransactionViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
return get_object_or_404(qs, pk=self.kwargs.get('giftcard'))
|
||||
|
||||
def get_queryset(self):
|
||||
return self.giftcard.transactions.select_related('order', 'order__event')
|
||||
return self.giftcard.transactions.select_related('order', 'order__event').prefetch_related('acceptor')
|
||||
|
||||
|
||||
class TeamViewSet(viewsets.ModelViewSet):
|
||||
|
||||
@@ -189,6 +189,19 @@ class ParametrizedOrderPositionWebhookEvent(ParametrizedOrderWebhookEvent):
|
||||
return d
|
||||
|
||||
|
||||
class ParametrizedWaitingListEntryWebhookEvent(ParametrizedWebhookEvent):
|
||||
|
||||
def build_payload(self, logentry: LogEntry):
|
||||
# do not use content_object, this is also called in deletion
|
||||
return {
|
||||
'notification_id': logentry.pk,
|
||||
'organizer': logentry.event.organizer.slug,
|
||||
'event': logentry.event.slug,
|
||||
'waitinglistentry': logentry.object_id,
|
||||
'action': logentry.action_type,
|
||||
}
|
||||
|
||||
|
||||
@receiver(register_webhook_events, dispatch_uid="base_register_default_webhook_events")
|
||||
def register_default_webhook_events(sender, **kwargs):
|
||||
return (
|
||||
@@ -321,6 +334,22 @@ def register_default_webhook_events(sender, **kwargs):
|
||||
'pretix.event.testmode.deactivated',
|
||||
_('Test-Mode of shop has been deactivated'),
|
||||
),
|
||||
ParametrizedWaitingListEntryWebhookEvent(
|
||||
'pretix.event.orders.waitinglist.added',
|
||||
_('Waiting list entry added'),
|
||||
),
|
||||
ParametrizedWaitingListEntryWebhookEvent(
|
||||
'pretix.event.orders.waitinglist.changed',
|
||||
_('Waiting list entry changed'),
|
||||
),
|
||||
ParametrizedWaitingListEntryWebhookEvent(
|
||||
'pretix.event.orders.waitinglist.deleted',
|
||||
_('Waiting list entry deleted'),
|
||||
),
|
||||
ParametrizedWaitingListEntryWebhookEvent(
|
||||
'pretix.event.orders.waitinglist.voucher_assigned',
|
||||
_('Waiting list entry received voucher'),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -117,13 +117,15 @@ def oidc_validate_and_complete_config(config):
            scopes=", ".join(provider_config.get("scopes_supported", []))
        ))

    for k, v in config.items():
        if k.endswith('_field') and v:
            if v not in provider_config.get("claims_supported", []):  # https://openid.net/specs/openid-connect-core-1_0.html#UserInfo
                raise ValidationError(_('You are requesting field "{field}" but provider only supports these: {fields}.').format(
                    field=v,
                    fields=", ".join(provider_config.get("claims_supported", []))
                ))
    if "claims_supported" in provider_config:
        claims_supported = provider_config.get("claims_supported", [])
        for k, v in config.items():
            if k.endswith('_field') and v:
                if v not in claims_supported:  # https://openid.net/specs/openid-connect-core-1_0.html#UserInfo
                    raise ValidationError(_('You are requesting field "{field}" but provider only supports these: {fields}.').format(
                        field=v,
                        fields=", ".join(provider_config.get("claims_supported", []))
                    ))

    config['provider_config'] = provider_config
    return config

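Two illustrative discovery documents showing what this guard changes: claims_supported is optional metadata in the OIDC discovery specification, so a provider that simply omits it should not make every configured *_field fail validation.

provider_with_claims = {
    "claims_supported": ["sub", "email", "name"],
    # ... remaining discovery metadata; the configured *_field values are checked against this list
}

provider_without_claims = {
    # no "claims_supported" key: the per-field check above is skipped entirely
}
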
@@ -37,8 +37,8 @@ import tempfile
|
||||
from collections import OrderedDict, namedtuple
|
||||
from decimal import Decimal
|
||||
from typing import Optional, Tuple
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
import pytz
|
||||
from defusedcsv import csv
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
@@ -68,7 +68,7 @@ class BaseExporter:
|
||||
self.events = event
|
||||
self.event = None
|
||||
e = self.events.first()
|
||||
self.timezone = e.timezone if e else pytz.timezone(settings.TIME_ZONE)
|
||||
self.timezone = e.timezone if e else ZoneInfo(settings.TIME_ZONE)
|
||||
else:
|
||||
self.events = Event.objects.filter(pk=event.pk)
|
||||
self.timezone = event.timezone
|
||||
@@ -140,7 +140,7 @@ class BaseExporter:
|
||||
"""
|
||||
return {}
|
||||
|
||||
def render(self, form_data: dict) -> Tuple[str, str, bytes]:
|
||||
def render(self, form_data: dict) -> Tuple[str, str, Optional[bytes]]:
|
||||
"""
|
||||
Render the exported file and return a tuple consisting of a filename, a file type
|
||||
and file content.
|
||||
@@ -157,6 +157,13 @@
        """
        raise NotImplementedError()  # NOQA

    def available_for_user(self, user) -> bool:
        """
        Allows additional checks on whether an exporter is available, based on the user who calls it. Note that
        ``user`` may be ``None``, e.g. during API usage.
        """
        return True

class OrganizerLevelExportMixin:
|
||||
@property
|
||||
|
||||
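A hedged sketch of how an exporter could use the new available_for_user() hook; the class below is illustrative and not part of this diff:

class StaffOnlyExporter(BaseExporter):
    # Hide this exporter from non-staff users; note that ``user`` can be None
    # (e.g. API token access), so that case is handled explicitly here.
    def available_for_user(self, user) -> bool:
        return user is not None and user.is_staff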
@@ -58,6 +58,7 @@ class EventDataExporter(ListExporter):
|
||||
_("Short form"),
|
||||
_("Shop is live"),
|
||||
_("Event currency"),
|
||||
_("Timezone"),
|
||||
_("Event start time"),
|
||||
_("Event end time"),
|
||||
_("Admission time"),
|
||||
@@ -75,16 +76,18 @@ class EventDataExporter(ListExporter):
|
||||
|
||||
for e in self.events.all():
|
||||
m = e.meta_data
|
||||
tz = e.timezone
|
||||
yield [
|
||||
str(e.name),
|
||||
e.slug,
|
||||
_('Yes') if e.live else _('No'),
|
||||
e.currency,
|
||||
date_format(e.date_from, 'SHORT_DATETIME_FORMAT'),
|
||||
date_format(e.date_to, 'SHORT_DATETIME_FORMAT') if e.date_to else '',
|
||||
date_format(e.date_admission, 'SHORT_DATETIME_FORMAT') if e.date_admission else '',
|
||||
date_format(e.presale_start, 'SHORT_DATETIME_FORMAT') if e.presale_start else '',
|
||||
date_format(e.presale_end, 'SHORT_DATETIME_FORMAT') if e.presale_end else '',
|
||||
str(e.timezone),
|
||||
date_format(e.date_from.astimezone(tz), 'SHORT_DATETIME_FORMAT'),
|
||||
date_format(e.date_to.astimezone(tz), 'SHORT_DATETIME_FORMAT') if e.date_to else '',
|
||||
date_format(e.date_admission.astimezone(tz), 'SHORT_DATETIME_FORMAT') if e.date_admission else '',
|
||||
date_format(e.presale_start.astimezone(tz), 'SHORT_DATETIME_FORMAT') if e.presale_start else '',
|
||||
date_format(e.presale_end.astimezone(tz), 'SHORT_DATETIME_FORMAT') if e.presale_end else '',
|
||||
str(e.location),
|
||||
e.geo_lat or '',
|
||||
e.geo_lon or '',
|
||||
|
||||
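The hunk above adds a Timezone column and shifts every date column into the event's own timezone before formatting; sketched in isolation (variable names follow the loop above):

from django.utils.formats import date_format

# Inside the `for e in self.events.all():` loop:
tz = e.timezone
# Before this change the stored (UTC) datetime was formatted as-is;
# now it is converted into the event's timezone first.
cell = date_format(e.date_from.astimezone(tz), 'SHORT_DATETIME_FORMAT')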
@@ -103,7 +103,9 @@ class InvoiceExporterMixin:
|
||||
qs = qs.annotate(
|
||||
has_payment_with_provider=Exists(
|
||||
OrderPayment.objects.filter(
|
||||
Q(order=OuterRef('order_id')) & Q(provider=form_data.get('payment_provider'))
|
||||
Q(order=OuterRef('order_id')) & Q(provider=form_data.get('payment_provider')),
|
||||
state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED, OrderPayment.PAYMENT_STATE_REFUNDED,
|
||||
OrderPayment.PAYMENT_STATE_PENDING, OrderPayment.PAYMENT_STATE_CREATED),
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
@@ -34,8 +34,8 @@
|
||||
|
||||
from collections import OrderedDict
|
||||
from decimal import Decimal
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
import pytz
|
||||
from django import forms
|
||||
from django.db.models import (
|
||||
Case, CharField, Count, DateTimeField, F, IntegerField, Max, Min, OuterRef,
|
||||
@@ -49,18 +49,24 @@ from django.utils.timezone import get_current_timezone, now
|
||||
from django.utils.translation import (
|
||||
gettext as _, gettext_lazy, pgettext, pgettext_lazy,
|
||||
)
|
||||
from openpyxl.cell import WriteOnlyCell
|
||||
from openpyxl.comments import Comment
|
||||
from openpyxl.styles import Font, PatternFill
|
||||
|
||||
from pretix.base.models import (
|
||||
GiftCard, GiftCardTransaction, Invoice, InvoiceAddress, Order,
|
||||
OrderPosition, Question,
|
||||
)
|
||||
from pretix.base.models.orders import OrderFee, OrderPayment, OrderRefund
|
||||
from pretix.base.models.orders import (
|
||||
OrderFee, OrderPayment, OrderRefund, Transaction,
|
||||
)
|
||||
from pretix.base.services.quotas import QuotaAvailability
|
||||
from pretix.base.settings import PERSON_NAME_SCHEMES, get_name_parts_localized
|
||||
|
||||
from ...control.forms.filter import get_all_payment_providers
|
||||
from ...helpers import GroupConcat
|
||||
from ...helpers.iter import chunked_iterable
|
||||
from ...helpers.safe_openpyxl import remove_invalid_excel_chars
|
||||
from ..exporter import (
|
||||
ListExporter, MultiSheetListExporter, OrganizerLevelExportMixin,
|
||||
)
|
||||
@@ -320,7 +326,7 @@ class OrderListExporter(MultiSheetListExporter):
|
||||
|
||||
yield self.ProgressSetTotal(total=qs.count())
|
||||
for order in qs.order_by('datetime').iterator():
|
||||
tz = pytz.timezone(self.event_object_cache[order.event_id].settings.timezone)
|
||||
tz = ZoneInfo(self.event_object_cache[order.event_id].settings.timezone)
|
||||
|
||||
row = [
|
||||
self.event_object_cache[order.event_id].slug,
|
||||
@@ -453,7 +459,7 @@ class OrderListExporter(MultiSheetListExporter):
|
||||
yield self.ProgressSetTotal(total=qs.count())
|
||||
for op in qs.order_by('order__datetime').iterator():
|
||||
order = op.order
|
||||
tz = pytz.timezone(order.event.settings.timezone)
|
||||
tz = ZoneInfo(order.event.settings.timezone)
|
||||
row = [
|
||||
self.event_object_cache[order.event_id].slug,
|
||||
order.code,
|
||||
@@ -625,7 +631,7 @@ class OrderListExporter(MultiSheetListExporter):
|
||||
|
||||
for op in ops:
|
||||
order = op.order
|
||||
tz = pytz.timezone(self.event_object_cache[order.event_id].settings.timezone)
|
||||
tz = ZoneInfo(self.event_object_cache[order.event_id].settings.timezone)
|
||||
row = [
|
||||
self.event_object_cache[order.event_id].slug,
|
||||
order.code,
|
||||
@@ -759,6 +765,185 @@ class OrderListExporter(MultiSheetListExporter):
|
||||
return '{}_orders'.format(self.event.slug)
|
||||
|
||||
|
||||
class TransactionListExporter(ListExporter):
|
||||
identifier = 'transactions'
|
||||
verbose_name = gettext_lazy('Order transaction data')
|
||||
category = pgettext_lazy('export_category', 'Order data')
|
||||
description = gettext_lazy('Download a spreadsheet of all substantial changes to orders, i.e. all changes to '
|
||||
'products, prices or tax rates. The information is only accurate for changes made with '
|
||||
'pretix versions released after October 2021.')
|
||||
|
||||
@cached_property
|
||||
def providers(self):
|
||||
return dict(get_all_payment_providers())
|
||||
|
||||
@property
|
||||
def additional_form_fields(self):
|
||||
d = [
|
||||
('date_range',
|
||||
DateFrameField(
|
||||
label=_('Date range'),
|
||||
include_future_frames=False,
|
||||
required=False,
|
||||
help_text=_('Only include transactions created within this date range.')
|
||||
)),
|
||||
]
|
||||
d = OrderedDict(d)
|
||||
return d
|
||||
|
||||
@cached_property
|
||||
def event_object_cache(self):
|
||||
return {e.pk: e for e in self.events}
|
||||
|
||||
def get_filename(self):
|
||||
if self.is_multievent:
|
||||
return '{}_transactions'.format(self.organizer.slug)
|
||||
else:
|
||||
return '{}_transactions'.format(self.event.slug)
|
||||
|
||||
def iterate_list(self, form_data):
|
||||
qs = Transaction.objects.filter(
|
||||
order__event__in=self.events,
|
||||
)
|
||||
|
||||
if form_data.get('date_range'):
|
||||
dt_start, dt_end = resolve_timeframe_to_datetime_start_inclusive_end_exclusive(now(), form_data['date_range'], self.timezone)
|
||||
if dt_start:
|
||||
qs = qs.filter(datetime__gte=dt_start)
|
||||
if dt_end:
|
||||
qs = qs.filter(datetime__lt=dt_end)
|
||||
|
||||
qs = qs.select_related(
|
||||
'order', 'order__event', 'item', 'variation', 'subevent',
|
||||
).order_by(
|
||||
'datetime', 'id',
|
||||
)
|
||||
|
||||
headers = [
|
||||
_('Event'),
|
||||
_('Event slug'),
|
||||
_('Currency'),
|
||||
|
||||
_('Order code'),
|
||||
_('Order date'),
|
||||
_('Order time'),
|
||||
|
||||
_('Transaction date'),
|
||||
_('Transaction time'),
|
||||
_('Old data'),
|
||||
|
||||
_('Position ID'),
|
||||
_('Quantity'),
|
||||
|
||||
_('Product ID'),
|
||||
_('Product'),
|
||||
_('Variation ID'),
|
||||
_('Variation'),
|
||||
_('Fee type'),
|
||||
_('Internal fee type'),
|
||||
|
||||
pgettext('subevent', 'Date ID'),
|
||||
pgettext('subevent', 'Date'),
|
||||
|
||||
_('Price'),
|
||||
_('Tax rate'),
|
||||
_('Tax rule ID'),
|
||||
_('Tax rule'),
|
||||
_('Tax value'),
|
||||
_('Gross total'),
|
||||
_('Tax total'),
|
||||
]
|
||||
|
||||
if form_data.get('_format') == 'xlsx':
|
||||
for i in range(len(headers)):
|
||||
headers[i] = WriteOnlyCell(self.__ws, value=headers[i])
|
||||
if i in (0, 12, 14, 18, 22):
|
||||
headers[i].fill = PatternFill(start_color="FFB419", end_color="FFB419", fill_type="solid")
|
||||
headers[i].comment = Comment(
|
||||
text=_(
|
||||
"This value is supplied for informational purposes, it is not part of the original transaction "
|
||||
"data and might have changed since the transaction."
|
||||
),
|
||||
author='system'
|
||||
)
|
||||
headers[i].font = Font(bold=True)
|
||||
|
||||
yield headers
|
||||
|
||||
yield self.ProgressSetTotal(total=qs.count())
|
||||
|
||||
for t in qs.iterator():
|
||||
row = [
|
||||
str(t.order.event.name),
|
||||
t.order.event.slug,
|
||||
t.order.event.currency,
|
||||
|
||||
t.order.code,
|
||||
t.order.datetime.astimezone(self.timezone).strftime('%Y-%m-%d'),
|
||||
t.order.datetime.astimezone(self.timezone).strftime('%H:%M:%S'),
|
||||
|
||||
t.datetime.astimezone(self.timezone).strftime('%Y-%m-%d'),
|
||||
t.datetime.astimezone(self.timezone).strftime('%H:%M:%S'),
|
||||
_('Converted from legacy version') if t.migrated else '',
|
||||
|
||||
t.positionid,
|
||||
t.count,
|
||||
|
||||
t.item_id,
|
||||
str(t.item),
|
||||
t.variation_id or '',
|
||||
str(t.variation) if t.variation_id else '',
|
||||
t.fee_type,
|
||||
t.internal_type,
|
||||
t.subevent_id or '',
|
||||
str(t.subevent) if t.subevent else '',
|
||||
|
||||
t.price,
|
||||
t.tax_rate,
|
||||
t.tax_rule_id or '',
|
||||
str(t.tax_rule.internal_name or t.tax_rule.name) if t.tax_rule_id else '',
|
||||
t.tax_value,
|
||||
t.price * t.count,
|
||||
t.tax_value * t.count,
|
||||
]
|
||||
|
||||
if form_data.get('_format') == 'xlsx':
|
||||
for i in range(len(row)):
|
||||
if t.order.testmode:
|
||||
row[i] = WriteOnlyCell(self.__ws, value=remove_invalid_excel_chars(row[i]))
|
||||
row[i].fill = PatternFill(start_color="FFB419", end_color="FFB419", fill_type="solid")
|
||||
|
||||
yield row
|
||||
|
||||
def prepare_xlsx_sheet(self, ws):
|
||||
self.__ws = ws
|
||||
ws.freeze_panes = 'A2'
|
||||
ws.column_dimensions['A'].width = 25
|
||||
ws.column_dimensions['B'].width = 10
|
||||
ws.column_dimensions['C'].width = 10
|
||||
ws.column_dimensions['D'].width = 10
|
||||
ws.column_dimensions['E'].width = 15
|
||||
ws.column_dimensions['F'].width = 15
|
||||
ws.column_dimensions['G'].width = 15
|
||||
ws.column_dimensions['H'].width = 15
|
||||
ws.column_dimensions['I'].width = 15
|
||||
ws.column_dimensions['J'].width = 10
|
||||
ws.column_dimensions['K'].width = 10
|
||||
ws.column_dimensions['L'].width = 10
|
||||
ws.column_dimensions['M'].width = 25
|
||||
ws.column_dimensions['N'].width = 10
|
||||
ws.column_dimensions['O'].width = 25
|
||||
ws.column_dimensions['P'].width = 20
|
||||
ws.column_dimensions['Q'].width = 20
|
||||
ws.column_dimensions['R'].width = 10
|
||||
ws.column_dimensions['S'].width = 25
|
||||
ws.column_dimensions['T'].width = 15
|
||||
ws.column_dimensions['U'].width = 10
|
||||
ws.column_dimensions['V'].width = 10
|
||||
ws.column_dimensions['W'].width = 20
|
||||
ws.column_dimensions['X'].width = 15
|
||||
|
||||
|
||||
class PaymentListExporter(ListExporter):
|
||||
identifier = 'paymentlist'
|
||||
verbose_name = gettext_lazy('Payments and refunds')
|
||||
@@ -843,7 +1028,7 @@ class PaymentListExporter(ListExporter):
|
||||
|
||||
yield self.ProgressSetTotal(total=len(objs))
|
||||
for obj in objs:
|
||||
tz = pytz.timezone(obj.order.event.settings.timezone)
|
||||
tz = ZoneInfo(obj.order.event.settings.timezone)
|
||||
if isinstance(obj, OrderPayment) and obj.payment_date:
|
||||
d2 = obj.payment_date.astimezone(tz).date().strftime('%Y-%m-%d')
|
||||
elif isinstance(obj, OrderRefund) and obj.execution_date:
|
||||
@@ -962,7 +1147,7 @@ class GiftcardTransactionListExporter(OrganizerLevelExportMixin, ListExporter):
|
||||
def iterate_list(self, form_data):
|
||||
qs = GiftCardTransaction.objects.filter(
|
||||
card__issuer=self.organizer,
|
||||
).order_by('datetime').select_related('card', 'order', 'order__event')
|
||||
).order_by('datetime').select_related('card', 'order', 'order__event', 'acceptor')
|
||||
|
||||
if form_data.get('date_range'):
|
||||
dt_start, dt_end = resolve_timeframe_to_datetime_start_inclusive_end_exclusive(now(), form_data['date_range'], self.timezone)
|
||||
@@ -978,6 +1163,7 @@ class GiftcardTransactionListExporter(OrganizerLevelExportMixin, ListExporter):
|
||||
_('Amount'),
|
||||
_('Currency'),
|
||||
_('Order'),
|
||||
_('Organizer'),
|
||||
]
|
||||
yield headers
|
||||
|
||||
@@ -989,6 +1175,7 @@ class GiftcardTransactionListExporter(OrganizerLevelExportMixin, ListExporter):
|
||||
obj.value,
|
||||
obj.card.currency,
|
||||
obj.order.full_code if obj.order else None,
|
||||
str(obj.acceptor or ""),
|
||||
]
|
||||
yield row
|
||||
|
||||
@@ -1022,7 +1209,7 @@ class GiftcardRedemptionListExporter(ListExporter):
|
||||
yield headers
|
||||
|
||||
for obj in objs:
|
||||
tz = pytz.timezone(obj.order.event.settings.timezone)
|
||||
tz = ZoneInfo(obj.order.event.settings.timezone)
|
||||
gc = GiftCard.objects.get(pk=obj.info_data.get('gift_card'))
|
||||
row = [
|
||||
obj.order.event.slug,
|
||||
@@ -1165,6 +1352,16 @@ def register_multievent_orderlist_exporter(sender, **kwargs):
|
||||
return OrderListExporter
|
||||
|
||||
|
||||
@receiver(register_data_exporters, dispatch_uid="exporter_ordertransactionlist")
|
||||
def register_ordertransactionlist_exporter(sender, **kwargs):
|
||||
return TransactionListExporter
|
||||
|
||||
|
||||
@receiver(register_multievent_data_exporters, dispatch_uid="multiexporter_ordertransactionlist")
|
||||
def register_multievent_ordertransactionlist_exporter(sender, **kwargs):
|
||||
return TransactionListExporter
|
||||
|
||||
|
||||
@receiver(register_data_exporters, dispatch_uid="exporter_paymentlist")
|
||||
def register_paymentlist_exporter(sender, **kwargs):
|
||||
return PaymentListExporter
|
||||
|
||||
@@ -20,8 +20,8 @@
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
from collections import OrderedDict
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
import pytz
|
||||
from django import forms
|
||||
from django.db.models import F, Q
|
||||
from django.dispatch import receiver
|
||||
@@ -137,7 +137,7 @@ class WaitingListExporter(ListExporter):
|
||||
|
||||
# which event should be used to output dates in columns "Start date" and "End date"
|
||||
event_for_date_columns = entry.subevent if entry.subevent else entry.event
|
||||
tz = pytz.timezone(entry.event.settings.timezone)
|
||||
tz = ZoneInfo(entry.event.settings.timezone)
|
||||
datetime_format = '%Y-%m-%d %H:%M:%S'
|
||||
|
||||
row = [
|
||||
|
||||
@@ -167,6 +167,7 @@ class SettingsForm(i18nfield.forms.I18nFormMixin, HierarkeyForm):
|
||||
|
||||
class PrefixForm(forms.Form):
|
||||
prefix = forms.CharField(widget=forms.HiddenInput)
|
||||
template_name = "django/forms/table.html"
|
||||
|
||||
|
||||
class SafeSessionWizardView(SessionWizardView):
|
||||
|
||||
@@ -38,10 +38,10 @@ import logging
|
||||
from datetime import timedelta
|
||||
from decimal import Decimal
|
||||
from io import BytesIO
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
import dateutil.parser
|
||||
import pycountry
|
||||
import pytz
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.contrib import messages
|
||||
@@ -61,6 +61,7 @@ from django.utils.timezone import get_current_timezone, now
|
||||
from django.utils.translation import gettext_lazy as _, pgettext_lazy
|
||||
from django_countries import countries
|
||||
from django_countries.fields import Country, CountryField
|
||||
from geoip2.errors import AddressNotFoundError
|
||||
from phonenumber_field.formfields import PhoneNumberField
|
||||
from phonenumber_field.phonenumber import PhoneNumber
|
||||
from phonenumber_field.widgets import PhoneNumberPrefixWidget
|
||||
@@ -356,9 +357,12 @@ class WrappedPhoneNumberPrefixWidget(PhoneNumberPrefixWidget):
|
||||
def guess_country_from_request(request, event):
|
||||
if settings.HAS_GEOIP:
|
||||
g = GeoIP2()
|
||||
res = g.country(get_client_ip(request))
|
||||
if res['country_code'] and len(res['country_code']) == 2:
|
||||
return Country(res['country_code'])
|
||||
try:
|
||||
res = g.country(get_client_ip(request))
|
||||
if res['country_code'] and len(res['country_code']) == 2:
|
||||
return Country(res['country_code'])
|
||||
except AddressNotFoundError:
|
||||
pass
|
||||
return guess_country(event)
|
||||
|
||||
|
||||
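Sketch of the failure mode the try/except above guards against; the IP address is an arbitrary private example:

from django.contrib.gis.geoip2 import GeoIP2
from geoip2.errors import AddressNotFoundError

g = GeoIP2()
try:
    res = g.country("10.0.0.1")   # private/unknown addresses are not in the GeoIP database
except AddressNotFoundError:
    res = None                    # fall back, as guess_country_from_request() now does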
@@ -573,6 +577,7 @@ class BaseQuestionsForm(forms.Form):
|
||||
the attendee name for admission tickets, if the corresponding setting is enabled,
|
||||
as well as additional questions defined by the organizer.
|
||||
"""
|
||||
address_validation = False
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""
|
||||
@@ -732,7 +737,7 @@ class BaseQuestionsForm(forms.Form):
|
||||
initial = answers[0]
|
||||
else:
|
||||
initial = None
|
||||
tz = pytz.timezone(event.settings.timezone)
|
||||
tz = ZoneInfo(event.settings.timezone)
|
||||
help_text = rich_text(q.help_text)
|
||||
label = escape(q.question) # django-bootstrap3 calls mark_safe
|
||||
required = q.required and not self.all_optional
|
||||
@@ -920,8 +925,14 @@ class BaseQuestionsForm(forms.Form):
|
||||
v.widget.attrs['autocomplete'] = 'section-{} '.format(self.prefix) + v.widget.attrs.get('autocomplete', '')
|
||||
|
||||
def clean(self):
|
||||
from pretix.base.addressvalidation import \
|
||||
validate_address # local import to prevent impact on startup time
|
||||
|
||||
d = super().clean()
|
||||
|
||||
if self.address_validation:
|
||||
self.cleaned_data = d = validate_address(d, True)
|
||||
|
||||
if d.get('city') and d.get('country') and str(d['country']) in COUNTRIES_WITH_STATE_IN_ADDRESS:
|
||||
if not d.get('state'):
|
||||
self.add_error('state', _('This field is required.'))
|
||||
|
||||
63
src/pretix/base/forms/renderers.py
Normal file
@@ -0,0 +1,63 @@
|
||||
#
|
||||
# This file is part of pretix (Community Edition).
|
||||
#
|
||||
# Copyright (C) 2014-2020 Raphael Michel and contributors
|
||||
# Copyright (C) 2020-2021 rami.io GmbH and contributors
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
|
||||
# Public License as published by the Free Software Foundation in version 3 of the License.
|
||||
#
|
||||
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
|
||||
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
|
||||
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
|
||||
# this file, see <https://pretix.eu/about/en/license>.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
|
||||
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
from bootstrap3.renderers import (
|
||||
FieldRenderer as BaseFieldRenderer,
|
||||
InlineFieldRenderer as BaseInlineFieldRenderer,
|
||||
)
|
||||
from django.forms import (
|
||||
CheckboxInput, CheckboxSelectMultiple, ClearableFileInput, RadioSelect,
|
||||
SelectDateWidget,
|
||||
)
|
||||
|
||||
|
||||
class FieldRenderer(BaseFieldRenderer):
|
||||
# Local application of https://github.com/zostera/django-bootstrap3/pull/859
|
||||
|
||||
def post_widget_render(self, html):
|
||||
if isinstance(self.widget, CheckboxSelectMultiple):
|
||||
html = self.list_to_class(html, "checkbox")
|
||||
elif isinstance(self.widget, RadioSelect):
|
||||
html = self.list_to_class(html, "radio")
|
||||
elif isinstance(self.widget, SelectDateWidget):
|
||||
html = self.fix_date_select_input(html)
|
||||
elif isinstance(self.widget, ClearableFileInput):
|
||||
html = self.fix_clearable_file_input(html)
|
||||
elif isinstance(self.widget, CheckboxInput):
|
||||
html = self.put_inside_label(html)
|
||||
return html
|
||||
|
||||
|
||||
class InlineFieldRenderer(BaseInlineFieldRenderer):
|
||||
# Local application of https://github.com/zostera/django-bootstrap3/pull/859
|
||||
|
||||
def post_widget_render(self, html):
|
||||
if isinstance(self.widget, CheckboxSelectMultiple):
|
||||
html = self.list_to_class(html, "checkbox")
|
||||
elif isinstance(self.widget, RadioSelect):
|
||||
html = self.list_to_class(html, "radio")
|
||||
elif isinstance(self.widget, SelectDateWidget):
|
||||
html = self.fix_date_select_input(html)
|
||||
elif isinstance(self.widget, ClearableFileInput):
|
||||
html = self.fix_clearable_file_input(html)
|
||||
elif isinstance(self.widget, CheckboxInput):
|
||||
html = self.put_inside_label(html)
|
||||
return html
|
||||
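The new renderers.py above applies django-bootstrap3 PR #859 locally; presumably the renderers are wired up through the BOOTSTRAP3 setting along these lines (a sketch, not copied from this diff):

# settings.py (illustrative)
BOOTSTRAP3 = {
    "field_renderers": {
        "default": "pretix.base.forms.renderers.FieldRenderer",
        "inline": "pretix.base.forms.renderers.InlineFieldRenderer",
    },
}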
@@ -20,6 +20,8 @@
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
import logging
|
||||
import re
|
||||
import unicodedata
|
||||
from collections import defaultdict
|
||||
from decimal import Decimal
|
||||
from io import BytesIO
|
||||
@@ -28,6 +30,7 @@ from typing import Tuple
|
||||
|
||||
import bleach
|
||||
import vat_moss.exchange_rates
|
||||
from bidi.algorithm import get_display
|
||||
from django.contrib.staticfiles import finders
|
||||
from django.db.models import Sum
|
||||
from django.dispatch import receiver
|
||||
@@ -53,7 +56,8 @@ from pretix.base.models import Event, Invoice, Order, OrderPayment
|
||||
from pretix.base.services.currencies import SOURCE_NAMES
|
||||
from pretix.base.signals import register_invoice_renderers
|
||||
from pretix.base.templatetags.money import money_filter
|
||||
from pretix.helpers.reportlab import ThumbnailingImageReader
|
||||
from pretix.helpers.reportlab import ThumbnailingImageReader, reshaper
|
||||
from pretix.presale.style import get_fonts
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -79,7 +83,12 @@ class NumberedCanvas(Canvas):
|
||||
def draw_page_number(self, page_count):
|
||||
self.saveState()
|
||||
self.setFont(self.font_regular, 8)
|
||||
self.drawRightString(self._pagesize[0] - 20 * mm, 10 * mm, pgettext("invoice", "Page %d of %d") % (self._pageNumber, page_count,))
|
||||
text = pgettext("invoice", "Page %d of %d") % (self._pageNumber, page_count,)
|
||||
try:
|
||||
text = get_display(reshaper.reshape(text))
|
||||
except:
|
||||
logger.exception('Reshaping/Bidi fixes failed on string {}'.format(repr(text)))
|
||||
self.drawRightString(self._pagesize[0] - 20 * mm, 10 * mm, text)
|
||||
self.restoreState()
|
||||
|
||||
|
||||
@@ -139,8 +148,8 @@ class BaseReportlabInvoiceRenderer(BaseInvoiceRenderer):
|
||||
"""
|
||||
Initialize the renderer. By default, this registers fonts and sets ``self.stylesheet``.
|
||||
"""
|
||||
self.stylesheet = self._get_stylesheet()
|
||||
self._register_fonts()
|
||||
self.stylesheet = self._get_stylesheet()
|
||||
|
||||
def _get_stylesheet(self):
|
||||
"""
|
||||
@@ -148,6 +157,10 @@ class BaseReportlabInvoiceRenderer(BaseInvoiceRenderer):
|
||||
"""
|
||||
stylesheet = StyleSheet1()
|
||||
stylesheet.add(ParagraphStyle(name='Normal', fontName=self.font_regular, fontSize=10, leading=12))
|
||||
stylesheet.add(ParagraphStyle(name='Bold', fontName=self.font_bold, fontSize=10, leading=12))
|
||||
stylesheet.add(ParagraphStyle(name='BoldRight', fontName=self.font_bold, fontSize=10, leading=12, alignment=TA_RIGHT))
|
||||
stylesheet.add(ParagraphStyle(name='BoldRightNoSplit', fontName=self.font_bold, fontSize=10, leading=12, alignment=TA_RIGHT,
|
||||
splitLongWords=False))
|
||||
stylesheet.add(ParagraphStyle(name='NormalRight', fontName=self.font_regular, fontSize=10, leading=12, alignment=TA_RIGHT))
|
||||
stylesheet.add(ParagraphStyle(name='BoldInverseCenter', fontName=self.font_bold, fontSize=10, leading=12,
|
||||
textColor=colors.white, alignment=TA_CENTER))
|
||||
@@ -155,6 +168,7 @@ class BaseReportlabInvoiceRenderer(BaseInvoiceRenderer):
|
||||
stylesheet.add(ParagraphStyle(name='Heading1', fontName=self.font_bold, fontSize=15, leading=15 * 1.2))
|
||||
stylesheet.add(ParagraphStyle(name='FineprintHeading', fontName=self.font_bold, fontSize=8, leading=12))
|
||||
stylesheet.add(ParagraphStyle(name='Fineprint', fontName=self.font_regular, fontSize=8, leading=10))
|
||||
stylesheet.add(ParagraphStyle(name='FineprintRight', fontName=self.font_regular, fontSize=8, leading=10, alignment=TA_RIGHT))
|
||||
return stylesheet
|
||||
|
||||
def _register_fonts(self):
|
||||
@@ -168,6 +182,32 @@ class BaseReportlabInvoiceRenderer(BaseInvoiceRenderer):
|
||||
pdfmetrics.registerFontFamily('OpenSans', normal='OpenSans', bold='OpenSansBd',
|
||||
italic='OpenSansIt', boldItalic='OpenSansBI')
|
||||
|
||||
for family, styles in get_fonts().items():
|
||||
if family == self.event.settings.invoice_renderer_font:
|
||||
pdfmetrics.registerFont(TTFont(family, finders.find(styles['regular']['truetype'])))
|
||||
self.font_regular = family
|
||||
if 'italic' in styles:
|
||||
pdfmetrics.registerFont(TTFont(family + ' I', finders.find(styles['italic']['truetype'])))
|
||||
if 'bold' in styles:
|
||||
pdfmetrics.registerFont(TTFont(family + ' B', finders.find(styles['bold']['truetype'])))
|
||||
self.font_bold = family + ' B'
|
||||
if 'bolditalic' in styles:
|
||||
pdfmetrics.registerFont(TTFont(family + ' B I', finders.find(styles['bolditalic']['truetype'])))
|
||||
|
||||
def _normalize(self, text):
|
||||
# reportlab does not support unicode combination characters
|
||||
# It's important we do this before we use ArabicReshaper
|
||||
text = unicodedata.normalize("NFKC", text)
|
||||
|
||||
# reportlab does not support RTL, ligature-heavy scripts like Arabic. Therefore, we use ArabicReshaper
|
||||
# to resolve all ligatures and python-bidi to switch RTL texts.
|
||||
try:
|
||||
text = "<br />".join(get_display(reshaper.reshape(l)) for l in re.split("<br ?/>", text))
|
||||
except:
|
||||
logger.exception('Reshaping/Bidi fixes failed on string {}'.format(repr(text)))
|
||||
|
||||
return text
|
||||
|
||||
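A minimal sketch of the reshape-then-bidi step _normalize() applies, assuming `reshaper` from pretix.helpers.reportlab is an arabic_reshaper instance as the comment above suggests:

from bidi.algorithm import get_display

shaped = reshaper.reshape("مرحبا بالعالم")   # join Arabic letters into presentation forms
visual = get_display(shaped)                 # reorder RTL runs so reportlab draws them correctly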
def _upper(self, val):
|
||||
# We uppercase labels, but not in every language
|
||||
if get_language().startswith('el'):
|
||||
@@ -247,10 +287,10 @@ class BaseReportlabInvoiceRenderer(BaseInvoiceRenderer):
|
||||
return 'invoice.pdf', 'application/pdf', buffer.read()
|
||||
|
||||
def _clean_text(self, text, tags=None):
|
||||
return bleach.clean(
|
||||
return self._normalize(bleach.clean(
|
||||
text,
|
||||
tags=tags or []
|
||||
).strip().replace('<br>', '<br />').replace('\n', '<br />\n')
|
||||
).strip().replace('<br>', '<br />').replace('\n', '<br />\n'))
|
||||
|
||||
|
||||
class PaidMarker(Flowable):
|
||||
@@ -291,7 +331,7 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
canvas.setFont(self.font_regular, 8)
|
||||
|
||||
for i, line in enumerate(self.invoice.footer_text.split('\n')[::-1]):
|
||||
canvas.drawCentredString(self.pagesize[0] / 2, 25 + (3.5 * i) * mm, line.strip())
|
||||
canvas.drawCentredString(self.pagesize[0] / 2, 25 + (3.5 * i) * mm, self._normalize(line.strip()))
|
||||
|
||||
canvas.restoreState()
|
||||
|
||||
@@ -324,13 +364,13 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
def _draw_invoice_from_label(self, canvas):
|
||||
textobject = canvas.beginText(25 * mm, (297 - 15) * mm)
|
||||
textobject.setFont(self.font_bold, 8)
|
||||
textobject.textLine(self._upper(pgettext('invoice', 'Invoice from')))
|
||||
textobject.textLine(self._normalize(self._upper(pgettext('invoice', 'Invoice from'))))
|
||||
canvas.drawText(textobject)
|
||||
|
||||
def _draw_invoice_to_label(self, canvas):
|
||||
textobject = canvas.beginText(25 * mm, (297 - 50) * mm)
|
||||
textobject.setFont(self.font_bold, 8)
|
||||
textobject.textLine(self._upper(pgettext('invoice', 'Invoice to')))
|
||||
textobject.textLine(self._normalize(self._upper(pgettext('invoice', 'Invoice to'))))
|
||||
canvas.drawText(textobject)
|
||||
|
||||
logo_width = 25 * mm
|
||||
@@ -358,51 +398,51 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
def _draw_metadata(self, canvas):
|
||||
textobject = canvas.beginText(125 * mm, (297 - 38) * mm)
|
||||
textobject.setFont(self.font_bold, 8)
|
||||
textobject.textLine(self._upper(pgettext('invoice', 'Order code')))
|
||||
textobject.textLine(self._normalize(self._upper(pgettext('invoice', 'Order code'))))
|
||||
textobject.moveCursor(0, 5)
|
||||
textobject.setFont(self.font_regular, 10)
|
||||
textobject.textLine(self.invoice.order.full_code)
|
||||
textobject.textLine(self._normalize(self.invoice.order.full_code))
|
||||
canvas.drawText(textobject)
|
||||
|
||||
textobject = canvas.beginText(125 * mm, (297 - 50) * mm)
|
||||
textobject.setFont(self.font_bold, 8)
|
||||
if self.invoice.is_cancellation:
|
||||
textobject.textLine(self._upper(pgettext('invoice', 'Cancellation number')))
|
||||
textobject.textLine(self._normalize(self._upper(pgettext('invoice', 'Cancellation number'))))
|
||||
textobject.moveCursor(0, 5)
|
||||
textobject.setFont(self.font_regular, 10)
|
||||
textobject.textLine(self.invoice.number)
|
||||
textobject.textLine(self._normalize(self.invoice.number))
|
||||
textobject.moveCursor(0, 5)
|
||||
textobject.setFont(self.font_bold, 8)
|
||||
textobject.textLine(self._upper(pgettext('invoice', 'Original invoice')))
|
||||
textobject.textLine(self._normalize(self._upper(pgettext('invoice', 'Original invoice'))))
|
||||
textobject.moveCursor(0, 5)
|
||||
textobject.setFont(self.font_regular, 10)
|
||||
textobject.textLine(self.invoice.refers.number)
|
||||
textobject.textLine(self._normalize(self.invoice.refers.number))
|
||||
else:
|
||||
textobject.textLine(self._upper(pgettext('invoice', 'Invoice number')))
|
||||
textobject.textLine(self._normalize(self._upper(pgettext('invoice', 'Invoice number'))))
|
||||
textobject.moveCursor(0, 5)
|
||||
textobject.setFont(self.font_regular, 10)
|
||||
textobject.textLine(self.invoice.number)
|
||||
textobject.textLine(self._normalize(self.invoice.number))
|
||||
textobject.moveCursor(0, 5)
|
||||
|
||||
if self.invoice.is_cancellation:
|
||||
textobject.setFont(self.font_bold, 8)
|
||||
textobject.textLine(self._upper(pgettext('invoice', 'Cancellation date')))
|
||||
textobject.textLine(self._normalize(self._upper(pgettext('invoice', 'Cancellation date'))))
|
||||
textobject.moveCursor(0, 5)
|
||||
textobject.setFont(self.font_regular, 10)
|
||||
textobject.textLine(date_format(self.invoice.date, "DATE_FORMAT"))
|
||||
textobject.textLine(self._normalize(date_format(self.invoice.date, "DATE_FORMAT")))
|
||||
textobject.moveCursor(0, 5)
|
||||
textobject.setFont(self.font_bold, 8)
|
||||
textobject.textLine(self._upper(pgettext('invoice', 'Original invoice date')))
|
||||
textobject.textLine(self._normalize(self._upper(pgettext('invoice', 'Original invoice date'))))
|
||||
textobject.moveCursor(0, 5)
|
||||
textobject.setFont(self.font_regular, 10)
|
||||
textobject.textLine(date_format(self.invoice.refers.date, "DATE_FORMAT"))
|
||||
textobject.textLine(self._normalize(date_format(self.invoice.refers.date, "DATE_FORMAT")))
|
||||
textobject.moveCursor(0, 5)
|
||||
else:
|
||||
textobject.setFont(self.font_bold, 8)
|
||||
textobject.textLine(self._upper(pgettext('invoice', 'Invoice date')))
|
||||
textobject.textLine(self._normalize(self._upper(pgettext('invoice', 'Invoice date'))))
|
||||
textobject.moveCursor(0, 5)
|
||||
textobject.setFont(self.font_regular, 10)
|
||||
textobject.textLine(date_format(self.invoice.date, "DATE_FORMAT"))
|
||||
textobject.textLine(self._normalize(date_format(self.invoice.date, "DATE_FORMAT")))
|
||||
textobject.moveCursor(0, 5)
|
||||
|
||||
canvas.drawText(textobject)
|
||||
@@ -415,19 +455,19 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
def _draw_event_label(self, canvas):
|
||||
textobject = canvas.beginText(125 * mm, (297 - 15) * mm)
|
||||
textobject.setFont(self.font_bold, 8)
|
||||
textobject.textLine(self._upper(pgettext('invoice', 'Event')))
|
||||
textobject.textLine(self._normalize(self._upper(pgettext('invoice', 'Event'))))
|
||||
canvas.drawText(textobject)
|
||||
|
||||
def _draw_event(self, canvas):
|
||||
def shorten(txt):
|
||||
txt = str(txt)
|
||||
txt = bleach.clean(txt, tags=[]).strip()
|
||||
p = Paragraph(txt.strip().replace('\n', '<br />\n'), style=self.stylesheet['Normal'])
|
||||
p = Paragraph(self._normalize(txt.strip().replace('\n', '<br />\n')), style=self.stylesheet['Normal'])
|
||||
p_size = p.wrap(self.event_width, self.event_height)
|
||||
|
||||
while p_size[1] > 2 * self.stylesheet['Normal'].leading:
|
||||
txt = ' '.join(txt.replace('…', '').split()[:-1]) + '…'
|
||||
p = Paragraph(txt.strip().replace('\n', '<br />\n'), style=self.stylesheet['Normal'])
|
||||
p = Paragraph(self._normalize(txt.strip().replace('\n', '<br />\n')), style=self.stylesheet['Normal'])
|
||||
p_size = p.wrap(self.event_width, self.event_height)
|
||||
return txt
|
||||
|
||||
@@ -453,7 +493,7 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
else:
|
||||
p_str = shorten(self.invoice.event.name)
|
||||
|
||||
p = Paragraph(p_str.strip().replace('\n', '<br />\n'), style=self.stylesheet['Normal'])
|
||||
p = Paragraph(self._normalize(p_str.strip().replace('\n', '<br />\n')), style=self.stylesheet['Normal'])
|
||||
p.wrapOn(canvas, self.event_width, self.event_height)
|
||||
p_size = p.wrap(self.event_width, self.event_height)
|
||||
p.drawOn(canvas, self.event_left, self.pagesize[1] - self.event_top - p_size[1])
|
||||
@@ -462,14 +502,14 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
def _draw_footer(self, canvas):
|
||||
canvas.setFont(self.font_regular, 8)
|
||||
for i, line in enumerate(self.invoice.footer_text.split('\n')[::-1]):
|
||||
canvas.drawCentredString(self.pagesize[0] / 2, 25 + (3.5 * i) * mm, line.strip())
|
||||
canvas.drawCentredString(self.pagesize[0] / 2, 25 + (3.5 * i) * mm, self._normalize(line.strip()))
|
||||
|
||||
def _draw_testmode(self, canvas):
|
||||
if self.invoice.order.testmode:
|
||||
canvas.saveState()
|
||||
canvas.setFont('OpenSansBd', 30)
|
||||
canvas.setFont(self.font_bold, 30)
|
||||
canvas.setFillColorRGB(32, 0, 0)
|
||||
canvas.drawRightString(self.pagesize[0] - 20 * mm, (297 - 100) * mm, gettext('TEST MODE'))
|
||||
canvas.drawRightString(self.pagesize[0] - 20 * mm, (297 - 100) * mm, self._normalize(gettext('TEST MODE')))
|
||||
canvas.restoreState()
|
||||
|
||||
def _on_first_page(self, canvas: Canvas, doc):
|
||||
@@ -517,22 +557,22 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
|
||||
if self.invoice.internal_reference:
|
||||
story.append(Paragraph(
|
||||
pgettext('invoice', 'Customer reference: {reference}').format(
|
||||
self._normalize(pgettext('invoice', 'Customer reference: {reference}').format(
|
||||
reference=self._clean_text(self.invoice.internal_reference),
|
||||
),
|
||||
)),
|
||||
self.stylesheet['Normal']
|
||||
))
|
||||
|
||||
if self.invoice.invoice_to_vat_id:
|
||||
story.append(Paragraph(
|
||||
pgettext('invoice', 'Customer VAT ID') + ': ' +
|
||||
self._normalize(pgettext('invoice', 'Customer VAT ID')) + ': ' +
|
||||
self._clean_text(self.invoice.invoice_to_vat_id),
|
||||
self.stylesheet['Normal']
|
||||
))
|
||||
|
||||
if self.invoice.invoice_to_beneficiary:
|
||||
story.append(Paragraph(
|
||||
pgettext('invoice', 'Beneficiary') + ':<br />' +
|
||||
self._normalize(pgettext('invoice', 'Beneficiary')) + ':<br />' +
|
||||
self._clean_text(self.invoice.invoice_to_beneficiary),
|
||||
self.stylesheet['Normal']
|
||||
))
|
||||
@@ -552,10 +592,10 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
story = [
|
||||
NextPageTemplate('FirstPage'),
|
||||
Paragraph(
|
||||
(
|
||||
self._normalize(
|
||||
pgettext('invoice', 'Tax Invoice') if str(self.invoice.invoice_from_country) == 'AU'
|
||||
else pgettext('invoice', 'Invoice')
|
||||
) if not self.invoice.is_cancellation else pgettext('invoice', 'Cancellation'),
|
||||
) if not self.invoice.is_cancellation else self._normalize(pgettext('invoice', 'Cancellation')),
|
||||
self.stylesheet['Heading1']
|
||||
),
|
||||
Spacer(1, 5 * mm),
|
||||
@@ -577,17 +617,17 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
]
|
||||
if has_taxes:
|
||||
tdata = [(
|
||||
pgettext('invoice', 'Description'),
|
||||
pgettext('invoice', 'Qty'),
|
||||
pgettext('invoice', 'Tax rate'),
|
||||
pgettext('invoice', 'Net'),
|
||||
pgettext('invoice', 'Gross'),
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Description')), self.stylesheet['Bold']),
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Qty')), self.stylesheet['BoldRightNoSplit']),
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Tax rate')), self.stylesheet['BoldRightNoSplit']),
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Net')), self.stylesheet['BoldRightNoSplit']),
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Gross')), self.stylesheet['BoldRightNoSplit']),
|
||||
)]
|
||||
else:
|
||||
tdata = [(
|
||||
pgettext('invoice', 'Description'),
|
||||
pgettext('invoice', 'Qty'),
|
||||
pgettext('invoice', 'Amount'),
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Description')), self.stylesheet['BoldRight']),
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Qty')), self.stylesheet['BoldRightNoSplit']),
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Amount')), self.stylesheet['BoldRightNoSplit']),
|
||||
)]
|
||||
|
||||
def _group_key(line):
|
||||
@@ -634,13 +674,13 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
|
||||
if has_taxes:
|
||||
tdata.append([
|
||||
pgettext('invoice', 'Invoice total'), '', '', '',
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Invoice total')), self.stylesheet['Bold']), '', '', '',
|
||||
money_filter(total, self.invoice.event.currency)
|
||||
])
|
||||
colwidths = [a * doc.width for a in (.50, .05, .15, .15, .15)]
|
||||
else:
|
||||
tdata.append([
|
||||
pgettext('invoice', 'Invoice total'), '',
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Invoice total')), self.stylesheet['Bold']), '',
|
||||
money_filter(total, self.invoice.event.currency)
|
||||
])
|
||||
colwidths = [a * doc.width for a in (.65, .20, .15)]
|
||||
@@ -649,12 +689,16 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
if self.invoice.event.settings.invoice_show_payments and self.invoice.order.status == Order.STATUS_PENDING:
|
||||
pending_sum = self.invoice.order.pending_sum
|
||||
if pending_sum != total:
|
||||
tdata.append([pgettext('invoice', 'Received payments')] + (['', '', ''] if has_taxes else ['']) + [
|
||||
money_filter(pending_sum - total, self.invoice.event.currency)
|
||||
])
|
||||
tdata.append([pgettext('invoice', 'Outstanding payments')] + (['', '', ''] if has_taxes else ['']) + [
|
||||
money_filter(pending_sum, self.invoice.event.currency)
|
||||
])
|
||||
tdata.append(
|
||||
[Paragraph(self._normalize(pgettext('invoice', 'Received payments')), self.stylesheet['Normal'])] +
|
||||
(['', '', ''] if has_taxes else ['']) +
|
||||
[money_filter(pending_sum - total, self.invoice.event.currency)]
|
||||
)
|
||||
tdata.append(
|
||||
[Paragraph(self._normalize(pgettext('invoice', 'Outstanding payments')), self.stylesheet['Bold'])] +
|
||||
(['', '', ''] if has_taxes else ['']) +
|
||||
[money_filter(pending_sum, self.invoice.event.currency)]
|
||||
)
|
||||
tstyledata += [
|
||||
('FONTNAME', (0, len(tdata) - 3), (-1, len(tdata) - 3), self.font_bold),
|
||||
]
|
||||
@@ -667,19 +711,24 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
).aggregate(
|
||||
s=Sum('amount')
|
||||
)['s'] or Decimal('0.00')
|
||||
tdata.append([pgettext('invoice', 'Paid by gift card')] + (['', '', ''] if has_taxes else ['']) + [
|
||||
money_filter(giftcard_sum, self.invoice.event.currency)
|
||||
])
|
||||
tdata.append([pgettext('invoice', 'Remaining amount')] + (['', '', ''] if has_taxes else ['']) + [
|
||||
money_filter(total - giftcard_sum, self.invoice.event.currency)
|
||||
])
|
||||
tdata.append(
|
||||
[Paragraph(self._normalize(pgettext('invoice', 'Paid by gift card')), self.stylesheet['Normal'])] +
|
||||
(['', '', ''] if has_taxes else ['']) +
|
||||
[money_filter(giftcard_sum, self.invoice.event.currency)]
|
||||
)
|
||||
tdata.append(
|
||||
[Paragraph(self._normalize(pgettext('invoice', 'Remaining amount')), self.stylesheet['Bold'])] +
|
||||
(['', '', ''] if has_taxes else ['']) +
|
||||
[money_filter(total - giftcard_sum, self.invoice.event.currency)]
|
||||
)
|
||||
tstyledata += [
|
||||
('FONTNAME', (0, len(tdata) - 3), (-1, len(tdata) - 3), self.font_bold),
|
||||
]
|
||||
elif self.invoice.payment_provider_stamp:
|
||||
pm = PaidMarker(
|
||||
text=self.invoice.payment_provider_stamp,
|
||||
text=self._normalize(self.invoice.payment_provider_stamp),
|
||||
color=colors.HexColor(self.event.settings.theme_color_success),
|
||||
font=self.font_bold,
|
||||
size=16
|
||||
)
|
||||
tdata[-1][-2] = pm
|
||||
@@ -692,7 +741,7 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
|
||||
if self.invoice.payment_provider_text:
|
||||
story.append(Paragraph(
|
||||
self.invoice.payment_provider_text,
|
||||
self._normalize(self.invoice.payment_provider_text),
|
||||
self.stylesheet['Normal']
|
||||
))
|
||||
|
||||
@@ -716,10 +765,10 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
('FONTNAME', (0, 0), (-1, -1), self.font_regular),
|
||||
]
|
||||
thead = [
|
||||
pgettext('invoice', 'Tax rate'),
|
||||
pgettext('invoice', 'Net value'),
|
||||
pgettext('invoice', 'Gross value'),
|
||||
pgettext('invoice', 'Tax'),
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Tax rate')), self.stylesheet['Fineprint']),
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Net value')), self.stylesheet['FineprintRight']),
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Gross value')), self.stylesheet['FineprintRight']),
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Tax')), self.stylesheet['FineprintRight']),
|
||||
''
|
||||
]
|
||||
tdata = [thead]
|
||||
@@ -730,7 +779,7 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
continue
|
||||
tax = taxvalue_map[idx]
|
||||
tdata.append([
|
||||
localize(rate) + " % " + name,
|
||||
Paragraph(self._normalize(localize(rate) + " % " + name), self.stylesheet['Fineprint']),
|
||||
money_filter(gross - tax, self.invoice.event.currency),
|
||||
money_filter(gross, self.invoice.event.currency),
|
||||
money_filter(tax, self.invoice.event.currency),
|
||||
@@ -749,7 +798,7 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
table.setStyle(TableStyle(tstyledata))
|
||||
story.append(Spacer(5 * mm, 5 * mm))
|
||||
story.append(KeepTogether([
|
||||
Paragraph(pgettext('invoice', 'Included taxes'), self.stylesheet['FineprintHeading']),
|
||||
Paragraph(self._normalize(pgettext('invoice', 'Included taxes')), self.stylesheet['FineprintHeading']),
|
||||
table
|
||||
]))
|
||||
|
||||
@@ -766,7 +815,7 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
net = gross - tax
|
||||
|
||||
tdata.append([
|
||||
localize(rate) + " % " + name,
|
||||
Paragraph(self._normalize(localize(rate) + " % " + name), self.stylesheet['Fineprint']),
|
||||
fmt(net), fmt(gross), fmt(tax), ''
|
||||
])
|
||||
|
||||
@@ -776,12 +825,12 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
story.append(KeepTogether([
|
||||
Spacer(1, height=2 * mm),
|
||||
Paragraph(
|
||||
pgettext(
|
||||
self._normalize(pgettext(
|
||||
'invoice', 'Using the conversion rate of 1:{rate} as published by the {authority} on '
|
||||
'{date}, this corresponds to:'
|
||||
).format(rate=localize(self.invoice.foreign_currency_rate),
|
||||
authority=SOURCE_NAMES.get(self.invoice.foreign_currency_source, "?"),
|
||||
date=date_format(self.invoice.foreign_currency_rate_date, "SHORT_DATE_FORMAT")),
|
||||
date=date_format(self.invoice.foreign_currency_rate_date, "SHORT_DATE_FORMAT"))),
|
||||
self.stylesheet['Fineprint']
|
||||
),
|
||||
Spacer(1, height=3 * mm),
|
||||
@@ -790,14 +839,14 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
elif self.invoice.foreign_currency_display and self.invoice.foreign_currency_rate:
|
||||
foreign_total = round_decimal(total * self.invoice.foreign_currency_rate)
|
||||
story.append(Spacer(1, 5 * mm))
|
||||
story.append(Paragraph(
|
||||
story.append(Paragraph(self._normalize(
|
||||
pgettext(
|
||||
'invoice', 'Using the conversion rate of 1:{rate} as published by the {authority} on '
|
||||
'{date}, the invoice total corresponds to {total}.'
|
||||
).format(rate=localize(self.invoice.foreign_currency_rate),
|
||||
date=date_format(self.invoice.foreign_currency_rate_date, "SHORT_DATE_FORMAT"),
|
||||
authority=SOURCE_NAMES.get(self.invoice.foreign_currency_source, "?"),
|
||||
total=fmt(foreign_total)),
|
||||
total=fmt(foreign_total))),
|
||||
self.stylesheet['Fineprint']
|
||||
))
|
||||
|
||||
@@ -843,7 +892,7 @@ class Modern1Renderer(ClassicInvoiceRenderer):
|
||||
self._clean_text(l)
|
||||
for l in self.invoice.address_invoice_from.strip().split('\n')
|
||||
]
|
||||
p = Paragraph(' · '.join(c), style=self.stylesheet['Sender'])
|
||||
p = Paragraph(self._normalize(' · '.join(c)), style=self.stylesheet['Sender'])
|
||||
p.wrapOn(canvas, self.invoice_to_width, 15.7 * mm)
|
||||
p.drawOn(canvas, self.invoice_to_left, self.pagesize[1] - self.invoice_to_top + 2 * mm)
|
||||
super()._draw_invoice_from(canvas)
|
||||
@@ -859,8 +908,12 @@ class Modern1Renderer(ClassicInvoiceRenderer):
|
||||
|
||||
def _get_first_page_frames(self, doc):
|
||||
footer_length = 3.5 * len(self.invoice.footer_text.split('\n')) * mm
|
||||
if self.event.settings.invoice_renderer_highlight_order_code:
|
||||
margin_top = 100 * mm
|
||||
else:
|
||||
margin_top = 95 * mm
|
||||
return [
|
||||
Frame(doc.leftMargin, doc.bottomMargin, doc.width, doc.height - 95 * mm,
|
||||
Frame(doc.leftMargin, doc.bottomMargin, doc.width, doc.height - margin_top,
|
||||
leftPadding=0, rightPadding=0, topPadding=0, bottomPadding=footer_length,
|
||||
id='normal')
|
||||
]
|
||||
@@ -871,25 +924,35 @@ class Modern1Renderer(ClassicInvoiceRenderer):
|
||||
# the font size until it fits.
|
||||
begin_top = 100 * mm
|
||||
|
||||
def _draw(label, value, value_size, x, width):
|
||||
def _draw(label, value, value_size, x, width, bold=False, sublabel=None):
|
||||
if canvas.stringWidth(value, self.font_regular, value_size) > width and value_size > 6:
|
||||
return False
|
||||
textobject = canvas.beginText(x, self.pagesize[1] - begin_top)
|
||||
textobject.setFont(self.font_regular, 8)
|
||||
textobject.textLine(label)
|
||||
textobject.textLine(self._normalize(label))
|
||||
textobject.moveCursor(0, 5)
|
||||
textobject.setFont(self.font_regular, value_size)
|
||||
textobject.textLine(value)
|
||||
textobject.setFont(self.font_bold if bold else self.font_regular, value_size)
|
||||
textobject.textLine(self._normalize(value))
|
||||
|
||||
if sublabel:
|
||||
textobject.moveCursor(0, 1)
|
||||
textobject.setFont(self.font_regular, 8)
|
||||
textobject.textLine(self._normalize(sublabel))
|
||||
|
||||
return textobject
|
||||
|
||||
value_size = 10
|
||||
while value_size >= 5:
|
||||
if self.event.settings.invoice_renderer_highlight_order_code:
|
||||
kwargs = dict(bold=True, sublabel=pgettext('invoice', '(Please quote at all times.)'))
|
||||
else:
|
||||
kwargs = {}
|
||||
objects = [
|
||||
_draw(pgettext('invoice', 'Order code'), self.invoice.order.full_code, value_size, self.left_margin, 45 * mm)
|
||||
_draw(pgettext('invoice', 'Order code'), self.invoice.order.full_code, value_size, self.left_margin, 45 * mm, **kwargs)
|
||||
]
|
||||
|
||||
p = Paragraph(
|
||||
date_format(self.invoice.date, "DATE_FORMAT"),
|
||||
self._normalize(date_format(self.invoice.date, "DATE_FORMAT")),
|
||||
style=ParagraphStyle(name=f'Normal{value_size}', fontName=self.font_regular, fontSize=value_size, leading=value_size * 1.2)
|
||||
)
|
||||
w = stringWidth(p.text, p.frags[0].fontName, p.frags[0].fontSize)
|
||||
@@ -920,9 +983,9 @@ class Modern1Renderer(ClassicInvoiceRenderer):
|
||||
textobject = canvas.beginText(date_x, self.pagesize[1] - begin_top)
|
||||
textobject.setFont(self.font_regular, 8)
|
||||
if self.invoice.is_cancellation:
|
||||
textobject.textLine(pgettext('invoice', 'Cancellation date'))
|
||||
textobject.textLine(self._normalize(pgettext('invoice', 'Cancellation date')))
|
||||
else:
|
||||
textobject.textLine(pgettext('invoice', 'Invoice date'))
|
||||
textobject.textLine(self._normalize(pgettext('invoice', 'Invoice date')))
|
||||
canvas.drawText(textobject)
|
||||
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ Django, for theoretically very valid reasons, creates migrations for *every sing
|
||||
we change on a model. Even the `help_text`! This makes sense, as we don't know if any
|
||||
database backend unknown to us might actually use this information for its database schema.
|
||||
|
||||
However, pretix only supports PostgreSQL, MySQL, MariaDB and SQLite and we can be pretty
|
||||
However, pretix only supports PostgreSQL and SQLite and we can be pretty
|
||||
certain that some changes to models will never require a change to the database. In this case,
|
||||
not creating a migration for certain changes will save us some performance while applying them
|
||||
*and* allow for a cleaner git history. Win-win!
|
||||
|
||||
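An illustrative example of the kind of change meant here (the model and field are hypothetical): altering only presentation metadata leaves the PostgreSQL/SQLite schema untouched, so skipping the migration is safe.

from django.db import models

class Ticket(models.Model):
    # Editing only help_text or verbose_name changes nothing in the generated
    # CREATE TABLE statement for PostgreSQL or SQLite.
    comment = models.TextField(
        blank=True,
        help_text="Internal note, not shown to customers.",
    )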
@@ -22,7 +22,7 @@
|
||||
import json
|
||||
import sys
|
||||
|
||||
import pytz
|
||||
import pytz_deprecation_shim
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils.timezone import override
|
||||
from django_scopes import scope
|
||||
@@ -60,7 +60,7 @@ class Command(BaseCommand):
|
||||
sys.exit(1)
|
||||
|
||||
locale = options.get("locale", None)
|
||||
timezone = pytz.timezone(options['timezone']) if options.get('timezone') else None
|
||||
timezone = pytz_deprecation_shim.timezone(options['timezone']) if options.get('timezone') else None
|
||||
|
||||
with scope(organizer=o):
|
||||
if options['event_slug']:
|
||||
|
||||
@@ -51,7 +51,7 @@ class Command(BaseCommand):
|
||||
del options['skip_checks']
|
||||
del options['print_sql']
|
||||
|
||||
if options['print_sql']:
|
||||
if options.get('print_sql'):
|
||||
connection.force_debug_cursor = True
|
||||
logger = logging.getLogger("django.db.backends")
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
@@ -49,6 +49,9 @@ class BaseMediaType:
|
||||
def handle_unknown(self, organizer, identifier, user, auth):
|
||||
pass
|
||||
|
||||
def handle_new(self, organizer, medium, user, auth):
|
||||
pass
|
||||
|
||||
def __str__(self):
|
||||
return str(self.verbose_name)
|
||||
|
||||
@@ -108,9 +111,43 @@ class NfcUidMediaType(BaseMediaType):
|
||||
return m
|
||||
|
||||
|
||||
class NfcMf0aesMediaType(BaseMediaType):
|
||||
identifier = 'nfc_mf0aes'
|
||||
verbose_name = 'NFC Mifare Ultralight AES'
|
||||
medium_created_by_server = False
|
||||
supports_giftcard = True
|
||||
supports_orderposition = False
|
||||
|
||||
def handle_new(self, organizer, medium, user, auth):
|
||||
from pretix.base.models import GiftCard
|
||||
|
||||
if organizer.settings.get(f'reusable_media_type_{self.identifier}_autocreate_giftcard', as_type=bool):
|
||||
with transaction.atomic():
|
||||
gc = GiftCard.objects.create(
|
||||
issuer=organizer,
|
||||
expires=organizer.default_gift_card_expiry,
|
||||
currency=organizer.settings.get(f'reusable_media_type_{self.identifier}_autocreate_giftcard_currency'),
|
||||
)
|
||||
medium.linked_giftcard = gc
|
||||
medium.save()
|
||||
medium.log_action(
|
||||
'pretix.reusable_medium.linked_giftcard.changed',
|
||||
user=user, auth=auth,
|
||||
data={
|
||||
'linked_giftcard': gc.pk
|
||||
}
|
||||
)
|
||||
gc.log_action(
|
||||
'pretix.giftcards.created',
|
||||
user=user, auth=auth,
|
||||
)
|
||||
return medium
|
||||
|
||||
|
||||
MEDIA_TYPES = {
|
||||
m.identifier: m for m in [
|
||||
BarcodePlainMediaType(),
|
||||
NfcUidMediaType(),
|
||||
NfcMf0aesMediaType(),
|
||||
]
|
||||
}
|
||||
|
||||
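Hedged sketch of turning the new auto-create behaviour on for an organizer; the setting keys follow the f-strings in handle_new() above, while the hierarkey .set() call and the currency value are assumptions:

organizer.settings.set('reusable_media_type_nfc_mf0aes_autocreate_giftcard', True)
organizer.settings.set('reusable_media_type_nfc_mf0aes_autocreate_giftcard_currency', 'EUR')
# With these set, handle_new() creates a gift card, links it to the medium and
# writes both log entries inside a single transaction.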
@@ -21,12 +21,12 @@
|
||||
#
|
||||
from collections import OrderedDict
|
||||
from urllib.parse import urlsplit
|
||||
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
|
||||
|
||||
import pytz
|
||||
from django.conf import settings
|
||||
from django.http import Http404, HttpRequest, HttpResponse
|
||||
from django.middleware.common import CommonMiddleware
|
||||
from django.urls import get_script_prefix
|
||||
from django.urls import get_script_prefix, resolve
|
||||
from django.utils import timezone, translation
|
||||
from django.utils.cache import patch_vary_headers
|
||||
from django.utils.deprecation import MiddlewareMixin
|
||||
@@ -98,9 +98,9 @@ class LocaleMiddleware(MiddlewareMixin):
|
||||
tzname = request.user.timezone
|
||||
if tzname:
|
||||
try:
|
||||
timezone.activate(pytz.timezone(tzname))
|
||||
timezone.activate(ZoneInfo(tzname))
|
||||
request.timezone = tzname
|
||||
except pytz.UnknownTimeZoneError:
|
||||
except ZoneInfoNotFoundError:
|
||||
pass
|
||||
else:
|
||||
timezone.deactivate()
|
||||
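The exception swap above matters because the stdlib raises its own error type for unknown keys; a minimal sketch:

from zoneinfo import ZoneInfo, ZoneInfoNotFoundError

try:
    ZoneInfo("Mars/Olympus_Mons")        # not a valid IANA key
except ZoneInfoNotFoundError:
    pass                                 # pytz.UnknownTimeZoneError would never be raised here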
@@ -230,6 +230,8 @@ class SecurityMiddleware(MiddlewareMixin):
|
||||
)
|
||||
|
||||
def process_response(self, request, resp):
|
||||
url = resolve(request.path_info)
|
||||
|
||||
if settings.DEBUG and resp.status_code >= 400:
|
||||
# Don't use CSP on the debug error page as it breaks Django's fancy error
|
||||
# pages
|
||||
@@ -249,20 +251,28 @@ class SecurityMiddleware(MiddlewareMixin):
|
||||
|
||||
h = {
|
||||
'default-src': ["{static}"],
|
||||
'script-src': ['{static}', 'https://checkout.stripe.com', 'https://js.stripe.com'],
|
||||
'script-src': ['{static}'],
|
||||
'object-src': ["'none'"],
|
||||
'frame-src': ['{static}', 'https://checkout.stripe.com', 'https://js.stripe.com'],
|
||||
'frame-src': ['{static}'],
|
||||
'style-src': ["{static}", "{media}"],
|
||||
'connect-src': ["{dynamic}", "{media}", "https://checkout.stripe.com"],
|
||||
'img-src': ["{static}", "{media}", "data:", "https://*.stripe.com"] + img_src,
|
||||
'connect-src': ["{dynamic}", "{media}"],
|
||||
'img-src': ["{static}", "{media}", "data:"] + img_src,
|
||||
'font-src': ["{static}"],
|
||||
'media-src': ["{static}", "data:"],
|
||||
# form-action is not only used to match on form actions, but also on URLs
|
||||
# form-actions redirect to. In the context of e.g. payment providers or
|
||||
# single-sign-on this can be nearly anything so we cannot really restrict
|
||||
# single-sign-on this can be nearly anything, so we cannot really restrict
|
||||
# this. However, we'll restrict it to HTTPS.
|
||||
'form-action': ["{dynamic}", "https:"] + (['http:'] if settings.SITE_URL.startswith('http://') else []),
|
||||
}
|
||||
# Only include pay.google.com for wallet detection purposes on the Payment selection page
|
||||
if (
|
||||
url.url_name == "event.order.pay.change" or
|
||||
(url.url_name == "event.checkout" and url.kwargs['step'] == "payment")
|
||||
):
|
||||
h['script-src'].append('https://pay.google.com')
|
||||
h['frame-src'].append('https://pay.google.com')
|
||||
h['connect-src'].append('https://google.com/pay')
|
||||
if settings.LOG_CSP:
|
||||
h['report-uri'] = ["/csp_report/"]
|
||||
if 'Content-Security-Policy' in resp:
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
# Generated by Django 1.10.4 on 2017-02-03 14:21
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
import django.core.validators
|
||||
import django.db.migrations.operations.special
|
||||
import django.db.models.deletion
|
||||
@@ -26,7 +28,7 @@ def forwards42(apps, schema_editor):
|
||||
for s in EventSetting.objects.filter(key='timezone').values('object_id', 'value')
|
||||
}
|
||||
for order in Order.objects.all():
|
||||
tz = pytz.timezone(etz.get(order.event_id, 'UTC'))
|
||||
tz = ZoneInfo(etz.get(order.event_id, 'UTC'))
|
||||
order.expires = order.expires.astimezone(tz).replace(hour=23, minute=59, second=59)
|
||||
order.save()
|
||||
|
||||
|
||||
@@ -2,9 +2,9 @@
 # Generated by Django 1.10.2 on 2016-10-19 17:57
 from __future__ import unicode_literals
 
-import pytz
+from zoneinfo import ZoneInfo
 
 from django.db import migrations
 from django.utils import timezone
 
 
 def forwards(apps, schema_editor):
@@ -15,7 +15,7 @@ def forwards(apps, schema_editor):
         for s in EventSetting.objects.filter(key='timezone').values('object_id', 'value')
     }
     for order in Order.objects.all():
-        tz = pytz.timezone(etz.get(order.event_id, 'UTC'))
+        tz = ZoneInfo(etz.get(order.event_id, 'UTC'))
         order.expires = order.expires.astimezone(tz).replace(hour=23, minute=59, second=59)
         order.save()
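Both hunks above swap `pytz.timezone(...)` for the standard library's `zoneinfo.ZoneInfo` (Python 3.9+). For converting an already-aware datetime into a named zone, as these data migrations do, the two behave identically; a small standalone check (pytz is only needed here to run the comparison):

    from datetime import datetime, timezone
    from zoneinfo import ZoneInfo

    import pytz

    dt = datetime(2017, 2, 3, 14, 21, tzinfo=timezone.utc)
    a = dt.astimezone(pytz.timezone('Europe/Berlin'))
    b = dt.astimezone(ZoneInfo('Europe/Berlin'))
    assert a == b  # same instant, same local wall-clock time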
@@ -3,7 +3,6 @@
 
 from django.core.exceptions import ImproperlyConfigured
 from django.db import migrations, models
-from django_mysql.checks import mysql_connections
 
 
 def set_attendee_name_parts(apps, schema_editor):
@@ -24,40 +23,12 @@ def set_attendee_name_parts(apps, schema_editor):
         ia.save(update_fields=['name_parts'])
 
 
-def check_mysqlversion(apps, schema_editor):
-    errors = []
-    any_conn_works = False
-    conns = list(mysql_connections())
-    found = 'Unknown version'
-    for alias, conn in conns:
-        if hasattr(conn, 'mysql_is_mariadb') and conn.mysql_is_mariadb and hasattr(conn, 'mysql_version'):
-            if conn.mysql_version >= (10, 2, 7):
-                any_conn_works = True
-            else:
-                found = 'MariaDB ' + '.'.join(str(v) for v in conn.mysql_version)
-        elif hasattr(conn, 'mysql_version'):
-            if conn.mysql_version >= (5, 7):
-                any_conn_works = True
-            else:
-                found = 'MySQL ' + '.'.join(str(v) for v in conn.mysql_version)
-
-    if conns and not any_conn_works:
-        raise ImproperlyConfigured(
-            'As of pretix 2.2, you need MySQL 5.7+ or MariaDB 10.2.7+ to run pretix. However, we detected a '
-            'database connection to {}'.format(found)
-        )
-    return errors
-
-
 class Migration(migrations.Migration):
     dependencies = [
         ('pretixbase', '0101_auto_20181025_2255'),
     ]
 
     operations = [
-        migrations.RunPython(
-            check_mysqlversion, migrations.RunPython.noop
-        ),
         migrations.RenameField(
             model_name='cartposition',
             old_name='attendee_name',
@@ -1,8 +1,7 @@
 # Generated by Django 3.2.4 on 2021-09-30 10:25
-from datetime import datetime
+from datetime import datetime, timezone
 
 from django.db import migrations, models
-from pytz import UTC
 
 
 class Migration(migrations.Migration):
@@ -15,7 +14,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='invoice',
             name='sent_to_customer',
-            field=models.DateTimeField(blank=True, null=True, default=UTC.localize(datetime(1970, 1, 1, 0, 0, 0, 0))),
+            field=models.DateTimeField(blank=True, null=True, default=datetime(1970, 1, 1, 0, 0, 0, 0, tzinfo=timezone.utc)),
             preserve_default=False,
         ),
     ]
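`UTC.localize(datetime(...))` and `datetime(..., tzinfo=timezone.utc)` describe the same instant, so the field default is unchanged in effect; a standalone check (pytz only needed for the comparison):

    from datetime import datetime, timezone

    from pytz import UTC

    old_style = UTC.localize(datetime(1970, 1, 1, 0, 0, 0, 0))
    new_style = datetime(1970, 1, 1, 0, 0, 0, 0, tzinfo=timezone.utc)
    assert old_style == new_style  # identical point in time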
@@ -50,6 +50,6 @@ class Migration(migrations.Migration):
             ],
             options={
                 'unique_together': {('event', 'secret')},
-            } if 'mysql' not in settings.DATABASES['default']['ENGINE'] else {}
+            }
         ),
     ]
src/pretix/base/migrations/0238_giftcard_owner_ticket.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+# Generated by Django 3.2.18 on 2023-05-04 12:19
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('pretixbase', '0237_question_valid_string_length'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='giftcard',
+            name='owner_ticket',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='owned_gift_cards', to='pretixbase.orderposition'),
+        ),
+    ]
src/pretix/base/migrations/0239_giftcard_info.py (new file, 24 lines)
@@ -0,0 +1,24 @@
+# Generated by Django 3.2.18 on 2023-05-11 11:02
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('pretixbase', '0238_giftcard_owner_ticket'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='giftcardtransaction',
+            name='acceptor',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='gift_card_transactions', to='pretixbase.organizer'),
+        ),
+        migrations.AddField(
+            model_name='giftcardtransaction',
+            name='info',
+            field=models.JSONField(null=True),
+        ),
+    ]
src/pretix/base/migrations/0240_auto_20230516_1119.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+# Generated by Django 3.2.18 on 2023-05-16 11:19
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('pretixbase', '0239_giftcard_info'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='voucher',
+            name='all_addons_included',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='voucher',
+            name='all_bundles_included',
+            field=models.BooleanField(default=False),
+        ),
+    ]
@@ -0,0 +1,23 @@
+# Generated by Django 3.2.19 on 2023-05-25 15:28
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('pretixbase', '0240_auto_20230516_1119'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='itemmetaproperty',
+            name='allowed_values',
+            field=models.TextField(null=True),
+        ),
+        migrations.AddField(
+            model_name='itemmetaproperty',
+            name='required',
+            field=models.BooleanField(default=False),
+        ),
+    ]
src/pretix/base/migrations/0242_auto_20230512_1008.py (new file, 38 lines)
@@ -0,0 +1,38 @@
+# Generated by Django 3.2.18 on 2023-05-12 10:08
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('pretixbase', '0241_itemmetaproperties_required_values'),
+    ]
+
+    operations = [
+        migrations.RenameField(
+            model_name='giftcardacceptance',
+            old_name='collector',
+            new_name='acceptor',
+        ),
+        migrations.AddField(
+            model_name='giftcardacceptance',
+            name='active',
+            field=models.BooleanField(default=True),
+        ),
+        migrations.AddField(
+            model_name='giftcardacceptance',
+            name='reusable_media',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AlterField(
+            model_name='giftcardacceptance',
+            name='issuer',
+            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='gift_card_acceptor_acceptance', to='pretixbase.organizer'),
+        ),
+        migrations.AlterUniqueTogether(
+            name='giftcardacceptance',
+            unique_together={('issuer', 'acceptor')},
+        ),
+    ]
@@ -0,0 +1,23 @@
+# Generated by Django 4.1.9 on 2023-06-26 10:59
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('pretixbase', '0242_auto_20230512_1008'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='device',
+            name='os_name',
+            field=models.CharField(max_length=190, null=True),
+        ),
+        migrations.AddField(
+            model_name='device',
+            name='os_version',
+            field=models.CharField(max_length=190, null=True),
+        ),
+    ]
src/pretix/base/migrations/0244_mediumkeyset.py (new file, 35 lines)
@@ -0,0 +1,35 @@
+# Generated by Django 3.2.18 on 2023-05-17 11:32
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('pretixbase', '0243_device_os_name_and_os_version'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='device',
+            name='rsa_pubkey',
+            field=models.TextField(null=True),
+        ),
+        migrations.CreateModel(
+            name='MediumKeySet',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
+                ('public_id', models.BigIntegerField(unique=True)),
+                ('media_type', models.CharField(max_length=100)),
+                ('active', models.BooleanField(default=True)),
+                ('uid_key', models.BinaryField()),
+                ('diversification_key', models.BinaryField()),
+                ('organizer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='medium_key_sets', to='pretixbase.organizer')),
+            ],
+        ),
+        migrations.AddConstraint(
+            model_name='mediumkeyset',
+            constraint=models.UniqueConstraint(condition=models.Q(('active', True)), fields=('organizer', 'media_type'), name='keyset_unique_active'),
+        ),
+    ]
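The `AddConstraint` above creates a conditional (partial) unique constraint: only rows with `active=True` count towards uniqueness per `(organizer, media_type)`. A sketch of how the same constraint is typically declared on the model side; the field list is abridged and only the names that appear in the migration are taken from it:

    from django.db import models


    class MediumKeySet(models.Model):
        # Abridged; see the migration above for the full field list.
        organizer = models.ForeignKey('pretixbase.Organizer', on_delete=models.CASCADE,
                                      related_name='medium_key_sets')
        media_type = models.CharField(max_length=100)
        active = models.BooleanField(default=True)

        class Meta:
            constraints = [
                # At most one *active* key set per (organizer, media_type);
                # inactive rows are not restricted.
                models.UniqueConstraint(
                    fields=['organizer', 'media_type'],
                    condition=models.Q(active=True),
                    name='keyset_unique_active',
                ),
            ]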
@@ -97,7 +97,7 @@ def _transactions_mark_order_dirty(order_id, using=None):
     if getattr(dirty_transactions, 'order_ids', None) is None:
         dirty_transactions.order_ids = set()
 
-    if _check_for_dirty_orders not in [func for savepoint_id, func in conn.run_on_commit]:
+    if _check_for_dirty_orders not in [func for (savepoint_id, func, *__) in conn.run_on_commit]:
         transaction.on_commit(_check_for_dirty_orders, using)
     dirty_transactions.order_ids.clear()  # This is necessary to clean up after old threads with rollbacked transactions
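The starred target makes the unpacking tolerant of entries with more than two elements; newer Django versions append extra data to each `run_on_commit` entry (Django 4.2's `on_commit()` gained a `robust` flag, for instance), which would make the old two-element unpacking raise `ValueError`. A standalone illustration of the pattern with made-up sample data:

    # Sample data only; the real entries come from Django's connection object.
    entries = [
        (set(), print),            # older shape: (savepoint_ids, func)
        (set(), print, False),     # newer shape: (savepoint_ids, func, robust)
    ]
    funcs = [func for (savepoint_id, func, *__) in entries]
    assert funcs == [print, print]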
@@ -178,7 +178,7 @@ class LoggedModel(models.Model, LoggingMixin):
 
         return LogEntry.objects.filter(
             content_type=self.logs_content_type, object_id=self.pk
-        ).select_related('user', 'event', 'oauth_application', 'api_token', 'device')
+        ).select_related('user', 'event', 'event__organizer', 'oauth_application', 'api_token', 'device')
 
 
 class LockModel:
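Adding `event__organizer` makes `select_related` follow the foreign key one hop further, so reading `entry.event.organizer` later does not cost one extra query per log entry. A generic illustration with made-up models:

    from django.db import models


    class Organizer(models.Model):
        name = models.CharField(max_length=190)


    class Event(models.Model):
        organizer = models.ForeignKey(Organizer, on_delete=models.CASCADE)


    class Entry(models.Model):
        event = models.ForeignKey(Event, on_delete=models.CASCADE)


    # One query with two JOINs; entry.event and entry.event.organizer are
    # populated without further database hits.
    entries = Entry.objects.select_related('event', 'event__organizer')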
@@ -39,6 +39,7 @@ from pretix.base.models.fields import MultiStringField
 from pretix.base.models.organizer import Organizer
 from pretix.base.settings import PERSON_NAME_SCHEMES
 from pretix.helpers.countries import FastCountryField
+from pretix.helpers.names import build_name
 
 
 class CustomerSSOProvider(LoggedModel):
@@ -120,14 +121,23 @@ class Customer(LoggedModel):
         if self.email:
             self.email = self.email.lower()
         if 'update_fields' in kwargs and 'last_modified' not in kwargs['update_fields']:
-            kwargs['update_fields'] = list(kwargs['update_fields']) + ['last_modified']
+            kwargs['update_fields'] = {'last_modified'}.union(kwargs['update_fields'])
         if not self.identifier:
             self.assign_identifier()
+            if 'update_fields' in kwargs:
+                kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
         if self.name_parts:
-            self.name_cached = self.name
+            name = self.name
+            if self.name_cached != name:
+                self.name_cached = name
+                if 'update_fields' in kwargs:
+                    kwargs['update_fields'] = {'name_cached'}.union(kwargs['update_fields'])
         else:
-            self.name_cached = ""
-            self.name_parts = {}
+            if self.name_cached != "" or self.name_parts != {}:
+                self.name_cached = ""
+                self.name_parts = {}
+                if 'update_fields' in kwargs:
+                    kwargs['update_fields'] = {'name_cached', 'name_parts'}.union(kwargs['update_fields'])
         super().save(**kwargs)
 
     def anonymize(self):
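The recurring pattern in this save() is: whenever the method mutates an additional field itself, it must also merge that field into `update_fields` if the caller passed one, otherwise the extra change is silently skipped by the UPDATE. A generic sketch of the pattern outside this codebase (the `Article` model and its fields are made up for illustration):

    from django.db import models
    from django.utils import timezone


    class Article(models.Model):
        title = models.CharField(max_length=200)
        last_modified = models.DateTimeField(null=True)

        def save(self, *args, **kwargs):
            self.last_modified = timezone.now()
            if 'update_fields' in kwargs:
                # Merge instead of overwrite, so the caller's selection is kept.
                kwargs['update_fields'] = {'last_modified'}.union(kwargs['update_fields'])
            super().save(*args, **kwargs)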
@@ -171,15 +181,11 @@ class Customer(LoggedModel):
 
     @property
     def name(self):
-        if not self.name_parts:
-            return ""
-        if '_legacy' in self.name_parts:
-            return self.name_parts['_legacy']
-        if '_scheme' in self.name_parts:
-            scheme = PERSON_NAME_SCHEMES[self.name_parts['_scheme']]
-        else:
-            raise TypeError("Invalid name given.")
-        return scheme['concatenation'](self.name_parts).strip()
+        return build_name(self.name_parts, fallback_scheme=lambda: self.organizer.settings.name_scheme) or ""
+
+    @property
+    def name_all_components(self):
+        return build_name(self.name_parts, "concatenation_all_components", fallback_scheme=lambda: self.organizer.settings.name_scheme) or ""
 
     def __str__(self):
         s = f'#{self.identifier}'
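`pretix.helpers.names.build_name` itself is not part of this diff; judging from the property bodies it replaces, it presumably looks roughly like the sketch below, with `fallback_scheme` passed as a callable so the organizer-settings lookup only happens when the name parts carry no `_scheme` key. Treat this as an inferred approximation, not the actual implementation:

    from pretix.base.settings import PERSON_NAME_SCHEMES


    # Inferred sketch; the real pretix.helpers.names.build_name may differ.
    def build_name(name_parts, concatenation='concatenation', fallback_scheme=None):
        if not name_parts:
            return None
        if '_legacy' in name_parts:
            return name_parts['_legacy']
        if '_scheme' in name_parts:
            scheme = PERSON_NAME_SCHEMES[name_parts['_scheme']]
        else:
            # Callable, so the (potentially expensive) settings lookup stays lazy.
            scheme = PERSON_NAME_SCHEMES[fallback_scheme()]
        return scheme[concatenation](name_parts).strip()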
@@ -302,15 +308,11 @@ class AttendeeProfile(models.Model):
 
     @property
     def attendee_name(self):
-        if not self.attendee_name_parts:
-            return None
-        if '_legacy' in self.attendee_name_parts:
-            return self.attendee_name_parts['_legacy']
-        if '_scheme' in self.attendee_name_parts:
-            scheme = PERSON_NAME_SCHEMES[self.attendee_name_parts['_scheme']]
-        else:
-            scheme = PERSON_NAME_SCHEMES[self.customer.organizer.settings.name_scheme]
-        return scheme['concatenation'](self.attendee_name_parts).strip()
+        return build_name(self.attendee_name_parts, fallback_scheme=lambda: self.customer.organizer.settings.name_scheme)
+
+    @property
+    def attendee_name_all_components(self):
+        return build_name(self.attendee_name_parts, "concatenation_all_components", fallback_scheme=lambda: self.customer.organizer.settings.name_scheme)
 
     @property
     def state_name(self):
@@ -98,6 +98,8 @@ class Gate(LoggedModel):
             if not Gate.objects.filter(organizer=self.organizer, identifier=code).exists():
                 self.identifier = code
                 break
+        if 'update_fields' in kwargs:
+            kwargs['update_fields'] = {'identifier'}.union(kwargs['update_fields'])
         return super().save(*args, **kwargs)
@@ -141,6 +143,14 @@ class Device(LoggedModel):
         max_length=190,
         null=True, blank=True
     )
+    os_name = models.CharField(
+        max_length=190,
+        null=True, blank=True
+    )
+    os_version = models.CharField(
+        max_length=190,
+        null=True, blank=True
+    )
     software_brand = models.CharField(
         max_length=190,
         null=True, blank=True
@@ -156,6 +166,10 @@ class Device(LoggedModel):
         null=True,
         blank=False
     )
+    rsa_pubkey = models.TextField(
+        null=True,
+        blank=True,
+    )
     info = models.JSONField(
         null=True, blank=True,
     )
@@ -173,6 +187,8 @@ class Device(LoggedModel):
     def save(self, *args, **kwargs):
         if not self.device_id:
             self.device_id = (self.organizer.devices.aggregate(m=Max('device_id'))['m'] or 0) + 1
+            if 'update_fields' in kwargs:
+                kwargs['update_fields'] = {'device_id'}.union(kwargs['update_fields'])
         super().save(*args, **kwargs)
 
     def permission_set(self) -> set:
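Device numbers are assigned per organizer by taking the current maximum and adding one. A minimal standalone version of that pattern (the `Gadget` model is made up); note that without additional locking or a unique constraint two concurrent saves could compute the same number:

    from django.db import models
    from django.db.models import Max


    class Gadget(models.Model):
        organizer_key = models.CharField(max_length=190)
        gadget_id = models.PositiveIntegerField(null=True)

        def save(self, *args, **kwargs):
            if not self.gadget_id:
                # Next free per-organizer number; racy without external locking.
                current = Gadget.objects.filter(
                    organizer_key=self.organizer_key
                ).aggregate(m=Max('gadget_id'))['m']
                self.gadget_id = (current or 0) + 1
                if 'update_fields' in kwargs:
                    kwargs['update_fields'] = {'gadget_id'}.union(kwargs['update_fields'])
            super().save(*args, **kwargs)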
Some files were not shown because too many files have changed in this diff.