Compare commits

255 commits: `fix-api-op…` to `improve-ba…` (abbreviated SHA1s 3a1eca7d9c through 550cb28a0e; the Author, Date, and commit message columns of the compare table were empty).
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 23 changed lines)
@@ -1,23 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Please only create issues for bug reports. Feature requests or general questions
|
||||
should start as a "Discussion" on GitHub.
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
<!-- Please only create issues for bug reports. Feature requests or general questions should start as a "Discussion" on GitHub. -->
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Screenshots**
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
name: Bug report
|
||||
description: Please only create issues for bug reports. Feature requests or general questions should start as a "Discussion" on GitHub.
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: Please make sure to search our issues for similar bugs first! If the bug has been reported already, react with a thumbs-up, and/or leave a comment providing further details.
|
||||
- type: textarea
|
||||
id: current
|
||||
attributes:
|
||||
label: Problem and impact
|
||||
description: What problem are you running into? What impact does it have on you / your event?
|
||||
placeholder: When trying to do ____, pretix suddenly shows me an error saying "...".
|
||||
- type: textarea
|
||||
id: expected
|
||||
attributes:
|
||||
label: Expected behaviour
|
||||
description: Sometimes bugs are subtle and the expected behaviour may need some explanation. Leave empty if it's just "Don't be broken."
|
||||
- type: textarea
|
||||
id: reproduction
|
||||
attributes:
|
||||
label: Steps to reproduce
|
||||
description: "Please give as much context as possible: Are there any settings that impact this behaviour?"
|
||||
placeholder: |
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
4.
|
||||
- type: textarea
|
||||
id: screenshots
|
||||
attributes:
|
||||
label: Screenshots
|
||||
description: If possible, show screenshots of the problem.
|
||||
- type: input
|
||||
id: link
|
||||
attributes:
|
||||
label: Link
|
||||
description: Link to the page where the bug occurs
|
||||
- type: input
|
||||
id: browser
|
||||
attributes:
|
||||
label: Browser (software, desktop or mobile?) and version
|
||||
description: Leave empty for backend problems
|
||||
- type: input
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating system, dependency versions
|
||||
description: Leave empty for frontend problems
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: The pretix version in use. (Leave empty if unknown.)
|
||||
|
||||
.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
blank_issues_enabled: true
|
||||
contact_links:
|
||||
- name: Community Support
|
||||
url: https://github.com/pretix/pretix/discussions/categories/q-a
|
||||
about: Not sure how to do Y? Please post your support requests in the Q&A section of our GitHub Discussions instead!
|
||||
- name: Feature ideas
|
||||
url: https://github.com/pretix/pretix/discussions/categories/ideas
|
||||
about: Please post your idea in the Ideas section of our GitHub Discussions instead!
|
||||
SECURITY.md (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
# Security policy
|
||||
|
||||
## Reporting a vulnerability
|
||||
|
||||
If you discover a vulnerability in our software or server systems, please report it to us in private. Do not attempt to harm our users, customers' data, or our systems' availability when looking for vulnerabilities.
|
||||
|
||||
Please contact us at security@pretix.eu with full details and steps to reproduce and allow reasonable time for us to resolve the issue before publishing your findings. If you wish to encrypt your email, you can find our GPG key [here](https://pretix.eu/.well-known/security@pretix.eu.asc).
|
||||
|
||||
We're not large enough to run a formal bug bounty program, but if you find a serious vulnerability in our service, we will find a way to show our gratitude.
|
||||
|
||||
## Version support
|
||||
|
||||
Security support is provided for the current stable release as well as the two previous stable releases.
|
||||
Be sure to keep your pretix installation up to date.
|
||||
|
||||
New releases and security issues will be announced on our [blog](https://pretix.eu/about/en/blog/). If you
|
||||
subscribe to our [newsletter](https://pretix.eu/about/en/blog/) in the "News about self-hosting pretix"
|
||||
category, we will also send you an email on security issues.
|
||||
|
||||
Past security issues are listed [on our website](https://pretix.eu/about/en/security).
|
||||
@@ -2,6 +2,7 @@
|
||||
file=/tmp/supervisor.sock
|
||||
|
||||
[supervisord]
|
||||
environment = AUTOMIGRATE="skip"
|
||||
logfile=/dev/stdout
|
||||
logfile_maxbytes=0
|
||||
loglevel=info
|
||||
|
||||
@@ -3155,14 +3155,14 @@ a .fa, a .wy-menu-vertical li span.toctree-expand, .wy-menu-vertical li a span.t
|
||||
vertical-align: -15%
|
||||
}
|
||||
|
||||
.wy-alert, .rst-content .note, .rst-content .attention, .rst-content .caution, .rst-content .danger, .rst-content .error, .rst-content .hint, .rst-content .important, .rst-content .tip, .rst-content .warning, .rst-content .seealso, .rst-content .admonition-todo {
|
||||
.wy-alert, .rst-content .note, .rst-content .attention, .rst-content .caution, .rst-content .danger, .rst-content .error, .rst-content .hint, .rst-content .important, .rst-content .tip, .rst-content .warning, .rst-content .seealso, .rst-content .admonition-todo, .rst-content div.deprecated {
|
||||
padding: 12px;
|
||||
line-height: 24px;
|
||||
margin-bottom: 24px;
|
||||
background: #e7f2fa
|
||||
}
|
||||
|
||||
.wy-alert-title, .rst-content .admonition-title {
|
||||
.wy-alert-title, .rst-content .admonition-title, .rst-content .deprecated .versionmodified {
|
||||
color: #fff;
|
||||
font-weight: bold;
|
||||
display: block;
|
||||
@@ -6067,6 +6067,10 @@ url('../opensans_regular_macroman/OpenSans-Regular-webfont.svg#open_sansregular'
|
||||
img.screenshot, a.screenshot img {
|
||||
box-shadow: 0 4px 18px 0 rgba(0,0,0,0.1), 0 6px 20px 0 rgba(0,0,0,0.09);
|
||||
}
|
||||
section > a.screenshot {
|
||||
display: block;
|
||||
margin-bottom: 24px;
|
||||
}
|
||||
|
||||
/* Changes */
|
||||
.versionchanged {
|
||||
|
||||
@@ -240,6 +240,9 @@ The following snippet is an example on how to configure a nginx proxy for pretix
|
||||
listen 80 default_server;
|
||||
listen [::]:80 ipv6only=on default_server;
|
||||
server_name pretix.mydomain.com;
|
||||
location / {
|
||||
return 301 https://$host$request_uri;
|
||||
}
|
||||
}
|
||||
server {
|
||||
listen 443 default_server;
|
||||
|
||||
@@ -225,6 +225,9 @@ The following snippet is an example on how to configure a nginx proxy for pretix
|
||||
listen 80 default_server;
|
||||
listen [::]:80 ipv6only=on default_server;
|
||||
server_name pretix.mydomain.com;
|
||||
location / {
|
||||
return 301 https://$host$request_uri;
|
||||
}
|
||||
}
|
||||
server {
|
||||
listen 443 default_server;
|
||||
|
||||
@@ -105,6 +105,37 @@ following endpoint:
|
||||
|
||||
You will receive a response equivalent to the response of your initialization request.
|
||||
|
||||
Device Information
|
||||
------------------
|
||||
|
||||
You can request information about your device and the server with one call:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /api/v1/device/info HTTP/1.1
|
||||
Host: pretix.eu
|
||||
|
||||
The response will look like this:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"device": {
|
||||
"organizer": "foo",
|
||||
"device_id": 5,
|
||||
"unique_serial": "HHZ9LW9JWP390VFZ",
|
||||
"api_token": "1kcsh572fonm3hawalrncam4l1gktr2rzx25a22l8g9hx108o9oi0rztpcvwnfnd",
|
||||
"name": "Bar",
|
||||
"gate": {
|
||||
"id": 3,
|
||||
"name": "South entrance"
|
||||
}
|
||||
}
|
||||
}
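For orientation, a minimal client-side sketch of this call (a sketch only: host and token are placeholders, and the ``Authorization: Device …`` header assumes the device authentication scheme implied by the ``api_token`` in the example above; your setup may differ):

.. sourcecode:: python

    # Hedged sketch using the third-party "requests" library.
    import requests

    API = "https://pretix.eu/api/v1"
    DEVICE_TOKEN = "1kcsh572fonm3hawalrncam4l1gktr2rzx25a22l8g9hx108o9oi0rztpcvwnfnd"  # placeholder

    r = requests.get(f"{API}/device/info",
                     headers={"Authorization": f"Device {DEVICE_TOKEN}"})
    r.raise_for_status()
    device = r.json()["device"]
    print(device["name"], device.get("gate"))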
|
||||
|
||||
Creating a new API key
|
||||
----------------------
|
||||
|
||||
|
||||
doc/api/resources/checkin.rst (new file, 346 lines)
@@ -0,0 +1,346 @@
|
||||
.. spelling:: checkin
|
||||
|
||||
.. _rest-checkin:
|
||||
|
||||
Check-in
|
||||
========
|
||||
|
||||
This page describes special APIs built for ticket scanning apps. For managing check-in configuration or other operations,
|
||||
please also see :ref:`rest-checkinlists`. The check-in list API also contains endpoints to obtain statistics or log
|
||||
failed scans.
|
||||
|
||||
.. versionchanged:: 4.12
|
||||
|
||||
The endpoints listed on this page have been added.
|
||||
|
||||
.. _`rest-checkin-redeem`:
|
||||
|
||||
Checking a ticket in
|
||||
--------------------
|
||||
|
||||
.. http:post:: /api/v1/organizers/(organizer)/checkinrpc/redeem/
|
||||
|
||||
Tries to redeem an order position, i.e. checks the attendee in (or out). This is the recommended endpoint to use
|
||||
if you build any kind of scanning app that performs check-ins for scanned barcodes. It is safe to use with untrusted
|
||||
inputs in the ``secret`` field.
|
||||
|
||||
This endpoint supports passing multiple check-in lists to perform a multi-event scan. However, each check-in list
|
||||
passed needs to be from a distinct event.
|
||||
|
||||
:<json string secret: Scanned QR code corresponding to the ``secret`` attribute of a ticket.
|
||||
:<json array lists: List of check-in list IDs to search on. No two check-in lists may be from the same event.
|
||||
:<json string type: Send ``"exit"`` for an exit and ``"entry"`` (default) for an entry.
|
||||
:<json datetime datetime: Specifies the datetime of the check-in. If not supplied, the current time will be used.
|
||||
:<json boolean force: Specifies that the check-in should succeed regardless of revoked barcode, previous check-ins or required
|
||||
questions that have not been filled. This is usually used to upload offline scans that already happened,
|
||||
because there's no point in validating them since they happened whether they are valid or not. Defaults to ``false``.
|
||||
:<json boolean questions_supported: When this parameter is set to ``true``, handling of questions is supported. If
|
||||
you do not implement question handling in your user interface, you **must**
|
||||
set this to ``false``. In that case, questions will just be ignored. Defaults
|
||||
to ``true``.
|
||||
:<json boolean ignore_unpaid: Specifies that the check-in should succeed even if the order is in pending state.
|
||||
Defaults to ``false`` and only works when ``include_pending`` is set on the check-in
|
||||
list.
|
||||
:<json object answers: If questions are supported/required, you may/must supply a mapping of question IDs to their
|
||||
respective answers. The answers should always be strings. In case of (multiple-)choice-type
|
||||
answers, the string should contain the (comma-separated) IDs of the selected options.
|
||||
:<json string nonce: You can set this parameter to a unique random value to identify this check-in. If you're sending
|
||||
this request twice with the same nonce, the second request will also succeed but will always
|
||||
create only one check-in object even when the previous request was successful as well. This
|
||||
allows for a certain level of idempotency and enables you to re-try after a connection failure.
|
||||
:>json string status: ``"ok"``, ``"incomplete"``, or ``"error"``
|
||||
:>json string reason: Reason code, only set on status ``"error"``, see below for possible values.
|
||||
:>json string reason_explanation: Human-readable explanation, only set on status ``"error"`` and reason ``"rules"``, can be null.
|
||||
:>json object position: Copy of the matching order position (if any was found). The contents are the same as the
|
||||
:ref:`order-position-resource`, with the following differences: (1) The ``checkins`` value
|
||||
will only include check-ins for the selected list. (2) An additional boolean property
|
||||
``require_attention`` will inform you whether either the order or the item have the
|
||||
``checkin_attention`` flag set. (3) If ``attendee_name`` is empty, it may automatically fall
|
||||
back to values from a parent product or from invoice addresses.
|
||||
:>json boolean require_attention: Whether or not the ``require_attention`` flag is set on the item or order.
|
||||
:>json object list: Excerpt of information about the matching :ref:`check-in list <rest-checkinlists>` (if any was found),
|
||||
including the attributes ``id``, ``name``, ``event``, ``subevent``, and ``include_pending``.
|
||||
:>json object questions: List of questions to be answered for check-in, only set on status ``"incomplete"``.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
POST /api/v1/organizers/bigevents/checkinrpc/redeem/ HTTP/1.1
|
||||
Host: pretix.eu
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
{
|
||||
"secret": "M5BO19XmFwAjLd4nDYUAL9ISjhti0e9q",
|
||||
"lists": [1],
|
||||
"force": false,
|
||||
"ignore_unpaid": false,
|
||||
"nonce": "Pvrk50vUzQd0DhdpNRL4I4OcXsvg70uA",
|
||||
"datetime": null,
|
||||
"questions_supported": true,
|
||||
"answers": {
|
||||
"4": "XS"
|
||||
}
|
||||
}
|
||||
|
||||
**Example successful response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 201 Created
|
||||
Vary: Accept
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"status": "ok",
|
||||
"position": {
|
||||
…
|
||||
},
|
||||
"require_attention": false,
|
||||
"list": {
|
||||
"id": 1,
|
||||
"name": "Default check-in list",
|
||||
"event": "sampleconf",
|
||||
"subevent": null,
|
||||
"include_pending": false
|
||||
}
|
||||
}
|
||||
|
||||
**Example response with required questions**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 400 Bad Request
|
||||
Content-Type: text/json
|
||||
|
||||
{
|
||||
"status": "incomplete",
|
||||
"position": {
|
||||
…
|
||||
},
|
||||
"require_attention": false,
|
||||
"list": {
|
||||
"id": 1,
|
||||
"name": "Default check-in list",
|
||||
"event": "sampleconf",
|
||||
"subevent": null,
|
||||
"include_pending": false
|
||||
},
|
||||
"questions": [
|
||||
{
|
||||
"id": 1,
|
||||
"question": {"en": "T-Shirt size"},
|
||||
"type": "C",
|
||||
"required": false,
|
||||
"items": [1, 2],
|
||||
"position": 1,
|
||||
"identifier": "WY3TP9SL",
|
||||
"ask_during_checkin": true,
|
||||
"options": [
|
||||
{
|
||||
"id": 1,
|
||||
"identifier": "LVETRWVU",
|
||||
"position": 0,
|
||||
"answer": {"en": "S"}
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"identifier": "DFEMJWMJ",
|
||||
"position": 1,
|
||||
"answer": {"en": "M"}
|
||||
},
|
||||
{
|
||||
"id": 3,
|
||||
"identifier": "W9AH7RDE",
|
||||
"position": 2,
|
||||
"answer": {"en": "L"}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
**Example error response (invalid ticket)**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 404 Not Found
|
||||
Content-Type: text/json
|
||||
|
||||
{
|
||||
"detail": "Not found.",
|
||||
"status": "error",
|
||||
"reason": "invalid",
|
||||
"reason_explanation": null,
|
||||
"require_attention": false
|
||||
}
|
||||
|
||||
**Example error response (known, but invalid ticket)**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Content-Type: text/json
|
||||
|
||||
{
|
||||
"status": "error",
|
||||
"reason": "unpaid",
|
||||
"reason_explanation": null,
|
||||
"require_attention": false,
|
||||
"list": {
|
||||
"id": 1,
|
||||
"name": "Default check-in list",
|
||||
"event": "sampleconf",
|
||||
"subevent": null,
|
||||
"include_pending": false
|
||||
},
|
||||
"position": {
|
||||
…
|
||||
}
|
||||
}
|
||||
|
||||
Possible error reasons:
|
||||
|
||||
* ``invalid`` - Ticket is not known.
|
||||
* ``unpaid`` - Ticket is not paid for.
|
||||
* ``canceled`` – Ticket is canceled or expired.
|
||||
* ``already_redeemed`` - Ticket already has been redeemed.
|
||||
* ``product`` - Tickets with this product may not be scanned at this device.
|
||||
* ``rules`` - Check-in prevented by a user-defined rule.
|
||||
* ``ambiguous`` - Multiple tickets match scan, rejected.
|
||||
* ``revoked`` - Ticket code has been revoked.
|
||||
* ``error`` - Internal error.
|
||||
|
||||
In case of reason ``rules``, there might be an additional response field ``reason_explanation`` with a human-readable
|
||||
description of the violated rules. However, that field can also be missing or be ``null``.
|
||||
|
||||
:param organizer: The ``slug`` field of the organizer to fetch
|
||||
:statuscode 201: no error
|
||||
:statuscode 400: Invalid or incomplete request, see above
|
||||
:statuscode 401: Authentication failure
|
||||
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
|
||||
:statuscode 404: The requested order position does not exist.
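As a rough illustration of how a scanning app might use this endpoint, here is a hedged Python sketch (organizer slug, list ID, token, and barcode are placeholders taken from the examples above; the retry loop relies on reusing the same ``nonce``, as described for that field):

.. sourcecode:: python

    # Sketch only: redeem a scanned barcode and retry safely on connection errors.
    import secrets
    import requests

    API = "https://pretix.eu/api/v1"
    HEADERS = {"Authorization": "Token YOUR-API-TOKEN"}  # placeholder; substitute your own credentials

    def redeem(scanned_secret, list_ids):
        payload = {
            "secret": scanned_secret,
            "lists": list_ids,
            "type": "entry",
            "questions_supported": False,  # this sketch does not render question forms
            "nonce": secrets.token_hex(16),  # reused on retries so at most one check-in is created
        }
        for _ in range(3):
            try:
                r = requests.post(f"{API}/organizers/bigevents/checkinrpc/redeem/",
                                  json=payload, headers=HEADERS, timeout=10)
                return r.status_code, r.json()
            except requests.ConnectionError:
                continue  # same nonce, so retrying is safe
        raise RuntimeError("server unreachable")

    code, body = redeem("M5BO19XmFwAjLd4nDYUAL9ISjhti0e9q", [1])
    print(code, body.get("status"), body.get("reason"))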
|
||||
|
||||
Performing a ticket search
|
||||
--------------------------
|
||||
|
||||
.. http:get:: /api/v1/organizers/(organizer)/checkinrpc/search/
|
||||
|
||||
Returns a list of all order positions matching a given search request. The result is the same as
|
||||
the :ref:`order-position-resource`, with the following differences:
|
||||
|
||||
* The ``checkins`` value will only include check-ins for the selected list.
|
||||
|
||||
* An additional boolean property ``require_attention`` will inform you whether either the order or the item
|
||||
have the ``checkin_attention`` flag set.
|
||||
|
||||
* If ``attendee_name`` is empty, it will automatically fall back to values from a parent product or from invoice
|
||||
addresses.
|
||||
|
||||
This endpoint supports passing multiple check-in lists to perform a multi-event search. However, each check-in list
|
||||
passed needs to be from a distinct event.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /api/v1/organizers/bigevents/checkinrpc/search/?list=1&search=Peter HTTP/1.1
|
||||
Host: pretix.eu
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"count": 1,
|
||||
"next": null,
|
||||
"previous": null,
|
||||
"results": [
|
||||
{
|
||||
"id": 23442,
|
||||
"order": "ABC12",
|
||||
"positionid": 1,
|
||||
"item": 1345,
|
||||
"variation": null,
|
||||
"price": "23.00",
|
||||
"attendee_name": "Peter",
|
||||
"attendee_name_parts": {
|
||||
"full_name": "Peter",
|
||||
},
|
||||
"attendee_email": null,
|
||||
"voucher": null,
|
||||
"tax_rate": "0.00",
|
||||
"tax_rule": null,
|
||||
"tax_value": "0.00",
|
||||
"secret": "z3fsn8jyufm5kpk768q69gkbyr5f4h6w",
|
||||
"addon_to": null,
|
||||
"subevent": null,
|
||||
"pseudonymization_id": "MQLJvANO3B",
|
||||
"seat": null,
|
||||
"checkins": [
|
||||
{
|
||||
"list": 1,
|
||||
"type": "entry",
|
||||
"gate": null,
|
||||
"device": 2,
|
||||
"datetime": "2017-12-25T12:45:23Z",
|
||||
"auto_checked_in": true
|
||||
}
|
||||
],
|
||||
"answers": [
|
||||
{
|
||||
"question": 12,
|
||||
"answer": "Foo",
|
||||
"options": []
|
||||
}
|
||||
],
|
||||
"downloads": [
|
||||
{
|
||||
"output": "pdf",
|
||||
"url": "https://pretix.eu/api/v1/organizers/bigevents/events/sampleconf/orderpositions/23442/download/pdf/"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
:query string search: Fuzzy search matching the attendee name, order code, invoice address name as well as to the beginning of the secret.
|
||||
:query integer list: The check-in list to search on, can be passed multiple times.
|
||||
:query integer page: The page number in case of a multi-page result set, default is 1
|
||||
:query string ignore_status: If set to ``true``, results will be returned regardless of the state of
|
||||
the order they belong to and you will need to do your own filtering by order status.
|
||||
:query string ordering: Manually set the ordering of results. Valid fields to be used are ``order__code``,
|
||||
``order__datetime``, ``positionid``, ``attendee_name``, ``last_checked_in`` and ``order__email``. Default:
|
||||
``attendee_name,positionid``
|
||||
:query string order: Only return positions of the order with the given order code
|
||||
|
||||
:query string expand: Expand a field into a full object. Currently only ``subevent``, ``item``, and ``variation`` are supported. Can be passed multiple times.
|
||||
:query integer item: Only return positions with the purchased item matching the given ID.
|
||||
:query integer item__in: Only return positions with the purchased item matching one of the given comma-separated IDs.
|
||||
:query integer variation: Only return positions with the purchased item variation matching the given ID.
|
||||
:query integer variation__in: Only return positions with the purchased item variation matching one of the given
|
||||
comma-separated IDs.
|
||||
:query string attendee_name: Only return positions with the given value in the attendee_name field. Also, add-on
|
||||
products positions are shown if they refer to an attendee with the given name.
|
||||
:query string secret: Only return positions with the given ticket secret.
|
||||
:query string order__status: Only return positions with the given order status.
|
||||
:query string order__status__in: Only return positions with one of the given comma-separated order statuses.
|
||||
:query boolean has_checkin: If set to ``true`` or ``false``, only return positions that have or have not been
|
||||
checked in already.
|
||||
:query integer subevent: Only return positions of the sub-event with the given ID
|
||||
:query integer subevent__in: Only return positions of one of the sub-events with the given comma-separated IDs
|
||||
:query integer addon_to: Only return positions that are add-ons to the position with the given ID.
|
||||
:query integer addon_to__in: Only return positions that are add-ons to one of the positions with the given
|
||||
comma-separated IDs.
|
||||
:query string voucher: Only return positions with a specific voucher.
|
||||
:query string voucher__code: Only return positions with a specific voucher code.
|
||||
:param organizer: The ``slug`` field of the organizer to fetch
|
||||
:statuscode 200: no error
|
||||
:statuscode 401: Authentication failure
|
||||
:statuscode 403: The requested organizer or check-in list does not exist **or** you have no permission to view this resource.
|
||||
:statuscode 404: The requested check-in list does not exist.
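Analogously, a hedged sketch of a search call that walks the paginated result (organizer, list ID, and token are again placeholders):

.. sourcecode:: python

    # Sketch: search one or more check-in lists and iterate over paginated results.
    import requests

    API = "https://pretix.eu/api/v1"
    HEADERS = {"Authorization": "Token YOUR-API-TOKEN"}  # placeholder; substitute your own credentials

    url = f"{API}/organizers/bigevents/checkinrpc/search/"
    params = {"list": 1, "search": "Peter"}
    while url:
        r = requests.get(url, params=params, headers=HEADERS, timeout=10)
        r.raise_for_status()
        page = r.json()
        for pos in page["results"]:
            print(pos["order"], pos["attendee_name"], len(pos["checkins"]))
        url, params = page["next"], None  # "next" already carries the query string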
|
||||
@@ -1,5 +1,7 @@
|
||||
.. spelling:: checkin
|
||||
|
||||
.. _rest-checkinlists:
|
||||
|
||||
Check-in lists
|
||||
==============
|
||||
|
||||
@@ -34,6 +36,7 @@ allow_multiple_entries boolean If ``true``, su
|
||||
allow_entry_after_exit boolean If ``true``, subsequent scans of a ticket on this list are valid if the last scan of the ticket was an exit scan.
|
||||
rules object Custom check-in logic. The contents of this field are currently not considered a stable API and modifications through the API are highly discouraged.
|
||||
exit_all_at datetime Automatically check out (i.e. perform an exit scan) at this point in time. After this happened, this property will automatically be set exactly one day into the future. Note that this field is considered "internal configuration" and if you pull the list with ``If-Modified-Since``, the daily change in this field will not trigger a response.
|
||||
addon_match boolean If ``true``, tickets on this list can be redeemed by scanning their parent ticket if this still leads to an unambiguous match.
|
||||
===================================== ========================== =======================================================
|
||||
|
||||
.. versionchanged:: 3.9
|
||||
@@ -53,6 +56,10 @@ exit_all_at datetime Automatically c
|
||||
|
||||
The ``ends_after`` and ``expand`` query parameters have been added.
|
||||
|
||||
.. versionchanged:: 4.12
|
||||
|
||||
The ``addon_match`` attribute has been added.
|
||||
|
||||
Endpoints
|
||||
---------
|
||||
|
||||
@@ -94,6 +101,7 @@ Endpoints
|
||||
"allow_entry_after_exit": true,
|
||||
"exit_all_at": null,
|
||||
"rules": {},
|
||||
"addon_match": false,
|
||||
"auto_checkin_sales_channels": [
|
||||
"pretixpos"
|
||||
]
|
||||
@@ -146,6 +154,7 @@ Endpoints
|
||||
"allow_entry_after_exit": true,
|
||||
"exit_all_at": null,
|
||||
"rules": {},
|
||||
"addon_match": false,
|
||||
"auto_checkin_sales_channels": [
|
||||
"pretixpos"
|
||||
]
|
||||
@@ -245,6 +254,7 @@ Endpoints
|
||||
"subevent": null,
|
||||
"allow_multiple_entries": false,
|
||||
"allow_entry_after_exit": true,
|
||||
"addon_match": false,
|
||||
"auto_checkin_sales_channels": [
|
||||
"pretixpos"
|
||||
]
|
||||
@@ -269,6 +279,7 @@ Endpoints
|
||||
"subevent": null,
|
||||
"allow_multiple_entries": false,
|
||||
"allow_entry_after_exit": true,
|
||||
"addon_match": false,
|
||||
"auto_checkin_sales_channels": [
|
||||
"pretixpos"
|
||||
]
|
||||
@@ -323,6 +334,7 @@ Endpoints
|
||||
"subevent": null,
|
||||
"allow_multiple_entries": false,
|
||||
"allow_entry_after_exit": true,
|
||||
"addon_match": false,
|
||||
"auto_checkin_sales_channels": [
|
||||
"pretixpos"
|
||||
]
|
||||
@@ -415,6 +427,9 @@ Order position endpoints
|
||||
* If ``attendee_name`` is empty, it will automatically fall back to values from a parent product or from invoice
|
||||
addresses.
|
||||
|
||||
You can use this endpoint to implement a ticket search. We also provide a dedicated search input as part of our
|
||||
:ref:`check-in API <rest-checkin>` that supports search across multiple events.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
@@ -604,15 +619,23 @@ Order position endpoints
|
||||
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
|
||||
:statuscode 404: The requested order position or check-in list does not exist.
|
||||
|
||||
.. _`rest-checkin-redeem`:
|
||||
|
||||
.. http:post:: /api/v1/organizers/(organizer)/events/(event)/checkinlists/(list)/positions/(id)/redeem/
|
||||
|
||||
Tries to redeem an order position, identified by its internal ID, i.e. checks the attendee in. This endpoint
|
||||
accepts a number of optional requests in the body.
|
||||
|
||||
**Tip:** Instead of an ID, you can also use the ``secret`` field as the lookup parameter.
|
||||
**Tip:** Instead of an ID, you can also use the ``secret`` field as the lookup parameter. In this case, you should
|
||||
always set ``untrusted_input=true`` as a query parameter to avoid security issues.
|
||||
|
||||
.. note::
|
||||
|
||||
We no longer recommend using this API if you're building a ticket scanning application, as it has a few design
|
||||
flaws that can lead to `security issues`_ or compatibility issues due to barcode content characters that are not
|
||||
URL-safe. We recommend using our new :ref:`check-in API <rest-checkin>` instead.
|
||||
|
||||
:query boolean untrusted_input: If set to true, the lookup parameter is **always** interpreted as a ``secret``, never
|
||||
as an ``id``. This should be always set if you are passing through untrusted, scanned
|
||||
data to avoid guessing of ticket IDs.
|
||||
:<json boolean questions_supported: When this parameter is set to ``true``, handling of questions is supported. If
|
||||
you do not implement question handling in your user interface, you **must**
|
||||
set this to ``false``. In that case, questions will just be ignored. Defaults
|
||||
@@ -733,12 +756,15 @@ Order position endpoints
|
||||
|
||||
Possible error reasons:
|
||||
|
||||
* ``unpaid`` - Ticket is not paid for
|
||||
* ``canceled`` – Ticket is canceled or expired. This reason is only sent when your request sets
|
||||
* ``invalid`` - Ticket code not known.
|
||||
* ``unpaid`` - Ticket is not paid for.
|
||||
* ``canceled`` – Ticket is canceled or expired. This reason is only sent when your request sets
|
||||
``canceled_supported`` to ``true``, otherwise these orders return ``unpaid``.
|
||||
* ``already_redeemed`` - Ticket already has been redeemed
|
||||
* ``product`` - Tickets with this product may not be scanned at this device
|
||||
* ``rules`` - Check-in prevented by a user-defined rule
|
||||
* ``already_redeemed`` - Ticket already has been redeemed.
|
||||
* ``product`` - Tickets with this product may not be scanned at this device.
|
||||
* ``rules`` - Check-in prevented by a user-defined rule.
|
||||
* ``ambiguous`` - Multiple tickets match scan, rejected.
|
||||
* ``revoked`` - Ticket code has been revoked.
|
||||
|
||||
In case of reason ``rules``, there might be an additional response field ``reason_explanation`` with a human-readable
|
||||
description of the violated rules. However, that field can also be missing or be ``null``.
|
||||
@@ -752,3 +778,6 @@ Order position endpoints
|
||||
:statuscode 401: Authentication failure
|
||||
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
|
||||
:statuscode 404: The requested order position or check-in list does not exist.
|
||||
|
||||
|
||||
.. _security issues: https://pretix.eu/about/de/blog/20220705-release-4111/
|
||||
@@ -14,7 +14,10 @@ The customer resource contains the following public fields:
|
||||
Field Type Description
|
||||
===================================== ========================== =======================================================
|
||||
identifier string Internal ID of the customer
|
||||
external_identifier string External ID of the customer (or ``null``)
|
||||
external_identifier string External ID of the customer (or ``null``). This field can
|
||||
be changed for customers created manually or through
|
||||
the API, but is read-only for customers created through a
|
||||
SSO integration.
|
||||
email string Customer email address
|
||||
name string Name of this customer (or ``null``)
|
||||
name_parts object of strings Decomposition of name (i.e. given name, family name)
|
||||
@@ -26,10 +29,16 @@ date_joined datetime Date and time o
|
||||
locale string Preferred language of the customer
|
||||
last_modified datetime Date and time of modification of the record
|
||||
notes string Internal notes and comments (or ``null``)
|
||||
password string Can only be set during creation of a new customer, will
|
||||
not be included in any responses.
|
||||
===================================== ========================== =======================================================
|
||||
|
||||
.. versionadded:: 4.0
|
||||
|
||||
.. versionchanged:: 4.3
|
||||
|
||||
Passwords can now be set through the API during customer creation.
|
||||
|
||||
Endpoints
|
||||
---------
|
||||
|
||||
@@ -131,7 +140,9 @@ Endpoints
|
||||
|
||||
.. http:post:: /api/v1/organizers/(organizer)/customers/
|
||||
|
||||
Creates a new customer
|
||||
Creates a new customer. In addition to the fields defined on the resource, you can pass the field ``send_email``
|
||||
to control whether the system should send an account activation email with a password reset link (defaults to
|
||||
``false``).
|
||||
|
||||
**Example request**:
|
||||
|
||||
@@ -143,7 +154,9 @@ Endpoints
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"email": "test@example.org"
|
||||
"email": "test@example.org",
|
||||
"password": "verysecret",
|
||||
"send_email": true
|
||||
}
|
||||
|
||||
**Example response**:
|
||||
@@ -173,8 +186,8 @@ Endpoints
|
||||
the resource, other fields will be reset to default. With ``PATCH``, you only need to provide the fields that you
|
||||
want to change.
|
||||
|
||||
You can change all fields of the resource except the ``identifier``, ``last_login``, ``date_joined``, ``name``,
|
||||
and ``last_modified`` fields.
|
||||
You can change all fields of the resource except the ``identifier``, ``last_login``, ``date_joined``,
|
||||
``name`` (which is auto-generated from ``name_parts``), and ``last_modified`` fields.
|
||||
|
||||
**Example request**:
|
||||
|
||||
|
||||
@@ -182,7 +182,7 @@ endpoints:
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"download": "https://pretix.eu/api/v1/organizers/bigevents/events/sampleconf/orderlist/download/29891ede-196f-4942-9e26-d055a36e98b8/3f279f13-c198-4137-b49b-9b360ce9fcce/"
|
||||
"download": "https://pretix.eu/api/v1/organizers/bigevents/events/sampleconf/exporters/orderlist/download/29891ede-196f-4942-9e26-d055a36e98b8/3f279f13-c198-4137-b49b-9b360ce9fcce/"
|
||||
}
|
||||
|
||||
:param organizer: The ``slug`` field of the organizer to fetch
|
||||
|
||||
@@ -25,6 +25,7 @@ at :ref:`plugin-docs`.
|
||||
invoices
|
||||
vouchers
|
||||
discounts
|
||||
checkin
|
||||
checkinlists
|
||||
waitinglist
|
||||
customers
|
||||
@@ -37,6 +38,7 @@ at :ref:`plugin-docs`.
|
||||
webhooks
|
||||
seatingplans
|
||||
exporters
|
||||
shredders
|
||||
sendmail_rules
|
||||
billing_invoices
|
||||
billing_var
|
||||
@@ -60,6 +60,7 @@ invoice_address object Invoice address
|
||||
├ state string Customer state (ISO 3166-2 code). Only supported in
|
||||
AU, BR, CA, CN, MY, MX, and US.
|
||||
├ internal_reference string Customer's internal reference to be printed on the invoice
|
||||
├ custom_field string Custom invoice address field
|
||||
├ vat_id string Customer VAT ID
|
||||
└ vat_id_validated string ``true``, if the VAT ID has been validated against the
|
||||
EU VAT service and validation was successful. This only
|
||||
@@ -427,7 +428,7 @@ List of all orders
|
||||
``last_modified``, and ``status``. Default: ``datetime``
|
||||
:query string code: Only return orders that match the given order code
|
||||
:query string status: Only return orders in the given order status (see above)
|
||||
:query string search: Only return orders matching a given search query
|
||||
:query string search: Only return orders matching a given search query (matching for names, email addresses, and company names)
|
||||
:query integer item: Only return orders with a position that contains this item ID. *Warning:* Result will also include orders if they contain mixed items, and it will even return orders where the item is only contained in a canceled position.
|
||||
:query integer variation: Only return orders with a position that contains this variation ID. *Warning:* Result will also include orders if they contain mixed items and variations, and it will even return orders where the variation is only contained in a canceled position.
|
||||
:query boolean testmode: Only return orders with ``testmode`` set to ``true`` or ``false``
|
||||
@@ -852,7 +853,7 @@ Creating orders
|
||||
|
||||
You can supply the following fields of the resource:
|
||||
|
||||
* ``code`` (optional)
|
||||
* ``code`` (optional) – Only ``A-Z`` and ``0-9``, but without ``O`` and ``1``.
|
||||
* ``status`` (optional) – Defaults to pending for non-free orders and paid for free orders. You can only set this to
|
||||
``"n"`` for pending or ``"p"`` for paid. We will create a payment object for this order either in state ``created``
|
||||
or in state ``confirmed``, depending on this value. If you create a paid order, the ``order_paid`` signal will
|
||||
|
||||
@@ -89,7 +89,8 @@ Endpoints
|
||||
:query integer page: The page number in case of a multi-page result set, default is 1
|
||||
:query string ordering: Manually set the ordering of results. Valid fields to be used are ``id`` and ``position``.
|
||||
Default: ``position``
|
||||
:query integer subevent: Only return quotas of the sub-event with the given ID
|
||||
:query integer subevent: Only return quotas of the sub-event with the given ID.
|
||||
:query integer subevent__in: Only return quotas of sub-events with one of the given IDs (comma-separated).
|
||||
:query string with_availability: Set to ``true`` to get availability information. Can lead to increased answer times.
|
||||
:param organizer: The ``slug`` field of the organizer to fetch
|
||||
:param event: The ``slug`` field of the event to fetch
|
||||
|
||||
doc/api/resources/shredders.rst (new file, 177 lines)
@@ -0,0 +1,177 @@
|
||||
.. spelling:: checkin
|
||||
|
||||
Data shredders
|
||||
==============
|
||||
|
||||
pretix and its plugins include a number of data shredders that allow you to clear personal information from the system.
|
||||
This page shows you how to use these shredders through the API.
|
||||
|
||||
.. versionchanged:: 4.12
|
||||
|
||||
This feature has been added to the API.
|
||||
|
||||
.. warning::
|
||||
|
||||
Unlike the user interface, the API will not force you to download tax-relevant data before you delete it.
|
||||
|
||||
Listing available shredders
|
||||
---------------------------
|
||||
|
||||
.. http:get:: /api/v1/organizers/(organizer)/events/(event)/shredders/
|
||||
|
||||
Returns a list of all data shredders available for a given event.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /api/v1/organizers/bigevents/events/sampleconf/shredders/ HTTP/1.1
|
||||
Host: pretix.eu
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Vary: Accept
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"count": 1,
|
||||
"next": null,
|
||||
"previous": null,
|
||||
"results": [
|
||||
{
|
||||
"identifier": "question_answers",
|
||||
"verbose_name": "Answers to questions"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
:query integer page: The page number in case of a multi-page result set, default is 1
|
||||
:param organizer: The ``slug`` field of the organizer to fetch
|
||||
:param event: The ``slug`` field of the event to fetch
|
||||
:statuscode 200: no error
|
||||
:statuscode 401: Authentication failure
|
||||
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
|
||||
|
||||
Running an export
|
||||
-----------------
|
||||
|
||||
Before you can delete data, you need to start a data export.
|
||||
Since exports often include large data sets, they might take longer than the duration of an HTTP request. Therefore,
|
||||
creating an export is a two-step process. First you need to start an export task with one of the following API
|
||||
endpoints:
|
||||
|
||||
.. http:post:: /api/v1/organizers/(organizer)/events/(event)/shredders/export/
|
||||
|
||||
Starts an export task. If your input parameters validate correctly, a ``202 Accepted`` status code is returned.
|
||||
The body points you to the download URL of the result as well as the URL for the next step.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
POST /api/v1/organizers/bigevents/events/sampleconf/shredders/export/ HTTP/1.1
|
||||
Host: pretix.eu
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"shredders": ["question_answers"]
|
||||
}
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 202 Accepted
|
||||
Vary: Accept
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"download": "https://pretix.eu/api/v1/organizers/bigevents/events/sampleconf/shredders/download/29891ede-196f-4942-9e26-d055a36e98b8/3f279f13-c198-4137-b49b-9b360ce9fcce/",
|
||||
"shred": "https://pretix.eu/api/v1/organizers/bigevents/events/sampleconf/shredders/shred/29891ede-196f-4942-9e26-d055a36e98b8/3f279f13-c198-4137-b49b-9b360ce9fcce/"
|
||||
}
|
||||
|
||||
:param organizer: The ``slug`` field of the organizer to fetch
|
||||
:param event: The ``slug`` field of the event to fetch
|
||||
:param identifier: The ``identifier`` field of the exporter to run
|
||||
:statuscode 202: no error
|
||||
:statuscode 400: Invalid input options or event data is not ready to be deleted
|
||||
:statuscode 401: Authentication failure
|
||||
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
|
||||
|
||||
|
||||
Downloading the result
|
||||
----------------------
|
||||
|
||||
When starting an export, you receive a ``download`` URL for downloading the result. Running a ``GET`` request on that result will
|
||||
yield one of the following status codes:
|
||||
|
||||
* ``200 OK`` – The export succeeded. The body will be your resulting file. Might be large!
|
||||
* ``409 Conflict`` – Your export is still running. The body will be JSON with the structure ``{"status": "running"}``. ``status`` can be ``waiting`` before the task is actually being processed. Please retry, but wait at least one second before you do.
|
||||
* ``410 Gone`` – Running the export has failed permanently. The body will be JSON with the structure ``{"status": "failed", "message": "Error message"}``
|
||||
* ``404 Not Found`` – The export does not exist / is expired / belongs to a different API key.
|
||||
|
||||
|
||||
Shredding the data
|
||||
------------------
|
||||
|
||||
When starting an export, you receive a ``shred`` URL for actually shredding the data.
|
||||
You can only start the actual shredding process after the export file has been generated; however, you are not forced to download
|
||||
the file (we'd recommend it in most cases, though).
|
||||
The download will no longer be possible after the shredding.
|
||||
Since shredding often requires deleting large data sets, it might take longer than the duration of an HTTP request.
|
||||
Therefore, shredding is again a two-step process. First you need to start a shredding task with one of the following API
|
||||
endpoints:
|
||||
|
||||
.. http:post:: /api/v1/organizers/(organizer)/events/(event)/shredders/shred/(id1)/(id2)/
|
||||
|
||||
Starts a shredding task. If your input parameters validate correctly, a ``202 Accepted`` status code is returned.
|
||||
The body points you to a URL you can use to check the status.
|
||||
|
||||
**Example request**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
POST /api/v1/organizers/bigevents/events/sampleconf/shredders/shred/29891ede-196f-4942-9e26-d055a36e98b8/3f279f13-c198-4137-b49b-9b360ce9fcce/ HTTP/1.1
|
||||
Host: pretix.eu
|
||||
Accept: application/json, text/javascript
|
||||
|
||||
**Example response**:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 202 Accepted
|
||||
Vary: Accept
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"status": "https://pretix.eu/api/v1/organizers/bigevents/events/sampleconf/shredders/status/29891ede-196f-4942-9e26-d055a36e98b8/3f279f13-c198-4137-b49b-9b360ce9fcce/"
|
||||
}
|
||||
|
||||
:param organizer: The ``slug`` field of the organizer to fetch
|
||||
:param event: The ``slug`` field of the event to fetch
|
||||
:param id1: Opaque value given to you in the previous response
|
||||
:param id2: Opaque value given to you in the previous response
|
||||
:statuscode 202: no error
|
||||
:statuscode 400: Invalid input options
|
||||
:statuscode 401: Authentication failure
|
||||
:statuscode 404: The export does not exist / is expired / belongs to a different API key.
|
||||
:statuscode 403: The requested organizer/event does not exist **or** you have no permission to view this resource.
|
||||
:statuscode 409: Your export is still running. The body will be JSON with the structure ``{"status": "running"}``. ``status`` can be ``waiting`` before the task is actually being processed. Please retry, but wait at least one second before you do.
|
||||
:statuscode 410: Either the job has timed out or running the export has failed permanently. The body will be JSON with the structure ``{"status": "failed", "message": "Error message"}``
|
||||
|
||||
|
||||
Checking the result
|
||||
-------------------
|
||||
|
||||
When starting to shred, you receive a ``status`` URL for checking for success.
|
||||
Running a ``GET`` request on that result will yield one of the following status codes:
|
||||
|
||||
* ``200 OK`` – The shredding succeeded.
|
||||
* ``409 Conflict`` – Shredding is still running. The body will be JSON with the structure ``{"status": "running"}``. ``status`` can be ``waiting`` before the task is actually being processed. Please retry, but wait at least one second before you do.
|
||||
* ``410 Gone`` – We no longer know about this process, probably the process was started more than an hour ago. Might also occur after successful operations on small pretix installations without asynchronous task handling.
|
||||
* ``417 Expectation Failed`` – Running the export has failed permanently. The body will be JSON with the structure ``{"status": "failed", "message": "Error message"}``
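Putting the export, download, shred, and status steps together, a hedged end-to-end sketch (organizer, event, shredder identifier, token, and the output filename are placeholders; polling intervals are arbitrary):

.. sourcecode:: python

    # Sketch: run a shredder export, wait for the file, then trigger shredding.
    import time
    import requests

    API = "https://pretix.eu/api/v1"
    HEADERS = {"Authorization": "Token YOUR-API-TOKEN"}  # placeholder; substitute your own credentials
    BASE = f"{API}/organizers/bigevents/events/sampleconf/shredders"

    r = requests.post(f"{BASE}/export/", json={"shredders": ["question_answers"]},
                      headers=HEADERS)
    r.raise_for_status()
    urls = r.json()  # contains the "download" and "shred" URLs

    # Poll the download URL until the export is ready (409 means "still running").
    while True:
        dl = requests.get(urls["download"], headers=HEADERS)
        if dl.status_code != 409:
            dl.raise_for_status()  # 404/410 would indicate an expired or failed export
            break
        time.sleep(1)
    with open("shredder-export.dat", "wb") as f:  # keep a copy before deleting; format depends on the shredders
        f.write(dl.content)

    # Start shredding and poll the returned status URL until it reports success.
    sh = requests.post(urls["shred"], headers=HEADERS)
    sh.raise_for_status()
    status_url = sh.json()["status"]
    while True:
        st = requests.get(status_url, headers=HEADERS)
        if st.status_code == 409:
            time.sleep(1)
            continue
        st.raise_for_status()  # 410/417 would signal failure
        break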
|
||||
@@ -36,10 +36,16 @@ The following values for ``action_types`` are valid with pretix core:
|
||||
* ``pretix.event.order.canceled``
|
||||
* ``pretix.event.order.reactivated``
|
||||
* ``pretix.event.order.expired``
|
||||
* ``pretix.event.order.expirychanged``
|
||||
* ``pretix.event.order.modified``
|
||||
* ``pretix.event.order.contact.changed``
|
||||
* ``pretix.event.order.changed.*``
|
||||
* ``pretix.event.order.refund.created``
|
||||
* ``pretix.event.order.refund.created.externally``
|
||||
* ``pretix.event.order.refund.requested``
|
||||
* ``pretix.event.order.refund.done``
|
||||
* ``pretix.event.order.refund.canceled``
|
||||
* ``pretix.event.order.refund.failed``
|
||||
* ``pretix.event.order.approved``
|
||||
* ``pretix.event.order.denied``
|
||||
* ``pretix.event.checkin``
|
||||
@@ -50,6 +56,10 @@ The following values for ``action_types`` are valid with pretix core:
|
||||
* ``pretix.subevent.added``
|
||||
* ``pretix.subevent.changed``
|
||||
* ``pretix.subevent.deleted``
|
||||
* ``pretix.event.live.activated``
|
||||
* ``pretix.event.live.deactivated``
|
||||
* ``pretix.event.testmode.activated``
|
||||
* ``pretix.event.testmode.deactivated``
|
||||
|
||||
Installed plugins might register more valid values.
|
||||
|
||||
|
||||
@@ -92,9 +92,10 @@ If any other status code is returned, we will assume you did not receive the cal
|
||||
or ``304 Not Modified`` response will be treated as a failure. pretix will not follow any ``301`` or ``302`` redirect
|
||||
headers and pretix will ignore all other information in your response headers or body.
|
||||
|
||||
If we do not receive a status code in the range of ``200`` and ``299``, pretix will retry to deliver for up to three
|
||||
days with an exponential back off. Therefore, we recommend that you implement your endpoint in a way where calling it
|
||||
multiple times for the same event due to a perceived error does not do any harm.
|
||||
If we do not receive a status code in the range of ``200`` and ``299`` or do not receive any response within a 30 second
|
||||
time frame, pretix will retry to deliver for up to three days with an exponential back off. Therefore, we recommend that
|
||||
you implement your endpoint in a way where calling it multiple times for the same event due to a perceived error does
|
||||
not do any harm.
|
||||
|
||||
There is only one exception: If status code ``410 Gone`` is returned, we will assume the
|
||||
endpoint does not exist any more and automatically disable the webhook.
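A hedged sketch of a receiver that follows this advice, acknowledging quickly and deduplicating so that retried deliveries do no harm (Flask is used purely for illustration, and the payload fields referenced here are illustrative rather than a guaranteed schema):

.. sourcecode:: python

    # Sketch: acknowledge quickly and deduplicate, so retried deliveries are harmless.
    from flask import Flask, request

    app = Flask(__name__)
    seen = set()  # in production, use a persistent store instead of process memory

    def enqueue_for_processing(payload):
        # Hypothetical helper: do the real work asynchronously, outside the request.
        print("received", payload)

    @app.route("/pretix-webhook", methods=["POST"])
    def webhook():
        payload = request.get_json(force=True)
        # Build a deduplication key from whatever identifies the notification in your setup.
        key = (payload.get("organizer"), payload.get("code"), payload.get("action"))
        if key not in seen:
            seen.add(key)
            enqueue_for_processing(payload)
        return "", 200  # anything outside 200-299, or a slow response, triggers retries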
|
||||
|
||||
|
Image updated (before: 236 KiB, after: 274 KiB)
@@ -2,8 +2,25 @@
|
||||
|
||||
|
||||
partition "data-based check" {
|
||||
"Check based on local database" --> "Is the order in status PAID or PENDING\nand is the position not canceled?"
|
||||
"Check based on local database" -down-> "Is addon_match set to true?"
|
||||
--> if "" then
|
||||
-down->[no] "Is the order in status PAID or PENDING\nand is the position not canceled?"
|
||||
else
|
||||
-right->[yes] "Build a list that includes the position\nas well as all its add-ons"
|
||||
-down-> "Filter list for products that are part of the check-in list"
|
||||
--> if "" then
|
||||
-down->[one found] Proceed with the matching position
|
||||
--> "Is the order in status PAID or PENDING\nand is the position not canceled?"
|
||||
else
|
||||
--> if "" then
|
||||
-right->[none found] "Return error PRODUCT "
|
||||
else
|
||||
-down->[multiple found] Return error AMBIGUOUS
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
|
||||
"Is the order in status PAID or PENDING\nand is the position not canceled?" --> if "" then
|
||||
-right->[no] "Return error CANCELED"
|
||||
else
|
||||
-down->[yes] "Is the product part of the check-in list?"
|
||||
|
||||
|
Image updated (before: 147 KiB, after: 175 KiB)
@@ -19,8 +19,25 @@ else
|
||||
endif
|
||||
|
||||
|
||||
===CHECK=== -down-> "Is the order in status PAID or PENDING\nand is the position not canceled?"
|
||||
===CHECK=== -down-> "Is addon_match set to true?"
|
||||
--> if "" then
|
||||
-down->[no] "Is the order in status PAID or PENDING\nand is the position not canceled?"
|
||||
else
|
||||
-right->[yes] "Build a list that includes the position\nas well as all its add-ons"
|
||||
-down-> "Filter list for products that are part of the check-in list"
|
||||
--> if "" then
|
||||
-down->[one found] Proceed with the matching position
|
||||
--> "Is the order in status PAID or PENDING\nand is the position not canceled?"
|
||||
else
|
||||
--> if "" then
|
||||
-right->[none found] "Return error PRODUCT "
|
||||
else
|
||||
-down->[multiple found] Return error AMBIGUOUS
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
|
||||
"Is the order in status PAID or PENDING\nand is the position not canceled?" --> if "" then
|
||||
-right->[no] "Return error CANCELED"
|
||||
else
|
||||
-down->[yes] "Is the product part of the check-in list?"
|
||||
|
||||
@@ -532,6 +532,7 @@ event object Object describi
|
||||
├ imprint_url string URL to legal notice page. If not ``null``, a button in the app should link to this page.
|
||||
├ privacy_url string URL to privacy notice page. If not ``null``, a button in the app should link to this page.
|
||||
├ help_url string URL to help page. If not ``null``, a button in the app should link to this page.
|
||||
├ terms_url string URL to terms of service. If not ``null``, a button in the app should link to this page.
|
||||
├ logo_url string URL to event logo. If not ``null``, this logo may be shown in the app.
|
||||
├ slug string Event short form
|
||||
└ organizer string Organizer short form
|
||||
@@ -567,6 +568,7 @@ scan_types list of objects Only used for a
|
||||
"imprint_url": null,
|
||||
"privacy_url": null,
|
||||
"help_url": null,
|
||||
"terms_url": null,
|
||||
"logo_url": null,
|
||||
"organizer": "sampleconf"
|
||||
},
|
||||
|
||||
BIN doc/screens/organizer/customer.png (new file, 96 KiB)
BIN doc/screens/organizer/customer_edit.png (new file, 69 KiB)
BIN doc/screens/organizer/customer_ssoclient_add.png (new file, 76 KiB)
BIN doc/screens/organizer/customer_ssoprovider_add.png (new file, 87 KiB)
BIN doc/screens/organizer/customers.png (new file, 104 KiB)
BIN doc/screens/organizer/edit_customer.png (new file, 98 KiB)
@@ -66,6 +66,7 @@ iterable
|
||||
Jimdo
|
||||
jwt
|
||||
JWT
|
||||
JWTs
|
||||
libpretixprint
|
||||
libsass
|
||||
linters
|
||||
@@ -88,7 +89,9 @@ nginx
|
||||
nodejs
|
||||
NotificationType
|
||||
npm
|
||||
OIDC
|
||||
ons
|
||||
OpenID
|
||||
optimizations
|
||||
overpayment
|
||||
param
|
||||
@@ -133,6 +136,7 @@ serializer
|
||||
serializers
|
||||
sexualized
|
||||
SQL
|
||||
SSO
|
||||
startup
|
||||
stdout
|
||||
stylesheet
|
||||
@@ -159,6 +163,8 @@ untrusted
|
||||
uptime
|
||||
username
|
||||
url
|
||||
URI
|
||||
URIs
|
||||
validator
|
||||
versa
|
||||
versioning
|
||||
|
||||
210
doc/user/customers/index.rst
Normal file
@@ -0,0 +1,210 @@
|
||||
.. _customers:
|
||||
|
||||
Customer accounts
|
||||
=================
|
||||
|
||||
By default, pretix only offers guest checkout, i.e. ticket buyers do not sign up and sign back in, but create a new
|
||||
checkout session every time. In some situations it may be convenient to allow ticket buyers to create
|
||||
accounts that they can later log in to again. Working with customer accounts is even required for some advanced
|
||||
use cases such as described in the :ref:`seasontickets` article.
|
||||
|
||||
Enabling customer accounts
|
||||
--------------------------
|
||||
|
||||
To enable customer accounts, head to your organizer page in the backend and then select "Settings" → "General" →
|
||||
"Customer accounts" and turn on the checkbox "Allow customers to create accounts".
|
||||
|
||||
Using the other settings on the same tab you can fine-tune how the customer account system behaves:
|
||||
|
||||
.. thumbnail:: ../../screens/organizer/edit_customer.png
|
||||
:align: center
|
||||
:class: screenshot
|
||||
|
||||
Allow customers to log in with email address and password
|
||||
In all simple setups, this option should be checked. If this checkbox is unchecked, it is impossible to log in or
|
||||
sign up unless you connect an SSO provider (see below).
|
||||
|
||||
Match orders based on email address
|
||||
If this option is selected, customers will see orders made with their email address within their account even if
|
||||
they did not make those orders while logged in.
|
||||
|
||||
Name format, Allowed titles
|
||||
This controls how we'll ask your customers for their name, similar to the respective settings on event level.
|
||||
|
||||
Managing customer accounts
|
||||
--------------------------
|
||||
|
||||
After customer accounts have been enabled, you will find a new menu option "Customer accounts" in the organizer-level
|
||||
main menu. The first sub-item, "Customers", allows you to search and inspect the list of your customer accounts, as well
|
||||
as to create a new customer account from the backend:
|
||||
|
||||
.. thumbnail:: ../../screens/organizer/customers.png
|
||||
:align: center
|
||||
:class: screenshot
|
||||
|
||||
If you click on a customer ID, you can see all details of this customer account, including registration information,
|
||||
active memberships, past ticket orders, and account history:
|
||||
|
||||
.. thumbnail:: ../../screens/organizer/customer.png
|
||||
:align: center
|
||||
:class: screenshot
|
||||
|
||||
You can also perform various actions from this view, such as:
|
||||
|
||||
- Send a password reset link
|
||||
- Change registration information
|
||||
- Anonymize the customer account (does not anonymize connected orders)
|
||||
|
||||
When creating or changing a customer, you will be presented with the following form:
|
||||
|
||||
.. thumbnail:: ../../screens/organizer/customer_edit.png
|
||||
:align: center
|
||||
:class: screenshot
|
||||
|
||||
Most fields, such as name, e-mail address, phone number, and language, should be self-explanatory. The following fields
|
||||
might require some explanation:
|
||||
|
||||
Account active
|
||||
If this checkbox is unchecked, the customer will not be able to log in.
|
||||
|
||||
External identifier
|
||||
This field can be used to cross-reference your customer database with other sources. For example, if the customer
|
||||
already has a number in another system, you can insert that number here. This can be especially powerful if you
|
||||
use our API for synchronization with an external system; a sketch of such a call follows after this list.
|
||||
|
||||
Verified email address
|
||||
This checkbox signifies whether you have verified that this customer in fact controls the given email address.
|
||||
This will automatically be checked after a successful registration or after a successful password reset. Before it
|
||||
is checked, the customer will not be able to log in. You should usually not modify this field manually.
|
||||
|
||||
Notes
|
||||
Entries in this field will only be visible to you and your team, not to the customer.
|
||||
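The following is a minimal, non-authoritative sketch of the API-based synchronization mentioned under "External
identifier" above. The organizer-level customers endpoint, the token header, and all placeholder values are
assumptions used only for illustration; please consult the API reference before relying on them::

    # Hedged sketch: update a customer's external identifier via the REST API.
    # Organizer short form, customer identifier and token are placeholders.
    import requests

    API_TOKEN = "..."        # an organizer API token with customer access
    ORGANIZER = "myorgname"  # organizer short form
    CUSTOMER = "ABC1234"     # customer identifier as shown in the backend

    r = requests.patch(
        f"https://pretix.eu/api/v1/organizers/{ORGANIZER}/customers/{CUSTOMER}/",
        headers={"Authorization": f"Token {API_TOKEN}"},
        json={"external_identifier": "CRM-12345"},
    )
    r.raise_for_status()
    print(r.json().get("external_identifier"))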
|
||||
Single-Sign-On (SSO)
|
||||
--------------------
|
||||
|
||||
"Single-Sign-On" (SSO) is a technical term for a situation in which a person can log in to multiple systems using just
|
||||
one login. This can be convenient if you have multiple applications that are exposed to your customers: They won't have
|
||||
to remember multiple passwords or understand how your application landscape is structured; they can just always log in
|
||||
with the same credentials whenever they see your brand.
|
||||
|
||||
In this scenario, pretix can be **either** the "SSO provider" **or** the "SSO client".
|
||||
If pretix is the SSO provider, pretix will be the central source of truth for your customer accounts and your other
|
||||
applications can connect to pretix to use pretix's login functionality.
|
||||
If pretix is the SSO client, one of your existing systems will be the source of truth for the customer accounts and
|
||||
pretix will use that system's login functionality.
|
||||
|
||||
All SSO support for customer accounts in pretix is currently built on the `OpenID Connect`_ standard, a modern and
|
||||
widely accepted standard for SSO in all industries.
|
||||
|
||||
Connecting SSO clients (pretix as the SSO provider)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To connect an external application as an SSO client, go to "Customer accounts" → "SSO clients" → "Create a new SSO client"
|
||||
in your organizer account.
|
||||
|
||||
.. thumbnail:: ../../screens/organizer/customer_ssoclient_add.png
|
||||
:align: center
|
||||
:class: screenshot
|
||||
|
||||
You will need to fill out the following fields:
|
||||
|
||||
Active
|
||||
If this checkbox is unchecked, the SSO client cannot be used.
|
||||
|
||||
Application name
|
||||
The name of your external application, e.g. "digital event marketplace".
|
||||
|
||||
Client type
|
||||
For a server-side application which is able to store a secret that will be inaccessible to end users, choose
|
||||
"confidential". For a client-side application, such as many mobile apps, choose "public".
|
||||
|
||||
Grant type
|
||||
This value depends on the OpenID Connect implementation of your software.
|
||||
|
||||
Redirection URIs
|
||||
One or multiple URIs that the user may be redirected to after a successful or failed login.
|
||||
|
||||
Allowed access scopes
|
||||
The types of data the SSO client may access about the customer.
|
||||
|
||||
After you have submitted all data, you will receive a client ID as well as a client secret. The client secret is shown
|
||||
in the green success message and will only ever be shown once. If you need it again, use the option "Invalidate old
|
||||
client secret and generate a new one".
|
||||
|
||||
You will need the client ID and client secret to configure your external application. The application will also likely
|
||||
need some other information from you, such as your **issuer URI**. If you use pretix Hosted and your organizer account
|
||||
does not have a custom domain, your issuer will be ``https://pretix.eu/myorgname``, where ``myorgname`` is the short
|
||||
form of your organizer account. If you use a custom domain, such as ``tickets.mycompany.net``, then your issuer will be
|
||||
``https://tickets.mycompany.net``.
|
||||
|
||||
Technical details
|
||||
"""""""""""""""""
|
||||
|
||||
We implement `OpenID Connect Core 1.0`_, except for some optional parts that do not make sense for pretix or bring no
|
||||
additional value. For example, we do not currently support encrypted tokens, offline access, refresh tokens, or passing
|
||||
request parameters as JWTs.
|
||||
|
||||
We implement the provider metadata section from `OpenID Connect Discovery 1.0`_. You can find the endpoint relative
|
||||
to the issuer URI as described above, for example ``https://pretix.eu/demo/.well-known/openid-configuration``.
|
||||
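For illustration only (``demo`` below is a placeholder organizer short form, not a normative value), the discovery
document can be fetched with any HTTP client, for example::

    # Sketch: read pretix's OpenID Connect discovery document.
    import requests

    issuer = "https://pretix.eu/demo"  # placeholder issuer as described above
    conf = requests.get(f"{issuer}/.well-known/openid-configuration").json()
    print(conf["authorization_endpoint"])
    print(conf["token_endpoint"])
    print(conf["userinfo_endpoint"])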
|
||||
We implement all three OpenID Connect Core flows:
|
||||
|
||||
- Authorization Code Flow (response type ``code``)
|
||||
- Implicit Flow (response types ``id_token token`` and ``id_token``)
|
||||
- Hybrid Flow (response types ``code id_token``, ``code id_token token``, and ``code token``)
|
||||
|
||||
We implement the response modes ``query`` and ``fragment``.
|
||||
|
||||
We currently offer the following scopes: ``openid``, ``profile``, ``email``, and ``phone``.
|
||||
|
||||
We also include the following standardized claims: ``iss``, ``aud``, ``exp``, ``iat``, ``auth_time``, ``nonce``, ``c_hash``,
|
||||
``at_hash``, ``sub``, ``locale``, ``name``, ``given_name``, ``family_name``, ``middle_name``, ``nickname``, ``email``,
|
||||
``email_verified``, ``phone_number``.
|
||||
|
||||
The various endpoints are located relative to the issuer URI as described above:
|
||||
|
||||
- Authorization: ``<issuer>/oauth2/v1/authorize``
|
||||
- Token: ``<issuer>/oauth2/v1/token``
|
||||
- User info: ``<issuer>/oauth2/v1/userinfo``
|
||||
- Keys: ``<issuer>/oauth2/v1/keys``
|
||||
|
||||
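As a rough, non-authoritative sketch of the Authorization Code Flow against these endpoints (the issuer, client ID,
client secret, and redirect URI are placeholders taken from your own SSO client configuration, and error handling is
omitted)::

    # Sketch only: Authorization Code Flow using the endpoints listed above.
    from urllib.parse import urlencode

    import requests

    issuer = "https://pretix.eu/demo"                     # placeholder issuer
    client_id = "..."                                     # from your SSO client
    client_secret = "..."                                 # shown once on creation
    redirect_uri = "https://example.com/oauth/callback"   # a configured redirection URI

    # Step 1: send the user's browser to the authorization endpoint.
    authorize_url = f"{issuer}/oauth2/v1/authorize?" + urlencode({
        "response_type": "code",
        "client_id": client_id,
        "redirect_uri": redirect_uri,
        "scope": "openid profile email",
        "state": "opaque-random-state",
    })

    # Step 2: after the redirect back, exchange the received ?code=... for tokens.
    def exchange_code(code):
        r = requests.post(
            f"{issuer}/oauth2/v1/token",
            data={
                "grant_type": "authorization_code",
                "code": code,
                "redirect_uri": redirect_uri,
            },
            auth=(client_id, client_secret),
        )
        r.raise_for_status()
        return r.json()  # typically contains access_token and id_token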
We currently do not document these endpoints in detail here, as they follow the OpenID Connect and OAuth specifications
|
||||
without any special behavior.
|
||||
|
||||
Connecting SSO providers (pretix as the SSO client)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To connect an external application as an SSO provider, go to "Customer accounts" → "SSO providers" → "Create a new SSO provider"
|
||||
in your organizer account.
|
||||
|
||||
.. thumbnail:: ../../screens/organizer/customer_ssoprovider_add.png
|
||||
:align: center
|
||||
:class: screenshot
|
||||
|
||||
The "Provider name" and "Login button label" is what we'll use to show the new login option to the user. For the actual
|
||||
connection, we will require information such as the issuer URL, client ID, client secret, scope, and field (or claim)
|
||||
names that you will receive from your SSO provider.
|
||||
|
||||
.. note::
|
||||
|
||||
If you want your customers to *only* use your SSO provider, it makes sense to turn off the "Allow customers to log in
|
||||
with email address and password" setting (see above).
|
||||
|
||||
Technical details
|
||||
"""""""""""""""""
|
||||
|
||||
We assume that SSO providers fulfill the following requirements:
|
||||
|
||||
- Implementation according to `OpenID Connect Core 1.0`_.
|
||||
|
||||
- Published meta-data document at ``<issuer>/.well-known/openid-configuration`` as specified in `OpenID Connect Discovery 1.0`_.
|
||||
|
||||
- Support for Authorization code flow (``response_type=code``) with ``response_mode=query``.
|
||||
|
||||
- Support for client authentication using client ID and client secret and without public key cryptography.
|
||||
|
||||
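As a hedged sanity check (not an official tool; the issuer URL below is a placeholder for your external provider),
most of these requirements can be verified against the provider's published metadata document::

    # Sketch: verify a provider's discovery document against the requirements above.
    import requests

    issuer = "https://login.example.com"  # placeholder external provider
    meta = requests.get(f"{issuer}/.well-known/openid-configuration").json()

    assert "code" in meta["response_types_supported"]
    # response_modes_supported is optional; "query" is the default mode for "code".
    assert "query" in meta.get("response_modes_supported", ["query", "fragment"])
    # Client authentication with a client secret, no public key cryptography:
    auth_methods = meta.get("token_endpoint_auth_methods_supported", ["client_secret_basic"])
    assert "client_secret_basic" in auth_methods or "client_secret_post" in auth_methods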
|
||||
.. _OpenID Connect: https://en.wikipedia.org/wiki/OpenID#OpenID_Connect_(OIDC)
|
||||
.. _OpenID Connect Core 1.0: https://openid.net/specs/openid-connect-core-1_0.html
|
||||
.. _OpenID Connect Discovery 1.0: https://openid.net/specs/openid-connect-discovery-1_0.html
|
||||
@@ -78,7 +78,7 @@ Synchronization setting any
|
||||
----------------------------------------------- ----------------------------------- ----------------------------------------------------------------------- -----------------------------------------------------------------------
|
||||
Ticket secrets any Random Signed Random Signed
|
||||
=============================================== =================================== =================================== =================================== ================================= =====================================
|
||||
Scenario supported on platforms Android, Desktop, iOS Android, Desktop, iOS Android, Desktop Android, Desktop Android, Desktop
|
||||
Scenario supported on platforms Android, Desktop, iOS Android, Desktop, iOS Android, Desktop Android, Desktop, iOS Android, Desktop, iOS
|
||||
Synchronization speed for large data sets slow slow fast fast
|
||||
Tickets can be scanned yes yes yes no yes
|
||||
Ticket is valid after sale immediately next sync (~5 minutes) immediately never immediately
|
||||
@@ -90,6 +90,7 @@ Name and seat visible on scanner yes
|
||||
Order-specific check-in attention flag yes yes yes (except directly after sale) n/a no
|
||||
Ticket search by order code or name yes yes yes (except directly after sale) no no
|
||||
Check-in statistics on scanner yes yes mostly accurate no no
|
||||
Support for add-on check-in with main ticket yes yes yes (except directly after sale) no no
|
||||
=============================================== =================================== =================================== =================================== ================================= =====================================
|
||||
|
||||
.. _EdDSA: https://en.wikipedia.org/wiki/EdDSA#Ed25519
|
||||
|
||||
@@ -135,6 +135,10 @@ Alternatively, you can select one or more categories to be shown::
|
||||
|
||||
<pretix-widget event="https://pretix.eu/demo/democon/" categories="12,25"></pretix-widget>
|
||||
|
||||
Or variation IDs::
|
||||
|
||||
<pretix-widget event="https://pretix.eu/demo/democon/" variations="15,2,68"></pretix-widget>
|
||||
|
||||
Multi-event selection
|
||||
---------------------
|
||||
|
||||
@@ -407,7 +411,7 @@ Hosted or pretix Enterprise are active, you can pass the following fields:
|
||||
};
|
||||
</script>
|
||||
|
||||
If you use ```analytics.js` (Universal Analytics)::
|
||||
If you use ``analytics.js`` (Universal Analytics)::
|
||||
|
||||
<script>
|
||||
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
|
||||
|
||||
@@ -12,6 +12,7 @@ wanting to use pretix to sell tickets.
|
||||
events/settings
|
||||
events/structureguide
|
||||
events/widget
|
||||
customers/index
|
||||
events/giftcards
|
||||
faq
|
||||
markdown
|
||||
|
||||
@@ -14,6 +14,8 @@ recursive-include pretix/plugins/manualpayment/templates *
|
||||
recursive-include pretix/plugins/manualpayment/static *
|
||||
recursive-include pretix/plugins/paypal/templates *
|
||||
recursive-include pretix/plugins/paypal/static *
|
||||
recursive-include pretix/plugins/paypal2/templates *
|
||||
recursive-include pretix/plugins/paypal2/static *
|
||||
recursive-include pretix/plugins/pretixdroid/templates *
|
||||
recursive-include pretix/plugins/pretixdroid/static *
|
||||
recursive-include pretix/plugins/sendmail/templates *
|
||||
|
||||
@@ -19,4 +19,4 @@
|
||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
__version__ = "4.11.0.dev0"
|
||||
__version__ = "4.14.0.dev0"
|
||||
|
||||
@@ -68,6 +68,8 @@ class PretixScanSecurityProfile(AllowListSecurityProfile):
|
||||
('GET', 'api-v1:orderposition-pdf_image'),
|
||||
('GET', 'api-v1:event.settings'),
|
||||
('POST', 'api-v1:upload'),
|
||||
('POST', 'api-v1:checkinrpc.redeem'),
|
||||
('GET', 'api-v1:checkinrpc.search'),
|
||||
)
|
||||
|
||||
|
||||
@@ -98,6 +100,8 @@ class PretixScanNoSyncNoSearchSecurityProfile(AllowListSecurityProfile):
|
||||
('GET', 'api-v1:orderposition-pdf_image'),
|
||||
('GET', 'api-v1:event.settings'),
|
||||
('POST', 'api-v1:upload'),
|
||||
('POST', 'api-v1:checkinrpc.redeem'),
|
||||
('GET', 'api-v1:checkinrpc.search'),
|
||||
)
|
||||
|
||||
|
||||
@@ -129,6 +133,8 @@ class PretixScanNoSyncSecurityProfile(AllowListSecurityProfile):
|
||||
('GET', 'api-v1:orderposition-pdf_image'),
|
||||
('GET', 'api-v1:event.settings'),
|
||||
('POST', 'api-v1:upload'),
|
||||
('POST', 'api-v1:checkinrpc.redeem'),
|
||||
('GET', 'api-v1:checkinrpc.search'),
|
||||
)
|
||||
|
||||
|
||||
@@ -194,6 +200,8 @@ class PretixPosSecurityProfile(AllowListSecurityProfile):
|
||||
('GET', 'plugins:pretix_seating:event.plan'),
|
||||
('GET', 'plugins:pretix_seating:selection.simple'),
|
||||
('POST', 'api-v1:upload'),
|
||||
('POST', 'api-v1:checkinrpc.redeem'),
|
||||
('GET', 'api-v1:checkinrpc.search'),
|
||||
)
|
||||
|
||||
|
||||
|
||||
29
src/pretix/api/migrations/0008_webhookcallretry.py
Normal file
@@ -0,0 +1,29 @@
|
||||
# Generated by Django 3.2.12 on 2022-09-13 14:48
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pretixbase', '0218_checkinlist_addon_match'),
|
||||
('pretixapi', '0007_alter_webhookcall_target_url'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='WebHookCallRetry',
|
||||
fields=[
|
||||
('id', models.BigAutoField(primary_key=True, serialize=False)),
|
||||
('retry_not_before', models.DateTimeField(auto_now_add=True)),
|
||||
('retry_count', models.PositiveIntegerField(default=0)),
|
||||
('action_type', models.CharField(max_length=255)),
|
||||
('logentry', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='webhook_retries', to='pretixbase.logentry')),
|
||||
('webhook', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='retries', to='pretixapi.webhook')),
|
||||
],
|
||||
options={
|
||||
'unique_together': {('webhook', 'logentry')},
|
||||
},
|
||||
),
|
||||
]
|
||||
@@ -133,6 +133,18 @@ class WebHookCall(models.Model):
|
||||
ordering = ("-datetime",)
|
||||
|
||||
|
||||
class WebHookCallRetry(models.Model):
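    # Queued retry of a webhook delivery: references the webhook and the log
    # entry to re-deliver, counts previous attempts, and records the earliest
    # time the next attempt should run. At most one retry row exists per
    # (webhook, logentry) pair (see unique_together below).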
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
webhook = models.ForeignKey('WebHook', on_delete=models.CASCADE, related_name='retries')
|
||||
logentry = models.ForeignKey('pretixbase.LogEntry', on_delete=models.CASCADE, related_name='webhook_retries')
|
||||
retry_not_before = models.DateTimeField(auto_now_add=True)
|
||||
retry_count = models.PositiveIntegerField(default=0)
|
||||
action_type = models.CharField(max_length=255)
|
||||
|
||||
class Meta:
|
||||
unique_together = (('webhook', 'logentry'),)
|
||||
|
||||
|
||||
class ApiCall(models.Model):
|
||||
idempotency_key = models.CharField(max_length=190, db_index=True)
|
||||
auth_hash = models.CharField(max_length=190, db_index=True)
|
||||
|
||||
@@ -26,7 +26,7 @@ from rest_framework.exceptions import ValidationError
|
||||
from pretix.api.serializers.event import SubEventSerializer
|
||||
from pretix.api.serializers.i18n import I18nAwareModelSerializer
|
||||
from pretix.base.channels import get_all_sales_channels
|
||||
from pretix.base.models import CheckinList
|
||||
from pretix.base.models import Checkin, CheckinList
|
||||
|
||||
|
||||
class CheckinListSerializer(I18nAwareModelSerializer):
|
||||
@@ -37,7 +37,7 @@ class CheckinListSerializer(I18nAwareModelSerializer):
|
||||
model = CheckinList
|
||||
fields = ('id', 'name', 'all_products', 'limit_products', 'subevent', 'checkin_count', 'position_count',
|
||||
'include_pending', 'auto_checkin_sales_channels', 'allow_multiple_entries', 'allow_entry_after_exit',
|
||||
'rules', 'exit_all_at')
|
||||
'rules', 'exit_all_at', 'addon_match')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
@@ -78,3 +78,31 @@ class CheckinListSerializer(I18nAwareModelSerializer):
|
||||
CheckinList.validate_rules(data.get('rules'))
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class CheckinRPCRedeemInputSerializer(serializers.Serializer):
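    # Input for the organizer-level check-in RPC redeem endpoint: a barcode
    # ("secret") is redeemed against one or more check-in lists; __init__
    # restricts the selectable lists to the events the caller may access.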
|
||||
lists = serializers.PrimaryKeyRelatedField(required=True, many=True, queryset=CheckinList.objects.none())
|
||||
secret = serializers.CharField(required=True, allow_null=False)
|
||||
force = serializers.BooleanField(default=False, required=False)
|
||||
type = serializers.ChoiceField(choices=Checkin.CHECKIN_TYPES, default=Checkin.TYPE_ENTRY)
|
||||
ignore_unpaid = serializers.BooleanField(default=False, required=False)
|
||||
questions_supported = serializers.BooleanField(default=True, required=False)
|
||||
nonce = serializers.CharField(required=False, allow_null=True)
|
||||
datetime = serializers.DateTimeField(required=False, allow_null=True)
|
||||
answers = serializers.JSONField(required=False, allow_null=True)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['lists'].child_relation.queryset = CheckinList.objects.filter(event__in=self.context['events']).select_related('event')
|
||||
|
||||
|
||||
class MiniCheckinListSerializer(I18nAwareModelSerializer):
|
||||
event = serializers.SlugRelatedField(slug_field='slug', read_only=True)
|
||||
subevent = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = CheckinList
|
||||
fields = ('id', 'name', 'event', 'subevent', 'include_pending')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@@ -54,7 +54,10 @@ from pretix.base.models.items import SubEventItem, SubEventItemVariation
|
||||
from pretix.base.services.seating import (
|
||||
SeatProtected, generate_seats, validate_plan_change,
|
||||
)
|
||||
from pretix.base.settings import LazyI18nStringList, validate_event_settings
|
||||
from pretix.base.settings import (
|
||||
PERSON_NAME_SALUTATIONS, PERSON_NAME_SCHEMES, PERSON_NAME_TITLE_GROUPS,
|
||||
LazyI18nStringList, validate_event_settings,
|
||||
)
|
||||
from pretix.base.signals import api_event_settings_fields
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -762,6 +765,7 @@ class EventSettingsSerializer(SettingsSerializer):
|
||||
'cancel_allow_user_paid_adjust_fees_step',
|
||||
'cancel_allow_user_paid_refund_as_giftcard',
|
||||
'cancel_allow_user_paid_require_approval',
|
||||
'cancel_allow_user_paid_require_approval_fee_unknown',
|
||||
'change_allow_user_variation',
|
||||
'change_allow_user_addons',
|
||||
'change_allow_user_until',
|
||||
@@ -776,6 +780,7 @@ class EventSettingsSerializer(SettingsSerializer):
|
||||
'logo_image_large',
|
||||
'logo_show_title',
|
||||
'og_image',
|
||||
'name_scheme',
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
@@ -841,4 +846,25 @@ class DeviceEventSettingsSerializer(EventSettingsSerializer):
|
||||
'invoice_address_from_country',
|
||||
'invoice_address_from_tax_id',
|
||||
'invoice_address_from_vat_id',
|
||||
'name_scheme',
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['_name_scheme_fields'] = serializers.JSONField(
|
||||
read_only=True,
|
||||
default=[{"key": k, "label": str(v), "weight": w} for k, v, w, *__ in PERSON_NAME_SCHEMES.get(self.event.settings.name_scheme)['fields']]
|
||||
)
|
||||
self.fields['_name_scheme_salutations'] = serializers.JSONField(
|
||||
read_only=True,
|
||||
default=[{"key": k, "label": str(v)} for k, v in PERSON_NAME_SALUTATIONS]
|
||||
)
|
||||
self.fields['_name_scheme_titles'] = serializers.JSONField(
|
||||
read_only=True,
|
||||
default=(
|
||||
[{"key": k, "label": k}
|
||||
for k in PERSON_NAME_TITLE_GROUPS.get(self.event.settings.name_scheme_titles)[1]]
|
||||
if self.event.settings.name_scheme_titles
|
||||
else []
|
||||
)
|
||||
)
|
||||
|
||||
@@ -184,8 +184,9 @@ class ItemSerializer(I18nAwareModelSerializer):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['require_membership_types'].queryset = self.context['event'].organizer.membership_types.all()
|
||||
self.fields['grant_membership_type'].queryset = self.context['event'].organizer.membership_types.all()
|
||||
if not self.read_only:
|
||||
self.fields['require_membership_types'].queryset = self.context['event'].organizer.membership_types.all()
|
||||
self.fields['grant_membership_type'].queryset = self.context['event'].organizer.membership_types.all()
|
||||
|
||||
def validate(self, data):
|
||||
data = super().validate(data)
|
||||
|
||||
@@ -92,7 +92,7 @@ class InvoiceAddressSerializer(I18nAwareModelSerializer):
|
||||
class Meta:
|
||||
model = InvoiceAddress
|
||||
fields = ('last_modified', 'is_business', 'company', 'name', 'name_parts', 'street', 'zipcode', 'city', 'country',
|
||||
'state', 'vat_id', 'vat_id_validated', 'internal_reference')
|
||||
'state', 'vat_id', 'vat_id_validated', 'custom_field', 'internal_reference')
|
||||
read_only_fields = ('last_modified',)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
@@ -341,10 +341,10 @@ class PdfDataSerializer(serializers.Field):
|
||||
# we serialize a list.
|
||||
|
||||
if 'vars' not in self.context:
|
||||
self.context['vars'] = get_variables(self.context['request'].event)
|
||||
self.context['vars'] = get_variables(self.context['event'])
|
||||
|
||||
if 'vars_images' not in self.context:
|
||||
self.context['vars_images'] = get_images(self.context['request'].event)
|
||||
self.context['vars_images'] = get_images(self.context['event'])
|
||||
|
||||
for k, f in self.context['vars'].items():
|
||||
try:
|
||||
@@ -422,7 +422,14 @@ class OrderPositionSerializer(I18nAwareModelSerializer):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
request = self.context.get('request')
|
||||
if request and (not request.query_params.get('pdf_data', 'false') == 'true' or 'can_view_orders' not in request.eventpermset):
|
||||
pdf_data_forbidden = (
|
||||
# We check this based on permission if we are on /events/…/orders/ or /events/…/orderpositions/ or
|
||||
# /events/…/checkinlists/…/positions/
|
||||
# We're unable to check this on this level if we're on /checkinrpc/, in which case we rely on the view
|
||||
# layer to not set pdf_data=true in the first place.
|
||||
request and hasattr(request, 'event') and 'can_view_orders' not in request.eventpermset
|
||||
)
|
||||
if ('pdf_data' in self.context and not self.context['pdf_data']) or pdf_data_forbidden:
|
||||
self.fields.pop('pdf_data', None)
|
||||
|
||||
def validate(self, data):
|
||||
@@ -481,13 +488,13 @@ class CheckinListOrderPositionSerializer(OrderPositionSerializer):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
if 'subevent' in self.context['request'].query_params.getlist('expand'):
|
||||
if 'subevent' in self.context['expand']:
|
||||
self.fields['subevent'] = SubEventSerializer(read_only=True)
|
||||
|
||||
if 'item' in self.context['request'].query_params.getlist('expand'):
|
||||
if 'item' in self.context['expand']:
|
||||
self.fields['item'] = ItemSerializer(read_only=True, context=self.context)
|
||||
|
||||
if 'variation' in self.context['request'].query_params.getlist('expand'):
|
||||
if 'variation' in self.context['expand']:
|
||||
self.fields['variation'] = InlineItemVariationSerializer(read_only=True)
|
||||
|
||||
|
||||
@@ -590,10 +597,10 @@ class OrderSerializer(I18nAwareModelSerializer):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
if not self.context['request'].query_params.get('pdf_data', 'false') == 'true':
|
||||
if not self.context['pdf_data']:
|
||||
self.fields['positions'].child.fields.pop('pdf_data', None)
|
||||
|
||||
for exclude_field in self.context['request'].query_params.getlist('exclude'):
|
||||
for exclude_field in self.context['exclude']:
|
||||
p = exclude_field.split('.')
|
||||
if p[0] in self.fields:
|
||||
if len(p) == 1:
|
||||
|
||||
@@ -396,7 +396,6 @@ class OrderChangeOperationSerializer(serializers.Serializer):
|
||||
def validate(self, data):
|
||||
seen_positions = set()
|
||||
for d in data.get('patch_positions', []):
|
||||
print(d, seen_positions)
|
||||
if d['position'] in seen_positions:
|
||||
raise ValidationError({'patch_positions': ['You have specified the same object twice.']})
|
||||
seen_positions.add(d['position'])
|
||||
|
||||
@@ -74,6 +74,20 @@ class CustomerSerializer(I18nAwareModelSerializer):
|
||||
fields = ('identifier', 'external_identifier', 'email', 'name', 'name_parts', 'is_active', 'is_verified', 'last_login', 'date_joined',
|
||||
'locale', 'last_modified', 'notes')
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
if instance and instance.provider_id:
|
||||
validated_data['external_identifier'] = instance.external_identifier
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
|
||||
class CustomerCreateSerializer(CustomerSerializer):
|
||||
send_email = serializers.BooleanField(default=False, required=False, allow_null=True)
|
||||
password = serializers.CharField(write_only=True, required=False, allow_null=True)
|
||||
|
||||
class Meta:
|
||||
model = Customer
|
||||
fields = CustomerSerializer.Meta.fields + ('send_email', 'password')
|
||||
|
||||
|
||||
class MembershipTypeSerializer(I18nAwareModelSerializer):
|
||||
|
||||
@@ -105,20 +119,21 @@ class GiftCardSerializer(I18nAwareModelSerializer):
|
||||
|
||||
def validate(self, data):
|
||||
data = super().validate(data)
|
||||
s = data['secret']
|
||||
qs = GiftCard.objects.filter(
|
||||
secret=s
|
||||
).filter(
|
||||
Q(issuer=self.context["organizer"]) | Q(
|
||||
issuer__gift_card_collector_acceptance__collector=self.context["organizer"])
|
||||
)
|
||||
if self.instance:
|
||||
qs = qs.exclude(pk=self.instance.pk)
|
||||
if qs.exists():
|
||||
raise ValidationError(
|
||||
{'secret': _(
|
||||
'A gift card with the same secret already exists in your or an affiliated organizer account.')}
|
||||
if 'secret' in data:
|
||||
s = data['secret']
|
||||
qs = GiftCard.objects.filter(
|
||||
secret=s
|
||||
).filter(
|
||||
Q(issuer=self.context["organizer"]) | Q(
|
||||
issuer__gift_card_collector_acceptance__collector=self.context["organizer"])
|
||||
)
|
||||
if self.instance:
|
||||
qs = qs.exclude(pk=self.instance.pk)
|
||||
if qs.exists():
|
||||
raise ValidationError(
|
||||
{'secret': _(
|
||||
'A gift card with the same secret already exists in your or an affiliated organizer account.')}
|
||||
)
|
||||
return data
|
||||
|
||||
class Meta:
|
||||
@@ -274,6 +289,7 @@ class TeamMemberSerializer(serializers.ModelSerializer):
|
||||
class OrganizerSettingsSerializer(SettingsSerializer):
|
||||
default_fields = [
|
||||
'customer_accounts',
|
||||
'customer_accounts_native',
|
||||
'customer_accounts_link_by_email',
|
||||
'invoice_regenerate_allowed',
|
||||
'contact_mail',
|
||||
|
||||
36
src/pretix/api/serializers/shredders.py
Normal file
@@ -0,0 +1,36 @@
|
||||
#
|
||||
# This file is part of pretix (Community Edition).
|
||||
#
|
||||
# Copyright (C) 2014-2020 Raphael Michel and contributors
|
||||
# Copyright (C) 2020-2021 rami.io GmbH and contributors
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
|
||||
# Public License as published by the Free Software Foundation in version 3 of the License.
|
||||
#
|
||||
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
|
||||
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
|
||||
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
|
||||
# this file, see <https://pretix.eu/about/en/license>.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
|
||||
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
from rest_framework import serializers
|
||||
|
||||
|
||||
class ShredderSerializer(serializers.Serializer):
|
||||
identifier = serializers.CharField()
|
||||
verbose_name = serializers.CharField()
|
||||
|
||||
|
||||
class JobRunSerializer(serializers.Serializer):
|
||||
shredders = serializers.MultipleChoiceField(choices=[])
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
shredders = kwargs.pop('shredders')
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields['shredders'].choices = ((k.identifier, k.identifier) for k in shredders)
|
||||
@@ -42,7 +42,8 @@ from pretix.api.views import cart
|
||||
|
||||
from .views import (
|
||||
checkin, device, discount, event, exporters, idempotency, item, oauth,
|
||||
order, organizer, upload, user, version, voucher, waitinglist, webhooks,
|
||||
order, organizer, shredders, upload, user, version, voucher, waitinglist,
|
||||
webhooks,
|
||||
)
|
||||
|
||||
router = routers.DefaultRouter()
|
||||
@@ -84,6 +85,7 @@ event_router.register(r'waitinglistentries', waitinglist.WaitingListViewSet)
|
||||
event_router.register(r'checkinlists', checkin.CheckinListViewSet)
|
||||
event_router.register(r'cartpositions', cart.CartPositionViewSet)
|
||||
event_router.register(r'exporters', exporters.EventExportersViewSet, basename='exporters')
|
||||
event_router.register(r'shredders', shredders.EventShreddersViewSet, basename='shredders')
|
||||
|
||||
checkinlist_router = routers.DefaultRouter()
|
||||
checkinlist_router.register(r'positions', checkin.CheckinListPositionViewSet, basename='checkinlistpos')
|
||||
@@ -112,6 +114,10 @@ for app in apps.get_app_configs():
|
||||
urlpatterns = [
|
||||
re_path(r'^', include(router.urls)),
|
||||
re_path(r'^organizers/(?P<organizer>[^/]+)/', include(orga_router.urls)),
|
||||
re_path(r'^organizers/(?P<organizer>[^/]+)/checkinrpc/redeem/$', checkin.CheckinRPCRedeemView.as_view(),
|
||||
name="checkinrpc.redeem"),
|
||||
re_path(r'^organizers/(?P<organizer>[^/]+)/checkinrpc/search/$', checkin.CheckinRPCSearchView.as_view(),
|
||||
name="checkinrpc.search"),
|
||||
re_path(r'^organizers/(?P<organizer>[^/]+)/settings/$', organizer.OrganizerSettingsView.as_view(),
|
||||
name="organizer.settings"),
|
||||
re_path(r'^organizers/(?P<organizer>[^/]+)/giftcards/(?P<giftcard>[^/]+)/', include(giftcard_router.urls)),
|
||||
@@ -132,6 +138,7 @@ urlpatterns = [
|
||||
re_path(r"^device/update$", device.UpdateView.as_view(), name="device.update"),
|
||||
re_path(r"^device/roll$", device.RollKeyView.as_view(), name="device.roll"),
|
||||
re_path(r"^device/revoke$", device.RevokeKeyView.as_view(), name="device.revoke"),
|
||||
re_path(r"^device/info$", device.InfoView.as_view(), name="device.info"),
|
||||
re_path(r"^device/eventselection$", device.EventSelectionView.as_view(), name="device.eventselection"),
|
||||
re_path(r"^idempotency_query$", idempotency.IdempotencyQueryView.as_view(), name="idempotency.query"),
|
||||
re_path(r"^upload$", upload.UploadView.as_view(), name="upload"),
|
||||
|
||||
@@ -19,12 +19,16 @@
|
||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
import operator
|
||||
from functools import reduce
|
||||
|
||||
import django_filters
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.exceptions import ValidationError as BaseValidationError
|
||||
from django.db import transaction
|
||||
from django.db.models import (
|
||||
Count, Exists, F, Max, OrderBy, OuterRef, Prefetch, Q, Subquery,
|
||||
prefetch_related_objects,
|
||||
)
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.http import Http404
|
||||
@@ -34,13 +38,18 @@ from django.utils.timezone import now
|
||||
from django_filters.rest_framework import DjangoFilterBackend, FilterSet
|
||||
from django_scopes import scopes_disabled
|
||||
from packaging.version import parse
|
||||
from rest_framework import viewsets
|
||||
from rest_framework import views, viewsets
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import PermissionDenied, ValidationError
|
||||
from rest_framework.fields import DateTimeField
|
||||
from rest_framework.generics import ListAPIView
|
||||
from rest_framework.permissions import SAFE_METHODS
|
||||
from rest_framework.response import Response
|
||||
|
||||
from pretix.api.serializers.checkin import CheckinListSerializer
|
||||
from pretix.api.serializers.checkin import (
|
||||
CheckinListSerializer, CheckinRPCRedeemInputSerializer,
|
||||
MiniCheckinListSerializer,
|
||||
)
|
||||
from pretix.api.serializers.item import QuestionSerializer
|
||||
from pretix.api.serializers.order import (
|
||||
CheckinListOrderPositionSerializer, FailedCheckinSerializer,
|
||||
@@ -50,7 +59,7 @@ from pretix.api.views.order import OrderPositionFilter
|
||||
from pretix.base.i18n import language
|
||||
from pretix.base.models import (
|
||||
CachedFile, Checkin, CheckinList, Device, Event, Order, OrderPosition,
|
||||
Question,
|
||||
Question, RevokedTicketSecret, TeamAPIToken,
|
||||
)
|
||||
from pretix.base.services.checkin import (
|
||||
CheckInError, RequiredQuestionsError, SQLLogic, perform_checkin,
|
||||
@@ -265,6 +274,399 @@ with scopes_disabled():
|
||||
return queryset.filter(SQLLogic(self.checkinlist).apply(self.checkinlist.rules))
|
||||
|
||||
|
||||
def _handle_file_upload(data, user, auth):
|
||||
try:
|
||||
cf = CachedFile.objects.get(
|
||||
session_key=f'api-upload-{str(type(user or auth))}-{(user or auth).pk}',
|
||||
file__isnull=False,
|
||||
pk=data[len("file:"):],
|
||||
)
|
||||
except (ValidationError, BaseValidationError, IndexError): # invalid uuid
|
||||
raise ValidationError('The submitted file ID "{fid}" was not found.'.format(fid=data))
|
||||
except CachedFile.DoesNotExist:
|
||||
raise ValidationError('The submitted file ID "{fid}" was not found.'.format(fid=data))
|
||||
|
||||
allowed_types = (
|
||||
'image/png', 'image/jpeg', 'image/gif', 'application/pdf'
|
||||
)
|
||||
if cf.type not in allowed_types:
|
||||
raise ValidationError('The submitted file "{fid}" has a file type that is not allowed in this field.'.format(fid=data))
|
||||
if cf.file.size > settings.FILE_UPLOAD_MAX_SIZE_OTHER:
|
||||
raise ValidationError('The submitted file "{fid}" is too large to be used in this field.'.format(fid=data))
|
||||
|
||||
return cf.file
|
||||
|
||||
|
||||
def _checkin_list_position_queryset(checkinlists, ignore_status=False, ignore_products=False, pdf_data=False, expand=None):
|
||||
list_by_event = {cl.event_id: cl for cl in checkinlists}
|
||||
if not checkinlists:
|
||||
raise ValidationError('No check-in list passed.')
|
||||
if len(list_by_event) != len(checkinlists):
|
||||
raise ValidationError('Selecting two check-in lists from the same event is unsupported.')
|
||||
|
||||
cqs = Checkin.objects.filter(
|
||||
position_id=OuterRef('pk'),
|
||||
list_id__in=[cl.pk for cl in checkinlists]
|
||||
).order_by().values('position_id').annotate(
|
||||
m=Max('datetime')
|
||||
).values('m')
|
||||
|
||||
qs = OrderPosition.objects.filter(
|
||||
order__event__in=list_by_event.keys(),
|
||||
).annotate(
|
||||
last_checked_in=Subquery(cqs)
|
||||
).prefetch_related('order__event', 'order__event__organizer')
|
||||
|
||||
lists_qs = []
|
||||
for checkinlist in checkinlists:
|
||||
list_q = Q(order__event_id=checkinlist.event_id)
|
||||
if checkinlist.subevent:
|
||||
list_q &= Q(subevent=checkinlist.subevent)
|
||||
if not ignore_status:
|
||||
list_q &= Q(order__status__in=[Order.STATUS_PAID, Order.STATUS_PENDING] if checkinlist.include_pending else [Order.STATUS_PAID])
|
||||
if not checkinlist.all_products and not ignore_products:
|
||||
list_q &= Q(item__in=checkinlist.limit_products.values_list('id', flat=True))
|
||||
lists_qs.append(list_q)
|
||||
|
||||
qs = qs.filter(reduce(operator.or_, lists_qs))
|
||||
|
||||
if pdf_data:
|
||||
qs = qs.prefetch_related(
|
||||
Prefetch(
|
||||
lookup='checkins',
|
||||
queryset=Checkin.objects.filter(list_id__in=[cl.pk for cl in checkinlists])
|
||||
),
|
||||
'answers', 'answers__options', 'answers__question',
|
||||
Prefetch('addons', OrderPosition.objects.select_related('item', 'variation')),
|
||||
Prefetch('order', Order.objects.select_related('invoice_address').prefetch_related(
|
||||
Prefetch(
|
||||
'event',
|
||||
Event.objects.select_related('organizer')
|
||||
),
|
||||
Prefetch(
|
||||
'positions',
|
||||
OrderPosition.objects.prefetch_related(
|
||||
Prefetch('checkins', queryset=Checkin.objects.all()),
|
||||
'item', 'variation', 'answers', 'answers__options', 'answers__question',
|
||||
)
|
||||
)
|
||||
))
|
||||
).select_related(
|
||||
'item', 'variation', 'item__category', 'addon_to', 'order', 'order__invoice_address', 'seat'
|
||||
)
|
||||
else:
|
||||
qs = qs.prefetch_related(
|
||||
Prefetch(
|
||||
lookup='checkins',
|
||||
queryset=Checkin.objects.filter(list_id__in=[cl.pk for cl in checkinlists])
|
||||
),
|
||||
'answers', 'answers__options', 'answers__question',
|
||||
Prefetch('addons', OrderPosition.objects.select_related('item', 'variation'))
|
||||
).select_related('item', 'variation', 'order', 'addon_to', 'order__invoice_address', 'order', 'seat')
|
||||
|
||||
if expand and 'subevent' in expand:
|
||||
qs = qs.prefetch_related(
|
||||
'subevent', 'subevent__event', 'subevent__subeventitem_set', 'subevent__subeventitemvariation_set',
|
||||
'subevent__seat_category_mappings', 'subevent__meta_values'
|
||||
)
|
||||
|
||||
if expand and 'item' in expand:
|
||||
qs = qs.prefetch_related('item', 'item__addons', 'item__bundles', 'item__meta_values',
|
||||
'item__variations').select_related('item__tax_rule')
|
||||
|
||||
if expand and 'variation' in expand:
|
||||
qs = qs.prefetch_related('variation')
|
||||
|
||||
return qs
|
||||
|
||||
|
||||
def _redeem_process(*, checkinlists, raw_barcode, answers_data, datetime, force, checkin_type, ignore_unpaid, nonce,
|
||||
untrusted_input, user, auth, expand, pdf_data, request, questions_supported, canceled_supported,
|
||||
legacy_url_support=False):
|
||||
if not checkinlists:
|
||||
raise ValidationError('No check-in list passed.')
|
||||
|
||||
list_by_event = {cl.event_id: cl for cl in checkinlists}
|
||||
prefetch_related_objects([cl for cl in checkinlists if not cl.all_products], 'limit_products')
|
||||
|
||||
device = auth if isinstance(auth, Device) else None
|
||||
gate = auth.gate if isinstance(auth, Device) else None
|
||||
|
||||
context = {
|
||||
'request': request,
|
||||
'expand': expand,
|
||||
}
|
||||
|
||||
def _make_context(context, event):
|
||||
return {
|
||||
**context,
|
||||
'event': op.order.event,
|
||||
'pdf_data': pdf_data and (
|
||||
user if user and user.is_authenticated else auth
|
||||
).has_event_permission(request.organizer, event, 'can_view_orders', request),
|
||||
}
|
||||
|
||||
common_checkin_args = dict(
|
||||
raw_barcode=raw_barcode,
|
||||
type=checkin_type,
|
||||
list=checkinlists[0],
|
||||
datetime=datetime,
|
||||
device=device,
|
||||
gate=gate,
|
||||
nonce=nonce,
|
||||
forced=force,
|
||||
)
|
||||
raw_barcode_for_checkin = None
|
||||
from_revoked_secret = False
|
||||
|
||||
# 1. Gather a list of positions that could be the one we are looking for, either from their ID, secret or
|
||||
# parent secret
|
||||
queryset = _checkin_list_position_queryset(checkinlists, pdf_data=pdf_data, ignore_status=True, ignore_products=True).order_by(
|
||||
F('addon_to').asc(nulls_first=True)
|
||||
)
|
||||
|
||||
q = Q(secret=raw_barcode)
|
||||
if any(cl.addon_match for cl in checkinlists):
|
||||
q |= Q(addon_to__secret=raw_barcode)
|
||||
if raw_barcode.isnumeric() and not untrusted_input and legacy_url_support:
|
||||
q |= Q(pk=raw_barcode)
|
||||
|
||||
op_candidates = list(queryset.filter(q))
|
||||
if not op_candidates and '+' in raw_barcode and legacy_url_support:
|
||||
# In application/x-www-form-urlencoded, a space ' ' can be encoded as '+' instead of '%20'.
|
||||
# `id`, however, is part of a path where this technically is not allowed. Old versions of our
|
||||
# scan apps still do it, so we try to work around it!
|
||||
q = Q(secret=raw_barcode.replace('+', ' '))
|
||||
if any(cl.addon_match for cl in checkinlists):
|
||||
q |= Q(addon_to__secret=raw_barcode.replace('+', ' '))
|
||||
op_candidates = list(queryset.filter(q))
|
||||
|
||||
# 2. Handle the "nothing found" case: Either it's really a bogus secret that we don't know (-> error), or it
|
||||
# might be a revoked one that we actually know (-> error, but with a better error message and logging, and
|
||||
# respecting the force option).
|
||||
if not op_candidates:
|
||||
revoked_matches = list(RevokedTicketSecret.objects.filter(event_id__in=list_by_event.keys(), secret=raw_barcode))
|
||||
if len(revoked_matches) == 0:
|
||||
checkinlists[0].event.log_action('pretix.event.checkin.unknown', data={
|
||||
'datetime': datetime,
|
||||
'type': checkin_type,
|
||||
'list': checkinlists[0].pk,
|
||||
'barcode': raw_barcode,
|
||||
'searched_lists': [cl.pk for cl in checkinlists]
|
||||
}, user=user, auth=auth)
|
||||
|
||||
for cl in checkinlists:
|
||||
for k, s in cl.event.ticket_secret_generators.items():
|
||||
try:
|
||||
parsed = s.parse_secret(raw_barcode)
|
||||
common_checkin_args.update({
|
||||
'raw_item': parsed.item,
|
||||
'raw_variation': parsed.variation,
|
||||
'raw_subevent': parsed.subevent,
|
||||
})
|
||||
except:
|
||||
pass
|
||||
|
||||
Checkin.objects.create(
|
||||
position=None,
|
||||
successful=False,
|
||||
error_reason=Checkin.REASON_INVALID,
|
||||
**common_checkin_args,
|
||||
)
|
||||
|
||||
if force and legacy_url_support and isinstance(auth, Device):
|
||||
# There was a bug in libpretixsync: If you scanned a ticket in offline mode that was
|
||||
# valid at the time but no longer exists at time of upload, the device would retry to
|
||||
# upload the same scan over and over again. Since we can't update all devices quickly,
|
||||
# here's a dirty workaround to make it stop.
|
||||
try:
|
||||
brand = auth.software_brand
|
||||
ver = parse(auth.software_version)
|
||||
legacy_mode = (
|
||||
(brand == 'pretixSCANPROXY' and ver < parse('0.0.3')) or
|
||||
(brand == 'pretixSCAN Android' and ver < parse('1.11.2')) or
|
||||
(brand == 'pretixSCAN' and ver < parse('1.11.2'))
|
||||
)
|
||||
if legacy_mode:
|
||||
return Response({
|
||||
'status': 'error',
|
||||
'reason': Checkin.REASON_ALREADY_REDEEMED,
|
||||
'reason_explanation': None,
|
||||
'require_attention': False,
|
||||
'__warning': 'Compatibility hack active due to detected old pretixSCAN version',
|
||||
}, status=400)
|
||||
except: # we don't care e.g. about invalid version numbers
|
||||
pass
|
||||
|
||||
return Response({
|
||||
'detail': 'Not found.', # for backwards compatibility
|
||||
'status': 'error',
|
||||
'reason': Checkin.REASON_INVALID,
|
||||
'reason_explanation': None,
|
||||
'require_attention': False,
|
||||
'list': MiniCheckinListSerializer(checkinlists[0]).data,
|
||||
}, status=404)
|
||||
elif revoked_matches and force:
|
||||
op_candidates = [revoked_matches[0].position]
|
||||
if list_by_event[revoked_matches[0].event_id].addon_match:
|
||||
op_candidates += list(revoked_matches[0].position.addons.all())
|
||||
raw_barcode_for_checkin = raw_barcode
|
||||
from_revoked_secret = True
|
||||
else:
|
||||
op = revoked_matches[0].position
|
||||
op.order.log_action('pretix.event.checkin.revoked', data={
|
||||
'datetime': datetime,
|
||||
'type': checkin_type,
|
||||
'list': list_by_event[revoked_matches[0].event_id].pk,
|
||||
'barcode': raw_barcode
|
||||
}, user=user, auth=auth)
|
||||
common_checkin_args['list'] = list_by_event[revoked_matches[0].event_id]
|
||||
Checkin.objects.create(
|
||||
position=op,
|
||||
successful=False,
|
||||
error_reason=Checkin.REASON_REVOKED,
|
||||
**common_checkin_args
|
||||
)
|
||||
return Response({
|
||||
'status': 'error',
|
||||
'reason': Checkin.REASON_REVOKED,
|
||||
'reason_explanation': None,
|
||||
'require_attention': False,
|
||||
'position': CheckinListOrderPositionSerializer(op, context=_make_context(context, revoked_matches[0].event)).data,
|
||||
'list': MiniCheckinListSerializer(list_by_event[revoked_matches[0].event_id]).data,
|
||||
}, status=400)
|
||||
|
||||
# 3. Handle the "multiple options found" case: Except for the unlikely case of a secret being also a valid primary
|
||||
# key on the same list, we're probably dealing with the ``addon_match`` case here and need to figure out
|
||||
# which add-on has the right product.
|
||||
if len(op_candidates) > 1:
|
||||
op_candidates_matching_product = [
|
||||
op for op in op_candidates
|
||||
if (
|
||||
(list_by_event[op.order.event_id].addon_match or op.secret == raw_barcode or legacy_url_support) and
|
||||
(list_by_event[op.order.event_id].all_products or op.item_id in {i.pk for i in list_by_event[op.order.event_id].limit_products.all()})
|
||||
)
|
||||
]
|
||||
|
||||
if len(op_candidates_matching_product) == 0:
|
||||
# None of the found add-ons has the correct product, too bad! We could just error out here, but
|
||||
# instead we just continue with *any* product and have it rejected by the check inside perform_checkin.
|
||||
# This has the advantage of a better error message.
|
||||
op_candidates = [op_candidates[0]]
|
||||
elif len(op_candidates_matching_product) > 1:
|
||||
# It's still ambiguous, so we'll error out.
|
||||
# We choose the first match (regardless of product) for the logging since it's most likely to be the
|
||||
# base product according to our order_by above.
|
||||
op = op_candidates[0]
|
||||
op.order.log_action('pretix.event.checkin.denied', data={
|
||||
'position': op.id,
|
||||
'positionid': op.positionid,
|
||||
'errorcode': Checkin.REASON_AMBIGUOUS,
|
||||
'reason_explanation': None,
|
||||
'force': force,
|
||||
'datetime': datetime,
|
||||
'type': checkin_type,
|
||||
'list': list_by_event[op.order.event_id].pk,
|
||||
}, user=user, auth=auth)
|
||||
common_checkin_args['list'] = list_by_event[op.order.event_id]
|
||||
Checkin.objects.create(
|
||||
position=op,
|
||||
successful=False,
|
||||
error_reason=Checkin.REASON_AMBIGUOUS,
|
||||
error_explanation=None,
|
||||
**common_checkin_args,
|
||||
)
|
||||
return Response({
|
||||
'status': 'error',
|
||||
'reason': Checkin.REASON_AMBIGUOUS,
|
||||
'reason_explanation': None,
|
||||
'require_attention': op.item.checkin_attention or op.order.checkin_attention,
|
||||
'position': CheckinListOrderPositionSerializer(op, context=_make_context(context, op.order.event)).data,
|
||||
'list': MiniCheckinListSerializer(list_by_event[op.order.event_id]).data,
|
||||
}, status=400)
|
||||
else:
|
||||
op_candidates = op_candidates_matching_product
|
||||
|
||||
op = op_candidates[0]
|
||||
common_checkin_args['list'] = list_by_event[op.order.event_id]
|
||||
|
||||
# 5. Pre-validate all incoming answers, handle file upload
|
||||
given_answers = {}
|
||||
if answers_data:
|
||||
for q in op.item.questions.filter(ask_during_checkin=True):
|
||||
if str(q.pk) in answers_data:
|
||||
try:
|
||||
if q.type == Question.TYPE_FILE:
|
||||
given_answers[q] = _handle_file_upload(answers_data[str(q.pk)], user, auth)
|
||||
else:
|
||||
given_answers[q] = q.clean_answer(answers_data[str(q.pk)])
|
||||
except (ValidationError, BaseValidationError):
|
||||
pass
|
||||
|
||||
# 6. Pass to our actual check-in logic
|
||||
with language(op.order.event.settings.locale):
|
||||
try:
|
||||
perform_checkin(
|
||||
op=op,
|
||||
clist=list_by_event[op.order.event_id],
|
||||
given_answers=given_answers,
|
||||
force=force,
|
||||
ignore_unpaid=ignore_unpaid,
|
||||
nonce=nonce,
|
||||
datetime=datetime,
|
||||
questions_supported=questions_supported,
|
||||
canceled_supported=canceled_supported,
|
||||
user=user,
|
||||
auth=auth,
|
||||
type=checkin_type,
|
||||
raw_barcode=raw_barcode_for_checkin,
|
||||
from_revoked_secret=from_revoked_secret,
|
||||
)
|
||||
except RequiredQuestionsError as e:
|
||||
return Response({
|
||||
'status': 'incomplete',
|
||||
'require_attention': op.item.checkin_attention or op.order.checkin_attention,
|
||||
'position': CheckinListOrderPositionSerializer(op, context=_make_context(context, op.order.event)).data,
|
||||
'questions': [
|
||||
QuestionSerializer(q).data for q in e.questions
|
||||
],
|
||||
'list': MiniCheckinListSerializer(list_by_event[op.order.event_id]).data,
|
||||
}, status=400)
|
||||
except CheckInError as e:
|
||||
op.order.log_action('pretix.event.checkin.denied', data={
|
||||
'position': op.id,
|
||||
'positionid': op.positionid,
|
||||
'errorcode': e.code,
|
||||
'reason_explanation': e.reason,
|
||||
'force': force,
|
||||
'datetime': datetime,
|
||||
'type': checkin_type,
|
||||
'list': list_by_event[op.order.event_id].pk,
|
||||
}, user=user, auth=auth)
|
||||
Checkin.objects.create(
|
||||
position=op,
|
||||
successful=False,
|
||||
error_reason=e.code,
|
||||
error_explanation=e.reason,
|
||||
**common_checkin_args,
|
||||
)
|
||||
return Response({
|
||||
'status': 'error',
|
||||
'reason': e.code,
|
||||
'reason_explanation': e.reason,
|
||||
'require_attention': op.item.checkin_attention or op.order.checkin_attention,
|
||||
'position': CheckinListOrderPositionSerializer(op, context=_make_context(context, op.order.event)).data,
|
||||
'list': MiniCheckinListSerializer(list_by_event[op.order.event_id]).data,
|
||||
}, status=400)
|
||||
else:
|
||||
return Response({
|
||||
'status': 'ok',
|
||||
'require_attention': op.item.checkin_attention or op.order.checkin_attention,
|
||||
'position': CheckinListOrderPositionSerializer(op, context=_make_context(context, op.order.event)).data,
|
||||
'list': MiniCheckinListSerializer(list_by_event[op.order.event_id]).data,
|
||||
}, status=201)
|
||||
|
||||
|
||||
class ExtendedBackend(DjangoFilterBackend):
|
||||
def get_filterset_kwargs(self, request, queryset, view):
|
||||
kwargs = super().get_filterset_kwargs(request, queryset, view)
|
||||
@@ -280,7 +682,7 @@ class CheckinListPositionViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
serializer_class = CheckinListOrderPositionSerializer
|
||||
queryset = OrderPosition.all.none()
|
||||
filter_backends = (ExtendedBackend, RichOrderingFilter)
|
||||
ordering = ('attendee_name_cached', 'positionid')
|
||||
ordering = (F('attendee_name_cached').asc(nulls_last=True), 'positionid')
|
||||
ordering_fields = (
|
||||
'order__code', 'order__datetime', 'positionid', 'attendee_name',
|
||||
'last_checked_in', 'order__email',
|
||||
@@ -309,6 +711,8 @@ class CheckinListPositionViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
def get_serializer_context(self):
|
||||
ctx = super().get_serializer_context()
|
||||
ctx['event'] = self.request.event
|
||||
ctx['expand'] = self.request.query_params.getlist('expand')
|
||||
ctx['pdf_data'] = self.request.query_params.get('pdf_data', 'false') == 'true'
|
||||
return ctx
|
||||
|
||||
def get_filterset_kwargs(self):
|
||||
@@ -319,80 +723,18 @@ class CheckinListPositionViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
@cached_property
|
||||
def checkinlist(self):
|
||||
try:
|
||||
return get_object_or_404(CheckinList, event=self.request.event, pk=self.kwargs.get("list"))
|
||||
return get_object_or_404(self.request.event.checkin_lists, pk=self.kwargs.get("list"))
|
||||
except ValueError:
|
||||
raise Http404()
|
||||
|
||||
def get_queryset(self, ignore_status=False, ignore_products=False):
|
||||
cqs = Checkin.objects.filter(
|
||||
position_id=OuterRef('pk'),
|
||||
list_id=self.checkinlist.pk
|
||||
).order_by().values('position_id').annotate(
|
||||
m=Max('datetime')
|
||||
).values('m')
|
||||
|
||||
qs = OrderPosition.objects.filter(
|
||||
order__event=self.request.event,
|
||||
).annotate(
|
||||
last_checked_in=Subquery(cqs)
|
||||
).prefetch_related('order__event', 'order__event__organizer')
|
||||
if self.checkinlist.subevent:
|
||||
qs = qs.filter(
|
||||
subevent=self.checkinlist.subevent
|
||||
)
|
||||
|
||||
if self.request.query_params.get('ignore_status', 'false') != 'true' and not ignore_status:
|
||||
qs = qs.filter(
|
||||
order__status__in=[Order.STATUS_PAID, Order.STATUS_PENDING] if self.checkinlist.include_pending else [Order.STATUS_PAID]
|
||||
)
|
||||
if self.request.query_params.get('pdf_data', 'false') == 'true':
|
||||
qs = qs.prefetch_related(
|
||||
Prefetch(
|
||||
lookup='checkins',
|
||||
queryset=Checkin.objects.filter(list_id=self.checkinlist.pk)
|
||||
),
|
||||
'answers', 'answers__options', 'answers__question',
|
||||
Prefetch('addons', OrderPosition.objects.select_related('item', 'variation')),
|
||||
Prefetch('order', Order.objects.select_related('invoice_address').prefetch_related(
|
||||
Prefetch(
|
||||
'event',
|
||||
Event.objects.select_related('organizer')
|
||||
),
|
||||
Prefetch(
|
||||
'positions',
|
||||
OrderPosition.objects.prefetch_related(
|
||||
Prefetch('checkins', queryset=Checkin.objects.all()),
|
||||
'item', 'variation', 'answers', 'answers__options', 'answers__question',
|
||||
)
|
||||
)
|
||||
))
|
||||
).select_related(
|
||||
'item', 'variation', 'item__category', 'addon_to', 'order', 'order__invoice_address', 'seat'
|
||||
)
|
||||
else:
|
||||
qs = qs.prefetch_related(
|
||||
Prefetch(
|
||||
lookup='checkins',
|
||||
queryset=Checkin.objects.filter(list_id=self.checkinlist.pk)
|
||||
),
|
||||
'answers', 'answers__options', 'answers__question',
|
||||
Prefetch('addons', OrderPosition.objects.select_related('item', 'variation'))
|
||||
).select_related('item', 'variation', 'order', 'addon_to', 'order__invoice_address', 'order', 'seat')
|
||||
|
||||
if not self.checkinlist.all_products and not ignore_products:
|
||||
qs = qs.filter(item__in=self.checkinlist.limit_products.values_list('id', flat=True))
|
||||
|
||||
if 'subevent' in self.request.query_params.getlist('expand'):
|
||||
qs = qs.prefetch_related(
|
||||
'subevent', 'subevent__event', 'subevent__subeventitem_set', 'subevent__subeventitemvariation_set',
|
||||
'subevent__seat_category_mappings', 'subevent__meta_values'
|
||||
)
|
||||
|
||||
if 'item' in self.request.query_params.getlist('expand'):
|
||||
qs = qs.prefetch_related('item', 'item__addons', 'item__bundles', 'item__meta_values', 'item__variations').select_related('item__tax_rule')
|
||||
|
||||
if 'variation' in self.request.query_params.getlist('expand'):
|
||||
qs = qs.prefetch_related('variation')
|
||||
qs = _checkin_list_position_queryset(
|
||||
[self.checkinlist],
|
||||
ignore_status=self.request.query_params.get('ignore_status', 'false') == 'true' or ignore_status,
|
||||
ignore_products=ignore_products,
|
||||
pdf_data=self.request.query_params.get('pdf_data', 'false') == 'true',
|
||||
expand=self.request.query_params.getlist('expand'),
|
||||
)
|
||||
|
||||
if 'pk' not in self.request.resolver_match.kwargs and 'can_view_orders' not in self.request.eventpermset \
|
||||
and len(self.request.query_params.get('search', '')) < 3:
|
||||
@@ -403,217 +745,153 @@ class CheckinListPositionViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
@action(detail=False, methods=['POST'], url_name='redeem', url_path='(?P<pk>.*)/redeem')
|
||||
def redeem(self, *args, **kwargs):
|
||||
force = bool(self.request.data.get('force', False))
|
||||
type = self.request.data.get('type', None) or Checkin.TYPE_ENTRY
|
||||
if type not in dict(Checkin.CHECKIN_TYPES):
|
||||
checkin_type = self.request.data.get('type', None) or Checkin.TYPE_ENTRY
|
||||
if checkin_type not in dict(Checkin.CHECKIN_TYPES):
|
||||
raise ValidationError("Invalid check-in type.")
|
||||
ignore_unpaid = bool(self.request.data.get('ignore_unpaid', False))
|
||||
nonce = self.request.data.get('nonce')
|
||||
untrusted_input = (
|
||||
self.request.GET.get('untrusted_input', '') not in ('0', 'false', 'False', '')
|
||||
or (isinstance(self.request.auth, Device) and 'pretixscan' in (self.request.auth.software_brand or '').lower())
|
||||
)
|
||||
|
||||
if 'datetime' in self.request.data:
|
||||
dt = DateTimeField().to_internal_value(self.request.data.get('datetime'))
|
||||
else:
|
||||
dt = now()
|
||||
|
||||
common_checkin_args = dict(
|
||||
raw_barcode=self.kwargs['pk'],
|
||||
type=type,
|
||||
list=self.checkinlist,
|
||||
answers_data = self.request.data.get('answers')
|
||||
return _redeem_process(
|
||||
checkinlists=[self.checkinlist],
|
||||
raw_barcode=kwargs['pk'],
|
||||
answers_data=answers_data,
|
||||
datetime=dt,
|
||||
device=self.request.auth if isinstance(self.request.auth, Device) else None,
|
||||
gate=self.request.auth.gate if isinstance(self.request.auth, Device) else None,
|
||||
force=force,
|
||||
checkin_type=checkin_type,
|
||||
ignore_unpaid=ignore_unpaid,
|
||||
nonce=nonce,
|
||||
forced=force,
|
||||
untrusted_input=untrusted_input,
|
||||
user=self.request.user,
|
||||
auth=self.request.auth,
|
||||
expand=self.request.query_params.getlist('expand'),
|
||||
pdf_data=self.request.query_params.get('pdf_data', 'false') == 'true',
|
||||
questions_supported=self.request.data.get('questions_supported', True),
|
||||
canceled_supported=self.request.data.get('canceled_supported', False),
|
||||
request=self.request, # this is not clean, but we need it in the serializers for URL generation
|
||||
legacy_url_support=True,
|
||||
)
|
||||
raw_barcode_for_checkin = None
|
||||
from_revoked_secret = False
|
||||
|
||||
try:
|
||||
queryset = self.get_queryset(ignore_status=True, ignore_products=True)
|
||||
if self.kwargs['pk'].isnumeric():
|
||||
op = queryset.get(Q(pk=self.kwargs['pk']) | Q(secret=self.kwargs['pk']))
|
||||
else:
|
||||
# In application/x-www-form-urlencoded, you can encode a space ' ' with '+' instead of '%20'.
# `id`, however, is part of a path where this technically is not allowed. Old versions of our
# scan apps still do it, so we try to work around it!
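# For illustration with a made-up value: urllib.parse.quote("AB CD") yields "AB%20CD" for a path
# segment, while quote_plus("AB CD") yields "AB+CD" in form-urlencoded data; unquote_plus("AB+CD")
# restores "AB CD", which is why the fallback lookup below replaces '+' with ' '.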
|
||||
try:
|
||||
op = queryset.get(secret=self.kwargs['pk'])
|
||||
except OrderPosition.DoesNotExist:
|
||||
op = queryset.get(secret=self.kwargs['pk'].replace('+', ' '))
|
||||
except OrderPosition.DoesNotExist:
|
||||
revoked_matches = list(self.request.event.revoked_secrets.filter(secret=self.kwargs['pk']))
|
||||
if len(revoked_matches) == 0:
|
||||
self.request.event.log_action('pretix.event.checkin.unknown', data={
|
||||
'datetime': dt,
|
||||
'type': type,
|
||||
'list': self.checkinlist.pk,
|
||||
'barcode': self.kwargs['pk']
|
||||
}, user=self.request.user, auth=self.request.auth)
|
||||
|
||||
for k, s in self.request.event.ticket_secret_generators.items():
|
||||
try:
|
||||
parsed = s.parse_secret(self.kwargs['pk'])
|
||||
common_checkin_args.update({
|
||||
'raw_item': parsed.item,
|
||||
'raw_variation': parsed.variation,
|
||||
'raw_subevent': parsed.subevent,
|
||||
})
|
||||
except:
|
||||
pass
|
||||
|
||||
Checkin.objects.create(
|
||||
position=None,
|
||||
successful=False,
|
||||
error_reason=Checkin.REASON_INVALID,
|
||||
**common_checkin_args,
|
||||
)
|
||||
|
||||
if force and isinstance(self.request.auth, Device):
|
||||
# There was a bug in libpretixsync: If you scanned a ticket in offline mode that was
# valid at the time but no longer exists at time of upload, the device would retry to
# upload the same scan over and over again. Since we can't update all devices quickly,
# here's a dirty workaround to make it stop.
|
||||
try:
|
||||
brand = self.request.auth.software_brand
|
||||
ver = parse(self.request.auth.software_version)
|
||||
legacy_mode = (
|
||||
(brand == 'pretixSCANPROXY' and ver < parse('0.0.3')) or
|
||||
(brand == 'pretixSCAN Android' and ver < parse('1.11.2')) or
|
||||
(brand == 'pretixSCAN' and ver < parse('1.11.2'))
|
||||
)
|
||||
if legacy_mode:
|
||||
return Response({
|
||||
'status': 'error',
|
||||
'reason': Checkin.REASON_ALREADY_REDEEMED,
|
||||
'reason_explanation': None,
|
||||
'require_attention': False,
|
||||
'__warning': 'Compatibility hack active due to detected old pretixSCAN version',
|
||||
}, status=400)
|
||||
except: # we don't care e.g. about invalid version numbers
|
||||
pass
|
||||
|
||||
return Response({
|
||||
'detail': 'Not found.', # for backwards compatibility
|
||||
'status': 'error',
|
||||
'reason': Checkin.REASON_INVALID,
|
||||
'reason_explanation': None,
|
||||
'require_attention': False,
|
||||
}, status=404)
|
||||
elif revoked_matches and force:
|
||||
op = revoked_matches[0].position
|
||||
raw_barcode_for_checkin = self.kwargs['pk']
|
||||
from_revoked_secret = True
|
||||
else:
|
||||
op = revoked_matches[0].position
|
||||
op.order.log_action('pretix.event.checkin.revoked', data={
|
||||
'datetime': dt,
|
||||
'type': type,
|
||||
'list': self.checkinlist.pk,
|
||||
'barcode': self.kwargs['pk']
|
||||
}, user=self.request.user, auth=self.request.auth)
|
||||
Checkin.objects.create(
|
||||
position=op,
|
||||
successful=False,
|
||||
error_reason=Checkin.REASON_REVOKED,
|
||||
**common_checkin_args
|
||||
)
|
||||
return Response({
|
||||
'status': 'error',
|
||||
'reason': Checkin.REASON_REVOKED,
|
||||
'reason_explanation': None,
|
||||
'require_attention': False,
|
||||
'position': CheckinListOrderPositionSerializer(op, context=self.get_serializer_context()).data
|
||||
}, status=400)
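# Regarding the compatibility gate further up, and assuming `parse` is packaging.version.parse
# (imported outside this hunk): a device reporting brand "pretixSCAN Android" and software version
# "1.11.1" satisfies parse("1.11.1") < parse("1.11.2"), so legacy_mode is True and a forced scan of
# an unknown barcode is answered with the 400 "already redeemed" response instead of a 404.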
|
||||
|
||||
given_answers = {}
|
||||
if 'answers' in self.request.data:
|
||||
aws = self.request.data.get('answers')
|
||||
for q in op.item.questions.filter(ask_during_checkin=True):
|
||||
if str(q.pk) in aws:
|
||||
try:
|
||||
if q.type == Question.TYPE_FILE:
|
||||
given_answers[q] = self._handle_file_upload(aws[str(q.pk)])
|
||||
else:
|
||||
given_answers[q] = q.clean_answer(aws[str(q.pk)])
|
||||
except ValidationError:
|
||||
pass
|
||||
|
||||
with language(self.request.event.settings.locale):
|
||||
try:
|
||||
perform_checkin(
|
||||
op=op,
|
||||
clist=self.checkinlist,
|
||||
given_answers=given_answers,
|
||||
force=force,
|
||||
ignore_unpaid=ignore_unpaid,
|
||||
nonce=nonce,
|
||||
datetime=dt,
|
||||
questions_supported=self.request.data.get('questions_supported', True),
|
||||
canceled_supported=self.request.data.get('canceled_supported', False),
|
||||
user=self.request.user,
|
||||
auth=self.request.auth,
|
||||
type=type,
|
||||
raw_barcode=raw_barcode_for_checkin,
|
||||
from_revoked_secret=from_revoked_secret,
|
||||
)
|
||||
except RequiredQuestionsError as e:
|
||||
return Response({
|
||||
'status': 'incomplete',
|
||||
'require_attention': op.item.checkin_attention or op.order.checkin_attention,
|
||||
'position': CheckinListOrderPositionSerializer(op, context=self.get_serializer_context()).data,
|
||||
'questions': [
|
||||
QuestionSerializer(q).data for q in e.questions
|
||||
]
|
||||
}, status=400)
|
||||
except CheckInError as e:
|
||||
op.order.log_action('pretix.event.checkin.denied', data={
|
||||
'position': op.id,
|
||||
'positionid': op.positionid,
|
||||
'errorcode': e.code,
|
||||
'reason_explanation': e.reason,
|
||||
'force': force,
|
||||
'datetime': dt,
|
||||
'type': type,
|
||||
'list': self.checkinlist.pk
|
||||
}, user=self.request.user, auth=self.request.auth)
|
||||
Checkin.objects.create(
|
||||
position=op,
|
||||
successful=False,
|
||||
error_reason=e.code,
|
||||
error_explanation=e.reason,
|
||||
**common_checkin_args,
|
||||
)
|
||||
return Response({
|
||||
'status': 'error',
|
||||
'reason': e.code,
|
||||
'reason_explanation': e.reason,
|
||||
'require_attention': op.item.checkin_attention or op.order.checkin_attention,
|
||||
'position': CheckinListOrderPositionSerializer(op, context=self.get_serializer_context()).data
|
||||
}, status=400)
|
||||
else:
|
||||
return Response({
|
||||
'status': 'ok',
|
||||
'require_attention': op.item.checkin_attention or op.order.checkin_attention,
|
||||
'position': CheckinListOrderPositionSerializer(op, context=self.get_serializer_context()).data
|
||||
}, status=201)
|
||||
|
||||
def _handle_file_upload(self, data):
|
||||
try:
|
||||
cf = CachedFile.objects.get(
|
||||
session_key=f'api-upload-{str(type(self.request.user or self.request.auth))}-{(self.request.user or self.request.auth).pk}',
|
||||
file__isnull=False,
|
||||
pk=data[len("file:"):],
|
||||
class CheckinRPCRedeemView(views.APIView):
|
||||
def post(self, request, *args, **kwargs):
|
||||
if isinstance(self.request.auth, (TeamAPIToken, Device)):
|
||||
events = self.request.auth.get_events_with_permission(('can_change_orders', 'can_checkin_orders'))
|
||||
elif self.request.user.is_authenticated:
|
||||
events = self.request.user.get_events_with_permission(('can_change_orders', 'can_checkin_orders'), self.request).filter(
|
||||
organizer=self.request.organizer
|
||||
)
|
||||
except (ValidationError, IndexError): # invalid uuid
|
||||
raise ValidationError('The submitted file ID "{fid}" was not found.'.format(fid=data))
|
||||
except CachedFile.DoesNotExist:
|
||||
raise ValidationError('The submitted file ID "{fid}" was not found.'.format(fid=data))
|
||||
else:
|
||||
raise ValueError("unknown authentication method")
|
||||
|
||||
allowed_types = (
|
||||
'image/png', 'image/jpeg', 'image/gif', 'application/pdf'
|
||||
s = CheckinRPCRedeemInputSerializer(data=request.data, context={'events': events})
|
||||
s.is_valid(raise_exception=True)
|
||||
return _redeem_process(
|
||||
checkinlists=s.validated_data['lists'],
|
||||
raw_barcode=s.validated_data['secret'],
|
||||
answers_data=s.validated_data.get('answers'),
|
||||
datetime=s.validated_data.get('datetime') or now(),
|
||||
force=s.validated_data['force'],
|
||||
checkin_type=s.validated_data['type'],
|
||||
ignore_unpaid=s.validated_data['ignore_unpaid'],
|
||||
nonce=s.validated_data.get('nonce'),
|
||||
untrusted_input=True,
|
||||
user=self.request.user,
|
||||
auth=self.request.auth,
|
||||
expand=self.request.query_params.getlist('expand'),
|
||||
pdf_data=self.request.query_params.get('pdf_data', 'false') == 'true',
|
||||
questions_supported=s.validated_data['questions_supported'],
|
||||
canceled_supported=True,
|
||||
request=self.request, # this is not clean, but we need it in the serializers for URL generation
|
||||
legacy_url_support=False,
|
||||
)
|
||||
if cf.type not in allowed_types:
|
||||
raise ValidationError('The submitted file "{fid}" has a file type that is not allowed in this field.'.format(fid=data))
|
||||
if cf.file.size > settings.FILE_UPLOAD_MAX_SIZE_OTHER:
|
||||
raise ValidationError('The submitted file "{fid}" is too large to be used in this field.'.format(fid=data))
|
||||
|
||||
return cf.file
|
||||
|
||||
class CheckinRPCSearchView(ListAPIView):
|
||||
serializer_class = CheckinListOrderPositionSerializer
|
||||
queryset = OrderPosition.all.none()
|
||||
filter_backends = (ExtendedBackend, RichOrderingFilter)
|
||||
ordering = (F('attendee_name_cached').asc(nulls_last=True), 'positionid')
|
||||
ordering_fields = (
|
||||
'order__code', 'order__datetime', 'positionid', 'attendee_name',
|
||||
'last_checked_in', 'order__email',
|
||||
)
|
||||
ordering_custom = {
|
||||
'attendee_name': {
|
||||
'_order': F('display_name').asc(nulls_first=True),
|
||||
'display_name': Coalesce('attendee_name_cached', 'addon_to__attendee_name_cached')
|
||||
},
|
||||
'-attendee_name': {
|
||||
'_order': F('display_name').desc(nulls_last=True),
|
||||
'display_name': Coalesce('attendee_name_cached', 'addon_to__attendee_name_cached')
|
||||
},
|
||||
'last_checked_in': {
|
||||
'_order': OrderBy(F('last_checked_in'), nulls_first=True),
|
||||
},
|
||||
'-last_checked_in': {
|
||||
'_order': OrderBy(F('last_checked_in'), nulls_last=True, descending=True),
|
||||
},
|
||||
}
|
||||
filterset_class = OrderPositionFilter
|
||||
|
||||
def get_serializer_context(self):
|
||||
ctx = super().get_serializer_context()
|
||||
ctx['expand'] = self.request.query_params.getlist('expand')
|
||||
ctx['pdf_data'] = False
|
||||
return ctx
|
||||
|
||||
@cached_property
|
||||
def lists(self):
|
||||
if isinstance(self.request.auth, (TeamAPIToken, Device)):
|
||||
events = self.request.auth.get_events_with_permission(('can_view_orders', 'can_checkin_orders'))
|
||||
elif self.request.user.is_authenticated:
|
||||
events = self.request.user.get_events_with_permission(('can_view_orders', 'can_checkin_orders'), self.request).filter(
|
||||
organizer=self.request.organizer
|
||||
)
|
||||
else:
|
||||
raise ValueError("unknown authentication method")
|
||||
requested_lists = [int(l) for l in self.request.query_params.getlist('list') if l.isdigit()]
|
||||
lists = list(
|
||||
CheckinList.objects.filter(event__in=events).select_related('event').filter(id__in=requested_lists)
|
||||
)
|
||||
if len(lists) != len(requested_lists):
|
||||
missing_lists = set(requested_lists) - {l.pk for l in lists}
|
||||
raise PermissionDenied("You requested lists that do not exist or that you do not have access to: " + ", ".join(str(l) for l in missing_lists))
|
||||
return lists
|
||||
|
||||
@cached_property
|
||||
def has_full_access_permission(self):
|
||||
if isinstance(self.request.auth, (TeamAPIToken, Device)):
|
||||
events = self.request.auth.get_events_with_permission('can_view_orders')
|
||||
elif self.request.user.is_authenticated:
|
||||
events = self.request.user.get_events_with_permission('can_view_orders', self.request).filter(
|
||||
organizer=self.request.organizer
|
||||
)
|
||||
else:
|
||||
raise ValueError("unknown authentication method")
|
||||
|
||||
full_access_lists = CheckinList.objects.filter(event__in=events).filter(id__in=[c.pk for c in self.lists]).count()
|
||||
return len(self.lists) == full_access_lists
|
||||
|
||||
def get_queryset(self, ignore_status=False, ignore_products=False):
|
||||
qs = _checkin_list_position_queryset(
|
||||
self.lists,
|
||||
ignore_status=self.request.query_params.get('ignore_status', 'false') == 'true' or ignore_status,
|
||||
ignore_products=ignore_products,
|
||||
pdf_data=self.request.query_params.get('pdf_data', 'false') == 'true',
|
||||
expand=self.request.query_params.getlist('expand'),
|
||||
)
|
||||
|
||||
if len(self.request.query_params.get('search', '')) < 3 and not self.has_full_access_permission:
|
||||
qs = qs.none()
|
||||
|
||||
return qs
|
||||
|
||||
@@ -29,7 +29,9 @@ from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from pretix import __version__
|
||||
from pretix.api.auth.device import DeviceTokenAuthentication
|
||||
from pretix.api.views.version import numeric_version
|
||||
from pretix.base.models import CheckinList, Device, SubEvent
|
||||
from pretix.base.models.devices import Gate, generate_api_token
|
||||
|
||||
@@ -151,6 +153,24 @@ class RevokeKeyView(APIView):
|
||||
return Response(serializer.data)
|
||||
|
||||
|
||||
class InfoView(APIView):
|
||||
authentication_classes = (DeviceTokenAuthentication,)
|
||||
|
||||
def get(self, request, format=None):
|
||||
device = request.auth
|
||||
serializer = DeviceSerializer(device)
|
||||
return Response({
|
||||
'device': serializer.data,
|
||||
'server': {
|
||||
'version': {
|
||||
'pretix': __version__,
|
||||
'pretix_numeric': numeric_version(__version__),
|
||||
}
|
||||
}
|
||||
|
||||
})
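Putting the view above together, the response body has the following shape; the version values are placeholders and the device block is whatever DeviceSerializer emits:

{
    "device": {
        "name": "Entrance scanner 1",
        # ... remaining DeviceSerializer fields ...
    },
    "server": {
        "version": {
            "pretix": "4.5.0",              # placeholder version string
            "pretix_numeric": 40050001000,  # placeholder; actual format is defined by numeric_version()
        }
    }
}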
|
||||
|
||||
|
||||
class EventSelectionView(APIView):
|
||||
authentication_classes = (DeviceTokenAuthentication,)
|
||||
|
||||
|
||||
@@ -461,7 +461,9 @@ with scopes_disabled():
|
||||
class QuotaFilter(FilterSet):
|
||||
class Meta:
|
||||
model = Quota
|
||||
fields = ['subevent']
|
||||
fields = {
|
||||
'subevent': ['exact', 'in'],
|
||||
}
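With the dict-style Meta.fields above, django-filter now exposes both an exact and an __in lookup on subevent, so clients can fetch quotas for several series dates in one request. A rough sketch; organizer, event, IDs and token are made up:

import requests

requests.get(
    "https://pretix.example.org/api/v1/organizers/acme/events/democon/quotas/",
    params={"subevent__in": "12,13"},          # hypothetical subevent IDs
    headers={"Authorization": "Token ..."},    # placeholder API token
)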
|
||||
|
||||
|
||||
class QuotaViewSet(ConditionalListView, viewsets.ModelViewSet):
|
||||
|
||||
@@ -63,9 +63,10 @@ from pretix.api.serializers.orderchange import (
|
||||
)
|
||||
from pretix.base.i18n import language
|
||||
from pretix.base.models import (
|
||||
CachedCombinedTicket, CachedTicket, Checkin, Device, Event, Invoice,
|
||||
InvoiceAddress, Order, OrderFee, OrderPayment, OrderPosition, OrderRefund,
|
||||
Quota, SubEvent, TaxRule, TeamAPIToken, generate_secret,
|
||||
CachedCombinedTicket, CachedTicket, Checkin, Device, EventMetaValue,
|
||||
Invoice, InvoiceAddress, ItemMetaValue, Order, OrderFee, OrderPayment,
|
||||
OrderPosition, OrderRefund, Quota, SubEvent, SubEventMetaValue, TaxRule,
|
||||
TeamAPIToken, generate_secret,
|
||||
)
|
||||
from pretix.base.models.orders import QuestionAnswer, RevokedTicketSecret
|
||||
from pretix.base.payment import PaymentException
|
||||
@@ -187,6 +188,8 @@ class OrderViewSet(viewsets.ModelViewSet):
|
||||
def get_serializer_context(self):
|
||||
ctx = super().get_serializer_context()
|
||||
ctx['event'] = self.request.event
|
||||
ctx['pdf_data'] = self.request.query_params.get('pdf_data', 'false') == 'true'
|
||||
ctx['exclude'] = self.request.query_params.getlist('exclude')
|
||||
return ctx
|
||||
|
||||
def get_queryset(self):
|
||||
@@ -213,14 +216,29 @@ class OrderViewSet(viewsets.ModelViewSet):
|
||||
else:
|
||||
opq = OrderPosition.objects
|
||||
if request.query_params.get('pdf_data', 'false') == 'true':
|
||||
prefetch_related_objects([request.organizer], 'meta_properties')
|
||||
prefetch_related_objects(
|
||||
[request.event],
|
||||
Prefetch('meta_values', queryset=EventMetaValue.objects.select_related('property'), to_attr='meta_values_cached'),
|
||||
'questions',
|
||||
'item_meta_properties',
|
||||
)
|
||||
return Prefetch(
|
||||
'positions',
|
||||
opq.all().prefetch_related(
|
||||
Prefetch('checkins', queryset=Checkin.objects.all()),
|
||||
'item', 'variation', 'answers', 'answers__options', 'answers__question',
|
||||
'item__category', 'addon_to', 'seat',
|
||||
Prefetch('item', queryset=self.request.event.items.prefetch_related(
|
||||
Prefetch('meta_values', ItemMetaValue.objects.select_related('property'), to_attr='meta_values_cached')
|
||||
)),
|
||||
'variation',
|
||||
'answers', 'answers__options', 'answers__question',
|
||||
'item__category',
|
||||
'addon_to__answers', 'addon_to__answers__options', 'addon_to__answers__question',
|
||||
Prefetch('subevent', queryset=self.request.event.subevents.prefetch_related(
|
||||
Prefetch('meta_values', to_attr='meta_values_cached', queryset=SubEventMetaValue.objects.select_related('property'))
|
||||
)),
|
||||
Prefetch('addons', opq.select_related('item', 'variation', 'seat'))
|
||||
)
|
||||
).select_related('seat', 'addon_to', 'addon_to__seat')
|
||||
)
|
||||
else:
|
||||
return Prefetch(
|
||||
@@ -932,6 +950,7 @@ class OrderPositionViewSet(viewsets.ModelViewSet):
|
||||
def get_serializer_context(self):
|
||||
ctx = super().get_serializer_context()
|
||||
ctx['event'] = self.request.event
|
||||
ctx['pdf_data'] = self.request.query_params.get('pdf_data', 'false') == 'true'
|
||||
return ctx
|
||||
|
||||
def get_queryset(self):
|
||||
@@ -942,25 +961,49 @@ class OrderPositionViewSet(viewsets.ModelViewSet):
|
||||
|
||||
qs = qs.filter(order__event=self.request.event)
|
||||
if self.request.query_params.get('pdf_data', 'false') == 'true':
|
||||
prefetch_related_objects([self.request.organizer], 'meta_properties')
|
||||
prefetch_related_objects(
|
||||
[self.request.event],
|
||||
Prefetch('meta_values', queryset=EventMetaValue.objects.select_related('property'), to_attr='meta_values_cached'),
|
||||
'questions',
|
||||
'item_meta_properties',
|
||||
)
|
||||
qs = qs.prefetch_related(
|
||||
Prefetch('checkins', queryset=Checkin.objects.all()),
|
||||
Prefetch('item', queryset=self.request.event.items.prefetch_related(
|
||||
Prefetch('meta_values', ItemMetaValue.objects.select_related('property'),
|
||||
to_attr='meta_values_cached')
|
||||
)),
|
||||
'variation',
|
||||
'answers', 'answers__options', 'answers__question',
|
||||
'item__category',
|
||||
'addon_to__answers', 'addon_to__answers__options', 'addon_to__answers__question',
|
||||
Prefetch('addons', qs.select_related('item', 'variation')),
|
||||
Prefetch('order', Order.objects.select_related('invoice_address').prefetch_related(
|
||||
Prefetch(
|
||||
'event',
|
||||
Event.objects.select_related('organizer')
|
||||
),
|
||||
Prefetch('subevent', queryset=self.request.event.subevents.prefetch_related(
|
||||
Prefetch('meta_values', to_attr='meta_values_cached',
|
||||
queryset=SubEventMetaValue.objects.select_related('property'))
|
||||
)),
|
||||
Prefetch('order', self.request.event.orders.select_related('invoice_address').prefetch_related(
|
||||
Prefetch(
|
||||
'positions',
|
||||
qs.prefetch_related(
|
||||
'item', 'variation', 'answers', 'answers__options', 'answers__question',
|
||||
Prefetch('checkins', queryset=Checkin.objects.all()),
|
||||
)
|
||||
Prefetch('item', queryset=self.request.event.items.prefetch_related(
|
||||
Prefetch('meta_values', ItemMetaValue.objects.select_related('property'),
|
||||
to_attr='meta_values_cached')
|
||||
)),
|
||||
'variation', 'answers', 'answers__options', 'answers__question',
|
||||
'item__category',
|
||||
Prefetch('subevent', queryset=self.request.event.subevents.prefetch_related(
|
||||
Prefetch('meta_values', to_attr='meta_values_cached',
|
||||
queryset=SubEventMetaValue.objects.select_related('property'))
|
||||
)),
|
||||
Prefetch('addons', qs.select_related('item', 'variation', 'seat'))
|
||||
).select_related('addon_to', 'seat', 'addon_to__seat')
|
||||
)
|
||||
))
|
||||
).select_related(
|
||||
'item', 'variation', 'item__category', 'addon_to', 'seat'
|
||||
'addon_to', 'seat', 'addon_to__seat'
|
||||
)
|
||||
else:
|
||||
qs = qs.prefetch_related(
|
||||
|
||||
@@ -22,6 +22,7 @@
|
||||
from decimal import Decimal
|
||||
|
||||
import django_filters
|
||||
from django.contrib.auth.hashers import make_password
|
||||
from django.db import transaction
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.functional import cached_property
|
||||
@@ -38,8 +39,8 @@ from rest_framework.viewsets import GenericViewSet
|
||||
|
||||
from pretix.api.models import OAuthAccessToken
|
||||
from pretix.api.serializers.organizer import (
|
||||
CustomerSerializer, DeviceSerializer, GiftCardSerializer,
|
||||
GiftCardTransactionSerializer, MembershipSerializer,
|
||||
CustomerCreateSerializer, CustomerSerializer, DeviceSerializer,
|
||||
GiftCardSerializer, GiftCardTransactionSerializer, MembershipSerializer,
|
||||
MembershipTypeSerializer, OrganizerSerializer, OrganizerSettingsSerializer,
|
||||
SeatingPlanSerializer, TeamAPITokenSerializer, TeamInviteSerializer,
|
||||
TeamMemberSerializer, TeamSerializer,
|
||||
@@ -514,15 +515,24 @@ class CustomerViewSet(viewsets.ModelViewSet):
|
||||
raise MethodNotAllowed("Customers cannot be deleted.")
|
||||
|
||||
@transaction.atomic()
|
||||
def perform_create(self, serializer):
|
||||
inst = serializer.save(organizer=self.request.organizer)
|
||||
def perform_create(self, serializer, send_email=False, password=None):
|
||||
customer = serializer.save(organizer=self.request.organizer, password=make_password(password))
|
||||
serializer.instance.log_action(
|
||||
'pretix.customer.created',
|
||||
user=self.request.user,
|
||||
auth=self.request.auth,
|
||||
data=self.request.data,
|
||||
)
|
||||
return inst
|
||||
if send_email:
|
||||
customer.send_activation_mail()
|
||||
return customer
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
serializer = CustomerCreateSerializer(data=request.data, context=self.get_serializer_context())
|
||||
serializer.is_valid(raise_exception=True)
|
||||
self.perform_create(serializer, send_email=serializer.validated_data.pop('send_email', False), password=serializer.validated_data.pop('password', None))
|
||||
headers = self.get_success_headers(serializer.data)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
|
||||
|
||||
@transaction.atomic()
|
||||
def perform_update(self, serializer):
|
||||
|
||||
src/pretix/api/views/shredders.py (new file, 240 lines)
@@ -0,0 +1,240 @@
|
||||
#
|
||||
# This file is part of pretix (Community Edition).
|
||||
#
|
||||
# Copyright (C) 2014-2020 Raphael Michel and contributors
|
||||
# Copyright (C) 2020-2021 rami.io GmbH and contributors
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
|
||||
# Public License as published by the Free Software Foundation in version 3 of the License.
|
||||
#
|
||||
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
|
||||
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
|
||||
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
|
||||
# this file, see <https://pretix.eu/about/en/license>.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
|
||||
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
from datetime import timedelta
|
||||
|
||||
from celery.result import AsyncResult
|
||||
from django.conf import settings
|
||||
from django.http import Http404
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.functional import cached_property
|
||||
from django.utils.timezone import now
|
||||
from rest_framework import status, viewsets
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.reverse import reverse
|
||||
|
||||
from pretix.api.serializers.shredders import (
|
||||
JobRunSerializer, ShredderSerializer,
|
||||
)
|
||||
from pretix.base.models import CachedFile
|
||||
from pretix.base.services.shredder import export, shred
|
||||
from pretix.base.shredder import shred_constraints
|
||||
from pretix.helpers.http import ChunkBasedFileResponse
|
||||
|
||||
|
||||
class ShreddersMixin:
|
||||
def list(self, request, *args, **kwargs):
|
||||
res = ShredderSerializer(self.shredders, many=True)
|
||||
return Response({
|
||||
"count": len(self.shredders),
|
||||
"next": None,
|
||||
"previous": None,
|
||||
"results": res.data
|
||||
})
|
||||
|
||||
def get_object(self):
|
||||
instances = [e for e in self.shredders if e.identifier == self.kwargs.get('pk')]
|
||||
if not instances:
|
||||
raise Http404()
|
||||
return instances[0]
|
||||
|
||||
def retrieve(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
serializer = ShredderSerializer(instance)
|
||||
return Response(serializer.data)
|
||||
|
||||
@action(detail=False, methods=['GET'], url_name='download', url_path='download/(?P<asyncid>[^/]+)/(?P<cfid>[^/]+)')
|
||||
def download(self, *args, **kwargs):
|
||||
cf = get_object_or_404(
|
||||
CachedFile,
|
||||
id=kwargs['cfid'],
|
||||
session_key=f'api-shredder-{str(type(self.request.user or self.request.auth))}-{(self.request.user or self.request.auth).pk}'
|
||||
)
|
||||
if cf.file:
|
||||
resp = ChunkBasedFileResponse(cf.file.file, content_type=cf.type)
|
||||
resp['Content-Disposition'] = 'attachment; filename="{}"'.format(cf.filename).encode("ascii", "ignore")
|
||||
return resp
|
||||
elif not settings.HAS_CELERY:
|
||||
return Response(
|
||||
{'status': 'failed', 'message': 'Unknown file ID or export failed'},
|
||||
status=status.HTTP_410_GONE
|
||||
)
|
||||
|
||||
res = AsyncResult(kwargs['asyncid'])
|
||||
if res.failed():
|
||||
if isinstance(res.info, dict) and res.info['exc_type'] in ('ShredError', 'ExportError'):
|
||||
msg = res.info['exc_message']
|
||||
else:
|
||||
msg = 'Internal error'
|
||||
return Response(
|
||||
{'status': 'failed', 'message': msg},
|
||||
status=status.HTTP_410_GONE
|
||||
)
|
||||
|
||||
return Response(
|
||||
{
|
||||
'status': 'running' if res.state in ('PROGRESS', 'STARTED', 'SUCCESS') else 'waiting',
|
||||
},
|
||||
status=status.HTTP_409_CONFLICT
|
||||
)
|
||||
|
||||
@action(detail=False, methods=['GET'], url_name='status', url_path='status/(?P<asyncid>[^/]+)/(?P<cfid>[^/]+)')
|
||||
def status(self, *args, **kwargs):
|
||||
if settings.HAS_CELERY:
|
||||
res = AsyncResult(kwargs['asyncid'])
|
||||
if res.failed():
|
||||
if isinstance(res.info, dict) and res.info['exc_type'] in ('ShredError', 'ExportError'):
|
||||
msg = res.info['exc_message']
|
||||
else:
|
||||
msg = 'Internal error'
|
||||
return Response(
|
||||
{'status': 'failed', 'message': msg},
|
||||
status=status.HTTP_417_EXPECTATION_FAILED
|
||||
)
|
||||
elif res.successful():
|
||||
return Response(
|
||||
{'status': 'ok', 'message': 'OK'},
|
||||
status=status.HTTP_200_OK
|
||||
)
|
||||
|
||||
try:
|
||||
CachedFile.objects.get(
|
||||
id=kwargs['cfid'],
|
||||
session_key=f'api-shredder-{str(type(self.request.user or self.request.auth))}-{(self.request.user or self.request.auth).pk}'
|
||||
)
|
||||
except CachedFile.DoesNotExist:
|
||||
return Response(
|
||||
{'status': 'gone', 'message': 'May have succeeded or timed out'},
|
||||
status=status.HTTP_410_GONE
|
||||
)
|
||||
|
||||
return Response(
|
||||
{
|
||||
'status': 'running' if res.state in ('PROGRESS', 'STARTED', 'SUCCESS') else 'waiting',
|
||||
},
|
||||
status=status.HTTP_409_CONFLICT
|
||||
)
|
||||
|
||||
@action(detail=False, methods=['POST'], url_name='shred', url_path='shred/(?P<asyncid>[^/]+)/(?P<cfid>[^/]+)')
|
||||
def shred(self, *args, **kwargs):
|
||||
cf = get_object_or_404(
|
||||
CachedFile,
|
||||
id=kwargs['cfid'],
|
||||
session_key=f'api-shredder-{str(type(self.request.user or self.request.auth))}-{(self.request.user or self.request.auth).pk}'
|
||||
)
|
||||
if cf.file:
|
||||
async_result = self.do_shred(cf)
|
||||
url_kwargs = {
|
||||
'asyncid': str(async_result.id),
|
||||
'cfid': str(cf.id),
|
||||
}
|
||||
url_kwargs.update(self.kwargs)
|
||||
return Response({
|
||||
'status': reverse('api-v1:shredders-status', kwargs=url_kwargs, request=self.request),
|
||||
}, status=status.HTTP_202_ACCEPTED)
|
||||
elif not settings.HAS_CELERY:
|
||||
return Response(
|
||||
{'status': 'failed', 'message': 'Unknown file ID or export failed'},
|
||||
status=status.HTTP_410_GONE
|
||||
)
|
||||
|
||||
res = AsyncResult(kwargs['asyncid'])
|
||||
if res.failed():
|
||||
if isinstance(res.info, dict) and res.info['exc_type'] in ('ShredError', 'ExportError'):
|
||||
msg = res.info['exc_message']
|
||||
else:
|
||||
msg = 'Internal error'
|
||||
return Response(
|
||||
{'status': 'failed', 'message': msg},
|
||||
status=status.HTTP_410_GONE
|
||||
)
|
||||
|
||||
return Response(
|
||||
{
|
||||
'status': 'running' if res.state in ('PROGRESS', 'STARTED', 'SUCCESS') else 'waiting',
|
||||
},
|
||||
status=status.HTTP_409_CONFLICT
|
||||
)
|
||||
|
||||
@action(detail=False, methods=['POST'])
|
||||
def export(self, *args, **kwargs):
|
||||
serializer = JobRunSerializer(shredders=self.shredders, data=self.request.data, **self.get_serializer_kwargs())
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
cf = CachedFile(web_download=False)
|
||||
cf.date = now()
|
||||
cf.expires = now() + timedelta(hours=2)
|
||||
cf.session_key = f'api-shredder-{str(type(self.request.user or self.request.auth))}-{(self.request.user or self.request.auth).pk}'
|
||||
cf.save()
|
||||
d = serializer.data
|
||||
for k, v in d.items():
|
||||
if isinstance(v, set):
|
||||
d[k] = list(v)
|
||||
async_result = self.do_export(cf, serializer.validated_data['shredders'])
|
||||
|
||||
url_kwargs = {
|
||||
'asyncid': str(async_result.id),
|
||||
'cfid': str(cf.id),
|
||||
}
|
||||
url_kwargs.update(self.kwargs)
|
||||
return Response({
|
||||
'download': reverse('api-v1:shredders-download', kwargs=url_kwargs, request=self.request),
|
||||
'shred': reverse('api-v1:shredders-shred', kwargs=url_kwargs, request=self.request),
|
||||
}, status=status.HTTP_202_ACCEPTED)
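From a client's point of view, the actions above chain as export, then polling download, then shred. A rough sketch, assuming the viewset is mounted under .../shredders/ and using a made-up shredder identifier, base URL and token:

import time
import requests

BASE = "https://pretix.example.org/api/v1/organizers/acme/events/democon"   # placeholder
HEADERS = {"Authorization": "Token ..."}                                    # placeholder

r = requests.post(f"{BASE}/shredders/export/", json={"shredders": ["order_emails"]}, headers=HEADERS)
r.raise_for_status()
urls = r.json()   # absolute 'download' and 'shred' URLs built via reverse() above

while True:
    dl = requests.get(urls["download"], headers=HEADERS)
    if dl.status_code == 200:     # file is ready; the body is the exported archive
        break
    if dl.status_code == 410:     # export failed or the file has expired
        raise RuntimeError(dl.json().get("message"))
    time.sleep(2)                 # 409 means the task is still running or waiting

requests.post(urls["shred"], headers=HEADERS).raise_for_status()   # 202 once shredding is queued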
|
||||
|
||||
|
||||
class EventShreddersViewSet(ShreddersMixin, viewsets.ViewSet):
|
||||
permission = 'can_change_orders'
|
||||
|
||||
def get_serializer_kwargs(self):
|
||||
return {}
|
||||
|
||||
@cached_property
|
||||
def shredders(self):
|
||||
shredders = []
|
||||
for k, v in sorted(self.request.event.get_data_shredders().items(), key=lambda s: s[1].verbose_name):
|
||||
shredders.append(v)
|
||||
return shredders
|
||||
|
||||
def do_export(self, cf, shredders):
|
||||
constr = shred_constraints(self.request.event)
|
||||
if constr:
|
||||
raise ValidationError(constr)
|
||||
|
||||
return export.apply_async(args=(
|
||||
self.request.event.id,
|
||||
list(shredders),
|
||||
f'api-shredder-{str(type(self.request.user or self.request.auth))}-{(self.request.user or self.request.auth).pk}',
|
||||
str(cf.pk)
|
||||
))
|
||||
|
||||
def do_shred(self, cf):
|
||||
constr = shred_constraints(self.request.event)
|
||||
if constr:
|
||||
raise ValidationError(constr)
|
||||
|
||||
return shred.apply_async(args=(
|
||||
self.request.event.id,
|
||||
str(cf.pk),
|
||||
True,
|
||||
))
|
||||
@@ -23,19 +23,24 @@ import json
|
||||
import logging
|
||||
import time
|
||||
from collections import OrderedDict
|
||||
from datetime import timedelta
|
||||
|
||||
import requests
|
||||
from celery.exceptions import MaxRetriesExceededError
|
||||
from django.db import DatabaseError, connection, transaction
|
||||
from django.db.models import Exists, OuterRef, Q
|
||||
from django.dispatch import receiver
|
||||
from django.utils.timezone import now
|
||||
from django.utils.translation import gettext_lazy as _, pgettext_lazy
|
||||
from django_scopes import scope, scopes_disabled
|
||||
from requests import RequestException
|
||||
|
||||
from pretix.api.models import WebHook, WebHookCall, WebHookEventListener
|
||||
from pretix.api.models import (
|
||||
WebHook, WebHookCall, WebHookCallRetry, WebHookEventListener,
|
||||
)
|
||||
from pretix.api.signals import register_webhook_events
|
||||
from pretix.base.models import LogEntry
|
||||
from pretix.base.services.tasks import ProfiledTask, TransactionAwareTask
|
||||
from pretix.base.signals import periodic_task
|
||||
from pretix.celery_app import app
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -219,6 +224,10 @@ def register_default_webhook_events(sender, **kwargs):
|
||||
'pretix.event.order.expired',
|
||||
_('Order expired'),
|
||||
),
|
||||
ParametrizedOrderWebhookEvent(
|
||||
'pretix.event.order.expirychanged',
|
||||
_('Order expiry date changed'),
|
||||
),
|
||||
ParametrizedOrderWebhookEvent(
|
||||
'pretix.event.order.modified',
|
||||
_('Order information changed'),
|
||||
@@ -231,10 +240,30 @@ def register_default_webhook_events(sender, **kwargs):
|
||||
'pretix.event.order.changed.*',
|
||||
_('Order changed'),
|
||||
),
|
||||
ParametrizedOrderWebhookEvent(
|
||||
'pretix.event.order.refund.created',
|
||||
_('Refund of payment created'),
|
||||
),
|
||||
ParametrizedOrderWebhookEvent(
|
||||
'pretix.event.order.refund.created.externally',
|
||||
_('External refund of payment'),
|
||||
),
|
||||
ParametrizedOrderWebhookEvent(
|
||||
'pretix.event.order.refund.requested',
|
||||
_('Refund of payment requested by customer'),
|
||||
),
|
||||
ParametrizedOrderWebhookEvent(
|
||||
'pretix.event.order.refund.done',
|
||||
_('Refund of payment completed'),
|
||||
),
|
||||
ParametrizedOrderWebhookEvent(
|
||||
'pretix.event.order.refund.canceled',
|
||||
_('Refund of payment canceled'),
|
||||
),
|
||||
ParametrizedOrderWebhookEvent(
|
||||
'pretix.event.order.refund.failed',
|
||||
_('Refund of payment failed'),
|
||||
),
|
||||
ParametrizedOrderWebhookEvent(
|
||||
'pretix.event.order.approved',
|
||||
_('Order approved'),
|
||||
@@ -275,6 +304,22 @@ def register_default_webhook_events(sender, **kwargs):
|
||||
'pretix.subevent.deleted',
|
||||
pgettext_lazy('subevent', 'Event series date deleted'),
|
||||
),
|
||||
ParametrizedEventWebhookEvent(
|
||||
'pretix.event.live.activated',
|
||||
_('Shop taken live'),
|
||||
),
|
||||
ParametrizedEventWebhookEvent(
|
||||
'pretix.event.live.deactivated',
|
||||
_('Shop taken offline'),
|
||||
),
|
||||
ParametrizedEventWebhookEvent(
|
||||
'pretix.event.testmode.activated',
|
||||
_('Test-Mode of shop has been activated'),
|
||||
),
|
||||
ParametrizedEventWebhookEvent(
|
||||
'pretix.event.testmode.deactivated',
|
||||
_('Test-Mode of shop has been deactivated'),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -316,59 +361,163 @@ def notify_webhooks(logentry_ids: list):
|
||||
send_webhook.apply_async(args=(logentry.id, notification_type.action_type, wh.pk))
|
||||
|
||||
|
||||
@app.task(base=ProfiledTask, bind=True, max_retries=9, acks_late=True)
|
||||
def send_webhook(self, logentry_id: int, action_type: str, webhook_id: int):
|
||||
# 9 retries with 2**(2*x) timing is roughly 72 hours
|
||||
@app.task(base=ProfiledTask, bind=True, max_retries=5, default_retry_delay=60, acks_late=True, autoretry_for=(DatabaseError,),)
|
||||
def send_webhook(self, logentry_id: int, action_type: str, webhook_id: int, retry_count: int = 0):
|
||||
"""
|
||||
Sends out a specific webhook using adequate retry and error handling logic.
|
||||
|
||||
Our retry logic is a little complex since we have different constraints here:
|
||||
|
||||
1. We historically documented that we retry for up to three days, so we want to keep that
|
||||
promise. We want to use (approximately) exponentially increasing times to keep load
|
||||
manageable.
|
||||
|
||||
2. We want to use Celery's ``acks_late=True`` options which prevents lost tasks if a worker
|
||||
crashes.
|
||||
|
||||
3. A limitation of Celery's redis broker implementation is that it can not properly handle
|
||||
tasks that *run or wait* longer than `visibility_timeout`, which defaults to 1h, when
|
||||
``acks_late`` is enabled. So any task with a *retry interval* of >1h will be restarted
|
||||
many times because celery believes the worker has crashed.
|
||||
|
||||
4. We do like that the first few retries happen within a few seconds to work around very
|
||||
intermittent connectivity issues quickly. For the longer retries with multiple hours,
|
||||
we don't care if they are emitted a few minutes too late.
|
||||
|
||||
We therefore have a two-phase retry process:
|
||||
|
||||
- For all retry intervals below 5 minutes, which is the first 3 retries currently, we
|
||||
schedule a new celery task directly with an increased retry_count. We do *not* use
|
||||
celery's retry() call currently to make the retry process in both phases more similar,
|
||||
there should not be much of a difference though (except that the initial task will be in
|
||||
SUCCESS state, but we don't check that status anywhere).
|
||||
|
||||
- For all retry intervals of at least 5 minutes, we create a database entry. Then, the
|
||||
periodic task ``schedule_webhook_retries_on_celery`` will schedule celery tasks for them
|
||||
once their time has come.
|
||||
"""
|
||||
retry_intervals = (
5, # + 5 seconds
30, # + 30 seconds
60, # + 1 minute
300, # + 5 minutes
1200, # + 20 minutes
3600, # + 60 minutes
14400, # + 4 hours
21600, # + 6 hours
43200, # + 12 hours
86400, # + 24 hours
86400, # + 24 hours
) # added up, these are approximately 3 days, as documented
retry_celery_cutoff = 300
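# As a sketch of the two phases described in the docstring: the first three intervals (5s, 30s,
# 60s) stay below retry_celery_cutoff, so those retries are re-queued directly on celery with a
# countdown; every later interval (300s and more) is recorded as a WebHookCallRetry row and picked
# up by the periodic task. Added up, the final retry happens roughly 71 hours, i.e. about three
# days, after the first delivery attempt.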
|
||||
|
||||
with scopes_disabled():
|
||||
webhook = WebHook.objects.get(id=webhook_id)
|
||||
with scope(organizer=webhook.organizer):
|
||||
|
||||
with scope(organizer=webhook.organizer), transaction.atomic():
|
||||
logentry = LogEntry.all.get(id=logentry_id)
|
||||
types = get_all_webhook_events()
|
||||
event_type = types.get(action_type)
|
||||
if not event_type or not webhook.enabled:
|
||||
return # Ignore, e.g. plugin not installed
|
||||
return 'obsolete-webhook' # Ignore, e.g. plugin not installed
|
||||
|
||||
payload = event_type.build_payload(logentry)
|
||||
if payload is None:
|
||||
# Content object deleted?
|
||||
return
|
||||
return 'obsolete-payload'
|
||||
|
||||
t = time.time()
|
||||
|
||||
try:
|
||||
try:
|
||||
resp = requests.post(
|
||||
webhook.target_url,
|
||||
json=payload,
|
||||
allow_redirects=False
|
||||
resp = requests.post(
|
||||
webhook.target_url,
|
||||
json=payload,
|
||||
allow_redirects=False,
|
||||
timeout=30,
|
||||
)
|
||||
WebHookCall.objects.create(
|
||||
webhook=webhook,
|
||||
action_type=logentry.action_type,
|
||||
target_url=webhook.target_url,
|
||||
is_retry=self.request.retries > 0,
|
||||
execution_time=time.time() - t,
|
||||
return_code=resp.status_code,
|
||||
payload=json.dumps(payload),
|
||||
response_body=resp.text[:1024 * 1024],
|
||||
success=200 <= resp.status_code <= 299
|
||||
)
|
||||
if resp.status_code == 410:
|
||||
webhook.enabled = False
|
||||
webhook.save()
|
||||
return 'gone'
|
||||
elif resp.status_code > 299:
|
||||
if retry_count >= len(retry_intervals):
|
||||
return 'retry-given-up'
|
||||
elif retry_intervals[retry_count] < retry_celery_cutoff:
|
||||
send_webhook.apply_async(args=(logentry_id, action_type, webhook_id, retry_count + 1),
|
||||
countdown=retry_intervals[retry_count])
|
||||
return 'retry-via-celery'
|
||||
else:
|
||||
webhook.retries.update_or_create(
|
||||
logentry=logentry,
|
||||
defaults=dict(
|
||||
retry_not_before=now() + timedelta(seconds=retry_intervals[retry_count]),
|
||||
retry_count=retry_count + 1,
|
||||
action_type=action_type,
|
||||
),
|
||||
)
|
||||
return 'retry-via-db'
|
||||
return 'ok'
|
||||
except RequestException as e:
|
||||
WebHookCall.objects.create(
|
||||
webhook=webhook,
|
||||
action_type=logentry.action_type,
|
||||
target_url=webhook.target_url,
|
||||
is_retry=self.request.retries > 0,
|
||||
execution_time=time.time() - t,
|
||||
return_code=0,
|
||||
payload=json.dumps(payload),
|
||||
response_body=str(e)[:1024 * 1024]
|
||||
)
|
||||
if retry_count >= len(retry_intervals):
|
||||
return 'retry-given-up'
|
||||
elif retry_intervals[retry_count] < retry_celery_cutoff:
|
||||
send_webhook.apply_async(args=(logentry_id, action_type, webhook_id, retry_count + 1))
|
||||
return 'retry-via-celery'
|
||||
else:
|
||||
webhook.retries.update_or_create(
|
||||
logentry=logentry,
|
||||
defaults=dict(
|
||||
retry_not_before=now() + timedelta(seconds=retry_intervals[retry_count]),
|
||||
retry_count=retry_count + 1,
|
||||
action_type=action_type,
|
||||
),
|
||||
)
|
||||
WebHookCall.objects.create(
|
||||
webhook=webhook,
|
||||
action_type=logentry.action_type,
|
||||
target_url=webhook.target_url,
|
||||
is_retry=self.request.retries > 0,
|
||||
execution_time=time.time() - t,
|
||||
return_code=resp.status_code,
|
||||
payload=json.dumps(payload),
|
||||
response_body=resp.text[:1024 * 1024],
|
||||
success=200 <= resp.status_code <= 299
|
||||
)
|
||||
if resp.status_code == 410:
|
||||
webhook.enabled = False
|
||||
webhook.save()
|
||||
elif resp.status_code > 299:
|
||||
raise self.retry(countdown=2 ** (self.request.retries * 2)) # max is 2 ** (8*2) = 65536 seconds = ~18 hours
|
||||
except RequestException as e:
|
||||
WebHookCall.objects.create(
|
||||
webhook=webhook,
|
||||
action_type=logentry.action_type,
|
||||
target_url=webhook.target_url,
|
||||
is_retry=self.request.retries > 0,
|
||||
execution_time=time.time() - t,
|
||||
return_code=0,
|
||||
payload=json.dumps(payload),
|
||||
response_body=str(e)[:1024 * 1024]
|
||||
)
|
||||
raise self.retry(countdown=2 ** (self.request.retries * 2)) # max is 2 ** (8*2) = 65536 seconds = ~18 hours
|
||||
except MaxRetriesExceededError:
|
||||
pass
|
||||
return 'retry-via-db'
|
||||
|
||||
|
||||
@app.task(base=TransactionAwareTask)
|
||||
def manually_retry_all_calls(webhook_id: int):
|
||||
with scopes_disabled():
|
||||
webhook = WebHook.objects.get(id=webhook_id)
|
||||
with scope(organizer=webhook.organizer), transaction.atomic():
|
||||
for whcr in webhook.retries.select_for_update(
|
||||
skip_locked=connection.features.has_select_for_update_skip_locked
|
||||
):
|
||||
send_webhook.apply_async(
|
||||
args=(whcr.logentry_id, whcr.action_type, whcr.webhook_id, whcr.retry_count),
|
||||
)
|
||||
whcr.delete()
|
||||
|
||||
|
||||
@receiver(signal=periodic_task, dispatch_uid='pretixapi_schedule_webhook_retries_on_celery')
|
||||
@scopes_disabled()
|
||||
def schedule_webhook_retries_on_celery(sender, **kwargs):
|
||||
with transaction.atomic():
|
||||
for whcr in WebHookCallRetry.objects.select_for_update(
|
||||
skip_locked=connection.features.has_select_for_update_skip_locked
|
||||
).filter(retry_not_before__lt=now()):
|
||||
send_webhook.apply_async(
|
||||
args=(whcr.logentry_id, whcr.action_type, whcr.webhook_id, whcr.retry_count),
|
||||
)
|
||||
whcr.delete()
|
||||
|
||||
src/pretix/base/customersso/__init__.py (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
#
|
||||
# This file is part of pretix (Community Edition).
|
||||
#
|
||||
# Copyright (C) 2014-2020 Raphael Michel and contributors
|
||||
# Copyright (C) 2020-2021 rami.io GmbH and contributors
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
|
||||
# Public License as published by the Free Software Foundation in version 3 of the License.
|
||||
#
|
||||
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
|
||||
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
|
||||
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
|
||||
# this file, see <https://pretix.eu/about/en/license>.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
|
||||
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
src/pretix/base/customersso/oidc.py (new file, 295 lines)
@@ -0,0 +1,295 @@
|
||||
#
|
||||
# This file is part of pretix (Community Edition).
|
||||
#
|
||||
# Copyright (C) 2014-2020 Raphael Michel and contributors
|
||||
# Copyright (C) 2020-2021 rami.io GmbH and contributors
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
|
||||
# Public License as published by the Free Software Foundation in version 3 of the License.
|
||||
#
|
||||
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
|
||||
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
|
||||
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
|
||||
# this file, see <https://pretix.eu/about/en/license>.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
|
||||
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
import base64
|
||||
import hashlib
|
||||
import logging
|
||||
import time
|
||||
from datetime import datetime
|
||||
from urllib.parse import urlencode, urljoin
|
||||
|
||||
import jwt
|
||||
import requests
|
||||
from cryptography.hazmat.primitives.asymmetric.rsa import generate_private_key
|
||||
from cryptography.hazmat.primitives.serialization import (
|
||||
Encoding, NoEncryption, PrivateFormat, PublicFormat,
|
||||
)
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from requests import RequestException
|
||||
|
||||
from pretix.multidomain.urlreverse import build_absolute_uri
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
"""
|
||||
This module contains utilities for implementing OpenID Connect for customer authentication both as a receiving party (RP)
|
||||
as well as an OpenID Provider (OP).
|
||||
"""
|
||||
|
||||
|
||||
def _urljoin(base, path):
|
||||
if not base.endswith("/"):
|
||||
base += "/"
|
||||
return urljoin(base, path)
|
||||
|
||||
|
||||
def oidc_validate_and_complete_config(config):
|
||||
for k in ("base_url", "client_id", "client_secret", "uid_field", "email_field", "scope"):
|
||||
if not config.get(k):
|
||||
raise ValidationError(_('Configuration option "{name}" is missing.').format(name=k))
|
||||
|
||||
conf_url = _urljoin(config["base_url"], ".well-known/openid-configuration")
|
||||
try:
|
||||
resp = requests.get(conf_url, timeout=10)
|
||||
resp.raise_for_status()
|
||||
provider_config = resp.json()
|
||||
except RequestException as e:
|
||||
raise ValidationError(_('Unable to retrieve configuration from "{url}". Error message: "{error}".').format(
|
||||
url=conf_url,
|
||||
error=str(e)
|
||||
))
|
||||
except ValueError as e:
|
||||
raise ValidationError(_('Unable to retrieve configuration from "{url}". Error message: "{error}".').format(
|
||||
url=conf_url,
|
||||
error=str(e)
|
||||
))
|
||||
|
||||
if not provider_config.get("authorization_endpoint"):
|
||||
raise ValidationError(_('Incompatible SSO provider: "{error}".').format(
|
||||
error="authorization_endpoint not set"
|
||||
))
|
||||
|
||||
if not provider_config.get("userinfo_endpoint"):
|
||||
raise ValidationError(_('Incompatible SSO provider: "{error}".').format(
|
||||
error="userinfo_endpoint not set"
|
||||
))
|
||||
|
||||
if not provider_config.get("token_endpoint"):
|
||||
raise ValidationError(_('Incompatible SSO provider: "{error}".').format(
|
||||
error="token_endpoint not set"
|
||||
))
|
||||
|
||||
if "code" not in provider_config.get("response_types_supported", []):
|
||||
raise ValidationError(_('Incompatible SSO provider: "{error}".').format(
|
||||
error=f"provider supports response types {','.join(provider_config.get('response_types_supported', []))}, but we only support 'code'."
|
||||
))
|
||||
|
||||
if "query" not in provider_config.get("response_modes_supported", ["query", "fragment"]):
|
||||
raise ValidationError(_('Incompatible SSO provider: "{error}".').format(
|
||||
error=f"provider supports response modes {','.join(provider_config.get('response_modes_supported', []))}, but we only support 'query'."
|
||||
))
|
||||
|
||||
if "authorization_code" not in provider_config.get("grant_types_supported", ["authorization_code", "implicit"]):
|
||||
raise ValidationError(_('Incompatible SSO provider: "{error}".').format(
|
||||
error=f"provider supports grant types {','.join(provider_config.get('grant_types_supported', ''))}, but we only support 'authorization_code'."
|
||||
))
|
||||
|
||||
if "openid" not in config["scope"].split(" "):
|
||||
raise ValidationError(
|
||||
_('You are not requesting "{scope}".').format(
|
||||
scope="openid",
|
||||
))
|
||||
|
||||
for scope in config["scope"].split(" "):
|
||||
if scope not in provider_config.get("scopes_supported", []):
|
||||
raise ValidationError(_('You are requesting scope "{scope}" but provider only supports these: {scopes}.').format(
|
||||
scope=scope,
|
||||
scopes=", ".join(provider_config.get("scopes_supported", []))
|
||||
))
|
||||
|
||||
for k, v in config.items():
|
||||
if k.endswith('_field') and v:
|
||||
if v not in provider_config.get("claims_supported", []): # https://openid.net/specs/openid-connect-core-1_0.html#UserInfo
|
||||
raise ValidationError(_('You are requesting field "{field}" but provider only supports these: {fields}.').format(
|
||||
field=v,
|
||||
fields=", ".join(provider_config.get("claims_supported", []))
|
||||
))
|
||||
|
||||
config['provider_config'] = provider_config
|
||||
return config
|
||||
|
||||
|
||||
def oidc_authorize_url(provider, state, redirect_uri):
|
||||
endpoint = provider.configuration['provider_config']['authorization_endpoint']
|
||||
params = {
|
||||
# https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.1
|
||||
# https://openid.net/specs/openid-connect-core-1_0.html#AuthorizationEndpoint
|
||||
'response_type': 'code',
|
||||
'client_id': provider.configuration['client_id'],
|
||||
'scope': provider.configuration['scope'],
|
||||
'state': state,
|
||||
'redirect_uri': redirect_uri,
|
||||
}
|
||||
return endpoint + '?' + urlencode(params)
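A short usage sketch for the helper above, with a stand-in provider object; endpoint, client id, state and URLs are invented:

from types import SimpleNamespace

provider = SimpleNamespace(configuration={
    'provider_config': {'authorization_endpoint': 'https://idp.example.com/auth'},
    'client_id': 'pretix-demo',
    'scope': 'openid profile email',
})
oidc_authorize_url(provider, state='xyz123', redirect_uri='https://shop.example.com/return')
# -> 'https://idp.example.com/auth?response_type=code&client_id=pretix-demo&scope=openid+profile+email&state=xyz123&redirect_uri=https%3A%2F%2Fshop.example.com%2Freturn'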
|
||||
|
||||
|
||||
def oidc_validate_authorization(provider, code, redirect_uri):
    endpoint = provider.configuration['provider_config']['token_endpoint']
    params = {
        # https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.3
        # https://openid.net/specs/openid-connect-core-1_0.html#TokenEndpoint
        'grant_type': 'authorization_code',
        'code': code,
        'redirect_uri': redirect_uri,
    }
    try:
        resp = requests.post(
            endpoint,
            data=params,
            headers={
                'Accept': 'application/json',
            },
            auth=(provider.configuration['client_id'], provider.configuration['client_secret']),
        )
        resp.raise_for_status()
        data = resp.json()
    except RequestException:
        logger.exception('Could not retrieve authorization token')
        raise ValidationError(
            _('Login was not successful. Error message: "{error}".').format(
                error='could not reach login provider',
            )
        )

    if 'access_token' not in data:
        raise ValidationError(
            _('Login was not successful. Error message: "{error}".').format(
                error='access token missing',
            )
        )

    endpoint = provider.configuration['provider_config']['userinfo_endpoint']
    try:
        # https://openid.net/specs/openid-connect-core-1_0.html#UserInfo
        resp = requests.get(
            endpoint,
            headers={
                'Authorization': f'Bearer {data["access_token"]}'
            },
        )
        resp.raise_for_status()
        userinfo = resp.json()
    except RequestException:
        logger.exception('Could not retrieve user info')
        raise ValidationError(
            _('Login was not successful. Error message: "{error}".').format(
                error='could not fetch user info',
            )
        )

    if 'email_verified' in userinfo and not userinfo['email_verified']:
        # todo: how universal is this, do we need to make this configurable?
        raise ValidationError(_('The email address on this account is not yet verified. Please first confirm the '
                                'email address in your customer account.'))

    profile = {}
    for k, v in provider.configuration.items():
        if k.endswith('_field'):
            profile[k[:-6]] = userinfo.get(v)

    if not profile.get('uid'):
        raise ValidationError(
            _('Login was not successful. Error message: "{error}".').format(
                error='could not fetch user id',
            )
        )

    if not profile.get('email'):
        raise ValidationError(
            _('Login was not successful. Error message: "{error}".').format(
                error='could not fetch user email',
            )
        )

    return profile

def _hash_scheme(value):
    # As described in https://openid.net/specs/openid-connect-core-1_0.html#HybridIDToken
    digest = hashlib.sha256(value.encode()).digest()
    digest_truncated = digest[:(len(digest) // 2)]
    return base64.urlsafe_b64encode(digest_truncated).decode().rstrip("=")

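A small self-check (illustrative, not part of the diff): with SHA-256 the left-most half of the digest is 16 bytes, which always base64url-encodes to 22 characters once the padding is stripped, regardless of the input value.

# assuming _hash_scheme as defined above
assert len(_hash_scheme("dummy-access-token")) == 22  # value used for at_hash / c_hash claims
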
def customer_claims(customer, scope):
    scope = scope.split(' ')
    claims = {
        'sub': customer.identifier,
        'locale': customer.locale,
    }
    if 'profile' in scope:
        if customer.name:
            claims['name'] = customer.name
        if 'given_name' in customer.name_parts:
            claims['given_name'] = customer.name_parts['given_name']
        if 'family_name' in customer.name_parts:
            claims['family_name'] = customer.name_parts['family_name']
        if 'middle_name' in customer.name_parts:
            claims['middle_name'] = customer.name_parts['middle_name']
        if 'calling_name' in customer.name_parts:
            claims['nickname'] = customer.name_parts['calling_name']
    if 'email' in scope and customer.email:
        claims['email'] = customer.email
        claims['email_verified'] = customer.is_verified
    if 'phone' in scope and customer.phone:
        claims['phone_number'] = customer.phone.as_international
    return claims

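For illustration only (hypothetical customer, not part of the diff), a call with scope "openid profile email" would yield roughly:

# customer_claims(customer, 'openid profile email') ->
# {
#     'sub': '8WSAJCJ',
#     'locale': 'de',
#     'name': 'Ada Lovelace',
#     'given_name': 'Ada',
#     'family_name': 'Lovelace',
#     'email': 'ada@example.org',
#     'email_verified': True,
# }
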
def _get_or_create_server_keypair(organizer):
    if not organizer.settings.sso_server_signing_key_rsa256_private:
        privkey = generate_private_key(key_size=4096, public_exponent=65537)
        pubkey = privkey.public_key()
        organizer.settings.sso_server_signing_key_rsa256_private = privkey.private_bytes(
            Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
        ).decode()
        organizer.settings.sso_server_signing_key_rsa256_public = pubkey.public_bytes(
            Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
        ).decode()
    return organizer.settings.sso_server_signing_key_rsa256_private, organizer.settings.sso_server_signing_key_rsa256_public

def generate_id_token(customer, client, auth_time, nonce, scope, expires: datetime, scope_claims=False, with_code=None, with_access_token=None):
    payload = {
        'iss': build_absolute_uri(client.organizer, 'presale:organizer.index').rstrip('/'),
        'aud': client.client_id,
        'exp': int(expires.timestamp()),
        'iat': int(time.time()),
        'auth_time': auth_time,
        **customer_claims(customer, client.evaluated_scope(scope) if scope_claims else ''),
    }
    if nonce:
        payload['nonce'] = nonce
    if with_code:
        payload['c_hash'] = _hash_scheme(with_code)
    if with_access_token:
        payload['at_hash'] = _hash_scheme(with_access_token)
    privkey, pubkey = _get_or_create_server_keypair(client.organizer)
    return jwt.encode(
        payload,
        privkey,
        headers={
            "kid": hashlib.sha256(pubkey.encode()).hexdigest()[:16]
        },
        algorithm="RS256",
    )

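Sketch of the consumer side (not part of the diff): a relying party that has obtained the organizer's RSA public key in PEM form could verify the issued token with the same PyJWT library. Function name and arguments below are illustrative only.

import jwt  # PyJWT, the library used by generate_id_token()

def verify_id_token(id_token, public_key_pem, client_id, issuer):
    # Checks the RS256 signature as well as the aud/iss/exp claims; raises a jwt
    # exception if any of them do not match.
    return jwt.decode(
        id_token,
        public_key_pem,
        algorithms=["RS256"],
        audience=client_id,
        issuer=issuer,
    )
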
@@ -43,6 +43,7 @@ from pretix.base.i18n import (
    LazyCurrencyNumber, LazyDate, LazyExpiresDate, LazyNumber,
)
from pretix.base.models import Event
from pretix.base.reldate import RelativeDateWrapper
from pretix.base.settings import PERSON_NAME_SCHEMES
from pretix.base.signals import (
    register_html_mail_renderers, register_mail_placeholders,
@@ -299,7 +300,8 @@ def get_email_context(**kwargs):
        kwargs.setdefault("position_or_address", kwargs['position'])
    if 'order' in kwargs:
        try:
            kwargs['invoice_address'] = kwargs['order'].invoice_address
            if not kwargs.get('invoice_address'):
                kwargs['invoice_address'] = kwargs['order'].invoice_address
        except InvoiceAddress.DoesNotExist:
            kwargs['invoice_address'] = InvoiceAddress(order=kwargs['order'])
        finally:
@@ -469,14 +471,30 @@ def base_placeholders(sender, **kwargs):
                }
            ),
        ),
        SimpleFunctionalMailTextPlaceholder(
            'order_modification_deadline_date_and_time', ['order', 'event'],
            lambda order, event:
            date_format(order.modify_deadline.astimezone(event.timezone), 'SHORT_DATETIME_FORMAT')
            if order.modify_deadline
            else '',
            lambda event: date_format(
                event.settings.get(
                    'last_order_modification_date', as_type=RelativeDateWrapper
                ).datetime(event).astimezone(event.timezone),
                'SHORT_DATETIME_FORMAT'
            ) if event.settings.get('last_order_modification_date') else '',
        ),
        SimpleFunctionalMailTextPlaceholder(
            'event_location', ['event_or_subevent'], lambda event_or_subevent: str(event_or_subevent.location or ''),
            lambda event: str(event.location or ''),
        ),
        SimpleFunctionalMailTextPlaceholder(
            'event_admission_time', ['event_or_subevent'],
            lambda event_or_subevent: date_format(event_or_subevent.date_admission, 'TIME_FORMAT') if event_or_subevent.date_admission else '',
            lambda event: date_format(event.date_admission, 'TIME_FORMAT') if event.date_admission else '',
            lambda event_or_subevent:
            date_format(event_or_subevent.date_admission.astimezone(event_or_subevent.timezone), 'TIME_FORMAT')
            if event_or_subevent.date_admission
            else '',
            lambda event: date_format(event.date_admission.astimezone(event.timezone), 'TIME_FORMAT') if event.date_admission else '',
        ),
        SimpleFunctionalMailTextPlaceholder(
            'subevent', ['waiting_list_entry', 'event'],

@@ -23,6 +23,7 @@ from .answers import *  # noqa
from .dekodi import *  # noqa
from .events import *  # noqa
from .invoices import *  # noqa
from .items import *  # noqa
from .json import *  # noqa
from .mail import *  # noqa
from .orderlist import *  # noqa

src/pretix/base/exporters/items.py (new file)
@@ -0,0 +1,222 @@
#
# This file is part of pretix (Community Edition).
#
# Copyright (C) 2014-2020 Raphael Michel and contributors
# Copyright (C) 2020-2021 rami.io GmbH and contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation in version 3 of the License.
#
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
# this file, see <https://pretix.eu/about/en/license>.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
from django.db.models import Prefetch
from django.dispatch import receiver
from django.utils.formats import date_format
from django.utils.translation import gettext_lazy as _
from openpyxl.styles import Alignment
from openpyxl.utils import get_column_letter

from ...helpers.safe_openpyxl import SafeCell
from ..channels import get_all_sales_channels
from ..exporter import ListExporter
from ..models import ItemMetaValue
from ..signals import register_data_exporters


def _max(a1, a2):
    if a1 and a2:
        return max(a1, a2)
    return a1 or a2


def _min(a1, a2):
    if a1 and a2:
        return min(a1, a2)
    return a1 or a2


class ItemDataExporter(ListExporter):
    identifier = 'itemdata'
    verbose_name = _('Product data')

    def iterate_list(self, form_data):
        locales = self.event.settings.locales
        scs = get_all_sales_channels()
        header = [
            _("Product ID"),
            _("Variation ID"),
            _("Product category"),
            _("Internal name"),
        ]
        for l in locales:
            header.append(
                _("Item name") + f" ({l})"
            )
        for l in locales:
            header.append(
                _("Variation") + f" ({l})"
            )
        header += [
            _("Active"),
            _("Sales channels"),
            _("Default price"),
            _("Free price input"),
            _("Sales tax"),
            _("Is an admission ticket"),
            _("Generate tickets"),
            _("Waiting list"),
            _("Available from"),
            _("Available until"),
            _("This product can only be bought using a voucher."),
            _("This product will only be shown if a voucher matching the product is redeemed."),
            _("Buying this product requires approval"),
            _("Only sell this product as part of a bundle"),
            _("Allow product to be canceled or changed"),
            _("Minimum amount per order"),
            _("Maximum amount per order"),
            _("Requires special attention"),
            _("Original price"),
            _("This product is a gift card"),
            _("Require a valid membership"),
            _("Hide without a valid membership"),
        ]
        props = list(self.event.item_meta_properties.all())
        for p in props:
            header.append(p.name)

        if form_data["_format"] == "xlsx":
            row = []
            for h in header:
                c = SafeCell(self.__ws, value=h)
                c.alignment = Alignment(wrap_text=True, vertical='top')
                row.append(c)
        else:
            row = header

        yield row

        for i in self.event.items.prefetch_related(
            'variations',
            Prefetch(
                'meta_values',
                ItemMetaValue.objects.select_related('property'),
                to_attr='meta_values_cached'
            )
        ).select_related('category', 'tax_rule'):
            m = i.meta_data
            vars = list(i.variations.all())

            if vars:
                for v in vars:
                    row = [
                        i.pk,
                        v.pk,
                        str(i.category) if i.category else "",
                        i.internal_name or "",
                    ]
                    for l in locales:
                        row.append(i.name.localize(l))
                    for l in locales:
                        row.append(v.value.localize(l))
                    row += [
                        _("Yes") if i.active and v.active else "",
                        ", ".join([str(sn.verbose_name) for s, sn in scs.items() if s in i.sales_channels and s in v.sales_channels]),
                        v.default_price or i.default_price,
                        _("Yes") if i.free_price else "",
                        str(i.tax_rule) if i.tax_rule else "",
                        _("Yes") if i.admission else "",
                        _("Yes") if i.generate_tickets else (_("Default") if i.generate_tickets is None else ""),
                        _("Yes") if i.allow_waitinglist else "",
                        date_format(_max(i.available_from, v.available_from).astimezone(self.timezone),
                                    "SHORT_DATETIME_FORMAT") if i.available_from or v.available_from else "",
                        date_format(_min(i.available_until, v.available_until).astimezone(self.timezone),
                                    "SHORT_DATETIME_FORMAT") if i.available_until or v.available_until else "",
                        _("Yes") if i.require_voucher else "",
                        _("Yes") if i.hide_without_voucher or v.hide_without_voucher else "",
                        _("Yes") if i.require_approval or v.require_approval else "",
                        _("Yes") if i.require_bundling else "",
                        _("Yes") if i.allow_cancel else "",
                        i.min_per_order if i.min_per_order is not None else "",
                        i.max_per_order if i.max_per_order is not None else "",
                        _("Yes") if i.checkin_attention else "",
                        v.original_price or i.original_price or "",
                        _("Yes") if i.issue_giftcard else "",
                        _("Yes") if i.require_membership or v.require_membership else "",
                        _("Yes") if i.require_membership_hidden or v.require_membership_hidden else "",
                    ]
                    row += [
                        m.get(p.name, '') for p in props
                    ]
                    yield row

            else:
                row = [
                    i.pk,
                    "",
                    str(i.category) if i.category else "",
                    i.internal_name or "",
                ]
                for l in locales:
                    row.append(i.name.localize(l))
                for l in locales:
                    row.append("")
                row += [
                    _("Yes") if i.active else "",
                    ", ".join([str(sn.verbose_name) for s, sn in scs.items() if s in i.sales_channels]),
                    i.default_price,
                    _("Yes") if i.free_price else "",
                    str(i.tax_rule) if i.tax_rule else "",
                    _("Yes") if i.admission else "",
                    _("Yes") if i.generate_tickets else (_("Default") if i.generate_tickets is None else ""),
                    _("Yes") if i.allow_waitinglist else "",
                    date_format(i.available_from.astimezone(self.timezone),
                                "SHORT_DATETIME_FORMAT") if i.available_from else "",
                    date_format(i.available_until.astimezone(self.timezone),
                                "SHORT_DATETIME_FORMAT") if i.available_until else "",
                    _("Yes") if i.require_voucher else "",
                    _("Yes") if i.hide_without_voucher else "",
                    _("Yes") if i.require_approval else "",
                    _("Yes") if i.require_bundling else "",
                    _("Yes") if i.allow_cancel else "",
                    i.min_per_order if i.min_per_order is not None else "",
                    i.max_per_order if i.max_per_order is not None else "",
                    _("Yes") if i.checkin_attention else "",
                    i.original_price or "",
                    _("Yes") if i.issue_giftcard else "",
                    _("Yes") if i.require_membership else "",
                    _("Yes") if i.require_membership_hidden else "",
                ]

                row += [
                    m.get(p.name, '') for p in props
                ]
                yield row

    def get_filename(self):
        return '{}_products'.format(self.events.first().organizer.slug)

    def prepare_xlsx_sheet(self, ws):
        self.__ws = ws
        ws.freeze_panes = 'A1'
        ws.column_dimensions['C'].width = 25
        ws.column_dimensions['D'].width = 25
        for i in range(len(self.event.settings.locales)):
            ws.column_dimensions[get_column_letter(5 + 2 * i + 0)].width = 25
            ws.column_dimensions[get_column_letter(5 + 2 * i + 1)].width = 25
        ws.column_dimensions[get_column_letter(5 + 2 * len(self.event.settings.locales) + 1)].width = 25
        ws.row_dimensions[1].height = 40


@receiver(register_data_exporters, dispatch_uid="exporter_itemdata")
def register_itemdata_exporter(sender, **kwargs):
    return ItemDataExporter
@@ -610,7 +610,10 @@ class OrderListExporter(MultiSheetListExporter):
        for k, label, w in name_scheme['fields']:
            headers.append(_('Invoice address name') + ': ' + str(label))
        headers += [
            _('Address'), _('ZIP code'), _('City'), _('Country'), pgettext('address', 'State'), _('VAT ID'),
            _('Invoice address street'), _('Invoice address ZIP code'), _('Invoice address city'),
            _('Invoice address country'),
            pgettext('address', 'Invoice address state'),
            _('VAT ID'),
        ]
        headers += [
            _('Sales channel'), _('Order locale'),

@@ -51,6 +51,7 @@ from django.core.validators import (
)
from django.db.models import QuerySet
from django.forms import Select, widgets
from django.forms.widgets import FILE_INPUT_CONTRADICTION
from django.utils.formats import date_format
from django.utils.html import escape
from django.utils.safestring import mark_safe
@@ -253,7 +254,7 @@ class NamePartsFormField(forms.MultiValueField):
        if self.require_all_fields and not all(v for v in value):
            raise forms.ValidationError(self.error_messages['incomplete'], code='required')

        if sum(len(v) for v in value if v) > 250:
        if sum(len(v) for v in value.values() if v) > 250:
            raise forms.ValidationError(_('Please enter a shorter name.'), code='max_length')

        return value
@@ -429,7 +430,7 @@ class PortraitImageWidget(UploadedFileWidget):

    def value_from_datadict(self, data, files, name):
        d = super().value_from_datadict(data, files, name)
        if d is not None and d is not False:
        if d is not None and d is not False and d is not FILE_INPUT_CONTRADICTION:
            d._cropdata = json.loads(data.get(name + '_cropdata', '{}') or '{}')
        return d

@@ -28,6 +28,7 @@ from typing import Tuple
import bleach
import vat_moss.exchange_rates
from django.contrib.staticfiles import finders
from django.db.models import Sum
from django.dispatch import receiver
from django.utils.formats import date_format, localize
from django.utils.translation import (
@@ -47,7 +48,7 @@ from reportlab.platypus import (
)

from pretix.base.decimal import round_decimal
from pretix.base.models import Event, Invoice, Order
from pretix.base.models import Event, Invoice, Order, OrderPayment
from pretix.base.signals import register_invoice_renderers
from pretix.base.templatetags.money import money_filter
from pretix.helpers.reportlab import ThumbnailingImageReader
@@ -532,6 +533,7 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
        tstyledata = [
            ('ALIGN', (1, 0), (-1, -1), 'RIGHT'),
            ('VALIGN', (0, 0), (-1, -1), 'TOP'),
            ('FONTNAME', (0, 0), (-1, -1), self.font_regular),
            ('FONTNAME', (0, 0), (-1, 0), self.font_bold),
            ('FONTNAME', (0, -1), (-1, -1), self.font_bold),
            ('LEFTPADDING', (0, 0), (0, -1), 0),
@@ -589,15 +591,33 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
        ])
        colwidths = [a * doc.width for a in (.65, .05, .30)]

        if self.invoice.event.settings.invoice_show_payments and not self.invoice.is_cancellation and \
                self.invoice.order.status == Order.STATUS_PENDING:
            pending_sum = self.invoice.order.pending_sum
            if pending_sum != total:
                tdata.append([pgettext('invoice', 'Received payments')] + (['', '', ''] if has_taxes else ['']) + [
                    money_filter(pending_sum - total, self.invoice.event.currency)
        if self.invoice.event.settings.invoice_show_payments and not self.invoice.is_cancellation:
            if self.invoice.order.status == Order.STATUS_PENDING:
                pending_sum = self.invoice.order.pending_sum
                if pending_sum != total:
                    tdata.append([pgettext('invoice', 'Received payments')] + (['', '', ''] if has_taxes else ['']) + [
                        money_filter(pending_sum - total, self.invoice.event.currency)
                    ])
                    tdata.append([pgettext('invoice', 'Outstanding payments')] + (['', '', ''] if has_taxes else ['']) + [
                        money_filter(pending_sum, self.invoice.event.currency)
                    ])
                    tstyledata += [
                        ('FONTNAME', (0, len(tdata) - 3), (-1, len(tdata) - 3), self.font_bold),
                    ]
            elif self.invoice.order.payments.filter(
                    state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED, OrderPayment.PAYMENT_STATE_REFUNDED), provider='giftcard'
            ).exists():
                giftcard_sum = self.invoice.order.payments.filter(
                    state__in=(OrderPayment.PAYMENT_STATE_CONFIRMED, OrderPayment.PAYMENT_STATE_REFUNDED),
                    provider='giftcard'
                ).aggregate(
                    s=Sum('amount')
                )['s'] or Decimal('0.00')
                tdata.append([pgettext('invoice', 'Paid by gift card')] + (['', '', ''] if has_taxes else ['']) + [
                    money_filter(giftcard_sum, self.invoice.event.currency)
                ])
                tdata.append([pgettext('invoice', 'Outstanding payments')] + (['', '', ''] if has_taxes else ['']) + [
                    money_filter(pending_sum, self.invoice.event.currency)
                tdata.append([pgettext('invoice', 'Remaining amount')] + (['', '', ''] if has_taxes else ['']) + [
                    money_filter(total - giftcard_sum, self.invoice.event.currency)
                ])
                tstyledata += [
                    ('FONTNAME', (0, len(tdata) - 3), (-1, len(tdata) - 3), self.font_bold),

src/pretix/base/migrations/0218_checkinlist_addon_match.py (new file)
@@ -0,0 +1,18 @@
# Generated by Django 3.2.2 on 2022-06-29 17:37

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('pretixbase', '0217_eventfooterlink_organizerfooterlink'),
    ]

    operations = [
        migrations.AddField(
            model_name='checkinlist',
            name='addon_match',
            field=models.BooleanField(default=False),
        ),
    ]
src/pretix/base/migrations/0219_auto_20220706_0913.py (new file)
@@ -0,0 +1,38 @@
# Generated by Django 3.2.12 on 2022-07-06 09:13

import django.db.models.deletion
import i18nfield.fields
from django.db import migrations, models

import pretix.base.models.base


class Migration(migrations.Migration):

    dependencies = [
        ('pretixbase', '0218_checkinlist_addon_match'),
    ]

    operations = [
        migrations.CreateModel(
            name='CustomerSSOProvider',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('name', i18nfield.fields.I18nCharField(max_length=200)),
                ('is_active', models.BooleanField(default=True)),
                ('button_label', i18nfield.fields.I18nCharField(max_length=200)),
                ('method', models.CharField(max_length=190)),
                ('configuration', models.JSONField()),
                ('organizer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sso_providers', to='pretixbase.organizer')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model, pretix.base.models.base.LoggingMixin),
        ),
        migrations.AddField(
            model_name='customer',
            name='provider',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='customers', to='pretixbase.customerssoprovider'),
        ),
    ]
src/pretix/base/migrations/0220_auto_20220811_1002.py (new file)
@@ -0,0 +1,68 @@
# Generated by Django 3.2.12 on 2022-08-11 10:02

import django.db.models.deletion
from django.db import migrations, models

import pretix.base.models.base
import pretix.base.models.customers
import pretix.base.models.fields


class Migration(migrations.Migration):

    dependencies = [
        ('pretixbase', '0219_auto_20220706_0913'),
    ]

    operations = [
        migrations.CreateModel(
            name='CustomerSSOClient',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=255)),
                ('is_active', models.BooleanField(default=True)),
                ('client_id', models.CharField(db_index=True, default=pretix.base.models.customers.generate_client_id, max_length=100, unique=True)),
                ('client_secret', models.CharField(max_length=255)),
                ('client_type', models.CharField(default='confidential', max_length=32)),
                ('authorization_grant_type', models.CharField(default='authorization-code', max_length=32)),
                ('redirect_uris', models.TextField()),
                ('allowed_scopes', pretix.base.models.fields.MultiStringField(default=['openid', 'profile', 'email', 'phone'])),
                ('organizer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sso_clients', to='pretixbase.organizer')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model, pretix.base.models.base.LoggingMixin),
        ),
        migrations.AlterField(
            model_name='customer',
            name='provider',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='customers', to='pretixbase.customerssoprovider'),
        ),
        migrations.CreateModel(
            name='CustomerSSOGrant',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('code', models.CharField(max_length=255, unique=True)),
                ('nonce', models.CharField(max_length=255, null=True)),
                ('auth_time', models.IntegerField()),
                ('expires', models.DateTimeField()),
                ('redirect_uri', models.TextField()),
                ('scope', models.TextField()),
                ('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='grants', to='pretixbase.customerssoclient')),
                ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sso_grants', to='pretixbase.customer')),
            ],
        ),
        migrations.CreateModel(
            name='CustomerSSOAccessToken',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('from_code', models.CharField(max_length=255, null=True)),
                ('token', models.CharField(max_length=255, unique=True)),
                ('expires', models.DateTimeField()),
                ('scope', models.TextField()),
                ('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='access_tokens', to='pretixbase.customerssoclient')),
                ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sso_access_tokens', to='pretixbase.customer')),
            ],
        ),
    ]
@@ -0,0 +1,28 @@
# Generated by Django 3.2.4 on 2021-12-01 11:55

from django.db import migrations
from django.db.models import Count


def change_unique_identifiers(apps, schema_editor):
    # We cannot really know if a position was bundled or an add-on, but we can at least guess
    Question = apps.get_model("pretixbase", "Question")

    for r in Question.objects.values('event', 'identifier').order_by().annotate(c=Count('*')).filter(c__gt=1):
        qs = Question.objects.filter(identifier=r['identifier'], event_id=r['event'])
        for i, q in enumerate(qs[1:]):
            q.identifier += f'_{i + 2}'
            q.save(update_fields=['identifier'])


class Migration(migrations.Migration):
    dependencies = [
        ('pretixbase', '0220_auto_20220811_1002'),
    ]

    operations = [
        migrations.RunPython(
            change_unique_identifiers,
            migrations.RunPython.noop,
        ),
    ]
@@ -0,0 +1,17 @@
# Generated by Django 3.2.4 on 2021-12-01 12:04

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pretixbase', '0221_clean_nonunique_question_identifiers'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='question',
            unique_together={('event', 'identifier')},
        ),
    ]
@@ -34,7 +34,9 @@

import binascii
import json
import operator
from datetime import timedelta
from functools import reduce
from urllib.parse import urlparse

import webauthn
@@ -491,11 +493,14 @@ class User(AbstractBaseUser, PermissionsMixin, LoggingMixin):
        if request and self.has_active_staff_session(request.session.session_key):
            return Event.objects.all()

        kwargs = {permission: True}
        if isinstance(permission, (tuple, list)):
            q = reduce(operator.or_, [Q(**{p: True}) for p in permission])
        else:
            q = Q(**{permission: True})

        return Event.objects.filter(
            Q(organizer_id__in=self.teams.filter(all_events=True, **kwargs).values_list('organizer', flat=True))
            | Q(id__in=self.teams.filter(**kwargs).values_list('limit_events__id', flat=True))
            Q(organizer_id__in=self.teams.filter(q, all_events=True).values_list('organizer', flat=True))
            | Q(id__in=self.teams.filter(q).values_list('limit_events__id', flat=True))
        )

    @scopes_disabled()

@@ -56,6 +56,12 @@ class CheckinList(LoggedModel):
        default=False,
        help_text=_('With this option, people will be able to check in even if the '
                    'order has not been paid.'))
    addon_match = models.BooleanField(
        verbose_name=_('Allow checking in add-on tickets by scanning the main ticket'),
        default=False,
        help_text=_('A scan will only be possible if the check-in list is configured such that there is always exactly '
                    'one matching add-on ticket. Ambiguous scans will be rejected.')
    )
    gates = models.ManyToManyField(
        'Gate', verbose_name=_("Gates"), blank=True,
        help_text=_("Does not have any effect for the validation of tickets, only for the automatic configuration of "
@@ -258,6 +264,7 @@ class Checkin(models.Model):
    REASON_REVOKED = 'revoked'
    REASON_INCOMPLETE = 'incomplete'
    REASON_ALREADY_REDEEMED = 'already_redeemed'
    REASON_AMBIGUOUS = 'ambiguous'
    REASON_ERROR = 'error'
    REASONS = (
        (REASON_CANCELED, _('Order canceled')),
@@ -268,6 +275,7 @@ class Checkin(models.Model):
        (REASON_INCOMPLETE, _('Information required')),
        (REASON_ALREADY_REDEEMED, _('Ticket already used')),
        (REASON_PRODUCT, _('Ticket type not allowed here')),
        (REASON_AMBIGUOUS, _('Ticket code is ambiguous on list')),
        (REASON_ERROR, _('Server error')),
    )

@@ -24,27 +24,59 @@ from django.conf import settings
from django.contrib.auth.hashers import (
    check_password, is_password_usable, make_password,
)
from django.core.validators import RegexValidator
from django.core.validators import RegexValidator, URLValidator
from django.db import models
from django.db.models import F, Q
from django.utils.crypto import get_random_string, salted_hmac
from django.utils.translation import gettext_lazy as _, pgettext_lazy
from django_scopes import ScopedManager, scopes_disabled
from i18nfield.fields import I18nCharField
from phonenumber_field.modelfields import PhoneNumberField

from pretix.base.banlist import banned
from pretix.base.models.base import LoggedModel
from pretix.base.models.fields import MultiStringField
from pretix.base.models.organizer import Organizer
from pretix.base.settings import PERSON_NAME_SCHEMES
from pretix.helpers.countries import FastCountryField


class CustomerSSOProvider(LoggedModel):
    METHOD_OIDC = 'oidc'
    METHODS = (
        (METHOD_OIDC, 'OpenID Connect'),
    )

    id = models.BigAutoField(primary_key=True)
    organizer = models.ForeignKey(Organizer, related_name='sso_providers', on_delete=models.CASCADE)
    name = I18nCharField(
        max_length=200,
        verbose_name=_("Provider name"),
    )
    is_active = models.BooleanField(default=True, verbose_name=_('Active'))
    button_label = I18nCharField(
        max_length=200,
        verbose_name=_("Login button label"),
    )
    method = models.CharField(
        max_length=190,
        verbose_name=_("Single-sign-on method"),
        null=False, blank=False,
        choices=METHODS,
    )
    configuration = models.JSONField()

    def allow_delete(self):
        return not self.customers.exists()

class Customer(LoggedModel):
    """
    Represents a registered customer of an organizer.
    """
    id = models.BigAutoField(primary_key=True)
    organizer = models.ForeignKey(Organizer, related_name='customers', on_delete=models.CASCADE)
    provider = models.ForeignKey(CustomerSSOProvider, related_name='customers', on_delete=models.PROTECT, null=True, blank=True)
    identifier = models.CharField(
        max_length=190,
        db_index=True,
@@ -216,6 +248,27 @@ class Customer(LoggedModel):
            testmode=testmode,
        )

    def send_activation_mail(self):
        from pretix.base.services.mail import mail
        from pretix.multidomain.urlreverse import build_absolute_uri
        from pretix.presale.forms.customer import TokenGenerator

        ctx = self.get_email_context()
        token = TokenGenerator().make_token(self)
        ctx['url'] = build_absolute_uri(
            self.organizer,
            'presale:organizer.customer.activate'
        ) + '?id=' + self.identifier + '&token=' + token
        mail(
            self.email,
            _('Activate your account at {organizer}').format(organizer=self.organizer.name),
            self.organizer.settings.mail_text_customer_registration,
            ctx,
            locale=self.locale,
            customer=self,
            organizer=self.organizer,
        )


class AttendeeProfile(models.Model):
    customer = models.ForeignKey(
@@ -296,3 +349,134 @@ class AttendeeProfile(models.Model):
            parts.append(f'{a["field_label"]}: {val}')

        return '\n'.join([str(p).strip() for p in parts if p and str(p).strip()])

def generate_client_id():
    return get_random_string(40)


def generate_client_secret():
    return get_random_string(40)


class CustomerSSOClient(LoggedModel):
    CLIENT_CONFIDENTIAL = "confidential"
    CLIENT_PUBLIC = "public"
    CLIENT_TYPES = (
        (CLIENT_CONFIDENTIAL, pgettext_lazy("openidconnect", "Confidential")),
        (CLIENT_PUBLIC, pgettext_lazy("openidconnect", "Public")),
    )

    GRANT_AUTHORIZATION_CODE = "authorization-code"
    GRANT_IMPLICIT = "implicit"
    GRANT_TYPES = (
        (GRANT_AUTHORIZATION_CODE, pgettext_lazy("openidconnect", "Authorization code")),
        (GRANT_IMPLICIT, pgettext_lazy("openidconnect", "Implicit")),
    )

    SCOPE_CHOICES = (
        ('openid', _('OpenID Connect access (required)')),
        ('profile', _('Profile data (name, addresses)')),
        ('email', _('E-mail address')),
        ('phone', _('Phone number')),
    )

    id = models.BigAutoField(primary_key=True)
    organizer = models.ForeignKey(Organizer, related_name='sso_clients', on_delete=models.CASCADE)

    name = models.CharField(verbose_name=_("Application name"), max_length=255, blank=False)
    is_active = models.BooleanField(default=True, verbose_name=_('Active'))

    client_id = models.CharField(
        verbose_name=_("Client ID"),
        max_length=100, unique=True, default=generate_client_id, db_index=True
    )
    client_secret = models.CharField(
        max_length=255, blank=False,
    )

    client_type = models.CharField(
        max_length=32, choices=CLIENT_TYPES, verbose_name=_("Client type"), default=CLIENT_CONFIDENTIAL,
    )
    authorization_grant_type = models.CharField(
        max_length=32, choices=GRANT_TYPES, verbose_name=_("Grant type"), default=GRANT_AUTHORIZATION_CODE,
    )
    redirect_uris = models.TextField(
        blank=False,
        verbose_name=_("Redirection URIs"),
        help_text=_("Allowed URIs list, space separated")
    )
    allowed_scopes = MultiStringField(
        default=['openid', 'profile', 'email', 'phone'],
        delimiter=" ",
        blank=True,
        verbose_name=_('Allowed access scopes'),
        help_text=_('Separate multiple values with spaces'),
    )

    def is_usable(self):
        return self.is_active

    def allow_redirect_uri(self, redirect_uri):
        return self.redirect_uris and any(r.strip() == redirect_uri for r in self.redirect_uris.split(' '))

    def allow_delete(self):
        return True

    def evaluated_scope(self, scope):
        scope = set(scope.split(' '))
        allowed_scopes = set(self.allowed_scopes)
        return ' '.join(scope & allowed_scopes)

    def clean(self):
        redirect_uris = self.redirect_uris.strip().split()

        if redirect_uris:
            validator = URLValidator()
            for uri in redirect_uris:
                validator(uri)

    def set_client_secret(self):
        secret = get_random_string(64)
        self.client_secret = make_password(secret)
        return secret

    def check_client_secret(self, raw_secret):
        """
        Return a boolean of whether the raw_secret was correct. Handles
        hashing formats behind the scenes.
        """
        def setter(raw_secret):
            self.client_secret = make_password(raw_secret)
            self.save(update_fields=["client_secret"])
        return check_password(raw_secret, self.client_secret, setter)


class CustomerSSOGrant(models.Model):
    id = models.BigAutoField(primary_key=True)
    client = models.ForeignKey(
        CustomerSSOClient, on_delete=models.CASCADE, related_name="grants"
    )
    customer = models.ForeignKey(
        Customer, on_delete=models.CASCADE, related_name="sso_grants"
    )
    code = models.CharField(max_length=255, unique=True)
    nonce = models.CharField(max_length=255, null=True, blank=True)
    auth_time = models.IntegerField()
    expires = models.DateTimeField()
    redirect_uri = models.TextField()
    scope = models.TextField(blank=True)


class CustomerSSOAccessToken(models.Model):
    id = models.BigAutoField(primary_key=True)
    client = models.ForeignKey(
        CustomerSSOClient, on_delete=models.CASCADE, related_name="access_tokens"
    )
    customer = models.ForeignKey(
        Customer, on_delete=models.CASCADE, related_name="sso_access_tokens"
    )
    from_code = models.CharField(max_length=255, null=True, blank=True)
    token = models.CharField(max_length=255, unique=True)
    expires = models.DateTimeField()
    scope = models.TextField(blank=True)

|
||||
:param request: Ignored, for compatibility with User model
|
||||
:return: Iterable of Events
|
||||
"""
|
||||
if permission in self.permission_set():
|
||||
if (
|
||||
isinstance(permission, (list, tuple)) and any(p in self.permission_set() for p in permission)
|
||||
) or (isinstance(permission, str) and permission in self.permission_set()):
|
||||
return self.get_events_with_any_permission()
|
||||
else:
|
||||
return self.organizer.events.none()
|
||||
|
||||
@@ -327,7 +327,7 @@ class Discount(LoggedModel):
|
||||
candidates = []
|
||||
cardinality = None
|
||||
for se, l in subevent_to_idx.items():
|
||||
l = [ll for ll in l if ll not in current_group]
|
||||
l = [ll for ll in l if ll in initial_candidates and ll not in current_group]
|
||||
if cardinality and len(l) != cardinality:
|
||||
continue
|
||||
if se not in {positions[idx][1] for idx in current_group}:
|
||||
|
||||
@@ -69,6 +69,7 @@ from pretix.base.reldate import RelativeDateWrapper
from pretix.base.validators import EventSlugBanlistValidator
from pretix.helpers.database import GroupConcat
from pretix.helpers.daterange import daterange
from pretix.helpers.hierarkey import clean_filename
from pretix.helpers.json import safe_string
from pretix.helpers.thumb import get_thumbnail

@@ -122,6 +123,16 @@ class EventMixin:
            ("SHORT_" if short else "") + ("DATETIME_FORMAT" if self.settings.show_times and show_times else "DATE_FORMAT")
        )

    def get_weekday_from_display(self, tz=None, short=False) -> str:
        """
        Returns a formatted string containing the weekday of the start date of the event with respect
        to the current locale.
        """
        tz = tz or self.timezone
        return _date(
            self.date_from.astimezone(tz), ("D" if short else "l")
        )

    def get_time_from_display(self, tz=None) -> str:
        """
        Returns a formatted string containing the start time of the event, ignoring
@@ -146,6 +157,18 @@ class EventMixin:
            ("SHORT_" if short else "") + ("DATETIME_FORMAT" if self.settings.show_times and show_times else "DATE_FORMAT")
        )

    def get_weekday_to_display(self, tz=None, short=False) -> str:
        """
        Returns a formatted string containing the weekday of the end date of the event with respect
        to the current locale.
        """
        tz = tz or self.timezone
        if not self.settings.show_date_to or not self.date_to:
            return ""
        return _date(
            self.date_to.astimezone(tz), ("D" if short else "l")
        )

    def get_date_range_display(self, tz=None, force_show_end=False, as_html=False) -> str:
        """
        Returns a formatted string containing the start date and the end date
@@ -608,11 +631,14 @@ class Event(EventMixin, LoggedModel):
        return super().presale_has_ended

    def delete_all_orders(self, really=False):
        from .orders import OrderFee, OrderPayment, OrderPosition, OrderRefund
        from .orders import (
            OrderFee, OrderPayment, OrderPosition, OrderRefund, Transaction,
        )

        if not really:
            raise TypeError("Pass really=True as a parameter.")

        Transaction.objects.filter(order__event=self).delete()
        OrderPosition.all.filter(order__event=self, addon_to__isnull=False).delete()
        OrderPosition.all.filter(order__event=self).delete()
        OrderFee.objects.filter(order__event=self).delete()
@@ -914,11 +940,13 @@ class Event(EventMixin, LoggedModel):
            s.object = self
            s.pk = None
            if s.value.startswith('file://'):
                fi = default_storage.open(s.value[7:], 'rb')
                fi = default_storage.open(s.value[len('file://'):], 'rb')
                nonce = get_random_string(length=8)
                fname_base = clean_filename(os.path.basename(s.value))

                # TODO: make sure pub is always correct
                fname = 'pub/%s/%s/%s.%s.%s' % (
                    self.organizer.slug, self.slug, s.key, nonce, s.value.split('.')[-1]
                    self.organizer.slug, self.slug, fname_base, nonce, s.value.split('.')[-1]
                )
                newname = default_storage.save(fname, fi)
                s.value = 'file://' + newname
@@ -1445,7 +1473,10 @@ class SubEvent(EventMixin, LoggedModel):
    @property
    def meta_data(self):
        data = self.event.meta_data
        data.update({v.property.name: v.value for v in self.meta_values.select_related('property').all()})
        if hasattr(self, 'meta_values_cached'):
            data.update({v.property.name: v.value for v in self.meta_values_cached})
        else:
            data.update({v.property.name: v.value for v in self.meta_values.select_related('property').all()})
        return data

    @property

@@ -33,7 +33,8 @@ class MultiStringField(TextField):
        'delimiter_found': _('No value can contain the delimiter character.')
    }

    def __init__(self, verbose_name=None, name=None, **kwargs):
    def __init__(self, verbose_name=None, name=None, delimiter=DELIMITER, **kwargs):
        self.delimiter = delimiter
        super().__init__(verbose_name, name, **kwargs)

    def deconstruct(self):
@@ -44,13 +45,13 @@ class MultiStringField(TextField):
        if isinstance(value, (list, tuple)):
            return value
        elif value:
            return [v for v in value.split(DELIMITER) if v]
            return [v for v in value.split(self.delimiter) if v]
        else:
            return []

    def get_prep_value(self, value):
        if isinstance(value, (list, tuple)):
            return DELIMITER + DELIMITER.join(value) + DELIMITER
            return self.delimiter + self.delimiter.join(value) + self.delimiter
        elif value is None:
            if self.null:
                return None
@@ -63,14 +64,14 @@ class MultiStringField(TextField):

    def from_db_value(self, value, expression, connection):
        if value:
            return [v for v in value.split(DELIMITER) if v]
            return [v for v in value.split(self.delimiter) if v]
        else:
            return []

    def validate(self, value, model_instance):
        super().validate(value, model_instance)
        for l in value:
            if DELIMITER in l:
            if self.delimiter in l:
                raise exceptions.ValidationError(
                    self.error_messages['delimiter_found'],
                    code='delimiter_found',
@@ -78,9 +79,9 @@ class MultiStringField(TextField):

    def get_lookup(self, lookup_name):
        if lookup_name == 'contains':
            return MultiStringContains
            return make_multistring_contains_lookup(self.delimiter)
        elif lookup_name == 'icontains':
            return MultiStringIContains
            return make_multistring_icontains_lookup(self.delimiter)
        elif lookup_name == 'isnull':
            return builtin_lookups.IsNull
        raise NotImplementedError(
@@ -88,18 +89,22 @@ class MultiStringField(TextField):
        )


class MultiStringContains(builtin_lookups.Contains):
    def process_rhs(self, qn, connection):
        sql, params = super().process_rhs(qn, connection)
        params[0] = "%" + DELIMITER + params[0][1:-1] + DELIMITER + "%"
        return sql, params
def make_multistring_contains_lookup(delimiter):
    class Cls(builtin_lookups.Contains):
        def process_rhs(self, qn, connection):
            sql, params = super().process_rhs(qn, connection)
            params[0] = "%" + delimiter + params[0][1:-1] + delimiter + "%"
            return sql, params
    return Cls


class MultiStringIContains(builtin_lookups.IContains):
    def process_rhs(self, qn, connection):
        sql, params = super().process_rhs(qn, connection)
        params[0] = "%" + DELIMITER + params[0][1:-1] + DELIMITER + "%"
        return sql, params
def make_multistring_icontains_lookup(delimiter):
    class Cls(builtin_lookups.IContains):
        def process_rhs(self, qn, connection):
            sql, params = super().process_rhs(qn, connection)
            params[0] = "%" + delimiter + params[0][1:-1] + delimiter + "%"
            return sql, params
    return Cls


class MultiStringSerializer(serializers.Field):

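In other words (illustrative sketch, not part of the diff), the delimiter-aware storage and the generated lookup classes behave roughly like this:

# Hypothetical, standalone illustration of the field behaviour above:
field = MultiStringField(delimiter=" ")
assert field.get_prep_value(['openid', 'email']) == " openid email "
# A filter such as .filter(allowed_scopes__contains='email') is rewritten by the generated
# Contains lookup so the search term is wrapped in delimiters, i.e. SQL equivalent to
# LIKE '% email %', so only whole entries match, never substrings of an entry.
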
@@ -600,10 +600,14 @@ class Item(LoggedModel):
                                     invoice_address=invoice_address,
                                     base_price_is='gross',
                                     currency=currency)
            compare_price = self.tax_rule.tax(b.designated_price * b.count,
                                              override_tax_rate=override_tax_rate,
                                              invoice_address=invoice_address,
                                              currency=currency)
            if not self.tax_rule:
                compare_price = TaxedPrice(gross=b.designated_price * b.count, net=b.designated_price * b.count,
                                           tax=Decimal('0.00'), rate=Decimal('0.00'), name='')
            else:
                compare_price = self.tax_rule.tax(b.designated_price * b.count,
                                                  override_tax_rate=override_tax_rate,
                                                  invoice_address=invoice_address,
                                                  currency=currency)
            t.net += bprice.net - compare_price.net
            t.tax += bprice.tax - compare_price.tax
            t.name = "MIXED!"
@@ -1325,6 +1329,7 @@ class Question(LoggedModel):
        verbose_name = _("Question")
        verbose_name_plural = _("Questions")
        ordering = ('position', 'id')
        unique_together = (('event', 'identifier'),)

    def __str__(self):
        return str(self.question)
@@ -1340,7 +1345,7 @@ class Question(LoggedModel):
    @staticmethod
    def _clean_identifier(event, code, instance=None):
        qs = Question.objects.filter(event=event, identifier__iexact=code)
        if instance:
        if instance and instance.pk:
            qs = qs.exclude(pk=instance.pk)
        if qs.exists():
            raise ValidationError(_('This identifier is already used for a different question.'))

@@ -268,7 +268,10 @@ class Order(LockModel, LoggedModel):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if 'require_approval' not in self.get_deferred_fields() and 'status' not in self.get_deferred_fields():
            self.__initial_status_paid_or_pending = self.status in (Order.STATUS_PENDING, Order.STATUS_PAID) and not self.require_approval
            self._transaction_key_reset()

    def _transaction_key_reset(self):
        self.__initial_status_paid_or_pending = self.status in (Order.STATUS_PENDING, Order.STATUS_PAID) and not self.require_approval

    def gracefully_delete(self, user=None, auth=None):
        from . import GiftCard, GiftCardTransaction, Membership, Voucher
@@ -746,6 +749,19 @@ class Order(LockModel, LoggedModel):
            length += 1
            iteration = 0

    @property
    def modify_deadline(self):
        modify_deadline = self.event.settings.get('last_order_modification_date', as_type=RelativeDateWrapper)
        if self.event.has_subevents and modify_deadline:
            dates = [
                modify_deadline.datetime(se)
                for se in self.event.subevents.filter(id__in=self.positions.values_list('subevent', flat=True))
            ]
            return min(dates) if dates else None
        elif modify_deadline:
            return modify_deadline.datetime(self.event)
        return None

    @property
    def can_modify_answers(self) -> bool:
        """
@@ -758,16 +774,7 @@ class Order(LockModel, LoggedModel):
        if self.status not in (Order.STATUS_PENDING, Order.STATUS_PAID, Order.STATUS_EXPIRED):
            return False

        modify_deadline = self.event.settings.get('last_order_modification_date', as_type=RelativeDateWrapper)
        if self.event.has_subevents and modify_deadline:
            dates = [
                modify_deadline.datetime(se)
                for se in self.event.subevents.filter(id__in=self.positions.values_list('subevent', flat=True))
            ]
            modify_deadline = min(dates) if dates else None
        elif modify_deadline:
            modify_deadline = modify_deadline.datetime(self.event)

        modify_deadline = self.modify_deadline
        if modify_deadline is not None and now() > modify_deadline:
            return False

@@ -843,7 +850,7 @@ class Order(LockModel, LoggedModel):
                if terms:
                    term_last = min(terms)
                else:
                    term_last = None
                    return None
            else:
                term_last = term_last.datetime(self.event).date()
        term_last = make_aware(datetime.combine(
@@ -1052,12 +1059,14 @@ class Order(LockModel, LoggedModel):
            if p.canceled and not _backfill_before_cancellation:
                continue
            target_transaction_count[Transaction.key(p)] += 1
            p._transaction_key_reset()

        fees = self.fees.all() if fees is None else fees
        for f in fees:
            if f.canceled and not _backfill_before_cancellation:
                continue
            target_transaction_count[Transaction.key(f)] += 1
            f._transaction_key_reset()

        keys = set(target_transaction_count.keys()) | set(current_transaction_count.keys())
        create = []
@@ -1084,6 +1093,7 @@ class Order(LockModel, LoggedModel):
        create.sort(key=lambda t: (0 if t.count < 0 else 1, t.positionid or 0))
        if save:
            Transaction.objects.bulk_create(create)
            self._transaction_key_reset()
        _transactions_mark_order_clean(self.pk)
        return create

@@ -1588,7 +1598,7 @@ class OrderPayment(models.Model):
        if status_change:
            self.order.create_transactions()

    def fail(self, info=None, user=None, auth=None):
    def fail(self, info=None, user=None, auth=None, log_data=None):
        """
        Marks the order as failed and sets info to ``info``, but only if the order is in ``created`` or ``pending``
        state. This is equivalent to setting ``state`` to ``OrderPayment.PAYMENT_STATE_FAILED`` and logging a failure,
@@ -1616,6 +1626,7 @@ class OrderPayment(models.Model):
            'local_id': self.local_id,
            'provider': self.provider,
            'info': info,
            'data': log_data,
        }, user=user, auth=auth)

    def confirm(self, count_waitinglist=True, send_mail=True, force=False, user=None, auth=None, mail_text='',
@@ -2071,8 +2082,20 @@ class OrderFee(models.Model):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if not self.get_deferred_fields():
            self.__initial_transaction_key = Transaction.key(self)
            self.__initial_canceled = self.canceled
            self._transaction_key_reset()

    def refresh_from_db(self, using=None, fields=None):
        """
        Reload field values from the database. Similar to django's implementation
        with adjustment for our method that forces us to create ``Transaction`` instances.
        """
        if not self.get_deferred_fields():
            self._transaction_key_reset()
        return super().refresh_from_db(using, fields)

    def _transaction_key_reset(self):
        self.__initial_transaction_key = Transaction.key(self)
        self.__initial_canceled = self.canceled

    def __str__(self):
        if self.description:
@@ -2192,8 +2215,20 @@ class OrderPosition(AbstractPosition):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if not self.get_deferred_fields():
            self.__initial_transaction_key = Transaction.key(self)
            self.__initial_canceled = self.canceled
            self._transaction_key_reset()

    def refresh_from_db(self, using=None, fields=None):
        """
        Reload field values from the database. Similar to django's implementation
        with adjustment for our method that forces us to create ``Transaction`` instances.
        """
        if not self.get_deferred_fields():
            self._transaction_key_reset()
        return super().refresh_from_db(using, fields)

    def _transaction_key_reset(self):
        self.__initial_transaction_key = Transaction.key(self)
        self.__initial_canceled = self.canceled

    class Meta:
        verbose_name = _("Order position")

@@ -461,7 +461,9 @@ class TeamAPIToken(models.Model):
|
||||
:param request: Ignored, for compatibility with User model
|
||||
:return: Iterable of Events
|
||||
"""
|
||||
if getattr(self.team, permission, False):
|
||||
if (
|
||||
isinstance(permission, (list, tuple)) and any(getattr(self.team, p, False) for p in permission)
|
||||
) or (isinstance(permission, str) and getattr(self.team, permission, False)):
|
||||
return self.get_events_with_any_permission()
|
||||
else:
|
||||
return self.team.organizer.events.none()
|
||||
|
||||
@@ -114,7 +114,7 @@ EU_CURRENCIES = {
|
||||
'RO': 'RON',
|
||||
'SE': 'SEK'
|
||||
}
|
||||
VAT_ID_COUNTRIES = EU_COUNTRIES | {'CH'}
|
||||
VAT_ID_COUNTRIES = EU_COUNTRIES | {'CH', 'NO'}
|
||||
|
||||
|
||||
def is_eu_country(cc):
|
||||
|
||||
@@ -392,7 +392,7 @@ class InvoiceAddressCountry(ImportColumn):
|
||||
return list(countries)
|
||||
|
||||
def clean(self, value, previous_values):
|
||||
if value and not Country(value).numeric:
|
||||
if value and not (Country(value).numeric or value in settings.COUNTRIES_OVERRIDE):
|
||||
raise ValidationError(_("Please enter a valid country code."))
|
||||
return value
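The relaxed check accepts not only ISO codes that django-countries can map to a numeric value, but also any custom code an installation adds via ``COUNTRIES_OVERRIDE``. A hedged configuration sketch; the specific code added is just an example:

# settings.py -- example only: 'XK' (Kosovo) has no ISO numeric code,
# so an installation wanting to accept it would add it explicitly.
COUNTRIES_OVERRIDE = {
    'XK': 'Kosovo',
}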
|
||||
|
||||
@@ -538,7 +538,7 @@ class AttendeeCountry(ImportColumn):
|
||||
return list(countries)
|
||||
|
||||
def clean(self, value, previous_values):
|
||||
if value and not Country(value).numeric:
|
||||
if value and not (Country(value).numeric or value in settings.COUNTRIES_OVERRIDE):
|
||||
raise ValidationError(_("Please enter a valid country code."))
|
||||
return value
|
||||
|
||||
|
||||
@@ -57,7 +57,7 @@ from django.utils.html import conditional_escape
|
||||
from django.utils.timezone import now
|
||||
from django.utils.translation import gettext_lazy as _, pgettext
|
||||
from i18nfield.strings import LazyI18nString
|
||||
from PyPDF2 import PdfFileReader
|
||||
from PyPDF2 import PdfReader
|
||||
from pytz import timezone
|
||||
from reportlab.graphics import renderPDF
|
||||
from reportlab.graphics.barcode.qr import QrCodeWidget
|
||||
@@ -251,6 +251,11 @@ DEFAULT_VARIABLES = OrderedDict((
|
||||
"editor_sample": _("20:00"),
|
||||
"evaluate": lambda op, order, ev: ev.get_time_from_display()
|
||||
}),
|
||||
("event_begin_weekday", {
|
||||
"label": _("Event begin weekday"),
|
||||
"editor_sample": _("Friday"),
|
||||
"evaluate": lambda op, order, ev: ev.get_weekday_from_display()
|
||||
}),
|
||||
("event_end", {
|
||||
"label": _("Event end date and time"),
|
||||
"editor_sample": _("2017-05-31 22:00"),
|
||||
@@ -275,6 +280,11 @@ DEFAULT_VARIABLES = OrderedDict((
|
||||
"TIME_FORMAT"
|
||||
) if ev.date_to else ""
|
||||
}),
|
||||
("event_end_weekday", {
|
||||
"label": _("Event end weekday"),
|
||||
"editor_sample": _("Friday"),
|
||||
"evaluate": lambda op, order, ev: ev.get_weekday_to_display()
|
||||
}),
|
||||
("event_admission", {
|
||||
"label": _("Event admission date and time"),
|
||||
"editor_sample": _("2017-05-31 19:00"),
|
||||
@@ -646,7 +656,7 @@ class Renderer:
|
||||
self.event = event
|
||||
if self.background_file:
|
||||
self.bg_bytes = self.background_file.read()
|
||||
self.bg_pdf = PdfFileReader(BytesIO(self.bg_bytes), strict=False)
|
||||
self.bg_pdf = PdfReader(BytesIO(self.bg_bytes), strict=False)
|
||||
else:
|
||||
self.bg_bytes = None
|
||||
self.bg_pdf = None
|
||||
@@ -831,9 +841,10 @@ class Renderer:
|
||||
textColor=Color(o['color'][0] / 255, o['color'][1] / 255, o['color'][2] / 255),
|
||||
alignment=align_map[o['align']]
|
||||
)
|
||||
# add an almost-invisible space   after hyphens as word-wrap in ReportLab only works on space chars
|
||||
text = conditional_escape(
|
||||
self._get_text_content(op, order, o) or "",
|
||||
).replace("\n", "<br/>\n")
|
||||
).replace("\n", "<br/>\n").replace("-", "- ")
|
||||
|
||||
# reportlab does not support RTL, ligature-heavy scripts like Arabic. Therefore, we use ArabicReshaper
|
||||
# to resolve all ligatures and python-bidi to switch RTL texts.
|
||||
@@ -860,7 +871,7 @@ class Renderer:
|
||||
canvas.restoreState()
|
||||
|
||||
def draw_page(self, canvas: Canvas, order: Order, op: OrderPosition, show_page=True, only_page=None):
|
||||
page_count = self.bg_pdf.getNumPages()
|
||||
page_count = len(self.bg_pdf.pages)
|
||||
|
||||
if not only_page and not show_page:
|
||||
raise ValueError("only_page=None and show_page=False cannot be combined")
|
||||
@@ -880,7 +891,11 @@ class Renderer:
|
||||
elif o['type'] == "poweredby":
|
||||
self._draw_poweredby(canvas, op, o)
|
||||
if self.bg_pdf:
|
||||
canvas.setPageSize((self.bg_pdf.getPage(page).mediaBox[2], self.bg_pdf.getPage(page).mediaBox[3]))
|
||||
page_size = (self.bg_pdf.pages[0].mediabox[2], self.bg_pdf.pages[0].mediabox[3])
|
||||
if self.bg_pdf.pages[0].get('/Rotate') in (90, 270):
|
||||
# swap dimensions due to pdf being rotated
|
||||
page_size = page_size[::-1]
|
||||
canvas.setPageSize(page_size)
|
||||
if show_page:
|
||||
canvas.showPage()
|
||||
|
||||
@@ -904,17 +919,41 @@ class Renderer:
|
||||
with open(os.path.join(d, 'out.pdf'), 'rb') as f:
|
||||
return BytesIO(f.read())
|
||||
else:
|
||||
from PyPDF2 import PdfFileReader, PdfFileWriter
|
||||
from PyPDF2 import PdfReader, PdfWriter, Transformation
|
||||
from PyPDF2.generic import RectangleObject
|
||||
buffer.seek(0)
|
||||
new_pdf = PdfFileReader(buffer)
|
||||
output = PdfFileWriter()
|
||||
new_pdf = PdfReader(buffer)
|
||||
output = PdfWriter()
|
||||
|
||||
for i, page in enumerate(new_pdf.pages):
|
||||
bg_page = copy.copy(self.bg_pdf.getPage(i))
|
||||
bg_page.mergePage(page)
|
||||
output.addPage(bg_page)
|
||||
bg_page = copy.copy(self.bg_pdf.pages[i])
|
||||
bg_rotation = bg_page.get('/Rotate')
|
||||
if bg_rotation:
|
||||
# /Rotate is clockwise, transformation.rotate is counter-clockwise
|
||||
t = Transformation().rotate(bg_rotation)
|
||||
w = float(page.mediabox.getWidth())
|
||||
h = float(page.mediabox.getHeight())
|
||||
if bg_rotation in (90, 270):
|
||||
# offset due to rotation base
|
||||
if bg_rotation == 90:
|
||||
t = t.translate(h, 0)
|
||||
else:
|
||||
t = t.translate(0, w)
|
||||
# rotate mediabox as well
|
||||
page.mediabox = RectangleObject((
|
||||
page.mediabox.left.as_numeric(),
|
||||
page.mediabox.bottom.as_numeric(),
|
||||
page.mediabox.top.as_numeric(),
|
||||
page.mediabox.right.as_numeric(),
|
||||
))
|
||||
page.trimbox = page.mediabox
|
||||
elif bg_rotation == 180:
|
||||
t = t.translate(w, h)
|
||||
page.add_transformation(t)
|
||||
bg_page.merge_page(page)
|
||||
output.add_page(bg_page)
|
||||
|
||||
output.addMetadata({
|
||||
output.add_metadata({
|
||||
'/Title': str(title),
|
||||
'/Creator': 'pretix',
|
||||
})
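The hunks above move the renderer from the deprecated PyPDF2 1.x camel-case API to the snake_case API of PyPDF2 2.x / pypdf. A compact summary of the renamings used in this file, plus a tiny example assuming a 2.x-style import is available:

# PdfFileReader / PdfFileWriter  ->  PdfReader / PdfWriter
# reader.getNumPages()           ->  len(reader.pages)
# reader.getPage(i)              ->  reader.pages[i]
# page.mediaBox                  ->  page.mediabox
# page.mergePage(other)          ->  page.merge_page(other)
# writer.addPage(page)           ->  writer.add_page(page)
# writer.addMetadata({...})      ->  writer.add_metadata({...})

from io import BytesIO
from PyPDF2 import PdfReader

def page_count(pdf_bytes: bytes) -> int:
    # len(reader.pages) replaces the removed getNumPages()
    return len(PdfReader(BytesIO(pdf_bytes)).pages)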
|
||||
|
||||
@@ -143,8 +143,8 @@ class Sig1TicketSecretGenerator(BaseTicketSecretGenerator):
|
||||
The resulting string is REVERSED, to avoid all secrets of the same length beginning with the same 10
characters, which would make it impossible to search for secrets manually.
|
||||
"""
|
||||
verbose_name = _('pretix signature scheme 1 (for very large events, does not work with pretixSCAN on iOS and '
|
||||
'changes semantics of offline scanning – please refer to documentation or support for details)')
|
||||
verbose_name = _('pretix signature scheme 1 (for very large events, changes semantics of offline scanning – '
|
||||
'please refer to documentation or support for details)')
|
||||
identifier = 'pretix_sig1'
|
||||
use_revocation_list = True
|
||||
|
||||
|
||||
@@ -947,11 +947,11 @@ class CartManager:
|
||||
|
||||
if voucher_available_count < 1:
|
||||
if op.voucher in self._voucher_depend_on_cart:
|
||||
err = err or error_messages['voucher_redeemed_cart'] % self.event.settings.reservation_time
|
||||
err = err or _(error_messages['voucher_redeemed_cart']) % self.event.settings.reservation_time
|
||||
else:
|
||||
err = err or error_messages['voucher_redeemed']
|
||||
elif voucher_available_count < requested_count:
|
||||
err = err or error_messages['voucher_redeemed_partial'] % voucher_available_count
|
||||
err = err or _(error_messages['voucher_redeemed_partial']) % voucher_available_count
|
||||
|
||||
available_count = min(quota_available_count, voucher_available_count)
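Wrapping the message in ``_()`` at the point of use translates the stored template string before the ``%`` substitution is applied, which only matters if the entries in ``error_messages`` are stored untranslated (e.g. marked with ``gettext_noop``). A minimal sketch of that pattern with an invented key; whether this is exactly how the surrounding module defines its messages is an assumption:

from django.utils.translation import gettext as _, gettext_noop

error_messages = {
    # Marked for extraction by makemessages, but stored untranslated:
    'voucher_redeemed_cart': gettext_noop(
        'This voucher is currently locked by another cart for %s minutes.'
    ),
}

def build_error(minutes):
    # Translate at use time, then fill in the placeholder.
    return _(error_messages['voucher_redeemed_cart']) % minutes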
|
||||
|
||||
|
||||
@@ -28,6 +28,7 @@ from django.utils.timezone import now
|
||||
from django_scopes import scopes_disabled
|
||||
|
||||
from pretix.base.models import CachedCombinedTicket, CachedTicket
|
||||
from pretix.base.models.customers import CustomerSSOGrant
|
||||
|
||||
from ..models import CachedFile, CartPosition, InvoiceAddress
|
||||
from ..signals import periodic_task
|
||||
@@ -68,3 +69,9 @@ def clean_cached_tickets(sender, **kwargs):
|
||||
@scopes_disabled()
|
||||
def clearsessions(sender, **kwargs):
|
||||
call_command('clearsessions')
|
||||
|
||||
|
||||
@receiver(signal=periodic_task)
|
||||
@scopes_disabled()
|
||||
def clear_oidc_data(sender, **kwargs):
|
||||
CustomerSSOGrant.objects.filter(expires__lt=now() - timedelta(days=14)).delete()
|
||||
|
||||
@@ -121,7 +121,7 @@ def mail(email: Union[str, Sequence[str]], subject: str, template: Union[str, La
|
||||
:param order: The order this email is related to (optional). If set, this will be used to include a link to the
|
||||
order below the email.
|
||||
|
||||
:param order: The order position this email is related to (optional). If set, this will be used to include a link
|
||||
:param position: The order position this email is related to (optional). If set, this will be used to include a link
|
||||
to the order position instead of the order below the email.
|
||||
|
||||
:param headers: A dict of custom mail headers to add to the mail
|
||||
@@ -141,7 +141,7 @@ def mail(email: Union[str, Sequence[str]], subject: str, template: Union[str, La
|
||||
|
||||
:param user: The user this email is sent to
|
||||
|
||||
:param customer: The user this email is sent to
|
||||
:param customer: The customer this email is sent to
|
||||
|
||||
:param attach_cached_files: A list of cached files to attach to this email.
|
||||
|
||||
@@ -502,11 +502,11 @@ def mail_send_task(self, *args, to: List[str], subject: str, body: str, html: st
|
||||
rc.expire(redis_key, 300)
|
||||
|
||||
max_retries = 10
|
||||
retry_after = 30 + cnt * 10
|
||||
retry_after = min(30 + cnt * 10, 1800)
|
||||
else:
|
||||
# Most likely some other kind of temporary failure, retry again (but pretty soon)
|
||||
max_retries = 5
|
||||
retry_after = 2 ** (self.request.retries * 3) # max is 2 ** (4*3) = 4096 seconds = 68 minutes
|
||||
retry_after = [10, 30, 60, 300, 900, 900][self.request.retries]
|
||||
|
||||
try:
|
||||
self.retry(max_retries=max_retries, countdown=retry_after)
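The retry logic now uses explicit, capped schedules instead of unbounded exponential backoff: the rate-limit branch caps its countdown at 30 minutes, and the generic temporary-failure branch reads from a fixed list topping out at 15 minutes. A small sketch comparing the old and new countdowns (values taken from the hunk; the helper functions are illustrative):

def old_countdown(retries: int) -> int:
    # previous behaviour: 1, 8, 64, 512, 4096 seconds for retries 0..4
    return 2 ** (retries * 3)

NEW_SCHEDULE = [10, 30, 60, 300, 900, 900]

def new_countdown(retries: int) -> int:
    # new behaviour: quick early retries, capped at 15 minutes
    return NEW_SCHEDULE[retries]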
|
||||
@@ -542,7 +542,7 @@ def mail_send_task(self, *args, to: List[str], subject: str, body: str, html: st
|
||||
if not any(c >= 500 for c in smtp_codes):
|
||||
# Not a permanent failure (mailbox full, service unavailable), retry later, but with large intervals
|
||||
try:
|
||||
self.retry(max_retries=5, countdown=2 ** (self.request.retries * 3) * 4) # max is 2 ** (4*3) * 4 = 16384 seconds = approx 4.5 hours
|
||||
self.retry(max_retries=5, countdown=[60, 300, 600, 1200, 1800][self.request.retries])
|
||||
except MaxRetriesExceededError:
|
||||
# ignore and go on with logging the error
|
||||
pass
|
||||
@@ -567,7 +567,7 @@ def mail_send_task(self, *args, to: List[str], subject: str, body: str, html: st
|
||||
except Exception as e:
|
||||
if isinstance(e, (smtplib.SMTPServerDisconnected, smtplib.SMTPConnectError, ssl.SSLError, OSError)):
|
||||
try:
|
||||
self.retry(max_retries=5, countdown=2 ** (self.request.retries * 3)) # max is 2 ** (4*3) = 4096 seconds = 68 minutes
|
||||
self.retry(max_retries=5, countdown=[10, 30, 60, 300, 900, 900][self.request.retries])
|
||||
except MaxRetriesExceededError:
|
||||
if log_target:
|
||||
log_target.log_action(
|
||||
|
||||
@@ -36,6 +36,7 @@ from pretix.base.models import (
|
||||
from pretix.base.models.orders import Transaction
|
||||
from pretix.base.orderimport import get_all_columns
|
||||
from pretix.base.services.invoices import generate_invoice, invoice_qualified
|
||||
from pretix.base.services.locking import NoLockManager
|
||||
from pretix.base.services.tasks import ProfiledEventTask
|
||||
from pretix.base.signals import order_paid, order_placed
|
||||
from pretix.celery_app import app
|
||||
@@ -85,9 +86,9 @@ def setif(record, obj, attr, setting):
|
||||
|
||||
@app.task(base=ProfiledEventTask, throws=(DataImportError,))
|
||||
def import_orders(event: Event, fileid: str, settings: dict, locale: str, user) -> None:
|
||||
# TODO: quotacheck?
|
||||
cf = CachedFile.objects.get(id=fileid)
|
||||
user = User.objects.get(pk=user)
|
||||
seats_used = False
|
||||
with language(locale, event.settings.region):
|
||||
cols = get_all_columns(event)
|
||||
parsed = parse_csv(cf.file)
|
||||
@@ -133,6 +134,8 @@ def import_orders(event: Event, fileid: str, settings: dict, locale: str, user)
|
||||
position = OrderPosition(positionid=len(order._positions) + 1)
|
||||
position.attendee_name_parts = {'_scheme': event.settings.name_scheme}
|
||||
position.meta_info = {}
|
||||
if position.seat is not None:
|
||||
seats_used = True
|
||||
order._positions.append(position)
|
||||
position.assign_pseudonymization_id()
|
||||
|
||||
@@ -144,9 +147,12 @@ def import_orders(event: Event, fileid: str, settings: dict, locale: str, user)
|
||||
_('Invalid data in row {row}: {message}').format(row=i, message=str(e))
|
||||
)
|
||||
|
||||
# quota check?
|
||||
with event.lock():
|
||||
with transaction.atomic():
|
||||
# We don't support vouchers, quotas, or memberships here, so we only need to lock if seats
|
||||
# are in use
|
||||
lockfn = event.lock if seats_used else NoLockManager
|
||||
|
||||
try:
|
||||
with lockfn(), transaction.atomic():
|
||||
save_transactions = []
|
||||
for o in orders:
|
||||
o.total = sum([c.price for c in o._positions]) # currently no support for fees
|
||||
@@ -204,4 +210,7 @@ def import_orders(event: Event, fileid: str, settings: dict, locale: str, user)
|
||||
) and not o.invoices.last()
|
||||
if gen_invoice:
|
||||
generate_invoice(o, trigger_pdf=True)
|
||||
except DataImportError:
|
||||
raise ValidationError(_('We were not able to process your request completely as the server was too busy. '
|
||||
'Please try again.'))
|
||||
cf.delete()
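The import now takes the event-wide lock only when seats are actually used; otherwise ``NoLockManager`` stands in as a no-op context manager. A reduced sketch of the conditional-locking pattern; the dummy manager below is illustrative, not the pretix implementation:

from contextlib import contextmanager

@contextmanager
def no_lock():
    # Stand-in for NoLockManager: supports "with", but locks nothing.
    yield

def run_guarded(event, seats_used: bool, work):
    lockfn = event.lock if seats_used else no_lock
    with lockfn():
        work()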
|
||||
|
||||
@@ -2304,6 +2304,11 @@ class OrderChangeManager:
|
||||
# Do nothing
|
||||
return
|
||||
|
||||
# Clear prefetched objects cache of order. We're going to modify the positions and fees and we have no guarantee
|
||||
# that every operation tuple points to a position/fee instance that has been fetched from the same object cache,
|
||||
# so it's dangerous to keep the cache around.
|
||||
self.order._prefetched_objects_cache = {}
|
||||
|
||||
# finally, incorporate difference in payment fees
|
||||
self._payment_fee_diff()
|
||||
|
||||
|
||||
@@ -51,7 +51,7 @@ from pretix.celery_app import app
|
||||
|
||||
|
||||
@app.task(base=ProfiledEventTask)
|
||||
def export(event: Event, shredders: List[str], session_key=None) -> None:
|
||||
def export(event: Event, shredders: List[str], session_key=None, cfid=None) -> None:
|
||||
known_shredders = event.get_data_shredders()
|
||||
|
||||
with NamedTemporaryFile() as rawfile:
|
||||
@@ -85,13 +85,16 @@ def export(event: Event, shredders: List[str], session_key=None) -> None:
|
||||
|
||||
rawfile.seek(0)
|
||||
|
||||
cf = CachedFile()
|
||||
cf.date = now()
|
||||
if cfid:
|
||||
cf = CachedFile.objects.get(pk=cfid)
|
||||
else:
|
||||
cf = CachedFile()
|
||||
cf.date = now()
|
||||
cf.session_key = session_key
|
||||
cf.web_download = True
|
||||
cf.expires = now() + timedelta(hours=1)
|
||||
cf.filename = event.slug + '.zip'
|
||||
cf.type = 'application/zip'
|
||||
cf.session_key = session_key
|
||||
cf.web_download = True
|
||||
cf.expires = now() + timedelta(hours=1)
|
||||
cf.save()
|
||||
cf.file.save(cachedfile_name(cf, cf.filename), rawfile)
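With the new ``cfid`` argument, a caller can create the ``CachedFile`` row up front (so its download URL is known immediately) and let the task attach the generated archive to it later. A hedged sketch of such a caller; ``CachedFile`` and the ``export`` task are assumed to be imported as in the hunks above, and the invocation convention (passing the event primary key) is an assumption:

from datetime import timedelta
from django.utils.timezone import now

def start_export(event, shredders, session_key):
    cf = CachedFile.objects.create(
        date=now(),
        expires=now() + timedelta(hours=1),
        session_key=session_key,
        web_download=True,
    )
    export.apply_async(kwargs={
        'event': event.pk,
        'shredders': shredders,
        'cfid': str(cf.pk),
    })
    return cf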
|
||||
|
||||
@@ -115,7 +118,7 @@ def shred(event: Event, fileid: str, confirm_code: str) -> None:
|
||||
if not shredder:
|
||||
continue
|
||||
shredders.append(shredder)
|
||||
if any(shredder.require_download_confirmation for shredder in shredders):
|
||||
if confirm_code is not True and any(shredder.require_download_confirmation for shredder in shredders):
|
||||
if indexdata['confirm_code'] != confirm_code:
|
||||
raise ShredError(_("The confirm code you entered was incorrect."))
|
||||
if event.logentry_set.filter(datetime__gte=parse(indexdata['time'])):
|
||||
|
||||
@@ -22,9 +22,9 @@
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from urllib.error import HTTPError
|
||||
from xml.etree import ElementTree
|
||||
|
||||
import vat_moss.errors
|
||||
import requests
|
||||
import vat_moss.id
|
||||
from django.conf import settings
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
@@ -35,6 +35,16 @@ from zeep.exceptions import Fault
|
||||
from pretix.base.models.tax import cc_to_vat_prefix, is_eu_country
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
error_messages = {
|
||||
'unavailable': _(
|
||||
'Your VAT ID could not be checked, as the VAT checking service of '
|
||||
'your country is currently not available. We will therefore '
|
||||
'need to charge VAT on your invoice. You can get the tax amount '
|
||||
'back via the VAT reimbursement process.'
|
||||
),
|
||||
'invalid': _('This VAT ID is not valid. Please re-check your input.'),
|
||||
'country_mismatch': _('Your VAT ID does not match the selected country.'),
|
||||
}
|
||||
|
||||
|
||||
class VATIDError(Exception):
|
||||
@@ -50,33 +60,107 @@ class VATIDTemporaryError(VATIDError):
|
||||
pass
|
||||
|
||||
|
||||
def _validate_vat_id_EU(vat_id, country_code):
|
||||
if vat_id[:2] != cc_to_vat_prefix(country_code):
|
||||
raise VATIDFinalError(_('Your VAT ID does not match the selected country.'))
|
||||
def _validate_vat_id_NO(vat_id, country_code):
|
||||
# Inspired by vat_moss library
|
||||
vat_id = vat_moss.id.normalize(vat_id)
|
||||
|
||||
if not vat_id or len(vat_id) < 3 or not re.match('^\\d{9}MVA$', vat_id[2:]):
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
|
||||
organization_number = vat_id[2:].replace('MVA', '')
|
||||
validation_url = 'https://data.brreg.no/enhetsregisteret/api/enheter/%s' % organization_number
|
||||
|
||||
try:
|
||||
result = vat_moss.id.validate(vat_id)
|
||||
if result:
|
||||
country_code, normalized_id, company_name = result
|
||||
return normalized_id
|
||||
except (vat_moss.errors.InvalidError, ValueError):
|
||||
raise VATIDFinalError(_('This VAT ID is not valid. Please re-check your input.'))
|
||||
except vat_moss.errors.WebServiceUnavailableError:
|
||||
response = requests.get(validation_url, timeout=10)
|
||||
if response.status_code in (404, 400):
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
|
||||
response.raise_for_status()
|
||||
|
||||
info = response.json()
|
||||
# This should never happen, but keeping it in case the API is changed
|
||||
if 'organisasjonsnummer' not in info or info['organisasjonsnummer'] != organization_number:
|
||||
logger.warning(
|
||||
'VAT ID checking failed for Norway due to missing or mismatching organisasjonsnummer in response'
|
||||
)
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
except requests.RequestException:
|
||||
logger.exception('VAT ID checking failed for country {}'.format(country_code))
|
||||
raise VATIDTemporaryError(_(
|
||||
'Your VAT ID could not be checked, as the VAT checking service of '
|
||||
'your country is currently not available. We will therefore '
|
||||
'need to charge VAT on your invoice. You can get the tax amount '
|
||||
'back via the VAT reimbursement process.'
|
||||
))
|
||||
except (vat_moss.errors.WebServiceError, HTTPError):
|
||||
raise VATIDTemporaryError(error_messages['unavailable'])
|
||||
else:
|
||||
return vat_id
|
||||
|
||||
|
||||
def _validate_vat_id_EU(vat_id, country_code):
|
||||
# Inspired by vat_moss library
|
||||
try:
|
||||
vat_id = vat_moss.id.normalize(vat_id)
|
||||
except ValueError:
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
|
||||
if not vat_id or len(vat_id) < 3:
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
|
||||
number = vat_id[2:]
|
||||
|
||||
if vat_id[:2] != cc_to_vat_prefix(country_code):
|
||||
raise VATIDFinalError(error_messages['country_mismatch'])
|
||||
|
||||
if not re.match(vat_moss.id.ID_PATTERNS[cc_to_vat_prefix(country_code)]['regex'], number):
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
|
||||
# We are relying on the country code of the normalized VAT-ID and not the user/InvoiceAddress-provided
|
||||
# VAT-ID, since Django and the EU have different ideas of which country is using which country code.
|
||||
# For example: For django and most people, Greece is GR. However, the VAT-service expects EL.
|
||||
payload = """
|
||||
<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:urn="urn:ec.europa.eu:taxud:vies:services:checkVat:types">
|
||||
<soapenv:Header/>
|
||||
<soapenv:Body>
|
||||
<urn:checkVat>
|
||||
<urn:countryCode>%s</urn:countryCode>
|
||||
<urn:vatNumber>%s</urn:vatNumber>
|
||||
</urn:checkVat>
|
||||
</soapenv:Body>
|
||||
</soapenv:Envelope>
|
||||
""".strip() % (vat_id[:2], number)
|
||||
|
||||
try:
|
||||
response = requests.post(
|
||||
'https://ec.europa.eu/taxation_customs/vies/services/checkVatService',
|
||||
data=payload,
|
||||
timeout=10,
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
return_xml = response.text
|
||||
|
||||
try:
|
||||
envelope = ElementTree.fromstring(return_xml)
|
||||
except ElementTree.ParseError:
|
||||
logger.error(
|
||||
f'VAT ID checking failed for {country_code} due to XML parse error'
|
||||
)
|
||||
raise VATIDTemporaryError(error_messages['unavailable'])
|
||||
|
||||
namespaces = {
|
||||
'soap': 'http://schemas.xmlsoap.org/soap/envelope/',
|
||||
'vat': 'urn:ec.europa.eu:taxud:vies:services:checkVat:types'
|
||||
}
|
||||
valid_elements = envelope.findall('./soap:Body/vat:checkVatResponse/vat:valid', namespaces)
|
||||
if not valid_elements:
|
||||
logger.error(
|
||||
f'VAT ID checking failed for {country_code} due to missing <valid> tag'
|
||||
)
|
||||
raise VATIDTemporaryError(error_messages['unavailable'])
|
||||
|
||||
if valid_elements[0].text.lower() != 'true':
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
|
||||
except requests.RequestException:
|
||||
logger.exception('VAT ID checking failed for country {}'.format(country_code))
|
||||
raise VATIDTemporaryError(_(
|
||||
'Your VAT ID could not be checked, as the VAT checking service of '
|
||||
'your country returned an incorrect result. We will therefore '
|
||||
'need to charge VAT on your invoice. Please contact support to '
|
||||
'resolve this manually.'
|
||||
))
|
||||
raise VATIDTemporaryError(error_messages['unavailable'])
|
||||
else:
|
||||
return vat_id
|
||||
|
||||
|
||||
def _validate_vat_id_CH(vat_id, country_code):
|
||||
@@ -85,10 +169,13 @@ def _validate_vat_id_CH(vat_id, country_code):
|
||||
|
||||
vat_id = re.sub('[^A-Z0-9]', '', vat_id.replace('HR', '').replace('MWST', ''))
|
||||
try:
|
||||
transport = Transport(cache=SqliteCache(os.path.join(settings.CACHE_DIR, "validate_vat_id_ch_zeep_cache.db")))
|
||||
transport = Transport(
|
||||
cache=SqliteCache(os.path.join(settings.CACHE_DIR, "validate_vat_id_ch_zeep_cache.db")),
|
||||
timeout=10
|
||||
)
|
||||
client = Client(
|
||||
'https://www.uid-wse.admin.ch/V5.0/PublicServices.svc?wsdl',
|
||||
transport=transport
|
||||
transport=transport,
|
||||
)
|
||||
result = client.service.ValidateUID(uid=vat_id)
|
||||
except Fault as e:
|
||||
@@ -125,10 +212,14 @@ def _validate_vat_id_CH(vat_id, country_code):
|
||||
|
||||
|
||||
def validate_vat_id(vat_id, country_code):
|
||||
if not vat_id:
|
||||
return vat_id
|
||||
country_code = str(country_code)
|
||||
if is_eu_country(country_code):
|
||||
return _validate_vat_id_EU(vat_id, country_code)
|
||||
elif country_code == 'CH':
|
||||
return _validate_vat_id_CH(vat_id, country_code)
|
||||
elif country_code == 'NO':
|
||||
return _validate_vat_id_NO(vat_id, country_code)
|
||||
|
||||
raise VATIDTemporaryError(f'VAT ID should not be entered for country {country_code}')
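After this change the dispatcher covers EU countries, Switzerland and Norway, and raises for anything else, so callers should guard on the supported-country set first. A hedged usage sketch (not the pretix form code):

def cleaned_vat_id(vat_id, country_code):
    if str(country_code) not in VAT_ID_COUNTRIES:
        # The validator would raise for unsupported countries, so skip it entirely.
        return ''
    try:
        return validate_vat_id(vat_id, country_code)
    except VATIDTemporaryError:
        # Checking service unavailable: keep the ID, but VAT will be charged.
        return vat_id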
|
||||
|
||||
@@ -57,6 +57,7 @@ from django_countries.fields import Country
|
||||
from hierarkey.models import GlobalSettingsBase, Hierarkey
|
||||
from i18nfield.forms import I18nFormField, I18nTextarea, I18nTextInput
|
||||
from i18nfield.strings import LazyI18nString
|
||||
from phonenumbers import PhoneNumber, parse
|
||||
from rest_framework import serializers
|
||||
|
||||
from pretix.api.serializers.fields import (
|
||||
@@ -145,6 +146,17 @@ DEFAULTS = {
|
||||
"advanced features like memberships.")
|
||||
)
|
||||
},
|
||||
'customer_accounts_native': {
|
||||
'default': 'True',
|
||||
'type': bool,
|
||||
'form_class': forms.BooleanField,
|
||||
'serializer_class': serializers.BooleanField,
|
||||
'form_kwargs': dict(
|
||||
label=_("Allow customers to log in with email address and password"),
|
||||
help_text=_("If disabled, you will need to connect one or more single-sign-on providers."),
|
||||
widget=forms.CheckboxInput(attrs={'data-display-dependency': '#id_settings-customer_accounts'}),
|
||||
)
|
||||
},
|
||||
'customer_accounts_link_by_email': {
|
||||
'default': 'False',
|
||||
'type': bool,
|
||||
@@ -1514,6 +1526,15 @@ DEFAULTS = {
|
||||
"before the order is canceled and a refund is issued."),
|
||||
)
|
||||
},
|
||||
'cancel_allow_user_paid_require_approval_fee_unknown': {
|
||||
'default': 'False',
|
||||
'type': bool,
|
||||
'form_class': forms.BooleanField,
|
||||
'serializer_class': serializers.BooleanField,
|
||||
'form_kwargs': dict(
|
||||
label=_("Do not show the cancellation fee to users when they request cancellation."),
|
||||
)
|
||||
},
|
||||
'cancel_allow_user_paid_refund_as_giftcard': {
|
||||
'default': 'off',
|
||||
'type': str,
|
||||
@@ -2585,7 +2606,9 @@ Your {organizer} team"""))
|
||||
},
|
||||
'name_scheme': {
|
||||
'default': 'full', # default for new events is 'given_family'
|
||||
'type': str
|
||||
'type': str,
|
||||
'serializer_class': serializers.ChoiceField,
|
||||
'serializer_kwargs': {},
|
||||
},
|
||||
'giftcard_length': {
|
||||
'default': settings.ENTROPY['giftcard_secret'],
|
||||
@@ -2980,6 +3003,9 @@ PERSON_NAME_SCHEMES = OrderedDict([
|
||||
},
|
||||
}),
|
||||
])
|
||||
|
||||
DEFAULTS['name_scheme']['serializer_kwargs']['choices'] = ((k, k) for k in PERSON_NAME_SCHEMES)
|
||||
|
||||
COUNTRIES_WITH_STATE_IN_ADDRESS = {
|
||||
# Source: http://www.bitboost.com/ref/international-address-formats.html
|
||||
# This is not a list of countries that *have* states, this is a list of countries where states
|
||||
@@ -3016,6 +3042,7 @@ settings_hierarkey.add_type(LazyI18nStringList,
|
||||
settings_hierarkey.add_type(RelativeDateWrapper,
|
||||
serialize=lambda rdw: rdw.to_string(),
|
||||
unserialize=lambda s: RelativeDateWrapper.from_string(s))
|
||||
settings_hierarkey.add_type(PhoneNumber, lambda pn: pn.as_international, lambda s: parse(s))
|
||||
|
||||
|
||||
@settings_hierarkey.set_global(cache_namespace='global')
|
||||
|
||||
src/pretix/base/templates/400_hostname.html (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
{% extends "error.html" %}
|
||||
{% load i18n %}
|
||||
{% load static %}
|
||||
{% block title %}{% trans "Unknown host" %}{% endblock %}
|
||||
{% block content %}
|
||||
<i class="fa fa-question-circle-o fa-fw big-icon"></i>
|
||||
<div class="error-details">
|
||||
<h1>{% trans "Unknown host" %}</h1>
|
||||
<p>
|
||||
{% blocktrans trimmed with host=header_host %}
|
||||
Your browser told us that you want to access "{{ header_host }}". Unfortunately, we don't have
|
||||
any content for this domain.
|
||||
{% endblocktrans %}
|
||||
</p>
|
||||
{% if is_fresh_install %}
|
||||
<p>
|
||||
{% blocktrans trimmed %}
|
||||
It looks like this is a fresh installation of pretix. This error message is probably caused by
your configuration containing the wrong site URL or by your reverse proxy sending the wrong
header.
|
||||
{% endblocktrans %}
|
||||
</p>
|
||||
<dl>
|
||||
<dt>{% trans "Expected host according to configuration" %}</dt>
|
||||
<dd><code>{{ site_host }}</code></dd>
|
||||
<dt>{% trans "Received headers" %}</dt>
|
||||
<dd>
|
||||
<code>Host: {{ request.headers.Host }}</code>
|
||||
{% if xfh %}
|
||||
<br>
|
||||
<code>X-Forwarded-For: {{ xfh }}</code>
|
||||
{% if not settings.USE_X_FORWARDED_HOST %}({% trans "ignored" %}){% endif %}
|
||||
{% endif %}
|
||||
</dd>
|
||||
<dt>{% trans "Derived host from headers" %}</dt>
|
||||
<dd><code>{{ header_host }}</code></dd>
|
||||
</dl>
|
||||
{% else %}
|
||||
<p>
|
||||
{% blocktrans trimmed %}
|
||||
If you just configured this as a domain for your ticket shop, you now need to set this up as a "custom domain"
|
||||
in your organizer account.
|
||||
{% endblocktrans %}
|
||||
</p>
|
||||
{% endif %}
|
||||
<p class="links">
|
||||
<a id='goback' href='#'>{% trans "Take a step back" %}</a>
|
||||
· <a id='reload' href='#'>{% trans "Try again" %}</a>
|
||||
</p>
|
||||
<img src="{% static "pretixbase/img/pretix-logo.svg" %}" class="logo"/>
|
||||
</div>
|
||||
{% endblock %}
|
||||
@@ -199,7 +199,7 @@
|
||||
<tr>
|
||||
<td style="line-height: 0">
|
||||
<img class="wide" src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAlgAAAA8CAAAAACf95tlAAAAAXNCSVQI5gpbmQAAAAlwSFlzAAAOxAAADsQBlSsOGwAAABl0RVh0U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAAAG/SURBVHja7dvRboMwDIXhvf/DLiQQAwkku9+qDgq2hPyfN6j1qTlx06/uMunbLMnnhL98fuzRDtYILEeZ7GBNwAIWsIB1LdkOVgaWo4gdLAGWo6x2sFZgOUq1g1WB5SjNDlYDlqcEK1dDB5anmK3eE7C4FnIpBNbVFLo7sB7d3huwKFlULGA9pWQJsJxls4G1ActbooWr2IHlLbMFrBlY7rJbwNqBxb2QZ8nAuiUGO9ICLOo71R1YN0X9td8KLJ8ZeDEDrAd+Za3A4mLIz4TAujGqv+tUYPmN4v8LcweW3zS1t++hActzCrtRYD3pMJQOLOeJ7NyBpZFdoWaFDVjuJ6BRswpTBZbCAn5hpsDq/fbHpDMTBZbC1TAzT2ApyMIVsDROQ2GWwFJo8PR2YP3eOtywzwrsGYD1J9vlHXzcmSKw7q/wU2OEwHpdtALHILA00jJfV8DSaVofvYOPlckB658sp/8VNrBkANahqnXqfhhXJgasgymHD8REZwfWmezzga+tQdhcAet0qry1FYV3osD6dP1QJL3YbYUkhfUCsK6einWRPI0pxjROWZbK+QcsAiwCLEKARYBFgEXIu/wAYbjtwujw8KwAAAAASUVORK5CYII="
|
||||
style="max-height: 60px;">
|
||||
style="max-height: 60px;" alt="">
|
||||
</td>
|
||||
</tr>
|
||||
<!--<![endif]-->
|
||||
@@ -233,7 +233,7 @@
|
||||
<td style="line-height: 0">
|
||||
<br>
|
||||
<img class="wide" src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAlgAAAA8CAYAAAC6nMS5AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAAOxAAADsQBlSsOGwAAABl0RVh0U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAAAPnSURBVHic7d3dbuJIEAbQsg2Ecd7/TQeDf3svVuFmdjJLxsGm+xwJKXcpqS76U3VTVCmlFAAArKbeugAAgNwIWAAAKxOwAABWJmABAKxMwAIAWNlh6wIAIiKWZYllWWKe5/vfEREppfsnIqKqqvsnIqKu66jrOpqmuf8NsDUBC3i6lFJM0xTjOMY0TbEsS6y1MaaqqqjrOg6HQxyPxzgcDvcwBvAslT1YwDN8BKpxHGOe56f+76Zp4ng83gMXwHcTsIBvsyxLDMMQfd/fr/y2Vtd1nE6nOJ1O0TTN1uUAmRKwgNV9hKppmrYu5VOHwyHO53Mcj8etSwEyI2ABqxmGIW6329OvAP9WXddxPp/j7e1t61KATAhYwF/r+z5ut9turgG/StAC1iJgAV82z3N0Xbf7q8BHNU0Tbdt6EA98mYAFPCylFNfrNfq+37qUb3U6naJtW2segIcJWMBDhmGIrutW21u1d1VVRdu2cTqdti4FeCECFvC/lDK1+h3TLOARAhbwR/M8x+VyeblvB66taZp4f3+3Pwv4IwEL+NQ4jnG5XIq5EvyTqqri/f3d7izgUwIW8FvDMMTlctm6jF1q29Y6B+C3BCzgP91ut7her1uXsWvn8zl+/PixdRnADglYwC+6riv2Mfuj3t7eom3brcsAdqbeugBgX4Srx/R9H13XbV0GsDMCFnB3u92Eqy/4+KkggA8CFhAR/z5o9+bq60reEQb8SsAC7qsY+Dtd18U4jluXAeyAgAWFW5ZFuFqRhaxAhIAFxfv586cloitKKVnMCghYULKu60xbvsE8z96zQeEELCjUOI4eZX+jvu+9x4KCCVhQoI9rLL6Xq0Iol4AFBbperw7+J0gpuSqEQglYUJh5nl0NPlHf9zFN09ZlAE8mYEFh/KzL85liQXkELCiIaco2pmmKYRi2LgN4IgELCuL38rZjigVlEbCgEMMwxLIsW5dRrGVZTLGgIAIWFML0ant6AOUQsKAA4zja2L4D8zxbPgqFELCgACYn+6EXUAYBCzK3LItvDu7INE3ewkEBBCzInIfV+6MnkD8BCzLnMN8fPYH8CViQsXmePW7fIX2B/AlYkDGTkv3SG8ibgAUZ87h9v/QG8iZgQaZSSg7xHZumKVJKW5cBfBMBCzIlXO2fHkG+BCzIlMN7//QI8iVgQaYc3vunR5AvAQsyZQ3A/tnoDvkSsCBDKSUPqF/Asiz6BJkSsCBDplevQ68gTwIWZMjV0+vQK8iTgAUZMhV5HXoFeRKwIEPe9bwOvYI8CViQIYf269AryJOABQCwMgELMmQq8jr0CvIkYEGGHNqvQ68gT/8AETAn3pyLgvsAAAAASUVORK5CYII="
|
||||
style="max-height: 60px;">
|
||||
style="max-height: 60px;" alt="">
|
||||
</td>
|
||||
</tr>
|
||||
<!--<![endif]-->
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
<td>
|
||||
<div style="line-height: 18px;height: 18px;"> </div>
|
||||
<img class="wide" src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAlgAAAAEBAMAAACgm1xKAAABhGlDQ1BJQ0MgcHJvZmlsZQAAKJF9kT1Iw0AcxV/TSkUrDu0g4pChOlkQleKoVShChVArtOpgcukXNGlIUlwcBdeCgx+LVQcXZ10dXAVB8APExdVJ0UVK/F9aaBHjwXE/3t173L0DhEaFaVZgAtB020wnE2I2tyoGXxFCAP0IIy4zy5iTpBQ8x9c9fHy9i/Es73N/jgE1bzHAJxLPMsO0iTeI45u2wXmfOMJKskp8Tjxu0gWJH7mutPiNc9FlgWdGzEx6njhCLBa7WOliVjI14mniqKrplC9kW6xy3uKsVWqsfU/+wlBeX1nmOs0RJLGIJUgQoaCGMiqwEaNVJ8VCmvYTHv5h1y+RSyFXGYwcC6hCg+z6wf/gd7dWYWqylRRKAD0vjvMxCgR3gWbdcb6PHad5AvifgSu94682gJlP0usdLXoEDG4DF9cdTdkDLneAoSdDNmVX8tMUCgXg/Yy+KQeEb4G+tVZv7X2cPgAZ6ip1AxwcAmNFyl73eHdvd2//nmn39wNhNnKgJpT5BQAAAC1QTFRF7u7u7+/v8PDw8fHx8vLy9PT09fX19/f3+Pj4+fn5+vr6/Pz8/f39/v7+////BLnnfgAAAAlwSFlzAAAOxAAADsQBlSsOGwAAAAd0SU1FB+MMBAsUDD3bzUUAAABhSURBVDjLY2BAgLp3CNCAJO6HJH4ASZwXSfwxkjgnkvhzJHFWJPHXSOKMSOLvFJAk1iGJJyCJ5yGJL0AS10MSf4Akzo0k/hRJnB1J/CWSOAuS+FsG7GA0sEYDazSwBiSwAPzzGpfLqBMlAAAAAElFTkSuQmCC"
|
||||
style="max-height: 4px;">
|
||||
style="max-height: 4px;" alt="">
|
||||
<div style="line-height: 18px;height: 18px;"> </div>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
@@ -96,6 +96,7 @@ ALLOWED_ATTRIBUTES = {
|
||||
'div': ['class'],
|
||||
'p': ['class'],
|
||||
'span': ['class', 'title'],
|
||||
'ol': ['start'],
|
||||
# Update doc/user/markdown.rst if you change this!
|
||||
}
|
||||
|
||||
|
||||
@@ -216,7 +216,8 @@ class AsyncFormView(AsyncMixin, FormView):
|
||||
task_base = ProfiledEventTask
|
||||
|
||||
def __init_subclass__(cls):
|
||||
def async_execute(self, *, request_path, query_string, form_kwargs, locale, tz, organizer=None, event=None, user=None, session_key=None):
|
||||
def async_execute(self, *, request_path, query_string, form_kwargs, locale, tz, url_kwargs=None, url_args=None,
|
||||
organizer=None, event=None, user=None, session_key=None):
|
||||
view_instance = cls()
|
||||
form_kwargs['data'] = QueryDict(form_kwargs['data'])
|
||||
req = RequestFactory().post(
|
||||
@@ -225,6 +226,8 @@ class AsyncFormView(AsyncMixin, FormView):
|
||||
content_type='application/x-www-form-urlencoded'
|
||||
)
|
||||
view_instance.request = req
|
||||
view_instance.kwargs = url_kwargs
|
||||
view_instance.args = url_args
|
||||
if event:
|
||||
view_instance.request.event = event
|
||||
view_instance.request.organizer = event.organizer
|
||||
@@ -284,6 +287,8 @@ class AsyncFormView(AsyncMixin, FormView):
|
||||
'request_path': self.request.path,
|
||||
'query_string': self.request.GET.urlencode(),
|
||||
'form_kwargs': form_kwargs,
|
||||
'url_args': self.args,
|
||||
'url_kwargs': self.kwargs,
|
||||
'locale': get_language(),
|
||||
'tz': get_current_timezone().zone,
|
||||
}
|
||||
@@ -336,6 +341,8 @@ class AsyncPostView(AsyncMixin, View):
|
||||
content_type='application/x-www-form-urlencoded'
|
||||
)
|
||||
view_instance.request = req
|
||||
view_instance.kwargs = url_kwargs
|
||||
view_instance.args = url_args
|
||||
if event:
|
||||
view_instance.request.event = event
|
||||
view_instance.request.organizer = event.organizer
|
||||
|
||||
@@ -71,7 +71,7 @@ def _default_context(request):
|
||||
except Resolver404:
|
||||
return {}
|
||||
|
||||
if not request.path.startswith(get_script_prefix() + 'control'):
|
||||
if not request.path.startswith(get_script_prefix() + 'control') or not hasattr(request, 'user'):
|
||||
return {}
|
||||
ctx = {
|
||||
'url_name': url.url_name,
|
||||
|
||||
@@ -48,6 +48,8 @@ from django.utils.timezone import now
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django_scopes.forms import SafeModelMultipleChoiceField
|
||||
|
||||
from pretix.helpers.hierarkey import clean_filename
|
||||
|
||||
from ...base.forms import I18nModelForm
|
||||
|
||||
# Import for backwards compatibility with old import paths
|
||||
@@ -127,7 +129,7 @@ class ClearableBasenameFileInput(forms.ClearableFileInput):
|
||||
def __str__(self):
|
||||
if hasattr(self.file, 'display_name'):
|
||||
return self.file.display_name
|
||||
return os.path.basename(self.file.name).split('.', 1)[-1]
|
||||
return clean_filename(os.path.basename(self.file.name))
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
|
||||
@@ -22,9 +22,10 @@
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from django import forms
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.urls import reverse
|
||||
from django.utils.timezone import get_current_timezone, make_aware, now
|
||||
from django.utils.translation import pgettext_lazy
|
||||
from django.utils.translation import gettext_lazy as _, pgettext_lazy
|
||||
from django_scopes.forms import (
|
||||
SafeModelChoiceField, SafeModelMultipleChoiceField,
|
||||
)
|
||||
@@ -109,6 +110,7 @@ class CheckinListForm(forms.ModelForm):
|
||||
'rules',
|
||||
'gates',
|
||||
'exit_all_at',
|
||||
'addon_match',
|
||||
]
|
||||
widgets = {
|
||||
'limit_products': forms.CheckboxSelectMultiple(attrs={
|
||||
@@ -130,6 +132,12 @@ class CheckinListForm(forms.ModelForm):
|
||||
def clean(self):
|
||||
d = super().clean()
|
||||
d['rules'] = CheckinList.validate_rules(d.get('rules'))
|
||||
|
||||
if d.get('addon_match') and d.get('all_products'):
|
||||
raise ValidationError(_('If you allow checking in add-on tickets by scanning the main ticket, you must '
|
||||
'select a specific set of products for this check-in list, only including the '
|
||||
'possible add-on products.'))
|
||||
|
||||
return d
|
||||
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ from urllib.parse import urlencode, urlparse
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.validators import validate_email
|
||||
from django.core.validators import MaxValueValidator, validate_email
|
||||
from django.db.models import Prefetch, Q, prefetch_related_objects
|
||||
from django.forms import (
|
||||
CheckboxSelectMultiple, formset_factory, inlineformset_factory,
|
||||
@@ -674,6 +674,7 @@ class CancelSettingsForm(SettingsForm):
|
||||
'cancel_allow_user_paid_adjust_fees_step',
|
||||
'cancel_allow_user_paid_refund_as_giftcard',
|
||||
'cancel_allow_user_paid_require_approval',
|
||||
'cancel_allow_user_paid_require_approval_fee_unknown',
|
||||
'change_allow_user_variation',
|
||||
'change_allow_user_price',
|
||||
'change_allow_user_until',
|
||||
@@ -847,6 +848,7 @@ class InvoiceSettingsForm(SettingsForm):
|
||||
self.fields['invoice_generate_sales_channels'].choices = (
|
||||
(c.identifier, c.verbose_name) for c in get_all_sales_channels().values()
|
||||
)
|
||||
self.fields['invoice_numbers_counter_length'].validators.append(MaxValueValidator(15))
|
||||
|
||||
def clean(self):
|
||||
data = super().clean()
|
||||
|
||||
@@ -129,6 +129,11 @@ class QuestionForm(I18nModelForm):
|
||||
|
||||
return val
|
||||
|
||||
def clean_identifier(self):
|
||||
val = self.cleaned_data.get('identifier')
|
||||
Question._clean_identifier(self.instance.event, val, self.instance)
|
||||
return val
|
||||
|
||||
def clean(self):
|
||||
d = super().clean()
|
||||
if d.get('dependency_question') and not d.get('dependency_values'):
|
||||
|
||||