Mirror of https://github.com/pretix/pretix.git (synced 2025-12-06 21:42:49 +00:00)

Compare commits: fix-pdf-ed ... release/4.

238 commits (first listed: 9e91197c5d, last listed: 53a0d63dce)
23 .github/ISSUE_TEMPLATE/bug_report.md (vendored)
@@ -1,23 +0,0 @@
---
name: Bug report
about: Please only create issues for bug reports. Feature requests or general questions
  should start as a "Discussion" on GitHub.
title: ''
labels: ''
assignees: ''

---

<!-- Please only create issues for bug reports. Feature requests or general questions should start as a "Discussion" on GitHub. -->

**Describe the bug**
A clear and concise description of what the bug is.

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Additional context**
Add any other context about the problem here.
53 .github/ISSUE_TEMPLATE/bug_report.yml (vendored, new file)
@@ -0,0 +1,53 @@
name: Bug report
description: Please only create issues for bug reports. Feature requests or general questions should start as a "Discussion" on GitHub.
body:
  - type: markdown
    attributes:
      value: Please make sure to search our issues for similar bugs first! If the bug has already been reported, react with a thumbs-up and/or leave a comment providing further details.
  - type: textarea
    id: current
    attributes:
      label: Problem and impact
      description: What problem are you running into? What impact does it have on you / your event?
      placeholder: When trying to do ____, pretix suddenly shows me an error saying "...".
  - type: textarea
    id: expected
    attributes:
      label: Expected behaviour
      description: Sometimes bugs are subtle and the expected behaviour may need some explanation. Leave empty if it's just "Don't be broken."
  - type: textarea
    id: reproduction
    attributes:
      label: Steps to reproduce
      description: "Please give as much context as possible: Are there any settings that impact this behaviour?"
      placeholder: |
        1.
        2.
        3.
        4.
  - type: textarea
    id: screenshots
    attributes:
      label: Screenshots
      description: If possible, show screenshots of the problem.
  - type: input
    id: link
    attributes:
      label: Link
      description: Link to the page where the bug occurs
  - type: input
    id: browser
    attributes:
      label: Browser (software, desktop or mobile?) and version
      description: Leave empty for backend problems
  - type: input
    id: os
    attributes:
      label: Operating system, dependency versions
      description: Leave empty for frontend problems
  - type: input
    id: version
    attributes:
      label: Version
      description: The pretix version in use. (Leave empty if unknown.)
8 .github/ISSUE_TEMPLATE/config.yml (vendored, new file)
@@ -0,0 +1,8 @@
blank_issues_enabled: true
contact_links:
  - name: Community Support
    url: https://github.com/pretix/pretix/discussions/categories/q-a
    about: Not sure how to do Y? Please post your support requests in the Q&A section of our GitHub Discussions instead!
  - name: Feature ideas
    url: https://github.com/pretix/pretix/discussions/categories/ideas
    about: Please post your idea in the Ideas section of our GitHub Discussions instead!
@@ -1 +0,0 @@
-r doc/requirements.txt
15 .readthedocs.yaml (new file)
@@ -0,0 +1,15 @@
version: 2
sphinx:
  configuration: doc/conf.py
build:
  os: ubuntu-22.04
  tools:
    python: "3.8"
    nodejs: "16"
  apt_packages:
    - gettext
python:
  install:
    - method: pip
      path: ./src/
    - requirements: doc/requirements.rtd.txt
20 SECURITY.md (new file)
@@ -0,0 +1,20 @@
# Security policy

## Reporting a vulnerability

If you discover a vulnerability in our software or server systems, please report it to us in private. Do not attempt to harm our users, our customers' data, or our systems' availability when looking for vulnerabilities.

Please contact us at security@pretix.eu with full details and steps to reproduce, and allow reasonable time for us to resolve the issue before publishing your findings. If you wish to encrypt your email, you can find our GPG key [here](https://pretix.eu/.well-known/security@pretix.eu.asc).

We're not large enough to run a formal bug bounty program, but if you find a serious vulnerability in our service, we will find a way to show our gratitude.

## Version support

Security support is provided for the current stable release as well as the two previous stable releases.
Be sure to keep your pretix installation up to date.

New releases and security issues will be announced on our [blog](https://pretix.eu/about/en/blog/). If you
subscribe to our [newsletter](https://pretix.eu/about/en/blog/) in the "News about self-hosting pretix"
category, we will also send you an email on security issues.

Past security issues are listed [on our website](https://pretix.eu/about/en/security).
@@ -2,6 +2,7 @@
|
||||
file=/tmp/supervisor.sock
|
||||
|
||||
[supervisord]
|
||||
environment = AUTOMIGRATE="skip"
|
||||
logfile=/dev/stdout
|
||||
logfile_maxbytes=0
|
||||
loglevel=info
|
||||
|
||||
@@ -6067,6 +6067,10 @@ url('../opensans_regular_macroman/OpenSans-Regular-webfont.svg#open_sansregular'
|
||||
img.screenshot, a.screenshot img {
|
||||
box-shadow: 0 4px 18px 0 rgba(0,0,0,0.1), 0 6px 20px 0 rgba(0,0,0,0.09);
|
||||
}
|
||||
section > a.screenshot {
|
||||
display: block;
|
||||
margin-bottom: 24px;
|
||||
}
|
||||
|
||||
/* Changes */
|
||||
.versionchanged {
|
||||
|
||||
@@ -117,6 +117,9 @@ Example::
|
||||
``loglevel``
|
||||
Set console and file log level (``DEBUG``, ``INFO``, ``WARNING``, ``ERROR`` or ``CRITICAL``). Defaults to ``INFO``.
|
||||
|
||||
``request_id_header``
|
||||
Specifies the name of a header that should be used for logging request IDs. Off by default.
|
||||
|
||||
Locale settings
|
||||
---------------
|
||||
|
||||
|
||||
@@ -105,6 +105,37 @@ following endpoint:
|
||||
|
||||
You will receive a response equivalent to the response of your initialization request.
|
||||
|
||||
Device Information
|
||||
------------------
|
||||
|
||||
You can request information about your device and the server with one call:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
GET /api/v1/device/info HTTP/1.1
|
||||
Host: pretix.eu
|
||||
|
||||
The response will look like this:
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
HTTP/1.1 200 OK
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"device": {
|
||||
"organizer": "foo",
|
||||
"device_id": 5,
|
||||
"unique_serial": "HHZ9LW9JWP390VFZ",
|
||||
"api_token": "1kcsh572fonm3hawalrncam4l1gktr2rzx25a22l8g9hx108o9oi0rztpcvwnfnd",
|
||||
"name": "Bar",
|
||||
"gate": {
|
||||
"id": 3,
|
||||
"name": "South entrance"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
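For illustration, a minimal Python sketch of this call. The token is the placeholder value from the example above, and the ``Authorization: Device …`` header is the same scheme used by the other device endpoints; treat the details as a hedged sketch rather than a verbatim recipe::

    import requests

    API_TOKEN = "1kcsh572fonm3hawalrncam4l1gktr2rzx25a22l8g9hx108o9oi0rztpcvwnfnd"  # placeholder

    r = requests.get(
        "https://pretix.eu/api/v1/device/info",
        headers={"Authorization": f"Device {API_TOKEN}"},
    )
    r.raise_for_status()
    info = r.json()
    # Print a few fields from the documented response structure.
    print(info["device"]["name"], info["device"]["gate"])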
Creating a new API key
|
||||
----------------------
|
||||
|
||||
|
||||
@@ -17,8 +17,8 @@ The cart position resource contains the following public fields:
|
||||
Field Type Description
|
||||
===================================== ========================== =======================================================
|
||||
id integer Internal ID of the cart position
|
||||
cart_id string Identifier of the cart this belongs to. Needs to end
|
||||
in "@api" for API-created positions.
|
||||
cart_id string Identifier of the cart this belongs to, needs to end
|
||||
in "@api" for API-created positions
|
||||
datetime datetime Time of creation
|
||||
expires datetime The cart position will expire at this time and no longer block quota
|
||||
item integer ID of the item
|
||||
@@ -29,14 +29,15 @@ attendee_name_parts object of strings Composition of
|
||||
attendee_email string Specified attendee email address for this position (or ``null``)
|
||||
voucher integer Internal ID of the voucher used for this position (or ``null``)
|
||||
addon_to integer Internal ID of the position this position is an add-on for (or ``null``)
|
||||
subevent integer ID of the date inside an event series this position belongs to (or ``null``).
|
||||
is_bundled boolean If ``addon_to`` is set, this shows whether this is a bundled product or an addon product
|
||||
subevent integer ID of the date inside an event series this position belongs to (or ``null``)
|
||||
answers list of objects Answers to user-defined questions
|
||||
├ question integer Internal ID of the answered question
|
||||
├ answer string Text representation of the answer
|
||||
├ question_identifier string The question's ``identifier`` field
|
||||
├ options list of integers Internal IDs of selected option(s)s (only for choice types)
|
||||
└ option_identifiers list of strings The ``identifier`` fields of the selected option(s)s
|
||||
seat objects The assigned seat. Can be ``null``.
|
||||
seat objects The assigned seat (or ``null``)
|
||||
├ id integer Internal ID of the seat instance
|
||||
├ name string Human-readable seat name
|
||||
└ seat_guid string Identifier of the seat within the seating plan
|
||||
@@ -46,6 +47,10 @@ seat objects The assigned se
|
||||
|
||||
The ``seat`` attribute has been added.
|
||||
|
||||
.. versionchanged:: 4.14
|
||||
|
||||
The ``is_bundled`` attribute has been added and the cart creation endpoints have been updated.
|
||||
|
||||
|
||||
Cart position endpoints
|
||||
-----------------------
|
||||
@@ -87,6 +92,7 @@ Cart position endpoints
|
||||
"attendee_email": null,
|
||||
"voucher": null,
|
||||
"addon_to": null,
|
||||
"is_bundled": false,
|
||||
"subevent": null,
|
||||
"datetime": "2018-06-11T10:00:00Z",
|
||||
"expires": "2018-06-11T10:00:00Z",
|
||||
@@ -133,6 +139,7 @@ Cart position endpoints
|
||||
"attendee_email": null,
|
||||
"voucher": null,
|
||||
"addon_to": null,
|
||||
"is_bundled": false,
|
||||
"subevent": null,
|
||||
"datetime": "2018-06-11T10:00:00Z",
|
||||
"expires": "2018-06-11T10:00:00Z",
|
||||
@@ -168,7 +175,7 @@ Cart position endpoints
|
||||
|
||||
* does not validate if the event's ticket sales are already over or haven't started
|
||||
|
||||
* does not support add-on products at the moment
|
||||
* does not validate constraints on add-on products at the moment
|
||||
|
||||
* does not check or calculate prices but believes any prices you send
|
||||
|
||||
@@ -176,6 +183,8 @@ Cart position endpoints
|
||||
|
||||
* does not support file upload questions
|
||||
|
||||
Note that more validation might be added in the future, so please do not rely on missing validation.
|
||||
|
||||
You can supply the following fields of the resource:
|
||||
|
||||
* ``cart_id`` (optional, needs to end in ``@api``)
|
||||
@@ -190,6 +199,8 @@ Cart position endpoints
|
||||
* ``includes_tax`` (optional, **deprecated**, do not use, will be removed)
|
||||
* ``sales_channel`` (optional)
|
||||
* ``voucher`` (optional, expect a voucher code)
|
||||
* ``addons`` (optional, expect a list of nested objects of cart positions)
|
||||
* ``bundled`` (optional, expect a list of nested objects of cart positions)
|
||||
* ``answers``
|
||||
|
||||
* ``question``
|
||||
@@ -221,6 +232,12 @@ Cart position endpoints
|
||||
"options": []
|
||||
}
|
||||
],
|
||||
"addons": [
|
||||
{
|
||||
"item": 2,
|
||||
"variation": null,
|
||||
}
|
||||
],
|
||||
"subevent": null
|
||||
}
|
||||
|
||||
@@ -232,7 +249,7 @@ Cart position endpoints
|
||||
Vary: Accept
|
||||
Content-Type: application/json
|
||||
|
||||
(Full cart position resource, see above.)
|
||||
(Full cart position resource, see above, with additional nested objects "addons" and "bundled".)
|
||||
|
||||
:param organizer: The ``slug`` field of the organizer of the event to create a position for
|
||||
:param event: The ``slug`` field of the event to create a position for
|
||||
@@ -244,8 +261,8 @@ Cart position endpoints
|
||||
|
||||
.. http:post:: /api/v1/organizers/(organizer)/events/(event)/cartpositions/bulk_create/
|
||||
|
||||
Creates multiple new cart position. This operation is deliberately not atomic, so each cart position can succeed
|
||||
or fail individually, so the response code of the response is not the only thing to look at!
|
||||
Creates multiple new cart positions. **This operation is deliberately not atomic, so each cart position can succeed
or fail individually; the status code of the response is not the only thing to look at!**
|
||||
|
||||
.. warning:: This endpoint is considered **experimental**. It might change at any time without prior notice.
|
||||
|
||||
|
||||
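To make the request shape concrete, here is a hedged Python sketch that creates a single cart position with a nested add-on via the ``cartpositions/`` endpoint documented above. The organizer, event, token, item IDs, and the price are illustrative placeholders, and validation is limited as described above::

    import requests

    ORGANIZER, EVENT = "bigevents", "sampleconf"   # placeholders
    API_TOKEN = "yourapitoken"                     # placeholder

    payload = {
        "item": 1,
        "variation": None,
        "price": "23.00",          # prices are taken as-is, not recalculated
        "attendee_name": "Peter",
        "addons": [
            {"item": 2, "variation": None},
        ],
    }
    r = requests.post(
        f"https://pretix.eu/api/v1/organizers/{ORGANIZER}/events/{EVENT}/cartpositions/",
        headers={"Authorization": f"Token {API_TOKEN}"},
        json=payload,
    )
    r.raise_for_status()
    print(r.json()["id"], r.json()["expires"])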
@@ -14,7 +14,10 @@ The customer resource contains the following public fields:
|
||||
Field Type Description
|
||||
===================================== ========================== =======================================================
|
||||
identifier string Internal ID of the customer
|
||||
external_identifier string External ID of the customer (or ``null``)
|
||||
external_identifier string External ID of the customer (or ``null``). This field can
|
||||
be changed for customers created manually or through
|
||||
the API, but is read-only for customers created through a
|
||||
SSO integration.
|
||||
email string Customer email address
|
||||
name string Name of this customer (or ``null``)
|
||||
name_parts object of strings Decomposition of name (i.e. given name, family name)
|
||||
@@ -26,10 +29,16 @@ date_joined datetime Date and time o
|
||||
locale string Preferred language of the customer
|
||||
last_modified datetime Date and time of modification of the record
|
||||
notes string Internal notes and comments (or ``null``)
|
||||
password string Can only be set during creation of a new customer, will
|
||||
not be included in any responses.
|
||||
===================================== ========================== =======================================================
|
||||
|
||||
.. versionadded:: 4.0
|
||||
|
||||
.. versionchanged:: 4.3
|
||||
|
||||
Passwords can now be set through the API during customer creation.
|
||||
|
||||
Endpoints
|
||||
---------
|
||||
|
||||
@@ -146,6 +155,7 @@ Endpoints
|
||||
|
||||
{
|
||||
"email": "test@example.org",
|
||||
"password": "verysecret",
|
||||
"send_email": true
|
||||
}
|
||||
|
||||
|
||||
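A hedged Python sketch of creating a customer with a password, mirroring the request body above. The organizer slug, token, and the ``/customers/`` URL path are assumptions for illustration::

    import requests

    r = requests.post(
        "https://pretix.eu/api/v1/organizers/bigevents/customers/",  # path assumed for illustration
        headers={"Authorization": "Token yourapitoken"},             # placeholder token
        json={
            "email": "test@example.org",
            "password": "verysecret",  # only accepted on creation, never returned
            "send_email": True,
        },
    )
    r.raise_for_status()
    print(r.json()["identifier"])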
@@ -1925,7 +1925,7 @@ otherwise, such as splitting an order or changing fees.
|
||||
|
||||
.. sourcecode:: http
|
||||
|
||||
POST /api/v1/organizers/bigevents/events/sampleconf/orders/ABC12/ HTTP/1.1
|
||||
POST /api/v1/organizers/bigevents/events/sampleconf/orders/ABC12/change/ HTTP/1.1
|
||||
Host: pretix.eu
|
||||
Accept: application/json, text/javascript
|
||||
Content-Type: application/json
|
||||
|
||||
@@ -19,6 +19,8 @@ max_usages integer The maximum num
|
||||
redeemed (default: 1).
|
||||
redeemed integer The number of times this voucher already has been
|
||||
redeemed.
|
||||
min_usages integer The minimum number of times this voucher must be
|
||||
redeemed on first usage (default: 1).
|
||||
valid_until datetime The voucher expiration date (or ``null``).
|
||||
block_quota boolean If ``true``, quota is blocked for this voucher.
|
||||
allow_ignore_quota boolean If ``true``, this voucher can be redeemed even if a
|
||||
|
||||
@@ -36,10 +36,16 @@ The following values for ``action_types`` are valid with pretix core:
|
||||
* ``pretix.event.order.canceled``
|
||||
* ``pretix.event.order.reactivated``
|
||||
* ``pretix.event.order.expired``
|
||||
* ``pretix.event.order.expirychanged``
|
||||
* ``pretix.event.order.modified``
|
||||
* ``pretix.event.order.contact.changed``
|
||||
* ``pretix.event.order.changed.*``
|
||||
* ``pretix.event.order.refund.created``
|
||||
* ``pretix.event.order.refund.created.externally``
|
||||
* ``pretix.event.order.refund.requested``
|
||||
* ``pretix.event.order.refund.done``
|
||||
* ``pretix.event.order.refund.canceled``
|
||||
* ``pretix.event.order.refund.failed``
|
||||
* ``pretix.event.order.approved``
|
||||
* ``pretix.event.order.denied``
|
||||
* ``pretix.event.checkin``
|
||||
@@ -50,6 +56,10 @@ The following values for ``action_types`` are valid with pretix core:
|
||||
* ``pretix.subevent.added``
|
||||
* ``pretix.subevent.changed``
|
||||
* ``pretix.subevent.deleted``
|
||||
* ``pretix.event.live.activated``
|
||||
* ``pretix.event.live.deactivated``
|
||||
* ``pretix.event.testmode.activated``
|
||||
* ``pretix.event.testmode.deactivated``
|
||||
|
||||
Installed plugins might register more valid values.
|
||||
|
||||
|
||||
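As an illustration, a webhook listening only to a subset of these action types could be registered through the organizer-level webhooks endpoint roughly like this. The URL path and field names are assumptions for illustration, not taken from the excerpt above::

    import requests

    r = requests.post(
        "https://pretix.eu/api/v1/organizers/bigevents/webhooks/",  # path and fields assumed
        headers={"Authorization": "Token yourapitoken"},            # placeholder token
        json={
            "enabled": True,
            "target_url": "https://example.org/pretix-hook",
            "all_events": True,
            "limit_events": [],
            "action_types": [
                "pretix.event.order.refund.requested",
                "pretix.event.order.expirychanged",
            ],
        },
    )
    r.raise_for_status()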
@@ -92,9 +92,10 @@ If any other status code is returned, we will assume you did not receive the cal
|
||||
or ``304 Not Modified`` response will be treated as a failure. pretix will not follow any ``301`` or ``302`` redirect
|
||||
headers and pretix will ignore all other information in your response headers or body.
|
||||
|
||||
If we do not receive a status code in the range of ``200`` and ``299``, pretix will retry to deliver for up to three
|
||||
days with an exponential back off. Therefore, we recommend that you implement your endpoint in a way where calling it
|
||||
multiple times for the same event due to a perceived error does not do any harm.
|
||||
If we do not receive a status code in the range of ``200`` to ``299``, or do not receive any response within a 30-second
|
||||
time frame, pretix will retry to deliver for up to three days with an exponential back off. Therefore, we recommend that
|
||||
you implement your endpoint in a way where calling it multiple times for the same event due to a perceived error does
|
||||
not do any harm.
|
||||
|
||||
There is only one exception: If status code ``410 Gone`` is returned, we will assume the
|
||||
endpoint does not exist any more and automatically disable the webhook.
|
||||
|
||||
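To make the retry semantics concrete: a receiver should acknowledge quickly and treat repeated deliveries as no-ops. A minimal, framework-free Python sketch; the ``notification_id`` deduplication key and the in-memory store are assumptions for illustration, not part of pretix itself::

    import json
    from http.server import BaseHTTPRequestHandler, HTTPServer

    seen = set()  # use persistent storage in a real deployment

    class PretixWebhookHandler(BaseHTTPRequestHandler):
        def do_POST(self):
            length = int(self.headers.get("Content-Length", 0))
            payload = json.loads(self.rfile.read(length) or b"{}")
            key = payload.get("notification_id")  # assumed deduplication key
            if key not in seen:
                seen.add(key)
                # hand the real processing off to a queue/worker here
            self.send_response(200)  # always acknowledge within the 30-second window
            self.end_headers()

    HTTPServer(("", 8080), PretixWebhookHandler).serve_forever()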
@@ -60,7 +60,13 @@ The exporter class
|
||||
.. py:attribute:: BaseExporter.event
|
||||
|
||||
The default constructor sets this property to the event we are currently
|
||||
working for.
|
||||
working for. This will be ``None`` if the exporter is run for multiple
|
||||
events.
|
||||
|
||||
.. py:attribute:: BaseExporter.events
|
||||
|
||||
The default constructor sets this property to the list of events to work
|
||||
on, regardless of whether the exporter is called for one or multiple events.
|
||||
|
||||
.. autoattribute:: identifier
|
||||
|
||||
|
||||
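A hedged sketch of how an exporter might use both attributes: when ``event`` is ``None`` (a multi-event run) it falls back to iterating ``events``. The class below is purely illustrative and assumes the usual ``render(form_data)`` interface returning a file name, content type, and data::

    from pretix.base.exporter import BaseExporter

    class OrderCountExporter(BaseExporter):  # illustrative example, not part of pretix
        identifier = "ordercount"
        verbose_name = "Order count (CSV)"

        def render(self, form_data):
            # self.event is None when exporting for multiple events,
            # self.events is always the full list of events to work on.
            scope = self.event.slug if self.event else "multiple-events"
            rows = ["event,orders"]
            for event in self.events:
                rows.append(f"{event.slug},{event.orders.count()}")
            return f"orders-{scope}.csv", "text/csv", "\n".join(rows).encode()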
@@ -93,6 +93,7 @@ id integer Internal conten
|
||||
title multi-lingual string The content title (required)
|
||||
content_type string The type of content, valid values are ``webinar``, ``video``, ``livestream``, ``link``, ``file``
|
||||
url string The location of the digital content
|
||||
file file A downloadable file. Either ``url`` or ``file`` must be ``null``.
|
||||
description multi-lingual string A public description of the item. May contain Markdown
|
||||
syntax and is not required.
|
||||
available_from datetime The first date time at which this content will be shown
|
||||
@@ -144,6 +145,7 @@ API Endpoints
|
||||
},
|
||||
"content_type": "link",
|
||||
"url": "https://www.youtube.com/watch?v=dQw4w9WgXcQ",
|
||||
"file": null,
|
||||
"description": {
|
||||
"en": "Watch our event live here on YouTube!"
|
||||
},
|
||||
@@ -191,6 +193,7 @@ API Endpoints
|
||||
},
|
||||
"content_type": "link",
|
||||
"url": "https://www.youtube.com/watch?v=dQw4w9WgXcQ",
|
||||
"file": null,
|
||||
"description": {
|
||||
"en": "Watch our event live here on YouTube!"
|
||||
},
|
||||
@@ -229,6 +232,7 @@ API Endpoints
|
||||
},
|
||||
"content_type": "link",
|
||||
"url": "https://www.youtube.com/watch?v=dQw4w9WgXcQ",
|
||||
"file": null,
|
||||
"description": {
|
||||
"en": "Watch our event live here on YouTube!"
|
||||
},
|
||||
@@ -255,6 +259,7 @@ API Endpoints
|
||||
},
|
||||
"content_type": "link",
|
||||
"url": "https://www.youtube.com/watch?v=dQw4w9WgXcQ",
|
||||
"file": null,
|
||||
"description": {
|
||||
"en": "Watch our event live here on YouTube!"
|
||||
},
|
||||
@@ -309,6 +314,7 @@ API Endpoints
|
||||
},
|
||||
"content_type": "link",
|
||||
"url": "https://mywebsite.com",
|
||||
"file": null,
|
||||
"description": {
|
||||
"en": "Watch our event live here on YouTube!"
|
||||
},
|
||||
|
||||
10 doc/requirements.rtd.txt (new file)
@@ -0,0 +1,10 @@
sphinx==2.3.*
jinja2==3.0.*
sphinx-rtd-theme
sphinxcontrib-httpdomain
sphinxcontrib-images
sphinxcontrib-spelling==4.*
sphinxemoji
pygments-markdown-lexer
# See https://github.com/rfk/pyenchant/pull/130
git+https://github.com/raphaelm/pyenchant.git@patch-1#egg=pyenchant
BIN doc/screens/organizer/customer.png (new binary file, not shown; 96 KiB)
BIN doc/screens/organizer/customer_edit.png (new binary file, not shown; 69 KiB)
BIN doc/screens/organizer/customer_ssoclient_add.png (new binary file, not shown; 76 KiB)
BIN doc/screens/organizer/customer_ssoprovider_add.png (new binary file, not shown; 87 KiB)
BIN doc/screens/organizer/customers.png (new binary file, not shown; 104 KiB)
BIN doc/screens/organizer/edit_customer.png (new binary file, not shown; 98 KiB)
@@ -66,6 +66,7 @@ iterable
|
||||
Jimdo
|
||||
jwt
|
||||
JWT
|
||||
JWTs
|
||||
libpretixprint
|
||||
libsass
|
||||
linters
|
||||
@@ -88,7 +89,9 @@ nginx
|
||||
nodejs
|
||||
NotificationType
|
||||
npm
|
||||
OIDC
|
||||
ons
|
||||
OpenID
|
||||
optimizations
|
||||
overpayment
|
||||
param
|
||||
@@ -133,6 +136,7 @@ serializer
|
||||
serializers
|
||||
sexualized
|
||||
SQL
|
||||
SSO
|
||||
startup
|
||||
stdout
|
||||
stylesheet
|
||||
@@ -159,6 +163,8 @@ untrusted
|
||||
uptime
|
||||
username
|
||||
url
|
||||
URI
|
||||
URIs
|
||||
validator
|
||||
versa
|
||||
versioning
|
||||
|
||||
210 doc/user/customers/index.rst (new file)
@@ -0,0 +1,210 @@
|
||||
.. _customers:
|
||||
|
||||
Customer accounts
|
||||
=================
|
||||
|
||||
By default, pretix only offers guest checkout, i.e. ticket buyers do not sign up and sign back in, but create a new
|
||||
checkout session every time. In some situations it may be convenient to allow ticket buyers to create
|
||||
accounts that they can later log in to again. Working with customer accounts is even required for some advanced
|
||||
use cases such as described in the :ref:`seasontickets` article.
|
||||
|
||||
Enabling customer accounts
|
||||
--------------------------
|
||||
|
||||
To enable customer accounts, head to your organizer page in the backend and then select "Settings" → "General" →
|
||||
"Customer accounts" and turn on the checkbox "Allow customers to create accounts".
|
||||
|
||||
Using the other settings on the same tab you can fine-tune how the customer account system behaves:
|
||||
|
||||
.. thumbnail:: ../../screens/organizer/edit_customer.png
|
||||
:align: center
|
||||
:class: screenshot
|
||||
|
||||
Allow customers to log in with email address and password
|
||||
In all simple setups, this option should be checked. If this checkbox is removed, it is impossible to log in or
|
||||
sign up unless you connect an SSO provider (see below).
|
||||
|
||||
Match orders based on email address
|
||||
If this option is selected, customers will see orders made with their email address within their account even if
|
||||
they did not make those orders while logged in.
|
||||
|
||||
Name format, Allowed titles
|
||||
This controls how we'll ask your customers for their name, similar to the respective settings on event level.
|
||||
|
||||
Managing customer accounts
|
||||
--------------------------
|
||||
|
||||
After customer accounts have been enabled, you will find a new menu option "Customer accounts" in the organizer-level
|
||||
main menu. The first sub-item, "Customers", allows you to search and inspect the list of your customer accounts, as well
|
||||
as to create a new customer account from the backend:
|
||||
|
||||
.. thumbnail:: ../../screens/organizer/customers.png
|
||||
:align: center
|
||||
:class: screenshot
|
||||
|
||||
If you click on a customer ID, you can see all details of this customer account, including registration information,
|
||||
active memberships, past ticket orders, and account history:
|
||||
|
||||
.. thumbnail:: ../../screens/organizer/customer.png
|
||||
:align: center
|
||||
:class: screenshot
|
||||
|
||||
You can also perform various actions from this view, such as:
|
||||
|
||||
- Send a password reset link
|
||||
- Change registration information
|
||||
- Anonymize the customer account (does not anonymize connected orders)
|
||||
|
||||
When creating or changing a customer, you will be presented with the following form:
|
||||
|
||||
.. thumbnail:: ../../screens/organizer/customer_edit.png
|
||||
:align: center
|
||||
:class: screenshot
|
||||
|
||||
Most fields, such as name, e-mail address, phone number, and language should be self-explanatory. The following fields
|
||||
might require some explanation:
|
||||
|
||||
Account active
|
||||
If this checkbox is removed, the customer will not be able to log in.
|
||||
|
||||
External identifier
|
||||
This field can be used to cross-reference your customer database with other sources. For example, if the customer
|
||||
already has a number in another system, you can insert that number here. This can be especially powerful if you
|
||||
use our API for synchronization with an external system.
|
||||
|
||||
Verified email address
|
||||
This checkbox signifies whether you have verified that this customer in fact controls the given email address.
|
||||
This will automatically be checked after a successful registration or after a successful password reset. Before it
|
||||
is checked, the customer will not be able to log in. You should usually not modify this field manually.
|
||||
|
||||
Notes
|
||||
Entries in this field will only be visible to you and your team, not to the customer.
|
||||
|
||||
Single-Sign-On (SSO)
|
||||
--------------------
|
||||
|
||||
"Single-Sign-On" (SSO) is a technical term for a situation in which a person can log in to multiple systems using just
|
||||
one login. This can be convenient if you have multiple applications that are exposed to your customers: They won't have
|
||||
to remember multiple passwords or understand how your application landscape is structured, they can just always log in
|
||||
with the same credentials whenever they see your brand.
|
||||
|
||||
In this scenario, pretix can be **either** the "SSO provider" **or** the "SSO client".
|
||||
If pretix is the SSO provider, pretix will be the central source of truth for your customer accounts and your other
|
||||
applications can connect to pretix to use pretix's login functionality.
|
||||
If pretix is the SSO client, one of your existing systems will be the source of truth for the customer accounts and
|
||||
pretix will use that system's login functionality.
|
||||
|
||||
All SSO support for customer accounts in pretix is currently built on the `OpenID Connect`_ standard, a modern and
|
||||
widely accepted standard for SSO in all industries.
|
||||
|
||||
Connecting SSO clients (pretix as the SSO provider)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To connect an external application as an SSO client, go to "Customer accounts" → "SSO clients" → "Create a new SSO client"
|
||||
in your organizer account.
|
||||
|
||||
.. thumbnail:: ../../screens/organizer/customer_ssoclient_add.png
|
||||
:align: center
|
||||
:class: screenshot
|
||||
|
||||
You will need to fill out the following fields:
|
||||
|
||||
Active
|
||||
If this checkbox is removed, the SSO client can not be used.
|
||||
|
||||
Application name
|
||||
The name of your external application, e.g. "digital event marketplace".
|
||||
|
||||
Client type
|
||||
For a server-side application which is able to store a secret that will be inaccessible to end users, choose
|
||||
"confidential". For a client-side application, such as many mobile apps, choose "public".
|
||||
|
||||
Grant type
|
||||
This value depends on the OpenID Connect implementation of your software.
|
||||
|
||||
Redirection URIs
|
||||
One or multiple URIs that the user might be redirected to after the successful or failed login.
|
||||
|
||||
Allowed access scopes
|
||||
The types of data the SSO client may access about the customer.
|
||||
|
||||
After you have submitted all data, you will receive a client ID as well as a client secret. The client secret is shown
|
||||
in the green success message and will only ever be shown once. If you need it again, use the option "Invalidate old
|
||||
client secret and generate a new one".
|
||||
|
||||
You will need the client ID and client secret to configure your external application. The application will also likely
|
||||
need some other information from you, such as your **issuer URI**. If you use pretix Hosted and your organizer account
|
||||
does not have a custom domain, your issuer will be ``https://pretix.eu/myorgname``, where ``myorgname`` is the short
|
||||
form of your organizer account. If you use a custom domain, such as ``tickets.mycompany.net``, then your issuer will be
|
||||
``https://tickets.mycompany.net``.
|
||||
|
||||
Technical details
|
||||
"""""""""""""""""
|
||||
|
||||
We implement `OpenID Connect Core 1.0`_, except for some optional parts that do not make sense for pretix or bring no
|
||||
additional value. For example, we do not currently support encrypted tokens, offline access, refresh tokens, or passing
|
||||
request parameters as JWTs.
|
||||
|
||||
We implement the provider metadata section from `OpenID Connect Discovery 1.0`_. You can find the endpoint relative
|
||||
to the issuer URI as described above, for example ``https://pretix.eu/demo/.well-known/openid-configuration``.
|
||||
|
||||
We implement all three OpenID Connect Core flows:
|
||||
|
||||
- Authorization Code Flow (response type ``code``)
|
||||
- Implicit Flow (response types ``id_token token`` and ``id_token``)
|
||||
- Hybrid Flow (response types ``code id_token``, ``code id_token token``, and ``code token``)
|
||||
|
||||
We implement the response modes ``query`` and ``fragment``.
|
||||
|
||||
We currently offer the following scopes: ``openid``, ``profile``, ``email``, ``phone``
|
||||
|
||||
As well as the following standardized claims: ``iss``, ``aud``, ``exp``, ``iat``, ``auth_time``, ``nonce``, ``c_hash``,
|
||||
``at_hash``, ``sub``, ``locale``, ``name``, ``given_name``, ``family_name``, ``middle_name``, ``nickname``, ``email``,
|
||||
``email_verified``, ``phone_number``.
|
||||
|
||||
The various endpoints are located relative to the issuer URI as described above:
|
||||
|
||||
- Authorization: ``<issuer>/oauth2/v1/authorize``
|
||||
- Token: ``<issuer>/oauth2/v1/token``
|
||||
- User info: ``<issuer>/oauth2/v1/userinfo``
|
||||
- Keys: ``<issuer>/oauth2/v1/keys``
|
||||
|
||||
We currently do not reproduce their documentation here as they follow the OpenID Connect and OAuth specifications
|
||||
without any special behavior.
|
||||
|
||||
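For example, a client can discover all of these endpoints from the issuer URI alone through the standard discovery document; a short Python sketch against a placeholder issuer::

    import requests

    issuer = "https://pretix.eu/demo"  # replace with your own issuer URI
    conf = requests.get(f"{issuer}/.well-known/openid-configuration").json()

    print(conf["authorization_endpoint"])  # <issuer>/oauth2/v1/authorize
    print(conf["token_endpoint"])          # <issuer>/oauth2/v1/token
    print(conf["userinfo_endpoint"])       # <issuer>/oauth2/v1/userinfo
    print(conf["jwks_uri"])                # <issuer>/oauth2/v1/keys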
Connecting SSO providers (pretix as the SSO client)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To connect an external SSO provider, go to "Customer accounts" → "SSO providers" → "Create a new SSO provider"
|
||||
in your organizer account.
|
||||
|
||||
.. thumbnail:: ../../screens/organizer/customer_ssoprovider_add.png
|
||||
:align: center
|
||||
:class: screenshot
|
||||
|
||||
The "Provider name" and "Login button label" is what we'll use to show the new login option to the user. For the actual
|
||||
connection, we will require information such as the issuer URL, client ID, client secret, scope, and field (or claim)
|
||||
names that you will receive from your SSO provider.
|
||||
|
||||
.. note::
|
||||
|
||||
If you want your customers to *only* use your SSO provider, it makes sense to turn off the "Allow customers to log in
|
||||
with email address and password" settings option (see above).
|
||||
|
||||
Technical details
|
||||
"""""""""""""""""
|
||||
|
||||
We assume that SSO providers fulfill the following requirements:
|
||||
|
||||
- Implementation according to `OpenID Connect Core 1.0`_.
|
||||
|
||||
- Published meta-data document at ``<issuer>/.well-known/openid-configuration`` as specified in `OpenID Connect Discovery 1.0`_.
|
||||
|
||||
- Support for Authorization code flow (``response_type=code``) with ``response_mode=query``.
|
||||
|
||||
- Support for client authentication using client ID and client secret and without public key cryptography.
|
||||
|
||||
|
||||
.. _OpenID Connect: https://en.wikipedia.org/wiki/OpenID#OpenID_Connect_(OIDC)
|
||||
.. _OpenID Connect Core 1.0: https://openid.net/specs/openid-connect-core-1_0.html
|
||||
.. _OpenID Connect Discovery 1.0: https://openid.net/specs/openid-connect-discovery-1_0.html
|
||||
@@ -411,7 +411,7 @@ Hosted or pretix Enterprise are active, you can pass the following fields:
|
||||
};
|
||||
</script>
|
||||
|
||||
If you use ```analytics.js` (Universal Analytics)::
|
||||
If you use ``analytics.js`` (Universal Analytics)::
|
||||
|
||||
<script>
|
||||
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
|
||||
|
||||
@@ -12,6 +12,7 @@ wanting to use pretix to sell tickets.
|
||||
events/settings
|
||||
events/structureguide
|
||||
events/widget
|
||||
customers/index
|
||||
events/giftcards
|
||||
faq
|
||||
markdown
|
||||
|
||||
@@ -1,6 +0,0 @@

build:
  image: latest

python:
  version: 3.6
@@ -19,4 +19,4 @@
|
||||
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
|
||||
# <https://www.gnu.org/licenses/>.
|
||||
#
|
||||
__version__ = "4.13.0.dev0"
|
||||
__version__ = "4.14.0"
|
||||
|
||||
@@ -46,6 +46,7 @@ class PretixScanSecurityProfile(AllowListSecurityProfile):
|
||||
('GET', 'api-v1:version'),
|
||||
('GET', 'api-v1:device.eventselection'),
|
||||
('GET', 'api-v1:idempotency.query'),
|
||||
('GET', 'api-v1:device.info'),
|
||||
('POST', 'api-v1:device.update'),
|
||||
('POST', 'api-v1:device.revoke'),
|
||||
('POST', 'api-v1:device.roll'),
|
||||
@@ -80,6 +81,7 @@ class PretixScanNoSyncNoSearchSecurityProfile(AllowListSecurityProfile):
|
||||
('GET', 'api-v1:version'),
|
||||
('GET', 'api-v1:device.eventselection'),
|
||||
('GET', 'api-v1:idempotency.query'),
|
||||
('GET', 'api-v1:device.info'),
|
||||
('POST', 'api-v1:device.update'),
|
||||
('POST', 'api-v1:device.revoke'),
|
||||
('POST', 'api-v1:device.roll'),
|
||||
@@ -112,6 +114,7 @@ class PretixScanNoSyncSecurityProfile(AllowListSecurityProfile):
|
||||
('GET', 'api-v1:version'),
|
||||
('GET', 'api-v1:device.eventselection'),
|
||||
('GET', 'api-v1:idempotency.query'),
|
||||
('GET', 'api-v1:device.info'),
|
||||
('POST', 'api-v1:device.update'),
|
||||
('POST', 'api-v1:device.revoke'),
|
||||
('POST', 'api-v1:device.roll'),
|
||||
@@ -145,6 +148,7 @@ class PretixPosSecurityProfile(AllowListSecurityProfile):
|
||||
('GET', 'api-v1:version'),
|
||||
('GET', 'api-v1:device.eventselection'),
|
||||
('GET', 'api-v1:idempotency.query'),
|
||||
('GET', 'api-v1:device.info'),
|
||||
('POST', 'api-v1:device.update'),
|
||||
('POST', 'api-v1:device.revoke'),
|
||||
('POST', 'api-v1:device.roll'),
|
||||
@@ -192,6 +196,7 @@ class PretixPosSecurityProfile(AllowListSecurityProfile):
|
||||
('POST', 'plugins:pretix_posbackend:posdebuglogentry-bulk-create'),
|
||||
('GET', 'plugins:pretix_posbackend:poscashier-list'),
|
||||
('POST', 'plugins:pretix_posbackend:stripeterminal.token'),
|
||||
('POST', 'plugins:pretix_posbackend:stripeterminal.paymentintent'),
|
||||
('PUT', 'plugins:pretix_posbackend:file.upload'),
|
||||
('GET', 'api-v1:revokedsecrets-list'),
|
||||
('GET', 'api-v1:event.settings'),
|
||||
|
||||
@@ -19,11 +19,17 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
import logging

import ujson
from rest_framework import exceptions
from rest_framework.response import Response
from rest_framework.views import exception_handler, status

from pretix.base.services.locking import LockTimeoutException

logger = logging.getLogger(__name__)


def custom_exception_handler(exc, context):
    response = exception_handler(exc, context)

@@ -37,4 +43,7 @@ def custom_exception_handler(exc, context):
            }
        )

    if isinstance(exc, exceptions.APIException):
        logger.info(f'API Exception [{exc.status_code}]: {ujson.dumps(exc.detail)}')

    return response
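For context, Django REST Framework only calls a custom handler like this if it is registered in the settings; a minimal sketch of that wiring, with an illustrative dotted path rather than pretix's actual module path::

    # settings.py (sketch)
    REST_FRAMEWORK = {
        "EXCEPTION_HANDLER": "pretix.api.exception_handler.custom_exception_handler",  # illustrative path
    }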
29 src/pretix/api/migrations/0008_webhookcallretry.py (new file)
@@ -0,0 +1,29 @@
# Generated by Django 3.2.12 on 2022-09-13 14:48

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('pretixbase', '0218_checkinlist_addon_match'),
        ('pretixapi', '0007_alter_webhookcall_target_url'),
    ]

    operations = [
        migrations.CreateModel(
            name='WebHookCallRetry',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('retry_not_before', models.DateTimeField(auto_now_add=True)),
                ('retry_count', models.PositiveIntegerField(default=0)),
                ('action_type', models.CharField(max_length=255)),
                ('logentry', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='webhook_retries', to='pretixbase.logentry')),
                ('webhook', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='retries', to='pretixapi.webhook')),
            ],
            options={
                'unique_together': {('webhook', 'logentry')},
            },
        ),
    ]
@@ -133,6 +133,18 @@ class WebHookCall(models.Model):
        ordering = ("-datetime",)


class WebHookCallRetry(models.Model):
    id = models.BigAutoField(primary_key=True)
    webhook = models.ForeignKey('WebHook', on_delete=models.CASCADE, related_name='retries')
    logentry = models.ForeignKey('pretixbase.LogEntry', on_delete=models.CASCADE, related_name='webhook_retries')
    retry_not_before = models.DateTimeField(auto_now_add=True)
    retry_count = models.PositiveIntegerField(default=0)
    action_type = models.CharField(max_length=255)

    class Meta:
        unique_together = (('webhook', 'logentry'),)


class ApiCall(models.Model):
    idempotency_key = models.CharField(max_length=190, db_index=True)
    auth_hash = models.CharField(max_length=190, db_index=True)
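Presumably a periodic task later picks up the retries that are due; based only on the fields above, such a query might look roughly like this (a hedged sketch, not the actual pretix delivery code)::

    from django.utils.timezone import now

    due = WebHookCallRetry.objects.select_related("webhook").filter(
        retry_not_before__lte=now()
    )
    for retry in due:
        # re-deliver the webhook call for retry.logentry here, then delete the
        # row on success or increase retry_count and push retry_not_before back
        ...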
@@ -23,8 +23,7 @@ import os
|
||||
from datetime import timedelta
|
||||
|
||||
from django.core.files import File
|
||||
from django.db.models import Q
|
||||
from django.utils.crypto import get_random_string
|
||||
from django.db.models import prefetch_related_objects
|
||||
from django.utils.timezone import now
|
||||
from django.utils.translation import gettext_lazy
|
||||
from rest_framework import serializers
|
||||
@@ -34,7 +33,7 @@ from pretix.api.serializers.i18n import I18nAwareModelSerializer
|
||||
from pretix.api.serializers.order import (
|
||||
AnswerCreateSerializer, AnswerSerializer, InlineSeatSerializer,
|
||||
)
|
||||
from pretix.base.models import Quota, Seat, Voucher
|
||||
from pretix.base.models import Seat, Voucher
|
||||
from pretix.base.models.orders import CartPosition
|
||||
|
||||
|
||||
@@ -52,148 +51,18 @@ class CartPositionSerializer(I18nAwareModelSerializer):
|
||||
model = CartPosition
|
||||
fields = ('id', 'cart_id', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts',
|
||||
'attendee_email', 'voucher', 'addon_to', 'subevent', 'datetime', 'expires', 'includes_tax',
|
||||
'answers', 'seat')
|
||||
'answers', 'seat', 'is_bundled')
|
||||
|
||||
|
||||
class CartPositionCreateSerializer(I18nAwareModelSerializer):
|
||||
class BaseCartPositionCreateSerializer(I18nAwareModelSerializer):
|
||||
answers = AnswerCreateSerializer(many=True, required=False)
|
||||
expires = serializers.DateTimeField(required=False)
|
||||
attendee_name = serializers.CharField(required=False, allow_null=True)
|
||||
seat = serializers.CharField(required=False, allow_null=True)
|
||||
sales_channel = serializers.CharField(required=False, default='sales_channel')
|
||||
includes_tax = serializers.BooleanField(required=False, allow_null=True)
|
||||
voucher = serializers.CharField(required=False, allow_null=True)
|
||||
|
||||
class Meta:
|
||||
model = CartPosition
|
||||
fields = ('cart_id', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts', 'attendee_email',
|
||||
'subevent', 'expires', 'includes_tax', 'answers', 'seat', 'sales_channel', 'voucher')
|
||||
|
||||
def create(self, validated_data):
|
||||
answers_data = validated_data.pop('answers')
|
||||
if not validated_data.get('cart_id'):
|
||||
cid = "{}@api".format(get_random_string(48))
|
||||
while CartPosition.objects.filter(cart_id=cid).exists():
|
||||
cid = "{}@api".format(get_random_string(48))
|
||||
validated_data['cart_id'] = cid
|
||||
|
||||
if not validated_data.get('expires'):
|
||||
validated_data['expires'] = now() + timedelta(
|
||||
minutes=self.context['event'].settings.get('reservation_time', as_type=int)
|
||||
)
|
||||
|
||||
new_quotas = (validated_data.get('variation').quotas.filter(subevent=validated_data.get('subevent'))
|
||||
if validated_data.get('variation')
|
||||
else validated_data.get('item').quotas.filter(subevent=validated_data.get('subevent')))
|
||||
if len(new_quotas) == 0:
|
||||
raise ValidationError(
|
||||
gettext_lazy('The product "{}" is not assigned to a quota.').format(
|
||||
str(validated_data.get('item'))
|
||||
)
|
||||
)
|
||||
for quota in new_quotas:
|
||||
avail = quota.availability(_cache=self.context['quota_cache'])
|
||||
if avail[0] != Quota.AVAILABILITY_OK or (avail[1] is not None and avail[1] < 1):
|
||||
raise ValidationError(
|
||||
gettext_lazy('There is not enough quota available on quota "{}" to perform '
|
||||
'the operation.').format(
|
||||
quota.name
|
||||
)
|
||||
)
|
||||
|
||||
for quota in new_quotas:
|
||||
oldsize = self.context['quota_cache'][quota.pk][1]
|
||||
newsize = oldsize - 1 if oldsize is not None else None
|
||||
self.context['quota_cache'][quota.pk] = (
|
||||
Quota.AVAILABILITY_OK if newsize is None or newsize > 0 else Quota.AVAILABILITY_GONE,
|
||||
newsize
|
||||
)
|
||||
|
||||
attendee_name = validated_data.pop('attendee_name', '')
|
||||
if attendee_name and not validated_data.get('attendee_name_parts'):
|
||||
validated_data['attendee_name_parts'] = {
|
||||
'_legacy': attendee_name
|
||||
}
|
||||
|
||||
seated = validated_data.get('item').seat_category_mappings.filter(subevent=validated_data.get('subevent')).exists()
|
||||
if validated_data.get('seat'):
|
||||
if not seated:
|
||||
raise ValidationError('The specified product does not allow to choose a seat.')
|
||||
try:
|
||||
seat = self.context['event'].seats.get(seat_guid=validated_data['seat'], subevent=validated_data.get('subevent'))
|
||||
except Seat.DoesNotExist:
|
||||
raise ValidationError('The specified seat does not exist.')
|
||||
except Seat.MultipleObjectsReturned:
|
||||
raise ValidationError('The specified seat ID is not unique.')
|
||||
else:
|
||||
validated_data['seat'] = seat
|
||||
elif seated:
|
||||
raise ValidationError('The specified product requires to choose a seat.')
|
||||
|
||||
if validated_data.get('voucher'):
|
||||
try:
|
||||
voucher = self.context['event'].vouchers.get(code__iexact=validated_data.get('voucher'))
|
||||
except Voucher.DoesNotExist:
|
||||
raise ValidationError('The specified voucher does not exist.')
|
||||
|
||||
if voucher and not voucher.applies_to(validated_data.get('item'), validated_data.get('variation')):
|
||||
raise ValidationError('The specified voucher is not valid for the given item and variation.')
|
||||
|
||||
if voucher and voucher.seat and voucher.seat != validated_data.get('seat'):
|
||||
raise ValidationError('The specified voucher is not valid for this seat.')
|
||||
|
||||
if voucher and voucher.subevent_id and (not validated_data.get('subevent') or voucher.subevent_id != validated_data['subevent'].pk):
|
||||
raise ValidationError('The specified voucher is not valid for this subevent.')
|
||||
|
||||
if voucher.valid_until is not None and voucher.valid_until < now():
|
||||
raise ValidationError('The specified voucher is expired.')
|
||||
|
||||
redeemed_in_carts = CartPosition.objects.filter(
|
||||
Q(voucher=voucher) & Q(event=self.context['event']) & Q(expires__gte=now())
|
||||
)
|
||||
cart_count = redeemed_in_carts.count()
|
||||
v_avail = voucher.max_usages - voucher.redeemed - cart_count
|
||||
if v_avail < 1:
|
||||
raise ValidationError('The specified voucher has already been used the maximum number of times.')
|
||||
|
||||
validated_data['voucher'] = voucher
|
||||
|
||||
if validated_data.get('seat'):
|
||||
if not validated_data['seat'].is_available(
|
||||
sales_channel=validated_data.get('sales_channel', 'web'),
|
||||
distance_ignore_cart_id=validated_data['cart_id'],
|
||||
ignore_voucher_id=validated_data['voucher'].pk if validated_data.get('voucher') else None,
|
||||
):
|
||||
raise ValidationError(
|
||||
gettext_lazy('The selected seat "{seat}" is not available.').format(seat=validated_data['seat'].name))
|
||||
|
||||
validated_data.pop('sales_channel')
|
||||
# todo: does this make sense?
|
||||
validated_data['custom_price_input'] = validated_data['price']
|
||||
# todo: listed price, etc?
|
||||
# currently does not matter because there is no way to transform an API cart position into an order that keeps
|
||||
# prices, cart positions are just quota/voucher placeholders
|
||||
validated_data['custom_price_input_is_net'] = not validated_data.pop('includes_tax', True)
|
||||
cp = CartPosition.objects.create(event=self.context['event'], **validated_data)
|
||||
|
||||
for answ_data in answers_data:
|
||||
options = answ_data.pop('options')
|
||||
if isinstance(answ_data['answer'], File):
|
||||
an = answ_data.pop('answer')
|
||||
answ = cp.answers.create(**answ_data, answer='')
|
||||
answ.file.save(os.path.basename(an.name), an, save=False)
|
||||
answ.answer = 'file://' + answ.file.name
|
||||
answ.save()
|
||||
an.close()
|
||||
else:
|
||||
answ = cp.answers.create(**answ_data)
|
||||
answ.options.add(*options)
|
||||
return cp
|
||||
|
||||
def validate_cart_id(self, cid):
|
||||
if cid and not cid.endswith('@api'):
|
||||
raise ValidationError('Cart ID should end in @api or be empty.')
|
||||
return cid
|
||||
fields = ('item', 'variation', 'price', 'attendee_name', 'attendee_name_parts', 'attendee_email',
|
||||
'subevent', 'includes_tax', 'answers')
|
||||
|
||||
def validate_item(self, item):
|
||||
if item.event != self.context['event']:
|
||||
@@ -240,4 +109,178 @@ class CartPositionCreateSerializer(I18nAwareModelSerializer):
|
||||
raise ValidationError(
|
||||
                {'attendee_name': ['Do not specify attendee_name if you specified attendee_name_parts.']}
            )

        if not data.get('expires'):
            data['expires'] = now() + timedelta(
                minutes=self.context['event'].settings.get('reservation_time', as_type=int)
            )

        quotas_for_item_cache = self.context.get('quotas_for_item_cache', {})
        quotas_for_variation_cache = self.context.get('quotas_for_variation_cache', {})

        seated = data.get('item').seat_category_mappings.filter(subevent=data.get('subevent')).exists()
        if data.get('seat'):
            if not seated:
                raise ValidationError({'seat': ['The specified product does not allow to choose a seat.']})
            try:
                seat = self.context['event'].seats.get(seat_guid=data['seat'], subevent=data.get('subevent'))
            except Seat.DoesNotExist:
                raise ValidationError({'seat': ['The specified seat does not exist.']})
            except Seat.MultipleObjectsReturned:
                raise ValidationError({'seat': ['The specified seat ID is not unique.']})
            else:
                data['seat'] = seat
        elif seated:
            raise ValidationError({'seat': ['The specified product requires to choose a seat.']})

        if data.get('voucher'):
            try:
                voucher = self.context['event'].vouchers.get(code__iexact=data['voucher'])
            except Voucher.DoesNotExist:
                raise ValidationError({'voucher': ['The specified voucher does not exist.']})

            if voucher and not voucher.applies_to(data['item'], data.get('variation')):
                raise ValidationError({'voucher': ['The specified voucher is not valid for the given item and variation.']})

            if voucher and voucher.seat and voucher.seat != data.get('seat'):
                raise ValidationError({'voucher': ['The specified voucher is not valid for this seat.']})

            if voucher and voucher.subevent_id and (not data.get('subevent') or voucher.subevent_id != data['subevent'].pk):
                raise ValidationError({'voucher': ['The specified voucher is not valid for this subevent.']})

            if voucher.valid_until is not None and voucher.valid_until < now():
                raise ValidationError({'voucher': ['The specified voucher is expired.']})

            data['voucher'] = voucher

        if not data.get('voucher') or (not data['voucher'].allow_ignore_quota and not data['voucher'].block_quota):
            if data.get('variation'):
                if data['variation'].pk not in quotas_for_variation_cache:
                    quotas_for_variation_cache[data['variation'].pk] = data['variation'].quotas.filter(subevent=data.get('subevent'))
                data['_quotas'] = quotas_for_variation_cache[data['variation'].pk]
            else:
                if data['item'].pk not in quotas_for_item_cache:
                    quotas_for_item_cache[data['item'].pk] = data['item'].quotas.filter(subevent=data.get('subevent'))
                data['_quotas'] = quotas_for_item_cache[data['item'].pk]

            if len(data['_quotas']) == 0:
                raise ValidationError(
                    gettext_lazy('The product "{}" is not assigned to a quota.').format(
                        str(data.get('item'))
                    )
                )
        else:
            data['_quotas'] = []

        return data

    def create(self, validated_data):
        validated_data.pop('_quotas')
        answers_data = validated_data.pop('answers')

        attendee_name = validated_data.pop('attendee_name', '')
        if attendee_name and not validated_data.get('attendee_name_parts'):
            validated_data['attendee_name_parts'] = {
                '_legacy': attendee_name
            }

        # todo: does this make sense?
        validated_data['custom_price_input'] = validated_data['price']
        # todo: listed price, etc?
        # currently does not matter because there is no way to transform an API cart position into an order that keeps
        # prices, cart positions are just quota/voucher placeholders
        validated_data['custom_price_input_is_net'] = not validated_data.pop('includes_tax', True)
        cp = CartPosition.objects.create(event=self.context['event'], **validated_data)

        for answ_data in answers_data:
            options = answ_data.pop('options')
            if isinstance(answ_data['answer'], File):
                an = answ_data.pop('answer')
                answ = cp.answers.create(**answ_data, answer='')
                answ.file.save(os.path.basename(an.name), an, save=False)
                answ.answer = 'file://' + answ.file.name
                answ.save()
                an.close()
            else:
                answ = cp.answers.create(**answ_data)
                answ.options.add(*options)
        return cp


class CartPositionCreateSerializer(BaseCartPositionCreateSerializer):
    expires = serializers.DateTimeField(required=False)
    addons = BaseCartPositionCreateSerializer(many=True, required=False)
    bundled = BaseCartPositionCreateSerializer(many=True, required=False)
    seat = serializers.CharField(required=False, allow_null=True)
    sales_channel = serializers.CharField(required=False, default='sales_channel')
    voucher = serializers.CharField(required=False, allow_null=True)

    class Meta:
        model = CartPosition
        fields = BaseCartPositionCreateSerializer.Meta.fields + (
            'cart_id', 'expires', 'addons', 'bundled', 'seat', 'sales_channel', 'voucher'
        )

    def validate_cart_id(self, cid):
        if cid and not cid.endswith('@api'):
            raise ValidationError('Cart ID should end in @api or be empty.')
        return cid

    def create(self, validated_data):
        validated_data.pop('sales_channel')
        addons_data = validated_data.pop('addons', None)
        bundled_data = validated_data.pop('bundled', None)

        cp = super().create(validated_data)

        if addons_data:
            for addon_data in addons_data:
                addon_data['addon_to'] = cp
                addon_data['is_bundled'] = False
                super().create(addon_data)

        if bundled_data:
            for bundle_data in bundled_data:
                bundle_data['addon_to'] = cp
                bundle_data['is_bundled'] = True
                super().create(bundle_data)

        return cp

    def validate(self, data):
        data = super().validate(data)

        # This is currently only a very basic validation of add-ons and bundled products, we don't validate their number
        # or price. We can always go stricter, as the endpoint is documented as experimental.
        # However, this serializer should always be *at least* as strict as the order creation serializer.

        if data.get('item') and data.get('addons'):
            prefetch_related_objects([data['item']], 'addons')
            for sub_data in data['addons']:
                if not any(a.addon_category_id == sub_data['item'].category_id for a in data['item'].addons.all()):
                    raise ValidationError({
                        'addons': [
                            'The product "{prod}" can not be used as an add-on product for "{main}".'.format(
                                prod=str(sub_data['item']),
                                main=str(data['item']),
                            )
                        ]
                    })

        if data.get('item') and data.get('bundled'):
            prefetch_related_objects([data['item']], 'bundles')
            for sub_data in data['bundled']:
                if not any(
                    a.bundled_item_id == sub_data['item'].pk and
                    a.bundled_variation_id == (sub_data['variation'].pk if sub_data.get('variation') else None)
                    for a in data['item'].bundles.all()
                ):
                    raise ValidationError({
                        'bundled': [
                            'The product "{prod}" can not be used as an bundled product for "{main}".'.format(
                                prod=str(sub_data['item']),
                                main=str(data['item']),
                            )
                        ]
                    })
        return data

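For illustration only, not part of the diff above: a minimal sketch of how a client might call the cart position endpoint that this serializer backs. The URL, token and IDs are assumptions; the field names follow the serializer fields shown above.

    import requests

    API = "https://pretix.example.org/api/v1/organizers/demo/events/democon"  # assumed base URL
    payload = {
        "item": 1234,                             # assumed item ID
        "price": "23.00",
        "attendee_name_parts": {"_legacy": "Ada Lovelace"},
        "cart_id": "exampleexampleexample@api",   # must end in "@api" or be omitted
        "voucher": "EXAMPLEVOUCHER",              # optional, checked against the event's vouchers
        "addons": [],                             # checked against the item's add-on categories
        "bundled": [],                            # checked against the item's bundle configuration
    }
    r = requests.post(f"{API}/cartpositions/", json=payload,
                      headers={"Authorization": "Token 0123456789abcdef"})  # assumed API token
    print(r.status_code, r.json())
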
@@ -411,7 +411,7 @@ class CloneEventSerializer(EventSerializer):
        has_subevents = validated_data.pop('has_subevents', None)
        tz = validated_data.pop('timezone', None)
        sales_channels = validated_data.pop('sales_channels', None)
        new_event = super().create(validated_data)
        new_event = super().create({**validated_data, 'plugins': None})

        event = Event.objects.filter(slug=self.context['event'], organizer=self.context['organizer'].pk).first()
        new_event.copy_data_from(event)

@@ -23,6 +23,8 @@ from django import forms
from django.http import QueryDict
from rest_framework import serializers

from pretix.base.exporter import OrganizerLevelExportMixin


class FormFieldWrapperField(serializers.Field):
    def __init__(self, *args, **kwargs):

@@ -49,7 +51,6 @@ simple_mappings = (
    (forms.EmailField, serializers.EmailField, ()),
    (forms.UUIDField, serializers.UUIDField, ()),
    (forms.URLField, serializers.URLField, ()),
    (forms.NullBooleanField, serializers.NullBooleanField, ()),
    (forms.BooleanField, serializers.BooleanField, ()),
)

@@ -87,7 +88,7 @@ class JobRunSerializer(serializers.Serializer):
        ex = kwargs.pop('exporter')
        events = kwargs.pop('events', None)
        super().__init__(*args, **kwargs)
        if events is not None:
        if events is not None and not isinstance(ex, OrganizerLevelExportMixin):
            self.fields["events"] = serializers.SlugRelatedField(
                queryset=events,
                required=True,

@@ -106,6 +107,12 @@ class JobRunSerializer(serializers.Serializer):
                )
                break

            if isinstance(v, forms.NullBooleanField):
                self.fields[k] = serializers.BooleanField(
                    required=v.required,
                    allow_null=True,
                    validators=v.validators,
                )
            if isinstance(v, forms.ModelMultipleChoiceField):
                self.fields[k] = PrimaryKeyRelatedField(
                    queryset=v.queryset,

@@ -184,6 +184,8 @@ class ItemSerializer(I18nAwareModelSerializer):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.fields['default_price'].allow_null = False
        self.fields['default_price'].required = True
        if not self.read_only:
            self.fields['require_membership_types'].queryset = self.context['event'].organizer.membership_types.all()
            self.fields['grant_membership_type'].queryset = self.context['event'].organizer.membership_types.all()

@@ -1086,6 +1086,10 @@ class OrderCreateSerializer(I18nAwareModelSerializer):

            seated = pos_data.get('item').seat_category_mappings.filter(subevent=pos_data.get('subevent')).exists()
            if pos_data.get('seat'):
                if pos_data.get('addon_to'):
                    errs[i]['seat'] = ['Seats are currently not supported for add-on products.']
                    continue

                if not seated:
                    errs[i]['seat'] = ['The specified product does not allow to choose a seat.']
                try:

@@ -1281,6 +1285,9 @@ class OrderCreateSerializer(I18nAwareModelSerializer):

        if not simulate:
            for cp in delete_cps:
                if cp.addon_to_id:
                    continue
                cp.addons.all().delete()
                cp.delete()

        order.total = sum([p.price for p in pos_map.values()])

@@ -74,13 +74,19 @@ class CustomerSerializer(I18nAwareModelSerializer):
        fields = ('identifier', 'external_identifier', 'email', 'name', 'name_parts', 'is_active', 'is_verified', 'last_login', 'date_joined',
                  'locale', 'last_modified', 'notes')

    def update(self, instance, validated_data):
        if instance and instance.provider_id:
            validated_data['external_identifier'] = instance.external_identifier
        return super().update(instance, validated_data)


class CustomerCreateSerializer(CustomerSerializer):
    send_email = serializers.BooleanField(default=False, required=False, allow_null=True)
    password = serializers.CharField(write_only=True, required=False, allow_null=True)

    class Meta:
        model = Customer
        fields = CustomerSerializer.Meta.fields + ('send_email',)
        fields = CustomerSerializer.Meta.fields + ('send_email', 'password')


class MembershipTypeSerializer(I18nAwareModelSerializer):

@@ -113,20 +119,21 @@ class GiftCardSerializer(I18nAwareModelSerializer):

    def validate(self, data):
        data = super().validate(data)
        s = data['secret']
        qs = GiftCard.objects.filter(
            secret=s
        ).filter(
            Q(issuer=self.context["organizer"]) | Q(
                issuer__gift_card_collector_acceptance__collector=self.context["organizer"])
        )
        if self.instance:
            qs = qs.exclude(pk=self.instance.pk)
        if qs.exists():
            raise ValidationError(
                {'secret': _(
                    'A gift card with the same secret already exists in your or an affiliated organizer account.')}
        if 'secret' in data:
            s = data['secret']
            qs = GiftCard.objects.filter(
                secret=s
            ).filter(
                Q(issuer=self.context["organizer"]) | Q(
                    issuer__gift_card_collector_acceptance__collector=self.context["organizer"])
            )
            if self.instance:
                qs = qs.exclude(pk=self.instance.pk)
            if qs.exists():
                raise ValidationError(
                    {'secret': _(
                        'A gift card with the same secret already exists in your or an affiliated organizer account.')}
                )
        return data

    class Meta:

@@ -282,6 +289,7 @@ class TeamMemberSerializer(serializers.ModelSerializer):

class OrganizerSettingsSerializer(SettingsSerializer):
    default_fields = [
        'customer_accounts',
        'customer_accounts_native',
        'customer_accounts_link_by_email',
        'invoice_regenerate_allowed',
        'contact_mail',

@@ -61,7 +61,7 @@ class VoucherSerializer(I18nAwareModelSerializer):

    class Meta:
        model = Voucher
        fields = ('id', 'code', 'max_usages', 'redeemed', 'valid_until', 'block_quota',
        fields = ('id', 'code', 'max_usages', 'redeemed', 'min_usages', 'valid_until', 'block_quota',
                  'allow_ignore_quota', 'price_mode', 'value', 'item', 'variation', 'quota',
                  'tag', 'comment', 'subevent', 'show_hidden_items', 'seat')
        read_only_fields = ('id', 'redeemed')

@@ -138,6 +138,7 @@ urlpatterns = [
    re_path(r"^device/update$", device.UpdateView.as_view(), name="device.update"),
    re_path(r"^device/roll$", device.RollKeyView.as_view(), name="device.roll"),
    re_path(r"^device/revoke$", device.RevokeKeyView.as_view(), name="device.revoke"),
    re_path(r"^device/info$", device.InfoView.as_view(), name="device.info"),
    re_path(r"^device/eventselection$", device.EventSelectionView.as_view(), name="device.eventselection"),
    re_path(r"^idempotency_query$", idempotency.IdempotencyQueryView.as_view(), name="idempotency.query"),
    re_path(r"^upload$", upload.UploadView.as_view(), name="upload"),

@@ -19,19 +19,28 @@
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
from collections import Counter
from typing import List

from django.db import transaction
from django.utils.crypto import get_random_string
from django.utils.functional import cached_property
from django.utils.translation import gettext as _
from rest_framework import status, viewsets
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.filters import OrderingFilter
from rest_framework.mixins import CreateModelMixin, DestroyModelMixin
from rest_framework.response import Response
from rest_framework.settings import api_settings
from rest_framework.serializers import as_serializer_error

from pretix.api.serializers.cart import (
    CartPositionCreateSerializer, CartPositionSerializer,
)
from pretix.base.models import CartPosition
from pretix.base.services.cart import (
    _get_quota_availability, _get_voucher_availability, error_messages,
)
from pretix.base.services.locking import NoLockManager

@@ -54,18 +63,17 @@ class CartPositionViewSet(CreateModelMixin, DestroyModelMixin, viewsets.ReadOnly
    def get_serializer_context(self):
        ctx = super().get_serializer_context()
        ctx['event'] = self.request.event
        ctx['quota_cache'] = {}
        ctx['quotas_for_item_cache'] = {}
        ctx['quotas_for_variation_cache'] = {}
        return ctx

    def create(self, request, *args, **kwargs):
        serializer = CartPositionCreateSerializer(data=request.data, context=self.get_serializer_context())
        ctx = self.get_serializer_context()
        serializer = CartPositionCreateSerializer(data=request.data, context=ctx)
        serializer.is_valid(raise_exception=True)
        with transaction.atomic(), self.request.event.lock():
            self.perform_create(serializer)
            cp = serializer.instance
        serializer = CartPositionSerializer(cp, context=serializer.context)
        results = self._create(serializers=[serializer], raise_exception=True, ctx=ctx)
        headers = self.get_success_headers(serializer.data)
        return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
        return Response(results[0]['data'], status=status.HTTP_201_CREATED, headers=headers)

    @action(detail=False, methods=['POST'])
    def bulk_create(self, request, *args, **kwargs):

@@ -73,42 +81,163 @@ class CartPositionViewSet(CreateModelMixin, DestroyModelMixin, viewsets.ReadOnly
            return Response({"error": "Please supply a list"}, status=status.HTTP_400_BAD_REQUEST)

        ctx = self.get_serializer_context()
        with transaction.atomic():
            serializers = [
                CartPositionCreateSerializer(data=d, context=ctx)
                for d in request.data
            ]

            lockfn = self.request.event.lock
            if not any(s.is_valid(raise_exception=False) for s in serializers):
                lockfn = NoLockManager

            results = []
            with lockfn():
                for s in serializers:
                    if s.is_valid(raise_exception=False):
                        try:
                            cp = s.save()
                        except ValidationError as e:
                            results.append({
                                'success': False,
                                'data': None,
                                'errors': {api_settings.NON_FIELD_ERRORS_KEY: e.detail},
                            })
                        else:
                            results.append({
                                'success': True,
                                'data': CartPositionSerializer(cp, context=ctx).data,
                                'errors': None,
                            })
                    else:
                        results.append({
                            'success': False,
                            'data': None,
                            'errors': s.errors,
                        })
        serializers = [
            CartPositionCreateSerializer(data=d, context=ctx)
            for d in request.data
        ]

        results = self._create(serializers=serializers, raise_exception=False, ctx=ctx)
        return Response({'results': results}, status=status.HTTP_200_OK)

    def perform_create(self, serializer):
        serializer.save()
        raise NotImplementedError()

    @transaction.atomic()
    def perform_destroy(self, instance):
        instance.addons.all().delete()
        instance.delete()

    def _require_locking(self, quota_diff, voucher_use_diff, seat_diff):
        if voucher_use_diff or seat_diff:
            # If any vouchers or seats are used, we lock to make sure we don't redeem them too often
            return True

        if quota_diff and any(q.size is not None for q in quota_diff):
            # If any quotas are affected that are not unlimited, we lock
            return True

        return False

    @cached_property
    def _create_default_cart_id(self):
        cid = "{}@api".format(get_random_string(48))
        while CartPosition.objects.filter(cart_id=cid).exists():
            cid = "{}@api".format(get_random_string(48))
        return cid

    def _create(self, serializers: List[CartPositionCreateSerializer], ctx, raise_exception=False):
        voucher_use_diff = Counter()
        quota_diff = Counter()
        seat_diff = Counter()
        results = [{} for pserializer in serializers]

        for i, pserializer in enumerate(serializers):
            if not pserializer.is_valid(raise_exception=raise_exception):
                results[i] = {
                    'success': False,
                    'data': None,
                    'errors': pserializer.errors,
                }

        for pserializer in serializers:
            if pserializer.errors:
                continue

            validated_data = pserializer.validated_data
            if not validated_data.get('cart_id'):
                validated_data['cart_id'] = self._create_default_cart_id

            if validated_data.get('voucher'):
                voucher_use_diff[validated_data['voucher']] += 1

            if validated_data.get('seat'):
                seat_diff[validated_data['seat']] += 1

            for q in validated_data['_quotas']:
                quota_diff[q] += 1
            for sub_data in validated_data.get('addons', []) + validated_data.get('bundled', []):
                for q in sub_data['_quotas']:
                    quota_diff[q] += 1

        seats_seen = set()

        lockfn = NoLockManager
        if self._require_locking(quota_diff, voucher_use_diff, seat_diff):
            lockfn = self.request.event.lock

        with lockfn() as now_dt, transaction.atomic():
            vouchers_ok, vouchers_depend_on_cart = _get_voucher_availability(
                self.request.event,
                voucher_use_diff,
                now_dt,
                exclude_position_ids=[],
            )
            quotas_ok = _get_quota_availability(quota_diff, now_dt)

            for i, pserializer in enumerate(serializers):
                if results[i]:
                    continue

                try:
                    validated_data = pserializer.validated_data

                    if validated_data.get('seat'):
                        # Assumption: Add-ons currently can't have seats
                        if validated_data['seat'] in seats_seen:
                            raise ValidationError(error_messages['seat_multiple'])
                        seats_seen.add(validated_data['seat'])

                    quotas_needed = Counter()
                    for q in validated_data['_quotas']:
                        quotas_needed[q] += 1
                    for sub_data in validated_data.get('addons', []) + validated_data.get('bundled', []):
                        for q in sub_data['_quotas']:
                            quotas_needed[q] += 1

                    for q, needed in quotas_needed.items():
                        if quotas_ok[q] < needed:
                            raise ValidationError(
                                _('There is not enough quota available on quota "{}" to perform the operation.').format(
                                    q.name
                                )
                            )

                    if validated_data.get('voucher'):
                        # Assumption: Add-ons currently can't have vouchers, thus we only need to check the main voucher
                        if vouchers_ok[validated_data['voucher']] < 1:
                            raise ValidationError(
                                {'voucher': [_('The specified voucher has already been used the maximum number of times.')]}
                            )

                    if validated_data.get('seat'):
                        # Assumption: Add-ons currently can't have seats, thus we only need to check the main product
                        if not validated_data['seat'].is_available(
                            sales_channel=validated_data.get('sales_channel', 'web'),
                            distance_ignore_cart_id=validated_data['cart_id'],
                            ignore_voucher_id=validated_data['voucher'].pk if validated_data.get('voucher') else None,
                        ):
                            raise ValidationError(
                                {'seat': [_('The selected seat "{seat}" is not available.').format(seat=validated_data['seat'].name)]}
                            )

                    for q, needed in quotas_needed.items():
                        quotas_ok[q] -= needed
                    if validated_data.get('voucher'):
                        vouchers_ok[validated_data['voucher']] -= 1

                    if any(qa < 0 for qa in quotas_ok.values()):
                        # Safeguard, should never happen because of conditions above
                        raise ValidationError(error_messages['unavailable'])

                    cp = pserializer.create(validated_data)

                    d = CartPositionSerializer(cp, context=ctx).data
                    addons = sorted(cp.addons.all(), key=lambda a: a.pk)  # order of creation, safe since they are created in the same transaction
                    d['addons'] = CartPositionSerializer([a for a in addons if not a.is_bundled], many=True, context=ctx).data
                    d['bundled'] = CartPositionSerializer([a for a in addons if a.is_bundled], many=True, context=ctx).data

                    results[i] = {
                        'success': True,
                        'data': d,
                        'errors': None,
                    }
                except ValidationError as e:
                    if raise_exception:
                        raise
                    results[i] = {
                        'success': False,
                        'data': None,
                        'errors': as_serializer_error(e),
                    }

        return results

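For illustration only, not part of the diff above: based on the _create method shown, each entry of the bulk_create response pairs a success flag with either serialized data or errors. The values below are made up.

    example_results = [
        {"success": True, "data": {"id": 101, "item": 1234, "addons": [], "bundled": []}, "errors": None},
        {"success": False, "data": None,
         "errors": {"voucher": ["The specified voucher has already been used the maximum number of times."]}},
    ]
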
@@ -29,7 +29,9 @@ from rest_framework.exceptions import ValidationError
from rest_framework.response import Response
from rest_framework.views import APIView

from pretix import __version__
from pretix.api.auth.device import DeviceTokenAuthentication
from pretix.api.views.version import numeric_version
from pretix.base.models import CheckinList, Device, SubEvent
from pretix.base.models.devices import Gate, generate_api_token

@@ -151,6 +153,24 @@ class RevokeKeyView(APIView):
        return Response(serializer.data)


class InfoView(APIView):
    authentication_classes = (DeviceTokenAuthentication,)

    def get(self, request, format=None):
        device = request.auth
        serializer = DeviceSerializer(device)
        return Response({
            'device': serializer.data,
            'server': {
                'version': {
                    'pretix': __version__,
                    'pretix_numeric': numeric_version(__version__),
                }
            }
        })


class EventSelectionView(APIView):
    authentication_classes = (DeviceTokenAuthentication,)

@@ -241,13 +241,17 @@ class EventViewSet(viewsets.ModelViewSet):
        except Event.DoesNotExist:
            raise ValidationError('Event to copy from was not found')

        # Ensure that .installed() is only called when we NOT clone
        plugins = serializer.validated_data.pop('plugins', None)
        serializer.validated_data['plugins'] = None

        new_event = serializer.save(organizer=self.request.organizer)

        if copy_from:
            new_event.copy_data_from(copy_from)

            if 'plugins' in serializer.validated_data:
                new_event.set_active_plugins(serializer.validated_data['plugins'])
            if plugins:
                new_event.set_active_plugins(plugins)
            if 'is_public' in serializer.validated_data:
                new_event.is_public = serializer.validated_data['is_public']
            if 'testmode' in serializer.validated_data:

@@ -262,6 +266,10 @@ class EventViewSet(viewsets.ModelViewSet):
        else:
            serializer.instance.set_defaults()

        if plugins:
            new_event.set_active_plugins(plugins)
            new_event.save(update_fields=['plugins'])

        serializer.instance.log_action(
            'pretix.event.added',
            user=self.request.user,

@@ -35,7 +35,8 @@ from rest_framework.reverse import reverse
from pretix.api.serializers.exporters import (
    ExporterSerializer, JobRunSerializer,
)
from pretix.base.models import CachedFile, Device, TeamAPIToken
from pretix.base.exporter import OrganizerLevelExportMixin
from pretix.base.models import CachedFile, Device, Event, TeamAPIToken
from pretix.base.services.export import export, multiexport
from pretix.base.signals import (
    register_data_exporters, register_multievent_data_exporters,

@@ -155,7 +156,19 @@ class OrganizerExportersViewSet(ExportersMixin, viewsets.ViewSet):
            organizer=self.request.organizer
        )
        responses = register_multievent_data_exporters.send(self.request.organizer)
        for ex in sorted([response(events, self.request.organizer) for r, response in responses if response], key=lambda ex: str(ex.verbose_name)):
        raw_exporters = [
            response(Event.objects.none() if issubclass(response, OrganizerLevelExportMixin) else events, self.request.organizer)
            for r, response in responses
            if response
        ]
        raw_exporters = [
            ex for ex in raw_exporters
            if (
                not isinstance(ex, OrganizerLevelExportMixin) or
                perm_holder.has_organizer_permission(self.request.organizer, ex.organizer_required_permission, self.request)
            )
        ]
        for ex in sorted(raw_exporters, key=lambda ex: str(ex.verbose_name)):
            ex._serializer = JobRunSerializer(exporter=ex, events=events)
            exporters.append(ex)
        return exporters

@@ -61,6 +61,7 @@ from pretix.api.serializers.orderchange import (
    OrderPositionCreateForExistingOrderSerializer,
    OrderPositionInfoPatchSerializer,
)
from pretix.api.views import RichOrderingFilter
from pretix.base.i18n import language
from pretix.base.models import (
    CachedCombinedTicket, CachedTicket, Checkin, Device, EventMetaValue,

@@ -930,7 +931,7 @@ with scopes_disabled():
class OrderPositionViewSet(viewsets.ModelViewSet):
    serializer_class = OrderPositionSerializer
    queryset = OrderPosition.all.none()
    filter_backends = (DjangoFilterBackend, OrderingFilter)
    filter_backends = (DjangoFilterBackend, RichOrderingFilter)
    ordering = ('order__datetime', 'positionid')
    ordering_fields = ('order__code', 'order__datetime', 'positionid', 'attendee_name', 'order__status',)
    filterset_class = OrderPositionFilter

@@ -515,8 +515,8 @@ class CustomerViewSet(viewsets.ModelViewSet):
        raise MethodNotAllowed("Customers cannot be deleted.")

    @transaction.atomic()
    def perform_create(self, serializer, send_email=False):
        customer = serializer.save(organizer=self.request.organizer, password=make_password(None))
    def perform_create(self, serializer, send_email=False, password=None):
        customer = serializer.save(organizer=self.request.organizer, password=make_password(password))
        serializer.instance.log_action(
            'pretix.customer.created',
            user=self.request.user,

@@ -530,7 +530,7 @@ class CustomerViewSet(viewsets.ModelViewSet):
    def create(self, request, *args, **kwargs):
        serializer = CustomerCreateSerializer(data=request.data, context=self.get_serializer_context())
        serializer.is_valid(raise_exception=True)
        self.perform_create(serializer, send_email=serializer.validated_data.pop('send_email', False))
        self.perform_create(serializer, send_email=serializer.validated_data.pop('send_email', False), password=serializer.validated_data.pop('password', None))
        headers = self.get_success_headers(serializer.data)
        return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)

@@ -23,19 +23,24 @@ import json
import logging
import time
from collections import OrderedDict
from datetime import timedelta

import requests
from celery.exceptions import MaxRetriesExceededError
from django.db import DatabaseError, connection, transaction
from django.db.models import Exists, OuterRef, Q
from django.dispatch import receiver
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _, pgettext_lazy
from django_scopes import scope, scopes_disabled
from requests import RequestException

from pretix.api.models import WebHook, WebHookCall, WebHookEventListener
from pretix.api.models import (
    WebHook, WebHookCall, WebHookCallRetry, WebHookEventListener,
)
from pretix.api.signals import register_webhook_events
from pretix.base.models import LogEntry
from pretix.base.services.tasks import ProfiledTask, TransactionAwareTask
from pretix.base.signals import periodic_task
from pretix.celery_app import app

logger = logging.getLogger(__name__)

@@ -219,6 +224,10 @@ def register_default_webhook_events(sender, **kwargs):
            'pretix.event.order.expired',
            _('Order expired'),
        ),
        ParametrizedOrderWebhookEvent(
            'pretix.event.order.expirychanged',
            _('Order expiry date changed'),
        ),
        ParametrizedOrderWebhookEvent(
            'pretix.event.order.modified',
            _('Order information changed'),

@@ -231,10 +240,30 @@ def register_default_webhook_events(sender, **kwargs):
            'pretix.event.order.changed.*',
            _('Order changed'),
        ),
        ParametrizedOrderWebhookEvent(
            'pretix.event.order.refund.created',
            _('Refund of payment created'),
        ),
        ParametrizedOrderWebhookEvent(
            'pretix.event.order.refund.created.externally',
            _('External refund of payment'),
        ),
        ParametrizedOrderWebhookEvent(
            'pretix.event.order.refund.requested',
            _('Refund of payment requested by customer'),
        ),
        ParametrizedOrderWebhookEvent(
            'pretix.event.order.refund.done',
            _('Refund of payment completed'),
        ),
        ParametrizedOrderWebhookEvent(
            'pretix.event.order.refund.canceled',
            _('Refund of payment canceled'),
        ),
        ParametrizedOrderWebhookEvent(
            'pretix.event.order.refund.failed',
            _('Refund of payment failed'),
        ),
        ParametrizedOrderWebhookEvent(
            'pretix.event.order.approved',
            _('Order approved'),

@@ -275,6 +304,22 @@ def register_default_webhook_events(sender, **kwargs):
            'pretix.subevent.deleted',
            pgettext_lazy('subevent', 'Event series date deleted'),
        ),
        ParametrizedEventWebhookEvent(
            'pretix.event.live.activated',
            _('Shop taken live'),
        ),
        ParametrizedEventWebhookEvent(
            'pretix.event.live.deactivated',
            _('Shop taken offline'),
        ),
        ParametrizedEventWebhookEvent(
            'pretix.event.testmode.activated',
            _('Test-Mode of shop has been activated'),
        ),
        ParametrizedEventWebhookEvent(
            'pretix.event.testmode.deactivated',
            _('Test-Mode of shop has been deactivated'),
        ),
    )


@@ -316,59 +361,163 @@ def notify_webhooks(logentry_ids: list):
            send_webhook.apply_async(args=(logentry.id, notification_type.action_type, wh.pk))


@app.task(base=ProfiledTask, bind=True, max_retries=9, acks_late=True)
def send_webhook(self, logentry_id: int, action_type: str, webhook_id: int):
    # 9 retries with 2**(2*x) timing is roughly 72 hours
@app.task(base=ProfiledTask, bind=True, max_retries=5, default_retry_delay=60, acks_late=True, autoretry_for=(DatabaseError,),)
def send_webhook(self, logentry_id: int, action_type: str, webhook_id: int, retry_count: int = 0):
    """
    Sends out a specific webhook using adequate retry and error handling logic.

    Our retry logic is a little complex since we have different constraints here:

    1. We historically documented that we retry for up to three days, so we want to keep that
       promise. We want to use (approximately) exponentially increasing times to keep load
       manageable.

    2. We want to use Celery's ``acks_late=True`` options which prevents lost tasks if a worker
       crashes.

    3. A limitation of Celery's redis broker implementation is that it can not properly handle
       tasks that *run or wait* longer than `visibility_timeout`, which defaults to 1h, when
       ``acks_late`` is enabled. So any task with a *retry interval* of >1h will be restarted
       many times because celery believes the worker has crashed.

    4. We do like that the first few retries happen within a few seconds to work around very
       intermittent connectivity issues quickly. For the longer retries with multiple hours,
       we don't care if they are emitted a few minutes too late.

    We therefore have a two-phase retry process:

    - For all retry intervals below 5 minutes, which is the first 3 retries currently, we
      schedule a new celery task directly with an increased retry_count. We do *not* use
      celery's retry() call currently to make the retry process in both phases more similar,
      there should not be much of a difference though (except that the initial task will be in
      SUCCESS state, but we don't check that status anywhere).

    - For all retry intervals of at least 5 minutes, we create a database entry. Then, the
      periodic task ``schedule_webhook_retries_on_celery`` will schedule celery tasks for them
      once their time has come.
    """
    retry_intervals = (
        5,  # + 5 seconds
        30,  # + 30 seconds
        60,  # + 1 minute
        300,  # + 5 minutes
        1200,  # + 20 minutes
        3600,  # + 60 minutes
        14400,  # + 4 hours
        21600,  # + 6 hours
        43200,  # + 12 hours
        43200,  # + 12 hours
        86400,  # + 24 hours
    )  # added up, these are approximately 3 days, as documented
    retry_celery_cutoff = 300

    with scopes_disabled():
        webhook = WebHook.objects.get(id=webhook_id)
        with scope(organizer=webhook.organizer):
        with scope(organizer=webhook.organizer), transaction.atomic():
            logentry = LogEntry.all.get(id=logentry_id)
            types = get_all_webhook_events()
            event_type = types.get(action_type)
            if not event_type or not webhook.enabled:
                return  # Ignore, e.g. plugin not installed
                return 'obsolete-webhook'  # Ignore, e.g. plugin not installed

            payload = event_type.build_payload(logentry)
            if payload is None:
                # Content object deleted?
                return
                return 'obsolete-payload'

            t = time.time()

            try:
                try:
                    resp = requests.post(
                        webhook.target_url,
                        json=payload,
                        allow_redirects=False
                resp = requests.post(
                    webhook.target_url,
                    json=payload,
                    allow_redirects=False,
                    timeout=30,
                )
                WebHookCall.objects.create(
                    webhook=webhook,
                    action_type=logentry.action_type,
                    target_url=webhook.target_url,
                    is_retry=self.request.retries > 0,
                    execution_time=time.time() - t,
                    return_code=resp.status_code,
                    payload=json.dumps(payload),
                    response_body=resp.text[:1024 * 1024],
                    success=200 <= resp.status_code <= 299
                )
                if resp.status_code == 410:
                    webhook.enabled = False
                    webhook.save()
                    return 'gone'
                elif resp.status_code > 299:
                    if retry_count >= len(retry_intervals):
                        return 'retry-given-up'
                    elif retry_intervals[retry_count] < retry_celery_cutoff:
                        send_webhook.apply_async(args=(logentry_id, action_type, webhook_id, retry_count + 1),
                                                 countdown=retry_intervals[retry_count])
                        return 'retry-via-celery'
                    else:
                        webhook.retries.update_or_create(
                            logentry=logentry,
                            defaults=dict(
                                retry_not_before=now() + timedelta(seconds=retry_intervals[retry_count]),
                                retry_count=retry_count + 1,
                                action_type=action_type,
                            ),
                        )
                        return 'retry-via-db'
                return 'ok'
            except RequestException as e:
                WebHookCall.objects.create(
                    webhook=webhook,
                    action_type=logentry.action_type,
                    target_url=webhook.target_url,
                    is_retry=self.request.retries > 0,
                    execution_time=time.time() - t,
                    return_code=0,
                    payload=json.dumps(payload),
                    response_body=str(e)[:1024 * 1024]
                )
                if retry_count >= len(retry_intervals):
                    return 'retry-given-up'
                elif retry_intervals[retry_count] < retry_celery_cutoff:
                    send_webhook.apply_async(args=(logentry_id, action_type, webhook_id, retry_count + 1))
                    return 'retry-via-celery'
                else:
                    webhook.retries.update_or_create(
                        logentry=logentry,
                        defaults=dict(
                            retry_not_before=now() + timedelta(seconds=retry_intervals[retry_count]),
                            retry_count=retry_count + 1,
                            action_type=action_type,
                        ),
                    )
                    WebHookCall.objects.create(
                        webhook=webhook,
                        action_type=logentry.action_type,
                        target_url=webhook.target_url,
                        is_retry=self.request.retries > 0,
                        execution_time=time.time() - t,
                        return_code=resp.status_code,
                        payload=json.dumps(payload),
                        response_body=resp.text[:1024 * 1024],
                        success=200 <= resp.status_code <= 299
                    )
                    if resp.status_code == 410:
                        webhook.enabled = False
                        webhook.save()
                    elif resp.status_code > 299:
                        raise self.retry(countdown=2 ** (self.request.retries * 2))  # max is 2 ** (8*2) = 65536 seconds = ~18 hours
            except RequestException as e:
                WebHookCall.objects.create(
                    webhook=webhook,
                    action_type=logentry.action_type,
                    target_url=webhook.target_url,
                    is_retry=self.request.retries > 0,
                    execution_time=time.time() - t,
                    return_code=0,
                    payload=json.dumps(payload),
                    response_body=str(e)[:1024 * 1024]
                )
                raise self.retry(countdown=2 ** (self.request.retries * 2))  # max is 2 ** (8*2) = 65536 seconds = ~18 hours
            except MaxRetriesExceededError:
                pass
                    return 'retry-via-db'


@app.task(base=TransactionAwareTask)
def manually_retry_all_calls(webhook_id: int):
    with scopes_disabled():
        webhook = WebHook.objects.get(id=webhook_id)
        with scope(organizer=webhook.organizer), transaction.atomic():
            for whcr in webhook.retries.select_for_update(
                skip_locked=connection.features.has_select_for_update_skip_locked
            ):
                send_webhook.apply_async(
                    args=(whcr.logentry_id, whcr.action_type, whcr.webhook_id, whcr.retry_count),
                )
                whcr.delete()


@receiver(signal=periodic_task, dispatch_uid='pretixapi_schedule_webhook_retries_on_celery')
@scopes_disabled()
def schedule_webhook_retries_on_celery(sender, **kwargs):
    with transaction.atomic():
        for whcr in WebHookCallRetry.objects.select_for_update(
            skip_locked=connection.features.has_select_for_update_skip_locked
        ).filter(retry_not_before__lt=now()):
            send_webhook.apply_async(
                args=(whcr.logentry_id, whcr.action_type, whcr.webhook_id, whcr.retry_count),
            )
            whcr.delete()

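For illustration only, not part of the diff above: a quick sketch checking the docstring's claims against the interval table (assuming the four-hour step is 14400 seconds). Intervals below retry_celery_cutoff are re-queued directly on Celery; the rest go through WebHookCallRetry rows.

    intervals = (5, 30, 60, 300, 1200, 3600, 14400, 21600, 43200, 43200, 86400)
    cutoff = 300
    celery_phase = [i for i in intervals if i < cutoff]
    db_phase = [i for i in intervals if i >= cutoff]
    print(celery_phase)                     # [5, 30, 60] -> the "first 3 retries" from the docstring
    print(len(db_phase))                    # 8 further retries scheduled via the database
    print(round(sum(intervals) / 3600, 1))  # about 59.4 hours in total, on the order of the documented three days
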
src/pretix/base/customersso/__init__.py (new file, 21 lines)
@@ -0,0 +1,21 @@
#
# This file is part of pretix (Community Edition).
#
# Copyright (C) 2014-2020 Raphael Michel and contributors
# Copyright (C) 2020-2021 rami.io GmbH and contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation in version 3 of the License.
#
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
# this file, see <https://pretix.eu/about/en/license>.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
src/pretix/base/customersso/oidc.py (new file, 295 lines)
@@ -0,0 +1,295 @@
#
# This file is part of pretix (Community Edition).
#
# [standard pretix AGPL license header, identical to src/pretix/base/customersso/__init__.py above]
#
import base64
import hashlib
import logging
import time
from datetime import datetime
from urllib.parse import urlencode, urljoin

import jwt
import requests
from cryptography.hazmat.primitives.asymmetric.rsa import generate_private_key
from cryptography.hazmat.primitives.serialization import (
    Encoding, NoEncryption, PrivateFormat, PublicFormat,
)
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
from requests import RequestException

from pretix.multidomain.urlreverse import build_absolute_uri

logger = logging.getLogger(__name__)


"""
This module contains utilities for implementing OpenID Connect for customer authentication both as a receiving party (RP)
as well as an OpenID Provider (OP).
"""


def _urljoin(base, path):
    if not base.endswith("/"):
        base += "/"
    return urljoin(base, path)


def oidc_validate_and_complete_config(config):
    for k in ("base_url", "client_id", "client_secret", "uid_field", "email_field", "scope"):
        if not config.get(k):
            raise ValidationError(_('Configuration option "{name}" is missing.').format(name=k))

    conf_url = _urljoin(config["base_url"], ".well-known/openid-configuration")
    try:
        resp = requests.get(conf_url, timeout=10)
        resp.raise_for_status()
        provider_config = resp.json()
    except RequestException as e:
        raise ValidationError(_('Unable to retrieve configuration from "{url}". Error message: "{error}".').format(
            url=conf_url,
            error=str(e)
        ))
    except ValueError as e:
        raise ValidationError(_('Unable to retrieve configuration from "{url}". Error message: "{error}".').format(
            url=conf_url,
            error=str(e)
        ))

    if not provider_config.get("authorization_endpoint"):
        raise ValidationError(_('Incompatible SSO provider: "{error}".').format(
            error="authorization_endpoint not set"
        ))

    if not provider_config.get("userinfo_endpoint"):
        raise ValidationError(_('Incompatible SSO provider: "{error}".').format(
            error="userinfo_endpoint not set"
        ))

    if not provider_config.get("token_endpoint"):
        raise ValidationError(_('Incompatible SSO provider: "{error}".').format(
            error="token_endpoint not set"
        ))

    if "code" not in provider_config.get("response_types_supported", []):
        raise ValidationError(_('Incompatible SSO provider: "{error}".').format(
            error=f"provider supports response types {','.join(provider_config.get('response_types_supported', []))}, but we only support 'code'."
        ))

    if "query" not in provider_config.get("response_modes_supported", ["query", "fragment"]):
        raise ValidationError(_('Incompatible SSO provider: "{error}".').format(
            error=f"provider supports response modes {','.join(provider_config.get('response_modes_supported', []))}, but we only support 'query'."
        ))

    if "authorization_code" not in provider_config.get("grant_types_supported", ["authorization_code", "implicit"]):
        raise ValidationError(_('Incompatible SSO provider: "{error}".').format(
            error=f"provider supports grant types {','.join(provider_config.get('grant_types_supported', ''))}, but we only support 'authorization_code'."
        ))

    if "openid" not in config["scope"].split(" "):
        raise ValidationError(
            _('You are not requesting "{scope}".').format(
                scope="openid",
            ))

    for scope in config["scope"].split(" "):
        if scope not in provider_config.get("scopes_supported", []):
            raise ValidationError(_('You are requesting scope "{scope}" but provider only supports these: {scopes}.').format(
                scope=scope,
                scopes=", ".join(provider_config.get("scopes_supported", []))
            ))

    for k, v in config.items():
        if k.endswith('_field') and v:
            if v not in provider_config.get("claims_supported", []):  # https://openid.net/specs/openid-connect-core-1_0.html#UserInfo
                raise ValidationError(_('You are requesting field "{field}" but provider only supports these: {fields}.').format(
                    field=v,
                    fields=", ".join(provider_config.get("claims_supported", []))
                ))

    config['provider_config'] = provider_config
    return config


def oidc_authorize_url(provider, state, redirect_uri):
    endpoint = provider.configuration['provider_config']['authorization_endpoint']
    params = {
        # https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.1
        # https://openid.net/specs/openid-connect-core-1_0.html#AuthorizationEndpoint
        'response_type': 'code',
        'client_id': provider.configuration['client_id'],
        'scope': provider.configuration['scope'],
        'state': state,
        'redirect_uri': redirect_uri,
    }
    return endpoint + '?' + urlencode(params)


def oidc_validate_authorization(provider, code, redirect_uri):
    endpoint = provider.configuration['provider_config']['token_endpoint']
    params = {
        # https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.3
        # https://openid.net/specs/openid-connect-core-1_0.html#TokenEndpoint
        'grant_type': 'authorization_code',
        'code': code,
        'redirect_uri': redirect_uri,
    }
    try:
        resp = requests.post(
            endpoint,
            data=params,
            headers={
                'Accept': 'application/json',
            },
            auth=(provider.configuration['client_id'], provider.configuration['client_secret']),
        )
        resp.raise_for_status()
        data = resp.json()
    except RequestException:
        logger.exception('Could not retrieve authorization token')
        raise ValidationError(
            _('Login was not successful. Error message: "{error}".').format(
                error='could not reach login provider',
            )
        )

    if 'access_token' not in data:
        raise ValidationError(
            _('Login was not successful. Error message: "{error}".').format(
                error='access token missing',
            )
        )

    endpoint = provider.configuration['provider_config']['userinfo_endpoint']
    try:
        # https://openid.net/specs/openid-connect-core-1_0.html#UserInfo
        resp = requests.get(
            endpoint,
            headers={
                'Authorization': f'Bearer {data["access_token"]}'
            },
        )
        resp.raise_for_status()
        userinfo = resp.json()
    except RequestException:
        logger.exception('Could not retrieve user info')
        raise ValidationError(
            _('Login was not successful. Error message: "{error}".').format(
                error='could not fetch user info',
            )
        )

    if 'email_verified' in userinfo and not userinfo['email_verified']:
        # todo: how universal is this, do we need to make this configurable?
        raise ValidationError(_('The email address on this account is not yet verified. Please first confirm the '
                                'email address in your customer account.'))

    profile = {}
    for k, v in provider.configuration.items():
        if k.endswith('_field'):
            profile[k[:-6]] = userinfo.get(v)

    if not profile.get('uid'):
        raise ValidationError(
            _('Login was not successful. Error message: "{error}".').format(
                error='could not fetch user id',
            )
        )

    if not profile.get('email'):
        raise ValidationError(
            _('Login was not successful. Error message: "{error}".').format(
                error='could not fetch user email',
            )
        )

    return profile


def _hash_scheme(value):
    # As described in https://openid.net/specs/openid-connect-core-1_0.html#HybridIDToken
    digest = hashlib.sha256(value.encode()).digest()
    digest_truncated = digest[:(len(digest) // 2)]
    return base64.urlsafe_b64encode(digest_truncated).decode().rstrip("=")


def customer_claims(customer, scope):
    scope = scope.split(' ')
    claims = {
        'sub': customer.identifier,
        'locale': customer.locale,
    }
    if 'profile' in scope:
        if customer.name:
            claims['name'] = customer.name
        if 'given_name' in customer.name_parts:
            claims['given_name'] = customer.name_parts['given_name']
        if 'family_name' in customer.name_parts:
            claims['family_name'] = customer.name_parts['family_name']
        if 'middle_name' in customer.name_parts:
            claims['middle_name'] = customer.name_parts['middle_name']
        if 'calling_name' in customer.name_parts:
            claims['nickname'] = customer.name_parts['calling_name']
    if 'email' in scope and customer.email:
        claims['email'] = customer.email
        claims['email_verified'] = customer.is_verified
    if 'phone' in scope and customer.phone:
        claims['phone_number'] = customer.phone.as_international
    return claims


def _get_or_create_server_keypair(organizer):
    if not organizer.settings.sso_server_signing_key_rsa256_private:
        privkey = generate_private_key(key_size=4096, public_exponent=65537)
        pubkey = privkey.public_key()
        organizer.settings.sso_server_signing_key_rsa256_private = privkey.private_bytes(
            Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()
        ).decode()
        organizer.settings.sso_server_signing_key_rsa256_public = pubkey.public_bytes(
            Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
        ).decode()
    return organizer.settings.sso_server_signing_key_rsa256_private, organizer.settings.sso_server_signing_key_rsa256_public


def generate_id_token(customer, client, auth_time, nonce, scope, expires: datetime, scope_claims=False, with_code=None, with_access_token=None):
    payload = {
        'iss': build_absolute_uri(client.organizer, 'presale:organizer.index').rstrip('/'),
        'aud': client.client_id,
        'exp': int(expires.timestamp()),
        'iat': int(time.time()),
        'auth_time': auth_time,
        **customer_claims(customer, client.evaluated_scope(scope) if scope_claims else ''),
    }
    if nonce:
        payload['nonce'] = nonce
    if with_code:
        payload['c_hash'] = _hash_scheme(with_code)
    if with_access_token:
        payload['at_hash'] = _hash_scheme(with_access_token)
    privkey, pubkey = _get_or_create_server_keypair(client.organizer)
    return jwt.encode(
        payload,
        privkey,
        headers={
            "kid": hashlib.sha256(pubkey.encode()).hexdigest()[:16]
        },
        algorithm="RS256",
    )

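For illustration only, not part of the new file above: a hedged sketch of how the RP-side helpers could be wired into a login flow. The provider object, redirect URI and session handling are assumptions, not the actual pretix views.

    import secrets

    def start_login(request, provider, redirect_uri):
        # Send the customer to the provider's authorization endpoint with a fresh state value
        state = secrets.token_urlsafe(32)
        request.session['oidc_state'] = state
        return oidc_authorize_url(provider, state, redirect_uri)

    def finish_login(request, provider, redirect_uri):
        # Verify the state stored earlier, then exchange the code and fetch the userinfo;
        # oidc_validate_authorization() returns a dict with at least 'uid' and 'email'
        if request.GET.get('state') != request.session.get('oidc_state'):
            raise ValidationError('Login was not successful. Error message: "state mismatch".')
        return oidc_validate_authorization(provider, request.GET['code'], redirect_uri)
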
@@ -43,6 +43,7 @@ from pretix.base.i18n import (
    LazyCurrencyNumber, LazyDate, LazyExpiresDate, LazyNumber,
)
from pretix.base.models import Event
from pretix.base.reldate import RelativeDateWrapper
from pretix.base.settings import PERSON_NAME_SCHEMES
from pretix.base.signals import (
    register_html_mail_renderers, register_mail_placeholders,

@@ -299,7 +300,8 @@ def get_email_context(**kwargs):
        kwargs.setdefault("position_or_address", kwargs['position'])
    if 'order' in kwargs:
        try:
            kwargs['invoice_address'] = kwargs['order'].invoice_address
            if not kwargs.get('invoice_address'):
                kwargs['invoice_address'] = kwargs['order'].invoice_address
        except InvoiceAddress.DoesNotExist:
            kwargs['invoice_address'] = InvoiceAddress(order=kwargs['order'])
        finally:

@@ -469,6 +471,19 @@ def base_placeholders(sender, **kwargs):
                }
            ),
        ),
        SimpleFunctionalMailTextPlaceholder(
            'order_modification_deadline_date_and_time', ['order', 'event'],
            lambda order, event:
            date_format(order.modify_deadline.astimezone(event.timezone), 'SHORT_DATETIME_FORMAT')
            if order.modify_deadline
            else '',
            lambda event: date_format(
                event.settings.get(
                    'last_order_modification_date', as_type=RelativeDateWrapper
                ).datetime(event).astimezone(event.timezone),
                'SHORT_DATETIME_FORMAT'
            ) if event.settings.get('last_order_modification_date') else '',
        ),
        SimpleFunctionalMailTextPlaceholder(
            'event_location', ['event_or_subevent'], lambda event_or_subevent: str(event_or_subevent.location or ''),
            lambda event: str(event.location or ''),

@@ -51,7 +51,7 @@ from pretix.helpers.safe_openpyxl import ( # NOQA: backwards compatibility for
    SafeWorkbook, remove_invalid_excel_chars as excel_safe,
)

__ = excel_safe # just so the compatbility import above is "used" and doesn't get removed by linter
__ = excel_safe # just so the compatibility import above is "used" and doesn't get removed by linter


class BaseExporter:

@@ -80,7 +80,7 @@ class BaseExporter:
    def verbose_name(self) -> str:
        """
        A human-readable name for this exporter. This should be short but
        self-explaining. Good examples include 'JSON' or 'Microsoft Excel'.
        self-explaining. Good examples include 'Orders as JSON' or 'Orders as Microsoft Excel'.
        """
        raise NotImplementedError() # NOQA

@@ -137,6 +137,16 @@ class BaseExporter:
        raise NotImplementedError() # NOQA


class OrganizerLevelExportMixin:
    @property
    def organizer_required_permission(self) -> str:
        """
        The permission level required to use this exporter. Only useful for organizer-level exports,
        not for event-level exports.
        """
        return 'can_view_orders'


class ListExporter(BaseExporter):
    ProgressSetTotal = namedtuple('ProgressSetTotal', 'total')

@@ -20,6 +20,7 @@
# <https://www.gnu.org/licenses/>.
#
from .answers import * # noqa
from .customers import * # noqa
from .dekodi import * # noqa
from .events import * # noqa
from .invoices import * # noqa

src/pretix/base/exporters/customers.py (new file, 113 lines)
@@ -0,0 +1,113 @@
#
# This file is part of pretix (Community Edition).
#
# [standard pretix AGPL license header, identical to the files above]
#
# This file is based on an earlier version of pretix which was released under the Apache License 2.0. The full text of
# the Apache License 2.0 can be obtained at <http://www.apache.org/licenses/LICENSE-2.0>.
#
# This file may have since been changed and any changes are released under the terms of AGPLv3 as described above. A
# full history of changes and contributors is available at <https://github.com/pretix/pretix>.
#
# This file contains Apache-licensed contributions copyrighted by: Benjamin Hättasch, Tobias Kunze
#
# Unless required by applicable law or agreed to in writing, software distributed under the Apache License 2.0 is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under the License.

from collections import OrderedDict

from django.dispatch import receiver
from django.utils.timezone import get_current_timezone
from django.utils.translation import gettext as _, gettext_lazy

from pretix.base.settings import PERSON_NAME_SCHEMES

from ..exporter import ListExporter, OrganizerLevelExportMixin
from ..signals import register_multievent_data_exporters


class CustomerListExporter(OrganizerLevelExportMixin, ListExporter):
    identifier = 'customerlist'
    verbose_name = gettext_lazy('Customer accounts')
    organizer_required_permission = 'can_manage_customers'

    @property
    def additional_form_fields(self):
        return OrderedDict(
            []
        )

    def iterate_list(self, form_data):
        qs = self.organizer.customers.prefetch_related('provider')

        headers = [
            _('Customer ID'),
            _('SSO provider'),
            _('External identifier'),
            _('E-mail'),
            _('Phone number'),
            _('Full name'),
        ]
        name_scheme = PERSON_NAME_SCHEMES[self.organizer.settings.name_scheme]
        if name_scheme and len(name_scheme['fields']) > 1:
            for k, label, w in name_scheme['fields']:
                headers.append(_('Name') + ': ' + str(label))

        headers += [
            _('Account active'),
            _('Verified email address'),
            _('Last login'),
            _('Registration date'),
            _('Language'),
            _('Notes'),
        ]
        yield headers

        tz = get_current_timezone()
        for obj in qs:
            row = [
                obj.identifier,
                obj.provider.name if obj.provider else None,
                obj.external_identifier,
|
||||
obj.email or '',
|
||||
obj.phone or '',
|
||||
obj.name,
|
||||
]
|
||||
if name_scheme and len(name_scheme['fields']) > 1:
|
||||
for k, label, w in name_scheme['fields']:
|
||||
row.append(obj.name_parts.get(k, ''))
|
||||
row += [
|
||||
_('Yes') if obj.is_active else _('No'),
|
||||
_('Yes') if obj.is_verified else _('No'),
|
||||
obj.last_login.astimezone(tz).date().strftime('%Y-%m-%d') if obj.last_login else '',
|
||||
obj.date_joined.astimezone(tz).date().strftime('%Y-%m-%d') if obj.date_joined else '',
|
||||
obj.get_locale_display(),
|
||||
obj.notes or '',
|
||||
]
|
||||
yield row
|
||||
|
||||
def get_filename(self):
|
||||
return '{}_customers'.format(self.organizer.slug)
|
||||
|
||||
|
||||
@receiver(register_multievent_data_exporters, dispatch_uid="multiexporter_customerlist")
|
||||
def register_multievent_i_customerlist_exporter(sender, **kwargs):
|
||||
return CustomerListExporter
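The name-scheme handling is the only non-obvious part of this exporter: the configured PERSON_NAME_SCHEMES entry determines both the extra header columns and how each customer's name_parts dict is flattened into the row. A small sketch for the 'given_family' scheme; the concrete part keys and the '_scheme' entry are assumptions about that scheme's definition:

from pretix.base.settings import PERSON_NAME_SCHEMES

scheme = PERSON_NAME_SCHEMES['given_family']
headers = ['Name: ' + str(label) for key, label, weight in scheme['fields']]
# e.g. ['Name: Given name', 'Name: Family name']

name_parts = {'_scheme': 'given_family', 'given_name': 'Ada', 'family_name': 'Lovelace'}
row = [name_parts.get(key, '') for key, label, weight in scheme['fields']]
# e.g. ['Ada', 'Lovelace']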
|
||||
@@ -60,7 +60,9 @@ from pretix.base.settings import PERSON_NAME_SCHEMES
|
||||
from ...control.forms.filter import get_all_payment_providers
|
||||
from ...helpers import GroupConcat
|
||||
from ...helpers.iter import chunked_iterable
|
||||
from ..exporter import ListExporter, MultiSheetListExporter
|
||||
from ..exporter import (
|
||||
ListExporter, MultiSheetListExporter, OrganizerLevelExportMixin,
|
||||
)
|
||||
from ..signals import (
|
||||
register_data_exporters, register_multievent_data_exporters,
|
||||
)
|
||||
@@ -610,7 +612,10 @@ class OrderListExporter(MultiSheetListExporter):
|
||||
for k, label, w in name_scheme['fields']:
|
||||
headers.append(_('Invoice address name') + ': ' + str(label))
|
||||
headers += [
|
||||
_('Address'), _('ZIP code'), _('City'), _('Country'), pgettext('address', 'State'), _('VAT ID'),
|
||||
_('Invoice address street'), _('Invoice address ZIP code'), _('Invoice address city'),
|
||||
_('Invoice address country'),
|
||||
pgettext('address', 'Invoice address state'),
|
||||
_('VAT ID'),
|
||||
]
|
||||
headers += [
|
||||
_('Sales channel'), _('Order locale'),
|
||||
@@ -881,76 +886,75 @@ class QuotaListExporter(ListExporter):
|
||||
return '{}_quotas'.format(self.event.slug)
|
||||
|
||||
|
||||
def generate_GiftCardTransactionListExporter(organizer): # hackhack
|
||||
class GiftcardTransactionListExporter(ListExporter):
|
||||
identifier = 'giftcardtransactionlist'
|
||||
verbose_name = gettext_lazy('Gift card transactions')
|
||||
class GiftcardTransactionListExporter(OrganizerLevelExportMixin, ListExporter):
|
||||
identifier = 'giftcardtransactionlist'
|
||||
verbose_name = gettext_lazy('Gift card transactions')
|
||||
organizer_required_permission = 'can_manage_gift_cards'
|
||||
|
||||
@property
|
||||
def additional_form_fields(self):
|
||||
d = [
|
||||
('date_from',
|
||||
forms.DateField(
|
||||
label=_('Start date'),
|
||||
widget=forms.DateInput(attrs={'class': 'datepickerfield'}),
|
||||
required=False,
|
||||
)),
|
||||
('date_to',
|
||||
forms.DateField(
|
||||
label=_('End date'),
|
||||
widget=forms.DateInput(attrs={'class': 'datepickerfield'}),
|
||||
required=False,
|
||||
)),
|
||||
@property
|
||||
def additional_form_fields(self):
|
||||
d = [
|
||||
('date_from',
|
||||
forms.DateField(
|
||||
label=_('Start date'),
|
||||
widget=forms.DateInput(attrs={'class': 'datepickerfield'}),
|
||||
required=False,
|
||||
)),
|
||||
('date_to',
|
||||
forms.DateField(
|
||||
label=_('End date'),
|
||||
widget=forms.DateInput(attrs={'class': 'datepickerfield'}),
|
||||
required=False,
|
||||
)),
|
||||
]
|
||||
d = OrderedDict(d)
|
||||
return d
|
||||
|
||||
def iterate_list(self, form_data):
|
||||
qs = GiftCardTransaction.objects.filter(
|
||||
card__issuer=self.organizer,
|
||||
).order_by('datetime').select_related('card', 'order', 'order__event')
|
||||
|
||||
if form_data.get('date_from'):
|
||||
date_value = form_data.get('date_from')
|
||||
if isinstance(date_value, str):
|
||||
date_value = dateutil.parser.parse(date_value).date()
|
||||
qs = qs.filter(
|
||||
datetime__gte=make_aware(datetime.combine(date_value, time(0, 0, 0)), self.timezone)
|
||||
)
|
||||
|
||||
if form_data.get('date_to'):
|
||||
date_value = form_data.get('date_to')
|
||||
if isinstance(date_value, str):
|
||||
date_value = dateutil.parser.parse(date_value).date()
|
||||
|
||||
qs = qs.filter(
|
||||
datetime__lte=make_aware(datetime.combine(date_value, time(23, 59, 59, 999999)), self.timezone)
|
||||
)
|
||||
|
||||
headers = [
|
||||
_('Gift card code'),
|
||||
_('Test mode'),
|
||||
_('Date'),
|
||||
_('Amount'),
|
||||
_('Currency'),
|
||||
_('Order'),
|
||||
]
|
||||
yield headers
|
||||
|
||||
for obj in qs:
|
||||
row = [
|
||||
obj.card.secret,
|
||||
_('TEST MODE') if obj.card.testmode else '',
|
||||
obj.datetime.astimezone(self.timezone).strftime('%Y-%m-%d %H:%M:%S'),
|
||||
obj.value,
|
||||
obj.card.currency,
|
||||
obj.order.full_code if obj.order else None,
|
||||
]
|
||||
d = OrderedDict(d)
|
||||
return d
|
||||
yield row
|
||||
|
||||
def iterate_list(self, form_data):
|
||||
qs = GiftCardTransaction.objects.filter(
|
||||
card__issuer=organizer,
|
||||
).order_by('datetime').select_related('card', 'order', 'order__event')
|
||||
|
||||
if form_data.get('date_from'):
|
||||
date_value = form_data.get('date_from')
|
||||
if isinstance(date_value, str):
|
||||
date_value = dateutil.parser.parse(date_value).date()
|
||||
qs = qs.filter(
|
||||
datetime__gte=make_aware(datetime.combine(date_value, time(0, 0, 0)), self.timezone)
|
||||
)
|
||||
|
||||
if form_data.get('date_to'):
|
||||
date_value = form_data.get('date_to')
|
||||
if isinstance(date_value, str):
|
||||
date_value = dateutil.parser.parse(date_value).date()
|
||||
|
||||
qs = qs.filter(
|
||||
datetime__lte=make_aware(datetime.combine(date_value, time(23, 59, 59, 999999)), self.timezone)
|
||||
)
|
||||
|
||||
headers = [
|
||||
_('Gift card code'),
|
||||
_('Test mode'),
|
||||
_('Date'),
|
||||
_('Amount'),
|
||||
_('Currency'),
|
||||
_('Order'),
|
||||
]
|
||||
yield headers
|
||||
|
||||
for obj in qs:
|
||||
row = [
|
||||
obj.card.secret,
|
||||
_('TEST MODE') if obj.card.testmode else '',
|
||||
obj.datetime.astimezone(self.timezone).strftime('%Y-%m-%d %H:%M:%S'),
|
||||
obj.value,
|
||||
obj.card.currency,
|
||||
obj.order.full_code if obj.order else None,
|
||||
]
|
||||
yield row
|
||||
|
||||
def get_filename(self):
|
||||
return '{}_giftcardtransactions'.format(organizer.slug)
|
||||
return GiftcardTransactionListExporter
|
||||
def get_filename(self):
|
||||
return '{}_giftcardtransactions'.format(self.organizer.slug)
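The date filtering above repeats a small pattern: an optional DateField value (possibly still a string) is widened to an inclusive day range in the exporter's timezone before it is applied to the queryset. A condensed sketch of just that pattern:

from datetime import datetime, time

import dateutil.parser
from django.utils.timezone import make_aware


def bounds_for(form_data, tz):
    start = end = None
    if form_data.get('date_from'):
        d = form_data['date_from']
        if isinstance(d, str):
            d = dateutil.parser.parse(d).date()
        start = make_aware(datetime.combine(d, time(0, 0, 0)), tz)
    if form_data.get('date_to'):
        d = form_data['date_to']
        if isinstance(d, str):
            d = dateutil.parser.parse(d).date()
        end = make_aware(datetime.combine(d, time(23, 59, 59, 999999)), tz)
    return start, end

# qs = qs.filter(datetime__gte=start) and qs.filter(datetime__lte=end), as above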
|
||||
|
||||
|
||||
class GiftcardRedemptionListExporter(ListExporter):
|
||||
@@ -997,114 +1001,112 @@ class GiftcardRedemptionListExporter(ListExporter):
|
||||
return '{}_giftcardredemptions'.format(self.event.slug)
|
||||
|
||||
|
||||
def generate_GiftCardListExporter(organizer): # hackhack
|
||||
class GiftcardListExporter(ListExporter):
|
||||
identifier = 'giftcardlist'
|
||||
verbose_name = gettext_lazy('Gift cards')
|
||||
class GiftcardListExporter(OrganizerLevelExportMixin, ListExporter):
|
||||
identifier = 'giftcardlist'
|
||||
verbose_name = gettext_lazy('Gift cards')
|
||||
organizer_required_permission = 'can_manage_gift_cards'
|
||||
|
||||
@property
|
||||
def additional_form_fields(self):
|
||||
return OrderedDict(
|
||||
[
|
||||
('date', forms.DateTimeField(
|
||||
label=_('Show value at'),
|
||||
initial=now(),
|
||||
)),
|
||||
('testmode', forms.ChoiceField(
|
||||
label=_('Test mode'),
|
||||
choices=(
|
||||
('', _('All')),
|
||||
('yes', _('Test mode')),
|
||||
('no', _('Live')),
|
||||
),
|
||||
initial='no',
|
||||
required=False
|
||||
)),
|
||||
('state', forms.ChoiceField(
|
||||
label=_('Status'),
|
||||
choices=(
|
||||
('', _('All')),
|
||||
('empty', _('Empty')),
|
||||
('valid_value', _('Valid and with value')),
|
||||
('expired_value', _('Expired and with value')),
|
||||
('expired', _('Expired')),
|
||||
),
|
||||
initial='valid_value',
|
||||
required=False
|
||||
))
|
||||
]
|
||||
)
|
||||
|
||||
def iterate_list(self, form_data):
|
||||
s = GiftCardTransaction.objects.filter(
|
||||
card=OuterRef('pk'),
|
||||
datetime__lte=form_data['date']
|
||||
).order_by().values('card').annotate(s=Sum('value')).values('s')
|
||||
qs = organizer.issued_gift_cards.filter(
|
||||
issuance__lte=form_data['date']
|
||||
).annotate(
|
||||
cached_value=Coalesce(Subquery(s), Decimal('0.00')),
|
||||
).order_by('issuance').prefetch_related(
|
||||
'transactions', 'transactions__order', 'transactions__order__event', 'transactions__order__invoices'
|
||||
)
|
||||
|
||||
if form_data.get('testmode') == 'yes':
|
||||
qs = qs.filter(testmode=True)
|
||||
elif form_data.get('testmode') == 'no':
|
||||
qs = qs.filter(testmode=False)
|
||||
|
||||
if form_data.get('state') == 'empty':
|
||||
qs = qs.filter(cached_value=0)
|
||||
elif form_data.get('state') == 'valid_value':
|
||||
qs = qs.exclude(cached_value=0).filter(Q(expires__isnull=True) | Q(expires__gte=form_data['date']))
|
||||
elif form_data.get('state') == 'expired_value':
|
||||
qs = qs.exclude(cached_value=0).filter(expires__lt=form_data['date'])
|
||||
elif form_data.get('state') == 'expired':
|
||||
qs = qs.filter(expires__lt=form_data['date'])
|
||||
|
||||
headers = [
|
||||
_('Gift card code'),
|
||||
_('Test mode card'),
|
||||
_('Creation date'),
|
||||
_('Expiry date'),
|
||||
_('Special terms and conditions'),
|
||||
_('Currency'),
|
||||
_('Current value'),
|
||||
_('Created in order'),
|
||||
_('Last invoice number of order'),
|
||||
_('Last invoice date of order'),
|
||||
@property
|
||||
def additional_form_fields(self):
|
||||
return OrderedDict(
|
||||
[
|
||||
('date', forms.DateTimeField(
|
||||
label=_('Show value at'),
|
||||
initial=now(),
|
||||
)),
|
||||
('testmode', forms.ChoiceField(
|
||||
label=_('Test mode'),
|
||||
choices=(
|
||||
('', _('All')),
|
||||
('yes', _('Test mode')),
|
||||
('no', _('Live')),
|
||||
),
|
||||
initial='no',
|
||||
required=False
|
||||
)),
|
||||
('state', forms.ChoiceField(
|
||||
label=_('Status'),
|
||||
choices=(
|
||||
('', _('All')),
|
||||
('empty', _('Empty')),
|
||||
('valid_value', _('Valid and with value')),
|
||||
('expired_value', _('Expired and with value')),
|
||||
('expired', _('Expired')),
|
||||
),
|
||||
initial='valid_value',
|
||||
required=False
|
||||
))
|
||||
]
|
||||
yield headers
|
||||
)
|
||||
|
||||
tz = get_current_timezone()
|
||||
for obj in qs:
|
||||
o = None
|
||||
i = None
|
||||
trans = list(obj.transactions.all())
|
||||
if trans:
|
||||
o = trans[0].order
|
||||
if o:
|
||||
invs = list(o.invoices.all())
|
||||
if invs:
|
||||
i = invs[-1]
|
||||
row = [
|
||||
obj.secret,
|
||||
_('Yes') if obj.testmode else _('No'),
|
||||
obj.issuance.astimezone(tz).date().strftime('%Y-%m-%d'),
|
||||
obj.expires.astimezone(tz).date().strftime('%Y-%m-%d') if obj.expires else '',
|
||||
obj.conditions or '',
|
||||
obj.currency,
|
||||
obj.cached_value,
|
||||
o.full_code if o else '',
|
||||
i.number if i else '',
|
||||
i.date.strftime('%Y-%m-%d') if i else '',
|
||||
]
|
||||
yield row
|
||||
def iterate_list(self, form_data):
|
||||
s = GiftCardTransaction.objects.filter(
|
||||
card=OuterRef('pk'),
|
||||
datetime__lte=form_data['date']
|
||||
).order_by().values('card').annotate(s=Sum('value')).values('s')
|
||||
qs = self.organizer.issued_gift_cards.filter(
|
||||
issuance__lte=form_data['date']
|
||||
).annotate(
|
||||
cached_value=Coalesce(Subquery(s), Decimal('0.00')),
|
||||
).order_by('issuance').prefetch_related(
|
||||
'transactions', 'transactions__order', 'transactions__order__event', 'transactions__order__invoices'
|
||||
)
|
||||
|
||||
def get_filename(self):
|
||||
return '{}_giftcards'.format(organizer.slug)
|
||||
if form_data.get('testmode') == 'yes':
|
||||
qs = qs.filter(testmode=True)
|
||||
elif form_data.get('testmode') == 'no':
|
||||
qs = qs.filter(testmode=False)
|
||||
|
||||
return GiftcardListExporter
|
||||
if form_data.get('state') == 'empty':
|
||||
qs = qs.filter(cached_value=0)
|
||||
elif form_data.get('state') == 'valid_value':
|
||||
qs = qs.exclude(cached_value=0).filter(Q(expires__isnull=True) | Q(expires__gte=form_data['date']))
|
||||
elif form_data.get('state') == 'expired_value':
|
||||
qs = qs.exclude(cached_value=0).filter(expires__lt=form_data['date'])
|
||||
elif form_data.get('state') == 'expired':
|
||||
qs = qs.filter(expires__lt=form_data['date'])
|
||||
|
||||
headers = [
|
||||
_('Gift card code'),
|
||||
_('Test mode card'),
|
||||
_('Creation date'),
|
||||
_('Expiry date'),
|
||||
_('Special terms and conditions'),
|
||||
_('Currency'),
|
||||
_('Current value'),
|
||||
_('Created in order'),
|
||||
_('Last invoice number of order'),
|
||||
_('Last invoice date of order'),
|
||||
]
|
||||
yield headers
|
||||
|
||||
tz = get_current_timezone()
|
||||
for obj in qs:
|
||||
o = None
|
||||
i = None
|
||||
trans = list(obj.transactions.all())
|
||||
if trans:
|
||||
o = trans[0].order
|
||||
if o:
|
||||
invs = list(o.invoices.all())
|
||||
if invs:
|
||||
i = invs[-1]
|
||||
row = [
|
||||
obj.secret,
|
||||
_('Yes') if obj.testmode else _('No'),
|
||||
obj.issuance.astimezone(tz).date().strftime('%Y-%m-%d'),
|
||||
obj.expires.astimezone(tz).date().strftime('%Y-%m-%d') if obj.expires else '',
|
||||
obj.conditions or '',
|
||||
obj.currency,
|
||||
obj.cached_value,
|
||||
o.full_code if o else '',
|
||||
i.number if i else '',
|
||||
i.date.strftime('%Y-%m-%d') if i else '',
|
||||
]
|
||||
yield row
|
||||
|
||||
def get_filename(self):
|
||||
return '{}_giftcards'.format(self.organizer.slug)
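The core of this exporter is the "value at a point in time" annotation: each card's balance is the sum of its transactions up to the selected date, with Coalesce providing 0.00 for cards without transactions. A condensed sketch, where cutoff and organizer stand in for the form's "Show value at" datetime and self.organizer:

from decimal import Decimal

from django.db.models import OuterRef, Subquery, Sum
from django.db.models.functions import Coalesce

from pretix.base.models import GiftCardTransaction

value_at_date = GiftCardTransaction.objects.filter(
    card=OuterRef('pk'),
    datetime__lte=cutoff,
).order_by().values('card').annotate(s=Sum('value')).values('s')

cards = organizer.issued_gift_cards.filter(issuance__lte=cutoff).annotate(
    cached_value=Coalesce(Subquery(value_at_date), Decimal('0.00')),
)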
|
||||
|
||||
|
||||
@receiver(register_data_exporters, dispatch_uid="exporter_orderlist")
|
||||
@@ -1144,9 +1146,9 @@ def register_multievent_i_giftcardredemptionlist_exporter(sender, **kwargs):
|
||||
|
||||
@receiver(register_multievent_data_exporters, dispatch_uid="multiexporter_giftcardlist")
|
||||
def register_multievent_i_giftcardlist_exporter(sender, **kwargs):
|
||||
return generate_GiftCardListExporter(sender)
|
||||
return GiftcardListExporter
|
||||
|
||||
|
||||
@receiver(register_multievent_data_exporters, dispatch_uid="multiexporter_giftcardtransactionlist")
|
||||
def register_multievent_i_giftcardtransactionlist_exporter(sender, **kwargs):
|
||||
return generate_GiftCardTransactionListExporter(sender)
|
||||
return GiftcardTransactionListExporter
|
||||
|
||||
@@ -51,6 +51,7 @@ from django.core.validators import (
|
||||
)
|
||||
from django.db.models import QuerySet
|
||||
from django.forms import Select, widgets
|
||||
from django.forms.widgets import FILE_INPUT_CONTRADICTION
|
||||
from django.utils.formats import date_format
|
||||
from django.utils.html import escape
|
||||
from django.utils.safestring import mark_safe
|
||||
@@ -429,7 +430,7 @@ class PortraitImageWidget(UploadedFileWidget):
|
||||
|
||||
def value_from_datadict(self, data, files, name):
|
||||
d = super().value_from_datadict(data, files, name)
|
||||
if d is not None and d is not False:
|
||||
if d is not None and d is not False and d is not FILE_INPUT_CONTRADICTION:
|
||||
d._cropdata = json.loads(data.get(name + '_cropdata', '{}') or '{}')
|
||||
return d
|
||||
|
||||
|
||||
@@ -533,6 +533,7 @@ class ClassicInvoiceRenderer(BaseReportlabInvoiceRenderer):
|
||||
tstyledata = [
|
||||
('ALIGN', (1, 0), (-1, -1), 'RIGHT'),
|
||||
('VALIGN', (0, 0), (-1, -1), 'TOP'),
|
||||
('FONTNAME', (0, 0), (-1, -1), self.font_regular),
|
||||
('FONTNAME', (0, 0), (-1, 0), self.font_bold),
|
||||
('FONTNAME', (0, -1), (-1, -1), self.font_bold),
|
||||
('LEFTPADDING', (0, 0), (0, -1), 0),
|
||||
|
||||
src/pretix/base/migrations/0219_auto_20220706_0913.py (new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
# Generated by Django 3.2.12 on 2022-07-06 09:13
|
||||
|
||||
import django.db.models.deletion
|
||||
import i18nfield.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
import pretix.base.models.base
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pretixbase', '0218_checkinlist_addon_match'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='CustomerSSOProvider',
|
||||
fields=[
|
||||
('id', models.BigAutoField(primary_key=True, serialize=False)),
|
||||
('name', i18nfield.fields.I18nCharField(max_length=200)),
|
||||
('is_active', models.BooleanField(default=True)),
|
||||
('button_label', i18nfield.fields.I18nCharField(max_length=200)),
|
||||
('method', models.CharField(max_length=190)),
|
||||
('configuration', models.JSONField()),
|
||||
('organizer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sso_providers', to='pretixbase.organizer')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
bases=(models.Model, pretix.base.models.base.LoggingMixin),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='customer',
|
||||
name='provider',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='customers', to='pretixbase.customerssoprovider'),
|
||||
),
|
||||
]
|
||||
src/pretix/base/migrations/0220_auto_20220811_1002.py (new file, 68 lines)
@@ -0,0 +1,68 @@
|
||||
# Generated by Django 3.2.12 on 2022-08-11 10:02
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import pretix.base.models.base
|
||||
import pretix.base.models.customers
|
||||
import pretix.base.models.fields
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pretixbase', '0219_auto_20220706_0913'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='CustomerSSOClient',
|
||||
fields=[
|
||||
('id', models.BigAutoField(primary_key=True, serialize=False)),
|
||||
('name', models.CharField(max_length=255)),
|
||||
('is_active', models.BooleanField(default=True)),
|
||||
('client_id', models.CharField(db_index=True, default=pretix.base.models.customers.generate_client_id, max_length=100, unique=True)),
|
||||
('client_secret', models.CharField(max_length=255)),
|
||||
('client_type', models.CharField(default='confidential', max_length=32)),
|
||||
('authorization_grant_type', models.CharField(default='authorization-code', max_length=32)),
|
||||
('redirect_uris', models.TextField()),
|
||||
('allowed_scopes', pretix.base.models.fields.MultiStringField(default=['openid', 'profile', 'email', 'phone'])),
|
||||
('organizer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sso_clients', to='pretixbase.organizer')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
bases=(models.Model, pretix.base.models.base.LoggingMixin),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='customer',
|
||||
name='provider',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='customers', to='pretixbase.customerssoprovider'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='CustomerSSOGrant',
|
||||
fields=[
|
||||
('id', models.BigAutoField(primary_key=True, serialize=False)),
|
||||
('code', models.CharField(max_length=255, unique=True)),
|
||||
('nonce', models.CharField(max_length=255, null=True)),
|
||||
('auth_time', models.IntegerField()),
|
||||
('expires', models.DateTimeField()),
|
||||
('redirect_uri', models.TextField()),
|
||||
('scope', models.TextField()),
|
||||
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='grants', to='pretixbase.customerssoclient')),
|
||||
('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sso_grants', to='pretixbase.customer')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='CustomerSSOAccessToken',
|
||||
fields=[
|
||||
('id', models.BigAutoField(primary_key=True, serialize=False)),
|
||||
('from_code', models.CharField(max_length=255, null=True)),
|
||||
('token', models.CharField(max_length=255, unique=True)),
|
||||
('expires', models.DateTimeField()),
|
||||
('scope', models.TextField()),
|
||||
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='access_tokens', to='pretixbase.customerssoclient')),
|
||||
('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sso_access_tokens', to='pretixbase.customer')),
|
||||
],
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 3.2.4 on 2021-12-01 11:55
|
||||
|
||||
from django.db import migrations
|
||||
from django.db.models import Count
|
||||
|
||||
|
||||
def change_unique_identifiers(apps, schema_editor):
|
||||
# We cannot really know if a position was bundled or an add-on, but we can at least guess
|
||||
Question = apps.get_model("pretixbase", "Question")
|
||||
|
||||
for r in Question.objects.values('event', 'identifier').order_by().annotate(c=Count('*')).filter(c__gt=1):
|
||||
qs = Question.objects.filter(identifier=r['identifier'], event_id=r['event'])
|
||||
for i, q in enumerate(qs[1:]):
|
||||
q.identifier += f'_{i + 2}'
|
||||
q.save(update_fields=['identifier'])
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('pretixbase', '0220_auto_20220811_1002'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
change_unique_identifiers,
|
||||
migrations.RunPython.noop,
|
||||
),
|
||||
]
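The data migration above keeps the first question of each duplicated (event, identifier) pair untouched and appends _2, _3, ... to the later ones. A toy illustration of the resulting naming:

identifiers = ['FOOD', 'FOOD', 'FOOD', 'SIZE']
seen = {}
renamed = []
for ident in identifiers:
    n = seen.get(ident, 0)
    renamed.append(ident if n == 0 else f'{ident}_{n + 1}')
    seen[ident] = n + 1
assert renamed == ['FOOD', 'FOOD_2', 'FOOD_3', 'SIZE']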
|
||||
@@ -0,0 +1,17 @@
|
||||
# Generated by Django 3.2.4 on 2021-12-01 12:04
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pretixbase', '0221_clean_nonunique_question_identifiers'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name='question',
|
||||
unique_together={('event', 'identifier')},
|
||||
),
|
||||
]
|
||||
src/pretix/base/migrations/0223_voucher_min_usages.py (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.2.12 on 2022-10-12 09:13
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('pretixbase', '0222_alter_question_unique_together'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='voucher',
|
||||
name='min_usages',
|
||||
field=models.PositiveIntegerField(default=1),
|
||||
),
|
||||
]
|
||||
@@ -24,28 +24,61 @@ from django.conf import settings
|
||||
from django.contrib.auth.hashers import (
|
||||
check_password, is_password_usable, make_password,
|
||||
)
|
||||
from django.core.validators import RegexValidator
|
||||
from django.core.validators import RegexValidator, URLValidator
|
||||
from django.db import models
|
||||
from django.db.models import F, Q
|
||||
from django.utils.crypto import get_random_string, salted_hmac
|
||||
from django.utils.translation import gettext_lazy as _, pgettext_lazy
|
||||
from django_scopes import ScopedManager, scopes_disabled
|
||||
from i18nfield.fields import I18nCharField
|
||||
from phonenumber_field.modelfields import PhoneNumberField
|
||||
|
||||
from pretix.base.banlist import banned
|
||||
from pretix.base.models.base import LoggedModel
|
||||
from pretix.base.models.fields import MultiStringField
|
||||
from pretix.base.models.organizer import Organizer
|
||||
from pretix.base.settings import PERSON_NAME_SCHEMES
|
||||
from pretix.helpers.countries import FastCountryField
|
||||
|
||||
|
||||
class CustomerSSOProvider(LoggedModel):
|
||||
METHOD_OIDC = 'oidc'
|
||||
METHODS = (
|
||||
(METHOD_OIDC, 'OpenID Connect'),
|
||||
)
|
||||
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
organizer = models.ForeignKey(Organizer, related_name='sso_providers', on_delete=models.CASCADE)
|
||||
name = I18nCharField(
|
||||
max_length=200,
|
||||
verbose_name=_("Provider name"),
|
||||
)
|
||||
is_active = models.BooleanField(default=True, verbose_name=_('Active'))
|
||||
button_label = I18nCharField(
|
||||
max_length=200,
|
||||
verbose_name=_("Login button label"),
|
||||
)
|
||||
method = models.CharField(
|
||||
max_length=190,
|
||||
verbose_name=_("Single-sign-on method"),
|
||||
null=False, blank=False,
|
||||
choices=METHODS,
|
||||
)
|
||||
configuration = models.JSONField()
|
||||
|
||||
def allow_delete(self):
|
||||
return not self.customers.exists()
|
||||
|
||||
|
||||
class Customer(LoggedModel):
|
||||
"""
|
||||
Represents a registered customer of an organizer.
|
||||
"""
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
organizer = models.ForeignKey(Organizer, related_name='customers', on_delete=models.CASCADE)
|
||||
provider = models.ForeignKey(CustomerSSOProvider, related_name='customers', on_delete=models.PROTECT, null=True, blank=True)
|
||||
identifier = models.CharField(
|
||||
verbose_name=_('Customer ID'),
|
||||
max_length=190,
|
||||
db_index=True,
|
||||
help_text=_('You can enter any value here to make it easier to match the data with other sources. If you do '
|
||||
@@ -317,3 +350,134 @@ class AttendeeProfile(models.Model):
|
||||
parts.append(f'{a["field_label"]}: {val}')
|
||||
|
||||
return '\n'.join([str(p).strip() for p in parts if p and str(p).strip()])
|
||||
|
||||
|
||||
def generate_client_id():
|
||||
return get_random_string(40)
|
||||
|
||||
|
||||
def generate_client_secret():
|
||||
return get_random_string(40)
|
||||
|
||||
|
||||
class CustomerSSOClient(LoggedModel):
|
||||
CLIENT_CONFIDENTIAL = "confidential"
|
||||
CLIENT_PUBLIC = "public"
|
||||
CLIENT_TYPES = (
|
||||
(CLIENT_CONFIDENTIAL, pgettext_lazy("openidconnect", "Confidential")),
|
||||
(CLIENT_PUBLIC, pgettext_lazy("openidconnect", "Public")),
|
||||
)
|
||||
|
||||
GRANT_AUTHORIZATION_CODE = "authorization-code"
|
||||
GRANT_IMPLICIT = "implicit"
|
||||
GRANT_TYPES = (
|
||||
(GRANT_AUTHORIZATION_CODE, pgettext_lazy("openidconnect", "Authorization code")),
|
||||
(GRANT_IMPLICIT, pgettext_lazy("openidconnect", "Implicit")),
|
||||
)
|
||||
|
||||
SCOPE_CHOICES = (
|
||||
('openid', _('OpenID Connect access (required)')),
|
||||
('profile', _('Profile data (name, addresses)')),
|
||||
('email', _('E-mail address')),
|
||||
('phone', _('Phone number')),
|
||||
)
|
||||
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
organizer = models.ForeignKey(Organizer, related_name='sso_clients', on_delete=models.CASCADE)
|
||||
|
||||
name = models.CharField(verbose_name=_("Application name"), max_length=255, blank=False)
|
||||
is_active = models.BooleanField(default=True, verbose_name=_('Active'))
|
||||
|
||||
client_id = models.CharField(
|
||||
verbose_name=_("Client ID"),
|
||||
max_length=100, unique=True, default=generate_client_id, db_index=True
|
||||
)
|
||||
client_secret = models.CharField(
|
||||
max_length=255, blank=False,
|
||||
)
|
||||
|
||||
client_type = models.CharField(
|
||||
max_length=32, choices=CLIENT_TYPES, verbose_name=_("Client type"), default=CLIENT_CONFIDENTIAL,
|
||||
)
|
||||
authorization_grant_type = models.CharField(
|
||||
max_length=32, choices=GRANT_TYPES, verbose_name=_("Grant type"), default=GRANT_AUTHORIZATION_CODE,
|
||||
)
|
||||
redirect_uris = models.TextField(
|
||||
blank=False,
|
||||
verbose_name=_("Redirection URIs"),
|
||||
help_text=_("Allowed URIs list, space separated")
|
||||
)
|
||||
allowed_scopes = MultiStringField(
|
||||
default=['openid', 'profile', 'email', 'phone'],
|
||||
delimiter=" ",
|
||||
blank=True,
|
||||
verbose_name=_('Allowed access scopes'),
|
||||
help_text=_('Separate multiple values with spaces'),
|
||||
)
|
||||
|
||||
def is_usable(self):
|
||||
return self.is_active
|
||||
|
||||
def allow_redirect_uri(self, redirect_uri):
|
||||
return self.redirect_uris and any(r.strip() == redirect_uri for r in self.redirect_uris.split(' '))
|
||||
|
||||
def allow_delete(self):
|
||||
return True
|
||||
|
||||
def evaluated_scope(self, scope):
|
||||
scope = set(scope.split(' '))
|
||||
allowed_scopes = set(self.allowed_scopes)
|
||||
return ' '.join(scope & allowed_scopes)
|
||||
|
||||
def clean(self):
|
||||
redirect_uris = self.redirect_uris.strip().split()
|
||||
|
||||
if redirect_uris:
|
||||
validator = URLValidator()
|
||||
for uri in redirect_uris:
|
||||
validator(uri)
|
||||
|
||||
def set_client_secret(self):
|
||||
secret = get_random_string(64)
|
||||
self.client_secret = make_password(secret)
|
||||
return secret
|
||||
|
||||
def check_client_secret(self, raw_secret):
|
||||
"""
|
||||
Return a boolean of whether the raw_secret was correct. Handles
|
||||
hashing formats behind the scenes.
|
||||
"""
|
||||
def setter(raw_secret):
|
||||
self.client_secret = make_password(raw_secret)
|
||||
self.save(update_fields=["client_secret"])
|
||||
return check_password(raw_secret, self.client_secret, setter)
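A short sketch of the secret lifecycle these methods imply: the plain secret only exists at the moment set_client_secret() returns it, the database keeps a salted hash, and check_client_secret() re-hashes on successful verification if the hashing parameters change. organizer is assumed to be an existing Organizer instance:

from pretix.base.models.customers import CustomerSSOClient

client = CustomerSSOClient(organizer=organizer, name='My shop frontend')
plain_secret = client.set_client_secret()   # plain text is only available here
client.save()

# Later, e.g. when the token endpoint authenticates the client:
assert client.check_client_secret(plain_secret)
assert not client.check_client_secret('wrong-secret')

# Scope negotiation: only scopes the client is allowed to request survive.
client.allowed_scopes = ['openid', 'email']
client.evaluated_scope('openid profile email')   # -> 'openid email' (set intersection, order not guaranteed)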
|
||||
|
||||
|
||||
class CustomerSSOGrant(models.Model):
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
client = models.ForeignKey(
|
||||
CustomerSSOClient, on_delete=models.CASCADE, related_name="grants"
|
||||
)
|
||||
customer = models.ForeignKey(
|
||||
Customer, on_delete=models.CASCADE, related_name="sso_grants"
|
||||
)
|
||||
code = models.CharField(max_length=255, unique=True)
|
||||
nonce = models.CharField(max_length=255, null=True, blank=True)
|
||||
auth_time = models.IntegerField()
|
||||
expires = models.DateTimeField()
|
||||
redirect_uri = models.TextField()
|
||||
scope = models.TextField(blank=True)
|
||||
|
||||
|
||||
class CustomerSSOAccessToken(models.Model):
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
client = models.ForeignKey(
|
||||
CustomerSSOClient, on_delete=models.CASCADE, related_name="access_tokens"
|
||||
)
|
||||
customer = models.ForeignKey(
|
||||
Customer, on_delete=models.CASCADE, related_name="sso_access_tokens"
|
||||
)
|
||||
from_code = models.CharField(max_length=255, null=True, blank=True)
|
||||
token = models.CharField(max_length=255, unique=True)
|
||||
expires = models.DateTimeField()
|
||||
scope = models.TextField(blank=True)
|
||||
|
||||
@@ -327,7 +327,7 @@ class Discount(LoggedModel):
|
||||
candidates = []
|
||||
cardinality = None
|
||||
for se, l in subevent_to_idx.items():
|
||||
l = [ll for ll in l if ll not in current_group]
|
||||
l = [ll for ll in l if ll in initial_candidates and ll not in current_group]
|
||||
if cardinality and len(l) != cardinality:
|
||||
continue
|
||||
if se not in {positions[idx][1] for idx in current_group}:
|
||||
|
||||
@@ -69,6 +69,7 @@ from pretix.base.reldate import RelativeDateWrapper
|
||||
from pretix.base.validators import EventSlugBanlistValidator
|
||||
from pretix.helpers.database import GroupConcat
|
||||
from pretix.helpers.daterange import daterange
|
||||
from pretix.helpers.hierarkey import clean_filename
|
||||
from pretix.helpers.json import safe_string
|
||||
from pretix.helpers.thumb import get_thumbnail
|
||||
|
||||
@@ -122,6 +123,16 @@ class EventMixin:
|
||||
("SHORT_" if short else "") + ("DATETIME_FORMAT" if self.settings.show_times and show_times else "DATE_FORMAT")
|
||||
)
|
||||
|
||||
def get_weekday_from_display(self, tz=None, short=False) -> str:
|
||||
"""
|
||||
Returns a formatted string containing the weekday of the start date of the event with respect
|
||||
to the current locale.
|
||||
"""
|
||||
tz = tz or self.timezone
|
||||
return _date(
|
||||
self.date_from.astimezone(tz), ("D" if short else "l")
|
||||
)
|
||||
|
||||
def get_time_from_display(self, tz=None) -> str:
|
||||
"""
|
||||
Returns a formatted string containing the start time of the event, ignoring
|
||||
@@ -146,6 +157,18 @@ class EventMixin:
|
||||
("SHORT_" if short else "") + ("DATETIME_FORMAT" if self.settings.show_times and show_times else "DATE_FORMAT")
|
||||
)
|
||||
|
||||
def get_weekday_to_display(self, tz=None, short=False) -> str:
|
||||
"""
|
||||
Returns a formatted string containing the weekday of the end date of the event with respect
|
||||
to the current locale.
|
||||
"""
|
||||
tz = tz or self.timezone
|
||||
if not self.settings.show_date_to or not self.date_to:
|
||||
return ""
|
||||
return _date(
|
||||
self.date_to.astimezone(tz), ("D" if short else "l")
|
||||
)
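Usage sketch for the two new helpers; event is any Event or SubEvent instance, and the concrete weekday strings depend on the event dates and the active locale:

event.get_weekday_from_display()             # e.g. 'Friday' (Django 'l' format, locale-aware)
event.get_weekday_from_display(short=True)   # e.g. 'Fri' ('D' format)
event.get_weekday_to_display(short=True)     # e.g. 'Sun', or '' if show_date_to is off or date_to is unset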
|
||||
|
||||
def get_date_range_display(self, tz=None, force_show_end=False, as_html=False) -> str:
|
||||
"""
|
||||
Returns a formatted string containing the start date and the end date
|
||||
@@ -567,6 +590,7 @@ class Event(EventMixin, LoggedModel):
|
||||
self.settings.event_list_type = 'calendar'
|
||||
self.settings.invoice_email_attachment = True
|
||||
self.settings.name_scheme = 'given_family'
|
||||
self.settings.payment_banktransfer_invoice_immediately = True
|
||||
|
||||
@property
|
||||
def social_image(self):
|
||||
@@ -917,11 +941,13 @@ class Event(EventMixin, LoggedModel):
|
||||
s.object = self
|
||||
s.pk = None
|
||||
if s.value.startswith('file://'):
|
||||
fi = default_storage.open(s.value[7:], 'rb')
|
||||
fi = default_storage.open(s.value[len('file://'):], 'rb')
|
||||
nonce = get_random_string(length=8)
|
||||
fname_base = clean_filename(os.path.basename(s.value))
|
||||
|
||||
# TODO: make sure pub is always correct
|
||||
fname = 'pub/%s/%s/%s.%s.%s' % (
|
||||
self.organizer.slug, self.slug, s.key, nonce, s.value.split('.')[-1]
|
||||
self.organizer.slug, self.slug, fname_base, nonce, s.value.split('.')[-1]
|
||||
)
|
||||
newname = default_storage.save(fname, fi)
|
||||
s.value = 'file://' + newname
|
||||
|
||||
@@ -33,7 +33,8 @@ class MultiStringField(TextField):
|
||||
'delimiter_found': _('No value can contain the delimiter character.')
|
||||
}
|
||||
|
||||
def __init__(self, verbose_name=None, name=None, **kwargs):
|
||||
def __init__(self, verbose_name=None, name=None, delimiter=DELIMITER, **kwargs):
|
||||
self.delimiter = delimiter
|
||||
super().__init__(verbose_name, name, **kwargs)
|
||||
|
||||
def deconstruct(self):
|
||||
@@ -44,13 +45,13 @@ class MultiStringField(TextField):
|
||||
if isinstance(value, (list, tuple)):
|
||||
return value
|
||||
elif value:
|
||||
return [v for v in value.split(DELIMITER) if v]
|
||||
return [v for v in value.split(self.delimiter) if v]
|
||||
else:
|
||||
return []
|
||||
|
||||
def get_prep_value(self, value):
|
||||
if isinstance(value, (list, tuple)):
|
||||
return DELIMITER + DELIMITER.join(value) + DELIMITER
|
||||
return self.delimiter + self.delimiter.join(value) + self.delimiter
|
||||
elif value is None:
|
||||
if self.null:
|
||||
return None
|
||||
@@ -63,14 +64,14 @@ class MultiStringField(TextField):
|
||||
|
||||
def from_db_value(self, value, expression, connection):
|
||||
if value:
|
||||
return [v for v in value.split(DELIMITER) if v]
|
||||
return [v for v in value.split(self.delimiter) if v]
|
||||
else:
|
||||
return []
|
||||
|
||||
def validate(self, value, model_instance):
|
||||
super().validate(value, model_instance)
|
||||
for l in value:
|
||||
if DELIMITER in l:
|
||||
if self.delimiter in l:
|
||||
raise exceptions.ValidationError(
|
||||
self.error_messages['delimiter_found'],
|
||||
code='delimiter_found',
|
||||
@@ -78,9 +79,9 @@ class MultiStringField(TextField):
|
||||
|
||||
def get_lookup(self, lookup_name):
|
||||
if lookup_name == 'contains':
|
||||
return MultiStringContains
|
||||
return make_multistring_contains_lookup(self.delimiter)
|
||||
elif lookup_name == 'icontains':
|
||||
return MultiStringIContains
|
||||
return make_multistring_icontains_lookup(self.delimiter)
|
||||
elif lookup_name == 'isnull':
|
||||
return builtin_lookups.IsNull
|
||||
raise NotImplementedError(
|
||||
@@ -88,18 +89,22 @@ class MultiStringField(TextField):
|
||||
)
|
||||
|
||||
|
||||
class MultiStringContains(builtin_lookups.Contains):
|
||||
def process_rhs(self, qn, connection):
|
||||
sql, params = super().process_rhs(qn, connection)
|
||||
params[0] = "%" + DELIMITER + params[0][1:-1] + DELIMITER + "%"
|
||||
return sql, params
|
||||
def make_multistring_contains_lookup(delimiter):
|
||||
class Cls(builtin_lookups.Contains):
|
||||
def process_rhs(self, qn, connection):
|
||||
sql, params = super().process_rhs(qn, connection)
|
||||
params[0] = "%" + delimiter + params[0][1:-1] + delimiter + "%"
|
||||
return sql, params
|
||||
return Cls
|
||||
|
||||
|
||||
class MultiStringIContains(builtin_lookups.IContains):
|
||||
def process_rhs(self, qn, connection):
|
||||
sql, params = super().process_rhs(qn, connection)
|
||||
params[0] = "%" + DELIMITER + params[0][1:-1] + DELIMITER + "%"
|
||||
return sql, params
|
||||
def make_multistring_icontains_lookup(delimiter):
|
||||
class Cls(builtin_lookups.IContains):
|
||||
def process_rhs(self, qn, connection):
|
||||
sql, params = super().process_rhs(qn, connection)
|
||||
params[0] = "%" + delimiter + params[0][1:-1] + delimiter + "%"
|
||||
return sql, params
|
||||
return Cls
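The refactor above threads the configurable delimiter through storage, parsing and the contains/icontains lookups. A toy round-trip showing why values are wrapped in leading and trailing delimiters (the delimiter characters here are illustrative; the field's default DELIMITER constant is defined elsewhere in this module):

def prep(value, delimiter):
    # get_prep_value: wrap and join, e.g. ['openid', 'email'] -> '<d>openid<d>email<d>'
    return delimiter + delimiter.join(value) + delimiter

def parse(stored, delimiter):
    # from_db_value / to_python: split and drop the empty edge pieces
    return [v for v in stored.split(delimiter) if v]

stored = prep(['openid', 'profile', 'email'], ' ')   # allowed_scopes uses a space delimiter
assert parse(stored, ' ') == ['openid', 'profile', 'email']
# A `contains` lookup then matches '%<d>email<d>%', so 'email' never matches 'someemail'.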
|
||||
|
||||
|
||||
class MultiStringSerializer(serializers.Field):
|
||||
|
||||
@@ -600,10 +600,14 @@ class Item(LoggedModel):
|
||||
invoice_address=invoice_address,
|
||||
base_price_is='gross',
|
||||
currency=currency)
|
||||
compare_price = self.tax_rule.tax(b.designated_price * b.count,
|
||||
override_tax_rate=override_tax_rate,
|
||||
invoice_address=invoice_address,
|
||||
currency=currency)
|
||||
if not self.tax_rule:
|
||||
compare_price = TaxedPrice(gross=b.designated_price * b.count, net=b.designated_price * b.count,
|
||||
tax=Decimal('0.00'), rate=Decimal('0.00'), name='')
|
||||
else:
|
||||
compare_price = self.tax_rule.tax(b.designated_price * b.count,
|
||||
override_tax_rate=override_tax_rate,
|
||||
invoice_address=invoice_address,
|
||||
currency=currency)
|
||||
t.net += bprice.net - compare_price.net
|
||||
t.tax += bprice.tax - compare_price.tax
|
||||
t.name = "MIXED!"
|
||||
@@ -1325,6 +1329,7 @@ class Question(LoggedModel):
|
||||
verbose_name = _("Question")
|
||||
verbose_name_plural = _("Questions")
|
||||
ordering = ('position', 'id')
|
||||
unique_together = (('event', 'identifier'),)
|
||||
|
||||
def __str__(self):
|
||||
return str(self.question)
|
||||
@@ -1340,7 +1345,7 @@ class Question(LoggedModel):
|
||||
@staticmethod
|
||||
def _clean_identifier(event, code, instance=None):
|
||||
qs = Question.objects.filter(event=event, identifier__iexact=code)
|
||||
if instance:
|
||||
if instance and instance.pk:
|
||||
qs = qs.exclude(pk=instance.pk)
|
||||
if qs.exists():
|
||||
raise ValidationError(_('This identifier is already used for a different question.'))
|
||||
|
||||
@@ -268,7 +268,10 @@ class Order(LockModel, LoggedModel):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
if 'require_approval' not in self.get_deferred_fields() and 'status' not in self.get_deferred_fields():
|
||||
self.__initial_status_paid_or_pending = self.status in (Order.STATUS_PENDING, Order.STATUS_PAID) and not self.require_approval
|
||||
self._transaction_key_reset()
|
||||
|
||||
def _transaction_key_reset(self):
|
||||
self.__initial_status_paid_or_pending = self.status in (Order.STATUS_PENDING, Order.STATUS_PAID) and not self.require_approval
|
||||
|
||||
def gracefully_delete(self, user=None, auth=None):
|
||||
from . import GiftCard, GiftCardTransaction, Membership, Voucher
|
||||
@@ -746,6 +749,19 @@ class Order(LockModel, LoggedModel):
|
||||
length += 1
|
||||
iteration = 0
|
||||
|
||||
@property
|
||||
def modify_deadline(self):
|
||||
modify_deadline = self.event.settings.get('last_order_modification_date', as_type=RelativeDateWrapper)
|
||||
if self.event.has_subevents and modify_deadline:
|
||||
dates = [
|
||||
modify_deadline.datetime(se)
|
||||
for se in self.event.subevents.filter(id__in=self.positions.values_list('subevent', flat=True))
|
||||
]
|
||||
return min(dates) if dates else None
|
||||
elif modify_deadline:
|
||||
return modify_deadline.datetime(self.event)
|
||||
return None
|
||||
|
||||
@property
|
||||
def can_modify_answers(self) -> bool:
|
||||
"""
|
||||
@@ -758,16 +774,7 @@ class Order(LockModel, LoggedModel):
|
||||
if self.status not in (Order.STATUS_PENDING, Order.STATUS_PAID, Order.STATUS_EXPIRED):
|
||||
return False
|
||||
|
||||
modify_deadline = self.event.settings.get('last_order_modification_date', as_type=RelativeDateWrapper)
|
||||
if self.event.has_subevents and modify_deadline:
|
||||
dates = [
|
||||
modify_deadline.datetime(se)
|
||||
for se in self.event.subevents.filter(id__in=self.positions.values_list('subevent', flat=True))
|
||||
]
|
||||
modify_deadline = min(dates) if dates else None
|
||||
elif modify_deadline:
|
||||
modify_deadline = modify_deadline.datetime(self.event)
|
||||
|
||||
modify_deadline = self.modify_deadline
|
||||
if modify_deadline is not None and now() > modify_deadline:
|
||||
return False
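With the new property, the storefront check above and the order_modification_deadline_date_and_time e-mail placeholder read the same value instead of recomputing it. A short usage sketch, where order is any Order instance:

from django.utils.formats import date_format
from django.utils.timezone import now

deadline = order.modify_deadline            # None if no deadline is configured
can_still_modify = deadline is None or now() <= deadline
placeholder_text = date_format(
    deadline.astimezone(order.event.timezone), 'SHORT_DATETIME_FORMAT'
) if deadline else ''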
|
||||
|
||||
@@ -1052,12 +1059,14 @@ class Order(LockModel, LoggedModel):
|
||||
if p.canceled and not _backfill_before_cancellation:
|
||||
continue
|
||||
target_transaction_count[Transaction.key(p)] += 1
|
||||
p._transaction_key_reset()
|
||||
|
||||
fees = self.fees.all() if fees is None else fees
|
||||
for f in fees:
|
||||
if f.canceled and not _backfill_before_cancellation:
|
||||
continue
|
||||
target_transaction_count[Transaction.key(f)] += 1
|
||||
f._transaction_key_reset()
|
||||
|
||||
keys = set(target_transaction_count.keys()) | set(current_transaction_count.keys())
|
||||
create = []
|
||||
@@ -1084,6 +1093,7 @@ class Order(LockModel, LoggedModel):
|
||||
create.sort(key=lambda t: (0 if t.count < 0 else 1, t.positionid or 0))
|
||||
if save:
|
||||
Transaction.objects.bulk_create(create)
|
||||
self._transaction_key_reset()
|
||||
_transactions_mark_order_clean(self.pk)
|
||||
return create
|
||||
|
||||
@@ -2072,8 +2082,20 @@ class OrderFee(models.Model):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
if not self.get_deferred_fields():
|
||||
self.__initial_transaction_key = Transaction.key(self)
|
||||
self.__initial_canceled = self.canceled
|
||||
self._transaction_key_reset()
|
||||
|
||||
def refresh_from_db(self, using=None, fields=None):
|
||||
"""
|
||||
Reload field values from the database. Similar to django's implementation
|
||||
with adjustment for our method that forces us to create ``Transaction`` instances.
|
||||
"""
|
||||
if not self.get_deferred_fields():
|
||||
self._transaction_key_reset()
|
||||
return super().refresh_from_db(using, fields)
|
||||
|
||||
def _transaction_key_reset(self):
|
||||
self.__initial_transaction_key = Transaction.key(self)
|
||||
self.__initial_canceled = self.canceled
|
||||
|
||||
def __str__(self):
|
||||
if self.description:
|
||||
@@ -2193,8 +2215,20 @@ class OrderPosition(AbstractPosition):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
if not self.get_deferred_fields():
|
||||
self.__initial_transaction_key = Transaction.key(self)
|
||||
self.__initial_canceled = self.canceled
|
||||
self._transaction_key_reset()
|
||||
|
||||
def refresh_from_db(self, using=None, fields=None):
|
||||
"""
|
||||
Reload field values from the database. Similar to django's implementation
|
||||
with adjustment for our method that forces us to create ``Transaction`` instances.
|
||||
"""
|
||||
if not self.get_deferred_fields():
|
||||
self._transaction_key_reset()
|
||||
return super().refresh_from_db(using, fields)
|
||||
|
||||
def _transaction_key_reset(self):
|
||||
self.__initial_transaction_key = Transaction.key(self)
|
||||
self.__initial_canceled = self.canceled
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("Order position")
|
||||
@@ -2203,7 +2237,7 @@ class OrderPosition(AbstractPosition):
|
||||
|
||||
@cached_property
|
||||
def sort_key(self):
|
||||
return self.addon_to.positionid if self.addon_to else self.positionid, self.addon_to_id or 0
|
||||
return self.addon_to.positionid if self.addon_to else self.positionid, self.addon_to_id or 0, self.positionid
|
||||
|
||||
@property
|
||||
def checkins(self):
|
||||
@@ -2229,7 +2263,7 @@ class OrderPosition(AbstractPosition):
|
||||
ops = []
|
||||
cp_mapping = {}
|
||||
# The sorting key ensures that all addons come directly after the position they refer to
|
||||
for i, cartpos in enumerate(sorted(cp, key=lambda c: (c.addon_to_id or c.pk, c.addon_to_id or 0))):
|
||||
for i, cartpos in enumerate(sorted(cp, key=lambda c: c.sort_key)):
|
||||
op = OrderPosition(order=order)
|
||||
for f in AbstractPosition._meta.fields:
|
||||
if f.name == 'addon_to':
|
||||
@@ -2625,6 +2659,20 @@ class CartPosition(AbstractPosition):
|
||||
self.event.currency)
|
||||
return self.price - net
|
||||
|
||||
@cached_property
|
||||
def sort_key(self):
|
||||
subevent_key = (self.subevent.date_from, str(self.subevent.name), self.subevent_id) if self.subevent_id else (0, "", 0)
|
||||
category_key = (self.item.category.position, self.item.category.id) if self.item.category_id is not None else (0, 0)
|
||||
item_key = self.item.position, self.item_id
|
||||
variation_key = (self.variation.position, self.variation.id) if self.variation_id is not None else (0, 0)
|
||||
line_key = (self.price, (self.voucher_id or 0), (self.seat.sorting_rank if self.seat_id else None), self.pk)
|
||||
sort_key = subevent_key + category_key + item_key + variation_key + line_key
|
||||
|
||||
if self.addon_to_id:
|
||||
return self.addon_to.sort_key + (1 if self.is_bundled else 2,) + sort_key
|
||||
else:
|
||||
return sort_key
|
||||
|
||||
def update_listed_price_and_voucher(self, voucher_only=False, max_discount=None):
|
||||
from pretix.base.services.pricing import (
|
||||
get_listed_price, is_included_for_free,
|
||||
@@ -2680,6 +2728,11 @@ class CartPosition(AbstractPosition):
|
||||
self.tax_rate = line_price.rate
|
||||
self.save(update_fields=['line_price_gross', 'tax_rate'])
|
||||
|
||||
@property
|
||||
def addons_without_bundled(self):
|
||||
addons = [op for op in self.addons.all() if not op.is_bundled]
|
||||
return sorted(addons, key=lambda cp: cp.sort_key)
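A toy illustration of why the composite key keeps add-ons in place: an add-on's sort_key starts with its parent's key, followed by 1 for bundled and 2 for regular add-ons, so plain tuple sorting groups them directly behind the parent. The tuples below are simplified stand-ins for the real subevent/category/item/variation components:

parent = (5, 'Ticket', 101)                     # a top-level position
bundled = parent + (1,) + (7, 'Parking', 102)   # bundled add-on of `parent`
addon = parent + (2,) + (7, 'Parking', 103)     # regular add-on of `parent`
other = (8, 'Workshop', 104)                    # an unrelated position

assert sorted([other, addon, parent, bundled]) == [parent, bundled, addon, other]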
|
||||
|
||||
|
||||
class InvoiceAddress(models.Model):
|
||||
last_modified = models.DateTimeField(auto_now=True)
|
||||
|
||||
@@ -114,7 +114,7 @@ EU_CURRENCIES = {
|
||||
'RO': 'RON',
|
||||
'SE': 'SEK'
|
||||
}
|
||||
VAT_ID_COUNTRIES = EU_COUNTRIES | {'CH'}
|
||||
VAT_ID_COUNTRIES = EU_COUNTRIES | {'CH', 'NO'}
|
||||
|
||||
|
||||
def is_eu_country(cc):
|
||||
|
||||
@@ -137,6 +137,8 @@ class Voucher(LoggedModel):
|
||||
:type max_usages: int
|
||||
:param redeemed: The number of times this voucher already has been redeemed
|
||||
:type redeemed: int
|
||||
:param min_usages: The minimum number of times this voucher must be redeemed
|
||||
:type min_usages: int
|
||||
:param valid_until: The expiration date of this voucher (optional)
|
||||
:type valid_until: datetime
|
||||
:param block_quota: If set to true, this voucher will reserve quota for its holder
|
||||
@@ -199,6 +201,14 @@ class Voucher(LoggedModel):
|
||||
verbose_name=_("Redeemed"),
|
||||
default=0
|
||||
)
|
||||
min_usages = models.PositiveIntegerField(
|
||||
verbose_name=_("Minimum usages"),
|
||||
help_text=_("If set to more than one, the voucher must be redeemed for this many products when it is used for "
|
||||
"the first time. On later usages, it can also be used for lower numbers of products. Note that "
|
||||
"this means that the total number of usages in some cases can be lower than this limit, e.g. in "
|
||||
"case of cancellations."),
|
||||
default=1
|
||||
)
|
||||
budget = models.DecimalField(
|
||||
verbose_name=_("Maximum discount budget"),
|
||||
help_text=_("This is the maximum monetary amount that will be discounted using this voucher across all usages. "
|
||||
@@ -350,6 +360,10 @@ class Voucher(LoggedModel):
|
||||
'redeemed': redeemed
|
||||
}
|
||||
)
|
||||
if data.get('max_usages', 1) < data.get('min_usages', 1):
|
||||
raise ValidationError(
|
||||
_('The maximum number of usages may not be lower than the minimum number of usages.'),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def clean_subevent(data, event):
|
||||
@@ -464,7 +478,7 @@ class Voucher(LoggedModel):
|
||||
if quota:
|
||||
raise ValidationError(_('You need to choose a specific product if you select a seat.'))
|
||||
|
||||
if data.get('max_usages', 1) > 1:
|
||||
if data.get('max_usages', 1) > 1 or data.get('min_usages', 1) > 1:
|
||||
raise ValidationError(_('Seat-specific vouchers can only be used once.'))
|
||||
|
||||
if item and seat.product != item:
|
||||
@@ -567,6 +581,10 @@ class Voucher(LoggedModel):
|
||||
else:
|
||||
return bool(subevent.seating_plan) if subevent else self.event.seating_plan
|
||||
|
||||
@property
|
||||
def min_usages_remaining(self):
|
||||
return max(1, self.min_usages - self.redeemed)
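A worked example of the new minimum-usage rule, using an unsaved Voucher purely for illustration:

from pretix.base.models import Voucher

v = Voucher(max_usages=10, min_usages=3, redeemed=0)
assert v.min_usages_remaining == 3    # first redemption must cover at least 3 products
v.redeemed = 2
assert v.min_usages_remaining == 1    # afterwards the minimum effectively stops applying

# Validation mirrors the clean_* checks above: max_usages may not be lower than min_usages.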
|
||||
|
||||
@classmethod
|
||||
def annotate_budget_used_orders(cls, qs):
|
||||
opq = OrderPosition.objects.filter(
|
||||
|
||||
@@ -392,7 +392,7 @@ class InvoiceAddressCountry(ImportColumn):
|
||||
return list(countries)
|
||||
|
||||
def clean(self, value, previous_values):
|
||||
if value and not Country(value).numeric:
|
||||
if value and not (Country(value).numeric or value in settings.COUNTRIES_OVERRIDE):
|
||||
raise ValidationError(_("Please enter a valid country code."))
|
||||
return value
|
||||
|
||||
@@ -538,7 +538,7 @@ class AttendeeCountry(ImportColumn):
|
||||
return list(countries)
|
||||
|
||||
def clean(self, value, previous_values):
|
||||
if value and not Country(value).numeric:
|
||||
if value and not (Country(value).numeric or value in settings.COUNTRIES_OVERRIDE):
|
||||
raise ValidationError(_("Please enter a valid country code."))
|
||||
return value
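Both import columns now accept custom country codes registered through settings.COUNTRIES_OVERRIDE in addition to codes that django-countries can map to an ISO numeric value. A small sketch of the relaxed check; 'XK' is a typical override example and only passes if it is actually configured:

from django.conf import settings
from django_countries.fields import Country


def is_valid_country(value):
    # mirrors the relaxed check above
    return bool(Country(value).numeric) or value in settings.COUNTRIES_OVERRIDE


assert is_valid_country('DE')   # regular ISO 3166-1 code with a numeric mapping
# is_valid_country('XK') passes only if 'XK' is listed in COUNTRIES_OVERRIDE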
|
||||
|
||||
|
||||
@@ -57,7 +57,7 @@ from django.utils.html import conditional_escape
|
||||
from django.utils.timezone import now
|
||||
from django.utils.translation import gettext_lazy as _, pgettext
|
||||
from i18nfield.strings import LazyI18nString
|
||||
from PyPDF2 import PdfFileReader
|
||||
from PyPDF2 import PdfReader
|
||||
from pytz import timezone
|
||||
from reportlab.graphics import renderPDF
|
||||
from reportlab.graphics.barcode.qr import QrCodeWidget
|
||||
@@ -251,6 +251,11 @@ DEFAULT_VARIABLES = OrderedDict((
|
||||
"editor_sample": _("20:00"),
|
||||
"evaluate": lambda op, order, ev: ev.get_time_from_display()
|
||||
}),
|
||||
("event_begin_weekday", {
|
||||
"label": _("Event begin weekday"),
|
||||
"editor_sample": _("Friday"),
|
||||
"evaluate": lambda op, order, ev: ev.get_weekday_from_display()
|
||||
}),
|
||||
("event_end", {
|
||||
"label": _("Event end date and time"),
|
||||
"editor_sample": _("2017-05-31 22:00"),
|
||||
@@ -275,6 +280,11 @@ DEFAULT_VARIABLES = OrderedDict((
|
||||
"TIME_FORMAT"
|
||||
) if ev.date_to else ""
|
||||
}),
|
||||
("event_end_weekday", {
|
||||
"label": _("Event end weekday"),
|
||||
"editor_sample": _("Friday"),
|
||||
"evaluate": lambda op, order, ev: ev.get_weekday_to_display()
|
||||
}),
|
||||
("event_admission", {
|
||||
"label": _("Event admission date and time"),
|
||||
"editor_sample": _("2017-05-31 19:00"),
|
||||
@@ -646,7 +656,7 @@ class Renderer:
|
||||
self.event = event
|
||||
if self.background_file:
|
||||
self.bg_bytes = self.background_file.read()
|
||||
self.bg_pdf = PdfFileReader(BytesIO(self.bg_bytes), strict=False)
|
||||
self.bg_pdf = PdfReader(BytesIO(self.bg_bytes), strict=False)
|
||||
else:
|
||||
self.bg_bytes = None
|
||||
self.bg_pdf = None
|
||||
@@ -861,7 +871,7 @@ class Renderer:
|
||||
canvas.restoreState()
|
||||
|
||||
def draw_page(self, canvas: Canvas, order: Order, op: OrderPosition, show_page=True, only_page=None):
|
||||
page_count = self.bg_pdf.getNumPages()
|
||||
page_count = len(self.bg_pdf.pages)
|
||||
|
||||
if not only_page and not show_page:
|
||||
raise ValueError("only_page=None and show_page=False cannot be combined")
|
||||
@@ -881,7 +891,11 @@ class Renderer:
|
||||
elif o['type'] == "poweredby":
|
||||
self._draw_poweredby(canvas, op, o)
|
||||
if self.bg_pdf:
|
||||
canvas.setPageSize((self.bg_pdf.getPage(page).mediaBox[2], self.bg_pdf.getPage(page).mediaBox[3]))
|
||||
page_size = (self.bg_pdf.pages[0].mediabox[2], self.bg_pdf.pages[0].mediabox[3])
|
||||
if self.bg_pdf.pages[0].get('/Rotate') in (90, 270):
|
||||
# swap dimensions due to pdf being rotated
|
||||
page_size = page_size[::-1]
|
||||
canvas.setPageSize(page_size)
|
||||
if show_page:
|
||||
canvas.showPage()
|
||||
|
||||
@@ -905,17 +919,41 @@ class Renderer:
             with open(os.path.join(d, 'out.pdf'), 'rb') as f:
                 return BytesIO(f.read())
         else:
-            from PyPDF2 import PdfFileReader, PdfFileWriter
+            from PyPDF2 import PdfReader, PdfWriter, Transformation
+            from PyPDF2.generic import RectangleObject
             buffer.seek(0)
-            new_pdf = PdfFileReader(buffer)
-            output = PdfFileWriter()
+            new_pdf = PdfReader(buffer)
+            output = PdfWriter()
 
             for i, page in enumerate(new_pdf.pages):
-                bg_page = copy.copy(self.bg_pdf.getPage(i))
-                bg_page.mergePage(page)
-                output.addPage(bg_page)
+                bg_page = copy.copy(self.bg_pdf.pages[i])
+                bg_rotation = bg_page.get('/Rotate')
+                if bg_rotation:
+                    # /Rotate is clockwise, transformation.rotate is counter-clockwise
+                    t = Transformation().rotate(bg_rotation)
+                    w = float(page.mediabox.getWidth())
+                    h = float(page.mediabox.getHeight())
+                    if bg_rotation in (90, 270):
+                        # offset due to rotation base
+                        if bg_rotation == 90:
+                            t = t.translate(h, 0)
+                        else:
+                            t = t.translate(0, w)
+                        # rotate mediabox as well
+                        page.mediabox = RectangleObject((
+                            page.mediabox.left.as_numeric(),
+                            page.mediabox.bottom.as_numeric(),
+                            page.mediabox.top.as_numeric(),
+                            page.mediabox.right.as_numeric(),
+                        ))
+                        page.trimbox = page.mediabox
+                    elif bg_rotation == 180:
+                        t = t.translate(w, h)
+                    page.add_transformation(t)
+                bg_page.merge_page(page)
+                output.add_page(bg_page)
 
-            output.addMetadata({
+            output.add_metadata({
                 '/Title': str(title),
                 '/Creator': 'pretix',
             })
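The key idea in the rotation handling above is that a background page carrying /Rotate is merged with an overlay that was rendered unrotated, so the overlay has to be transformed to match before the merge. A reduced sketch of the same pattern outside the Renderer class, assuming PyPDF2 2.x with Transformation support and leaving out the mediabox adjustment; file names are placeholders.

import copy

from PyPDF2 import PdfReader, PdfWriter, Transformation

background = PdfReader("background.pdf", strict=False)   # placeholder files
overlay = PdfReader("overlay.pdf")
output = PdfWriter()

for i, page in enumerate(overlay.pages):
    bg_page = copy.copy(background.pages[i])
    rotation = bg_page.get('/Rotate')
    if rotation:
        # /Rotate is clockwise, Transformation().rotate() is counter-clockwise,
        # so passing the raw value mirrors what the hunk above does.
        page.add_transformation(Transformation().rotate(rotation))
    bg_page.merge_page(page)
    output.add_page(bg_page)

with open("merged.pdf", "wb") as f:
    output.write(f)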
@@ -23,11 +23,12 @@
|
||||
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: pretix_sig1.proto
|
||||
|
||||
"""Generated protocol buffer code."""
|
||||
from google.protobuf import (
|
||||
descriptor as _descriptor, message as _message, reflection as _reflection,
|
||||
descriptor as _descriptor, descriptor_pool as _descriptor_pool,
|
||||
symbol_database as _symbol_database,
|
||||
)
|
||||
from google.protobuf.internal import builder as _builder
|
||||
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
@@ -36,80 +37,14 @@ _sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor.FileDescriptor(
|
||||
name='pretix_sig1.proto',
|
||||
package='',
|
||||
syntax='proto3',
|
||||
serialized_options=b'\n\026eu.pretix.secrets.sig1B\014TicketProtos',
|
||||
create_key=_descriptor._internal_create_key,
|
||||
serialized_pb=b'\n\x11pretix_sig1.proto\"I\n\x06Ticket\x12\x0c\n\x04seed\x18\x01 \x01(\t\x12\x0c\n\x04item\x18\x02 \x01(\x03\x12\x11\n\tvariation\x18\x03 \x01(\x03\x12\x10\n\x08subevent\x18\x04 \x01(\x03\x42&\n\x16\x65u.pretix.secrets.sig1B\x0cTicketProtosb\x06proto3'
|
||||
)
|
||||
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11pretix_sig1.proto\"I\n\x06Ticket\x12\x0c\n\x04seed\x18\x01 \x01(\t\x12\x0c\n\x04item\x18\x02 \x01(\x03\x12\x11\n\tvariation\x18\x03 \x01(\x03\x12\x10\n\x08subevent\x18\x04 \x01(\x03\x42\x33\n#eu.pretix.libpretixsync.crypto.sig1B\x0cTicketProtosb\x06proto3')
|
||||
|
||||
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
|
||||
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'pretix_sig1_pb2', globals())
|
||||
if _descriptor._USE_C_DESCRIPTORS == False:
|
||||
|
||||
|
||||
|
||||
_TICKET = _descriptor.Descriptor(
|
||||
name='Ticket',
|
||||
full_name='Ticket',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
create_key=_descriptor._internal_create_key,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='seed', full_name='Ticket.seed', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='item', full_name='Ticket.item', index=1,
|
||||
number=2, type=3, cpp_type=2, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='variation', full_name='Ticket.variation', index=2,
|
||||
number=3, type=3, cpp_type=2, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='subevent', full_name='Ticket.subevent', index=3,
|
||||
number=4, type=3, cpp_type=2, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
serialized_options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=21,
|
||||
serialized_end=94,
|
||||
)
|
||||
|
||||
DESCRIPTOR.message_types_by_name['Ticket'] = _TICKET
|
||||
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
|
||||
|
||||
Ticket = _reflection.GeneratedProtocolMessageType('Ticket', (_message.Message,), {
|
||||
'DESCRIPTOR' : _TICKET,
|
||||
'__module__' : 'pretix_sig1_pb2'
|
||||
# @@protoc_insertion_point(class_scope:Ticket)
|
||||
})
|
||||
_sym_db.RegisterMessage(Ticket)
|
||||
|
||||
|
||||
DESCRIPTOR._options = None
|
||||
DESCRIPTOR._options = None
|
||||
DESCRIPTOR._serialized_options = b'\n#eu.pretix.libpretixsync.crypto.sig1B\014TicketProtos'
|
||||
_TICKET._serialized_start=21
|
||||
_TICKET._serialized_end=94
|
||||
# @@protoc_insertion_point(module_scope)
|
||||
|
||||
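The regenerated pretix_sig1_pb2 module switches from hand-inlined Descriptor objects to the descriptor_pool/builder style emitted by newer protoc versions, while the Ticket message itself (seed, item, variation, subevent) stays the same. A small usage sketch of the generated class, serialization round-trip only; the field values are made up and the import path may differ depending on where the module lives in the tree.

# Hypothetical usage of the regenerated module.
from pretix_sig1_pb2 import Ticket

t = Ticket(seed="abc123", item=1, variation=0, subevent=0)  # made-up values
payload = t.SerializeToString()

parsed = Ticket()
parsed.ParseFromString(payload)
assert parsed.seed == "abc123"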
@@ -101,15 +101,20 @@ error_messages = {
|
||||
'min_items_per_product': _("You need to select at least %(min)s items of the product %(product)s."),
|
||||
'min_items_per_product_removed': _("We removed %(product)s from your cart as you can not buy less than "
|
||||
"%(min)s items of it."),
|
||||
'not_started': _('The presale period for this event has not yet started.'),
|
||||
'ended': _('The presale period for this event has ended.'),
|
||||
'not_started': _('The booking period for this event has not yet started.'),
|
||||
'ended': _('The booking period for this event has ended.'),
|
||||
'payment_ended': _('All payments for this event need to be confirmed already, so no new orders can be created.'),
|
||||
'some_subevent_not_started': _('The presale period for this event has not yet started. The affected positions '
|
||||
'some_subevent_not_started': _('The booking period for this event has not yet started. The affected positions '
|
||||
'have been removed from your cart.'),
|
||||
'some_subevent_ended': _('The presale period for one of the events in your cart has ended. The affected '
|
||||
'some_subevent_ended': _('The booking period for one of the events in your cart has ended. The affected '
|
||||
'positions have been removed from your cart.'),
|
||||
'price_too_high': _('The entered price is too high.'),
|
||||
'voucher_invalid': _('This voucher code is not known in our database.'),
|
||||
'voucher_min_usages': _('The voucher code "%(voucher)s" can only be used if you select at least %(number)s '
|
||||
'matching products.'),
|
||||
'voucher_min_usages_removed': _('The voucher code "%(voucher)s" can only be used if you select at least '
|
||||
'%(number)s matching products. We have therefore removed some positions from '
|
||||
'your cart that can no longer be purchased like this.'),
|
||||
'voucher_redeemed': _('This voucher code has already been used the maximum number of times allowed.'),
|
||||
'voucher_redeemed_cart': _('This voucher code is currently locked since it is already contained in a cart. This '
|
||||
'might mean that someone else is redeeming this voucher right now, or that you tried '
|
||||
@@ -147,6 +152,45 @@ error_messages = {
|
||||
}
|
||||
|
||||
|
||||
def _get_quota_availability(quota_diff, now_dt):
|
||||
quotas_ok = defaultdict(int)
|
||||
qa = QuotaAvailability()
|
||||
qa.queue(*[k for k, v in quota_diff.items() if v > 0])
|
||||
qa.compute(now_dt=now_dt)
|
||||
for quota, count in quota_diff.items():
|
||||
if count <= 0:
|
||||
quotas_ok[quota] = 0
|
||||
break
|
||||
avail = qa.results[quota]
|
||||
if avail[1] is not None and avail[1] < count:
|
||||
quotas_ok[quota] = min(count, avail[1])
|
||||
else:
|
||||
quotas_ok[quota] = count
|
||||
return quotas_ok
|
||||
|
||||
|
||||
def _get_voucher_availability(event, voucher_use_diff, now_dt, exclude_position_ids):
|
||||
vouchers_ok = {}
|
||||
_voucher_depend_on_cart = set()
|
||||
for voucher, count in voucher_use_diff.items():
|
||||
voucher.refresh_from_db()
|
||||
|
||||
if voucher.valid_until is not None and voucher.valid_until < now_dt:
|
||||
raise CartError(error_messages['voucher_expired'])
|
||||
|
||||
redeemed_in_carts = CartPosition.objects.filter(
|
||||
Q(voucher=voucher) & Q(event=event) &
|
||||
Q(expires__gte=now_dt)
|
||||
).exclude(pk__in=exclude_position_ids)
|
||||
cart_count = redeemed_in_carts.count()
|
||||
v_avail = voucher.max_usages - voucher.redeemed - cart_count
|
||||
if cart_count > 0:
|
||||
_voucher_depend_on_cart.add(voucher)
|
||||
vouchers_ok[voucher] = v_avail
|
||||
|
||||
return vouchers_ok, _voucher_depend_on_cart
|
||||
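Pulling _get_quota_availability and _get_voucher_availability out to module level lets them be reused outside CartManager (the class methods below are rewritten to delegate to them). A sketch of how a caller might combine the two helpers, assuming quota_diff and voucher_use_diff are Counter-like mappings as in the code above; everything apart from the two helpers themselves is illustrative.

from django.utils.timezone import now

def check_cart_delta(event, quota_diff, voucher_use_diff, exclude_position_ids):
    # Illustrative wrapper around the two new module-level helpers.
    now_dt = now()
    quotas_ok = _get_quota_availability(quota_diff, now_dt)
    vouchers_ok, depends_on_cart = _get_voucher_availability(
        event, voucher_use_diff, now_dt, exclude_position_ids
    )
    # A caller would now compare the requested counts against quotas_ok /
    # vouchers_ok and raise CartError for anything that no longer fits.
    return quotas_ok, vouchers_ok, depends_on_cart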
|
||||
|
||||
class CartManager:
|
||||
AddOperation = namedtuple('AddOperation', ('count', 'item', 'variation', 'voucher', 'quotas',
|
||||
'addon_to', 'subevent', 'bundled', 'seat', 'listed_price',
|
||||
@@ -485,6 +529,15 @@ class CartManager:
|
||||
voucher_use_diff[voucher] += 1
|
||||
ops.append((listed_price - price_after_voucher, self.VoucherOperation(p, voucher, price_after_voucher)))
|
||||
|
||||
for voucher, cnt in list(voucher_use_diff.items()):
|
||||
if 0 < cnt < voucher.min_usages_remaining:
|
||||
raise CartError(
|
||||
_(error_messages['voucher_min_usages']) % {
|
||||
'voucher': voucher.code,
|
||||
'number': voucher.min_usages_remaining,
|
||||
}
|
||||
)
|
||||
|
||||
# If there are not enough voucher usages left for the full cart, let's apply them in the order that benefits
|
||||
# the user the most.
|
||||
ops.sort(key=lambda k: k[0], reverse=True)
|
||||
@@ -819,43 +872,13 @@ class CartManager:
|
||||
self._quota_diff.update(quota_diff)
|
||||
self._operations += operations
|
||||
|
||||
def _get_quota_availability(self):
|
||||
quotas_ok = defaultdict(int)
|
||||
qa = QuotaAvailability()
|
||||
qa.queue(*[k for k, v in self._quota_diff.items() if v > 0])
|
||||
qa.compute(now_dt=self.now_dt)
|
||||
for quota, count in self._quota_diff.items():
|
||||
if count <= 0:
|
||||
quotas_ok[quota] = 0
|
||||
break
|
||||
avail = qa.results[quota]
|
||||
if avail[1] is not None and avail[1] < count:
|
||||
quotas_ok[quota] = min(count, avail[1])
|
||||
else:
|
||||
quotas_ok[quota] = count
|
||||
return quotas_ok
|
||||
|
||||
def _get_voucher_availability(self):
|
||||
vouchers_ok = {}
|
||||
self._voucher_depend_on_cart = set()
|
||||
for voucher, count in self._voucher_use_diff.items():
|
||||
voucher.refresh_from_db()
|
||||
|
||||
if voucher.valid_until is not None and voucher.valid_until < self.now_dt:
|
||||
raise CartError(error_messages['voucher_expired'])
|
||||
|
||||
redeemed_in_carts = CartPosition.objects.filter(
|
||||
Q(voucher=voucher) & Q(event=self.event) &
|
||||
Q(expires__gte=self.now_dt)
|
||||
).exclude(pk__in=[
|
||||
vouchers_ok, self._voucher_depend_on_cart = _get_voucher_availability(
|
||||
self.event, self._voucher_use_diff, self.now_dt,
|
||||
exclude_position_ids=[
|
||||
op.position.id for op in self._operations if isinstance(op, self.ExtendOperation)
|
||||
])
|
||||
cart_count = redeemed_in_carts.count()
|
||||
v_avail = voucher.max_usages - voucher.redeemed - cart_count
|
||||
if cart_count > 0:
|
||||
self._voucher_depend_on_cart.add(voucher)
|
||||
vouchers_ok[voucher] = v_avail
|
||||
|
||||
]
|
||||
)
|
||||
return vouchers_ok
|
||||
|
||||
def _check_min_max_per_product(self):
|
||||
@@ -906,9 +929,44 @@ class CartManager:
|
||||
)
|
||||
return err
|
||||
|
||||
def _check_min_per_voucher(self):
|
||||
vouchers = Counter()
|
||||
for p in self.positions:
|
||||
vouchers[p.voucher] += 1
|
||||
for op in self._operations:
|
||||
if isinstance(op, self.AddOperation):
|
||||
vouchers[op.voucher] += op.count
|
||||
elif isinstance(op, self.RemoveOperation):
|
||||
vouchers[op.position.voucher] -= 1
|
||||
|
||||
err = None
|
||||
for voucher, count in vouchers.items():
|
||||
if not voucher or count == 0:
|
||||
continue
|
||||
if count < voucher.min_usages_remaining:
|
||||
self._operations = [o for o in self._operations if not (
|
||||
isinstance(o, self.AddOperation) and o.voucher.pk == voucher.pk
|
||||
)]
|
||||
removals = [o.position.pk for o in self._operations if isinstance(o, self.RemoveOperation)]
|
||||
for p in self.positions:
|
||||
if p.voucher_id == voucher.pk and p.pk not in removals:
|
||||
self._operations.append(self.RemoveOperation(position=p))
|
||||
err = _(error_messages['voucher_min_usages_removed']) % {
|
||||
'voucher': voucher.code,
|
||||
'number': voucher.min_usages_remaining,
|
||||
}
|
||||
if not err:
|
||||
raise CartError(
|
||||
_(error_messages['voucher_min_usages']) % {
|
||||
'voucher': voucher.code,
|
||||
'number': voucher.min_usages_remaining,
|
||||
}
|
||||
)
|
||||
return err
|
||||
|
||||
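The counting logic in _check_min_per_voucher boils down to: tally voucher usages across existing positions plus pending add/remove operations, and flag any voucher that is used more than zero but fewer than min_usages_remaining times. A stripped-down sketch of that rule with plain data, independent of the CartManager operation classes; the function and its inputs are illustrative.

from collections import Counter

def vouchers_below_minimum(position_vouchers, min_usages_remaining):
    """position_vouchers: iterable of voucher codes (one per cart position, None allowed),
    min_usages_remaining: dict mapping voucher code -> required minimum."""
    counts = Counter(v for v in position_vouchers if v)
    return [
        code for code, cnt in counts.items()
        if 0 < cnt < min_usages_remaining.get(code, 1)
    ]

# vouchers_below_minimum(["GROUP10", "GROUP10", None], {"GROUP10": 5}) -> ["GROUP10"]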
def _perform_operations(self):
|
||||
vouchers_ok = self._get_voucher_availability()
|
||||
quotas_ok = self._get_quota_availability()
|
||||
quotas_ok = _get_quota_availability(self._quota_diff, self.now_dt)
|
||||
err = None
|
||||
new_cart_positions = []
|
||||
|
||||
@@ -1162,6 +1220,7 @@ class CartManager:
|
||||
|
||||
err = self._delete_out_of_timeframe()
|
||||
err = self.extend_expired_positions() or err
|
||||
err = err or self._check_min_per_voucher()
|
||||
|
||||
lockfn = NoLockManager
|
||||
if self._require_locking():
|
||||
|
||||
@@ -28,6 +28,7 @@ from django.utils.timezone import now
|
||||
from django_scopes import scopes_disabled
|
||||
|
||||
from pretix.base.models import CachedCombinedTicket, CachedTicket
|
||||
from pretix.base.models.customers import CustomerSSOGrant
|
||||
|
||||
from ..models import CachedFile, CartPosition, InvoiceAddress
|
||||
from ..signals import periodic_task
|
||||
@@ -68,3 +69,9 @@ def clean_cached_tickets(sender, **kwargs):
|
||||
@scopes_disabled()
|
||||
def clearsessions(sender, **kwargs):
|
||||
call_command('clearsessions')
|
||||
|
||||
|
||||
@receiver(signal=periodic_task)
|
||||
@scopes_disabled()
|
||||
def clear_oidc_data(sender, **kwargs):
|
||||
CustomerSSOGrant.objects.filter(expires__lt=now() - timedelta(days=14)).delete()
|
||||
|
||||
@@ -26,6 +26,7 @@ from django.core.files.base import ContentFile
|
||||
from django.utils.timezone import override
|
||||
from django.utils.translation import gettext
|
||||
|
||||
from pretix.base.exporter import OrganizerLevelExportMixin
|
||||
from pretix.base.i18n import LazyLocaleException, language
|
||||
from pretix.base.models import (
|
||||
CachedFile, Device, Event, Organizer, TeamAPIToken, User, cachedfile_name,
|
||||
@@ -66,8 +67,8 @@ def export(self, event: Event, fileid: str, provider: str, form_data: Dict[str,
|
||||
gettext('Your export did not contain any data.')
|
||||
)
|
||||
file.filename, file.type, data = d
|
||||
file.file.save(cachedfile_name(file, file.filename), ContentFile(data))
|
||||
file.save()
|
||||
f = ContentFile(data)
|
||||
file.file.save(cachedfile_name(file, file.filename), f)
|
||||
return file.pk
|
||||
|
||||
|
||||
@@ -101,9 +102,9 @@ def multiexport(self, organizer: Organizer, user: User, device: int, token: int,
|
||||
timezone = e.settings.timezone
|
||||
region = e.settings.region
|
||||
else:
|
||||
locale = settings.LANGUAGE_CODE
|
||||
timezone = settings.TIME_ZONE
|
||||
region = None
|
||||
locale = organizer.settings.locale or settings.LANGUAGE_CODE
|
||||
timezone = organizer.settings.timezone or settings.TIME_ZONE
|
||||
region = organizer.settings.region
|
||||
with language(locale, region), override(timezone):
|
||||
if form_data.get('events') is not None:
|
||||
if isinstance(form_data['events'][0], str):
|
||||
@@ -119,12 +120,21 @@ def multiexport(self, organizer: Organizer, user: User, device: int, token: int,
|
||||
continue
|
||||
ex = response(events, organizer, set_progress)
|
||||
if ex.identifier == provider:
|
||||
if (
|
||||
isinstance(ex, OrganizerLevelExportMixin) and
|
||||
not staff_session and
|
||||
not (device or token or user).has_organizer_permission(organizer, ex.organizer_required_permission)
|
||||
):
|
||||
raise ExportError(
|
||||
gettext('You do not have sufficient permission to perform this export.')
|
||||
)
|
||||
|
||||
d = ex.render(form_data)
|
||||
if d is None:
|
||||
raise ExportError(
|
||||
gettext('Your export did not contain any data.')
|
||||
)
|
||||
file.filename, file.type, data = d
|
||||
file.file.save(cachedfile_name(file, file.filename), ContentFile(data))
|
||||
file.save()
|
||||
f = ContentFile(data)
|
||||
file.file.save(cachedfile_name(file, file.filename), f)
|
||||
return file.pk
|
||||
|
||||
@@ -77,7 +77,7 @@ class LockTimeoutException(Exception):
     pass
 
 
-class LockReleaseException(Exception):
+class LockReleaseException(LockTimeoutException):
     pass
 
 
@@ -180,5 +180,5 @@ def release_event_redis(event):
         lock.release()
     except RedisError:
         logger.exception('Error releasing an event lock')
-        raise LockTimeoutException()
+        raise LockReleaseException()
     event._lock = None
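Making LockReleaseException a subclass of LockTimeoutException means existing callers that only catch LockTimeoutException keep working when a Redis lock fails on release, while new code can still distinguish the two. A minimal sketch of that behaviour; the exception names are taken from the hunk, the calling code is hypothetical.

class LockTimeoutException(Exception):
    pass


class LockReleaseException(LockTimeoutException):
    pass


def existing_caller():
    try:
        raise LockReleaseException()
    except LockTimeoutException:
        # Pre-existing handlers still catch the new, more specific error.
        return "handled"

assert existing_caller() == "handled"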
@@ -121,7 +121,7 @@ def mail(email: Union[str, Sequence[str]], subject: str, template: Union[str, La
|
||||
:param order: The order this email is related to (optional). If set, this will be used to include a link to the
|
||||
order below the email.
|
||||
|
||||
:param order: The order position this email is related to (optional). If set, this will be used to include a link
|
||||
:param position: The order position this email is related to (optional). If set, this will be used to include a link
|
||||
to the order position instead of the order below the email.
|
||||
|
||||
:param headers: A dict of custom mail headers to add to the mail
|
||||
@@ -141,7 +141,7 @@ def mail(email: Union[str, Sequence[str]], subject: str, template: Union[str, La
|
||||
|
||||
:param user: The user this email is sent to
|
||||
|
||||
:param customer: The user this email is sent to
|
||||
:param customer: The customer this email is sent to
|
||||
|
||||
:param attach_cached_files: A list of cached files to attach to this email.
|
||||
|
||||
@@ -502,11 +502,11 @@ def mail_send_task(self, *args, to: List[str], subject: str, body: str, html: st
                 rc.expire(redis_key, 300)
 
             max_retries = 10
-            retry_after = 30 + cnt * 10
+            retry_after = min(30 + cnt * 10, 1800)
         else:
             # Most likely some other kind of temporary failure, retry again (but pretty soon)
             max_retries = 5
-            retry_after = 2 ** (self.request.retries * 3) # max is 2 ** (4*3) = 4096 seconds = 68 minutes
+            retry_after = [10, 30, 60, 300, 900, 900][self.request.retries]
 
         try:
             self.retry(max_retries=max_retries, countdown=retry_after)
@@ -542,7 +542,7 @@ def mail_send_task(self, *args, to: List[str], subject: str, body: str, html: st
         if not any(c >= 500 for c in smtp_codes):
             # Not a permanent failure (mailbox full, service unavailable), retry later, but with large intervals
             try:
-                self.retry(max_retries=5, countdown=2 ** (self.request.retries * 3) * 4) # max is 2 ** (4*3) * 4 = 16384 seconds = approx 4.5 hours
+                self.retry(max_retries=5, countdown=[60, 300, 600, 1200, 1800, 1800][self.request.retries])
             except MaxRetriesExceededError:
                 # ignore and go on with logging the error
                 pass
@@ -567,7 +567,7 @@ def mail_send_task(self, *args, to: List[str], subject: str, body: str, html: st
     except Exception as e:
         if isinstance(e, (smtplib.SMTPServerDisconnected, smtplib.SMTPConnectError, ssl.SSLError, OSError)):
            try:
-                self.retry(max_retries=5, countdown=2 ** (self.request.retries * 3)) # max is 2 ** (4*3) = 4096 seconds = 68 minutes
+                self.retry(max_retries=5, countdown=[10, 30, 60, 300, 900, 900][self.request.retries])
             except MaxRetriesExceededError:
                 if log_target:
                     log_target.log_action(
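The retry changes replace unbounded exponential backoff with explicit, capped schedules indexed by Celery's self.request.retries. A short sketch that lists the resulting wait times per branch, assuming the retry counter runs from 0 up to max_retries; the schedule lists are copied from the hunks above, the labels are illustrative.

# Capped schedules from the diff, indexed by the current retry count.
generic_failure = [10, 30, 60, 300, 900, 900]            # max_retries = 5
mailbox_soft_error = [60, 300, 600, 1200, 1800, 1800]    # max_retries = 5
connection_error = [10, 30, 60, 300, 900, 900]           # max_retries = 5
# The rate-limit branch instead computes min(30 + cnt * 10, 1800) from a Redis counter.

for name, schedule in [("generic", generic_failure),
                       ("mailbox", mailbox_soft_error),
                       ("connection", connection_error)]:
    total = sum(schedule)
    print(f"{name}: waits {schedule} -> roughly {total} s (~{total / 60:.0f} min) of cumulative delay")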
@@ -36,6 +36,7 @@ from pretix.base.models import (
|
||||
from pretix.base.models.orders import Transaction
|
||||
from pretix.base.orderimport import get_all_columns
|
||||
from pretix.base.services.invoices import generate_invoice, invoice_qualified
|
||||
from pretix.base.services.locking import NoLockManager
|
||||
from pretix.base.services.tasks import ProfiledEventTask
|
||||
from pretix.base.signals import order_paid, order_placed
|
||||
from pretix.celery_app import app
|
||||
@@ -85,9 +86,9 @@ def setif(record, obj, attr, setting):
|
||||
|
||||
@app.task(base=ProfiledEventTask, throws=(DataImportError,))
|
||||
def import_orders(event: Event, fileid: str, settings: dict, locale: str, user) -> None:
|
||||
# TODO: quotacheck?
|
||||
cf = CachedFile.objects.get(id=fileid)
|
||||
user = User.objects.get(pk=user)
|
||||
seats_used = False
|
||||
with language(locale, event.settings.region):
|
||||
cols = get_all_columns(event)
|
||||
parsed = parse_csv(cf.file)
|
||||
@@ -133,6 +134,8 @@ def import_orders(event: Event, fileid: str, settings: dict, locale: str, user)
|
||||
position = OrderPosition(positionid=len(order._positions) + 1)
|
||||
position.attendee_name_parts = {'_scheme': event.settings.name_scheme}
|
||||
position.meta_info = {}
|
||||
if position.seat is not None:
|
||||
seats_used = True
|
||||
order._positions.append(position)
|
||||
position.assign_pseudonymization_id()
|
||||
|
||||
@@ -144,9 +147,12 @@ def import_orders(event: Event, fileid: str, settings: dict, locale: str, user)
|
||||
_('Invalid data in row {row}: {message}').format(row=i, message=str(e))
|
||||
)
|
||||
|
||||
# quota check?
|
||||
with event.lock():
|
||||
with transaction.atomic():
|
||||
# We don't support vouchers, quotas, or memberships here, so we only need to lock if seats
|
||||
# are in use
|
||||
lockfn = event.lock if seats_used else NoLockManager
|
||||
|
||||
try:
|
||||
with lockfn(), transaction.atomic():
|
||||
save_transactions = []
|
||||
for o in orders:
|
||||
o.total = sum([c.price for c in o._positions]) # currently no support for fees
|
||||
@@ -204,4 +210,7 @@ def import_orders(event: Event, fileid: str, settings: dict, locale: str, user)
|
||||
) and not o.invoices.last()
|
||||
if gen_invoice:
|
||||
generate_invoice(o, trigger_pdf=True)
|
||||
except DataImportError:
|
||||
raise ValidationError(_('We were not able to process your request completely as the server was too busy. '
|
||||
'Please try again.'))
|
||||
cf.delete()
|
||||
|
||||
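import_orders now only takes the event lock when seats are involved; otherwise it substitutes NoLockManager, which behaves as a do-nothing context manager so the "with lockfn(), transaction.atomic():" shape stays the same. NoLockManager itself is not shown in this diff, so the sketch below is an assumption of the general shape such a null lock takes, not pretix's actual implementation.

from contextlib import contextmanager

@contextmanager
def no_lock_manager():
    # Hypothetical stand-in for NoLockManager: same "with lockfn():" shape,
    # but no locking side effects.
    yield

def run_import(event, seats_used):
    lockfn = event.lock if seats_used else no_lock_manager
    with lockfn():
        pass  # ... create orders inside a transaction, as in the hunk above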
@@ -113,8 +113,10 @@ error_messages = {
|
||||
"surplus items from your cart."),
|
||||
'busy': _('We were not able to process your request completely as the '
|
||||
'server was too busy. Please try again.'),
|
||||
'not_started': _('The presale period for this event has not yet started.'),
|
||||
'ended': _('The presale period has ended.'),
|
||||
'not_started': _('The booking period for this event has not yet started.'),
|
||||
'ended': _('The booking period has ended.'),
|
||||
'voucher_min_usages': _('The voucher code "%(voucher)s" can only be used if you select at least %(number)s '
|
||||
'matching products.'),
|
||||
'voucher_invalid': _('The voucher code used for one of the items in your cart is not known in our database.'),
|
||||
'voucher_redeemed': _('The voucher code used for one of the items in your cart has already been used the maximum '
|
||||
'number of times allowed. We removed this item from your cart.'),
|
||||
@@ -125,9 +127,9 @@ error_messages = {
|
||||
'voucher_invalid_item': _('The voucher code used for one of the items in your cart is not valid for this item. We '
|
||||
'removed this item from your cart.'),
|
||||
'voucher_required': _('You need a valid voucher code to order one of the products.'),
|
||||
'some_subevent_not_started': _('The presale period for one of the events in your cart has not yet started. The '
|
||||
'some_subevent_not_started': _('The booking period for one of the events in your cart has not yet started. The '
|
||||
'affected positions have been removed from your cart.'),
|
||||
'some_subevent_ended': _('The presale period for one of the events in your cart has ended. The affected '
|
||||
'some_subevent_ended': _('The booking period for one of the events in your cart has ended. The affected '
|
||||
'positions have been removed from your cart.'),
|
||||
'seat_invalid': _('One of the seats in your order was invalid, we removed the position from your cart.'),
|
||||
'seat_unavailable': _('One of the seats in your order has been taken in the meantime, we removed the position from your cart.'),
|
||||
@@ -569,6 +571,7 @@ def _check_positions(event: Event, now_dt: datetime, positions: List[CartPositio
|
||||
products_seen = Counter()
|
||||
q_avail = Counter()
|
||||
v_avail = Counter()
|
||||
v_usages = Counter()
|
||||
v_budget = {}
|
||||
deleted_positions = set()
|
||||
seats_seen = set()
|
||||
@@ -606,6 +609,7 @@ def _check_positions(event: Event, now_dt: datetime, positions: List[CartPositio
|
||||
break
|
||||
|
||||
if cp.voucher:
|
||||
v_usages[cp.voucher] += 1
|
||||
if cp.voucher not in v_avail:
|
||||
redeemed_in_carts = CartPosition.objects.filter(
|
||||
Q(voucher=cp.voucher) & Q(event=event) & Q(expires__gte=now_dt)
|
||||
@@ -717,6 +721,13 @@ def _check_positions(event: Event, now_dt: datetime, positions: List[CartPositio
|
||||
# Sorry, can't let you keep that!
|
||||
delete(cp)
|
||||
|
||||
for voucher, cnt in v_usages.items():
|
||||
if 0 < cnt < voucher.min_usages_remaining:
|
||||
raise OrderError(error_messages['voucher_min_usages'], {
|
||||
'voucher': voucher.code,
|
||||
'number': voucher.min_usages_remaining,
|
||||
})
|
||||
|
||||
# Check prices
|
||||
sorted_positions = [cp for cp in sorted_positions if cp.pk and cp.pk not in deleted_positions]
|
||||
old_total = sum(cp.price for cp in sorted_positions)
|
||||
@@ -1944,14 +1955,14 @@ class OrderChangeManager:
|
||||
'position': op.position.pk,
|
||||
'positionid': op.position.positionid,
|
||||
'addon_to': op.position.addon_to_id,
|
||||
'old_taxrule': op.position.tax_rule.pk if op.position.tax_rate else None,
|
||||
'old_taxrule': op.position.tax_rule.pk if op.position.tax_rule else None,
|
||||
'new_taxrule': op.tax_rule.pk
|
||||
})
|
||||
elif isinstance(op.position, OrderFee):
|
||||
self.order.log_action('pretix.event.order.changed.tax_rule', user=self.user, auth=self.auth, data={
|
||||
'fee': op.position.pk,
|
||||
'fee_type': op.position.fee_type,
|
||||
'old_taxrule': op.position.tax_rule.pk if op.position.tax_rate else None,
|
||||
'old_taxrule': op.position.tax_rule.pk if op.position.tax_rule else None,
|
||||
'new_taxrule': op.tax_rule.pk
|
||||
})
|
||||
op.position._calculate_tax(op.tax_rule)
|
||||
|
||||
@@ -112,7 +112,7 @@ def dictsum(*dicts) -> dict:
|
||||
|
||||
def order_overview(
|
||||
event: Event, subevent: SubEvent=None, date_filter='', date_from=None, date_until=None, fees=False,
|
||||
admission_only=False
|
||||
admission_only=False, base_qs=None
|
||||
) -> Tuple[List[Tuple[ItemCategory, List[Item]]], Dict[str, Tuple[Decimal, Decimal]]]:
|
||||
items = event.items.all().select_related(
|
||||
'category', # for re-grouping
|
||||
@@ -120,7 +120,7 @@ def order_overview(
|
||||
'variations'
|
||||
).order_by('category__position', 'category_id', 'position', 'name')
|
||||
|
||||
qs = OrderPosition.all
|
||||
qs = OrderPosition.all if base_qs is None else base_qs
|
||||
if isinstance(subevent, (list, QuerySet)):
|
||||
qs = qs.filter(subevent__in=subevent)
|
||||
elif subevent:
|
||||
|
||||
@@ -22,9 +22,9 @@
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from urllib.error import HTTPError
|
||||
from xml.etree import ElementTree
|
||||
|
||||
import vat_moss.errors
|
||||
import requests
|
||||
import vat_moss.id
|
||||
from django.conf import settings
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
@@ -35,6 +35,16 @@ from zeep.exceptions import Fault
|
||||
from pretix.base.models.tax import cc_to_vat_prefix, is_eu_country
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
error_messages = {
|
||||
'unavailable': _(
|
||||
'Your VAT ID could not be checked, as the VAT checking service of '
|
||||
'your country is currently not available. We will therefore '
|
||||
'need to charge VAT on your invoice. You can get the tax amount '
|
||||
'back via the VAT reimbursement process.'
|
||||
),
|
||||
'invalid': _('This VAT ID is not valid. Please re-check your input.'),
|
||||
'country_mismatch': _('Your VAT ID does not match the selected country.'),
|
||||
}
|
||||
|
||||
|
||||
class VATIDError(Exception):
|
||||
@@ -50,33 +60,107 @@ class VATIDTemporaryError(VATIDError):
|
||||
pass
|
||||
|
||||
|
||||
def _validate_vat_id_EU(vat_id, country_code):
|
||||
if vat_id[:2] != cc_to_vat_prefix(country_code):
|
||||
raise VATIDFinalError(_('Your VAT ID does not match the selected country.'))
|
||||
def _validate_vat_id_NO(vat_id, country_code):
|
||||
# Inspired by vat_moss library
|
||||
vat_id = vat_moss.id.normalize(vat_id)
|
||||
|
||||
if not vat_id or len(vat_id) < 3 or not re.match('^\\d{9}MVA$', vat_id[2:]):
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
|
||||
organization_number = vat_id[2:].replace('MVA', '')
|
||||
validation_url = 'https://data.brreg.no/enhetsregisteret/api/enheter/%s' % organization_number
|
||||
|
||||
try:
|
||||
result = vat_moss.id.validate(vat_id)
|
||||
if result:
|
||||
country_code, normalized_id, company_name = result
|
||||
return normalized_id
|
||||
except (vat_moss.errors.InvalidError, ValueError):
|
||||
raise VATIDFinalError(_('This VAT ID is not valid. Please re-check your input.'))
|
||||
except vat_moss.errors.WebServiceUnavailableError:
|
||||
response = requests.get(validation_url, timeout=10)
|
||||
if response.status_code in (404, 400):
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
|
||||
response.raise_for_status()
|
||||
|
||||
info = response.json()
|
||||
# This should never happen, but keeping it in case the API is changed
|
||||
if 'organisasjonsnummer' not in info or info['organisasjonsnummer'] != organization_number:
|
||||
logger.warning(
|
||||
'VAT ID checking failed for Norway due to missing or mismatching organisasjonsnummer in response'
|
||||
)
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
except requests.RequestException:
|
||||
logger.exception('VAT ID checking failed for country {}'.format(country_code))
|
||||
raise VATIDTemporaryError(_(
|
||||
'Your VAT ID could not be checked, as the VAT checking service of '
|
||||
'your country is currently not available. We will therefore '
|
||||
'need to charge VAT on your invoice. You can get the tax amount '
|
||||
'back via the VAT reimbursement process.'
|
||||
))
|
||||
except (vat_moss.errors.WebServiceError, HTTPError):
|
||||
raise VATIDTemporaryError(error_messages['unavailable'])
|
||||
else:
|
||||
return vat_id
|
||||
|
||||
|
||||
def _validate_vat_id_EU(vat_id, country_code):
|
||||
# Inspired by vat_moss library
|
||||
try:
|
||||
vat_id = vat_moss.id.normalize(vat_id)
|
||||
except ValueError:
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
|
||||
if not vat_id or len(vat_id) < 3:
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
|
||||
number = vat_id[2:]
|
||||
|
||||
if vat_id[:2] != cc_to_vat_prefix(country_code):
|
||||
raise VATIDFinalError(error_messages['country_mismatch'])
|
||||
|
||||
if not re.match(vat_moss.id.ID_PATTERNS[cc_to_vat_prefix(country_code)]['regex'], number):
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
|
||||
# We are relying on the country code of the normalized VAT-ID and not the user/InvoiceAddress-provided
|
||||
# VAT-ID, since Django and the EU have different ideas of which country is using which country code.
|
||||
# For example: For django and most people, Greece is GR. However, the VAT-service expects EL.
|
||||
payload = """
|
||||
<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:urn="urn:ec.europa.eu:taxud:vies:services:checkVat:types">
|
||||
<soapenv:Header/>
|
||||
<soapenv:Body>
|
||||
<urn:checkVat>
|
||||
<urn:countryCode>%s</urn:countryCode>
|
||||
<urn:vatNumber>%s</urn:vatNumber>
|
||||
</urn:checkVat>
|
||||
</soapenv:Body>
|
||||
</soapenv:Envelope>
|
||||
""".strip() % (vat_id[:2], number)
|
||||
|
||||
try:
|
||||
response = requests.post(
|
||||
'https://ec.europa.eu/taxation_customs/vies/services/checkVatService',
|
||||
data=payload,
|
||||
timeout=10,
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
return_xml = response.text
|
||||
|
||||
try:
|
||||
envelope = ElementTree.fromstring(return_xml)
|
||||
except ElementTree.ParseError:
|
||||
logger.error(
|
||||
f'VAT ID checking failed for {country_code} due to XML parse error'
|
||||
)
|
||||
raise VATIDTemporaryError(error_messages['unavailable'])
|
||||
|
||||
namespaces = {
|
||||
'soap': 'http://schemas.xmlsoap.org/soap/envelope/',
|
||||
'vat': 'urn:ec.europa.eu:taxud:vies:services:checkVat:types'
|
||||
}
|
||||
valid_elements = envelope.findall('./soap:Body/vat:checkVatResponse/vat:valid', namespaces)
|
||||
if not valid_elements:
|
||||
logger.error(
|
||||
f'VAT ID checking failed for {country_code} due to missing <valid> tag'
|
||||
)
|
||||
raise VATIDTemporaryError(error_messages['unavailable'])
|
||||
|
||||
if valid_elements[0].text.lower() != 'true':
|
||||
raise VATIDFinalError(error_messages['invalid'])
|
||||
|
||||
except requests.RequestException:
|
||||
logger.exception('VAT ID checking failed for country {}'.format(country_code))
|
||||
raise VATIDTemporaryError(_(
|
||||
'Your VAT ID could not be checked, as the VAT checking service of '
|
||||
'your country returned an incorrect result. We will therefore '
|
||||
'need to charge VAT on your invoice. Please contact support to '
|
||||
'resolve this manually.'
|
||||
))
|
||||
raise VATIDTemporaryError(error_messages['unavailable'])
|
||||
else:
|
||||
return vat_id
|
||||
|
||||
|
||||
def _validate_vat_id_CH(vat_id, country_code):
|
||||
@@ -85,10 +169,13 @@ def _validate_vat_id_CH(vat_id, country_code):
|
||||
|
||||
vat_id = re.sub('[^A-Z0-9]', '', vat_id.replace('HR', '').replace('MWST', ''))
|
||||
try:
|
||||
transport = Transport(cache=SqliteCache(os.path.join(settings.CACHE_DIR, "validate_vat_id_ch_zeep_cache.db")))
|
||||
transport = Transport(
|
||||
cache=SqliteCache(os.path.join(settings.CACHE_DIR, "validate_vat_id_ch_zeep_cache.db")),
|
||||
timeout=10
|
||||
)
|
||||
client = Client(
|
||||
'https://www.uid-wse.admin.ch/V5.0/PublicServices.svc?wsdl',
|
||||
transport=transport
|
||||
transport=transport,
|
||||
)
|
||||
result = client.service.ValidateUID(uid=vat_id)
|
||||
except Fault as e:
|
||||
@@ -125,10 +212,14 @@ def _validate_vat_id_CH(vat_id, country_code):
|
||||
|
||||
|
||||
def validate_vat_id(vat_id, country_code):
|
||||
if not vat_id:
|
||||
return vat_id
|
||||
country_code = str(country_code)
|
||||
if is_eu_country(country_code):
|
||||
return _validate_vat_id_EU(vat_id, country_code)
|
||||
elif country_code == 'CH':
|
||||
return _validate_vat_id_CH(vat_id, country_code)
|
||||
elif country_code == 'NO':
|
||||
return _validate_vat_id_NO(vat_id, country_code)
|
||||
|
||||
raise VATIDTemporaryError(f'VAT ID should not be entered for country {country_code}')
|
||||
|
||||
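validate_vat_id now dispatches to country-specific checks (EU via the VIES SOAP service, CH via the UID web service, NO via data.brreg.no) and reports problems through VATIDFinalError versus VATIDTemporaryError. A hedged sketch of how calling code might treat the two error classes differently; the wrapper and the sample VAT ID are made up.

def check_invoice_address_vat(vat_id, country_code):
    # Illustrative caller: a final error invalidates the input, a temporary
    # error falls back to charging VAT, mirroring the error messages above.
    try:
        return validate_vat_id(vat_id, country_code), True
    except VATIDFinalError:
        raise  # the user has to fix the VAT ID
    except VATIDTemporaryError:
        return vat_id, False  # keep the ID, but do not treat it as verified

# check_invoice_address_vat("NO999999999MVA", "NO")  # made-up number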
@@ -57,6 +57,7 @@ from django_countries.fields import Country
|
||||
from hierarkey.models import GlobalSettingsBase, Hierarkey
|
||||
from i18nfield.forms import I18nFormField, I18nTextarea, I18nTextInput
|
||||
from i18nfield.strings import LazyI18nString
|
||||
from phonenumbers import PhoneNumber, parse
|
||||
from rest_framework import serializers
|
||||
|
||||
from pretix.api.serializers.fields import (
|
||||
@@ -145,6 +146,17 @@ DEFAULTS = {
|
||||
"advanced features like memberships.")
|
||||
)
|
||||
},
|
||||
'customer_accounts_native': {
|
||||
'default': 'True',
|
||||
'type': bool,
|
||||
'form_class': forms.BooleanField,
|
||||
'serializer_class': serializers.BooleanField,
|
||||
'form_kwargs': dict(
|
||||
label=_("Allow customers to log in with email address and password"),
|
||||
help_text=_("If disabled, you will need to connect one or more single-sign-on providers."),
|
||||
widget=forms.CheckboxInput(attrs={'data-display-dependency': '#id_settings-customer_accounts'}),
|
||||
)
|
||||
},
|
||||
'customer_accounts_link_by_email': {
|
||||
'default': 'False',
|
||||
'type': bool,
|
||||
@@ -2744,6 +2756,11 @@ PERSON_NAME_TITLE_GROUPS = OrderedDict([
|
||||
'Dr.',
|
||||
'Prof.',
|
||||
'Prof. Dr.',
|
||||
))),
|
||||
('dr_prof_he', ('Dr., Prof., H.E.', (
|
||||
'Dr.',
|
||||
'Prof.',
|
||||
'H.E.',
|
||||
)))
|
||||
])
|
||||
|
||||
@@ -3030,6 +3047,7 @@ settings_hierarkey.add_type(LazyI18nStringList,
|
||||
settings_hierarkey.add_type(RelativeDateWrapper,
|
||||
serialize=lambda rdw: rdw.to_string(),
|
||||
unserialize=lambda s: RelativeDateWrapper.from_string(s))
|
||||
settings_hierarkey.add_type(PhoneNumber, lambda pn: pn.as_international, lambda s: parse(s) if s else None)
|
||||
|
||||
|
||||
@settings_hierarkey.set_global(cache_namespace='global')
|
||||
|
||||
@@ -64,6 +64,10 @@ class EventPluginSignal(django.dispatch.Signal):
|
||||
# Send to all events!
|
||||
return True
|
||||
|
||||
# If sentry packed this in a wrapper, unpack that
|
||||
if "sentry" in receiver.__module__:
|
||||
receiver = receiver.__wrapped__
|
||||
|
||||
# Find the Django application this belongs to
|
||||
searchpath = receiver.__module__
|
||||
core_module = any([searchpath.startswith(cm) for cm in settings.CORE_MODULES])
|
||||
|
||||
src/pretix/base/templates/400_hostname.html (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
{% extends "error.html" %}
|
||||
{% load i18n %}
|
||||
{% load static %}
|
||||
{% block title %}{% trans "Unknown host" %}{% endblock %}
|
||||
{% block content %}
|
||||
<i class="fa fa-question-circle-o fa-fw big-icon"></i>
|
||||
<div class="error-details">
|
||||
<h1>{% trans "Unknown host" %}</h1>
|
||||
<p>
|
||||
{% blocktrans trimmed with host=header_host %}
|
||||
Your browser told us that you want to access "{{ header_host }}". Unfortunately, we don't have
|
||||
any content for this domain.
|
||||
{% endblocktrans %}
|
||||
</p>
|
||||
{% if is_fresh_install %}
|
||||
<p>
|
||||
{% blocktrans trimmed %}
|
||||
It looks like this is a fresh installation of pretix. This error message is most likely caused by
either your configuration containing the wrong site URL or your reverse proxy sending the wrong header.
|
||||
{% endblocktrans %}
|
||||
</p>
|
||||
<dl>
|
||||
<dt>{% trans "Expected host according to configuration" %}</dt>
|
||||
<dd><code>{{ site_host }}</code></dd>
|
||||
<dt>{% trans "Received headers" %}</dt>
|
||||
<dd>
|
||||
<code>Host: {{ request.headers.Host }}</code>
|
||||
{% if xfh %}
|
||||
<br>
|
||||
<code>X-Forwarded-For: {{ xfh }}</code>
|
||||
{% if not settings.USE_X_FORWARDED_HOST %}({% trans "ignored" %}){% endif %}
|
||||
{% endif %}
|
||||
</dd>
|
||||
<dt>{% trans "Derived host from headers" %}</dt>
|
||||
<dd><code>{{ header_host }}</code></dd>
|
||||
</dl>
|
||||
{% else %}
|
||||
<p>
|
||||
{% blocktrans trimmed %}
|
||||
If you just configured this as a domain for your ticket shop, you now need to set this up as a "custom domain"
|
||||
in your organizer account.
|
||||
{% endblocktrans %}
|
||||
</p>
|
||||
{% endif %}
|
||||
<p class="links">
|
||||
<a id='goback' href='#'>{% trans "Take a step back" %}</a>
|
||||
· <a id='reload' href='#'>{% trans "Try again" %}</a>
|
||||
</p>
|
||||
<img src="{% static "pretixbase/img/pretix-logo.svg" %}" class="logo"/>
|
||||
</div>
|
||||
{% endblock %}
|
||||
@@ -199,7 +199,7 @@
|
||||
<tr>
|
||||
<td style="line-height: 0">
|
||||
<img class="wide" src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAlgAAAA8CAAAAACf95tlAAAAAXNCSVQI5gpbmQAAAAlwSFlzAAAOxAAADsQBlSsOGwAAABl0RVh0U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAAAG/SURBVHja7dvRboMwDIXhvf/DLiQQAwkku9+qDgq2hPyfN6j1qTlx06/uMunbLMnnhL98fuzRDtYILEeZ7GBNwAIWsIB1LdkOVgaWo4gdLAGWo6x2sFZgOUq1g1WB5SjNDlYDlqcEK1dDB5anmK3eE7C4FnIpBNbVFLo7sB7d3huwKFlULGA9pWQJsJxls4G1ActbooWr2IHlLbMFrBlY7rJbwNqBxb2QZ8nAuiUGO9ICLOo71R1YN0X9td8KLJ8ZeDEDrAd+Za3A4mLIz4TAujGqv+tUYPmN4v8LcweW3zS1t++hActzCrtRYD3pMJQOLOeJ7NyBpZFdoWaFDVjuJ6BRswpTBZbCAn5hpsDq/fbHpDMTBZbC1TAzT2ApyMIVsDROQ2GWwFJo8PR2YP3eOtywzwrsGYD1J9vlHXzcmSKw7q/wU2OEwHpdtALHILA00jJfV8DSaVofvYOPlckB658sp/8VNrBkANahqnXqfhhXJgasgymHD8REZwfWmezzga+tQdhcAet0qry1FYV3osD6dP1QJL3YbYUkhfUCsK6einWRPI0pxjROWZbK+QcsAiwCLEKARYBFgEXIu/wAYbjtwujw8KwAAAAASUVORK5CYII="
|
||||
style="max-height: 60px;">
|
||||
style="max-height: 60px;" alt="">
|
||||
</td>
|
||||
</tr>
|
||||
<!--<![endif]-->
|
||||
@@ -233,7 +233,7 @@
|
||||
<td style="line-height: 0">
|
||||
<br>
|
||||
<img class="wide" src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAlgAAAA8CAYAAAC6nMS5AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAAOxAAADsQBlSsOGwAAABl0RVh0U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAAAPnSURBVHic7d3dbuJIEAbQsg2Ecd7/TQeDf3svVuFmdjJLxsGm+xwJKXcpqS76U3VTVCmlFAAArKbeugAAgNwIWAAAKxOwAABWJmABAKxMwAIAWNlh6wIAIiKWZYllWWKe5/vfEREppfsnIqKqqvsnIqKu66jrOpqmuf8NsDUBC3i6lFJM0xTjOMY0TbEsS6y1MaaqqqjrOg6HQxyPxzgcDvcwBvAslT1YwDN8BKpxHGOe56f+76Zp4ng83gMXwHcTsIBvsyxLDMMQfd/fr/y2Vtd1nE6nOJ1O0TTN1uUAmRKwgNV9hKppmrYu5VOHwyHO53Mcj8etSwEyI2ABqxmGIW6329OvAP9WXddxPp/j7e1t61KATAhYwF/r+z5ut9turgG/StAC1iJgAV82z3N0Xbf7q8BHNU0Tbdt6EA98mYAFPCylFNfrNfq+37qUb3U6naJtW2segIcJWMBDhmGIrutW21u1d1VVRdu2cTqdti4FeCECFvC/lDK1+h3TLOARAhbwR/M8x+VyeblvB66taZp4f3+3Pwv4IwEL+NQ4jnG5XIq5EvyTqqri/f3d7izgUwIW8FvDMMTlctm6jF1q29Y6B+C3BCzgP91ut7her1uXsWvn8zl+/PixdRnADglYwC+6riv2Mfuj3t7eom3brcsAdqbeugBgX4Srx/R9H13XbV0GsDMCFnB3u92Eqy/4+KkggA8CFhAR/z5o9+bq60reEQb8SsAC7qsY+Dtd18U4jluXAeyAgAWFW5ZFuFqRhaxAhIAFxfv586cloitKKVnMCghYULKu60xbvsE8z96zQeEELCjUOI4eZX+jvu+9x4KCCVhQoI9rLL6Xq0Iol4AFBbperw7+J0gpuSqEQglYUJh5nl0NPlHf9zFN09ZlAE8mYEFh/KzL85liQXkELCiIaco2pmmKYRi2LgN4IgELCuL38rZjigVlEbCgEMMwxLIsW5dRrGVZTLGgIAIWFML0ant6AOUQsKAA4zja2L4D8zxbPgqFELCgACYn+6EXUAYBCzK3LItvDu7INE3ewkEBBCzInIfV+6MnkD8BCzLnMN8fPYH8CViQsXmePW7fIX2B/AlYkDGTkv3SG8ibgAUZ87h9v/QG8iZgQaZSSg7xHZumKVJKW5cBfBMBCzIlXO2fHkG+BCzIlMN7//QI8iVgQaYc3vunR5AvAQsyZQ3A/tnoDvkSsCBDKSUPqF/Asiz6BJkSsCBDplevQ68gTwIWZMjV0+vQK8iTgAUZMhV5HXoFeRKwIEPe9bwOvYI8CViQIYf269AryJOABQCwMgELMmQq8jr0CvIkYEGGHNqvQ68gT/8AETAn3pyLgvsAAAAASUVORK5CYII="
|
||||
style="max-height: 60px;">
|
||||
style="max-height: 60px;" alt="">
|
||||
</td>
|
||||
</tr>
|
||||
<!--<![endif]-->
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
<td>
|
||||
<div style="line-height: 18px;height: 18px;"> </div>
|
||||
<img class="wide" src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAlgAAAAEBAMAAACgm1xKAAABhGlDQ1BJQ0MgcHJvZmlsZQAAKJF9kT1Iw0AcxV/TSkUrDu0g4pChOlkQleKoVShChVArtOpgcukXNGlIUlwcBdeCgx+LVQcXZ10dXAVB8APExdVJ0UVK/F9aaBHjwXE/3t173L0DhEaFaVZgAtB020wnE2I2tyoGXxFCAP0IIy4zy5iTpBQ8x9c9fHy9i/Es73N/jgE1bzHAJxLPMsO0iTeI45u2wXmfOMJKskp8Tjxu0gWJH7mutPiNc9FlgWdGzEx6njhCLBa7WOliVjI14mniqKrplC9kW6xy3uKsVWqsfU/+wlBeX1nmOs0RJLGIJUgQoaCGMiqwEaNVJ8VCmvYTHv5h1y+RSyFXGYwcC6hCg+z6wf/gd7dWYWqylRRKAD0vjvMxCgR3gWbdcb6PHad5AvifgSu94682gJlP0usdLXoEDG4DF9cdTdkDLneAoSdDNmVX8tMUCgXg/Yy+KQeEb4G+tVZv7X2cPgAZ6ip1AxwcAmNFyl73eHdvd2//nmn39wNhNnKgJpT5BQAAAC1QTFRF7u7u7+/v8PDw8fHx8vLy9PT09fX19/f3+Pj4+fn5+vr6/Pz8/f39/v7+////BLnnfgAAAAlwSFlzAAAOxAAADsQBlSsOGwAAAAd0SU1FB+MMBAsUDD3bzUUAAABhSURBVDjLY2BAgLp3CNCAJO6HJH4ASZwXSfwxkjgnkvhzJHFWJPHXSOKMSOLvFJAk1iGJJyCJ5yGJL0AS10MSf4Akzo0k/hRJnB1J/CWSOAuS+FsG7GA0sEYDazSwBiSwAPzzGpfLqBMlAAAAAElFTkSuQmCC"
|
||||
style="max-height: 4px;">
|
||||
style="max-height: 4px;" alt="">
|
||||
<div style="line-height: 18px;height: 18px;"> </div>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
@@ -44,16 +44,6 @@ class BaseQuestionsViewMixin:
|
||||
form_class = BaseQuestionsForm
|
||||
all_optional = False
|
||||
|
||||
@staticmethod
|
||||
def _keyfunc(pos):
|
||||
# Sort addons after the item they are an addon to
|
||||
if isinstance(pos, OrderPosition):
|
||||
i = pos.addon_to.positionid if pos.addon_to else pos.positionid
|
||||
else:
|
||||
i = pos.addon_to.pk if pos.addon_to else pos.pk
|
||||
addon_penalty = 1 if pos.addon_to else 0
|
||||
return i, addon_penalty, pos.pk
|
||||
|
||||
@cached_property
|
||||
def _positions_for_questions(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
@@ -215,8 +215,16 @@ class AsyncFormView(AsyncMixin, FormView):
|
||||
expected_exceptions = (ValidationError,)
|
||||
task_base = ProfiledEventTask
|
||||
|
||||
def async_set_progress(self, percentage):
|
||||
if not self._task_self.request.called_directly:
|
||||
self._task_self.update_state(
|
||||
state='PROGRESS',
|
||||
meta={'value': percentage}
|
||||
)
|
||||
|
||||
def __init_subclass__(cls):
|
||||
def async_execute(self, *, request_path, query_string, form_kwargs, locale, tz, organizer=None, event=None, user=None, session_key=None):
|
||||
def async_execute(self, *, request_path, query_string, form_kwargs, locale, tz, url_kwargs=None, url_args=None,
|
||||
organizer=None, event=None, user=None, session_key=None):
|
||||
view_instance = cls()
|
||||
form_kwargs['data'] = QueryDict(form_kwargs['data'])
|
||||
req = RequestFactory().post(
|
||||
@@ -225,6 +233,8 @@ class AsyncFormView(AsyncMixin, FormView):
|
||||
content_type='application/x-www-form-urlencoded'
|
||||
)
|
||||
view_instance.request = req
|
||||
view_instance.kwargs = url_kwargs
|
||||
view_instance.args = url_args
|
||||
if event:
|
||||
view_instance.request.event = event
|
||||
view_instance.request.organizer = event.organizer
|
||||
@@ -237,6 +247,9 @@ class AsyncFormView(AsyncMixin, FormView):
|
||||
self.SessionStore = engine.SessionStore
|
||||
view_instance.request.session = self.SessionStore(session_key)
|
||||
|
||||
task_self = self
|
||||
view_instance._task_self = task_self
|
||||
|
||||
with translation.override(locale), timezone.override(pytz.timezone(tz)):
|
||||
form_class = view_instance.get_form_class()
|
||||
if form_kwargs.get('instance'):
|
||||
@@ -284,6 +297,8 @@ class AsyncFormView(AsyncMixin, FormView):
|
||||
'request_path': self.request.path,
|
||||
'query_string': self.request.GET.urlencode(),
|
||||
'form_kwargs': form_kwargs,
|
||||
'url_args': self.args,
|
||||
'url_kwargs': self.kwargs,
|
||||
'locale': get_language(),
|
||||
'tz': get_current_timezone().zone,
|
||||
}
|
||||
@@ -326,6 +341,13 @@ class AsyncPostView(AsyncMixin, View):
|
||||
expected_exceptions = (ValidationError,)
|
||||
task_base = ProfiledEventTask
|
||||
|
||||
def async_set_progress(self, percentage):
|
||||
if not self._task_self.request.called_directly:
|
||||
self._task_self.update_state(
|
||||
state='PROGRESS',
|
||||
meta={'value': percentage}
|
||||
)
|
||||
|
||||
def __init_subclass__(cls):
|
||||
def async_execute(self, *, request_path, url_args, url_kwargs, query_string, post_data, locale, tz,
|
||||
organizer=None, event=None, user=None, session_key=None):
|
||||
@@ -336,6 +358,8 @@ class AsyncPostView(AsyncMixin, View):
|
||||
content_type='application/x-www-form-urlencoded'
|
||||
)
|
||||
view_instance.request = req
|
||||
view_instance.kwargs = url_kwargs
|
||||
view_instance.args = url_args
|
||||
if event:
|
||||
view_instance.request.event = event
|
||||
view_instance.request.organizer = event.organizer
|
||||
@@ -348,6 +372,9 @@ class AsyncPostView(AsyncMixin, View):
|
||||
self.SessionStore = engine.SessionStore
|
||||
view_instance.request.session = self.SessionStore(session_key)
|
||||
|
||||
task_self = self
|
||||
view_instance._task_self = task_self
|
||||
|
||||
with translation.override(locale), timezone.override(pytz.timezone(tz)):
|
||||
return view_instance.async_post(view_instance.request, *url_args, **url_kwargs)
|
||||
|
||||
|
||||
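AsyncFormView and AsyncPostView now forward url_args and url_kwargs into the background task and expose async_set_progress, so long-running form handling can both resolve URL parameters and report progress through the PROGRESS task state. A hypothetical subclass using both; apart from AsyncPostView, async_post and async_set_progress, every name below is illustrative.

class BulkRecalculateView(AsyncPostView):  # hypothetical view
    def async_post(self, request, *args, **kwargs):
        organizer_slug = kwargs.get('organizer')  # now available via url_kwargs
        items = list(range(200))                  # placeholder workload
        for i, item in enumerate(items):
            # report progress to the PROGRESS task state added above
            self.async_set_progress(100 * i / len(items))
        return 'ok'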
@@ -71,7 +71,7 @@ def _default_context(request):
|
||||
except Resolver404:
|
||||
return {}
|
||||
|
||||
if not request.path.startswith(get_script_prefix() + 'control'):
|
||||
if not request.path.startswith(get_script_prefix() + 'control') or not hasattr(request, 'user'):
|
||||
return {}
|
||||
ctx = {
|
||||
'url_name': url.url_name,
|
||||
|
||||
@@ -48,6 +48,8 @@ from django.utils.timezone import now
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django_scopes.forms import SafeModelMultipleChoiceField
|
||||
|
||||
from pretix.helpers.hierarkey import clean_filename
|
||||
|
||||
from ...base.forms import I18nModelForm
|
||||
|
||||
# Import for backwards compatibility with old import paths
|
||||
@@ -127,7 +129,7 @@ class ClearableBasenameFileInput(forms.ClearableFileInput):
|
||||
def __str__(self):
|
||||
if hasattr(self.file, 'display_name'):
|
||||
return self.file.display_name
|
||||
return os.path.basename(self.file.name).split('.', 1)[-1]
|
||||
return clean_filename(os.path.basename(self.file.name))
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
|
||||
@@ -39,7 +39,7 @@ from urllib.parse import urlencode, urlparse
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.validators import validate_email
|
||||
from django.core.validators import MaxValueValidator, validate_email
|
||||
from django.db.models import Prefetch, Q, prefetch_related_objects
|
||||
from django.forms import (
|
||||
CheckboxSelectMultiple, formset_factory, inlineformset_factory,
|
||||
@@ -589,7 +589,7 @@ class EventSettingsForm(SettingsForm):
|
||||
(k, '{scheme}: {samples}'.format(
|
||||
scheme=v[0],
|
||||
samples=', '.join(v[1])
|
||||
))
|
||||
) if v[0] != ', '.join(v[1]) else v[0])
|
||||
for k, v in PERSON_NAME_TITLE_GROUPS.items()
|
||||
]
|
||||
if not self.event.has_subevents:
|
||||
@@ -848,6 +848,7 @@ class InvoiceSettingsForm(SettingsForm):
|
||||
self.fields['invoice_generate_sales_channels'].choices = (
|
||||
(c.identifier, c.verbose_name) for c in get_all_sales_channels().values()
|
||||
)
|
||||
self.fields['invoice_numbers_counter_length'].validators.append(MaxValueValidator(15))
|
||||
|
||||
def clean(self):
|
||||
data = super().clean()
|
||||
|
||||
@@ -129,6 +129,11 @@ class QuestionForm(I18nModelForm):
|
||||
|
||||
return val
|
||||
|
||||
def clean_identifier(self):
|
||||
val = self.cleaned_data.get('identifier')
|
||||
Question._clean_identifier(self.instance.event, val, self.instance)
|
||||
return val
|
||||
|
||||
def clean(self):
|
||||
d = super().clean()
|
||||
if d.get('dependency_question') and not d.get('dependency_values'):
|
||||
@@ -763,10 +768,6 @@ class ItemAddOnsFormSet(I18nFormSet):
|
||||
if self._should_delete_form(form):
|
||||
# This form is going to be deleted so any of its errors
|
||||
# should not cause the entire formset to be invalid.
|
||||
try:
|
||||
categories.remove(form.cleaned_data['addon_category'].pk)
|
||||
except KeyError:
|
||||
pass
|
||||
continue
|
||||
|
||||
if 'addon_category' in form.cleaned_data:
|
||||
|
||||
@@ -51,6 +51,7 @@ from pytz import common_timezones
|
||||
|
||||
from pretix.api.models import WebHook
|
||||
from pretix.api.webhooks import get_all_webhook_events
|
||||
from pretix.base.customersso.oidc import oidc_validate_and_complete_config
|
||||
from pretix.base.forms import I18nModelForm, PlaceholderValidator, SettingsForm
|
||||
from pretix.base.forms.questions import (
|
||||
NamePartsFormField, WrappedPhoneNumberPrefixWidget, get_country_by_locale,
|
||||
@@ -61,6 +62,7 @@ from pretix.base.models import (
|
||||
Customer, Device, EventMetaProperty, Gate, GiftCard, Membership,
|
||||
MembershipType, Organizer, Team,
|
||||
)
|
||||
from pretix.base.models.customers import CustomerSSOClient, CustomerSSOProvider
|
||||
from pretix.base.models.organizer import OrganizerFooterLink
|
||||
from pretix.base.settings import PERSON_NAME_SCHEMES, PERSON_NAME_TITLE_GROUPS
|
||||
from pretix.control.forms import ExtFileField, SplitDateTimeField
|
||||
@@ -159,7 +161,7 @@ class OrganizerUpdateForm(OrganizerForm):
|
||||
instance = super().save(commit)
|
||||
|
||||
if self.domain:
|
||||
current_domain = instance.domains.first()
|
||||
current_domain = instance.domains.filter(event__isnull=True).first()
|
||||
if self.cleaned_data['domain']:
|
||||
if current_domain and current_domain.domainname != self.cleaned_data['domain']:
|
||||
current_domain.delete()
|
||||
@@ -354,6 +356,7 @@ class OrganizerSettingsForm(SettingsForm):
|
||||
auto_fields = [
|
||||
'allowed_restricted_plugins',
|
||||
'customer_accounts',
|
||||
'customer_accounts_native',
|
||||
'customer_accounts_link_by_email',
|
||||
'invoice_regenerate_allowed',
|
||||
'contact_mail',
|
||||
@@ -631,6 +634,10 @@ class CustomerUpdateForm(forms.ModelForm):
|
||||
titles=self.instance.organizer.settings.name_scheme_titles,
|
||||
label=_('Name'),
|
||||
)
|
||||
if self.instance.provider_id:
|
||||
self.fields['email'].disabled = True
|
||||
self.fields['is_verified'].disabled = True
|
||||
self.fields['external_identifier'].disabled = True
|
||||
|
||||
def clean(self):
|
||||
email = self.cleaned_data.get('email')
|
||||
@@ -706,3 +713,120 @@ OrganizerFooterLinkFormset = inlineformset_factory(
|
||||
formset=BaseOrganizerFooterLinkFormSet,
|
||||
can_order=False, can_delete=True, extra=0
|
||||
)
|
||||
|
||||
|
||||
class SSOProviderForm(I18nModelForm):
|
||||
|
||||
config_oidc_base_url = forms.URLField(
|
||||
label=pgettext_lazy('sso_oidc', 'Base URL'),
|
||||
required=False,
|
||||
)
|
||||
config_oidc_client_id = forms.CharField(
|
||||
label=pgettext_lazy('sso_oidc', 'Client ID'),
|
||||
required=False,
|
||||
)
|
||||
config_oidc_client_secret = forms.CharField(
|
||||
label=pgettext_lazy('sso_oidc', 'Client secret'),
|
||||
required=False,
|
||||
)
|
||||
config_oidc_scope = forms.CharField(
|
||||
label=pgettext_lazy('sso_oidc', 'Scope'),
|
||||
help_text=pgettext_lazy('sso_oidc', 'Multiple scopes separated with spaces.'),
|
||||
required=False,
|
||||
)
|
||||
config_oidc_uid_field = forms.CharField(
|
||||
label=pgettext_lazy('sso_oidc', 'User ID field'),
|
||||
help_text=pgettext_lazy('sso_oidc', 'We will assume that the contents of the user ID fields are unique and '
|
||||
'can never change for a user.'),
|
||||
required=True,
|
||||
initial='sub',
|
||||
)
|
||||
config_oidc_email_field = forms.CharField(
|
||||
label=pgettext_lazy('sso_oidc', 'Email field'),
|
||||
help_text=pgettext_lazy('sso_oidc', 'We will assume that all email addresses received from the SSO provider '
|
||||
'are verified to really belong to the user. If this can\'t be '
|
||||
'guaranteed, security issues might arise.'),
|
||||
required=True,
|
||||
initial='email',
|
||||
)
|
||||
config_oidc_phone_field = forms.CharField(
|
||||
label=pgettext_lazy('sso_oidc', 'Phone field'),
|
||||
required=False,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = CustomerSSOProvider
|
||||
fields = ['is_active', 'name', 'button_label', 'method']
|
||||
widgets = {
|
||||
'method': forms.RadioSelect,
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
name_scheme = self.event.settings.name_scheme
|
||||
scheme = PERSON_NAME_SCHEMES.get(name_scheme)
|
||||
for fname, label, size in scheme['fields']:
|
||||
self.fields[f'config_oidc_{fname}_field'] = forms.CharField(
|
||||
label=pgettext_lazy('sso_oidc', f'{label} field').format(label=label),
|
||||
required=False,
|
||||
)
|
||||
|
||||
self.fields['method'].choices = [c for c in self.fields['method'].choices if c[0]]
|
||||
|
||||
for fname, f in self.fields.items():
|
||||
if fname.startswith('config_'):
|
||||
prefix, method, suffix = fname.split('_', 2)
|
||||
f.widget.attrs['data-display-dependency'] = f'input[name=method][value={method}]'
|
||||
|
||||
if self.instance and self.instance.method == method:
|
||||
f.initial = self.instance.configuration.get(suffix)
|
||||
|
||||
def clean(self):
|
||||
data = self.cleaned_data
|
||||
if not data.get("method"):
|
||||
return data
|
||||
|
||||
config = {}
|
||||
for fname, f in self.fields.items():
|
||||
if fname.startswith(f'config_{data["method"]}_'):
|
||||
prefix, method, suffix = fname.split('_', 2)
|
||||
config[suffix] = data.get(fname)
|
||||
|
||||
if data["method"] == "oidc":
|
||||
oidc_validate_and_complete_config(config)
|
||||
|
||||
self.instance.configuration = config
|
||||
|
||||
|
||||
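SSOProviderForm derives the stored configuration from its own field names: everything prefixed config_<method>_ is split into (prefix, method, suffix) and written into instance.configuration keyed by the suffix. A tiny sketch of that naming convention with plain strings; the field names are the ones declared above, the values are placeholders.

fields = [
    "config_oidc_base_url",
    "config_oidc_client_id",
    "config_oidc_client_secret",
]

config = {}
for fname in fields:
    prefix, method, suffix = fname.split('_', 2)  # ("config", "oidc", "base_url")
    if method == "oidc":
        config[suffix] = f"<value of {fname}>"    # placeholder values

# config == {"base_url": ..., "client_id": ..., "client_secret": ...}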
+class SSOClientForm(I18nModelForm):
+    regenerate_client_secret = forms.BooleanField(
+        label=_('Invalidate old client secret and generate a new one'),
+        required=False,
+    )
+
+    class Meta:
+        model = CustomerSSOClient
+        fields = ['is_active', 'name', 'client_id', 'client_type', 'authorization_grant_type', 'redirect_uris',
+                  'allowed_scopes']
+        widgets = {
+            'authorization_grant_type': forms.RadioSelect,
+            'client_type': forms.RadioSelect,
+            'allowed_scopes': forms.CheckboxSelectMultiple,
+        }
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.fields['allowed_scopes'] = forms.MultipleChoiceField(
+            label=self.fields['allowed_scopes'].label,
+            help_text=self.fields['allowed_scopes'].help_text,
+            required=self.fields['allowed_scopes'].required,
+            initial=self.fields['allowed_scopes'].initial,
+            choices=CustomerSSOClient.SCOPE_CHOICES,
+            widget=forms.CheckboxSelectMultiple
+        )
+        if self.instance and self.instance.pk:
+            self.fields['client_id'].disabled = True
+        else:
+            del self.fields['client_id']
+            del self.fields['regenerate_client_secret']
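Note: `SSOProviderForm.clean()` above derives the stored provider configuration purely from the `config_<method>_<suffix>` field-naming convention. The following standalone sketch is illustrative only (the helper and sample data are made up and not part of the diff); it shows how those field names decompose into the configuration dict:

```python
# Illustrative sketch only: mirrors the name-splitting logic used by
# SSOProviderForm.clean() above, outside of Django, with made-up data.
def build_configuration(cleaned_data: dict, method: str) -> dict:
    config = {}
    for fname, value in cleaned_data.items():
        if fname.startswith(f"config_{method}_"):
            # "config_oidc_base_url" -> ("config", "oidc", "base_url")
            _prefix, _method, suffix = fname.split("_", 2)
            config[suffix] = value
    return config


data = {
    "method": "oidc",
    "config_oidc_base_url": "https://idp.example.com",
    "config_oidc_client_id": "pretix",
}
assert build_configuration(data, data["method"]) == {
    "base_url": "https://idp.example.com",
    "client_id": "pretix",
}
```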
@@ -72,7 +72,7 @@ class VoucherForm(I18nModelForm):
         localized_fields = '__all__'
         fields = [
             'code', 'valid_until', 'block_quota', 'allow_ignore_quota', 'value', 'tag',
-            'comment', 'max_usages', 'price_mode', 'subevent', 'show_hidden_items', 'budget'
+            'comment', 'max_usages', 'min_usages', 'price_mode', 'subevent', 'show_hidden_items', 'budget'
         ]
         field_classes = {
             'valid_until': SplitDateTimeField,
@@ -308,7 +308,7 @@ class VoucherBulkForm(VoucherForm):
         localized_fields = '__all__'
         fields = [
             'valid_until', 'block_quota', 'allow_ignore_quota', 'value', 'tag', 'comment',
-            'max_usages', 'price_mode', 'subevent', 'show_hidden_items', 'budget'
+            'max_usages', 'min_usages', 'price_mode', 'subevent', 'show_hidden_items', 'budget'
         ]
         field_classes = {
             'valid_until': SplitDateTimeField,
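Note: both voucher forms now expose `min_usages` next to `max_usages`; the two hunks above only touch the field lists. As a purely hypothetical illustration (this helper is not part of pretix and no such validation is shown in the diff), a consistency check between the two bounds could look like this:

```python
# Hypothetical helper, for illustration only: keep voucher usage bounds consistent.
def check_usage_bounds(min_usages: int, max_usages: int) -> None:
    if min_usages < 1:
        raise ValueError("min_usages must be at least 1")
    if min_usages > max_usages:
        raise ValueError("min_usages may not exceed max_usages")


check_usage_bounds(1, 10)  # ok
try:
    check_usage_bounds(5, 2)
except ValueError as e:
    print(e)  # min_usages may not exceed max_usages
```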
@@ -319,6 +319,14 @@ def pretixcontrol_logentry_display(sender: Event, logentry: LogEntry, **kwargs):
         'pretix.giftcards.acceptance.removed': _('Gift card acceptance for another organizer has been removed.'),
         'pretix.webhook.created': _('The webhook has been created.'),
         'pretix.webhook.changed': _('The webhook has been changed.'),
+        'pretix.webhook.retries.expedited': _('The webhook call retry jobs have been manually expedited.'),
+        'pretix.webhook.retries.dropped': _('The webhook call retry jobs have been dropped.'),
+        'pretix.ssoprovider.created': _('The SSO provider has been created.'),
+        'pretix.ssoprovider.changed': _('The SSO provider has been changed.'),
+        'pretix.ssoprovider.deleted': _('The SSO provider has been deleted.'),
+        'pretix.ssoclient.created': _('The SSO client has been created.'),
+        'pretix.ssoclient.changed': _('The SSO client has been changed.'),
+        'pretix.ssoclient.deleted': _('The SSO client has been deleted.'),
         'pretix.membershiptype.created': _('The membership type has been created.'),
         'pretix.membershiptype.changed': _('The membership type has been changed.'),
         'pretix.membershiptype.deleted': _('The membership type has been deleted.'),
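Note: the hunk above only extends the mapping from log action types to display strings; the surrounding `pretixcontrol_logentry_display` receiver (not shown in this diff) presumably resolves the text via a plain dictionary lookup. A minimal, self-contained sketch of that pattern with made-up data:

```python
# Illustrative sketch only: resolve a human-readable message for a log entry
# from an action-type -> message mapping like the one extended above.
MESSAGES = {
    "pretix.ssoprovider.created": "The SSO provider has been created.",
    "pretix.ssoclient.deleted": "The SSO client has been deleted.",
}


def display_text(action_type: str):
    # Returning None signals that this receiver does not handle the action type.
    return MESSAGES.get(action_type)


assert display_text("pretix.ssoprovider.created") == "The SSO provider has been created."
assert display_text("pretix.unknown.action") is None
```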
@@ -550,6 +550,24 @@ def get_organizer_navigation(request):
                 'active': 'organizer.membershiptype' in url.url_name,
             }
         )
+        children.append(
+            {
+                'label': _('SSO clients'),
+                'url': reverse('control:organizer.ssoclients', kwargs={
+                    'organizer': request.organizer.slug
+                }),
+                'active': 'organizer.ssoclient' in url.url_name,
+            }
+        )
+        children.append(
+            {
+                'label': _('SSO providers'),
+                'url': reverse('control:organizer.ssoproviders', kwargs={
+                    'organizer': request.organizer.slug
+                }),
+                'active': 'organizer.ssoprovider' in url.url_name,
+            }
+        )
         if children:
             nav.append({
                 'label': _('Customer accounts'),
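Note: in the navigation entries above, the `active` flag comes from a substring check against the resolved URL name, so the list, detail, and edit views of a section all highlight the same menu item. A tiny illustrative sketch of that matching (the URL names below are examples, not necessarily real pretix URL names):

```python
# Illustrative sketch only: the menu entry is marked active when the current
# URL name contains the section prefix, as in the 'active' keys above.
def is_active(current_url_name: str, prefix: str) -> bool:
    return prefix in current_url_name


assert is_active("organizer.ssoclients", "organizer.ssoclient")
assert is_active("organizer.ssoclient.edit", "organizer.ssoclient")
assert not is_active("organizer.ssoproviders", "organizer.ssoclient")
```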
@@ -72,10 +72,10 @@
     </div>
 {% else %}
     <form method="post" action="{% url "control:event.orders.checkinlists.bulk_action" event=request.event.slug organizer=request.event.organizer.slug list=checkinlist.pk %}" data-asynctask>
-        <div class="hidden">
-            {{ filter_form.as_p }}
-            <input name="returnquery" type="hidden" value="{{ request.META.QUERY_STRING }}">
-        </div>
+        {% for field in filter_form %}
+            {{ field.as_hidden }}
+        {% endfor %}
+        <input name="returnquery" type="hidden" value="{{ request.META.QUERY_STRING }}">
         {% csrf_token %}
         <div class="table-responsive">
             <table class="table table-condensed table-hover">
Some files were not shown because too many files have changed in this diff.