feat: Initial commit

Vincent Mahnke, 2025-11-08 18:21:46 +01:00
Commit 17973e866b, signed by ViMaSter (GPG key ID: 6D787326BA7D6469)
44 changed files with 1444 additions and 0 deletions

docker/pretix/Dockerfile (new file)

@@ -0,0 +1,19 @@
FROM pretix/standalone:stable
USER root
ENV IMAGE_CRON_DIR="/image/cron" \
    IMAGE_CONFIG_DIR="/image/config"
ADD files /image
COPY crontab /tmp/crontab
RUN mv /image/supervisord/crond.conf /etc/supervisord/crond.conf && \
    pip install crontab && \
    chmod 644 $IMAGE_CONFIG_DIR/ssl/*.crt && \
    chmod +x $IMAGE_CRON_DIR/cron.py
USER pretixuser
EXPOSE 443
ENTRYPOINT ["pretix"]
CMD ["all"]

docker/pretix/crontab (new file)

@@ -0,0 +1,24 @@
# Edit this file to introduce tasks to be run by cron.
#
# Each task to run has to be defined through a single line
# indicating with different fields when the task will be run
# and what command to run for the task
#
# To define the time you can provide concrete values for
# minute (m), hour (h), day of month (dom), month (mon),
# and day of week (dow) or use '*' in these fields (for 'any').
#
# Notice that tasks will be started based on the cron's system
# daemon's notion of time and timezones.
#
# Output of the crontab jobs (including errors) is sent through
# email to the user the crontab file belongs to (unless redirected).
#
# For example, you can run a backup of all your user accounts
# at 5 a.m. every week with:
# 0 5 * * 1 tar -zcf /var/backups/home.tgz /home/
#
# For more information see the manual pages of crontab(5) and cron(8)
#
# m h dom mon dow command
15,45 * * * * su pretixuser -c "PRETIX_CONFIG_FILE=/etc/pretix/pretix.cfg python -m pretix runperiodic"
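
This file is consumed not by a system cron daemon but by the cron.py script below, which does the parsing itself. As a rough sketch (assuming the crontab package that the Dockerfile installs via pip), each non-comment line splits into five schedule fields plus a command:

# Sketch of how a crontab line is split into schedule fields and a command,
# mirroring _parse_crontab() in cron.py below (needs `pip install crontab`).
from crontab import CronTab

line = '15,45 * * * * su pretixuser -c "python -m pretix runperiodic"'
fields = line.split(None, 5)            # five schedule fields + the command
entry = CronTab(" ".join(fields[:5]))   # raises ValueError if invalid
command = fields[5]
print(int(entry.next(default_utc=True)), command)  # seconds until next run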

docker/pretix/files/cron/cron.py (new file)

@@ -0,0 +1,224 @@
#!/usr/local/bin/python3
import argparse
import logging
import os
import signal
import subprocess
import sys
import time

from crontab import CronTab


def _parse_crontab(crontab_file: str) -> list:
    """Parse the crontab file and return a list of [CronTab, command] jobs.

    Keyword arguments:
    crontab_file -> path of the crontab file to parse
    """
logger = logging.getLogger("parser")
logger.info(f"Reading crontab from {crontab_file}")
if not os.path.isfile(crontab_file):
logger.error(f"Crontab {crontab_file} does not exist. Exiting!")
sys.exit(1)
with open(crontab_file, "r") as crontab:
lines: list = crontab.readlines()
logger.info(f"{len(lines)} lines read from crontab {crontab_file}")
jobs: list = list()
for i, line in enumerate(lines):
line: str = line.strip()
if not line:
continue
if line.startswith("#"):
continue
logger.info(f"Parsing line {line}")
        expression: list = line.split(None, 5)  # split on any run of whitespace
        cron_expression: str = " ".join(expression[0:5])
logger.info(f"Cron expression is {cron_expression}")
try:
cron_entry = CronTab(cron_expression)
except ValueError as e:
logger.critical(
f"Unable to parse crontab. Line {i + 1}: Illegal cron expression {cron_expression}. Error message: {e}"
)
sys.exit(1)
        if len(expression) < 6:
            logger.critical(f"Unable to parse crontab. Line {i + 1}: No command given.")
            sys.exit(1)
        command: str = expression[5]
logger.info(f"Command is {command}")
jobs.append([cron_entry, command])
if len(jobs) == 0:
logger.error(
"Specified crontab does not contain any scheduled execution. Exiting!"
)
sys.exit(1)
return jobs


def _get_next_executions(jobs: list):
    """Return the seconds until the next execution and the commands due at that time.

    Keyword arguments:
    jobs -> list of [CronTab, command] jobs
    """
logger = logging.getLogger("next-exec")
scheduled_executions: tuple = tuple(
(x[1], int(x[0].next(default_utc=True)) + 1) for x in jobs
)
logger.debug(f"Next executions of scheduled are {scheduled_executions}")
next_exec_time: int = int(min(scheduled_executions, key=lambda x: x[1])[1])
logger.debug(f"Next execution is in {next_exec_time} second(s)")
next_commands: list = [x[0] for x in scheduled_executions if x[1] == next_exec_time]
    logger.debug(
        f"Next commands to be executed in {next_exec_time} second(s) are {next_commands}"
    )
return next_exec_time, next_commands


def _loop(jobs: list, test_mode: bool = False):
    """Loop over all jobs from the crontab file and execute them when they are due.

    Keyword arguments:
    jobs -> list of [CronTab, command] jobs
    test_mode -> run a single scheduling iteration instead of looping forever (default False)
    """
logger = logging.getLogger("loop")
logger.info("Entering main loop")
if test_mode is False:
while True:
sleep_time, commands = _get_next_executions(jobs)
logger.debug(f"Sleeping for {sleep_time} second(s)")
if sleep_time <= 1:
logger.debug("Sleep time <= 1 second, ignoring.")
time.sleep(1)
continue
time.sleep(sleep_time)
for command in commands:
_execute_command(command)
else:
sleep_time, commands = _get_next_executions(jobs)
logger.debug(f"Sleeping for {sleep_time} second(s)")
if sleep_time <= 1:
logger.debug("Sleep time <= 1 second, ignoring.")
time.sleep(1)
time.sleep(sleep_time)
for command in commands:
_execute_command(command)


def _execute_command(command: str):
    """Execute a single crontab command in a subshell.

    Keyword arguments:
    command -> the command line to execute
    """
logger = logging.getLogger("exec")
logger.info(f"Executing command {command}")
result = subprocess.run(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
)
logger.info(f"Standard output: {result.stdout}")
logger.info(f"Standard error: {result.stderr}")


def _signal_handler(signum, frame):
    """Handle SIGINT/SIGTERM by logging and exiting the process."""
logger = logging.getLogger("signal")
logger.info("Exiting")
sys.exit(0)


def main():
    """Parse the command line, configure logging, and run the crontab loop.

    Arguments:
    -c -> the crontab file to read
    -L -> the log file to write to
    -C -> write log output to the console instead of a file
    -l -> the log level (default INFO)
    """
signal.signal(signal.SIGINT, _signal_handler)
signal.signal(signal.SIGTERM, _signal_handler)
parser = argparse.ArgumentParser(description="cron")
parser.add_argument("-c", "--crontab", required=True, type=str)
logging_target = parser.add_mutually_exclusive_group(required=True)
logging_target.add_argument("-L", "--logfile", type=str)
logging_target.add_argument("-C", "--console", action="store_true")
parser.add_argument(
"-l",
"--loglevel",
choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
default="INFO",
type=str,
)
args = parser.parse_args()
log_level = getattr(logging, args.loglevel.upper(), logging.INFO)
    if args.console:
        logging.basicConfig(
            level=log_level,
            format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
        )
else:
logging.basicConfig(
filename=args.logfile,
filemode="a+",
level=log_level,
format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
)
logger = logging.getLogger("main")
logger.info("Starting cron")
jobs: list = _parse_crontab(args.crontab)
_loop(jobs)


if __name__ == "__main__":
main()
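
For reference, the scheduling step above boils down to: compute the seconds until each job's next run, sleep until the soonest one, and run every command due at that instant. A condensed restatement with a hypothetical job list, using the same crontab package:

from crontab import CronTab

# Hypothetical job list in the same [CronTab, command] shape used above.
jobs = [
    [CronTab("15,45 * * * *"), "python -m pretix runperiodic"],
    [CronTab("0 5 * * 1"), "tar -zcf /var/backups/home.tgz /home/"],
]
waits = [(cmd, int(entry.next(default_utc=True)) + 1) for entry, cmd in jobs]
sleep_for = min(w for _, w in waits)               # seconds to sleep
due = [cmd for cmd, w in waits if w == sleep_for]  # commands due at that time
print(sleep_for, due)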

docker/pretix/files/supervisord/crond.conf (new file)

@@ -0,0 +1,7 @@
[program:crond]
command = %(ENV_IMAGE_CRON_DIR)s/cron.py --crontab /tmp/crontab --loglevel INFO --logfile /var/log/crond.log
autostart = true
redirect_stderr = true
stdout_logfile = /var/log/crond.log
stdout_logfile_maxbytes = 1MB
stdout_logfile_backups = 2

nginx.conf (new file)

@@ -0,0 +1,85 @@
user www-data www-data;
worker_processes auto;
pid /var/run/nginx.pid;
daemon off;
worker_rlimit_nofile 262144;
events {
worker_connections 16384;
multi_accept on;
use epoll;
}
http {
server_tokens off;
sendfile on;
charset utf-8;
tcp_nopush on;
tcp_nodelay on;
log_format private '[$time_local] $host "$request" $status $body_bytes_sent';
types_hash_max_size 2048;
server_names_hash_bucket_size 64;
include /etc/nginx/mime.types;
default_type application/octet-stream;
add_header X-Content-Type-Options nosniff;
access_log /var/log/nginx/access.log private;
error_log /var/log/nginx/error.log;
add_header Referrer-Policy same-origin;
gzip on;
gzip_disable "msie6";
gzip_types text/plain text/css application/json application/javascript application/x-javascript text/javascript text/xml application/xml application/rss+xml application/atom+xml application/rdf+xml image/svg+xml;
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_buffers 16 8k;
include /etc/nginx/conf.d/*.conf;
server {
listen 80 backlog=4096 default_server;
listen [::]:80 ipv6only=on default_server;
server_name _;
index index.php index.html;
root /var/www;
location /media/ {
alias /data/media/;
expires 7d;
access_log off;
}
location ^~ /media/cachedfiles {
deny all;
return 404;
}
location ^~ /media/invoices {
deny all;
return 404;
}
location /static/ {
alias /pretix/src/pretix/static.dist/;
access_log off;
expires 365d;
add_header Cache-Control "public";
add_header Access-Control-Allow-Origin "*";
gzip on;
}
location / {
            # Very important:
            #     proxy_pass http://unix:/tmp/pretix.sock:;
            # is not the same as
            #     proxy_pass http://unix:/tmp/pretix.sock:/;
            # In the latter case, nginx applies its URL parsing and normalization; in the
            # former it doesn't. There are situations in which pretix's API deals with
            # "file names" containing %2F%2F, which nginx would normalize to %2F and
            # thereby break ticket validation.
proxy_pass http://unix:/tmp/pretix.sock:;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $http_host;
}
}
}
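
To exercise the proxy_pass subtlety documented in the location / block, the request must reach nginx with the %2F%2F sequence intact, and many HTTP clients normalize or re-encode paths before sending. A hedged test sketch using Python's http.client, which transmits the request line verbatim (the endpoint path is purely illustrative):

import http.client

# http.client passes the path through as given, so the encoded double
# slash reaches nginx unmodified; higher-level clients may rewrite it.
conn = http.client.HTTPConnection("localhost", 80)
conn.request("GET", "/api/v1/example/a%2F%2Fb")  # hypothetical endpoint
resp = conn.getresponse()
print(resp.status, resp.reason)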

docker/pretix/pretix.cfg (new file)

@@ -0,0 +1,38 @@
[pretix]
instance_name=localhost
url=http://localhost
currency=EUR
; DO NOT change the following value, it has to be set to the location of the
; directory *inside* the docker container
datadir=/data
registration=off
[locale]
default=de
timezone=Europe/Berlin
[database]
backend=postgresql
name=pretix
user=pretix
password=pretix
host=database
[mail]
from=FROM_MAIL
host=MAIL_SERVER
user=USERNAME
password=FOOBAR
port=587
tls=off
ssl=off
[redis]
location=redis://cache/0
; Remove the following line if you are unsure about your redis' security,
; to reduce the impact if redis gets compromised.
sessions=true
[celery]
backend=redis://cache/1
broker=redis://cache/2
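
Since pretix.cfg is plain INI, one quick sanity check for a deployment is to read it back with Python's configparser, with interpolation disabled so literal % characters in values cannot trip it up. The path matches PRETIX_CONFIG_FILE from the crontab above; the printed values are simply what this sample config would yield:

import configparser

# Read the config the same way a quick deployment check might.
cfg = configparser.ConfigParser(interpolation=None)
cfg.read("/etc/pretix/pretix.cfg")
print(cfg.get("pretix", "url"))             # http://localhost
print(cfg.get("database", "backend"))       # postgresql
print(cfg.getboolean("redis", "sessions"))  # True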