Compare commits
154 commits
| SHA1 | Author | Date | |
|---|---|---|---|
| 8066efb923 | |||
| 463cf8783c | |||
| 5a8dc7e313 | |||
| 7f43efc857 | |||
| 08b8f03661 | |||
| fe5e340d6e | |||
| 979c7e1f9d | |||
| a1cdfb57a7 | |||
| 487fdffd91 | |||
| b1a1038dec | |||
| 2b873e4cb8 | |||
| a12edcd360 | |||
| 383b1925ef | |||
| 0603a8c7e6 | |||
| 5620c199a9 | |||
| 08d99bf714 | |||
| ebe76358ce | |||
| 47b69f0530 | |||
| f46bae2372 | |||
| 830b3ea114 | |||
| 926e249272 | |||
| bcb60def00 | |||
| 03654ef5af | |||
| 2d59c68004 | |||
| 7a51040ac0 | |||
| c2b177db49 | |||
| 7f0aeed88a | |||
| 8391afdac5 | |||
| d91b205a89 | |||
| 3311bfbd9f | |||
| 351ce246c5 | |||
| 9572ac822f | |||
| a59d33ec03 | |||
| a9e4013d86 | |||
| 19c1945110 | |||
| fb22a015e5 | |||
| e6312a2318 | |||
| 776654970e | |||
| 22f730d5b5 | |||
| dc614483b5 | |||
| 891e29a362 | |||
| 2667553cf2 | |||
| 8467803fdd | |||
| 084cf958a0 | |||
| b4cba98564 | |||
| 39d5fb8d16 | |||
| 0c74cfd5e9 | |||
| 841f523f73 | |||
| 6d38d04a1e | |||
| 504089427d | |||
| 60f29aab70 | |||
| ee94e30004 | |||
| 3469d98a43 | |||
| 5fd775d855 | |||
| 725d5292b2 | |||
| 9161a2501c | |||
| 9b3f856eb0 | |||
| 9621184bd8 | |||
| 1f2273d2ab | |||
| 0514fa0241 | |||
| 2f263476d3 | |||
| e65aa8fdab | |||
| 70b17657a1 | |||
| b8389352ec | |||
| 7586d4ff29 | |||
| bc656cdef4 | |||
| 278f6de6f5 | |||
| 2de9fed1fa | |||
| 3bcd2be520 | |||
| 7eac09e547 | |||
| 5fb1ee54b9 | |||
| ecfd60803f | |||
| 81b17b389f | |||
| 57675c08eb | |||
| 64f869121b | |||
| c41e6f8240 | |||
| 7483d0c012 | |||
| f1b26e5933 | |||
| f8ddcd7b7c | |||
| 962bd06a32 | |||
| 3d6d4d5503 | |||
| 4b22705ff7 | |||
| 983ad1b1ae | |||
| 849c305d7d | |||
| ff0d0d2e8b | |||
| c98b8c6f05 | |||
| 4136f819a5 | |||
| 78fe5440a8 | |||
| 012325e996 | |||
| 951fa63296 | |||
| 6f86abd997 | |||
| c1917d51a0 | |||
| 75017a99df | |||
| 980fdc8203 | |||
| 7df21873c1 | |||
| 9bbaeb67d3 | |||
| a6b557882d | |||
| 90c02e58bf | |||
| 8829902e0b | |||
| e7c5fe9213 | |||
| 5a1ce55086 | |||
| cca320e2f4 | |||
| e4e3c57f20 | |||
| 5274639ca3 | |||
| 3e5ed906bc | |||
| 9a519432b0 | |||
| 6a3424faf4 | |||
| 19a8d28a24 | |||
| a52d9b052f | |||
| db56385513 | |||
| 7ab96e6a47 | |||
| c37bca287e | |||
| d17f6da77a | |||
| 460f809403 | |||
| 0e6a705d3f | |||
| d54eff344f | |||
| 79a54578b8 | |||
| 1d8f20ff25 | |||
| d3b8e2e414 | |||
| 85daf26174 | |||
| 53933957a4 | |||
| 8d941ebef4 | |||
| 800bd90778 | |||
| df38fdb99e | |||
| 23947bd967 | |||
| 32ea52c8f4 | |||
| d755267dd9 | |||
| 53659b4364 | |||
| 0035dd1e6f | |||
| c8680b06ac | |||
| 3f82d0fc57 | |||
| 5d95a33c5a | |||
| aeb0a4fbe7 | |||
| 9e139fd422 | |||
| 9733a55942 | |||
| befdf5ad6e | |||
| 663116c778 | |||
| 187b0440c8 | |||
| bdb9fa064d | |||
| d3ba9db0c6 | |||
| 3dffc05c9d | |||
| 6616ae7417 | |||
| dc40295dde | |||
| 1d8361cc5f | |||
| 35243fdba6 | |||
| 43e7c1f3e4 | |||
| dcd2ebc49c | |||
| 555350eab7 | |||
| e117acac04 | |||
| 16313b9e40 | |||
| 033a1cf6e5 | |||
| 8befec9769 | |||
| d22add5bfd | |||
| 69fb93a664 |
168 changed files with 5077 additions and 1151 deletions
.gitignore (vendored, 1 change)
@@ -2,3 +2,4 @@
 .venv
 .cache
 *.pyc
+.bw_debug_history
@@ -37,3 +37,12 @@ fi
 telegraf: execd for daemons

 TEST
+
+# git signing
+
+git config --global gpg.format ssh
+git config --global commit.gpgsign true
+
+git config user.name CroneKorkN
+git config user.email i@ckn.li
+git config user.signingkey "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILMVroYmswD4tLk6iH+2tvQiyaMe42yfONDsPDIdFv6I"
bin/passwords-for (new executable file, 22 lines)
@@ -0,0 +1,22 @@
+#!/usr/bin/env python3
+
+from bundlewrap.repo import Repository
+from os.path import realpath, dirname
+
+import argparse
+
+parser = argparse.ArgumentParser()
+parser.add_argument('node', help='Node to generate passwords for')
+args = parser.parse_args()
+
+bw = Repository(dirname(dirname(realpath(__file__))))
+node = bw.get_node(args.node)
+
+if node.password:
+    print(f"password: {node.password}")
+
+for metadata_key in sorted([
+    'users/root/password',
+]):
+    if value := node.metadata.get(metadata_key, None):
+        print(f"{metadata_key}: {value}")
@@ -3,4 +3,4 @@
 from bundlewrap.repo import Repository
 from os.path import realpath, dirname

-repo = Repository(dirname(dirname(realpath(__file__))))
+bw = Repository(dirname(dirname(realpath(__file__))))
bin/sync_1password (new executable file, 132 lines)
@@ -0,0 +1,132 @@
+#!/usr/bin/env python3
+
+from bundlewrap.repo import Repository
+from os.path import realpath, dirname
+
+import json
+import os
+import subprocess
+from dataclasses import dataclass
+from typing import Optional, List
+
+bw = Repository(dirname(dirname(realpath(__file__))))
+
+VAULT=bw.vault.decrypt('encrypt$gAAAAABpLgX_xxb5NmNCl3cgHM0JL65GT6PHVXO5gwly7IkmWoEgkCDSuAcSAkNFB8Tb4RdnTdpzVQEUL1XppTKVto_O7_b11GjATiyQYiSfiQ8KZkTKLvk=').value
+BW_TAG = "bw"
+BUNDLEWRAP_FIELD_LABEL = "bundlewrap node id"
+
+
+@dataclass
+class OpResult:
+    stdout: str
+    stderr: str
+    returncode: int
+
+
+def main():
+    for node in bw.nodes_in_group('routeros'):
+        upsert_node_item(
+            node_name=node.name,
+            node_uuid=node.metadata.get('id'),
+            username=node.username,
+            password=node.password,
+            url=f'http://{node.hostname}',
+        )
+
+
+def run_op(args):
+    proc = subprocess.run(
+        ["op", "--vault", VAULT] + args,
+        env=os.environ.copy(),
+        capture_output=True,
+        text=True,
+    )
+
+    if proc.returncode != 0:
+        raise RuntimeError(
+            f"op {' '.join(args)} failed with code {proc.returncode}:\n"
+            f"STDOUT:\n{proc.stdout}\n\nSTDERR:\n{proc.stderr}"
+        )
+
+    return OpResult(stdout=proc.stdout, stderr=proc.stderr, returncode=proc.returncode)
+
+
+def op_item_list_bw():
+    out = run_op([
+        "item", "list",
+        "--tags", BW_TAG,
+        "--format", "json",
+    ])
+    stdout = out.stdout.strip()
+    return json.loads(stdout) if stdout else []
+
+
+def op_item_get(item_id):
+    args = ["item", "get", item_id, "--format", "json"]
+    return json.loads(run_op(args).stdout)
+
+
+def op_item_create(title, node_uuid, username, password, url):
+    print(f"creating {title}")
+    return json.loads(run_op([
+        "item", "create",
+        "--category", "LOGIN",
+        "--title", title,
+        "--tags", BW_TAG,
+        "--url", url,
+        "--format", "json",
+        f"username={username}",
+        f"password={password}",
+        f"{BUNDLEWRAP_FIELD_LABEL}[text]={node_uuid}",
+    ]).stdout)
+
+
+def op_item_edit(item_id, title, username, password, url):
+    print(f"updating {title}")
+    return json.loads(run_op([
+        "item", "edit",
+        item_id,
+        "--title", title,
+        "--url", url,
+        "--format", "json",
+        f"username={username}",
+        f"password={password}",
+    ]).stdout)
+
+
+def find_node_item_id(node_uuid):
+    for summary in op_item_list_bw():
+        item_id = summary.get("id")
+        if not item_id:
+            continue
+
+        item = op_item_get(item_id)
+        for field in item.get("fields") or []:
+            label = field.get("label")
+            value = field.get("value")
+            if label == BUNDLEWRAP_FIELD_LABEL and value == node_uuid:
+                return item_id
+    return None
+
+
+def upsert_node_item(node_name, node_uuid, username, password, url):
+    if item_id := find_node_item_id(node_uuid):
+        return op_item_edit(
+            item_id=item_id,
+            title=node_name,
+            username=username,
+            password=password,
+            url=url,
+        )
+    else:
+        return op_item_create(
+            title=node_name,
+            node_uuid=node_uuid,
+            username=username,
+            password=password,
+            url=url,
+        )
+
+
+if __name__ == "__main__":
+    main()
@@ -23,7 +23,7 @@ for node in nodes:
     print(node.run('DEBIAN_FRONTEND=noninteractive apt update').stdout.decode())
     print(node.run('DEBIAN_FRONTEND=noninteractive apt list --upgradable').stdout.decode())
     if int(node.run('DEBIAN_FRONTEND=noninteractive apt list --upgradable 2> /dev/null | grep upgradable | wc -l').stdout.decode()):
-        print(node.run('DEBIAN_FRONTEND=noninteractive apt -y dist-upgrade').stdout.decode())
+        print(node.run('DEBIAN_FRONTEND=noninteractive apt -qy full-upgrade').stdout.decode())

 # REBOOT IN ORDER
@@ -13,6 +13,9 @@
                 'deb',
                 'deb-src',
             },
+            'options': { # optional
+                'aarch': 'amd64',
+            },
             'urls': {
                 'https://deb.debian.org/debian',
             },
@@ -62,6 +62,7 @@ files = {
     '/usr/lib/nagios/plugins/check_apt_upgradable': {
         'mode': '0755',
    },
+    # /etc/kernel/postinst.d/apt-auto-removal
 }

 actions = {
@@ -1,13 +1,31 @@
 #!/bin/bash

-set -exu
+set -u

 # FIXME: inelegant
 % if wol_command:
 ${wol_command}
 % endif

+exit=0
+failed_paths=""
+
 for path in $(jq -r '.paths | .[]' < /etc/backup/config.json)
 do
+    echo backing up $path
     /opt/backup/backup_path "$path"
+    # set exit to 1 if any backup fails
+    if [ $? -ne 0 ]
+    then
+        echo ERROR: backing up $path failed >&2
+        exit=5
+        failed_paths="$failed_paths $path"
+    fi
 done

+if [ $exit -ne 0 ]
+then
+    echo "ERROR: failed to backup paths: $failed_paths" >&2
+fi
+
+exit $exit
@@ -1,6 +1,6 @@
 #!/bin/bash

-set -exu
+set -eu

 path=$1
 uuid=$(jq -r .client_uuid < /etc/backup/config.json)
bundles/bind/files/db.empty (new file, 8 lines)
@@ -0,0 +1,8 @@
+$TTL 86400
+@ IN SOA localhost. root.localhost. (
+    1       ; Serial
+    604800  ; Refresh
+    86400   ; Retry
+    2419200 ; Expire
+    86400 ) ; Negative Cache TTL
+  IN NS localhost.
@@ -29,6 +29,7 @@ view "${view_name}" {

 % if view_conf['is_internal']:
     recursion yes;
+    include "/etc/bind/zones.rfc1918";
 % else:
     recursion no;
     rate-limit {
@@ -62,9 +63,6 @@ view "${view_name}" {
         file "/var/lib/bind/${view_name}/${zone_name}";
     };
 % endfor

-    include "/etc/bind/named.conf.default-zones";
-    include "/etc/bind/zones.rfc1918";
 };

 % endfor
@@ -10,7 +10,7 @@ options {

 % if type == 'master':
     notify yes;
-    also-notify { ${' '.join([f'{ip};' for ip in slave_ips])} };
-    allow-transfer { ${' '.join([f'{ip};' for ip in slave_ips])} };
+    also-notify { ${' '.join(sorted(f'{ip};' for ip in slave_ips))} };
+    allow-transfer { ${' '.join(sorted(f'{ip};' for ip in slave_ips))} };
 % endif
 };
bundles/bind/files/zones.rfc1918 (new file, 19 lines)
@@ -0,0 +1,19 @@
+zone "10.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "16.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "17.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "18.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "19.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "20.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "21.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "22.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "23.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "24.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "25.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "26.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "27.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "28.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "29.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "30.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "31.172.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "168.192.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
+zone "254.169.in-addr.arpa" { type master; file "/etc/bind/db.empty"; };
@@ -142,3 +142,21 @@ actions['named-checkconf'] = {
         'svc_systemd:bind9:reload',
     ]
 }
+
+# answer queries for private IP addresses with NXDOMAIN instead of forwarding them to the internet
+files['/etc/bind/zones.rfc1918'] = {
+    'needed_by': [
+        'svc_systemd:bind9',
+    ],
+    'triggers': [
+        'svc_systemd:bind9:reload',
+    ],
+}
+files['/etc/bind/db.empty'] = {
+    'needed_by': [
+        'svc_systemd:bind9',
+    ],
+    'triggers': [
+        'svc_systemd:bind9:reload',
+    ],
+}
@@ -3,6 +3,7 @@ from json import dumps
 h = repo.libs.hashable.hashable
 repo.libs.bind.repo = repo

+
 defaults = {
     'apt': {
         'packages': {
@@ -211,7 +212,7 @@ def generate_keys(metadata):
             'token':repo.libs.hmac.hmac_sha512(
                 key,
                 str(repo.vault.random_bytes_as_base64_for(
-                    f"{metadata.get('id')} bind key {key}",
+                    f"{metadata.get('id')} bind key {key} 20250713",
                     length=32,
                 )),
             )
bundles/bootshorn/files/process (new executable file, 165 lines)
@@ -0,0 +1,165 @@
+#!/usr/bin/env python3
+
+import os
+import datetime
+import numpy as np
+import matplotlib.pyplot as plt
+import soundfile as sf
+from scipy.fft import rfft, rfftfreq
+import shutil
+import traceback
+
+
+RECORDINGS_DIR = "recordings"
+PROCESSED_RECORDINGS_DIR = "recordings/processed"
+DETECTIONS_DIR = "events"
+
+DETECT_FREQUENCY = 211 # Hz
+DETECT_FREQUENCY_TOLERANCE = 2 # Hz
+ADJACENCY_FACTOR = 2 # area to look for the frequency (e.g. 2 means 100Hz to 400Hz for 200Hz detection)
+BLOCK_SECONDS = 3 # seconds (longer means more frequency resolution, but less time resolution)
+DETECTION_DISTANCE_SECONDS = 30 # seconds (minimum time between detections)
+BLOCK_OVERLAP_FACTOR = 0.9 # overlap between blocks (0.2 means 20% overlap)
+MIN_SIGNAL_QUALITY = 1000.0 # maximum noise level (relative DB) to consider a detection valid
+PLOT_PADDING_START_SECONDS = 2 # seconds (padding before and after the event in the plot)
+PLOT_PADDING_END_SECONDS = 3 # seconds (padding before and after the event in the plot)
+
+DETECTION_DISTANCE_BLOCKS = DETECTION_DISTANCE_SECONDS // BLOCK_SECONDS # number of blocks to skip after a detection
+DETECT_FREQUENCY_FROM = DETECT_FREQUENCY - DETECT_FREQUENCY_TOLERANCE # Hz
+DETECT_FREQUENCY_TO = DETECT_FREQUENCY + DETECT_FREQUENCY_TOLERANCE # Hz
+
+
+def process_recording(filename):
+    print('processing', filename)
+
+    # get ISO 8601 nanosecond recording date from filename
+    date_string_from_filename = os.path.splitext(filename)[0]
+    recording_date = datetime.datetime.strptime(date_string_from_filename, "%Y-%m-%d_%H-%M-%S.%f%z")
+
+    # get data and metadata from recording
+    path = os.path.join(RECORDINGS_DIR, filename)
+    soundfile = sf.SoundFile(path)
+    samplerate = soundfile.samplerate
+    samples_per_block = int(BLOCK_SECONDS * samplerate)
+    overlapping_samples = int(samples_per_block * BLOCK_OVERLAP_FACTOR)
+
+    sample_num = 0
+    current_event = None
+
+    while sample_num < len(soundfile):
+        soundfile.seek(sample_num)
+        block = soundfile.read(frames=samples_per_block, dtype='float32', always_2d=False)
+
+        if len(block) == 0:
+            break
+
+        # calculate FFT
+        labels = rfftfreq(len(block), d=1/samplerate)
+        complex_amplitudes = rfft(block)
+        amplitudes = np.abs(complex_amplitudes)
+
+        # get the frequency with the highest amplitude within the search range
+        search_amplitudes = amplitudes[(labels >= DETECT_FREQUENCY_FROM/ADJACENCY_FACTOR) & (labels <= DETECT_FREQUENCY_TO*ADJACENCY_FACTOR)]
+        search_labels = labels[(labels >= DETECT_FREQUENCY_FROM/ADJACENCY_FACTOR) & (labels <= DETECT_FREQUENCY_TO*ADJACENCY_FACTOR)]
+        max_amplitude = max(search_amplitudes)
+        max_amplitude_index = np.argmax(search_amplitudes)
+        max_freq = search_labels[max_amplitude_index]
+        max_freq_detected = DETECT_FREQUENCY_FROM <= max_freq <= DETECT_FREQUENCY_TO
+
+        # calculate signal quality
+        adjacent_amplitudes = amplitudes[(labels < DETECT_FREQUENCY_FROM) | (labels > DETECT_FREQUENCY_TO)]
+        signal_quality = max_amplitude/np.mean(adjacent_amplitudes)
+        good_signal_quality = signal_quality > MIN_SIGNAL_QUALITY
+
+        # conclude detection
+        if (
+            max_freq_detected and
+            good_signal_quality
+        ):
+            block_date = recording_date + datetime.timedelta(seconds=sample_num / samplerate)
+
+            # detecting an event
+            if not current_event:
+                current_event = {
+                    'start_at': block_date,
+                    'end_at': block_date,
+                    'start_sample': sample_num,
+                    'end_sample': sample_num + samples_per_block,
+                    'start_freq': max_freq,
+                    'end_freq': max_freq,
+                    'max_amplitude': max_amplitude,
+                }
+            else:
+                current_event.update({
+                    'end_at': block_date,
+                    'end_freq': max_freq,
+                    'end_sample': sample_num + samples_per_block,
+                    'max_amplitude': max(max_amplitude, current_event['max_amplitude']),
+                })
+            print(f'- {block_date.strftime('%Y-%m-%d %H:%M:%S')}: {max_amplitude:.1f}rDB @ {max_freq:.1f}Hz (signal {signal_quality:.3f}x)')
+        else:
+            # not detecting an event
+            if current_event:
+                duration = (current_event['end_at'] - current_event['start_at']).total_seconds()
+                current_event['duration'] = duration
+                print(f'🔊 {current_event['start_at'].strftime('%Y-%m-%d %H:%M:%S')} ({duration:.1f}s): {current_event['start_freq']:.1f}Hz->{current_event['end_freq']:.1f}Hz @{current_event['max_amplitude']:.0f}rDB')
+
+                # read full audio clip again for writing
+                write_event(current_event=current_event, soundfile=soundfile, samplerate=samplerate)
+
+                current_event = None
+                sample_num += DETECTION_DISTANCE_BLOCKS * samples_per_block
+
+        sample_num += samples_per_block - overlapping_samples
+
+    # move to PROCESSED_RECORDINGS_DIR
+
+    os.makedirs(PROCESSED_RECORDINGS_DIR, exist_ok=True)
+    shutil.move(os.path.join(RECORDINGS_DIR, filename), os.path.join(PROCESSED_RECORDINGS_DIR, filename))
+
+
+# write a spectrogram using the sound from start to end of the event
+def write_event(current_event, soundfile, samplerate):
+    # date and filename
+    event_date = current_event['start_at'] - datetime.timedelta(seconds=PLOT_PADDING_START_SECONDS)
+    filename_prefix = event_date.strftime('%Y-%m-%d_%H-%M-%S.%f%z')
+
+    # event clip
+    event_start_sample = current_event['start_sample'] - samplerate * PLOT_PADDING_START_SECONDS
+    event_end_sample = current_event['end_sample'] + samplerate * PLOT_PADDING_END_SECONDS
+    total_samples = event_end_sample - event_start_sample
+    soundfile.seek(event_start_sample)
+    event_clip = soundfile.read(frames=total_samples, dtype='float32', always_2d=False)
+
+    # write flac
+    flac_path = os.path.join(DETECTIONS_DIR, f"{filename_prefix}.flac")
+    sf.write(flac_path, event_clip, samplerate, format='FLAC')
+
+    # write spectrogram
+    plt.figure(figsize=(8, 6))
+    plt.specgram(event_clip, Fs=samplerate, NFFT=samplerate, noverlap=samplerate//2, cmap='inferno', vmin=-100, vmax=-10)
+    plt.title(f"Bootshorn @{event_date.strftime('%Y-%m-%d %H:%M:%S%z')}")
+    plt.xlabel(f"Time {current_event['duration']:.1f}s")
+    plt.ylabel(f"Frequency {current_event['start_freq']:.1f}Hz -> {current_event['end_freq']:.1f}Hz")
+    plt.colorbar(label="Intensity (rDB)")
+    plt.ylim(50, 1000)
+    plt.savefig(os.path.join(DETECTIONS_DIR, f"{filename_prefix}.png"))
+    plt.close()
+
+
+def main():
+    os.makedirs(RECORDINGS_DIR, exist_ok=True)
+    os.makedirs(PROCESSED_RECORDINGS_DIR, exist_ok=True)
+
+    for filename in sorted(os.listdir(RECORDINGS_DIR)):
+        if filename.endswith(".flac"):
+            try:
+                process_recording(filename)
+            except Exception as e:
+                print(f"Error processing {filename}: {e}")
+                # print stacktrace
+                traceback.print_exc()
+
+
+if __name__ == "__main__":
+    main()
bundles/bootshorn/files/record (new executable file, 25 lines)
@@ -0,0 +1,25 @@
+#!/bin/sh
+
+mkdir -p recordings
+
+while true
+do
+    # get date in ISO 8601 format with nanoseconds
+    PROGRAMM=$(test $(uname) = "Darwin" && echo "gdate" || echo "date")
+    DATE=$($PROGRAMM "+%Y-%m-%d_%H-%M-%S.%6N%z")
+
+    # record audio using ffmpeg
+    ffmpeg \
+        -y \
+        -f pulse \
+        -i "alsa_input.usb-HANMUS_USB_AUDIO_24BIT_2I2O_1612310-00.analog-stereo" \
+        -ac 1 \
+        -ar 96000 \
+        -sample_fmt s32 \
+        -t "3600" \
+        -c:a flac \
+        -compression_level 12 \
+        "recordings/current/$DATE.flac"
+
+    mv "recordings/current/$DATE.flac" "recordings/$DATE.flac"
+done
bundles/bootshorn/files/temperature (new executable file, 43 lines)
@@ -0,0 +1,43 @@
+#!/usr/bin/env python3
+
+import requests
+import urllib3
+import datetime
+import csv
+urllib3.disable_warnings()
+import os
+
+
+HUE_IP = "${hue_ip}" # replace with your bridge IP
+HUE_APP_KEY = "${hue_app_key}" # local only
+HUE_DEVICE_ID = "31f58786-3242-4e88-b9ce-23f44ba27bbe"
+TEMPERATURE_LOG_DIR = "/opt/bootshorn/temperatures"
+
+response = requests.get(
+    f"https://{HUE_IP}/clip/v2/resource/temperature",
+    headers={"hue-application-key": HUE_APP_KEY},
+    verify=False,
+)
+response.raise_for_status()
+data = response.json()
+
+for item in data["data"]:
+    if item["id"] == HUE_DEVICE_ID:
+        temperature = item["temperature"]["temperature"]
+        temperature_date_string = item["temperature"]["temperature_report"]["changed"]
+        temperature_date = datetime.datetime.fromisoformat(temperature_date_string).astimezone(datetime.timezone.utc)
+        break
+
+print(f"@{temperature_date}: {temperature}°C")
+
+filename = temperature_date.strftime("%Y-%m-%d_00-00-00.000000%z") + ".log"
+logpath = os.path.join(TEMPERATURE_LOG_DIR, filename)
+now_utc = datetime.datetime.now(datetime.timezone.utc)
+
+with open(logpath, "a+", newline="") as logfile:
+    writer = csv.writer(logfile)
+    writer.writerow([
+        now_utc.strftime('%Y-%m-%d_%H-%M-%S.%f%z'), # current UTC time
+        temperature_date.strftime('%Y-%m-%d_%H-%M-%S.%f%z'), # date of temperature reading
+        temperature,
+    ])
bundles/bootshorn/items.py (new file, 61 lines)
@@ -0,0 +1,61 @@
+# nano /etc/selinux/config
+# SELINUX=disabled
+# reboot
+
+directories = {
+    '/opt/bootshorn': {
+        'owner': 'ckn',
+        'group': 'ckn',
+    },
+    '/opt/bootshorn/temperatures': {
+        'owner': 'ckn',
+        'group': 'ckn',
+    },
+    '/opt/bootshorn/recordings': {
+        'owner': 'ckn',
+        'group': 'ckn',
+    },
+    '/opt/bootshorn/recordings/current': {
+        'owner': 'ckn',
+        'group': 'ckn',
+    },
+    '/opt/bootshorn/recordings/processed': {
+        'owner': 'ckn',
+        'group': 'ckn',
+    },
+    '/opt/bootshorn/events': {
+        'owner': 'ckn',
+        'group': 'ckn',
+    },
+}
+
+files = {
+    '/opt/bootshorn/record': {
+        'owner': 'ckn',
+        'group': 'ckn',
+        'mode': '755',
+    },
+    '/opt/bootshorn/temperature': {
+        'content_type': 'mako',
+        'context': {
+            'hue_ip': repo.get_node('home.hue').hostname,
+            'hue_app_key': repo.vault.decrypt('encrypt$gAAAAABoc2WxZCLbxl-Z4IrSC97CdOeFgBplr9Fp5ujpd0WCCCPNBUY_WquHN86z8hKLq5Y04dwq8TdJW0PMSOSgTFbGgdp_P1q0jOBLEKaW9IIT1YM88h-JYwLf9QGDV_5oEfvnBCtO'),
+        },
+        'owner': 'ckn',
+        'group': 'ckn',
+        'mode': '755',
+    },
+    '/opt/bootshorn/process': {
+        'owner': 'ckn',
+        'group': 'ckn',
+        'mode': '755',
+    },
+}
+
+svc_systemd = {
+    'bootshorn-record.service': {
+        'needs': {
+            'file:/opt/bootshorn/record',
+        },
+    },
+}
bundles/bootshorn/metadata.py (new file, 44 lines)
@@ -0,0 +1,44 @@
+defaults = {
+    'systemd': {
+        'units': {
+            'bootshorn-record.service': {
+                'Unit': {
+                    'Description': 'Bootshorn Recorder',
+                    'After': 'network.target',
+                },
+                'Service': {
+                    'User': 'ckn',
+                    'Group': 'ckn',
+                    'Type': 'simple',
+                    'WorkingDirectory': '/opt/bootshorn',
+                    'ExecStart': '/opt/bootshorn/record',
+                    'Restart': 'always',
+                    'RestartSec': 5,
+                    'Environment': {
+                        "XDG_RUNTIME_DIR": "/run/user/1000",
+                        "PULSE_SERVER": "unix:/run/user/1000/pulse/native",
+                    },
+                },
+            },
+        },
+    },
+    'systemd-timers': {
+        'bootshorn-temperature': {
+            'command': '/opt/bootshorn/temperature',
+            'when': '*:0/10',
+            'working_dir': '/opt/bootshorn',
+            'user': 'ckn',
+            'group': 'ckn',
+        },
+        # 'bootshorn-process': {
+        #     'command': '/opt/bootshorn/process',
+        #     'when': 'hourly',
+        #     'working_dir': '/opt/bootshorn',
+        #     'user': 'ckn',
+        #     'group': 'ckn',
+        #     'after': {
+        #         'bootshorn-process.service',
+        #     },
+        # },
+    },
+}
@@ -8,6 +8,7 @@ defaults = {
         'sources': {
             'crystal': {
                 # https://software.opensuse.org/download.html?project=devel%3Alanguages%3Acrystal&package=crystal
+                # curl -fsSL https://download.opensuse.org/repositories/devel:/languages:/crystal/Debian_Testing/Release.key
                 'urls': {
                     'http://download.opensuse.org/repositories/devel:/languages:/crystal/Debian_Testing/',
                 },
@@ -1,17 +0,0 @@
-connect = host=${host} dbname=${name} user=${user} password=${password}
-driver = pgsql
-default_pass_scheme = ARGON2ID
-
-user_query = SELECT '/var/vmail/%u' AS home, 'vmail' AS uid, 'vmail' AS gid
-
-iterate_query = SELECT CONCAT(users.name, '@', domains.name) AS user \
-    FROM users \
-    LEFT JOIN domains ON users.domain_id = domains.id \
-    WHERE redirect IS NULL
-
-password_query = SELECT CONCAT(users.name, '@', domains.name) AS user, password \
-    FROM users \
-    LEFT JOIN domains ON users.domain_id = domains.id \
-    WHERE redirect IS NULL \
-    AND users.name = SPLIT_PART('%u', '@', 1) \
-    AND domains.name = SPLIT_PART('%u', '@', 2)
@@ -1,13 +1,17 @@
+dovecot_config_version = ${config_version}
+dovecot_storage_version = ${storage_version}
+
 protocols = imap lmtp sieve
 auth_mechanisms = plain login
-mail_privileged_group = mail
 ssl = required
-ssl_cert = </var/lib/dehydrated/certs/${node.metadata.get('mailserver/hostname')}/fullchain.pem
-ssl_key = </var/lib/dehydrated/certs/${node.metadata.get('mailserver/hostname')}/privkey.pem
-ssl_dh = </etc/dovecot/dhparam.pem
+ssl_server_cert_file = /var/lib/dehydrated/certs/${hostname}/fullchain.pem
+ssl_server_key_file = /var/lib/dehydrated/certs/${hostname}/privkey.pem
+ssl_server_dh_file = /etc/dovecot/dhparam.pem
 ssl_client_ca_dir = /etc/ssl/certs
-mail_location = maildir:${node.metadata.get('mailserver/maildir')}/%u:INDEX=${node.metadata.get('mailserver/maildir')}/index/%u
-mail_plugins = fts fts_xapian
+mail_driver = maildir
+mail_path = ${maildir}/%{user}
+mail_index_path = ${maildir}/index/%{user}
+mail_plugins = fts fts_flatcurve

 namespace inbox {
   inbox = yes
@@ -30,14 +34,46 @@ namespace inbox {
   }
 }

-passdb {
-  driver = sql
-  args = /etc/dovecot/dovecot-sql.conf
+# postgres passdb userdb
+sql_driver = pgsql
+
+pgsql main {
+  parameters {
+    host = ${db_host}
+    dbname = ${db_name}
+    user = ${db_user}
+    password = ${db_password}
+  }
 }
-# use sql for userdb too, to enable iterate_query
-userdb {
-  driver = sql
-  args = /etc/dovecot/dovecot-sql.conf
+
+passdb sql {
+  passdb_default_password_scheme = ARGON2ID
+  query = SELECT \
+    CONCAT(users.name, '@', domains.name) AS "user", \
+    password \
+    FROM users \
+    LEFT JOIN domains ON users.domain_id = domains.id \
+    WHERE redirect IS NULL \
+    AND users.name = SPLIT_PART('%{user}', '@', 1) \
+    AND domains.name = SPLIT_PART('%{user}', '@', 2)
+}
+
+mail_uid = vmail
+mail_gid = vmail
+
+userdb sql {
+  query = SELECT \
+    '/var/vmail/%{user}' AS home, \
+    'vmail' AS uid, \
+    'vmail' AS gid
+
+  iterate_query = SELECT \
+    CONCAT(users.name, '@', domains.name) AS username \
+    FROM users \
+    LEFT JOIN domains ON users.domain_id = domains.id \
+    WHERE redirect IS NULL
 }

 service auth {
@@ -67,10 +103,9 @@ service stats {
   }
 }
 service managesieve-login {
-  inet_listener sieve {
-  }
-  process_min_avail = 0
-  service_count = 1
+  #inet_listener sieve {}
+  process_min_avail = 1
+  process_limit = 1
   vsz_limit = 64 M
 }
 service managesieve {
@@ -78,31 +113,53 @@ service managesieve {
 }

 protocol imap {
-  mail_plugins = $mail_plugins imap_sieve
+  mail_plugins = fts fts_flatcurve imap_sieve
   mail_max_userip_connections = 50
   imap_idle_notify_interval = 29 mins
 }
 protocol lmtp {
-  mail_plugins = $mail_plugins sieve
+  mail_plugins = fts fts_flatcurve sieve
 }
-protocol sieve {
-plugin {
-  sieve = /var/vmail/sieve/%u.sieve
-  sieve_storage = /var/vmail/sieve/%u/
-}
+# Personal script (your old file /var/vmail/sieve/%u.sieve)
+sieve_script personal {
+  driver = file
+  # Directory with the user's (possibly multiple) sieve scripts
+  path = /var/vmail/sieve/%{user}/
+  # Active script (previously "sieve = /var/vmail/sieve/%u.sieve")
+  active_path = /var/vmail/sieve/%{user}.sieve
+}
+
+# Global after script (your previous "sieve_after = …")
+sieve_script after {
+  type = after
+  driver = file
+  path = /var/vmail/sieve/global/spam-to-folder.sieve
 }

 # fulltext search
-plugin {
+language en {
-  fts = xapian
-  fts_xapian = partial=3 full=20 verbose=0
-  fts_autoindex = yes
-  fts_enforced = yes
-  # Index attachements
-  fts_decoder = decode2text
 }
+language de {
+  default = yes
+}
+language_tokenizers = generic email-address
+
+fts flatcurve {
+  substring_search = yes
+  # rotate_count = 5000 # rotate the DB after X mails
+  # rotate_time = 5s # or rotate time-based
+  # optimize_limit = 10
+  # min_term_size = 3
+}
+
+fts_autoindex = yes
+fts_decoder_driver = script
+fts_decoder_script_socket_path = decode2text

 service indexer-worker {
-  vsz_limit = ${indexer_ram}
+  process_limit = ${indexer_cores}
+  vsz_limit = ${indexer_ram}M
 }
 service decode2text {
   executable = script /usr/local/libexec/dovecot/decode2text.sh
@@ -112,24 +169,39 @@ service decode2text {
   }
 }

-# spam filter
-plugin {
-  sieve_plugins = sieve_imapsieve sieve_extprograms
-  sieve_dir = /var/vmail/sieve/%u/
-  sieve = /var/vmail/sieve/%u.sieve
-  sieve_pipe_bin_dir = /var/vmail/sieve/bin
-  sieve_extensions = +vnd.dovecot.pipe
-
-  sieve_after = /var/vmail/sieve/global/spam-to-folder.sieve
-
-  # From elsewhere to Spam folder
-  imapsieve_mailbox1_name = Junk
-  imapsieve_mailbox1_causes = COPY
-  imapsieve_mailbox1_before = file:/var/vmail/sieve/global/learn-spam.sieve
-
-  # From Spam folder to elsewhere
-  imapsieve_mailbox2_name = *
-  imapsieve_mailbox2_from = Junk
-  imapsieve_mailbox2_causes = COPY
-  imapsieve_mailbox2_before = file:/var/vmail/sieve/global/learn-ham.sieve
+mailbox Junk {
+  sieve_script learn_spam {
+    driver = file
+    type = before
+    cause = copy
+    path = /var/vmail/sieve/global/learn-spam.sieve
+  }
 }

+imapsieve_from Junk {
+  sieve_script learn_ham {
+    driver = file
+    type = before
+    cause = copy
+    path = /var/vmail/sieve/global/learn-ham.sieve
+  }
+}
+
+# Enable the extprograms plugin
+sieve_plugins {
+  sieve_extprograms = yes
+}
+
+# Which sieve extensions may be used?
+# Recommendation: only allow them globally (not in user scripts):
+sieve_global_extensions {
+  vnd.dovecot.pipe = yes
+  # vnd.dovecot.filter = yes # only if needed
+  # vnd.dovecot.execute = yes # only if needed
+}
+
+# Directory with your scripts/binaries for :pipe
+sieve_pipe_bin_dir = /var/vmail/sieve/bin
+# (optional, analogous for :filter / :execute)
+# sieve_filter_bin_dir = /var/vmail/sieve/filter
+# sieve_execute_bin_dir = /var/vmail/sieve/execute
@@ -44,6 +44,16 @@ files = {
         'context': {
             'admin_email': node.metadata.get('mailserver/admin_email'),
             'indexer_ram': node.metadata.get('dovecot/indexer_ram'),
+            'config_version': node.metadata.get('dovecot/config_version'),
+            'storage_version': node.metadata.get('dovecot/storage_version'),
+            'maildir': node.metadata.get('mailserver/maildir'),
+            'hostname': node.metadata.get('mailserver/hostname'),
+            'db_host': node.metadata.get('mailserver/database/host'),
+            'db_name': node.metadata.get('mailserver/database/name'),
+            'db_user': node.metadata.get('mailserver/database/user'),
+            'db_password': node.metadata.get('mailserver/database/password'),
+            'indexer_cores': node.metadata.get('vm/cores'),
+            'indexer_ram': node.metadata.get('vm/ram')//2,
         },
         'needs': {
             'pkg_apt:'
@@ -52,29 +62,9 @@ files = {
             'svc_systemd:dovecot:restart',
         },
     },
-    '/etc/dovecot/dovecot-sql.conf': {
-        'content_type': 'mako',
-        'context': node.metadata.get('mailserver/database'),
-        'needs': {
-            'pkg_apt:'
-        },
-        'triggers': {
-            'svc_systemd:dovecot:restart',
-        },
-    },
     '/etc/dovecot/dhparam.pem': {
         'content_type': 'any',
     },
-    '/etc/dovecot/dovecot-sql.conf': {
-        'content_type': 'mako',
-        'context': node.metadata.get('mailserver/database'),
-        'needs': {
-            'pkg_apt:'
-        },
-        'triggers': {
-            'svc_systemd:dovecot:restart',
-        },
-    },
     '/var/vmail/sieve/global/spam-to-folder.sieve': {
         'owner': 'vmail',
         'group': 'vmail',
@@ -131,7 +121,6 @@ svc_systemd = {
             'action:letsencrypt_update_certificates',
             'action:dovecot_generate_dhparam',
             'file:/etc/dovecot/dovecot.conf',
-            'file:/etc/dovecot/dovecot-sql.conf',
         },
     },
 }
@@ -8,7 +8,7 @@ defaults = {
             'dovecot-sieve': {},
             'dovecot-managesieved': {},
             # fulltext search
-            'dovecot-fts-xapian': {}, # buster-backports
+            'dovecot-flatcurve': {}, # buster-backports
             'poppler-utils': {}, # pdftotext
             'catdoc': {}, # catdoc, catppt, xls2csv
         },
@@ -40,7 +40,7 @@ ENABLE_OPENID_SIGNUP = false
 [service]
 REGISTER_EMAIL_CONFIRM = true
 ENABLE_NOTIFY_MAIL = true
-DISABLE_REGISTRATION = false
+DISABLE_REGISTRATION = true
 ALLOW_ONLY_EXTERNAL_REGISTRATION = false
 ENABLE_CAPTCHA = false
 REQUIRE_SIGNIN_VIEW = false
@@ -49,7 +49,7 @@ files['/etc/gitea/app.ini'] = {
     ),
     'owner': 'git',
     'mode': '0600',
-    'context': node.metadata['gitea'],
+    'context': node.metadata.get('gitea'),
     'triggers': {
         'svc_systemd:gitea:restart',
     },
@@ -26,9 +26,15 @@ defaults = {
         'config': {
             'server': {
                 'http_port': 8300,
+                'http_addr': '127.0.0.1',
+                'enable_gzip': True,
             },
             'database': {
-                'url': f'postgres://grafana:{postgres_password}@localhost:5432/grafana',
+                'type': 'postgres',
+                'host': '127.0.0.1:5432',
+                'name': 'grafana',
+                'user': 'grafana',
+                'password': postgres_password,
             },
             'remote_cache': {
                 'type': 'redis',
@@ -133,11 +139,13 @@ def dns(metadata):


 @metadata_reactor.provides(
+    'nginx/has_websockets',
     'nginx/vhosts',
 )
 def nginx(metadata):
     return {
         'nginx': {
+            'has_websockets': True,
             'vhosts': {
                 metadata.get('grafana/hostname'): {
                     'content': 'grafana/vhost.conf',
@@ -1,23 +0,0 @@
-https://github.com/home-assistant/supervised-installer?tab=readme-ov-file
-https://github.com/home-assistant/os-agent/tree/main?tab=readme-ov-file#using-home-assistant-supervised-on-debian
-https://docs.docker.com/engine/install/debian/
-
-
-
-
-
-https://www.home-assistant.io/installation/linux#install-home-assistant-supervised
-https://github.com/home-assistant/supervised-installer
-https://github.com/home-assistant/architecture/blob/master/adr/0014-home-assistant-supervised.md
-
-DATA_SHARE=/usr/share/hassio dpkg --force-confdef --force-confold -i homeassistant-supervised.deb
-
-fresh debian
-install ha
-check whether it works
-then apply bw on top
-
-
-https://www.home-assistant.io/integrations/http/#ssl_certificate
-
-`wget "$(curl -L https://api.github.com/repos/home-assistant/supervised-installer/releases/latest | jq -r '.assets[0].browser_download_url')" -O homeassistant-supervised.deb && dpkg -i homeassistant-supervised.deb`
@@ -1,30 +0,0 @@
-from shlex import quote
-
-
-version = node.metadata.get('homeassistant/os_agent_version')
-
-
-directories = {
-    '/usr/share/hassio': {},
-}
-
-actions = {
-    'install_os_agent': {
-        'command': ' && '.join([
-            f'wget -O /tmp/os-agent.deb https://github.com/home-assistant/os-agent/releases/download/{quote(version)}/os-agent_{quote(version)}_linux_aarch64.deb',
-            'DEBIAN_FRONTEND=noninteractive dpkg -i /tmp/os-agent.deb',
-        ]),
-        'unless': f'test "$(apt -qq list os-agent | cut -d" " -f2)" = "{quote(version)}"',
-        'needs': {
-            'pkg_apt:',
-            'zfs_dataset:tank/homeassistant',
-        },
-    },
-    'install_homeassistant_supervised': {
-        'command': 'wget -O /tmp/homeassistant-supervised.deb https://github.com/home-assistant/supervised-installer/releases/latest/download/homeassistant-supervised.deb && apt install /tmp/homeassistant-supervised.deb',
-        'unless': 'apt -qq list homeassistant-supervised | grep -q "installed"',
-        'needs': {
-            'action:install_os_agent',
-        },
-    },
-}
@@ -1,65 +0,0 @@
-defaults = {
-    'apt': {
-        'packages': {
-            # homeassistant-supervised
-            'apparmor': {},
-            'bluez': {},
-            'cifs-utils': {},
-            'curl': {},
-            'dbus': {},
-            'jq': {},
-            'libglib2.0-bin': {},
-            'lsb-release': {},
-            'network-manager': {},
-            'nfs-common': {},
-            'systemd-journal-remote': {},
-            'systemd-resolved': {},
-            'udisks2': {},
-            'wget': {},
-            # docker
-            'docker-ce': {},
-            'docker-ce-cli': {},
-            'containerd.io': {},
-            'docker-buildx-plugin': {},
-            'docker-compose-plugin': {},
-        },
-        'sources': {
-            # docker: https://docs.docker.com/engine/install/debian/#install-using-the-repository
-            'docker': {
-                'urls': {
-                    'https://download.docker.com/linux/debian',
-                },
-                'suites': {
-                    '{codename}',
-                },
-                'components': {
-                    'stable',
-                },
-            },
-        },
-    },
-    'zfs': {
-        'datasets': {
-            'tank/homeassistant': {
-                'mountpoint': '/usr/share/hassio',
-                'needed_by': {
-                    'directory:/usr/share/hassio',
-                },
-            },
-        },
-    },
-}
-
-
-@metadata_reactor.provides(
-    'nginx/vhosts',
-)
-def nginx(metadata):
-    return {
-        'nginx': {
-            'vhosts': {
-                metadata.get('homeassistant/domain'): {
-                    'content': 'homeassistant/vhost.conf',
-                },
-            },
-        },
-    }
@@ -179,6 +179,7 @@ def nginx(metadata):
                     'context': {
                         'php_version': metadata.get('php/version'),
                     },
+                    'check_path': '/icingaweb2/index.php',
                 },
             },
         },
bundles/ifupdown/items.py (new file, 3 lines)
@@ -0,0 +1,3 @@
+# svc_systemd = {
+#     'ifupdown.service': {},
+# }
@@ -15,7 +15,7 @@ svc_systemd = {
         'needs': [
             'pkg_apt:kea-dhcp4-server',
             'file:/etc/kea/kea-dhcp4.conf',
-            'svc_systemd:systemd-networkd:restart',
+            'svc_systemd:systemd-networkd.service:restart',
         ],
     },
 }
@@ -52,13 +52,14 @@ def subnets(metadata):
         if 'mac' in network_conf
     )

-    for network_name, network_conf in metadata.get('network').items():
+    for id, (network_name, network_conf) in enumerate(sorted(metadata.get('network').items())):
         dhcp_server_config = network_conf.get('dhcp_server_config', None)

         if dhcp_server_config:
             _network = ip_network(dhcp_server_config['subnet'])

             subnet4.add(hashable({
+                'id': id + 1,
                 'subnet': dhcp_server_config['subnet'],
                 'pools': [
                     {
@@ -72,7 +73,7 @@ def subnets(metadata):
                     },
                     {
                         'name': 'domain-name-servers',
-                        'data': '10.0.10.2',
+                        'data': '10.0.0.1',
                     },
                 ],
                 'reservations': set(
@@ -1,58 +1 @@
-https://developer.valvesoftware.com/wiki/List_of_L4D2_Cvars
+https://github.com/SirPlease/L4D2-Competitive-Rework/blob/master/Dedicated%20Server%20Install%20Guide/README.md
-
-Dead Center c1m1_hotel
-Dead Center c1m2_streets
-Dead Center c1m3_mall
-Dead Center c1m4_atrium
-Dark Carnival c2m1_highway
-Dark Carnival c2m2_fairgrounds
-Dark Carnival c2m3_coaster
-Dark Carnival c2m4_barns
-Dark Carnival c2m5_concert
-Swamp Fever c3m1_plankcountry
-Swamp Fever c3m2_swamp
-Swamp Fever c3m3_shantytown
-Swamp Fever c3m4_plantation
-Hard Rain c4m1_milltown_a
-Hard Rain c4m2_sugarmill_a
-Hard Rain c4m3_sugarmill_b
-Hard Rain c4m4_milltown_b
-Hard Rain c4m5_milltown_escape
-The Parish c5m1_waterfront_sndscape
-The Parish c5m1_waterfront
-The Parish c5m2_park
-The Parish c5m3_cemetery
-The Parish c5m4_quarter
-The Parish c5m5_bridge
-The Passing c6m1_riverbank
-The Passing c6m2_bedlam
-The Passing c6m3_port
-The Sacrifice c7m1_docks
-The Sacrifice c7m2_barge
-The Sacrifice c7m3_port
-No Mercy c8m1_apartment
-No Mercy c8m2_subway
-No Mercy c8m3_sewers
-No Mercy c8m4_interior
-No Mercy c8m5_rooftop
-Crash Course c9m1_alleys
-Crash Course c9m2_lots
-Death Toll c10m1_caves
-Death Toll c10m2_drainage
-Death Toll c10m3_ranchhouse
-Death Toll c10m4_mainstreet
-Death Toll c10m5_houseboat
-Dead Air c11m1_greenhouse
-Dead Air c11m2_offices
-Dead Air c11m3_garage
-Dead Air c11m4_terminal
-Dead Air c11m5_runway
-Blood Harvest c12m1_hilltop
-Blood Harvest c12m2_traintunnel
-Blood Harvest c12m3_bridge
-Blood Harvest c12m4_barn
-Blood Harvest c12m5_cornfield
-Cold Stream c13m1_alpinecreek
-Cold Stream c13m2_southpinestream
-Cold Stream c13m3_memorialbridge
-Cold Stream c13m4_cutthroatcreek
bundles/left4dead2/files/scripts/helpers (new file, 13 lines)
@@ -0,0 +1,13 @@
+#!/bin/bash
+set -xeuo pipefail
+
+function steam() {
+    # for systemd, so it can terminate the process (for other things sudo would have been enough)
+    setpriv --reuid=steam --regid=steam --init-groups "$@" <&0
+    export HOME=/opt/l4d2/steam
+}
+
+function workshop() {
+    steam mkdir -p "/opt/l4d2/overlays/${overlay}/left4dead2/addons"
+    steam /opt/l4d2/scripts/steam-workshop-download --out "/opt/l4d2/overlays/${overlay}/left4dead2/addons" "$@"
+}
bundles/left4dead2/files/scripts/overlays/competitive_rework (new file, 10 lines)
@@ -0,0 +1,10 @@
+#!/bin/bash
+set -xeuo pipefail
+source /opt/l4d2/scripts/helpers
+overlay=$(basename "$0")
+
+# https://github.com/SirPlease/L4D2-Competitive-Rework
+
+steam mkdir -p /opt/l4d2/overlays/$overlay/left4dead2
+test -d /opt/l4d2/overlays/$overlay/left4dead2/cfg/cfgogl || \
+    curl -L https://github.com/SirPlease/L4D2-Competitive-Rework/archive/refs/heads/master.tar.gz | steam tar -xz --strip-components=1 -C /opt/l4d2/overlays/$overlay/left4dead2
128 bundles/left4dead2/files/scripts/overlays/l4d2center_maps Normal file
@ -0,0 +1,128 @@
#!/bin/bash
set -xeuo pipefail
source /opt/l4d2/scripts/helpers
overlay=$(basename "$0")

steam mkdir -p /opt/l4d2/overlays/$overlay/left4dead2/addons
cd /opt/l4d2/overlays/$overlay/left4dead2/addons

# https://l4d2center.com/maps/servers/l4d2center_maps_sync.sh.txt ->

# Exit immediately if a command exits with a non-zero status.
set -e

# Function to print error messages
error_exit() {
    echo "Error: $1" >&2
    exit 1
}

# Check if the current directory ends with /left4dead2/addons
current_dir=$(pwd)
expected_dir="/left4dead2/addons"

if [[ ! "$current_dir" == *"$expected_dir" ]]; then
    error_exit "Script must be run from your L4D2 \"addons\" folder. Current directory: $current_dir"
fi

# Check for required commands
for cmd in curl md5sum 7z; do
    if ! command -v "$cmd" >/dev/null 2>&1; then
        error_exit "Required command '$cmd' is not installed. Please install it and retry."
    fi
done

# URL of the CSV file
CSV_URL="https://l4d2center.com/maps/servers/index.csv"

# Temporary file to store CSV
TEMP_CSV=$(mktemp)

# Ensure temporary file is removed on exit
trap 'rm -f "$TEMP_CSV"' EXIT

echo "Downloading CSV from $CSV_URL..."
curl -sSL -o "$TEMP_CSV" "$CSV_URL" || error_exit "Failed to download CSV."

declare -A map_md5
declare -A map_links

# Read CSV and populate associative arrays
{
    # Skip the first line (header)
    IFS= read -r header

    while IFS=';' read -r Name Size MD5 DownloadLink || [[ $Name ]]; do
        # Trim whitespace
        Name=$(echo "$Name" | xargs)
        MD5=$(echo "$MD5" | xargs)
        DownloadLink=$(echo "$DownloadLink" | xargs)

        # Populate associative arrays
        map_md5["$Name"]="$MD5"
        map_links["$Name"]="$DownloadLink"
    done
} < "$TEMP_CSV"

# Get list of expected VPK files
expected_vpk=("${!map_md5[@]}")

# Remove VPK files not in expected list or with mismatched MD5
echo "Cleaning up existing VPK files..."
for file in *.vpk; do
    # Check if it's a regular file
    if [[ -f "$file" ]]; then
        if [[ -z "${map_md5["$file"]}" ]]; then
            echo "Removing unexpected file: $file"
            rm -f "$file"
        else
            # Calculate MD5
            echo "Calculating MD5 for existing file: $file..."
            current_md5=$(md5sum "$file" | awk '{print $1}')
            expected_md5="${map_md5["$file"]}"

            if [[ "$current_md5" != "$expected_md5" ]]; then
                echo "MD5 mismatch for $file. Removing."
                rm -f "$file"
            fi
        fi
    fi
done

# Download and extract missing or updated VPK files
echo "Processing required VPK files..."
for vpk in "${expected_vpk[@]}"; do
    if [[ ! -f "$vpk" ]]; then
        echo "Downloading and extracting $vpk..."
        download_url="${map_links["$vpk"]}"

        if [[ -z "$download_url" ]]; then
            echo "No download link found for $vpk. Skipping."
            continue
        fi

        encoded_url=$(echo "$download_url" | sed 's/ /%20/g')

        # Download the .7z file to a temporary location
        TEMP_7Z=$(mktemp --suffix=.7z)
        curl -# -L -o "$TEMP_7Z" "$encoded_url"

        # Check if the download was successful
        if [[ $? -ne 0 ]]; then
            echo "Failed to download $download_url. Skipping."
            rm -f "$TEMP_7Z"
            continue
        fi

        # Extract the .7z file
        7z x -y "$TEMP_7Z" || { echo "Failed to extract $TEMP_7Z. Skipping."; rm -f "$TEMP_7Z"; continue; }

        # Remove the temporary .7z file
        rm -f "$TEMP_7Z"

    else
        echo "$vpk is already up to date."
    fi
done

echo "Synchronization complete."
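For reference, the sync logic above expects the index CSV to have one header row and semicolon-separated `Name;Size;MD5;DownloadLink` fields — that is the field order the `IFS=';' read` pulls apart. A minimal, made-up sample that the loop would accept looks like this (file name, size, hash and URL are placeholders, not real l4d2center data):

```sh
# hypothetical index.csv matching the field order the script reads
cat <<'EOF' > index.csv
Name;Size;MD5;DownloadLink
example_map.vpk;123456789;0123456789abcdef0123456789abcdef;https://example.com/maps/example_map.7z
EOF
```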
25 bundles/left4dead2/files/scripts/overlays/tickrate Normal file
@ -0,0 +1,25 @@
#!/bin/bash
set -xeuo pipefail
source /opt/l4d2/scripts/helpers
overlay=$(basename "$0")

# server config
# https://github.com/SirPlease/L4D2-Competitive-Rework/blob/7ecc3a32a5e2180d6607a40119ff2f3c072502a9/cfg/server.cfg#L58-L69
# https://www.programmersought.com/article/513810199514/
steam mkdir -p /opt/l4d2/overlays/$overlay/left4dead2/cfg
steam cat <<'EOF' > /opt/l4d2/overlays/$overlay/left4dead2/cfg/server.cfg
# https://github.com/SirPlease/L4D2-Competitive-Rework/blob/7ecc3a32a5e2180d6607a40119ff2f3c072502a9/cfg/server.cfg#L58-L69
sv_minrate 100000
sv_maxrate 100000
nb_update_frequency 0.014
net_splitpacket_maxrate 50000
net_maxcleartime 0.0001
fps_max 0
EOF

# install tickrate enabler
steam mkdir -p "/opt/l4d2/overlays/${overlay}/left4dead2/addons"
for file in tickrate_enabler.dll tickrate_enabler.so tickrate_enabler.vdf
do
    curl -L "https://github.com/SirPlease/L4D2-Competitive-Rework/raw/refs/heads/master/addons/${file}" -o "/opt/l4d2/overlays/${overlay}/left4dead2/addons/${file}"
done
12 bundles/left4dead2/files/scripts/overlays/vanilla Normal file
@ -0,0 +1,12 @@
#!/bin/bash
set -xeuo pipefail
source /opt/l4d2/scripts/helpers
overlay=$(basename "$0")

# Ions Vocalizer
workshop -i 698857882

# admin system
workshop --item 2524204971
steam mkdir -p "/opt/l4d2/overlays/${overlay}/left4dead2/ems/admin system"
steam echo "STEAM_1:0:12376499" > "/opt/l4d2/overlays/${overlay}/left4dead2/ems/admin system/admins.txt"
13 bundles/left4dead2/files/scripts/overlays/workshop_maps Normal file
@ -0,0 +1,13 @@
#!/bin/bash
set -xeuo pipefail
source /opt/l4d2/scripts/helpers
overlay=$(basename "$0")

workshop --collection 121115793 # Back To School

workshop --item 2957035482 # hehe30-part1
workshop --item 2973628334 # hehe30-part2
workshop --item 3013844371 # hehe30-part3

workshop --item 3478461158 # 虚伪黎明 (Dawn's Deception)
workshop --item 3478934394 # 虚伪黎明 (Dawn's Deception) PART2
@ -1,40 +1,13 @@
-hostname "CroneKorkN : ${name}"
-sv_contact "admin@sublimity.de"
+// defaults
+hostname ${server_name}

-sv_steamgroup "${','.join(steamgroups)}"

-rcon_password "${rcon_password}"

motd_enabled 0
+rcon_password ${rcon_password}
+sv_steamgroup "38347879"

+mp_autoteambalance 0
+sv_forcepreload 1

-sv_cheats 1
-sv_consistency 0
+// server specific
+% for line in config:
+${line}
+% endfor

-sv_lan 0

-sv_allow_lobby_connect_only 0

-sv_gametypes "coop,realism,survival,versus,teamversus,scavenge,teamscavenge"

-sv_minrate 30000
-sv_maxrate 60000
-sv_mincmdrate 66
-sv_maxcmdrate 101

-sv_logsdir "logs-${name}" //Folder in the game directory where server logs will be stored.
-log on //Creates a logfile (on | off)
-sv_logecho 0 //default 0; Echo log information to the console.
-sv_logfile 1 //default 1; Log server information in the log file.
-sv_log_onefile 0 //default 0; Log server information to only one file.
-sv_logbans 1 //default 0; Log server bans in the server logs.
-sv_logflush 0 //default 0; Flush the log files to disk on each write (slow).
72 bundles/left4dead2/files/setup Normal file
@ -0,0 +1,72 @@
#!/bin/bash

set -xeuo pipefail

# -- DEFINE FUNCTIONS AND VARIABLES -- #

function steam() {
    # for systemd, so it can terminate the process (for other things sudo would have been enough)
    setpriv --reuid=steam --regid=steam --init-groups "$@" <&0
    export HOME=/opt/l4d2/steam
}

# -- PREPARE SYSTEM -- #

getent passwd steam >/dev/null || useradd -M -d /opt/l4d2 -s /bin/bash steam
mkdir -p /opt/l4d2 /tmp/dumps
chown steam:steam /opt/l4d2 /tmp/dumps
dpkg --add-architecture i386
apt update
DEBIAN_FRONTEND=noninteractive apt install -y libc6:i386 lib32z1

# workshop downloader
test -f /opt/l4d2/scripts/steam-workshop-download || \
    steam wget -4 https://git.sublimity.de/cronekorkn/steam-workshop-downloader/raw/branch/master/steam-workshop-download -P /opt/l4d2/scripts
steam chmod +x /opt/l4d2/scripts/steam-workshop-download

# -- STEAM -- #

steam mkdir -p /opt/l4d2/steam
test -f /opt/l4d2/steam/steamcmd_linux.tar.gz || \
    steam wget http://media.steampowered.com/installer/steamcmd_linux.tar.gz -P /opt/l4d2/steam
test -f /opt/l4d2/steam/steamcmd.sh || \
    steam tar -xvzf /opt/l4d2/steam/steamcmd_linux.tar.gz -C /opt/l4d2/steam

# fix for: /opt/l4d2/.steam/sdk32/steamclient.so: cannot open shared object file: No such file or directory
steam mkdir -p /opt/l4d2/steam/.steam # needs to be in steam users home dir
readlink /opt/l4d2/steam/.steam/sdk32 | grep -q ^/opt/l4d2/steam/linux32$ || \
    steam ln -sf /opt/l4d2/steam/linux32 /opt/l4d2/steam/.steam/sdk32
readlink /opt/l4d2/steam/.steam/sdk64 | grep -q ^/opt/l4d2/steam/linux64$ || \
    steam ln -sf /opt/l4d2/steam/linux64 /opt/l4d2/steam/.steam/sdk64

# -- INSTALL -- #

# installing the windows deps first seems to be a workaround for x64?
steam mkdir -p /opt/l4d2/installation
steam /opt/l4d2/steam/steamcmd.sh \
    +force_install_dir /opt/l4d2/installation \
    +login anonymous \
    +@sSteamCmdForcePlatformType windows \
    +app_update 222860 validate \
    +quit
steam /opt/l4d2/steam/steamcmd.sh \
    +force_install_dir /opt/l4d2/installation \
    +login anonymous \
    +@sSteamCmdForcePlatformType linux \
    +app_update 222860 validate \
    +quit

# -- OVERLAYS -- #

for overlay_path in /opt/l4d2/scripts/overlays/*; do
    overlay=$(basename "$overlay_path")
    steam mkdir -p /opt/l4d2/overlays/$overlay
    bash -xeuo pipefail "$overlay_path"
    test -f /opt/l4d2/overlays/$overlay/left4dead2/cfg/server.cfg && \
        steam cp /opt/l4d2/overlays/$overlay/left4dead2/cfg/server.cfg /opt/l4d2/overlays/$overlay/left4dead2/cfg/server_$overlay.cfg
done

# -- SERVERS -- #

#steam rm -rf /opt/l4d2/servers
steam mkdir -p /opt/l4d2/servers
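The left4dead2-initialize.service unit added further down in this changeset runs this setup script as a oneshot via ExecStart=/opt/l4d2/setup, so after deploying the bundle the whole installation can be rebuilt with plain systemctl calls. A sketch, assuming the unit file has already been installed by the bundle:

```sh
# re-run the oneshot installer and follow its output
systemctl restart left4dead2-initialize.service
journalctl -u left4dead2-initialize.service -f
```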
75 bundles/left4dead2/files/start Normal file
@ -0,0 +1,75 @@
#!/bin/bash

set -xeuo pipefail

name=""
port=""
configfile=""
overlays=""
arguments=""

while [[ $# -gt 0 ]]; do
    case "$1" in
        -n|--name)
            name="$2"; shift 2
            ;;
        -p|--port)
            port="$2"; shift 2
            ;;
        -c|--config)
            configfile="$2"; shift 2
            ;;
        -o|--overlay)
            overlays="/opt/l4d2/overlays/$2:$overlays"; shift 2
            ;;
        --)
            shift
            arguments+="$@"
            break
            ;;
        *)
            echo "ERROR: unknown argument $1"; exit 1
            ;;
    esac
done

[[ -n "${name}" ]] || { echo "ERROR: -n/--name missing"; exit 1; }
[[ -n "${port}" ]] || { echo "ERROR: -p/--port missing"; exit 1; }

# -- HELPER FUNCTIONS -- #

function steam() {
    # for systemd, so it can terminate the process
    setpriv --reuid=steam --regid=steam --init-groups "$@"
    export HOME=/opt/l4d2/steam
}

# -- TIDY UP -- #

mountpoint -q "/opt/l4d2/servers/$name/merged" && umount "/opt/l4d2/servers/$name/merged"
steam rm -rf "/opt/l4d2/servers/$name"

# -- CREATE DIRECTORIES -- #

steam mkdir -p \
    "/opt/l4d2/servers/$name" \
    "/opt/l4d2/servers/$name/work" \
    "/opt/l4d2/servers/$name/upper" \
    "/opt/l4d2/servers/$name/merged"

# -- MOUNT OVERLAYFS -- #

mount -t overlay overlay \
    -o "lowerdir=$overlays/opt/l4d2/installation,upperdir=/opt/l4d2/servers/$name/upper,workdir=/opt/l4d2/servers/$name/work" \
    "/opt/l4d2/servers/$name/merged"

# -- REPLACE SERVER.CFG -- #

if [[ -n "$configfile" ]]; then
    cp "$configfile" "/opt/l4d2/servers/$name/merged/left4dead2/cfg/server.cfg"
    chown steam:steam "/opt/l4d2/servers/$name/merged/left4dead2/cfg/server.cfg"
fi

# -- RUN L4D2 -- #

steam "/opt/l4d2/servers/$name/merged/srcds_run" -norestart -pidfile "/opt/l4d2/servers/$name/pid" -game left4dead2 -ip 0.0.0.0 -port "$port" +hostname "Crone_$name" +map c1m1_hotel $arguments
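Putting the argument parser together: -n and -p are mandatory, -o may be repeated (each overlay path is prepended to the lowerdir list, so the last -o given ends up with the highest overlayfs precedence), and everything after -- is handed straight to srcds_run. A hypothetical manual invocation, with the server name, port, config path and extra argument chosen for illustration only:

```sh
/opt/l4d2/start -n pve1 -p 27015 -c /opt/l4d2/configs/pve1.cfg -o vanilla -- -tickrate 100
```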
19 bundles/left4dead2/files/stop Normal file
@ -0,0 +1,19 @@
#!/bin/bash

set -xeuo pipefail

name=""

while [[ $# -gt 0 ]]; do
    case "$1" in
        -n|--name)
            name="$2"; shift 2
            ;;
        *)
            echo "ERROR: unknown argument $1"; exit 1
            ;;
    esac
done

mountpoint -q "/opt/l4d2/servers/$name/merged" && umount "/opt/l4d2/servers/$name/merged"
steam rm -rf "/opt/l4d2/servers/$name"
@ -1,122 +1,104 @@
-assert node.has_bundle('steam') and node.has_bundle('steam-workshop-download')
+users = {
+    'steam': {
+        'home': '/opt/l4d2/steam',
+        'shell': '/bin/bash',
+    },
+}

directories = {
-    '/opt/steam/left4dead2-servers': {
-        'owner': 'steam',
-        'group': 'steam',
-        'mode': '0755',
+    '/opt/l4d2': {
+        'owner': 'steam', 'group': 'steam',
+    },
+    '/opt/l4d2/steam': {
+        'owner': 'steam', 'group': 'steam',
+    },
+    '/opt/l4d2/configs': {
+        'owner': 'steam', 'group': 'steam',
        'purge': True,
    },
-    # Current zfs doesnt support zfs upperdir. The support was added in October 2022. Move upperdir - unused anyway -
-    # to another dir. Also move workdir alongside it, as it has to be on same fs.
-    '/opt/steam-zfs-overlay-workarounds': {
-        'owner': 'steam',
-        'group': 'steam',
-        'mode': '0755',
+    '/opt/l4d2/scripts': {
+        'owner': 'steam', 'group': 'steam',
+    },
+    '/opt/l4d2/scripts/overlays': {
+        'owner': 'steam', 'group': 'steam',
        'purge': True,
    },
}

-# /opt/steam/steam/.steam/sdk32/steamclient.so: cannot open shared object file: No such file or directory
-symlinks = {
-    '/opt/steam/steam/.steam/sdk32': {
-        'target': '/opt/steam/steam/linux32',
-        'owner': 'steam',
-        'group': 'steam',
-    }
+files = {
+    '/opt/l4d2/setup': {
+        'mode': '755',
+        'triggers': {
+            'svc_systemd:left4dead2-initialize.service:restart',
+        },
+    },
+    '/opt/l4d2/start': {
+        'mode': '755',
+        'triggers': {
+            f'svc_systemd:left4dead2-{server_name}.service:restart'
+            for server_name in node.metadata.get('left4dead2/servers').keys()
+        },
+    },
+    '/opt/l4d2/stop': {
+        'mode': '755',
+        'triggers': {
+            f'svc_systemd:left4dead2-{server_name}.service:restart'
+            for server_name in node.metadata.get('left4dead2/servers').keys()
+        },
+    },
+    '/opt/l4d2/scripts/helpers': {
+        'source': 'scripts/helpers',
+        'mode': '755',
+        'triggers': {
+            'svc_systemd:left4dead2-initialize.service:restart',
+        },
+    },
}

-#
-# SERVERS
-#
-
-for name, config in node.metadata.get('left4dead2/servers').items():
-
-    #overlay
-    directories[f'/opt/steam/left4dead2-servers/{name}'] = {
-        'owner': 'steam',
-        'group': 'steam',
-    }
-    directories[f'/opt/steam-zfs-overlay-workarounds/{name}/upper'] = {
-        'owner': 'steam',
-        'group': 'steam',
-    }
-    directories[f'/opt/steam-zfs-overlay-workarounds/{name}/workdir'] = {
-        'owner': 'steam',
-        'group': 'steam',
+for overlay in node.metadata.get('left4dead2/overlays'):
+    files[f'/opt/l4d2/scripts/overlays/{overlay}'] = {
+        'source': f'scripts/overlays/{overlay}',
+        'mode': '755',
+        'triggers': {
+            'svc_systemd:left4dead2-initialize.service:restart',
+        },
    }

-    # conf
-    files[f'/opt/steam/left4dead2-servers/{name}/left4dead2/cfg/server.cfg'] = {
-        'content_type': 'mako',
+svc_systemd = {
+    'left4dead2-initialize.service': {
+        'enabled': True,
+        'running': None,
+        'needs': {
+            'file:/opt/l4d2/setup',
+            'file:/usr/local/lib/systemd/system/left4dead2-initialize.service',
+        },
+    },
+}
+
+for server_name, config in node.metadata.get('left4dead2/servers').items():
+    files[f'/opt/l4d2/configs/{server_name}.cfg'] = {
        'source': 'server.cfg',
+        'content_type': 'mako',
        'context': {
-            'name': name,
-            'steamgroups': node.metadata.get('left4dead2/steamgroups'),
-            'rcon_password': config['rcon_password'],
+            'server_name': server_name,
+            'rcon_password': repo.vault.decrypt('encrypt$gAAAAABpAdZhxwJ47I1AXotuZmBvyZP1ecVTt9IXFkLI28JiVS74LKs9QdgIBz-FC-iXtIHHh_GVGxxKQZprn4UrXZcvZ57kCKxfHBs3cE2JiGnbWE8_mfs=').value,
+            'config': config.get('config', []),
        },
        'owner': 'steam',
-        'group': 'steam',
-        'triggers': [
-            f'svc_systemd:left4dead2-{name}.service:restart',
-        ],
+        'mode': '644',
+        'triggers': {
+            f'svc_systemd:left4dead2-{server_name}.service:restart',
+        },
    }

-    # service
-    svc_systemd[f'left4dead2-{name}.service'] = {
-        'needs': [
-            f'file:/opt/steam/left4dead2-servers/{name}/left4dead2/cfg/server.cfg',
-            f'file:/usr/local/lib/systemd/system/left4dead2-{name}.service',
-        ],
-    }
-
-    #
-    # ADDONS
-    #
-
-    # base
-    files[f'/opt/steam/left4dead2-servers/{name}/left4dead2/addons/readme.txt'] = {
-        'content_type': 'any',
-        'owner': 'steam',
-        'group': 'steam',
-    }
-    directories[f'/opt/steam/left4dead2-servers/{name}/left4dead2/addons'] = {
-        'owner': 'steam',
-        'group': 'steam',
-        'purge': True,
-        'triggers': [
-            f'svc_systemd:left4dead2-{name}.service:restart',
-        ],
-    }
-    for id in [
-        *config.get('workshop', []),
-        *node.metadata.get('left4dead2/workshop'),
-    ]:
-        files[f'/opt/steam/left4dead2-servers/{name}/left4dead2/addons/{id}.vpk'] = {
-            'content_type': 'any',
-            'owner': 'steam',
-            'group': 'steam',
-            'triggers': [
-                f'svc_systemd:left4dead2-{name}.service:restart',
-            ],
-        }
-
-    # admin system
-
-    directories[f'/opt/steam/left4dead2-servers/{name}/left4dead2/ems/admin system'] = {
-        'owner': 'steam',
-        'group': 'steam',
-        'mode': '0755',
-        'triggers': [
-            f'svc_systemd:left4dead2-{name}.service:restart',
-        ],
-    }
-    files[f'/opt/steam/left4dead2-servers/{name}/left4dead2/ems/admin system/admins.txt'] = {
-        'owner': 'steam',
-        'group': 'steam',
-        'mode': '0755',
-        'content': '\n'.join(sorted(node.metadata.get('left4dead2/admins'))),
-        'triggers': [
-            f'svc_systemd:left4dead2-{name}.service:restart',
-        ],
+    svc_systemd[f'left4dead2-{server_name}.service'] = {
+        'enabled': True,
+        'running': True,
+        'tags': {
+            'left4dead2-servers',
+        },
+        'needs': {
+            'svc_systemd:left4dead2-initialize.service',
+            f'file:/usr/local/lib/systemd/system/left4dead2-{server_name}.service',
+        },
    }
@ -1,110 +1,104 @@
-assert node.has_bundle('steam')
+from re import match
+from os import path, listdir

-from shlex import quote

defaults = {
-    'steam': {
-        'games': {
-            'left4dead2': 222860,
+    'apt': {
+        'packages': {
+            'libc6_i386': {}, # installs libc6:i386
+            'lib32z1': {},
+            'unzip': {},
+            'p7zip-full': {}, # l4d2center_maps_sync.sh
        },
    },
    'left4dead2': {
-        'servers': {},
-        'admins': set(),
-        'workshop': set(),
+        'overlays': set(listdir(path.join(repo.path, 'bundles/left4dead2/files/scripts/overlays'))),
+        'servers': {
+            # 'port': 27017,
+            # 'overlays': ['competitive_rework'],
+            # 'arguments': ['-tickrate 60'],
+            # 'config': [
+            #     'exec server_original.cfg',
+            #     'sm_forcematch zonemod',
+            # ],
+        },
+    },
+    'nftables': {
+        'input': {
+            'udp dport { 27005, 27020 } accept',
+        },
+    },
+    'systemd': {
+        'units': {
+            'left4dead2-initialize.service': {
+                'Unit': {
+                    'Description': 'initialize left4dead2',
+                    'After': 'network-online.target',
+                },
+                'Service': {
+                    'Type': 'oneshot',
+                    'RemainAfterExit': 'yes',
+                    'ExecStart': '/opt/l4d2/setup',
+                    'StandardOutput': 'journal',
+                    'StandardError': 'journal',
+                },
+                'Install': {
+                    'WantedBy': {'multi-user.target'},
+                },
+            },
+        },
    },
}


@metadata_reactor.provides(
-    'left4dead2/servers',
-)
-def rconn_password(metadata):
-    # only works from localhost!
-    return {
-        'left4dead2': {
-            'servers': {
-                server: {
-                    'rcon_password': repo.vault.password_for(f'{node.name} left4dead2 {server} rcon', length=24),
-                }
-                for server in metadata.get('left4dead2/servers')
-            },
-        },
-    }
-
-
-@metadata_reactor.provides(
-    'steam-workshop-download',
    'systemd/units',
)
def server_units(metadata):
    units = {}
-    workshop = {}

    for name, config in metadata.get('left4dead2/servers').items():
-        # mount overlay
-        mountpoint = f'/opt/steam/left4dead2-servers/{name}'
-        mount_unit_name = mountpoint[1:].replace('-', '\\x2d').replace('/', '-') + '.mount'
-        units[mount_unit_name] = {
-            'Unit': {
-                'Description': f"Mount left4dead2 server {name} overlay",
-                'Conflicts': {'umount.target'},
-                'Before': {'umount.target'},
-            },
-            'Mount': {
-                'What': 'overlay',
-                'Where': mountpoint,
-                'Type': 'overlay',
-                'Options': ','.join([
-                    'auto',
-                    'lowerdir=/opt/steam/left4dead2',
-                    f'upperdir=/opt/steam-zfs-overlay-workarounds/{name}/upper',
-                    f'workdir=/opt/steam-zfs-overlay-workarounds/{name}/workdir',
-                ]),
-            },
-            'Install': {
-                'RequiredBy': {
-                    f'left4dead2-{name}.service',
-                },
-            },
-        }
+        assert match(r'^[A-z0-9-_-]+$', name)
+        assert 27000 <= config["port"] <= 27100
+        for overlay in config.get('overlays', []):
+            assert overlay in metadata.get('left4dead2/overlays'), f"unknown overlay {overlay}, known: {metadata.get('left4dead2/overlays')}"

-        # individual workshop
-        workshop_ids = config.get('workshop', set()) | metadata.get('left4dead2/workshop', set())
-        if workshop_ids:
-            workshop[f'left4dead2-{name}'] = {
-                'ids': workshop_ids,
-                'path': f'/opt/steam/left4dead2-servers/{name}/left4dead2/addons',
-                'user': 'steam',
-                'requires': {
-                    mount_unit_name,
-                },
-                'required_by': {
-                    f'left4dead2-{name}.service',
-                },
-            }
+        cmd = f'/opt/l4d2/start -n {name} -p {config["port"]}'
+
+        if 'config' in config:
+            cmd += f' -c /opt/l4d2/configs/{name}.cfg'
+
+        for overlay in config.get('overlays', []):
+            cmd += f' -o {overlay}'
+
+        if 'arguments' in config:
+            cmd += ' -- ' + ' '.join(config['arguments'])

-        # left4dead2 server unit
        units[f'left4dead2-{name}.service'] = {
            'Unit': {
                'Description': f'left4dead2 server {name}',
-                'After': {'steam-update.service'},
-                'Requires': {'steam-update.service'},
+                'After': {'left4dead2-initialize.service'},
+                'Requires': {'left4dead2-initialize.service'},
            },
            'Service': {
-                'User': 'steam',
-                'Group': 'steam',
-                'WorkingDirectory': f'/opt/steam/left4dead2-servers/{name}',
-                'ExecStart': f'/opt/steam/left4dead2-servers/{name}/srcds_run -port {config["port"]} +exec server.cfg',
+                'Type': 'simple',
+                'ExecStart': cmd,
+                'ExecStopPost': f'/opt/l4d2/stop -n {name}',
                'Restart': 'on-failure',
+                'Nice': -10,
+                'CPUWeight': 200,
+                'IOSchedulingClass': 'best-effort',
+                'IOSchedulingPriority': 0,
            },
            'Install': {
                'WantedBy': {'multi-user.target'},
            },
+            'triggers': {
+                f'svc_systemd:left4dead2-{name}.service:restart',
+            },
        }

    return {
-        'steam-workshop-download': workshop,
        'systemd': {
            'units': units,
        },
@ -114,14 +108,13 @@ def server_units(metadata):
@metadata_reactor.provides(
    'nftables/input',
)
-def firewall(metadata):
-    ports = set(str(server['port']) for server in metadata.get('left4dead2/servers').values())
+def nftables(metadata):
+    ports = sorted(str(config["port"]) for config in metadata.get('left4dead2/servers').values())

    return {
        'nftables': {
            'input': {
-                f"tcp dport {{ {', '.join(sorted(ports))} }} accept",
-                f"udp dport {{ {', '.join(sorted(ports))} }} accept",
+                f'ip protocol {{ tcp, udp }} th dport {{ {", ".join(ports)} }} accept'
            },
        },
    }
58 bundles/left4dead2_old/README.md Normal file
@ -0,0 +1,58 @@
https://developer.valvesoftware.com/wiki/List_of_L4D2_Cvars

Dead Center c1m1_hotel
Dead Center c1m2_streets
Dead Center c1m3_mall
Dead Center c1m4_atrium
Dark Carnival c2m1_highway
Dark Carnival c2m2_fairgrounds
Dark Carnival c2m3_coaster
Dark Carnival c2m4_barns
Dark Carnival c2m5_concert
Swamp Fever c3m1_plankcountry
Swamp Fever c3m2_swamp
Swamp Fever c3m3_shantytown
Swamp Fever c3m4_plantation
Hard Rain c4m1_milltown_a
Hard Rain c4m2_sugarmill_a
Hard Rain c4m3_sugarmill_b
Hard Rain c4m4_milltown_b
Hard Rain c4m5_milltown_escape
The Parish c5m1_waterfront_sndscape
The Parish c5m1_waterfront
The Parish c5m2_park
The Parish c5m3_cemetery
The Parish c5m4_quarter
The Parish c5m5_bridge
The Passing c6m1_riverbank
The Passing c6m2_bedlam
The Passing c6m3_port
The Sacrifice c7m1_docks
The Sacrifice c7m2_barge
The Sacrifice c7m3_port
No Mercy c8m1_apartment
No Mercy c8m2_subway
No Mercy c8m3_sewers
No Mercy c8m4_interior
No Mercy c8m5_rooftop
Crash Course c9m1_alleys
Crash Course c9m2_lots
Death Toll c10m1_caves
Death Toll c10m2_drainage
Death Toll c10m3_ranchhouse
Death Toll c10m4_mainstreet
Death Toll c10m5_houseboat
Dead Air c11m1_greenhouse
Dead Air c11m2_offices
Dead Air c11m3_garage
Dead Air c11m4_terminal
Dead Air c11m5_runway
Blood Harvest c12m1_hilltop
Blood Harvest c12m2_traintunnel
Blood Harvest c12m3_bridge
Blood Harvest c12m4_barn
Blood Harvest c12m5_cornfield
Cold Stream c13m1_alpinecreek
Cold Stream c13m2_southpinestream
Cold Stream c13m3_memorialbridge
Cold Stream c13m4_cutthroatcreek
40 bundles/left4dead2_old/files/server.cfg Normal file
@ -0,0 +1,40 @@
hostname "CroneKorkN : ${name}"
sv_contact "admin@sublimity.de"


sv_steamgroup "${','.join(steamgroups)}"

rcon_password "${rcon_password}"


motd_enabled 0


sv_cheats 1


sv_consistency 0


sv_lan 0


sv_allow_lobby_connect_only 0


sv_gametypes "coop,realism,survival,versus,teamversus,scavenge,teamscavenge"


sv_minrate 30000
sv_maxrate 60000
sv_mincmdrate 66
sv_maxcmdrate 101


sv_logsdir "logs-${name}" //Folder in the game directory where server logs will be stored.
log on //Creates a logfile (on | off)
sv_logecho 0 //default 0; Echo log information to the console.
sv_logfile 1 //default 1; Log server information in the log file.
sv_log_onefile 0 //default 0; Log server information to only one file.
sv_logbans 1 //default 0; Log server bans in the server logs.
sv_logflush 0 //default 0; Flush the log files to disk on each write (slow).
122 bundles/left4dead2_old/items.py Normal file
@ -0,0 +1,122 @@
assert node.has_bundle('steam') and node.has_bundle('steam-workshop-download')

directories = {
    '/opt/steam/left4dead2-servers': {
        'owner': 'steam',
        'group': 'steam',
        'mode': '0755',
        'purge': True,
    },
    # Current zfs doesnt support zfs upperdir. The support was added in October 2022. Move upperdir - unused anyway -
    # to another dir. Also move workdir alongside it, as it has to be on same fs.
    '/opt/steam-zfs-overlay-workarounds': {
        'owner': 'steam',
        'group': 'steam',
        'mode': '0755',
        'purge': True,
    },
}

# /opt/steam/steam/.steam/sdk32/steamclient.so: cannot open shared object file: No such file or directory
symlinks = {
    '/opt/steam/steam/.steam/sdk32': {
        'target': '/opt/steam/steam/linux32',
        'owner': 'steam',
        'group': 'steam',
    }
}

#
# SERVERS
#

for name, config in node.metadata.get('left4dead2/servers').items():

    #overlay
    directories[f'/opt/steam/left4dead2-servers/{name}'] = {
        'owner': 'steam',
        'group': 'steam',
    }
    directories[f'/opt/steam-zfs-overlay-workarounds/{name}/upper'] = {
        'owner': 'steam',
        'group': 'steam',
    }
    directories[f'/opt/steam-zfs-overlay-workarounds/{name}/workdir'] = {
        'owner': 'steam',
        'group': 'steam',
    }

    # conf
    files[f'/opt/steam/left4dead2-servers/{name}/left4dead2/cfg/server.cfg'] = {
        'content_type': 'mako',
        'source': 'server.cfg',
        'context': {
            'name': name,
            'steamgroups': node.metadata.get('left4dead2/steamgroups'),
            'rcon_password': config['rcon_password'],
        },
        'owner': 'steam',
        'group': 'steam',
        'triggers': [
            f'svc_systemd:left4dead2-{name}.service:restart',
        ],
    }

    # service
    svc_systemd[f'left4dead2-{name}.service'] = {
        'needs': [
            f'file:/opt/steam/left4dead2-servers/{name}/left4dead2/cfg/server.cfg',
            f'file:/usr/local/lib/systemd/system/left4dead2-{name}.service',
        ],
    }

    #
    # ADDONS
    #

    # base
    files[f'/opt/steam/left4dead2-servers/{name}/left4dead2/addons/readme.txt'] = {
        'content_type': 'any',
        'owner': 'steam',
        'group': 'steam',
    }
    directories[f'/opt/steam/left4dead2-servers/{name}/left4dead2/addons'] = {
        'owner': 'steam',
        'group': 'steam',
        'purge': True,
        'triggers': [
            f'svc_systemd:left4dead2-{name}.service:restart',
        ],
    }
    for id in [
        *config.get('workshop', []),
        *node.metadata.get('left4dead2/workshop'),
    ]:
        files[f'/opt/steam/left4dead2-servers/{name}/left4dead2/addons/{id}.vpk'] = {
            'content_type': 'any',
            'owner': 'steam',
            'group': 'steam',
            'triggers': [
                f'svc_systemd:left4dead2-{name}.service:restart',
            ],
        }

    # admin system

    directories[f'/opt/steam/left4dead2-servers/{name}/left4dead2/ems/admin system'] = {
        'owner': 'steam',
        'group': 'steam',
        'mode': '0755',
        'triggers': [
            f'svc_systemd:left4dead2-{name}.service:restart',
        ],
    }
    files[f'/opt/steam/left4dead2-servers/{name}/left4dead2/ems/admin system/admins.txt'] = {
        'owner': 'steam',
        'group': 'steam',
        'mode': '0755',
        'content': '\n'.join(sorted(node.metadata.get('left4dead2/admins'))),
        'triggers': [
            f'svc_systemd:left4dead2-{name}.service:restart',
        ],
    }
127 bundles/left4dead2_old/metadata.py Normal file
@ -0,0 +1,127 @@
assert node.has_bundle('steam')

from shlex import quote

defaults = {
    'steam': {
        'games': {
            'left4dead2': 222860,
        },
    },
    'left4dead2': {
        'servers': {},
        'admins': set(),
        'workshop': set(),
    },
}


@metadata_reactor.provides(
    'left4dead2/servers',
)
def rconn_password(metadata):
    # only works from localhost!
    return {
        'left4dead2': {
            'servers': {
                server: {
                    'rcon_password': repo.vault.password_for(f'{node.name} left4dead2 {server} rcon', length=24),
                }
                for server in metadata.get('left4dead2/servers')
            },
        },
    }


@metadata_reactor.provides(
    'steam-workshop-download',
    'systemd/units',
)
def server_units(metadata):
    units = {}
    workshop = {}

    for name, config in metadata.get('left4dead2/servers').items():
        # mount overlay
        mountpoint = f'/opt/steam/left4dead2-servers/{name}'
        mount_unit_name = mountpoint[1:].replace('-', '\\x2d').replace('/', '-') + '.mount'
        units[mount_unit_name] = {
            'Unit': {
                'Description': f"Mount left4dead2 server {name} overlay",
                'Conflicts': {'umount.target'},
                'Before': {'umount.target'},
            },
            'Mount': {
                'What': 'overlay',
                'Where': mountpoint,
                'Type': 'overlay',
                'Options': ','.join([
                    'auto',
                    'lowerdir=/opt/steam/left4dead2',
                    f'upperdir=/opt/steam-zfs-overlay-workarounds/{name}/upper',
                    f'workdir=/opt/steam-zfs-overlay-workarounds/{name}/workdir',
                ]),
            },
            'Install': {
                'RequiredBy': {
                    f'left4dead2-{name}.service',
                },
            },
        }

        # individual workshop
        workshop_ids = config.get('workshop', set()) | metadata.get('left4dead2/workshop', set())
        if workshop_ids:
            workshop[f'left4dead2-{name}'] = {
                'ids': workshop_ids,
                'path': f'/opt/steam/left4dead2-servers/{name}/left4dead2/addons',
                'user': 'steam',
                'requires': {
                    mount_unit_name,
                },
                'required_by': {
                    f'left4dead2-{name}.service',
                },
            }

        # left4dead2 server unit
        units[f'left4dead2-{name}.service'] = {
            'Unit': {
                'Description': f'left4dead2 server {name}',
                'After': {'steam-update.service'},
                'Requires': {'steam-update.service'},
            },
            'Service': {
                'User': 'steam',
                'Group': 'steam',
                'WorkingDirectory': f'/opt/steam/left4dead2-servers/{name}',
                'ExecStart': f'/opt/steam/left4dead2-servers/{name}/srcds_run -port {config["port"]} +exec server.cfg',
                'Restart': 'on-failure',
            },
            'Install': {
                'WantedBy': {'multi-user.target'},
            },
        }

    return {
        'steam-workshop-download': workshop,
        'systemd': {
            'units': units,
        },
    }


@metadata_reactor.provides(
    'nftables/input',
)
def firewall(metadata):
    ports = set(str(server['port']) for server in metadata.get('left4dead2/servers').values())

    return {
        'nftables': {
            'input': {
                f"tcp dport {{ {', '.join(sorted(ports))} }} accept",
                f"udp dport {{ {', '.join(sorted(ports))} }} accept",
            },
        },
    }
97 bundles/left4dead2_old2/README.md Normal file
@ -0,0 +1,97 @@
# https://github.com/SirPlease/L4D2-Competitive-Rework/blob/master/Dedicated%20Server%20Install%20Guide/README.md

getent passwd steam >/dev/null || useradd -M -d /opt/l4d2 -s /bin/bash steam
mkdir -p /opt/l4d2 /tmp/dumps
chown steam:steam /opt/l4d2 /tmp/dumps
dpkg --add-architecture i386
apt update
DEBIAN_FRONTEND=noninteractive apt install -y libc6:i386 lib32z1

function steam() { sudo -Hiu steam $* }

# -- STEAM -- #

steam mkdir -p /opt/l4d2/steam
test -f /opt/l4d2/steam/steamcmd_linux.tar.gz || \
    steam wget http://media.steampowered.com/installer/steamcmd_linux.tar.gz -P /opt/l4d2/steam
test -f /opt/l4d2/steam/steamcmd.sh || \
    steam tar -xvzf /opt/l4d2/steam/steamcmd_linux.tar.gz -C /opt/l4d2/steam

# fix: /opt/l4d2/.steam/sdk32/steamclient.so: cannot open shared object file: No such file or directory
steam mkdir -p /opt/l4d2/steam/.steam
test -f /opt/l4d2/steam/.steam/sdk32/steamclient.so || \
    steam ln -s /opt/l4d2/steam/linux32 /opt/l4d2/steam/.steam/sdk32

# -- INSTALL -- #

# installing the windows deps first seems to be a workaround for x64?
steam mkdir -p /opt/l4d2/installation
steam /opt/l4d2/steam/steamcmd.sh \
    +force_install_dir /opt/l4d2/installation \
    +login anonymous \
    +@sSteamCmdForcePlatformType windows \
    +app_update 222860 validate \
    +quit
steam /opt/l4d2/steam/steamcmd.sh \
    +force_install_dir /opt/l4d2/installation \
    +login anonymous \
    +@sSteamCmdForcePlatformType linux \
    +app_update 222860 validate \
    +quit

# -- OVERLAYS -- #

steam mkdir -p /opt/l4d2/overlays

# workshop downloader
steam wget -4 https://git.sublimity.de/cronekorkn/steam-workshop-downloader/raw/branch/master/steam-workshop-download -P /opt/l4d2
steam chmod +x /opt/l4d2/steam-workshop-download

# -- OVERLAY PVE -- #

steam mkdir -p /opt/l4d2/overlays/pve

# admin system
steam mkdir -p /opt/l4d2/overlays/pve/left4dead2/addons
steam /opt/l4d2/steam-workshop-download 2524204971 --out /opt/l4d2/overlays/pve/left4dead2/addons
steam mkdir -p "/opt/l4d2/overlays/pve/left4dead2/ems/admin system"
echo "STEAM_1:0:12376499" | steam tee "/opt/l4d2/overlays/pve/left4dead2/ems/admin system/admins.txt"

# ions vocalizer
steam /opt/l4d2/steam-workshop-download 698857882 --out /opt/l4d2/overlays/pve/left4dead2/addons

# -- OVERLAY ZONEMOD -- #

true

# -- SERVERS -- #

steam mkdir -p /opt/l4d2/servers

# -- SERVER PVE1 -- #

steam mkdir -p \
    /opt/l4d2/servers/pve1 \
    /opt/l4d2/servers/pve1/work \
    /opt/l4d2/servers/pve1/upper \
    /opt/l4d2/servers/pve1/merged

mount -t overlay overlay \
    -o lowerdir=/opt/l4d2/overlays/pve:/opt/l4d2/installation,upperdir=/opt/l4d2/servers/pve1/upper,workdir=/opt/l4d2/servers/pve1/work \
    /opt/l4d2/servers/pve1/merged

# run server
steam cat <<'EOF' > /opt/l4d2/servers/pve1/merged/left4dead2/cfg/server.cfg
hostname "CKNs Server"
motd_enabled 0

sv_steamgroup "38347879"
#sv_steamgroup_exclusive 0

sv_minrate 60000
sv_maxrate 0
net_splitpacket_maxrate 60000

sv_hibernate_when_empty 0
EOF
steam /opt/l4d2/servers/pve1/merged/srcds_run -game left4dead2 -ip 0.0.0.0 -port 27015 +map c1m1_hotel
0 bundles/left4dead2_old2/files/server.cfg Normal file
183 bundles/left4dead2_old2/items.py Normal file
@ -0,0 +1,183 @@
from shlex import quote


def steam_run(cmd):
    return f'su - steam -c {quote(cmd)}'


users = {
    'steam': {
        'home': '/opt/steam',
    },
}

directories = {
    '/opt/steam': {
        'owner': 'steam',
        'group': 'steam',
    },
    '/opt/steam/.steam': {
        'owner': 'steam',
        'group': 'steam',
    },
    '/opt/left4dead2': {
        'owner': 'steam',
        'group': 'steam',
    },
    '/opt/left4dead2/left4dead2/ems/admin system': {
        'owner': 'steam',
        'group': 'steam',
    },
    '/opt/left4dead2/left4dead2/addons': {
        'owner': 'steam',
        'group': 'steam',
    },
    '/tmp/dumps': {
        'owner': 'steam',
        'group': 'steam',
        'mode': '1770',
    },
}

symlinks = {
    '/opt/steam/.steam/sdk32': {
        'target': '/opt/steam/linux32',
        'owner': 'steam',
        'group': 'steam',
    },
}

files = {
    '/opt/steam-workshop-download': {
        'content_type': 'download',
        'source': 'https://git.sublimity.de/cronekorkn/steam-workshop-downloader/raw/branch/master/steam-workshop-download',
        'mode': '755',
    },
    '/opt/left4dead2/left4dead2/ems/admin system/admins.txt': {
        'unless': 'test -f /opt/left4dead2/left4dead2/ems/admin system/admins.txt',
        'content': 'STEAM_1:0:12376499',
        'owner': 'steam',
        'group': 'steam',
    },
}

actions = {
    'dpkg_add_architecture': {
        'command': 'dpkg --add-architecture i386',
        'unless': 'dpkg --print-foreign-architectures | grep -q i386',
        'triggers': [
            'action:apt_update',
        ],
        'needed_by': [
            'pkg_apt:libc6_i386',
        ],
    },
    'download_steam': {
        'command': steam_run('wget http://media.steampowered.com/installer/steamcmd_linux.tar.gz -P /opt/steam'),
        'unless': steam_run('test -f /opt/steam/steamcmd_linux.tar.gz'),
        'needs': {
            'pkg_apt:libc6_i386',
            'directory:/opt/steam',
        }
    },
    'extract_steamcmd': {
        'command': steam_run('tar -xvzf /opt/steam/steamcmd_linux.tar.gz -C /opt/steam'),
        'unless': steam_run('test -f /opt/steam/steamcmd.sh'),
        'needs': {
            'action:download_steam',
        }
    },
}

for addon_id in [2524204971]:
    actions[f'download-left4dead2-addon-{addon_id}'] = {
        'command': steam_run(f'/opt/steam-workshop-download {addon_id} --out /opt/left4dead2/left4dead2/addons'),
        'unless': steam_run(f'test -f /opt/left4dead2/left4dead2/addons/{addon_id}.vpk'),
        'needs': {
            'directory:/opt/left4dead2/left4dead2/addons',
        },
        'needed_by': {
            'tag:left4dead2-servers',
        },
    }

svc_systemd = {
    'left4dead2-install.service': {
        'enabled': True,
        'running': False,
        'needs': {
            'file:/usr/local/lib/systemd/system/left4dead2-install.service',
        },
    },
}

for server_name, server_config in node.metadata.get('left4dead2/servers', {}).items():
    svc_systemd[f'left4dead2-{server_name}.service'] = {
        'enabled': True,
        'running': True,
        'tags': {
            'left4dead2-servers',
        },
        'needs': {
            'svc_systemd:left4dead2-install.service',
            f'file:/usr/local/lib/systemd/system/left4dead2-{server_name}.service',
        }
    }



# # https://github.com/SirPlease/L4D2-Competitive-Rework/blob/master/Dedicated%20Server%20Install%20Guide/README.md

# mkdir /opt/steam /tmp/dumps
# useradd -M -d /opt/steam -s /bin/bash steam
# chown steam:steam /opt/steam /tmp/dumps
# dpkg --add-architecture i386
# apt update
# apt install libc6:i386 lib32z1
# sudo su - steam -s /bin/bash

# #--------

# wget http://media.steampowered.com/installer/steamcmd_linux.tar.gz
# tar -xvzf steamcmd_linux.tar.gz

# # fix: /opt/steam/.steam/sdk32/steamclient.so: cannot open shared object file: No such file or directory
# mkdir /opt/steam/.steam && ln -s /opt/steam/linux32 /opt/steam/.steam/sdk32

# # installing the windows deps first seems to be a workaround for x64?
# ./steamcmd.sh \
#     +force_install_dir /opt/steam/left4dead2 \
#     +login anonymous \
#     +@sSteamCmdForcePlatformType windows \
#     +app_update 222860 validate \
#     +quit
# ./steamcmd.sh \
#     +force_install_dir /opt/steam/left4dead2 \
#     +login anonymous \
#     +@sSteamCmdForcePlatformType linux \
#     +app_update 222860 validate \
#     +quit

# # download admin system
# wget -4 https://git.sublimity.de/cronekorkn/steam-workshop-downloader/raw/branch/master/steam-workshop-download
# chmod +x steam-workshop-download
# ./steam-workshop-download 2524204971 --out /opt/steam/left4dead2/left4dead2/addons
# mkdir -p "/opt/steam/left4dead2/left4dead2/ems/admin system"
# echo "STEAM_1:0:12376499" > "/opt/steam/left4dead2/left4dead2/ems/admin system/admins.txt"

# /opt/steam/left4dead2/srcds_run -game left4dead2 -ip 0.0.0.0 -port 27015 +map c1m1_hotel


# cat <<'EOF' > /opt/steam/left4dead2/left4dead2/cfg/server.cfg
# hostname "CKNs Server"
# motd_enabled 0

# sv_steamgroup "38347879"
# #sv_steamgroup_exclusive 0

# sv_minrate 60000
# sv_maxrate 0
# net_splitpacket_maxrate 60000

# sv_hibernate_when_empty 0
# EOF
107 bundles/left4dead2_old2/metadata.py Normal file
@ -0,0 +1,107 @@
from re import match

defaults = {
    'apt': {
        'packages': {
            'libc6_i386': {}, # installs libc6:i386
            'lib32z1': {},
            'unzip': {},
        },
    },
    'left4dead2': {
        'servers': {},
    },
    'nftables': {
        'input': {
            'udp dport { 27005, 27020 } accept',
        },
    },
}


@metadata_reactor.provides(
    'nftables/input',
)
def nftables(metadata):
    ports = sorted(str(config["port"]) for config in metadata.get('left4dead2/servers', {}).values())

    return {
        'nftables': {
            'input': {
                f'ip protocol {{ tcp, udp }} th dport {{ {", ".join(ports)} }} accept'
            },
        },
    }


@metadata_reactor.provides(
    'systemd/units',
)
def initial_unit(metadata):
    install_command = (
        '/opt/steam/steamcmd.sh '
        '+force_install_dir /opt/left4dead2 '
        '+login anonymous '
        '+@sSteamCmdForcePlatformType {platform} '
        '+app_update 222860 validate '
        '+quit '
    )

    return {
        'systemd': {
            'units': {
                'left4dead2-install.service': {
                    'Unit': {
                        'Description': 'install or update left4dead2',
                        'After': 'network-online.target',
                    },
                    'Service': {
                        'Type': 'oneshot',
                        'RemainAfterExit': 'yes',
                        'User': 'steam',
                        'Group': 'steam',
                        'WorkingDirectory': '/opt/steam',
                        'ExecStartPre': install_command.format(platform='windows'),
                        'ExecStart': install_command.format(platform='linux'),
                    },
                    'Install': {
                        'WantedBy': {'multi-user.target'},
                    },
                },
            },
        },
    }


@metadata_reactor.provides(
    'systemd/units',
)
def server_units(metadata):
    units = {}

    for name, config in metadata.get('left4dead2/servers').items():
        assert match(r'^[A-z0-9-_-]+$', name)

        units[f'left4dead2-{name}.service'] = {
            'Unit': {
                'Description': f'left4dead2 server {name}',
                'After': {'left4dead2-install.service'},
                'Requires': {'left4dead2-install.service'},
            },
            'Service': {
                'User': 'steam',
                'Group': 'steam',
                'WorkingDirectory': '/opt/left4dead2',
                'ExecStart': f'/opt/left4dead2/srcds_run -port {config["port"]} +exec server_{name}.cfg',
                'Restart': 'on-failure',
            },
            'Install': {
                'WantedBy': {'multi-user.target'},
            },
        }

    return {
        'systemd': {
            'units': units,
        },
    }
@ -5,5 +5,5 @@ printf "server 127.0.0.1
zone acme.resolver.name.
update add _acme-challenge.ckn.li.acme.resolver.name. 600 IN TXT "hello"
send
-" | nsupdate -y hmac-sha512:acme:Y9BHl85l352BGZDXa/vg90hh2+5PYe4oJxpkq/oQvIODDkW8bAyQSFr0gKQQxjyIOyYlTjf0MGcdWFv46G/3Rg==
+" | nsupdate -y hmac-sha512:acme:XXXXXX
```
@ -31,6 +31,12 @@ deploy_cert() {
% for domain, conf in sorted(domains.items()):
<% if not conf: continue %>\
${domain})
+% if conf.get('scp', None):
+scp "$KEYFILE" "${conf['scp']}/${conf.get('privkey_name', 'privkey.pem')}"
+scp "$CERTFILE" "${conf['scp']}/${conf.get('cert_name', 'cert.pem')}"
+scp "$FULLCHAINFILE" "${conf['scp']}/${conf.get('fullchain_name', 'fullchain.pem')}"
+scp "$CHAINFILE" "${conf['scp']}/${conf.get('chain_name', 'chain.pem')}"
+% endif
% if conf.get('location', None):
cat "$KEYFILE" > "${conf['location']}/${conf.get('privkey_name', 'privkey.pem')}"
cat "$CERTFILE" > "${conf['location']}/${conf.get('cert_name', 'cert.pem')}"
@ -42,7 +42,7 @@ files = {
}

actions['letsencrypt_update_certificates'] = {
-    'command': 'dehydrated --cron --accept-terms --challenge dns-01',
+    'command': 'systemctl start letsencrypt.service',
    'triggered': True,
    'skip': delegated,
    'needs': {
@ -12,9 +12,8 @@ def generate_sysctl_key_value_pairs_from_json(json_data, parents=[]):
 
 key_value_pairs = generate_sysctl_key_value_pairs_from_json(node.metadata.get('sysctl'))
 
-
 files= {
-    '/etc/sysctl.conf': {
+    '/etc/sysctl.d/managed.conf': {
         'content': '\n'.join(
             sorted(
                 f"{'.'.join(path)}={value}"
@ -25,6 +24,9 @@ files= {
             'svc_systemd:systemd-sysctl.service:restart',
         ],
     },
+    '/etc/modules-load.d/managed.conf': {
+        'content': '\n'.join(sorted(node.metadata.get('modules-load'))),
+    }
 }
 
 svc_systemd = {
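The helper referenced above is only partially visible here; a stand-alone sketch of the same flattening idea (not the repo's actual implementation) shows what ends up in /etc/sysctl.d/managed.conf:

```python
# Illustrative re-implementation of the flattening step; the real helper in the
# bundle may differ in detail.
def flatten(tree, parents=()):
    for key, value in tree.items():
        if isinstance(value, dict):
            yield from flatten(value, parents + (key,))
        else:
            yield (parents + (key,), value)

example = {'net': {'ipv4': {'ip_forward': 1}}, 'vm': {'swappiness': 10}}
print('\n'.join(sorted(
    f"{'.'.join(path)}={value}" for path, value in flatten(example)
)))
# net.ipv4.ip_forward=1
# vm.swappiness=10
```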
@ -1,3 +1,4 @@
 defaults = {
     'sysctl': {},
+    'modules-load': set(),
 }
@ -7,12 +7,7 @@ defaults = {
     'locale': {
         'default': ('en_US.UTF-8', 'UTF-8'),
         'installed': {
-            ('de_AT.UTF-8', 'UTF-8'),
-            ('de_CH.UTF-8', 'UTF-8'),
             ('de_DE.UTF-8', 'UTF-8'),
-            ('de_LU.UTF-8', 'UTF-8'),
-            ('en_CA.UTF-8', 'UTF-8'),
-            ('en_GB.UTF-8', 'UTF-8'),
             ('en_US.UTF-8', 'UTF-8'),
         },
     },
@ -2,5 +2,5 @@
 
 cd "$OLDPWD"
 
-export BW_ITEM_WORKERS=$(expr "$(nproc)" '*' 12 '/' 10)
+export BW_ITEM_WORKERS=$(expr "$(sysctl -n hw.logicalcpu)" '*' 12 '/' 10)
 export BW_NODE_WORKERS=$(expr 320 '/' "$BW_ITEM_WORKERS")
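Worked through with an assumed 10-core machine, the two exports above resolve as follows (integer arithmetic, just like expr):

```python
cores = 10                                 # assumed output of `sysctl -n hw.logicalcpu`
bw_item_workers = cores * 12 // 10         # -> 12
bw_node_workers = 320 // bw_item_workers   # -> 26
```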
@ -2,7 +2,5 @@
 
 cd "$OLDPWD"
 
-GNU_PATH="$HOME/.local/gnu_bin"
-mkdir -p "$GNU_PATH"
-test -f "$GNU_PATH/sed" || ln -s "$(which gsed)" "$GNU_PATH/sed"
-PATH_add "$GNU_PATH"
+PATH_add "/opt/homebrew/opt/gnu-sed/libexec/gnubin"
+PATH_add "/opt/homebrew/opt/grep/libexec/gnubin"
@ -2,6 +2,8 @@
 
 cd "$OLDPWD"
 
+pyenv install --skip-existing
+
 if test -f .venv/bin/python && test "$(realpath .venv/bin/python)" != "$(realpath "$(pyenv which python)")"
 then
     echo "rebuilding venv for new python version"
26
bundles/mailman/README.md
Normal file

@ -0,0 +1,26 @@
# Mailman

- django admin under /admin

## Testmail

`echo export REST_API_PASS=$(bw metadata mseibert.mailman -k mailman/api_password | jq -r .mailman.api_password)`

```sh
curl -s -o /dev/null \
  -w "Status: %{http_code}\nTime: %{time_total}s\n" \
  -u restadmin:$REST_API_PASS \
  -H "Content-Type: application/json" \
  -X POST http://localhost:8001/3.1/queues/in \
  -d "{
    \"list_id\": \"testlist-2.mailman.ckn.li\",
    \"text\": \"From: i@ckn.li\nTo: testlist-2@mailman.ckn.li\nSubject: Curl Test $(date '+%Y-%m-%d %H:%M:%S')\n\nThis message was sent at $(date '+%Y-%m-%d %H:%M:%S').\"
  }"
```

## Log locations

`tail -f /var/log/mailman3/*.log`

`journalctl -f | grep postfix/`

`mailq | head -20`
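Beyond injecting a test message, the same credentials can be used to query the core REST API; a small standard-library sketch (it assumes the REST_API_PASS variable from the README has been exported, and uses the stock GET /3.1/lists endpoint):

```python
import json
import os
import urllib.request

# Assumes REST_API_PASS was exported as described in the Testmail section.
password = os.environ['REST_API_PASS']
manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
manager.add_password(None, 'http://localhost:8001/', 'restadmin', password)
opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(manager))

# GET /3.1/lists returns the lists known to Mailman core.
with opener.open('http://localhost:8001/3.1/lists') as response:
    for entry in json.load(response).get('entries', []):
        print(entry['list_id'])
```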
22
bundles/mailman/files/mailman-hyperkitty.cfg
Normal file

@ -0,0 +1,22 @@
# This is the mailman extension configuration file to enable HyperKitty as an
# archiver. Remember to add the following lines in the mailman.cfg file:
#
# [archiver.hyperkitty]
# class: mailman_hyperkitty.Archiver
# enable: yes
# configuration: /etc/mailman3/mailman-hyperkitty.cfg
#

[general]

# This is your HyperKitty installation, preferably on the localhost. This
# address will be used by Mailman to forward incoming emails to HyperKitty
# for archiving. It does not need to be publicly available, in fact it's
# better if it is not.
# However, if your Mailman installation is accessed via HTTPS, the URL needs
# to match your SSL certificate (e.g. https://lists.example.com/hyperkitty).
base_url: http://${hostname}/mailman3/hyperkitty/

# The shared api_key, must be identical except for quoting to the value of
# MAILMAN_ARCHIVER_KEY in HyperKitty's settings.
api_key: ${archiver_key}
190
bundles/mailman/files/mailman-web.py
Normal file

@ -0,0 +1,190 @@
ACCOUNT_EMAIL_VERIFICATION='none'

# This file is imported by the Mailman Suite. It is used to override
# the default settings from /usr/share/mailman3-web/settings.py.

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '${secret_key}'

ADMINS = (
    ('Mailman Suite Admin', 'root@localhost'),
)

# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.8/ref/settings/#allowed-hosts
# Set to '*' per default in the Debian package to allow all hostnames. Mailman3
# is meant to run behind a webserver reverse proxy anyway.
ALLOWED_HOSTS = [
    '${hostname}',
]

# Mailman API credentials
MAILMAN_REST_API_URL = 'http://localhost:8001'
MAILMAN_REST_API_USER = 'restadmin'
MAILMAN_REST_API_PASS = '${api_password}'
MAILMAN_ARCHIVER_KEY = '${archiver_key}'
MAILMAN_ARCHIVER_FROM = ('127.0.0.1', '::1')

# Application definition

INSTALLED_APPS = (
    'hyperkitty',
    'postorius',
    'django_mailman3',
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'django_gravatar',
    'compressor',
    'haystack',
    'django_extensions',
    'django_q',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'django_mailman3.lib.auth.fedora',
    #'allauth.socialaccount.providers.openid',
    #'allauth.socialaccount.providers.github',
    #'allauth.socialaccount.providers.gitlab',
    #'allauth.socialaccount.providers.google',
    #'allauth.socialaccount.providers.facebook',
    #'allauth.socialaccount.providers.twitter',
    #'allauth.socialaccount.providers.stackexchange',
)


# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases

DATABASES = {
    'default': {
        # Use 'sqlite3', 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        #'ENGINE': 'django.db.backends.sqlite3',
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        #'ENGINE': 'django.db.backends.mysql',
        # DB name or path to database file if using sqlite3.
        #'NAME': '/var/lib/mailman3/web/mailman3web.db',
        'NAME': 'mailman',
        # The following settings are not used with sqlite3:
        'USER': 'mailman',
        'PASSWORD': '${db_password}',
        # HOST: empty for localhost through domain sockets or '127.0.0.1' for
        # localhost through TCP.
        'HOST': '127.0.0.1',
        # PORT: set to empty string for default.
        'PORT': '5432',
        # OPTIONS: Extra parameters to use when connecting to the database.
        'OPTIONS': {
            # Set sql_mode to 'STRICT_TRANS_TABLES' for MySQL. See
            # https://docs.djangoproject.com/en/1.11/ref/
            # databases/#setting-sql-mode
            #'init_command': "SET sql_mode='STRICT_TRANS_TABLES'",
        },
    }
}


# If you're behind a proxy, use the X-Forwarded-Host header
# See https://docs.djangoproject.com/en/1.8/ref/settings/#use-x-forwarded-host
USE_X_FORWARDED_HOST = True

# And if your proxy does your SSL encoding for you, set SECURE_PROXY_SSL_HEADER
# https://docs.djangoproject.com/en/1.8/ref/settings/#secure-proxy-ssl-header
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_SCHEME', 'https')

# Other security settings
# SECURE_SSL_REDIRECT = True
# If you set SECURE_SSL_REDIRECT to True, make sure the SECURE_REDIRECT_EXEMPT
# contains at least this line:
# SECURE_REDIRECT_EXEMPT = [
#     "archives/api/mailman/.*",  # Request from Mailman.
# ]
# SESSION_COOKIE_SECURE = True
# SECURE_CONTENT_TYPE_NOSNIFF = True
# SECURE_BROWSER_XSS_FILTER = True
# CSRF_COOKIE_SECURE = True
# CSRF_COOKIE_HTTPONLY = True
# X_FRAME_OPTIONS = 'DENY'


# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True
USE_L10N = True
USE_TZ = True


# Set default domain for email addresses.
EMAILNAME = 'localhost.local'

# If you enable internal authentication, this is the address that the emails
# will appear to be coming from. Make sure you set a valid domain name,
# otherwise the emails may get rejected.
# https://docs.djangoproject.com/en/1.8/ref/settings/#default-from-email
# DEFAULT_FROM_EMAIL = "mailing-lists@you-domain.org"
DEFAULT_FROM_EMAIL = 'postorius@{}'.format(EMAILNAME)

# If you enable email reporting for error messages, this is where those emails
# will appear to be coming from. Make sure you set a valid domain name,
# otherwise the emails may get rejected.
# https://docs.djangoproject.com/en/1.8/ref/settings/#std:setting-SERVER_EMAIL
# SERVER_EMAIL = 'root@your-domain.org'
SERVER_EMAIL = 'root@{}'.format(EMAILNAME)


# Django Allauth
ACCOUNT_DEFAULT_HTTP_PROTOCOL = "https"


#
# Social auth
#
SOCIALACCOUNT_PROVIDERS = {
    #'openid': {
    #    'SERVERS': [
    #        dict(id='yahoo',
    #             name='Yahoo',
    #             openid_url='http://me.yahoo.com'),
    #    ],
    #},
    #'google': {
    #    'SCOPE': ['profile', 'email'],
    #    'AUTH_PARAMS': {'access_type': 'online'},
    #},
    #'facebook': {
    #    'METHOD': 'oauth2',
    #    'SCOPE': ['email'],
    #    'FIELDS': [
    #        'email',
    #        'name',
    #        'first_name',
    #        'last_name',
    #        'locale',
    #        'timezone',
    #    ],
    #    'VERSION': 'v2.4',
    #},
}

# On a production setup, setting COMPRESS_OFFLINE to True will bring a
# significant performance improvement, as CSS files will not need to be
# recompiled on each request. It means running an additional "compress"
# management command after each code upgrade.
# http://django-compressor.readthedocs.io/en/latest/usage/#offline-compression
COMPRESS_OFFLINE = True

POSTORIUS_TEMPLATE_BASE_URL = 'http://${hostname}/mailman3/'
271
bundles/mailman/files/mailman.cfg
Normal file

@ -0,0 +1,271 @@
# Copyright (C) 2008-2017 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman.  If not, see <http://www.gnu.org/licenses/>.

# This file contains the Debian configuration for mailman.  It uses ini-style
# formats under the lazr.config regime to define all system configuration
# options.  See <https://launchpad.net/lazr.config> for details.


[mailman]
# This address is the "site owner" address.  Certain messages which must be
# delivered to a human, but which can't be delivered to a list owner (e.g. a
# bounce from a list owner), will be sent to this address.  It should point to
# a human.
site_owner: ${site_owner_email}

# This is the local-part of an email address used in the From field whenever a
# message comes from some entity to which there is no natural reply recipient.
# Mailman will append '@' and the host name of the list involved.  This
# address must not bounce and it must not point to a Mailman process.
noreply_address: noreply

# The default language for this server.
default_language: de

# Membership tests for posting purposes are usually performed by looking at a
# set of headers, passing the test if any of their values match a member of
# the list.  Headers are checked in the order given in this variable.  The
# value From_ means to use the envelope sender.  Field names are case
# insensitive.  This is a space separate list of headers.
sender_headers: from from_ reply-to sender

# Mail command processor will ignore mail command lines after designated max.
email_commands_max_lines: 10

# Default length of time a pending request is live before it is evicted from
# the pending database.
pending_request_life: 3d

# How long should files be saved before they are evicted from the cache?
cache_life: 7d

# A callable to run with no arguments early in the initialization process.
# This runs before database initialization.
pre_hook:

# A callable to run with no arguments late in the initialization process.
# This runs after adapters are initialized.
post_hook:

# Which paths.* file system layout to use.
# You should not change this variable.
layout: debian

# Can MIME filtered messages be preserved by list owners?
filtered_messages_are_preservable: no

# How should text/html parts be converted to text/plain when the mailing list
# is set to convert HTML to plaintext?  This names a command to be called,
# where the substitution variable $filename is filled in by Mailman, and
# contains the path to the temporary file that the command should read from.
# The command should print the converted text to stdout.
html_to_plain_text_command: /usr/bin/lynx -dump $filename

# Specify what characters are allowed in list names.  Characters outside of
# the class [-_.+=!$*{}~0-9a-z] matched case insensitively are never allowed,
# but this specifies a subset as the only allowable characters.  This must be
# a valid character class regexp or the effect on list creation is
# unpredictable.
listname_chars: [-_.0-9a-z]


[shell]
# `mailman shell` (also `withlist`) gives you an interactive prompt that you
# can use to interact with an initialized and configured Mailman system.  Use
# --help for more information.  This section allows you to configure certain
# aspects of this interactive shell.

# Customize the interpreter prompt.
prompt: >>>

# Banner to show on startup.
banner: Welcome to the GNU Mailman shell

# Use IPython as the shell, which must be found on the system.  Valid values
# are `no`, `yes`, and `debug` where the latter is equivalent to `yes` except
# that any import errors will be displayed to stderr.
use_ipython: no

# Set this to allow for command line history if readline is available.  This
# can be as simple as $var_dir/history.py to put the file in the var directory.
history_file:


[paths.debian]
# Important directories for Mailman operation.  These are defined here so that
# different layouts can be supported.   For example, a developer layout would
# be different from a FHS layout.  Most paths are based off the var_dir, and
# often just setting that will do the right thing for all the other paths.
# You might also have to set spool_dir though.
#
# Substitutions are allowed, but must be of the form $var where 'var' names a
# configuration variable in the paths.* section.  Substitutions are expanded
# recursively until no more $-variables are present.  Beware of infinite
# expansion loops!
#
# This is the root of the directory structure that Mailman will use to store
# its run-time data.
var_dir: /var/lib/mailman3
# This is where the Mailman queue files directories will be created.
queue_dir: $var_dir/queue
# This is the directory containing the Mailman 'runner' and 'master' commands
# if set to the string '$argv', it will be taken as the directory containing
# the 'mailman' command.
bin_dir: /usr/lib/mailman3/bin
# All list-specific data.
list_data_dir: $var_dir/lists
# Directory where log files go.
log_dir: /var/log/mailman3
# Directory for system-wide locks.
lock_dir: $var_dir/locks
# Directory for system-wide data.
data_dir: $var_dir/data
# Cache files.
cache_dir: $var_dir/cache
# Directory for configuration files and such.
etc_dir: /etc/mailman3
# Directory containing Mailman plugins.
ext_dir: $var_dir/ext
# Directory where the default IMessageStore puts its messages.
messages_dir: $var_dir/messages
# Directory for archive backends to store their messages in.  Archivers should
# create a subdirectory in here to store their files.
archive_dir: $var_dir/archives
# Root directory for site-specific template override files.
template_dir: $var_dir/templates
# There are also a number of paths to specific file locations that can be
# defined.  For these, the directory containing the file must already exist,
# or be one of the directories created by Mailman as per above.
#
# This is where PID file for the master runner is stored.
pid_file: /run/mailman3/master.pid
# Lock file.
lock_file: $lock_dir/master.lck


[database]
# The class implementing the IDatabase.
class: mailman.database.sqlite.SQLiteDatabase
#class: mailman.database.mysql.MySQLDatabase
#class: mailman.database.postgresql.PostgreSQLDatabase

# Use this to set the Storm database engine URL.  You generally have one
# primary database connection for all of Mailman.  List data and most rosters
# will store their data in this database, although external rosters may access
# other databases in their own way.  This string supports standard
# 'configuration' substitutions.
url: sqlite:///$DATA_DIR/mailman.db
#url: mysql+pymysql://mailman3:mmpass@localhost/mailman3?charset=utf8&use_unicode=1
#url: postgresql://mailman3:mmpass@localhost/mailman3

debug: no


[logging.debian]
# This defines various log settings.  The options available are:
#
# - level     -- Overrides the default level; this may be any of the
#                standard Python logging levels, case insensitive.
# - format    -- Overrides the default format string
# - datefmt   -- Overrides the default date format string
# - path      -- Overrides the default logger path.  This may be a relative
#                path name, in which case it is relative to Mailman's LOG_DIR,
#                or it may be an absolute path name.  You cannot change the
#                handler class that will be used.
# - propagate -- Boolean specifying whether to propagate log message from this
#                logger to the root "mailman" logger.  You cannot override
#                settings for the root logger.
#
# In this section, you can define defaults for all loggers, which will be
# prefixed by 'mailman.'.  Use subsections to override settings for specific
# loggers.  The names of the available loggers are:
#
# - archiver        --  All archiver output
# - bounce          --  All bounce processing logs go here
# - config          --  Configuration issues
# - database        --  Database logging (SQLAlchemy and Alembic)
# - debug           --  Only used for development
# - error           --  All exceptions go to this log
# - fromusenet      --  Information related to the Usenet to Mailman gateway
# - http            --  Internal wsgi-based web interface
# - locks           --  Lock state changes
# - mischief        --  Various types of hostile activity
# - runner          --  Runner process start/stops
# - smtp            --  Successful SMTP activity
# - smtp-failure    --  Unsuccessful SMTP activity
# - subscribe       --  Information about leaves/joins
# - vette           --  Message vetting information
format: %(asctime)s (%(process)d) %(message)s
datefmt: %b %d %H:%M:%S %Y
propagate: no
level: info
path: mailman.log

[webservice]
# The hostname at which admin web service resources are exposed.
hostname: localhost

# The port at which the admin web service resources are exposed.
port: 8001

# Whether or not requests to the web service are secured through SSL.
use_https: no

# Whether or not to show tracebacks in an HTTP response for a request that
# raised an exception.
show_tracebacks: yes

# The API version number for the current (highest) API.
api_version: 3.1

# The administrative username.
admin_user: restadmin

# The administrative password.
admin_pass: ${api_password}

[mta]
# The class defining the interface to the incoming mail transport agent.
#incoming: mailman.mta.exim4.LMTP
incoming: mailman.mta.postfix.LMTP

# The callable implementing delivery to the outgoing mail transport agent.
# This must accept three arguments, the mailing list, the message, and the
# message metadata dictionary.
outgoing: mailman.mta.deliver.deliver

# How to connect to the outgoing MTA.  If smtp_user and smtp_pass is given,
# then Mailman will attempt to log into the MTA when making a new connection.
smtp_host: 127.0.0.1
smtp_port: 25
smtp_user:
smtp_pass:

# Where the LMTP server listens for connections.  Use 127.0.0.1 instead of
# localhost for Postfix integration, because Postfix only consults DNS
# (e.g. not /etc/hosts).
lmtp_host: 127.0.0.1
lmtp_port: 8024

# Where can we find the mail server specific configuration file?  The path can
# be either a file system path or a Python import path.  If the value starts
# with python: then it is a Python import path, otherwise it is a file system
# path.  File system paths must be absolute since no guarantees are made about
# the current working directory.  Python paths should not include the trailing
# .cfg, which the file must end with.
#configuration: python:mailman.config.exim4
configuration: python:mailman.config.postfix
53
bundles/mailman/files/postfix.cf
Normal file

@ -0,0 +1,53 @@
# See /usr/share/postfix/main.cf.dist for a commented, more complete version

# Debian specific:  Specifying a file name will cause the first
# line of that file to be used as the name.  The Debian default
# is /etc/mailname.
#myorigin = /etc/mailname

smtpd_banner = $myhostname ESMTP $mail_name (Debian/GNU)
biff = no

# appending .domain is the MUA's job.
append_dot_mydomain = no

# Uncomment the next line to generate "delayed mail" warnings
#delay_warning_time = 4h

readme_directory = no

# See http://www.postfix.org/COMPATIBILITY_README.html -- default to 3.6 on
# fresh installs.
compatibility_level = 3.6

# TLS parameters
smtpd_tls_cert_file=/etc/ssl/certs/ssl-cert-snakeoil.pem
smtpd_tls_key_file=/etc/ssl/private/ssl-cert-snakeoil.key
smtpd_tls_security_level=may

smtp_tls_CApath=/etc/ssl/certs
smtp_tls_security_level=may
smtp_tls_session_cache_database = <%text>btree:${data_directory}/smtp_scache</%text>

smtpd_relay_restrictions = permit_mynetworks permit_sasl_authenticated defer_unauth_destination
myhostname = ${hostname}
alias_maps = hash:/etc/aliases
alias_database = hash:/etc/aliases
mydestination = $myhostname, localhost, localhost.localdomain, ${hostname}
relayhost =
mynetworks = 127.0.0.0/8 [::ffff:127.0.0.0]/104 [::1]/128
mailbox_size_limit = 0
recipient_delimiter = +
inet_interfaces = all
#inet_protocols = all
inet_protocols = ipv4

unknown_local_recipient_reject_code = 550
owner_request_special = no

transport_maps =
    hash:/var/lib/mailman3/data/postfix_lmtp
local_recipient_maps =
    hash:/var/lib/mailman3/data/postfix_lmtp
relay_domains =
    hash:/var/lib/mailman3/data/postfix_domains
50
bundles/mailman/files/uwsgi.ini
Normal file

@ -0,0 +1,50 @@
[uwsgi]
# Port on which uwsgi will be listening.
uwsgi-socket = /run/mailman3-web/uwsgi.sock

# Enable threading for python
enable-threads = true

# Move to the directory where the django files are.
chdir = /usr/share/mailman3-web

# Use the wsgi file provided with the django project.
wsgi-file = wsgi.py

# Setup default number of processes and threads per process.
master = true
process = 2
threads = 2

# Drop privileges and don't run as root.
uid = www-data
gid = www-data

plugins = python3

# Setup the django_q related worker processes.
attach-daemon = python3 manage.py qcluster

# Setup hyperkitty's cron jobs.
#unique-cron = -1 -1 -1 -1 -1 ./manage.py runjobs minutely
#unique-cron = -15 -1 -1 -1 -1 ./manage.py runjobs quarter_hourly
#unique-cron = 0 -1 -1 -1 -1 ./manage.py runjobs hourly
#unique-cron = 0 0 -1 -1 -1 ./manage.py runjobs daily
#unique-cron = 0 0 1 -1 -1 ./manage.py runjobs monthly
#unique-cron = 0 0 -1 -1 0 ./manage.py runjobs weekly
#unique-cron = 0 0 1 1 -1 ./manage.py runjobs yearly

# Setup the request log.
#req-logger = file:/var/log/mailman3/web/mailman-web.log

# Log cron separately.
#logger = cron file:/var/log/mailman3/web/mailman-web-cron.log
#log-route = cron uwsgi-cron

# Log qcluster commands separately.
#logger = qcluster file:/var/log/mailman3/web/mailman-web-qcluster.log
#log-route = qcluster uwsgi-daemons

# Last log and it logs the rest of the stuff.
#logger = file:/var/log/mailman3/web/mailman-web-error.log
logto = /var/log/mailman3/web/mailman-web.log
104
bundles/mailman/items.py
Normal file

@ -0,0 +1,104 @@
directories = {
    '/var/lib/mailman3': {
        'owner': 'list',
        'group': 'list',
        'needs': {
            'zfs_dataset:tank/mailman',
            'pkg_apt:mailman3-full',
        },
        'needed_by': {
            'svc_systemd:mailman3.service',
            'svc_systemd:mailman3-web.service',
        },
    },
}

files = {
    '/etc/postfix/main.cf': {
        'source': 'postfix.cf',
        'content_type': 'mako',
        'mode': '0644',
        'context': {
            'hostname': node.metadata.get('mailman/hostname'),
        },
        'needs': {
            'pkg_apt:postfix',
        },
        'triggers': {
            'svc_systemd:postfix.service:restart',
        },
    },
    '/etc/mailman3/mailman.cfg': {
        'content_type': 'mako',
        'owner': 'root',
        'group': 'list',
        'mode': '0640',
        'context': node.metadata.get('mailman'),
        'needs': {
            'pkg_apt:mailman3-full',
        },
        'triggers': {
            'svc_systemd:mailman3.service:restart',
            'svc_systemd:mailman3-web.service:restart',
        },
    },
    '/etc/mailman3/mailman-web.py': {
        'content_type': 'mako',
        'owner': 'root',
        'group': 'www-data',
        'mode': '0640',
        'context': node.metadata.get('mailman'),
        'needs': {
            'pkg_apt:mailman3-full',
        },
        'triggers': {
            'svc_systemd:mailman3.service:restart',
            'svc_systemd:mailman3-web.service:restart',
        },
    },
    '/etc/mailman3/mailman-hyperkitty.cfg': {
        'content_type': 'mako',
        'owner': 'root',
        'group': 'list',
        'mode': '0640',
        'context': node.metadata.get('mailman'),
        'needs': {
            'pkg_apt:mailman3-full',
        },
        'triggers': {
            'svc_systemd:mailman3.service:restart',
            'svc_systemd:mailman3-web.service:restart',
        },
    },
    '/etc/mailman3/uwsgi.ini': {
        'content_type': 'text',
        'owner': 'root',
        'group': 'root',
        'mode': '0644',
        'needs': {
            'pkg_apt:mailman3-full',
        },
        'triggers': {
            'svc_systemd:mailman3.service:restart',
            'svc_systemd:mailman3-web.service:restart',
        },
    },
}

svc_systemd = {
    'postfix.service': {
        'needs': {
            'pkg_apt:postfix',
        },
    },
    'mailman3.service': {
        'needs': {
            'pkg_apt:mailman3-full',
        },
    },
    'mailman3-web.service': {
        'needs': {
            'pkg_apt:mailman3-full',
        },
    },
}
149
bundles/mailman/metadata.py
Normal file

@ -0,0 +1,149 @@
import base64

def derive_mailadmin_secret(metadata, salt):
    node_id = metadata.get('id')
    raw = base64.b64decode(
        repo.vault.random_bytes_as_base64_for(f'{node_id}_{salt}', length=32).value
    )
    return base64.urlsafe_b64encode(raw).rstrip(b'=').decode('ascii')


defaults = {
    'apt': {
        'packages': {
            'mailman3-full': {
                'needs': {
                    'postgres_db:mailman',
                    'postgres_role:mailman',
                    'zfs_dataset:tank/mailman',
                }
            },
            'postfix': {},
            'python3-psycopg2': {
                'needed_by': {
                    'pkg_apt:mailman3-full',
                },
            },
            'apache2': {
                'installed': False,
                'needs': {
                    'pkg_apt:mailman3-full',
                },
            },
        },
    },
    'zfs': {
        'datasets': {
            'tank/mailman': {
                'mountpoint': '/var/lib/mailman3',
            },
        },
    },
}


@metadata_reactor.provides(
    'postgresql',
    'mailman',
)
def postgresql(metadata):
    node_id = metadata.get('id')
    db_password = repo.vault.password_for(f'{node_id} database mailman')

    return {
        'postgresql': {
            'databases': {
                'mailman': {
                    'owner': 'mailman',
                },
            },
            'roles': {
                'mailman': {
                    'password': db_password,
                },
            },
        },
        'mailman': {
            'db_password': db_password,
        },
    }


@metadata_reactor.provides(
    'nginx/vhosts',
)
def nginx(metadata):
    return {
        'nginx': {
            'vhosts': {
                metadata.get('mailman/hostname'): {
                    'content': 'mailman/vhost.conf',
                },
            },
        },
    }


@metadata_reactor.provides(
    'mailman/secret_key',
)
def secret_key(metadata):
    import base64

    node_id = metadata.get('id')
    raw = base64.b64decode(
        repo.vault.random_bytes_as_base64_for(f'{node_id}_mailman_secret_key', length=32).value
    )
    secret_key = base64.urlsafe_b64encode(raw).rstrip(b'=').decode('ascii')

    return {
        'mailman': {
            'secret_key': secret_key,
        },
    }


@metadata_reactor.provides(
    'mailman',
)
def secrets(metadata):
    return {
        'mailman': {
            'web_secret': derive_mailadmin_secret(metadata, 'secret_key'),
            'api_password': derive_mailadmin_secret(metadata, 'api_password'),
            'archiver_key': derive_mailadmin_secret(metadata, 'archiver_key'),
        },
    }


@metadata_reactor.provides(
    'dns',
)
def dns(metadata):
    report_email = metadata.get('mailman/dmarc_report_email')

    return {
        'dns': {
            metadata.get('mailman/hostname'): {
                'MX': [f"5 {metadata.get('mailman/hostname')}."],
                'TXT': [
                    'v=spf1 a mx -all',
                    '; '.join(f'{k}={v}' for k, v in {
                        # dmarc version
                        'v': 'DMARC1',
                        # reject on failure
                        'p': 'reject',
                        # standard reports
                        'rua': f'mailto:{report_email}',
                        # forensic reports
                        'fo': 1,
                        'ruf': f'mailto:{report_email}',
                        # require alignment between the DKIM domain and the parent Header From domain
                        'adkim': 's',
                        # require alignment between the SPF domain (the sender) and the Header From domain
                        'aspf': 's',
                    }.items())
                ],
            },
        },
    }
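The dns reactor above joins the DMARC policy dict into a single TXT string; evaluated with an assumed report address it yields the following (a worked example, not output captured from a node):

```python
report_email = 'dmarc@example.org'  # assumed value for illustration
txt = '; '.join(f'{k}={v}' for k, v in {
    'v': 'DMARC1',
    'p': 'reject',
    'rua': f'mailto:{report_email}',
    'fo': 1,
    'ruf': f'mailto:{report_email}',
    'adkim': 's',
    'aspf': 's',
}.items())
print(txt)
# v=DMARC1; p=reject; rua=mailto:dmarc@example.org; fo=1; ruf=mailto:dmarc@example.org; adkim=s; aspf=s
```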
@ -32,10 +32,14 @@ defaults = {
         'tank/vmail': {
             'mountpoint': '/var/vmail',
             'compression': 'on',
+            'atime': 'off',
+            'recordsize': '16384',
         },
         'tank/vmail/index': {
             'mountpoint': '/var/vmail/index',
             'compression': 'on',
+            'atime': 'off',
+            'recordsize': '4096',
             'com.sun:auto-snapshot': 'false',
             'backup': False,
         },
@ -1,11 +0,0 @@
% for section, options in sorted(conf.items()):
[${section}]
% for key, value in sorted(options.items()):
% if value is None:
${key}
% else:
${key} = ${value}
% endif
% endfor

% endfor
@ -10,8 +10,6 @@ directories = {
         'group': 'mysql',
         'needs': [
             'zfs_dataset:tank/mariadb',
-        ],
-        'needed_by': [
             'pkg_apt:mariadb-server',
             'pkg_apt:mariadb-client',
         ],
@ -20,10 +18,8 @@ directories = {
 
 files = {
     '/etc/mysql/conf.d/override.conf': {
-        'context': {
-            'conf': node.metadata.get('mariadb/conf'),
-        },
-        'content_type': 'mako',
+        'content': repo.libs.ini.dumps(node.metadata.get('mariadb/conf')),
+        'content_type': 'text',
     },
 }
@ -1,33 +0,0 @@
assert node.has_bundle('nodejs')
assert node.has_bundle('postgresql')
assert node.has_bundle('zfs')

# To update:
#
# - systemctl stop n8n postgresql
# - tempsnap pre-n8n-update (for psql, emergency rollback)
# - apply

version = node.metadata.get("n8n/version")
actions['install_n8n'] = {
    'command': f'cd /opt/n8n && sudo -u n8n npm install n8n@{version}',
    'unless': f'test -e /opt/n8n/node_modules && '
              f'test $(jq -r ".version" < /opt/n8n/node_modules/n8n/package.json) = "{version}"',
    'needs': {
        'directory:/opt/n8n',
        'pkg_apt:nodejs',
        'user:n8n',
    },
    'triggers': {
        'svc_systemd:n8n.service:restart',
    },
}

svc_systemd['n8n.service'] = {
    'enabled': True,
    'running': True,
    'needs': {
        'pkg_apt:nodejs',
        'action:install_n8n',
    },
}
@ -1,89 +0,0 @@
database_password = repo.vault.password_for(f'{node.name} postgresql n8n')

defaults = {
    'backups': {
        'paths': {
            '/opt/n8n',
        },
    },
    'npm': {
        'n8n': {},
    },
    'n8n': {
        'DB_TYPE': 'postgresdb',
        'DB_POSTGRESDB_DATABASE': 'n8n',
        'DB_POSTGRESDB_HOST': 'localhost',
        'DB_POSTGRESDB_PORT': 5432,
        'DB_POSTGRESDB_USER': 'n8n',
        'DB_POSTGRESDB_PASSWORD': database_password,
    },
    'postgresql': {
        'databases': {
            'n8n': {
                'when_creating': {
                    'encoding': 'UTF8',
                    'collation': 'C.UTF-8',
                    'ctype': 'C.UTF-8',
                },
                'owner': 'n8n',
            },
        },
        'roles': {
            'n8n': {
                'password': database_password,
            },
        },
    },
    'systemd': {
        'units': {
            'n8n.service': {
                'Unit': {
                    'Description': 'n8n',
                    'Requires': 'network.target postgresql.service',
                    'After': 'postgresql.service',
                },
                'Service': {
                    'Restart': 'always',
                    'RestartSec': '5',
                    'WorkingDirectory': '/opt/n8n',
                    'ExecStart': '/usr/bin/npx n8n start',
                    'User': 'n8n',
                    'Group': 'n8n',
                    'Environment': {
                        'NODE_ENV=production',
                    },
                },
            },
        },
    },
    'users': {
        'n8n': {
            'home': '/opt/n8n',
        },
    },
    'zfs': {
        'datasets': {
            'tank/n8n': {
                'mountpoint': '/opt/n8n',
                'needed_by': {'directory:/opt/n8n'},
            },
        },
    },
}


@metadata_reactor.provides(
    'systemd/services/n8n.service',
)
def systemd(metadata):
    return {
        'systemd': {
            'units': {
                'n8n.service': {
                    'Service': {
                        'Environment': metadata.get('n8n'),
                    },
                },
            },
        },
    }
19
bundles/network/items.py
Normal file

@ -0,0 +1,19 @@
for network_name, network_conf in node.metadata.get('network').items():
    if 'qdisc' in network_conf:
        svc_systemd[f'qdisc-{network_name}.service'] = {
            'enabled': True,
            'running': None,
            'needs': {
                f'file:/usr/local/lib/systemd/system/qdisc-{network_name}.service',
            },
        }
        actions[f'qdisc-{network_name}.service_restart_workaround'] = {
            'command': 'true',
            'triggered': True,
            'triggered_by': {
                f'file:/usr/local/lib/systemd/system/qdisc-{network_name}.service',
            },
            'triggers': {
                f'svc_systemd:qdisc-{network_name}.service:restart',
            },
        }
@ -34,63 +34,112 @@ def dhcp(metadata):
 
 @metadata_reactor.provides(
     'systemd/units',
+    'modules-load',
 )
 def units(metadata):
-    units = {}
-
-    for network_name, network_conf in metadata.get('network').items():
-        interface_type = network_conf.get('type', None)
-
-        # network
-
-        units[f'{network_name}.network'] = {
-            'Match': {
-                'Name': network_name if interface_type == 'vlan' else network_conf['interface'],
-            },
-            'Network': {
-                'DHCP': network_conf.get('dhcp', 'no'),
-                'IPv6AcceptRA': network_conf.get('dhcp', 'no'),
-                'VLAN': set(network_conf.get('vlans', set()))
-            }
-        }
-
-        # type
-
-        if interface_type:
-            units[f'{network_name}.network']['Match']['Type'] = interface_type
-
-        # ips
-
-        for i in [4, 6]:
-            if network_conf.get(f'ipv{i}', None):
-                units[f'{network_name}.network'].update({
-                    f'Address#ipv{i}': {
-                        'Address': network_conf[f'ipv{i}'],
-                    },
-                })
-                if f'gateway{i}' in network_conf:
-                    units[f'{network_name}.network'].update({
-                        f'Route#ipv{i}': {
-                            'Gateway': network_conf[f'gateway{i}'],
-                            'GatewayOnlink': 'yes',
-                        }
-                    })
-
-        # as vlan
-
-        if interface_type == 'vlan':
-            units[f"{network_name}.netdev"] = {
-                'NetDev': {
-                    'Name': network_name,
-                    'Kind': 'vlan',
-                },
-                'VLAN': {
-                    'Id': network_conf['id'],
-                }
-            }
-
-    return {
-        'systemd': {
-            'units': units,
-        },
-    }
+    if node.has_bundle('systemd-networkd'):
+        units = {}
+        modules_load = set()
+
+        for network_name, network_conf in metadata.get('network').items():
+            interface_type = network_conf.get('type', None)
+
+            # network
+
+            units[f'{network_name}.network'] = {
+                'Match': {
+                    'Name': network_name if interface_type == 'vlan' else network_conf['interface'],
+                },
+                'Network': {
+                    'DHCP': network_conf.get('dhcp', 'no'),
+                    'IPv6AcceptRA': network_conf.get('IPv6AcceptRA', 'no'),
+                    'VLAN': set(
+                        other_network_name
+                        for other_network_name, other_network_conf in metadata.get('network', {}).items()
+                        if other_network_conf.get('type') == 'vlan' and other_network_conf['vlan_interface'] == network_name
+                    )
+                }
+            }
+
+            # type
+
+            if interface_type:
+                units[f'{network_name}.network']['Match']['Type'] = interface_type
+
+            # ips
+
+            for i in [4, 6]:
+                if network_conf.get(f'ipv{i}', None):
+                    units[f'{network_name}.network'].update({
+                        f'Address#ipv{i}': {
+                            'Address': network_conf[f'ipv{i}'],
+                        },
+                    })
+                    if f'gateway{i}' in network_conf:
+                        units[f'{network_name}.network'].update({
+                            f'Route#ipv{i}': {
+                                'Gateway': network_conf[f'gateway{i}'],
+                                'GatewayOnlink': 'yes',
+                            }
+                        })
+
+            # as vlan
+
+            if interface_type == 'vlan':
+                units[f"{network_name}.netdev"] = {
+                    'NetDev': {
+                        'Name': network_name,
+                        'Kind': 'vlan',
+                    },
+                    'VLAN': {
+                        'Id': network_conf['id'],
+                    }
+                }
+
+            # cake WIP
+
+            if 'cake' in network_conf:
+                units[f'{network_name}.network']['CAKE'] = network_conf['cake']
+                modules_load.add('sch_cake')
+
+        return {
+            'systemd': {
+                'units': units,
+            },
+            'modules-load': modules_load,
+        }
+    else:
+        return {}
+
+
+@metadata_reactor.provides(
+    'systemd/units',
+)
+def queuing_disciplines(metadata):
+    if node.has_bundle('systemd-networkd'):
+        return {
+            'systemd': {
+                'units': {
+                    f'qdisc-{network_name}.service': {
+                        'Unit': {
+                            'Description': f'setup queuing discipline for interface {network_name}',
+                            'Wants': 'network.target',
+                            'After': 'network.target',
+                            'BindsTo': 'network.target',
+                        },
+                        'Service': {
+                            'Type': 'oneshot',
+                            'ExecStart': f'/sbin/tc qdisc replace root dev {network_name} {network_conf["qdisc"]}',
+                            'RemainAfterExit': 'yes',
+                        },
+                        'Install': {
+                            'WantedBy': 'network-online.target',
+                        },
+                    }
+                    for network_name, network_conf in metadata.get('network').items()
+                    if 'qdisc' in network_conf
+                },
+            },
+        }
+    else:
+        return {}
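A sketch of the per-node 'network' metadata shape the rewritten units() and queuing_disciplines() reactors consume; every key below is one the reactors actually read, but the interface names, addresses, VLAN id and qdisc string are invented for illustration:

```python
# Hypothetical 'network' metadata for one node.
example_network = {
    'internal': {
        'interface': 'eth0',
        'ipv4': '10.0.0.2/24',
        'gateway4': '10.0.0.1',
    },
    'iot': {
        'type': 'vlan',
        'vlan_interface': 'internal',  # makes 'iot' show up as VLAN= in internal.network
        'id': 42,
        'dhcp': 'yes',
    },
    'eth1': {
        'interface': 'eth1',
        'dhcp': 'yes',
        'qdisc': 'cake bandwidth 40Mbit',  # consumed by queuing_disciplines()
    },
}
```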
@ -8,4 +8,5 @@ examples
 
 ```sh
 nft add rule inet filter input tcp dport 5201 accept
+nft add rule inet filter input udp dport 5201 accept
 ```
@ -2,6 +2,23 @@
 
 flush ruleset
 
+% if nat:
+table ip nat {
+
+    # NAT
+
+    chain postrouting {
+        type nat hook postrouting priority 100
+        policy accept
+
+        # rules
+        % for rule in sorted(nat):
+        ${rule}
+        % endfor
+    }
+}
+% endif
+
 table inet filter {
 
     # INPUT
@ -6,6 +6,7 @@ files = {
         'input': node.metadata.get('nftables/input'),
         'forward': node.metadata.get('nftables/forward'),
         'output': node.metadata.get('nftables/output'),
+        'nat': node.metadata.get('nftables/nat'),
     },
     'triggers': [
         'svc_systemd:nftables.service:reload',
@ -8,7 +8,8 @@ defaults = {
         'input': {
             'tcp dport 22 accept',
         },
-        'forward': {},
-        'output': {},
+        'forward': set(),
+        'nat': set(),
+        'output': set(),
     },
 }
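With the new 'nat' set in place, other bundles can contribute postrouting rules through their own metadata; a hedged example (the interface name and rule are assumptions, the rule text is plain nftables syntax that the template drops into its postrouting chain):

```python
# Hypothetical defaults in a router-ish bundle; only the 'nftables/nat' key
# matters here, the rule itself is illustrative.
defaults = {
    'nftables': {
        'nat': {
            'oifname "ppp0" masquerade',
        },
    },
}
```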
@ -31,5 +31,13 @@ http {
     }
 
 % endif
-    include /etc/nginx/sites/*;
+
+% if has_websockets:
+    map $http_upgrade $connection_upgrade {
+        default upgrade;
+        '' close;
+    }
+% endif
+
+    include /etc/nginx/sites-enabled/*;
 }
@ -9,7 +9,7 @@ directories = {
             'svc_systemd:nginx:restart',
         },
     },
-    '/etc/nginx/sites': {
+    '/etc/nginx/sites-available': {
         'purge': True,
         'triggers': {
             'svc_systemd:nginx:restart',
@ -33,6 +33,7 @@ files = {
         'context': {
             'modules': node.metadata.get('nginx/modules'),
             'worker_processes': node.metadata.get('vm/cores'),
+            'has_websockets': node.metadata.get('nginx/has_websockets'),
         },
         'triggers': {
             'svc_systemd:nginx:restart',
@ -75,6 +76,12 @@ files = {
     },
 }
 
+symlinks = {
+    '/etc/nginx/sites-enabled': {
+        'target': '/etc/nginx/sites-available',
+    },
+}
+
 actions = {
     'nginx-generate-dhparam': {
         'command': 'openssl dhparam -dsaparam -out /etc/ssl/certs/dhparam.pem 4096',
@ -93,7 +100,7 @@ svc_systemd = {
 
 
 for name, config in node.metadata.get('nginx/vhosts').items():
-    files[f'/etc/nginx/sites/{name}'] = {
+    files[f'/etc/nginx/sites-available/{name}'] = {
         'content': Template(filename=join(repo.path, 'data', config['content'])).render(
             server_name=name,
             **config.get('context', {}),
@ -109,6 +116,6 @@ for name, config in node.metadata.get('nginx/vhosts').items():
     }
 
     if name in node.metadata.get('letsencrypt/domains'):
-        files[f'/etc/nginx/sites/{name}']['needs'].append(
+        files[f'/etc/nginx/sites-available/{name}']['needs'].append(
             f'action:letsencrypt_ensure-some-certificate_{name}',
         )
@ -18,6 +18,7 @@ defaults = {
     'nginx': {
         'vhosts': {},
         'modules': set(),
+        'has_websockets': False,
     },
     'systemd': {
         'units': {
@ -95,7 +96,7 @@ def monitoring(metadata):
         'monitoring': {
             'services': {
                 hostname: {
-                    'vars.command': f"/usr/bin/curl -X GET -L --fail --no-progress-meter -o /dev/null {quote(hostname + vhost.get('check_path', ''))}",
+                    'vars.command': f"/usr/bin/curl -X GET -L --fail --no-progress-meter -o /dev/null {vhost.get('check_protocol', 'https')}://{quote(hostname + vhost.get('check_path', '/'))}",
                 }
                 for hostname, vhost in metadata.get('nginx/vhosts').items()
             },
@ -8,7 +8,9 @@ defaults = {
             },
         },
     },
-    'npm': {},
+    'npm': {
+        'yarn': {},
+    },
 }

@ -26,9 +28,7 @@ def sources(metadata):
             'deb',
             'deb-src',
         },
-        'urls': {
-            f'https://deb.nodesource.com/node_{version}.x',
-        },
+        'url': 'https://deb.nodesource.com/node_{version}.x',
         'suites': {
             '{codename}',
         },
bundles/postgresql/files/pg_hba.conf (new file, 22 lines)
@@ -0,0 +1,22 @@
+# DO NOT DISABLE!
+# If you change this first entry you will need to make sure that the
+# database superuser can access the database using some other method.
+# Noninteractive access to all databases is required during automatic
+# maintenance (custom daily cronjobs, replication, and similar tasks).
+#
+# Database administrative login by Unix domain socket
+local   all             postgres                                peer
+
+# TYPE  DATABASE        USER            ADDRESS                 METHOD
+
+# "local" is for Unix domain socket connections only
+local   all             all                                     peer
+# IPv4 local connections:
+host    all             all             127.0.0.1/32            ${node.metadata.get('postgresql/password_algorithm', 'md5')}
+# IPv6 local connections:
+host    all             all             ::1/128                 ${node.metadata.get('postgresql/password_algorithm', 'md5')}
+# Allow replication connections from localhost, by a user with the
+# replication privilege.
+local   replication     all                                     peer
+host    replication     all             127.0.0.1/32            ${node.metadata.get('postgresql/password_algorithm', 'md5')}
+host    replication     all             ::1/128                 ${node.metadata.get('postgresql/password_algorithm', 'md5')}
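The only templated part of the file above is the METHOD column. With the default it renders to, for example,

host    all             all             127.0.0.1/32            md5

and a node that sets postgresql/password_algorithm to scram-sha-256 (a standard PostgreSQL auth method, used here purely as an illustration) would get

host    all             all             127.0.0.1/32            scram-sha-256

instead; the items hunk below wires the template up with the usual ordering and restart trigger.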
@@ -18,6 +18,21 @@ directories = {
 }

 files = {
+    f"/etc/postgresql/{version}/main/pg_hba.conf": {
+        'content_type': 'mako',
+        'mode': '0640',
+        'owner': 'postgres',
+        'group': 'postgres',
+        'needs': [
+            'pkg_apt:postgresql',
+        ],
+        'needed_by': [
+            'svc_systemd:postgresql.service',
+        ],
+        'triggers': [
+            'svc_systemd:postgresql.service:restart',
+        ],
+    },
     f"/etc/postgresql/{version}/main/conf.d/managed.conf": {
         'content': '\n'.join(
             f'{key} = {value}'
bundles/pppoe/REAMDE.md (new file, 36 lines)
@@ -0,0 +1,36 @@
+# Fritzbox
+
+Internet > Zugangsdaten (access data)
+
+Internet provider
+- further internet providers
+- other internet provider
+- Name: "My PPPOE" (do not leave empty)
+
+Connection
+(x) Connection to a DSL line
+
+Account data
+(x) No
+
+Connection settings
+
+[x] Use VLAN for the internet connection
+VLAN ID: 7
+PBit: 0
+
+DSL ATM settings
+VPI: 1
+VCI: 32
+
+Encapsulation
+(x) Routed Bridge Encapsulation
+[x] Obtain the IP address automatically via DHCP
+DHCP hostname: fritz.box
+
+PPPoE passthrough
+[x] Connected network devices may additionally establish their own internet connection (not recommended)
+
+[ ] Check internet access after applying ("Übernehmen")
+
+-> Afterwards, "Internet provider" must show the chosen name instead of "further internet providers", e.g. "My PPPOE"
bundles/pppoe/files/chap-secrets (new file, 3 lines)
@@ -0,0 +1,3 @@
+# Secrets for authentication using CHAP
+# client        server  secret          IP addresses
+"${user}"       *       "${secret}"     *
bundles/pppoe/files/isp (new file, 30 lines)
@@ -0,0 +1,30 @@
+# --- Plugin & Interface ---
+plugin rp-pppoe.so ${interface}
+unit 0
+
+# --- IPv4 settings ---
+noipdefault          # no self-chosen local IP
+defaultroute         # create a default route via ppp0
+replacedefaultroute  # replaces any previous default route
+
+# --- IPv6 settings ---
++ipv6                # enable IPv6CP
+ipv6cp-accept-local  # accept the local IPv6 address from the ISP
+ipv6cp-accept-remote # accept the remote IPv6 address from the ISP
+ipv6cp-use-ipaddr    # use the assigned IPv6 address instead of link-local
+defaultroute6
+
+# --- Connection management ---
+persist              # reconnect automatically after a drop
+maxfail 0            # unlimited retries
+
+# --- LCP keepalive (reliable failure detection) ---
+lcp-echo-interval 20
+lcp-echo-failure 3
+
+# --- Security / logging ---
+hide-password        # do not show the password in the log
+noauth               # authentication only via chap-secrets
+
+# --- Credentials (username only, the password comes from /etc/ppp/chap-secrets) ---
+user "${user}"
bundles/pppoe/items.py (new file, 38 lines)
@@ -0,0 +1,38 @@
+files = {
+    '/etc/ppp/peers/isp': {
+        'content_type': 'mako',
+        'mode': '0644',
+        'context': {
+            'interface': node.metadata.get('pppoe/interface'),
+            'user': node.metadata.get('pppoe/user'),
+        },
+        'needs': {
+            'pkg_apt:pppoe',
+        },
+    },
+    '/etc/ppp/chap-secrets': {
+        'content_type': 'mako',
+        'mode': '0600',
+        'context': {
+            'user': node.metadata.get('pppoe/user'),
+            'secret': node.metadata.get('pppoe/secret'),
+        },
+        'needs': {
+            'pkg_apt:pppoe',
+        },
+    },
+}
+
+svc_systemd = {
+    'pppoe-isp.service': {
+        'needs': {
+            'file:/etc/ppp/peers/isp',
+            'file:/etc/ppp/chap-secrets',
+        },
+    },
+    'qdisc-ppp0.service': {
+        'needs': {
+            'svc_systemd:pppoe-isp.service',
+        },
+    },
+}
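The items above read three metadata keys: pppoe/interface, pppoe/user and pppoe/secret. A minimal sketch of a node providing them, assuming the usual BundleWrap nodes.py layout; the node name and values are placeholders, and in a real repo the secret would come from the secrets/vault mechanism rather than a literal:

# hypothetical nodes.py entry -- illustrative only, not part of this diff
nodes = {
    'router': {
        'bundles': ['pppoe'],
        'metadata': {
            'pppoe': {
                'interface': 'eth1',                      # NIC facing the DSL modem
                'user': 'account@provider.example',       # PPPoE login
                'secret': '<pppoe password placeholder>', # better sourced from the repo's secret store
            },
        },
    },
}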
bundles/pppoe/metadata.py (new file, 55 lines)
@@ -0,0 +1,55 @@
+defaults = {
+    'apt': {
+        'packages': {
+            'pppoe': {},
+        },
+    },
+    'modules-load': {
+        'pppoe',
+        'pppox',
+        'ppp_generic',
+    },
+    'nftables': {
+        'nat': {
+            'oifname ppp0 masquerade',
+        },
+    },
+    'systemd': {
+        'units': {
+            'pppoe-isp.service': {
+                'Unit': {
+                    'Description': 'PPPoE Internet Connection',
+                    'After': 'network.target',
+                },
+                'Service': {
+                    'Type': 'forking',
+                    'ExecStart': '/usr/sbin/pppd call isp',
+                    'Restart': 'on-failure',
+                    'RestartSec': 5,
+                },
+            },
+            'qdisc-ppp0.service': {
+                'Unit': {
+                    'Description': 'setup queuing discipline for interface ppp0',
+                    'After': {
+                        'pppoe-isp.service',
+                        'sys-devices-virtual-net-ppp0.device',
+                    },
+                    'PartOf': 'pppoe-isp.service',
+                    'BindsTo': 'sys-devices-virtual-net-ppp0.device',
+                },
+                'Service': {
+                    'Type': 'oneshot',
+                    'ExecStart': '/sbin/tc qdisc replace root dev ppp0 cake bandwidth 37Mbit internet besteffort triple-isolate nat egress memlimit 256mb',
+                    # - no drops save
+                    # - no retries up to 37 Mbit with: iperf3 --client 49.12.184.229 -t999 -i5 --bidir
+                    #'ExecStart': '/sbin/tc qdisc replace root dev ppp0 cake bandwidth 37Mbit internet besteffort nat egress memlimit 256mb',
+                    'RemainAfterExit': 'yes',
+                },
+                'Install': {
+                    'WantedBy': 'multi-user.target',
+                },
+            }
+        },
+    },
+}
bundles/proxmox-ve/items.py (new file, 21 lines)
@@ -0,0 +1,21 @@
+files = {
+    '/etc/apt/apt.conf.d/10pveapthook': {
+        'content_type': 'any',
+        'mode': '0644',
+    },
+    '/etc/apt/apt.conf.d/76pveconf': {
+        'content_type': 'any',
+        'mode': '0444',
+    },
+    '/etc/apt/apt.conf.d/76pveproxy': {
+        'content_type': 'any',
+        'mode': '0644',
+    },
+    '/etc/network/interfaces': {
+        'content_type': 'any',
+    },
+}
+
+symlinks['/etc/ssh/ssh_host_rsa_key.pub'] = {
+    'target': '/etc/ssh/ssh_host_managed_key.pub',
+}
bundles/proxmox-ve/metadata.py (new file, 100 lines)
@@ -0,0 +1,100 @@
+defaults = {
+    'apt': {
+        'packages': {
+            'linux-image-amd64': {
+                'installed': False,
+            },
+            'proxmox-default-kernel': {},
+            # after reboot
+            'proxmox-ve': {},
+            'postfix': {},
+            'open-iscsi': {},
+            'chrony': {},
+            'os-prober': {
+                'installed': False,
+            },
+            'dnsmasq-base': {},
+        },
+        'sources': {
+            'proxmox-ve': {
+                'options': {
+                    'aarch': 'amd64',
+                },
+                'urls': {
+                    'http://download.proxmox.com/debian/pve',
+                },
+                'suites': {
+                    '{codename}',
+                },
+                'components': {
+                    'pve-no-subscription',
+                },
+                'key': 'proxmox-ve-{codename}',
+            },
+        },
+    },
+    # 'nftables': {
+    #     'input': {
+    #         'tcp dport 8006 accept',
+    #     },
+    # },
+    'zfs': {
+        'datasets': {
+            'tank/proxmox-ve': {
+                'mountpoint': '/var/lib/proxmox-ve',
+            },
+        }
+    }
+}
+
+
+# @metadata_reactor.provides(
+#     'systemd',
+# )
+# def bridge(metadata):
+#     return {
+#         'systemd': {
+#             'units': {
+#                 # f'internal.network': {
+#                 #     'Network': {
+#                 #         'Bridge': 'br0',
+#                 #     },
+#                 # },
+#                 'br0.netdev': {
+#                     'NetDev': {
+#                         'Name': 'br0',
+#                         'Kind': 'bridge'
+#                     },
+#                 },
+#                 'br0.network': {
+#                     'Match': {
+#                         'Name': 'br0',
+#                     },
+#                     'Network': {
+#                         'Unmanaged': 'yes'
+#                     },
+#                 },
+#             },
+#         },
+#     }
+
+
+@metadata_reactor.provides(
+    'nginx/has_websockets',
+    'nginx/vhosts',
+)
+def nginx(metadata):
+    return {
+        'nginx': {
+            'has_websockets': True,
+            'vhosts': {
+                metadata.get('proxmox-ve/domain'): {
+                    'content': 'nginx/proxy_pass.conf',
+                    'context': {
+                        'target': 'https://localhost:8006',
+                        'websockets': True,
+                    }
+                },
+            },
+        },
+    }
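The nginx reactor above only needs proxmox-ve/domain to be present; everything else (the vhost file under sites-available, the proxy to https://localhost:8006) falls out of the nginx bundle shown earlier. A minimal sketch of the node side, again assuming the usual nodes.py layout and a made-up hostname:

# hypothetical nodes.py entry -- illustrative only, not part of this diff
nodes = {
    'pve1': {
        'bundles': ['proxmox-ve', 'nginx'],
        'metadata': {
            'proxmox-ve': {
                # becomes an nginx vhost that proxies to https://localhost:8006
                'domain': 'pve.example.com',
            },
        },
    },
}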
Some files were not shown because too many files have changed in this diff.