Compare commits

...

153 commits

Author SHA1 Message Date
dcd2ebc49c
dist-upgrade -> full-upgrade 2025-01-16 10:20:34 +01:00
555350eab7
debian update 2025-01-16 10:20:18 +01:00
e117acac04
backup all doesnt stop on first error 2025-01-09 23:41:21 +01:00
16313b9e40
disable tasnomta charge 2025-01-09 22:45:27 +01:00
033a1cf6e5
macbook gnu grep 2025-01-01 13:04:42 +01:00
8befec9769
readme git sign 2024-12-09 09:07:19 +01:00
d22add5bfd
shortcut 2024-12-09 09:03:14 +01:00
69fb93a664
macbook compat 2024-12-09 08:58:14 +01:00
f4b59dc702
stuff 2024-11-23 15:58:10 +01:00
17aa3d7e48
no wg while at home 2024-11-23 14:50:49 +01:00
8bb9dae45c
all via usb interface, internal is broken 2024-11-23 14:50:40 +01:00
c244645020
kea deps 2024-11-23 14:50:22 +01:00
64029d2147
freescout readme 2024-11-23 11:51:31 +01:00
8081f12315
freescout comment 2024-11-23 11:18:11 +01:00
4ec2d5192a
freescout repair? 2024-11-23 11:02:28 +01:00
0e78afea6a
fix ip 2024-11-23 09:53:05 +01:00
f0d1cf9861
new icinga apt key 2024-11-23 09:53:05 +01:00
e17b023503
some grafana permsission 2024-11-23 09:53:05 +01:00
a3ba06bcb0
pipes -> shlex 2024-11-23 09:52:58 +01:00
01bcfd8638
dhcp from interface 2024-11-23 09:52:48 +01:00
c0944f9fa2
fix dhcp 2024-11-23 09:52:48 +01:00
dedbffa107
vlans 2024-11-23 09:52:48 +01:00
67d5a4bff8
TOTAL FACKUP 2024-11-23 09:52:22 +01:00
6d64a5e52d
dont apply freescout by accident 2024-09-05 23:02:29 +02:00
07e9eb4d8f
freescout timer less timeout 2024-09-05 22:59:24 +02:00
1f53ff63a9
freescout timer timeout and stuff 2024-09-05 22:58:40 +02:00
0eaed67334
dommy against unintentional apply all 2024-09-05 22:08:16 +02:00
fd5e4180fa
supervised update command readme 2024-09-05 22:04:01 +02:00
ab87fe6f96
freescout 2024-09-05 21:57:33 +02:00
95efe10ef6
roundcube 1.6.7 2024-08-19 12:23:35 +02:00
e47c709f39
dedup 2024-07-29 10:26:38 +02:00
24d346962a
omz permissions 2024-07-22 10:35:50 +02:00
3e2cae42e6
nextcloud update 2024-07-03 11:13:41 +02:00
6e410bfc25
nextcloud maintenance_window_start 2024-07-03 11:13:09 +02:00
8ebf4e0ec0
oh my zsh fix permissions 2024-07-03 10:12:27 +02:00
8e8f77e546
ssh host key: use custom path to not collide with auto generated keys 2024-07-03 10:05:44 +02:00
c128b8a1ca
comment 2024-06-23 13:17:44 +02:00
53d2928de2
errors and deprecatons 2024-06-22 02:59:15 +02:00
4996f98cd1 Merge pull request 'homeassistant-supervised' (#18) from homeassistant-supervised into master
Reviewed-on: #18
2024-06-11 18:41:31 +02:00
5b254b1b28
homeassistant-supervised 2024-06-11 18:40:22 +02:00
4348e6045e
zfs.headers use system/architecture 2024-06-11 18:03:32 +02:00
28e9d69571
nginx fix ssl_dhparam path 2024-06-11 18:03:08 +02:00
32011c5b1f
bundles/macbook/files/venv: install optional requirements 2024-06-11 18:02:03 +02:00
5c8e28ddb5
homeass more log 2024-06-05 21:34:47 +02:00
d62e609863
faster better dhparams that actually get used 2024-06-05 21:34:28 +02:00
ff51b41c38
hass bluez 2024-05-31 16:11:15 +02:00
76cf14a9ef
hass more timeout 2024-05-31 16:11:07 +02:00
301889ab8b
homeassistant kinda works 2024-05-31 15:14:49 +02:00
1a163ce9f0
dep order 2024-05-31 15:14:16 +02:00
15a78737cb
sort 2024-05-31 15:13:37 +02:00
d90e0a18e8
update nextcloud 2024-05-28 11:11:22 +02:00
a55ec37d21
elimu-kwanza.de google-site-verification 2024-05-14 11:18:20 +02:00
ee23f3ef6e
some default 2024-05-10 10:28:59 +02:00
de67571f5e
lobercrew killed letsencrypt 2024-05-10 10:28:52 +02:00
a04163b72f
update forgejo 2024-04-30 14:19:28 +02:00
fc7f7e2c23
update gitea 2024-04-30 14:12:51 +02:00
e18306058a
nodes/netcup.mails.py: upgrade roundcube 2024-04-16 10:58:36 +02:00
e982f1e076
comment 2024-04-16 10:58:16 +02:00
a2639bc370
reactivate backupserver 2024-04-16 10:58:00 +02:00
fd1d0ac976
xapian indexes in dataset without snapshots 2024-03-15 15:42:22 +01:00
e3fe0eeb79
wp 2024-02-08 10:46:27 +01:00
782b3fbe0b
improve wireguard script 2024-01-26 13:40:41 +01:00
3d8a77f9e4
tidyup and doc raspberrymatic cert 2024-01-26 12:01:25 +01:00
535ec252b5
mua_helo_restrictions Outlook compat 2024-01-17 18:28:38 +01:00
d1bd92e6cc
bundles/roundcube/files/password.config.inc.php 2024-01-17 17:50:32 +01:00
4f990f8d6f
stromzaehler is offline for now 2024-01-08 12:26:04 +01:00
cd9a7e172e
macbook manage zsh theme and remove clamav 2024-01-08 12:25:44 +01:00
206e62e698
leftover 2023-12-21 11:24:56 +01:00
57aa3b8433
direnv pyenv reset .pip_upgrade_timestamp 2023-12-14 11:34:28 +01:00
70091eca8c
disable steam logger, package is broken 2023-12-11 09:38:49 +01:00
fdd35e0a2c
cargo PATH 2023-12-11 09:38:31 +01:00
ccc54b53a5
nextcloud update against CVE-2023-48239 2023-11-24 08:55:32 +01:00
1222eb813d
grafana/influx file eprmissions 2023-11-15 11:51:36 +01:00
054087fa1c
crystal source http 2023-11-15 11:51:22 +01:00
b64470b160
pg: apt/config/APT/NeverAutoRemove 2023-11-15 11:41:51 +01:00
0dabb39ca4
some minor fixes 2023-10-24 11:17:29 +02:00
d302a22d3e
python 3.12 compat 2023-10-09 08:58:31 +02:00
1f3740dd59
some gitea fixes 2023-09-29 10:27:39 +02:00
919f5f2c08
remove print() 2023-09-29 10:21:18 +02:00
a6f1695e4e
gitea -> forgejo 2023-09-29 10:19:53 +02:00
8f45a39967
bundles/download-server/items.py: obsolete 2023-09-25 17:05:58 +02:00
0eb37a909e
bundles/macbook/files/macbook-update: xcode acept license 2023-09-25 16:59:23 +02:00
2211571689
exclude some dummies 2023-09-25 16:59:03 +02:00
6cb4275e31
bin/upgrade_and_restart_all: use /var/run/reboot-required 2023-09-25 16:58:34 +02:00
5373954567
roundcube disable installer 2023-09-06 09:29:02 +02:00
a5ec5eca7a
rc 1.6 options rename 2023-09-06 09:26:50 +02:00
b459821a8d
roundcube update +
composer_lock_reset
2023-09-06 09:25:17 +02:00
4415bc32f5
macbook clamav 2023-09-06 09:18:21 +02:00
5cb5396817
nodes/home.openhab.py: remove for now 2023-09-04 12:36:28 +02:00
85673abb29
data/apt/keys/grafana.asc: update 2023-09-04 12:35:01 +02:00
29be9d9896
cronekorkn.de redirct twitch 2023-08-30 20:54:30 +02:00
c4da3ee013
nicer 2023-08-24 11:31:23 +02:00
9288836b3a
fix apt config datatype 2023-08-15 12:06:12 +02:00
66624141f8
comment 2023-08-15 10:19:05 +02:00
9c639b4977
remove apt-listchanges 2023-08-14 15:28:00 +02:00
98e05fc151
apt listcahnges fix 2023-08-14 14:45:29 +02:00
402dca9b31
add cronekorkn.de 2023-08-09 19:21:23 +02:00
89d6b6d93c
update nextcloud 2023-08-09 19:21:11 +02:00
33a6e2a979
some more apt configs, which used to be used on the fly 2023-08-09 19:20:55 +02:00
14715fdab7
PATH_add bin 2023-08-09 07:16:06 +02:00
13d91fa512
englisch sprache schwere sprache 2023-08-09 07:14:33 +02:00
0e8afa29e5
bw less parallelism because it breaks :( 2023-08-08 19:11:58 +02:00
d300866bc8
dummy sources.list file 2023-08-02 14:20:24 +02:00
aede8d21c1
unfault 2023-08-01 15:53:14 +02:00
1fe2e0710f
macbook custom ssh key 2023-08-01 12:57:27 +02:00
fe884f446a
is_known_as known_hosts metadata 2023-08-01 12:52:15 +02:00
637ab05590
apt source multiple urls 2023-08-01 12:15:49 +02:00
843712d7bf
apt README 2023-08-01 11:58:01 +02:00
4aa8a18b4f
comment 2023-08-01 10:48:44 +02:00
83cc936c82
apt key variables 2023-07-31 21:12:15 +02:00
e1e1920ffb
apt new sources format 2023-07-31 21:00:17 +02:00
34d55f0849
apt conf in metadata 2023-07-31 18:41:52 +02:00
594b7d3c86
manage /etc/apt, use keyring dir 2023-07-31 11:47:58 +02:00
49b05fe8b8
known_hosts 2023-07-28 10:00:16 +02:00
789897acf6
nginx: more managed, hopefully survives updates 2023-07-28 02:34:37 +02:00
1233da8dd6
disable broken clamav-clamonacc 2023-07-28 02:20:52 +02:00
fce2425c56
java headless 2023-07-28 02:20:27 +02:00
219bbf9983
debian updates 2023-07-28 01:35:41 +02:00
d3b90cfe89
new key 2023-07-28 01:35:26 +02:00
b5d48db4dd
from debian 12 2023-07-28 01:35:15 +02:00
b81b6472fd
htop cpu frq 2023-07-28 01:34:48 +02:00
d380701703
update nextcloud vhost 2023-07-06 00:07:55 +02:00
b2aadeb98c
home.server debian-12 2023-07-05 23:43:20 +02:00
b8675adf99
fail-with-body didnt work 2023-07-05 19:21:27 +02:00
0463637d9f
elimu-kwanza.de 2023-07-05 18:12:37 +02:00
9b7171864a
netcup.mails debian 12 2023-07-05 17:31:42 +02:00
964b248de3
home.backups mdadm config 2023-07-05 15:59:08 +02:00
c756729cac
home.backups debian 12 2023-07-05 15:58:18 +02:00
49498c0ca9
update debian apt keys 2023-07-04 23:11:54 +02:00
be26672b85
obsolete import 2023-07-04 23:00:29 +02:00
0f4b01f996
fix sleep 2023-07-04 22:42:19 +02:00
bb0f123e02
mitigate apt cache config bug 2023-07-04 22:41:17 +02:00
a4fd08a8cd
nextcloud update 2023-07-04 22:29:12 +02:00
4a5711a570
macbook more bw threads 2023-06-26 19:37:42 +02:00
0cf83d0744
some grafana bundle fixes 2023-06-26 19:37:30 +02:00
5e66318c38
debian 12 preps 2023-06-26 19:37:21 +02:00
53d22e8c67
more precise requriements.txt 2023-06-26 19:36:48 +02:00
3256329064
grafana sleep befoer trying admin reset 2023-06-06 17:09:41 +02:00
d2f8df88bf
comment 2023-06-06 11:39:20 +02:00
5259e13eef
apt disable package cache 2023-06-06 10:25:46 +02:00
ab01562c85
nextcloud 25.0.7 2023-06-06 10:25:30 +02:00
4d440bcb5b
bundles/ssh/metadata.py: host key no user name 2023-05-25 10:58:25 +02:00
0fb1899322
macbook more managed 2023-05-25 10:57:39 +02:00
cb463350b4
home.router ... 2023-05-25 08:52:30 +02:00
5dd6e56ca9
initialize_swapfile unless 2023-05-08 16:35:39 +02:00
e8a5379ccd
some weird space 2023-05-05 18:25:21 +02:00
226b152fa0
bundles/systemd-swap/metadata.py: start swap on boot 2023-05-04 18:09:02 +02:00
4e9c6bf67b
libs/systemd.py: nicer ordering for Swap 2023-05-04 18:08:18 +02:00
c0ccd78517
libs/systemd.py: dont sort lists and sets 2023-05-04 17:07:21 +02:00
5b6d31742e
some influxdb dir permission 2023-05-04 17:07:12 +02:00
04a271a1e5
.envrc: delete git deploy cache after 24h 2023-05-04 12:49:57 +02:00
0f74cc8c7e
dmarc adkim & aspf 2023-04-28 11:25:08 +02:00
a0dc65f568
dmarc 2023-04-28 11:11:11 +02:00
163 changed files with 3610 additions and 799 deletions

22
.envrc
View file

@ -1,23 +1,7 @@
#!/usr/bin/env bash
if test -f .venv/bin/python && test "$(realpath .venv/bin/python)" != "$(realpath "$(pyenv which python)")"
then
echo "rebuilding venv für new python version"
rm -rf .venv
fi
python3 -m venv .venv
source .venv/bin/activate
PATH_add .venv/bin
PATH_add bin
python3 -m pip --require-virtualenv --quiet install --upgrade pip wheel
python3 -m pip --require-virtualenv --quiet install --upgrade -r requirements.txt
rm -rf .cache/bw/git_deploy
export BW_GIT_DEPLOY_CACHE=.cache/bw/git_deploy
mkdir -p "$BW_GIT_DEPLOY_CACHE"
export EXPERIMENTAL_UPLOAD_VIA_CAT=1
export BW_ITEM_WORKERS=32
export BW_NODE_WORKERS=12
export DISK_CACHE_DIR=.cache/cache_to_disk
unset PS1
source_env ~/.local/share/direnv/pyenv
source_env ~/.local/share/direnv/venv
source_env ~/.local/share/direnv/bundlewrap

View file

@ -37,3 +37,12 @@ fi
telegraf: execd for daemons
TEST
# git signing
git config --global gpg.format ssh
git config --global commit.gpgsign true
git config user.name CroneKorkN
git config user.email i@ckn.li
git config user.signingkey "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILMVroYmswD4tLk6iH+2tvQiyaMe42yfONDsPDIdFv6I"

View file

@ -10,7 +10,6 @@ nodes = [
for node in sorted(repo.nodes_in_group('debian'))
if not node.dummy
]
reboot_nodes = []
print('updating nodes:', sorted(node.name for node in nodes))
@ -24,14 +23,13 @@ for node in nodes:
print(node.run('DEBIAN_FRONTEND=noninteractive apt update').stdout.decode())
print(node.run('DEBIAN_FRONTEND=noninteractive apt list --upgradable').stdout.decode())
if int(node.run('DEBIAN_FRONTEND=noninteractive apt list --upgradable 2> /dev/null | grep upgradable | wc -l').stdout.decode()):
print(node.run('DEBIAN_FRONTEND=noninteractive apt -y dist-upgrade').stdout.decode())
reboot_nodes.append(node)
print(node.run('DEBIAN_FRONTEND=noninteractive apt -qy full-upgrade').stdout.decode())
# REBOOT IN ORDER
wireguard_servers = [
node
for node in reboot_nodes
for node in nodes
if node.has_bundle('wireguard')
and (
ip_interface(node.metadata.get('wireguard/my_ip')).network.prefixlen <
@ -41,7 +39,7 @@ wireguard_servers = [
wireguard_s2s = [
node
for node in reboot_nodes
for node in nodes
if node.has_bundle('wireguard')
and (
ip_interface(node.metadata.get('wireguard/my_ip')).network.prefixlen ==
@ -51,7 +49,7 @@ wireguard_s2s = [
everything_else = [
node
for node in reboot_nodes
for node in nodes
if not node.has_bundle('wireguard')
]
@ -62,8 +60,11 @@ for node in [
*wireguard_s2s,
*wireguard_servers,
]:
print('rebooting', node.name)
try:
print(node.run('systemctl reboot').stdout.decode())
if node.run('test -e /var/run/reboot-required', may_fail=True).return_code == 0:
print('rebooting', node.name)
print(node.run('systemctl reboot').stdout.decode())
else:
print('not rebooting', node.name)
except Exception as e:
print(e)

View file

@ -5,9 +5,17 @@ from os.path import realpath, dirname
from sys import argv
from ipaddress import ip_network, ip_interface
repo = Repository(dirname(dirname(realpath(__file__))))
if len(argv) != 3:
print(f'usage: {argv[0]} <node> <client>')
exit(1)
repo = Repository(dirname(dirname(realpath(__file__))))
server_node = repo.get_node(argv[1])
if argv[2] not in server_node.metadata.get('wireguard/clients'):
print(f'client {argv[2]} not found in: {server_node.metadata.get("wireguard/clients").keys()}')
exit(1)
data = server_node.metadata.get(f'wireguard/clients/{argv[2]}')
vpn_network = ip_interface(server_node.metadata.get('wireguard/my_ip')).network
@ -20,9 +28,7 @@ for peer in server_node.metadata.get('wireguard/s2s').values():
if not ip_network(network).subnet_of(vpn_network):
allowed_ips.append(ip_network(network))
conf = \
f'''>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
conf = f'''
[Interface]
PrivateKey = {repo.libs.wireguard.privkey(data['peer_id'])}
ListenPort = 51820
@ -35,11 +41,12 @@ PresharedKey = {repo.libs.wireguard.psk(data['peer_id'], server_node.metadata.ge
AllowedIPs = {', '.join(str(client_route) for client_route in sorted(allowed_ips))}
Endpoint = {ip_interface(server_node.metadata.get('network/external/ipv4')).ip}:51820
PersistentKeepalive = 10
'''
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<'''
print('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
print(conf)
print('<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<')
if input("print qrcode? [yN]: ").upper() == 'Y':
if input("print qrcode? [Yn]: ").upper() in ['', 'Y']:
import pyqrcode
print(pyqrcode.create(conf).terminal(quiet_zone=1))

View file

@ -1,3 +1,6 @@
# https://manpages.debian.org/latest/apt/sources.list.5.de.html
# https://repolib.readthedocs.io/en/latest/deb822-format.html
```python
{
'apt': {
@ -5,8 +8,29 @@
'apt-transport-https': {},
},
'sources': {
# place key under data/apt/keys/packages.cloud.google.com.{asc|gpg}
'deb https://packages.cloud.google.com/apt cloud-sdk main',
'debian': {
'types': { # optional, defaults to `{'deb'}``
'deb',
'deb-src',
},
'urls': {
'https://deb.debian.org/debian',
},
'suites': { # at least one
'{codename}',
'{codename}-updates',
'{codename}-backports',
},
'components': { # optional
'main',
'contrib',
'non-free',
},
# key:
# - optional, defaults to source name (`debian` in this example)
# - place key under data/apt/keys/debian-12.{asc|gpg}
'key': 'debian-{version}',
},
},
},
}

View file

@ -1,2 +0,0 @@
APT::Periodic::Update-Package-Lists "1";
APT::Periodic::Unattended-Upgrade "1";

View file

@ -1,3 +0,0 @@
Unattended-Upgrade::Origins-Pattern {
"origin=*";
};

View file

@ -1,35 +1,64 @@
# TODO pin repo: https://superuser.com/a/1595920
from os.path import join
from urllib.parse import urlparse
from glob import glob
from os.path import join, basename
directories = {
'/etc/apt/sources.list.d': {
'/etc/apt': {
'purge': True,
'triggers': {
'action:apt_update',
},
},
'/etc/apt/trusted.gpg.d': {
'/etc/apt/apt.conf.d': {
# existence is expected
'purge': True,
'triggers': {
'action:apt_update',
},
},
'/etc/apt/keyrings': {
# https://askubuntu.com/a/1307181
'purge': True,
'triggers': {
'action:apt_update',
},
},
# '/etc/apt/listchanges.conf.d': {
# 'purge': True,
# 'triggers': {
# 'action:apt_update',
# },
# },
'/etc/apt/preferences.d': {
'purge': True,
'triggers': {
'action:apt_update',
},
},
'/etc/apt/sources.list.d': {
'purge': True,
'triggers': {
'action:apt_update',
},
},
}
files = {
'/etc/apt/sources.list': {
'content': '# managed'
'/etc/apt/apt.conf': {
'content': repo.libs.apt.render_apt_conf(node.metadata.get('apt/config')),
'triggers': {
'action:apt_update',
},
},
'/etc/apt/sources.list': {
'content': '# managed by bundlewrap\n',
'triggers': {
'action:apt_update',
},
},
# '/etc/apt/listchanges.conf': {
# 'content': repo.libs.ini.dumps(node.metadata.get('apt/list_changes')),
# },
'/usr/lib/nagios/plugins/check_apt_upgradable': {
'mode': '0755',
},
@ -37,7 +66,7 @@ files = {
actions = {
'apt_update': {
'command': 'apt-get update -o APT::Update::Error-Mode=any',
'command': 'apt-get update',
'needed_by': {
'pkg_apt:',
},
@ -46,41 +75,22 @@ actions = {
},
}
# group sources by apt server hostname
# create sources.lists and respective keyfiles
hosts = {}
for source_string in node.metadata.get('apt/sources'):
source = repo.libs.apt.AptSource(source_string)
hosts\
.setdefault(source.url.hostname, list())\
.append(source)
# create sources lists and keyfiles
for host, sources in hosts.items():
keyfile = basename(glob(join(repo.path, 'data', 'apt', 'keys', f'{host}.*'))[0])
destination_path = f'/etc/apt/trusted.gpg.d/{keyfile}'
for source in sources:
source.options['signed-by'] = [destination_path]
files[f'/etc/apt/sources.list.d/{host}.list'] = {
'content': '\n'.join(sorted(set(
str(source).format(
codename=node.metadata.get('os_codename'),
version=node.os_version[0], # WIP crystal
)
for source in sources
))),
for name, config in node.metadata.get('apt/sources').items():
# place keyfile
keyfile_destination_path = repo.libs.apt.format_variables(node, config['options']['Signed-By'])
files[keyfile_destination_path] = {
'source': join(repo.path, 'data', 'apt', 'keys', basename(keyfile_destination_path)),
'content_type': 'binary',
'triggers': {
'action:apt_update',
},
}
files[destination_path] = {
'source': join(repo.path, 'data', 'apt', 'keys', keyfile),
'content_type': 'binary',
# place sources.list
files[f'/etc/apt/sources.list.d/{name}.sources'] = {
'content': repo.libs.apt.render_source(node, name),
'triggers': {
'action:apt_update',
},
@ -112,8 +122,6 @@ for package, options in node.metadata.get('apt/packages', {}).items():
# apt-daily.timer: performs apt update
# apt-daily-upgrade.timer: performs apt upgrade
files['/etc/apt/apt.conf.d/20auto-upgrades'] = {}
files['/etc/apt/apt.conf.d/50unattended-upgrades'] = {}
svc_systemd['unattended-upgrades.service'] = {
'needs': [
'pkg_apt:unattended-upgrades',

View file

@ -1,9 +1,55 @@
defaults = {
'apt': {
'packages': {
'unattended-upgrades': {},
'apt-listchanges': {
'installed': False,
},
},
'sources': set(),
'config': {
'DPkg': {
'Pre-Install-Pkgs': {
'/usr/sbin/dpkg-preconfigure --apt || true',
},
'Post-Invoke': {
# keep package cache empty
'/bin/rm -f /var/cache/apt/archives/*.deb || true',
},
'Options': {
# https://unix.stackexchange.com/a/642541/357916
'--force-confold',
'--force-confdef',
},
},
'APT': {
'NeverAutoRemove': {
'^firmware-linux.*',
'^linux-firmware$',
'^linux-image-[a-z0-9]*$',
'^linux-image-[a-z0-9]*-[a-z0-9]*$',
},
'VersionedKernelPackages': {
# kernels
'linux-.*',
'kfreebsd-.*',
'gnumach-.*',
# (out-of-tree) modules
'.*-modules',
'.*-kernel',
},
'Never-MarkAuto-Sections': {
'metapackages',
'tasks',
},
'Move-Autobit-Sections': {
'oldlibs',
},
'Update': {
# https://unix.stackexchange.com/a/653377/357916
'Error-Mode': 'any',
},
},
},
'sources': {},
},
'monitoring': {
'services': {
@ -23,3 +69,109 @@ defaults = {
},
},
}
@metadata_reactor.provides(
'apt/sources',
)
def key(metadata):
return {
'apt': {
'sources': {
source_name: {
'key': source_name,
}
for source_name, source_config in metadata.get('apt/sources').items()
if 'key' not in source_config
},
},
}
@metadata_reactor.provides(
'apt/sources',
)
def signed_by(metadata):
return {
'apt': {
'sources': {
source_name: {
'options': {
'Signed-By': '/etc/apt/keyrings/' + metadata.get(f'apt/sources/{source_name}/key') + '.' + repo.libs.apt.find_keyfile_extension(node, metadata.get(f'apt/sources/{source_name}/key')),
},
}
for source_name in metadata.get('apt/sources')
},
},
}
@metadata_reactor.provides(
'apt/config',
'apt/packages',
)
def unattended_upgrades(metadata):
return {
'apt': {
'config': {
'APT': {
'Periodic': {
'Update-Package-Lists': '1',
'Unattended-Upgrade': '1',
},
},
'Unattended-Upgrade': {
'Origins-Pattern': {
"origin=*",
},
},
},
'packages': {
'unattended-upgrades': {},
},
},
}
# @metadata_reactor.provides(
# 'apt/config',
# 'apt/list_changes',
# )
# def listchanges(metadata):
# return {
# 'apt': {
# 'config': {
# 'DPkg': {
# 'Pre-Install-Pkgs': {
# '/usr/bin/apt-listchanges --apt || test $? -lt 10',
# },
# 'Tools': {
# 'Options': {
# '/usr/bin/apt-listchanges': {
# 'Version': '2',
# 'InfoFD': '20',
# },
# },
# },
# },
# 'Dir': {
# 'Etc': {
# 'apt-listchanges-main': 'listchanges.conf',
# 'apt-listchanges-parts': 'listchanges.conf.d',
# },
# },
# },
# 'list_changes': {
# 'apt': {
# 'frontend': 'pager',
# 'which': 'news',
# 'email_address': 'root',
# 'email_format': 'text',
# 'confirm': 'false',
# 'headers': 'false',
# 'reverse': 'false',
# 'save_seen': '/var/lib/apt/listchanges.db',
# },
# },
# },
# }

View file

@ -36,7 +36,7 @@ for dataset in config['datasets']:
if snapshot_datetime < two_days_ago:
days_ago = (now - snapshot_datetime).days
errors.add(f'dataset "{dataset}" has no backups sind {days_ago} days')
errors.add(f'dataset "{dataset}" has not been backed up for {days_ago} days')
continue
if errors:

View file

@ -25,7 +25,8 @@ def backup_freshness_check(metadata):
'datasets': {
f"{other_node.metadata.get('id')}/{dataset}"
for other_node in repo.nodes
if other_node.has_bundle('backup')
if not other_node.dummy
and other_node.has_bundle('backup')
and other_node.has_bundle('zfs')
and other_node.metadata.get('backup/server') == metadata.get('backup-freshness-check/server')
for dataset, options in other_node.metadata.get('zfs/datasets').items()

View file

@ -35,6 +35,7 @@ def zfs(metadata):
for other_node in repo.nodes:
if (
not other_node.dummy and
other_node.has_bundle('backup') and
other_node.metadata.get('backup/server') == node.name
):

View file

@ -1,13 +1,31 @@
#!/bin/bash
set -exu
set -u
# FIXME: inelegant
% if wol_command:
${wol_command}
% endif
exit=0
failed_paths=""
for path in $(jq -r '.paths | .[]' < /etc/backup/config.json)
do
echo backing up $path
/opt/backup/backup_path "$path"
# set exit to 1 if any backup fails
if [ $? -ne 0 ]
then
echo ERROR: backing up $path failed >&2
exit=5
failed_paths="$failed_paths $path"
fi
done
if [ $exit -ne 0 ]
then
echo "ERROR: failed to backup paths: $failed_paths" >&2
fi
exit $exit

View file

@ -1,6 +1,6 @@
#!/bin/bash
set -exu
set -eu
path=$1
uuid=$(jq -r .client_uuid < /etc/backup/config.json)

View file

@ -19,7 +19,7 @@ directories[f'/var/lib/bind'] = {
'svc_systemd:bind9',
],
'triggers': [
'svc_systemd:bind9:restart',
'svc_systemd:bind9:reload',
],
}
@ -29,7 +29,7 @@ files['/etc/default/bind9'] = {
'svc_systemd:bind9',
],
'triggers': [
'svc_systemd:bind9:restart',
'svc_systemd:bind9:reload',
],
}
@ -43,7 +43,7 @@ files['/etc/bind/named.conf'] = {
'svc_systemd:bind9',
],
'triggers': [
'svc_systemd:bind9:restart',
'svc_systemd:bind9:reload',
],
}
@ -63,7 +63,7 @@ files['/etc/bind/named.conf.options'] = {
'svc_systemd:bind9',
],
'triggers': [
'svc_systemd:bind9:restart',
'svc_systemd:bind9:reload',
],
}
@ -93,7 +93,7 @@ files['/etc/bind/named.conf.local'] = {
'svc_systemd:bind9',
],
'triggers': [
'svc_systemd:bind9:restart',
'svc_systemd:bind9:reload',
],
}
@ -106,7 +106,7 @@ for view_name, view_conf in master_node.metadata.get('bind/views').items():
'svc_systemd:bind9',
],
'triggers': [
'svc_systemd:bind9:restart',
'svc_systemd:bind9:reload',
],
}
@ -127,7 +127,7 @@ for view_name, view_conf in master_node.metadata.get('bind/views').items():
'svc_systemd:bind9',
],
'triggers': [
'svc_systemd:bind9:restart',
'svc_systemd:bind9:reload',
],
}
@ -139,6 +139,6 @@ actions['named-checkconf'] = {
'unless': 'named-checkconf -z',
'needs': [
'svc_systemd:bind9',
'svc_systemd:bind9:restart',
'svc_systemd:bind9:reload',
]
}

View file

@ -1,6 +1,10 @@
from shlex import quote
defaults = {
'build-ci': {},
}
@metadata_reactor.provides(
'users/build-ci/authorized_users',
'sudoers/build-ci',
@ -18,7 +22,7 @@ def ssh_keys(metadata):
},
'sudoers': {
'build-ci': {
f"/usr/bin/chown -R build-ci\:{quote(ci['group'])} {quote(ci['path'])}"
f"/usr/bin/chown -R build-ci\\:{quote(ci['group'])} {quote(ci['path'])}"
for ci in metadata.get('build-ci').values()
}
},

View file

@ -1,10 +1,20 @@
debian_version = min([node.os_version, (11,)])[0] # FIXME
defaults = {
'apt': {
'packages': {
'crystal': {},
},
'sources': {
'deb https://download.opensuse.org/repositories/devel:/languages:/crystal/Debian_{version}/ /',
'crystal': {
# https://software.opensuse.org/download.html?project=devel%3Alanguages%3Acrystal&package=crystal
'urls': {
'http://download.opensuse.org/repositories/devel:/languages:/crystal/Debian_Testing/',
},
'suites': {
'/',
},
},
},
},
}

View file

@ -6,7 +6,7 @@ ssl_cert = </var/lib/dehydrated/certs/${node.metadata.get('mailserver/hostname')
ssl_key = </var/lib/dehydrated/certs/${node.metadata.get('mailserver/hostname')}/privkey.pem
ssl_dh = </etc/dovecot/dhparam.pem
ssl_client_ca_dir = /etc/ssl/certs
mail_location = maildir:~
mail_location = maildir:${node.metadata.get('mailserver/maildir')}/%u:INDEX=${node.metadata.get('mailserver/maildir')}/index/%u
mail_plugins = fts fts_xapian
namespace inbox {

View file

@ -20,6 +20,10 @@ directories = {
'owner': 'vmail',
'group': 'vmail',
},
'/var/vmail/index': {
'owner': 'vmail',
'group': 'vmail',
},
'/var/vmail/sieve': {
'owner': 'vmail',
'group': 'vmail',

View file

@ -1,6 +0,0 @@
# directories = {
# '/var/lib/downloads': {
# 'owner': 'downloads',
# 'group': 'www-data',
# }
# }

View file

@ -0,0 +1,23 @@
Pg Pass workaround: set manually:
```
root@freescout /ro psql freescout
psql (15.6 (Debian 15.6-0+deb12u1))
Type "help" for help.
freescout=# \password freescout
Enter new password for user "freescout":
Enter it again:
freescout=#
\q
```
# problems
# check if /opt/freescout/.env has been reset
# check `psql -h localhost -d freescout -U freescout -W` with pw from .env
# chown -R www-data:www-data /opt/freescout
# sudo su - www-data -c 'php /opt/freescout/artisan freescout:clear-cache' -s /bin/bash
# javascript funny? `sudo su - www-data -c 'php /opt/freescout/artisan storage:link' -s /bin/bash`
# user images missing? restore them from the backup: `/opt/freescout/.zfs/snapshot/zfs-auto-snap_hourly-2024-11-22-1700/storage/app/public/users` `./customers`

View file

@ -0,0 +1,66 @@
# https://github.com/freescout-helpdesk/freescout/wiki/Installation-Guide
run_as = repo.libs.tools.run_as
php_version = node.metadata.get('php/version')
directories = {
'/opt/freescout': {
'owner': 'www-data',
'group': 'www-data',
# chown -R www-data:www-data /opt/freescout
},
}
actions = {
# 'clone_freescout': {
# 'command': run_as('www-data', 'git clone https://github.com/freescout-helpdesk/freescout.git /opt/freescout'),
# 'unless': 'test -e /opt/freescout/.git',
# 'needs': [
# 'pkg_apt:git',
# 'directory:/opt/freescout',
# ],
# },
# 'pull_freescout': {
# 'command': run_as('www-data', 'git -C /opt/freescout fetch origin dist && git -C /opt/freescout reset --hard origin/dist && git -C /opt/freescout clean -f'),
# 'unless': run_as('www-data', 'git -C /opt/freescout fetch origin && git -C /opt/freescout status -uno | grep -q "Your branch is up to date"'),
# 'needs': [
# 'action:clone_freescout',
# ],
# 'triggers': [
# 'action:freescout_artisan_update',
# f'svc_systemd:php{php_version}-fpm.service:restart',
# ],
# },
# 'freescout_artisan_update': {
# 'command': run_as('www-data', 'php /opt/freescout/artisan freescout:after-app-update'),
# 'triggered': True,
# 'needs': [
# f'svc_systemd:php{php_version}-fpm.service:restart',
# 'action:pull_freescout',
# ],
# },
}
# svc_systemd = {
# f'freescout-cron.service': {},
# }
# files = {
# '/opt/freescout/.env': {
# # https://github.com/freescout-helpdesk/freescout/blob/dist/.env.example
# # Every time you are making changes in .env file, in order changes to take an effect you need to run:
# # ´sudo su - www-data -c 'php /opt/freescout/artisan freescout:clear-cache' -s /bin/bash´
# 'owner': 'www-data',
# 'content': '\n'.join(
# f'{k}={v}' for k, v in
# sorted(node.metadata.get('freescout/env').items())
# ) + '\n',
# 'needs': [
# 'directory:/opt/freescout',
# 'action:clone_freescout',
# ],
# },
# }
#sudo su - www-data -s /bin/bash -c 'php /opt/freescout/artisan freescout:create-user --role admin --firstName M --lastName W --email freescout@freibrief.net --password gyh.jzv2bnf6hvc.HKG --no-interaction'
#sudo su - www-data -s /bin/bash -c 'php /opt/freescout/artisan freescout:create-user --role admin --firstName M --lastName W --email freescout@freibrief.net --password gyh.jzv2bnf6hvc.HKG --no-interaction'

View file

@ -0,0 +1,121 @@
from base64 import b64decode
# hash: SCRAM-SHA-256$4096:tQNfqQi7seqNDwJdHqCHbg==$r3ibECluHJaY6VRwpvPqrtCjgrEK7lAkgtUO8/tllTU=:+eeo4M0L2SowfyHFxT2FRqGzezve4ZOEocSIo11DATA=
database_password = repo.vault.password_for(f'{node.name} postgresql freescout').value
defaults = {
'apt': {
'packages': {
'git': {},
'php': {},
'php-pgsql': {},
'php-fpm': {},
'php-mbstring': {},
'php-xml': {},
'php-imap': {},
'php-zip': {},
'php-gd': {},
'php-curl': {},
'php-intl': {},
},
},
'freescout': {
'env': {
'APP_TIMEZONE': 'Europe/Berlin',
'DB_CONNECTION': 'pgsql',
'DB_HOST': '127.0.0.1',
'DB_PORT': '5432',
'DB_DATABASE': 'freescout',
'DB_USERNAME': 'freescout',
'DB_PASSWORD': database_password,
'APP_KEY': 'base64:' + repo.vault.random_bytes_as_base64_for(f'{node.name} freescout APP_KEY', length=32).value
},
},
'php': {
'php.ini': {
'cgi': {
'fix_pathinfo': '0',
},
},
},
'postgresql': {
'roles': {
'freescout': {
'password_hash': repo.libs.postgres.generate_scram_sha_256(
database_password,
b64decode(repo.vault.random_bytes_as_base64_for(f'{node.name} postgres freescout', length=16).value.encode()),
),
},
},
'databases': {
'freescout': {
'owner': 'freescout',
},
},
},
# 'systemd': {
# 'units': {
# f'freescout-cron.service': {
# 'Unit': {
# 'Description': 'Freescout Cron',
# 'After': 'network.target',
# },
# 'Service': {
# 'User': 'www-data',
# 'Nice': 10,
# 'ExecStart': f"/usr/bin/php /opt/freescout/artisan schedule:run"
# },
# 'Install': {
# 'WantedBy': {
# 'multi-user.target'
# }
# },
# }
# },
# },
'systemd-timers': {
'freescout-cron': {
'command': '/usr/bin/php /opt/freescout/artisan schedule:run',
'when': '*-*-* *:*:00',
'RuntimeMaxSec': '180',
'user': 'www-data',
},
},
'zfs': {
'datasets': {
'tank/freescout': {
'mountpoint': '/opt/freescout',
},
},
},
}
@metadata_reactor.provides(
'freescout/env/APP_URL',
)
def freescout(metadata):
return {
'freescout': {
'env': {
'APP_URL': 'https://' + metadata.get('freescout/domain') + '/',
},
},
}
@metadata_reactor.provides(
'nginx/vhosts',
)
def nginx(metadata):
return {
'nginx': {
'vhosts': {
metadata.get('freescout/domain'): {
'content': 'freescout/vhost.conf',
},
},
},
}

View file

@ -8,7 +8,15 @@ defaults = {
'python3-crcmod': {},
},
'sources': {
'deb https://packages.cloud.google.com/apt cloud-sdk main',
'google-cloud': {
'url': 'https://packages.cloud.google.com/apt/',
'suites': {
'cloud-sdk',
},
'components': {
'main',
},
},
},
},
}

View file

@ -2,10 +2,13 @@ from os.path import join
from bundlewrap.utils.dicts import merge_dict
version = version=node.metadata.get('gitea/version')
version = node.metadata.get('gitea/version')
assert not version.startswith('v')
arch = node.metadata.get('system/architecture')
downloads['/usr/local/bin/gitea'] = {
'url': f'https://dl.gitea.io/gitea/{version}/gitea-{version}-linux-amd64',
# https://forgejo.org/releases/
'url': f'https://codeberg.org/forgejo/forgejo/releases/download/v{version}/forgejo-{version}-linux-{arch}',
'sha256_url': '{url}.sha256',
'triggers': {
'svc_systemd:gitea:restart',
@ -45,6 +48,7 @@ files['/etc/gitea/app.ini'] = {
),
),
'owner': 'git',
'mode': '0600',
'context': node.metadata['gitea'],
'triggers': {
'svc_systemd:gitea:restart',

View file

@ -1,4 +1,4 @@
database_password = repo.vault.password_for(f'{node.name} postgresql gitea')
database_password = repo.vault.password_for(f'{node.name} postgresql gitea').value
defaults = {
'apt': {
@ -11,7 +11,20 @@ defaults = {
},
},
'gitea': {
'conf': {},
'conf': {
'DEFAULT': {
'WORK_PATH': '/var/lib/gitea',
},
'database': {
'DB_TYPE': 'postgres',
'HOST': 'localhost:5432',
'NAME': 'gitea',
'USER': 'gitea',
'PASSWD': database_password,
'SSL_MODE': 'disable',
'LOG_SQL': 'false',
},
},
},
'postgresql': {
'roles': {
@ -83,15 +96,6 @@ def conf(metadata):
'INTERNAL_TOKEN': repo.vault.password_for(f'{node.name} gitea internal_token'),
'SECRET_KEY': repo.vault.password_for(f'{node.name} gitea security_secret_key'),
},
'database': {
'DB_TYPE': 'postgres',
'HOST': 'localhost:5432',
'NAME': 'gitea',
'USER': 'gitea',
'PASSWD': database_password,
'SSL_MODE': 'disable',
'LOG_SQL': 'false',
},
'service': {
'NO_REPLY_ADDRESS': f'noreply.{domain}',
},
@ -114,7 +118,7 @@ def nginx(metadata):
'content': 'nginx/proxy_pass.conf',
'context': {
'target': 'http://127.0.0.1:3500',
}
},
},
},
},

View file

@ -18,7 +18,7 @@ admin_password = node.metadata.get('grafana/config/security/admin_password')
port = node.metadata.get('grafana/config/server/http_port')
actions['reset_grafana_admin_password'] = {
'command': f"grafana-cli admin reset-admin-password {quote(admin_password)}",
'unless': f"curl http://admin:{quote(admin_password)}@localhost:{port}/api/org",
'unless': f"sleep 5 && curl http://admin:{quote(admin_password)}@localhost:{port}/api/org --fail",
'needs': [
'svc_systemd:grafana-server',
],
@ -26,15 +26,23 @@ actions['reset_grafana_admin_password'] = {
directories = {
'/etc/grafana': {},
'/etc/grafana/provisioning': {},
'/etc/grafana/provisioning': {
'owner': 'grafana',
'group': 'grafana',
},
'/etc/grafana/provisioning/datasources': {
'purge': True,
},
'/etc/grafana/provisioning/dashboards': {
'purge': True,
},
'/var/lib/grafana': {},
'/var/lib/grafana': {
'owner': 'grafana',
'group': 'grafana',
},
'/var/lib/grafana/dashboards': {
'owner': 'grafana',
'group': 'grafana',
'purge': True,
'triggers': [
'svc_systemd:grafana-server:restart',
@ -45,6 +53,8 @@ directories = {
files = {
'/etc/grafana/grafana.ini': {
'content': repo.libs.ini.dumps(node.metadata.get('grafana/config')),
'owner': 'grafana',
'group': 'grafana',
'triggers': [
'svc_systemd:grafana-server:restart',
],
@ -54,6 +64,8 @@ files = {
'apiVersion': 1,
'datasources': list(node.metadata.get('grafana/datasources').values()),
}),
'owner': 'grafana',
'group': 'grafana',
'triggers': [
'svc_systemd:grafana-server:restart',
],
@ -70,6 +82,8 @@ files = {
},
}],
}),
'owner': 'grafana',
'group': 'grafana',
'triggers': [
'svc_systemd:grafana-server:restart',
],
@ -158,6 +172,8 @@ for dashboard_id, monitored_node in enumerate(monitored_nodes, start=1):
files[f'/var/lib/grafana/dashboards/{monitored_node.name}.json'] = {
'content': json.dumps(dashboard, indent=4),
'owner': 'grafana',
'group': 'grafana',
'triggers': [
'svc_systemd:grafana-server:restart',
]

View file

@ -8,8 +8,19 @@ defaults = {
'grafana': {},
},
'sources': {
'deb https://packages.grafana.com/oss/deb stable main',
'grafana': {
'urls': {
'https://packages.grafana.com/oss/deb',
},
'suites': {
'stable',
},
'components': {
'main',
},
},
},
},
'grafana': {
'config': {

View file

@ -0,0 +1,23 @@
https://github.com/home-assistant/supervised-installer?tab=readme-ov-file
https://github.com/home-assistant/os-agent/tree/main?tab=readme-ov-file#using-home-assistant-supervised-on-debian
https://docs.docker.com/engine/install/debian/
https://www.home-assistant.io/installation/linux#install-home-assistant-supervised
https://github.com/home-assistant/supervised-installer
https://github.com/home-assistant/architecture/blob/master/adr/0014-home-assistant-supervised.md
DATA_SHARE=/usr/share/hassio dpkg --force-confdef --force-confold -i homeassistant-supervised.deb
fresh Debian install
install Home Assistant
verify that it works
then apply bundlewrap on top
https://www.home-assistant.io/integrations/http/#ssl_certificate
`wget "$(curl -L https://api.github.com/repos/home-assistant/supervised-installer/releases/latest | jq -r '.assets[0].browser_download_url')" -O homeassistant-supervised.deb && dpkg -i homeassistant-supervised.deb`

View file

@ -0,0 +1,30 @@
from shlex import quote
version = node.metadata.get('homeassistant/os_agent_version')
directories = {
'/usr/share/hassio': {},
}
actions = {
'install_os_agent': {
'command': ' && '.join([
f'wget -O /tmp/os-agent.deb https://github.com/home-assistant/os-agent/releases/download/{quote(version)}/os-agent_{quote(version)}_linux_aarch64.deb',
'DEBIAN_FRONTEND=noninteractive dpkg -i /tmp/os-agent.deb',
]),
'unless': f'test "$(apt -qq list os-agent | cut -d" " -f2)" = "{quote(version)}"',
'needs': {
'pkg_apt:',
'zfs_dataset:tank/homeassistant',
},
},
'install_homeassistant_supervised': {
'command': 'wget -O /tmp/homeassistant-supervised.deb https://github.com/home-assistant/supervised-installer/releases/latest/download/homeassistant-supervised.deb && apt install /tmp/homeassistant-supervised.deb',
'unless': 'apt -qq list homeassistant-supervised | grep -q "installed"',
'needs': {
'action:install_os_agent',
},
},
}

View file

@ -0,0 +1,65 @@
defaults = {
'apt': {
'packages': {
# homeassistant-supervised
'apparmor': {},
'bluez': {},
'cifs-utils': {},
'curl': {},
'dbus': {},
'jq': {},
'libglib2.0-bin': {},
'lsb-release': {},
'network-manager': {},
'nfs-common': {},
'systemd-journal-remote': {},
'systemd-resolved': {},
'udisks2': {},
'wget': {},
# docker
'docker-ce': {},
'docker-ce-cli': {},
'containerd.io': {},
'docker-buildx-plugin': {},
'docker-compose-plugin': {},
},
'sources': {
# docker: https://docs.docker.com/engine/install/debian/#install-using-the-repository
'docker': {
'urls': {
'https://download.docker.com/linux/debian',
},
'suites': {
'{codename}',
},
'components': {
'stable',
},
},
},
},
'zfs': {
'datasets': {
'tank/homeassistant': {
'mountpoint': '/usr/share/hassio',
'needed_by': {
'directory:/usr/share/hassio',
},
},
},
},
}
@metadata_reactor.provides(
    'nginx/vhosts',
)
def nginx(metadata):
    """Register an nginx vhost for the homeassistant domain."""
    domain = metadata.get('homeassistant/domain')
    vhost = {
        'content': 'homeassistant/vhost.conf',
    }
    return {
        'nginx': {
            'vhosts': {
                domain: vhost,
            },
        },
    }

View file

@ -1,20 +0,0 @@
users = {
'homeassistant': {
'home': '/var/lib/homeassistant',
},
}
directories = {
'/var/lib/homeassistant': {
'owner': 'homeassistant',
},
'/var/lib/homeassistant/config': {
'owner': 'homeassistant',
},
'/var/lib/homeassistant/venv': {
'owner': 'homeassistant',
},
}
# https://wiki.instar.com/de/Software/Linux/Home_Assistant/

View file

@ -1,20 +0,0 @@
defaults = {
'apt': {
'packages': {
'python3': {},
'python3-dev': {},
'python3-pip': {},
'python3-venv': {},
'libffi-dev': {},
'libssl-dev': {},
'libjpeg-dev': {},
'zlib1g-dev': {},
'autoconf': {},
'build-essential': {},
'libopenjp2-7': {},
'libtiff5': {},
'libturbojpeg0-dev': {},
'tzdata': {},
},
},
}

View file

@ -24,7 +24,7 @@ header_margin=1
detailed_cpu_time=0
cpu_count_from_one=1
show_cpu_usage=0
show_cpu_frequency=0
show_cpu_frequency=1
show_cpu_temperature=0
degree_fahrenheit=0
update_process_names=0

View file

@ -13,9 +13,9 @@ apply Notification "mail-icingaadmin" to Host {
user_groups = host.vars.notification.mail.groups
users = host.vars.notification.mail.users
//interval = 2h
//vars.notification_logtosyslog = true
assign where host.vars.notification.mail
}
@ -25,9 +25,9 @@ apply Notification "mail-icingaadmin" to Service {
user_groups = host.vars.notification.mail.groups
users = host.vars.notification.mail.users
//interval = 2h
//vars.notification_logtosyslog = true
assign where host.vars.notification.mail
}

View file

@ -269,7 +269,7 @@ svc_systemd = {
'icinga2.service': {
'needs': [
'pkg_apt:icinga2-ido-pgsql',
'svc_systemd:postgresql',
'svc_systemd:postgresql.service',
],
},
}

View file

@ -9,7 +9,21 @@ defaults = {
'monitoring-plugins': {},
},
'sources': {
'deb https://packages.icinga.com/debian icinga-{codename} main',
'icinga': {
'types': {
'deb',
'deb-src',
},
'urls': {
'https://packages.icinga.com/debian',
},
'suites': {
'icinga-{codename}',
},
'components': {
'main',
},
},
},
},
'icinga2': {

View file

@ -11,10 +11,24 @@ defaults = {
'php-imagick': {},
'php-pgsql': {},
'icingaweb2': {},
'icingaweb2-module-monitoring': {},
#'icingaweb2-module-monitoring': {}, # ?
},
'sources': {
'deb https://packages.icinga.com/debian icinga-{codename} main',
'icinga': {
'types': {
'deb',
'deb-src',
},
'urls': {
'https://packages.icinga.com/debian',
},
'suites': {
'icinga-{codename}',
},
'components': {
'main',
},
},
},
},
'icingaweb2': {
@ -163,6 +177,7 @@ def nginx(metadata):
metadata.get('icingaweb2/hostname'): {
'content': 'icingaweb2/vhost.conf',
'context': {
'php_version': metadata.get('php/version'),
},
},
},

View file

@ -4,6 +4,7 @@ from shlex import quote
directories['/var/lib/influxdb'] = {
'owner': 'influxdb',
'group': 'influxdb',
'mode': '0750',
'needs': [
'zfs_dataset:tank/influxdb',
],

View file

@ -7,7 +7,17 @@ defaults = {
'influxdb2-cli': {},
},
'sources': {
'deb https://repos.influxdata.com/debian {codename} stable',
'influxdata': {
'urls': {
'https://repos.influxdata.com/debian',
},
'suites': {
'stable',
},
'components': {
'main',
},
},
},
},
'nftables': {

View file

@ -19,7 +19,7 @@ def apt(metadata):
return {
'apt': {
'packages': {
f'openjdk-{metadata.get("java/version")}-jre': {},
f'openjdk-{metadata.get("java/version")}-jre-headless': {},
}
}
}

View file

@ -0,0 +1,21 @@
from json import dumps
from bundlewrap.metadata import MetadataJSONEncoder
files = {
'/etc/kea/kea-dhcp4.conf': {
'content': dumps(node.metadata.get('kea'), indent=4, sort_keys=True, cls=MetadataJSONEncoder),
'triggers': [
'svc_systemd:kea-dhcp4-server:restart',
],
},
}
svc_systemd = {
'kea-dhcp4-server': {
'needs': [
'pkg_apt:kea-dhcp4-server',
'file:/etc/kea/kea-dhcp4.conf',
'svc_systemd:systemd-networkd:restart',
],
},
}

View file

@ -0,0 +1,96 @@
from ipaddress import ip_interface, ip_network
hashable = repo.libs.hashable.hashable
defaults = {
'apt': {
'packages': {
'kea-dhcp4-server': {},
},
},
'kea': {
'Dhcp4': {
'interfaces-config': {
'interfaces': set(),
},
'lease-database': {
'type': 'memfile',
'lfc-interval': 3600
},
'subnet4': set(),
'loggers': set([
hashable({
'name': 'kea-dhcp4',
'output_options': [
{
'output': 'syslog',
}
],
'severity': 'INFO',
}),
]),
},
},
}
@metadata_reactor.provides(
    'kea/Dhcp4/interfaces-config/interfaces',
    'kea/Dhcp4/subnet4',
)
def subnets(metadata):
    """Build the kea DHCPv4 subnet4 and served-interface config.

    Collects static reservations (MAC -> IPv4) from every node in the repo,
    then, for each local network carrying a 'dhcp_server_config', emits a
    kea subnet definition (pool, router, DNS, matching reservations) and
    records the interface kea should listen on.
    """
    subnet4 = set()
    interfaces = set()
    # reservations from every node that declares a MAC for one of its networks
    reservations = set(
        hashable({
            'hw-address': network_conf['mac'],
            'ip-address': str(ip_interface(network_conf['ipv4']).ip),
        })
        for other_node in repo.nodes
        for network_conf in other_node.metadata.get('network', {}).values()
        if 'mac' in network_conf
    )
    for network_name, network_conf in metadata.get('network').items():
        dhcp_server_config = network_conf.get('dhcp_server_config', None)
        if dhcp_server_config:
            _network = ip_network(dhcp_server_config['subnet'])
            # hoisted out of the f-string: same-quote nesting inside an
            # f-string is only valid on Python >= 3.12 (PEP 701)
            pool = f"{dhcp_server_config['pool_from']} - {dhcp_server_config['pool_to']}"
            subnet4.add(hashable({
                'subnet': dhcp_server_config['subnet'],
                'pools': [
                    {
                        'pool': pool,
                    },
                ],
                'option-data': [
                    {
                        'name': 'routers',
                        'data': dhcp_server_config['router'],
                    },
                    {
                        'name': 'domain-name-servers',
                        'data': '10.0.10.2',
                    },
                ],
                # only reservations whose address falls inside this subnet
                'reservations': set(
                    reservation
                    for reservation in reservations
                    if ip_interface(reservation['ip-address']).ip in _network
                ),
            }))
            # NOTE(review): listening only on interfaces that actually serve
            # DHCP — confirm against the original file's indentation
            interfaces.add(network_conf.get('interface', network_name))
    return {
        'kea': {
            'Dhcp4': {
                'interfaces-config': {
                    'interfaces': interfaces,
                },
                'subnet4': subnet4,
            },
        },
    }

View file

@ -1,36 +1,36 @@
hostname "CroneKorkN : ${name}"
sv_contact "admin@sublimity.de"
// assign server to steam group
sv_steamgroup "${','.join(steamgroups)}"
rcon_password "${rcon_password}"
// no annoying message of the day
motd_enabled 0
// enable cheats
sv_cheats 1
// allow inconsistent files on clients (weapon mods for example)
sv_consistency 0
// connect from internet
sv_lan 0
// join game at any point
sv_allow_lobby_connect_only 0
// allowed modes
sv_gametypes "coop,realism,survival,versus,teamversus,scavenge,teamscavenge"
// network
sv_minrate 30000
sv_maxrate 60000
sv_mincmdrate 66
sv_maxcmdrate 101
// logging
sv_logsdir "logs-${name}" //Folder in the game directory where server logs will be stored.
log on //Creates a logfile (on | off)
sv_logecho 0 //default 0; Echo log information to the console.

View file

@ -56,6 +56,7 @@ for domain in node.metadata.get('letsencrypt/domains').keys():
'unless': f'/etc/dehydrated/letsencrypt-ensure-some-certificate {domain} true',
'needs': {
'file:/etc/dehydrated/letsencrypt-ensure-some-certificate',
'pkg_apt:dehydrated',
},
'needed_by': {
'svc_systemd:nginx',

41
bundles/linux/items.py Normal file
View file

@ -0,0 +1,41 @@
from shlex import quote
def generate_sysctl_key_value_pairs_from_json(json_data, parents=None):
    """Flatten a nested dict of sysctl settings into (key_path, value) pairs.

    Walks *json_data* depth-first; every leaf yields a tuple of
    (list_of_nested_keys, leaf_value), e.g. {'net': {'ipv4': 1}} yields
    (['net', 'ipv4'], 1).

    Raises ValueError on lists, which have no sysctl representation.
    """
    # default is None instead of [] to avoid the shared-mutable-default
    # pitfall; a fresh list is created per top-level call
    if parents is None:
        parents = []
    if isinstance(json_data, dict):
        for key, value in json_data.items():
            yield from generate_sysctl_key_value_pairs_from_json(value, [*parents, key])
    elif isinstance(json_data, list):
        raise ValueError(f"List not supported: '{json_data}'")
    else:
        # leaf node: yield the full key path plus its value
        yield (parents, json_data)
key_value_pairs = generate_sysctl_key_value_pairs_from_json(node.metadata.get('sysctl'))
files= {
'/etc/sysctl.conf': {
'content': '\n'.join(
sorted(
f"{'.'.join(path)}={value}"
for path, value in key_value_pairs
),
),
'triggers': [
'svc_systemd:systemd-sysctl.service:restart',
],
},
}
svc_systemd = {
'systemd-sysctl.service': {},
}
for path, value in key_value_pairs:
actions[f'reload_sysctl.conf_{path}'] = {
'command': f"sysctl --values {'.'.join(path)} | grep -q {quote('^'+value+'$')}",
'needs': [
f'action:systemd-sysctl.service',
f'action:systemd-sysctl.service:restart',
],
}

View file

@ -0,0 +1,3 @@
defaults = {
'sysctl': {},
}

View file

@ -20,18 +20,19 @@ files = {
}
actions = {
'systemd-locale': {
'command': f'localectl set-locale LANG="{default_locale}"',
'unless': f'localectl | grep -Fi "system locale" | grep -Fi "{default_locale}"',
'triggers': {
'action:locale-gen',
},
},
'locale-gen': {
'command': 'locale-gen',
'triggered': True,
'needs': {
'pkg_apt:locales',
},
},
'systemd-locale': {
'command': f'localectl set-locale LANG="{default_locale}"',
'unless': f'localectl | grep -Fi "system locale" | grep -Fi "{default_locale}"',
'preceded_by': {
'action:locale-gen',
'action:systemd-locale',
},
},
}

View file

@ -0,0 +1,6 @@
#!/usr/bin/env bash
cd "$OLDPWD"
export BW_ITEM_WORKERS=$(expr "$(sysctl -n hw.logicalcpu)" '*' 12 '/' 10)
export BW_NODE_WORKERS=$(expr 320 '/' "$BW_ITEM_WORKERS")

View file

@ -0,0 +1,6 @@
#!/usr/bin/env bash
cd "$OLDPWD"
PATH_add "/opt/homebrew/opt/gnu-sed/libexec/gnubin"
PATH_add "/opt/homebrew/opt/grep/libexec/gnubin"

View file

@ -10,6 +10,7 @@ password required pam_deny.so
session required pam_permit.so
EOT
sudo xcodebuild -license accept
xcode-select --install
git -C ~/.zsh/oh-my-zsh pull
@ -17,7 +18,7 @@ git -C ~/.zsh/oh-my-zsh pull
brew upgrade
brew upgrade --cask --greedy
pyenv install --keep-existing
pyenv install --skip-existing
sudo softwareupdate -ia --verbose
@ -41,3 +42,5 @@ fi
sudo systemsetup -setremotelogin on # enable ssh
pip install --upgrade pip
# https://sysadmin-journal.com/apache-directory-studio-on-the-apple-m1/

View file

@ -0,0 +1,9 @@
#!/usr/bin/env bash
# direnv helper: rebuild the virtualenv when pyenv's python has changed.
cd "$OLDPWD"
# compare the venv's interpreter with the one pyenv currently resolves;
# a mismatch means the venv was built against an old python -> rebuild
if test -f .venv/bin/python && test "$(realpath .venv/bin/python)" != "$(realpath "$(pyenv which python)")"
then
    echo "rebuilding venv for new python version"
    rm -rf .venv .pip_upgrade_timestamp
fi

View file

@ -0,0 +1,3 @@
#!/usr/bin/env bash
cd "$OLDPWD"

View file

@ -0,0 +1,27 @@
#!/usr/bin/env bash
cd "$OLDPWD"
python3 -m venv .venv
source .venv/bin/activate
PATH_add .venv/bin
NOW=$(date +%s)
if test -e .pip_upgrade_timestamp
then
LAST=$(cat .pip_upgrade_timestamp)
else
LAST=0
fi
DELTA=$(expr "$NOW" - "$LAST")
echo "last pip upgrade $DELTA seconds ago"
if test "$DELTA" -gt 86400
then
python3 -m pip --require-virtualenv install pip wheel --upgrade
python3 -m pip --require-virtualenv install -r requirements.txt --upgrade
if test -e optional-requirements.txt
then
python3 -m pip --require-virtualenv install -r optional-requirements.txt --upgrade
fi
date +%s > .pip_upgrade_timestamp
fi

View file

@ -0,0 +1,33 @@
export PATH=~/.bin:$PATH
export PATH=~/.cargo/bin:$PATH
export ZSH=~/.zsh/oh-my-zsh
export ZSH_HOSTNAME='sm'
ZSH_THEME="bw"
HIST_STAMPS="yyyy/mm/dd"
plugins=(
zsh-autosuggestions
git
)
source $ZSH/oh-my-zsh.sh
ulimit -S -n 24000
antivir() {
printf 'scanning for viruses' && sleep 1 && printf '.' && sleep 1 && printf '.' && sleep 1 && printf '.' &&
sleep 1 && echo '\nyour computer is safe!'
}
eval "$(rbenv init -)"
eval "$(pyenv init -)"
eval "$(direnv hook zsh)"
eval "$(op completion zsh)"; compdef _op op
# //S/M
sshn() {
ssh "$(tr '.' ' ' <<< "$1" | tac -s ' ' | xargs | tr ' ' '.').smhss.de"
}
pingn() {
ping "$(tr '.' ' ' <<< "$1" | tac -s ' ' | xargs | tr ' ' '.').smhss.de"
}

View file

@ -1,9 +1,42 @@
# brew install
actions['brew_install'] = {
'command': '/opt/homebrew/bin/brew install ' + ' '.join(node.metadata.get('brew')),
'unless': f"""PKGS=$(/opt/homebrew/bin/brew leaves); for p in {' '.join(node.metadata.get('brew'))}; do grep -q "$p" <<< $PKGS || exit 9; done"""
}
# bw init
directories['/Users/mwiegand/.config/bundlewrap/lock'] = {}
# home
files['/Users/mwiegand/.zshrc'] = {
'source': 'zshrc',
'mode': '0644',
}
# updater
files['/Users/mwiegand/.bin/macbook-update'] = {
'mode': '755',
}
with open(f'{repo.path}/bundles/zsh/files/bw.zsh-theme') as f:
files['/Users/mwiegand/.zsh/oh-my-zsh/themes/bw.zsh-theme'] = {
'content': f.read(),
'mode': '0644',
}
# direnv
directories['/Users/mwiegand/.local/share/direnv'] = {}
files['/Users/mwiegand/.local/share/direnv/gnu'] = {}
files['/Users/mwiegand/.local/share/direnv/pyenv'] = {}
files['/Users/mwiegand/.local/share/direnv/venv'] = {}
files['/Users/mwiegand/.local/share/direnv/bundlewrap'] = {}
##################
for element in [*files.values(), *directories.values()]:

View file

@ -1 +1,3 @@
defaults = {}
defaults = {
'brew': {},
}

View file

@ -1,6 +1,6 @@
<?php
// https://raw.githubusercontent.com/Radiergummi/autodiscover/master/autodiscover/autodiscover.php
/********************************
* Autodiscover responder
@ -8,45 +8,45 @@
* This PHP script is intended to respond to any request to http(s)://mydomain.com/autodiscover/autodiscover.xml.
* If configured properly, it will send a spec-complient autodiscover XML response, pointing mail clients to the
* appropriate mail services.
* If you use MAPI or ActiveSync, stick with the Autodiscover service your mail server provides for you. But if
* If you use MAPI or ActiveSync, stick with the Autodiscover service your mail server provides for you. But if
* you use POP/IMAP servers, this will provide autoconfiguration to Outlook, Apple Mail and mobile devices.
*
* To work properly, you'll need to set the service (sub)domains below in the settings section to the correct
* To work properly, you'll need to set the service (sub)domains below in the settings section to the correct
* domain names, adjust ports and SSL.
*/
//get raw POST data so we can extract the email address
$request = file_get_contents("php://input");
// optional debug log
# file_put_contents( 'request.log', $request, FILE_APPEND );
// retrieve email address from client request
preg_match( "/\<EMailAddress\>(.*?)\<\/EMailAddress\>/", $request, $email );
// check for invalid mail, to prevent XSS
if (filter_var($email[1], FILTER_VALIDATE_EMAIL) === false) {
throw new Exception('Invalid E-Mail provided');
}
// get domain from email address
$domain = substr( strrchr( $email[1], "@" ), 1 );
/**************************************
* Port and server settings below *
**************************************/
// IMAP settings
$imapServer = 'imap.' . $domain; // imap.example.com
$imapPort = 993;
$imapSSL = true;
// SMTP settings
$smtpServer = 'smtp.' . $domain; // smtp.example.com
$smtpPort = 587;
$smtpSSL = true;
//set Content-Type
header( 'Content-Type: application/xml' );
?>
<?php echo '<?xml version="1.0" encoding="utf-8" ?>'; ?>

View file

@ -33,6 +33,12 @@ defaults = {
'mountpoint': '/var/vmail',
'compression': 'on',
},
'tank/vmail/index': {
'mountpoint': '/var/vmail/index',
'compression': 'on',
'com.sun:auto-snapshot': 'false',
'backup': False,
},
},
},
}
@ -43,12 +49,30 @@ defaults = {
)
def dns(metadata):
dns = {}
for domain in metadata.get('mailserver/domains'):
dns[domain] = {
'MX': [f"5 {metadata.get('mailserver/hostname')}."],
'TXT': ['v=spf1 a mx -all'],
}
report_email = metadata.get('mailserver/dmarc_report_email')
dns[f'_dmarc.{domain}'] = {
'TXT': ['; '.join(f'{k}={v}' for k, v in {
# dmarc version
'v': 'DMARC1',
# reject on failure
'p': 'reject',
# standard reports
'rua': f'mailto:{report_email}',
# forensic reports
'fo': 1,
'ruf': f'mailto:{report_email}',
# require alignment between the DKIM domain and the parent Header From domain
'adkim': 's',
# require alignment between the SPF domain (the sender) and the Header From domain
'aspf': 's',
}.items())]
}
return {
'dns': dns,
@ -66,4 +90,4 @@ def letsencrypt(metadata):
},
},
},
}
}

View file

@ -0,0 +1 @@
https://mariadb.com/kb/en/systemd/#configuring-mariadb-to-write-the-error-log-to-syslog

View file

@ -0,0 +1,11 @@
% for section, options in sorted(conf.items()):
[${section}]
% for key, value in sorted(options.items()):
% if value is None:
${key}
% else:
${key} = ${value}
% endif
% endfor
% endfor

91
bundles/mariadb/items.py Normal file
View file

@ -0,0 +1,91 @@
from shlex import quote
def mariadb(sql, **kwargs):
    """Render a non-interactive mariadb CLI call executing *sql*.

    Keyword arguments become "--key value" CLI flags; the statement is
    shell-quoted. -Bsr = batch mode, silent, raw output.
    """
    parts = ['mariadb']
    for key, value in kwargs.items():
        parts.append(f'--{key} {value}')
    parts.append(f'-Bsr --execute {quote(sql)}')
    return ' '.join(parts)
directories = {
'/var/lib/mysql': {
'owner': 'mysql',
'group': 'mysql',
'needs': [
'zfs_dataset:tank/mariadb',
],
'needed_by': [
'pkg_apt:mariadb-server',
'pkg_apt:mariadb-client',
],
},
}
files = {
'/etc/mysql/conf.d/override.conf': {
'context': {
'conf': node.metadata.get('mariadb/conf'),
},
'content_type': 'mako',
},
}
svc_systemd = {
'mariadb.service': {
'needs': [
'pkg_apt:mariadb-server',
'pkg_apt:mariadb-client',
],
},
}
actions = {
'mariadb_sec_remove_anonymous_users': {
'command': mariadb("DELETE FROM mysql.global_priv WHERE User=''"),
'unless': mariadb("SELECT count(0) FROM mysql.global_priv WHERE User = ''") + " | grep -q '^0$'",
'needs': [
'svc_systemd:mariadb.service',
],
'triggers': [
'svc_systemd:mariadb.service:restart',
],
},
'mariadb_sec_remove_remote_root': {
'command': mariadb("DELETE FROM mysql.global_priv WHERE User='root' AND Host NOT IN ('localhost', '127.0.0.1', '::1')"),
'unless': mariadb("SELECT count(0) FROM mysql.global_priv WHERE User='root' AND Host NOT IN ('localhost', '127.0.0.1', '::1')") + " | grep -q '^0$'",
'needs': [
'svc_systemd:mariadb.service',
],
'triggers': [
'svc_systemd:mariadb.service:restart',
],
},
}
for db, conf in node.metadata.get('mariadb/databases', {}).items():
actions[f'mariadb_create_database_{db}'] = {
'command': mariadb(f"CREATE DATABASE {db}"),
'unless': mariadb(f"SHOW DATABASES LIKE '{db}'") + f" | grep -q '^{db}$'",
'needs': [
'svc_systemd:mariadb.service',
],
}
actions[f'mariadb_user_{db}_create'] = {
'command': mariadb(f"CREATE USER {db}"),
'unless': mariadb(f"SELECT User FROM mysql.user WHERE User = '{db}'") + f" | grep -q '^{db}$'",
'needs': [
f'action:mariadb_create_database_{db}',
],
}
pw = conf['password']
actions[f'mariadb_user_{db}_password'] = {
'command': mariadb(f"SET PASSWORD FOR {db} = PASSWORD('{conf['password']}')"),
'unless': f'echo {quote(pw)} | mariadb -u {db} -e quit -p',
'needs': [
f'action:mariadb_user_{db}_create',
],
}
actions[f'mariadb_grant_privileges_to_{db}'] = {
'command': mariadb(f"GRANT ALL PRIVILEGES ON {db}.* TO '{db}'", database=db),
'unless': mariadb(f"SHOW GRANTS FOR {db}") + f" | grep -q '^GRANT ALL PRIVILEGES ON `{db}`.* TO `{db}`@`%`'",
'needs': [
f'action:mariadb_user_{db}_create',
],
}

View file

@ -0,0 +1,45 @@
defaults = {
'apt': {
'packages': {
'mariadb-server': {
'needs': {
'zfs_dataset:tank/mariadb',
},
},
'mariadb-client': {
'needs': {
'zfs_dataset:tank/mariadb',
},
},
},
},
'mariadb': {
'databases': {},
'conf': {
# https://www.reddit.com/r/zfs/comments/u1xklc/mariadbmysql_database_settings_for_zfs
'mysqld': {
'skip-innodb_doublewrite': None,
'innodb_flush_method': 'fsync',
'innodb_doublewrite': '0',
'innodb_use_atomic_writes': '0',
'innodb_use_native_aio': '0',
'innodb_read_io_threads': '10',
'innodb_write_io_threads': '10',
'innodb_buffer_pool_size': '26G',
'innodb_flush_log_at_trx_commit': '1',
'innodb_log_file_size': '1G',
'innodb_flush_neighbors': '0',
'innodb_fast_shutdown': '2',
},
},
},
'zfs': {
'datasets': {
'tank/mariadb': {
'mountpoint': '/var/lib/mysql',
'recordsize': '16384',
'atime': 'off',
},
},
},
}

View file

@ -5,38 +5,89 @@ defaults = {
}
@metadata_reactor.provides(
'network',
)
def dhcp(metadata):
networks = {}
for network_name, network_conf in metadata.get('network').items():
_interface = ip_interface(network_conf['ipv4'])
_ip = _interface.ip
_network = _interface.network
_hosts = list(_network.hosts())
if network_conf.get('dhcp_server', False):
networks[network_name] = {
'dhcp_server_config': {
'subnet': str(_network),
'pool_from': str(_hosts[len(_hosts)//2]),
'pool_to': str(_hosts[-3]),
'router': str(_ip),
'domain-name-servers': str(_ip),
}
}
return {
'network': networks,
}
@metadata_reactor.provides(
'systemd/units',
)
def units(metadata):
units = {}
for type, network in metadata.get('network').items():
units[f'{type}.network'] = {
for network_name, network_conf in metadata.get('network').items():
interface_type = network_conf.get('type', None)
# network
units[f'{network_name}.network'] = {
'Match': {
'Name': network['interface'],
'Name': network_name if interface_type == 'vlan' else network_conf['interface'],
},
'Network': {
'DHCP': network.get('dhcp', 'no'),
'IPv6AcceptRA': network.get('dhcp', 'no'),
'DHCP': network_conf.get('dhcp', 'no'),
'IPv6AcceptRA': network_conf.get('dhcp', 'no'),
'VLAN': set(network_conf.get('vlans', set()))
}
}
# type
if interface_type:
units[f'{network_name}.network']['Match']['Type'] = interface_type
# ips
for i in [4, 6]:
if network.get(f'ipv{i}', None):
units[f'{type}.network'].update({
if network_conf.get(f'ipv{i}', None):
units[f'{network_name}.network'].update({
f'Address#ipv{i}': {
'Address': network[f'ipv{i}'],
'Address': network_conf[f'ipv{i}'],
},
})
if f'gateway{i}' in network:
units[f'{type}.network'].update({
if f'gateway{i}' in network_conf:
units[f'{network_name}.network'].update({
f'Route#ipv{i}': {
'Gateway': network[f'gateway{i}'],
'Gateway': network_conf[f'gateway{i}'],
'GatewayOnlink': 'yes',
}
})
# as vlan
if interface_type == 'vlan':
units[f"{network_name}.netdev"] = {
'NetDev': {
'Name': network_name,
'Kind': 'vlan',
},
'VLAN': {
'Id': network_conf['id'],
}
}
return {
'systemd': {

View file

@ -29,8 +29,8 @@ defaults = {
'exclude': [
'^appdata_',
'^updater-',
'^nextcloud\.log',
'^updater\.log',
'^nextcloud\\.log',
'^updater\\.log',
'^[^/]+/cache',
'^[^/]+/files_versions',
'^[^/]+/files_trashbin',
@ -123,9 +123,9 @@ def config(metadata):
],
'cache_path': '/var/lib/nextcloud/.cache',
'upgrade.disable-web': True,
'memcache.local': '\OC\Memcache\Redis',
'memcache.locking': '\OC\Memcache\Redis',
'memcache.distributed': '\OC\Memcache\Redis',
'memcache.local': '\\OC\\Memcache\\Redis',
'memcache.locking': '\\OC\\Memcache\\Redis',
'memcache.distributed': '\\OC\\Memcache\\Redis',
'redis': {
'host': '/var/run/redis/nextcloud.sock'
},
@ -142,6 +142,7 @@ def config(metadata):
'versions_retention_obligation': 'auto, 90',
'simpleSignUpLink.shown': False,
'allow_local_remote_servers': True, # FIXME?
'maintenance_window_start': 1, # https://docs.nextcloud.com/server/29/admin_manual/configuration_server/background_jobs_configuration.html#maintenance-window-start
},
},
}

View file

@ -10,6 +10,12 @@ defaults = {
'tcp dport 1936 accept',
},
},
'nginx': {
'modules': {
'rtmp',
'stream',
},
},
}

View file

@ -17,10 +17,12 @@ fastcgi_param SERVER_SOFTWARE nginx/$nginx_version;
fastcgi_param REMOTE_ADDR $remote_addr;
fastcgi_param REMOTE_PORT $remote_port;
fastcgi_param REMOTE_USER $remote_user;
fastcgi_param SERVER_ADDR $server_addr;
fastcgi_param SERVER_PORT $server_port;
fastcgi_param SERVER_NAME $server_name;
# PHP only, required if PHP was built with --enable-force-cgi-redirect
fastcgi_param REDIRECT_STATUS 200;
# This is the only thing that's different to the debian default.

View file

@ -0,0 +1,95 @@
types {
text/html html htm shtml;
text/css css;
text/xml xml;
image/gif gif;
image/jpeg jpeg jpg;
application/javascript js;
application/atom+xml atom;
application/rss+xml rss;
text/mathml mml;
text/plain txt;
text/vnd.sun.j2me.app-descriptor jad;
text/vnd.wap.wml wml;
text/x-component htc;
image/avif avif;
image/png png;
image/svg+xml svg svgz;
image/tiff tif tiff;
image/vnd.wap.wbmp wbmp;
image/webp webp;
image/x-icon ico;
image/x-jng jng;
image/x-ms-bmp bmp;
font/woff woff;
font/woff2 woff2;
application/java-archive jar war ear;
application/json json;
application/mac-binhex40 hqx;
application/msword doc;
application/pdf pdf;
application/postscript ps eps ai;
application/rtf rtf;
application/vnd.apple.mpegurl m3u8;
application/vnd.google-earth.kml+xml kml;
application/vnd.google-earth.kmz kmz;
application/vnd.ms-excel xls;
application/vnd.ms-fontobject eot;
application/vnd.ms-powerpoint ppt;
application/vnd.oasis.opendocument.graphics odg;
application/vnd.oasis.opendocument.presentation odp;
application/vnd.oasis.opendocument.spreadsheet ods;
application/vnd.oasis.opendocument.text odt;
application/vnd.openxmlformats-officedocument.presentationml.presentation pptx;
application/vnd.openxmlformats-officedocument.spreadsheetml.sheet xlsx;
application/vnd.openxmlformats-officedocument.wordprocessingml.document docx;
application/vnd.wap.wmlc wmlc;
application/wasm wasm;
application/x-7z-compressed 7z;
application/x-cocoa cco;
application/x-java-archive-diff jardiff;
application/x-java-jnlp-file jnlp;
application/x-makeself run;
application/x-perl pl pm;
application/x-pilot prc pdb;
application/x-rar-compressed rar;
application/x-redhat-package-manager rpm;
application/x-sea sea;
application/x-shockwave-flash swf;
application/x-stuffit sit;
application/x-tcl tcl tk;
application/x-x509-ca-cert der pem crt;
application/x-xpinstall xpi;
application/xhtml+xml xhtml;
application/xspf+xml xspf;
application/zip zip;
application/octet-stream bin exe dll;
application/octet-stream deb;
application/octet-stream dmg;
application/octet-stream iso img;
application/octet-stream msi msp msm;
audio/midi mid midi kar;
audio/mpeg mp3;
audio/ogg ogg;
audio/x-m4a m4a;
audio/x-realaudio ra;
video/3gpp 3gpp 3gp;
video/mp2t ts;
video/mp4 mp4;
video/mpeg mpeg mpg;
video/quicktime mov;
video/webm webm;
video/x-flv flv;
video/x-m4v m4v;
video/x-mng mng;
video/x-ms-asf asx asf;
video/x-ms-wmv wmv;
video/x-msvideo avi;
}

View file

@ -1,8 +1,11 @@
pid /var/run/nginx.pid;
user www-data;
worker_processes 10;
worker_processes ${worker_processes};
% for module in sorted(modules):
load_module modules/ngx_${module}_module.so;
% endfor
include /etc/nginx/modules-enabled/*;
include /etc/nginx/conf.d/*;
events {
@ -18,6 +21,9 @@ http {
server_names_hash_bucket_size 128;
tcp_nopush on;
client_max_body_size 32G;
ssl_dhparam "/etc/ssl/certs/dhparam.pem";
# dont show nginx version
server_tokens off;
% if node.has_bundle('php'):
upstream php-handler {

View file

@ -0,0 +1,4 @@
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;

16
bundles/nginx/files/scgi Normal file
View file

@ -0,0 +1,16 @@
scgi_param REQUEST_METHOD $request_method;
scgi_param REQUEST_URI $request_uri;
scgi_param QUERY_STRING $query_string;
scgi_param CONTENT_TYPE $content_type;
scgi_param DOCUMENT_URI $document_uri;
scgi_param DOCUMENT_ROOT $document_root;
scgi_param SCGI 1;
scgi_param SERVER_PROTOCOL $server_protocol;
scgi_param REQUEST_SCHEME $scheme;
scgi_param HTTPS $https if_not_empty;
scgi_param REMOTE_ADDR $remote_addr;
scgi_param REMOTE_PORT $remote_port;
scgi_param SERVER_PORT $server_port;
scgi_param SERVER_NAME $server_name;

16
bundles/nginx/files/uwsgi Normal file
View file

@ -0,0 +1,16 @@
uwsgi_param QUERY_STRING $query_string;
uwsgi_param REQUEST_METHOD $request_method;
uwsgi_param CONTENT_TYPE $content_type;
uwsgi_param CONTENT_LENGTH $content_length;
uwsgi_param REQUEST_URI $request_uri;
uwsgi_param PATH_INFO $document_uri;
uwsgi_param DOCUMENT_ROOT $document_root;
uwsgi_param SERVER_PROTOCOL $server_protocol;
uwsgi_param REQUEST_SCHEME $scheme;
uwsgi_param HTTPS $https if_not_empty;
uwsgi_param REMOTE_ADDR $remote_addr;
uwsgi_param REMOTE_PORT $remote_port;
uwsgi_param SERVER_PORT $server_port;
uwsgi_param SERVER_NAME $server_name;

View file

@ -3,25 +3,26 @@ from mako.template import Template
from os.path import join
directories = {
'/etc/nginx': {
'purge': True,
'triggers': {
'svc_systemd:nginx:restart',
},
},
'/etc/nginx/sites': {
'purge': True,
'triggers': {
'svc_systemd:nginx:restart',
},
},
'/etc/nginx/conf.d': {
'purge': True,
'triggers': {
'svc_systemd:nginx:restart',
},
},
'/etc/nginx/ssl': {
'/etc/nginx/params': {
'purge': True,
'triggers': {
'svc_systemd:nginx:restart',
},
},
'/var/www': {
'purge': True,
'owner': 'www-data',
},
}
@ -29,11 +30,35 @@ directories = {
files = {
'/etc/nginx/nginx.conf': {
'content_type': 'mako',
'context': {
'modules': node.metadata.get('nginx/modules'),
'worker_processes': node.metadata.get('vm/cores'),
},
'triggers': {
'svc_systemd:nginx:restart',
},
},
'/etc/nginx/fastcgi.conf': {
'/etc/nginx/params/fastcgi': {
'triggers': {
'svc_systemd:nginx:restart',
},
},
'/etc/nginx/params/proxy': {
'triggers': {
'svc_systemd:nginx:restart',
},
},
'/etc/nginx/params/uwsgi': {
'triggers': {
'svc_systemd:nginx:restart',
},
},
'/etc/nginx/params/scgi': {
'triggers': {
'svc_systemd:nginx:restart',
},
},
'/etc/nginx/mime.types': {
'triggers': {
'svc_systemd:nginx:restart',
},
@ -48,23 +73,11 @@ files = {
'svc_systemd:nginx:restart',
},
},
'/etc/nginx/sites-available': {
'delete': True,
'needs': {
'pkg_apt:nginx',
},
},
'/etc/nginx/sites-enabled': {
'delete': True,
'needs': {
'pkg_apt:nginx',
},
},
}
actions = {
'nginx-generate-dhparam': {
'command': 'openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048',
'command': 'openssl dhparam -dsaparam -out /etc/ssl/certs/dhparam.pem 4096',
'unless': 'test -f /etc/ssl/certs/dhparam.pem',
},
}

View file

@ -5,6 +5,9 @@ defaults = {
'apt': {
'packages': {
'nginx': {},
'apache2': {
'installed': False,
},
},
},
'nftables': {
@ -13,15 +16,8 @@ defaults = {
},
},
'nginx': {
'vhosts': {
# '80': {
# 'content': 'nginx/80.conf',
# },
# 'stub_status': {
# 'content': 'nginx/stub_status.conf',
# },
},
'includes': {},
'vhosts': {},
'modules': set(),
},
'systemd': {
'units': {
@ -35,24 +31,6 @@ defaults = {
},
}
@metadata_reactor.provides(
'nginx/includes',
)
def includes(metadata):
return {
'nginx': {
'includes': {
'php': {
'location ~ \.php$': {
'include': 'fastcgi.conf',
'fastcgi_split_path_info': '^(.+\.php)(/.+)$',
'fastcgi_pass': f"unix:/run/php/php{metadata.get('php/version')}-fpm.sock",
},
},
},
},
}
@metadata_reactor.provides(
'nginx/vhosts',
@ -95,7 +73,6 @@ def dns(metadata):
@metadata_reactor.provides(
'letsencrypt/domains',
'letsencrypt/reload_after',
)
def letsencrypt(metadata):
return {
@ -124,3 +101,17 @@ def monitoring(metadata):
},
},
}
@metadata_reactor.provides(
'apt/packages',
)
def modules(metadata):
return {
'apt': {
'packages': {
f'libnginx-mod-{module}': {}
for module in metadata.get('nginx/modules')
},
},
}

View file

@ -23,8 +23,19 @@ def sources(metadata):
return {
'apt': {
'sources': {
f'deb https://deb.nodesource.com/node_{version}.x {{codename}} main',
f'deb-src https://deb.nodesource.com/node_{version}.x {{codename}} main',
'nodesource': {
'types': {
'deb',
'deb-src',
},
'url': 'https://deb.nodesource.com/node_{version}.x',
'suites': {
'{codename}',
},
'components': {
'main',
},
},
},
},
}

View file

@ -9,7 +9,17 @@ defaults = {
},
},
'sources': {
'deb https://openhab.jfrog.io/artifactory/openhab-linuxpkg stable main',
'jfrog': {
'urls': {
'https://openhab.jfrog.io/artifactory/openhab-linuxpkg',
},
'suites': {
'stable',
},
'components': {
'main',
},
},
},
},
'zfs': {

View file

@ -1,9 +1,3 @@
from os.path import join
import json
from bundlewrap.utils.dicts import merge_dict
version = node.metadata.get('php/version')
files = {
@ -21,7 +15,7 @@ files = {
f'pkg_apt:php{version}-fpm',
},
'triggers': {
f'svc_systemd:php{version}-fpm:restart',
f'svc_systemd:php{version}-fpm.service:restart',
},
},
f'/etc/php/{version}/fpm/pool.d/www.conf': {
@ -33,13 +27,13 @@ files = {
f'pkg_apt:php{version}-fpm',
},
'triggers': {
f'svc_systemd:php{version}-fpm:restart',
f'svc_systemd:php{version}-fpm.service:restart',
},
},
}
svc_systemd = {
f'php{version}-fpm': {
f'php{version}-fpm.service': {
'needs': {
'pkg_apt:',
f'file:/etc/php/{version}/fpm/php.ini',

View file

@ -113,7 +113,7 @@ def php_ini(metadata):
'opcache.revalidate_freq': '60',
},
}
return {
'php': {
'php.ini': {
@ -145,7 +145,7 @@ def www_conf(metadata):
'pm': 'dynamic',
'pm.max_children': int(threads*2),
'pm.start_servers': int(threads),
'pm.min_spare_servers': int(threads/2),
'pm.min_spare_servers': max([1, int(threads/2)]),
'pm.max_spare_servers': int(threads),
'pm.max_requests': int(threads*32),
},

View file

@ -44,7 +44,9 @@ smtpd_tls_mandatory_protocols = !SSLv2, !SSLv3, !TLSv1, !TLSv1.1
smtpd_restriction_classes = mua_sender_restrictions, mua_client_restrictions, mua_helo_restrictions
mua_client_restrictions = permit_sasl_authenticated, reject
mua_sender_restrictions = permit_sasl_authenticated, reject
mua_helo_restrictions = permit_mynetworks, reject_non_fqdn_hostname, reject_invalid_hostname, permit
## MS Outlook, incompatible with reject_non_fqdn_hostname and/or reject_invalid_hostname
## https://unix.stackexchange.com/a/91753/357916
mua_helo_restrictions = permit_mynetworks, permit
smtpd_milters = inet:localhost:8891 inet:127.0.0.1:11332
non_smtpd_milters = inet:localhost:8891 inet:127.0.0.1:11332

View file

@ -86,6 +86,8 @@ if node.has_bundle('telegraf'):
'needs': [
'pkg_apt:acl',
'svc_systemd:postfix',
'svc_systemd:postfix:reload',
'svc_systemd:postfix:restart',
],
}
actions['postfix_setfacl_default_telegraf'] = {
@ -94,5 +96,7 @@ if node.has_bundle('telegraf'):
'needs': [
'pkg_apt:acl',
'svc_systemd:postfix',
'svc_systemd:postfix:reload',
'svc_systemd:postfix:restart',
],
}

View file

@ -12,7 +12,7 @@ directories = {
'zfs_dataset:tank/postgresql',
],
'needed_by': [
'svc_systemd:postgresql',
'svc_systemd:postgresql.service',
],
}
}
@ -25,16 +25,19 @@ files = {
) + '\n',
'owner': 'postgres',
'group': 'postgres',
'needs': [
'pkg_apt:postgresql',
],
'needed_by': [
'svc_systemd:postgresql',
'svc_systemd:postgresql.service',
],
'triggers': [
'svc_systemd:postgresql:restart',
'svc_systemd:postgresql.service:restart',
],
},
}
svc_systemd['postgresql'] = {
svc_systemd['postgresql.service'] = {
'needs': [
'pkg_apt:postgresql',
],
@ -43,13 +46,13 @@ svc_systemd['postgresql'] = {
for user, config in node.metadata.get('postgresql/roles').items():
postgres_roles[user] = merge_dict(config, {
'needs': [
'svc_systemd:postgresql',
'svc_systemd:postgresql.service',
],
})
for database, config in node.metadata.get('postgresql/databases').items():
postgres_dbs[database] = merge_dict(config, {
'needs': [
'svc_systemd:postgresql',
'svc_systemd:postgresql.service',
],
})

View file

@ -6,7 +6,11 @@ root_password = repo.vault.password_for(f'{node.name} postgresql root')
defaults = {
'apt': {
'packages': {
'postgresql': {},
'postgresql': {
'needs': {
'zfs_dataset:tank/postgresql',
},
},
},
},
'backup': {
@ -54,6 +58,25 @@ def conf(metadata):
}
@metadata_reactor.provides(
'apt/config/APT/NeverAutoRemove',
)
def apt(metadata):
return {
'apt': {
'config': {
'APT': {
'NeverAutoRemove': {
# https://github.com/credativ/postgresql-common/blob/master/pg_updateaptconfig#L17-L21
f"^postgresql.*-{metadata.get('postgresql/version')}",
},
},
},
},
}
@metadata_reactor.provides(
'zfs/datasets',
)

25
bundles/pyenv/items.py Normal file
View file

@ -0,0 +1,25 @@
from shlex import quote
directories = {
'/opt/pyenv': {},
'/opt/pyenv/install': {},
}
git_deploy = {
'/opt/pyenv/install': {
'repo': 'https://github.com/pyenv/pyenv.git',
'rev': 'master',
'needs': {
'directory:/opt/pyenv/install',
},
},
}
for version in node.metadata.get('pyenv/versions'):
actions[f'pyenv_install_{version}'] = {
'command': f'PYENV_ROOT=/opt/pyenv /opt/pyenv/install/bin/pyenv install {quote(version)}',
'unless': f'PYENV_ROOT=/opt/pyenv /opt/pyenv/install/bin/pyenv versions --bare | grep -Fxq {quote(version)}',
'needs': {
'git_deploy:/opt/pyenv/install',
},
}

23
bundles/pyenv/metadata.py Normal file
View file

@ -0,0 +1,23 @@
defaults = {
'apt': {
'packages': {
'build-essential': {},
'libssl-dev': {},
'zlib1g-dev': {},
'libbz2-dev': {},
'libreadline-dev': {},
'libsqlite3-dev': {},
'curl': {},
'libncurses-dev': {},
'xz-utils': {},
'tk-dev': {},
'libxml2-dev': {},
'libxmlsec1-dev': {},
'libffi-dev': {},
'liblzma-dev': {},
},
},
'pyenv': {
'versions': set(),
},
}

View file

@ -0,0 +1,3 @@
- Homematic > Settings > Control panel > Security > SSH > active & set password
- ssh to node > `ssh-copy-id -o StrictHostKeyChecking=no root@{homematic}`
- Homematic > Settings > Control panel > Security > Automatic forwarding to HTTPS > active

View file

@ -1,6 +1,3 @@
from shlex import quote
@metadata_reactor.provides(
'letsencrypt/domains',
)
@ -20,8 +17,6 @@ def letsencrypt(metadata):
'systemd-timers/raspberrymatic-cert',
)
def systemd_timers(metadata):
domain = metadata.get('raspberrymatic-cert/domain')
return {
'systemd-timers': {
'raspberrymatic-cert': {

View file

@ -6,80 +6,16 @@ $config['enable_installer'] = true;
/* Local configuration for Roundcube Webmail */
// ----------------------------------
// SQL DATABASE
// ----------------------------------
// Database connection string (DSN) for read+write operations
// Format (compatible with PEAR MDB2): db_provider://user:password@host/database
// Currently supported db_providers: mysql, pgsql, sqlite, mssql or sqlsrv
// For examples see http://pear.php.net/manual/en/package.database.mdb2.intro-dsn.php
// NOTE: for SQLite use absolute path: 'sqlite:////full/path/to/sqlite.db?mode=0646'
$config['db_dsnw'] = '${database['provider']}://${database['user']}:${database['password']}@${database['host']}/${database['name']}';
// ----------------------------------
// IMAP
// ----------------------------------
// The mail host chosen to perform the log-in.
// Leave blank to show a textbox at login, give a list of hosts
// to display a pulldown menu or set one host as string.
// To use SSL/TLS connection, enter hostname with prefix ssl:// or tls://
// Supported replacement variables:
// %n - hostname ($_SERVER['SERVER_NAME'])
// %t - hostname without the first part
// %d - domain (http hostname $_SERVER['HTTP_HOST'] without the first part)
// %s - domain name after the '@' from e-mail address provided at login screen
// For example %n = mail.domain.tld, %t = domain.tld
// WARNING: After hostname change update of mail_host column in users table is
// required to match old user data records with the new host.
$config['default_host'] = 'localhost';
// ----------------------------------
// SMTP
// ----------------------------------
// SMTP server host (for sending mails).
// To use SSL/TLS connection, enter hostname with prefix ssl:// or tls://
// If left blank, the PHP mail() function is used
// Supported replacement variables:
// %h - user's IMAP hostname
// %n - hostname ($_SERVER['SERVER_NAME'])
// %t - hostname without the first part
// %d - domain (http hostname $_SERVER['HTTP_HOST'] without the first part)
// %z - IMAP domain (IMAP hostname without the first part)
// For example %n = mail.domain.tld, %t = domain.tld
$config['smtp_server'] = 'tls://localhost';
// SMTP username (if required) if you use %u as the username Roundcube
// will use the current username for login
$config['imap_host'] = 'localhost';
$config['smtp_host'] = 'tls://localhost';
$config['smtp_user'] = '%u';
// SMTP password (if required) if you use %p as the password Roundcube
// will use the current user's password for login
$config['smtp_pass'] = '%p';
// provide an URL where a user can get support for this Roundcube installation
// PLEASE DO NOT LINK TO THE ROUNDCUBE.NET WEBSITE HERE!
$config['support_url'] = '';
// this key is used to encrypt the users imap password which is stored
// in the session record (and the client cookie if remember password is enabled).
// please provide a string of exactly 24 chars.
$config['des_key'] = '${des_key}';
// Name your service. This is displayed on the login screen and in the window title
$config['product_name'] = '${product_name}';
// ----------------------------------
// PLUGINS
// ----------------------------------
// List of active plugins (in plugins/ directory)
$config['plugins'] = array(${', '.join(f'"{plugin}"' for plugin in plugins)});
// the default locale setting (leave empty for auto-detection)
// RFC1766 formatted language name like en_US, de_DE, de_CH, fr_FR, pt_BR
$config['language'] = 'de_DE';
// https://serverfault.com/a/991304
$config['smtp_conn_options'] = array(
'ssl' => array(
'verify_peer' => false,

View file

@ -14,4 +14,4 @@ $config['password_dovecotpw'] = '/usr/bin/sudo /usr/bin/doveadm pw';
$config['password_dovecotpw_method'] = 'ARGON2ID';
$config['password_dovecotpw_with_method'] = true;
$config['password_db_dsn'] = 'pgsql://mailserver:${mailserver_db_password}@localhost/mailserver';
$config['password_query'] = "UPDATE users SET password=%D FROM domains WHERE domains.id = domain_id AND domains.name = %d AND users.name = %l";
$config['password_query'] = "UPDATE users SET password = %P FROM domains WHERE domains.id = users.domain_id AND domains.name = %d AND users.name = %l";

View file

@ -1,7 +1,8 @@
assert node.has_bundle('php')
assert node.has_bundle('mailserver')
version = node.metadata.get('roundcube/version')
roundcube_version = node.metadata.get('roundcube/version')
php_version = node.metadata.get('php/version')
directories = {
'/opt/roundcube': {
@ -22,9 +23,9 @@ directories = {
}
files[f'/tmp/roundcube-{version}.tar.gz'] = {
files[f'/tmp/roundcube-{roundcube_version}.tar.gz'] = {
'content_type': 'download',
'source': f'https://github.com/roundcube/roundcubemail/releases/download/{version}/roundcubemail-{version}-complete.tar.gz',
'source': f'https://github.com/roundcube/roundcubemail/releases/download/{roundcube_version}/roundcubemail-{roundcube_version}-complete.tar.gz',
'triggered': True,
}
actions['delete_roundcube'] = {
@ -32,18 +33,18 @@ actions['delete_roundcube'] = {
'triggered': True,
}
actions['extract_roundcube'] = {
'command': f'tar xfvz /tmp/roundcube-{version}.tar.gz --strip 1 -C /opt/roundcube',
'unless': f'grep -q "Version {version}" /opt/roundcube/index.php',
'command': f'tar xfvz /tmp/roundcube-{roundcube_version}.tar.gz --strip 1 -C /opt/roundcube',
'unless': f'grep -q "Version {roundcube_version}" /opt/roundcube/index.php',
'preceded_by': [
'action:delete_roundcube',
f'file:/tmp/roundcube-{version}.tar.gz',
f'file:/tmp/roundcube-{roundcube_version}.tar.gz',
],
'needs': [
'directory:/opt/roundcube',
],
'triggers': [
'action:chown_roundcube',
'action:composer_install',
'action:composer_lock_reset',
],
}
actions['chown_roundcube'] = {
@ -64,6 +65,9 @@ files['/opt/roundcube/config/config.inc.php'] = {
'needs': [
'action:chown_roundcube',
],
'triggers': [
f'svc_systemd:php{php_version}-fpm.service:restart',
],
}
files['/opt/roundcube/plugins/password/config.inc.php'] = {
'source': 'password.config.inc.php',
@ -75,7 +79,16 @@ files['/opt/roundcube/plugins/password/config.inc.php'] = {
'action:chown_roundcube',
],
}
actions['composer_lock_reset'] = {
'command': 'rm /opt/roundcube/composer.lock',
'triggered': True,
'needs': [
'action:chown_roundcube',
],
'triggers': [
'action:composer_install',
],
}
actions['composer_install'] = {
'command': "cp /opt/roundcube/composer.json-dist /opt/roundcube/composer.json && su www-data -s /bin/bash -c '/usr/bin/composer -d /opt/roundcube install'",
'triggered': True,

View file

@ -48,6 +48,14 @@ svc_systemd = {
'pkg_apt:rspamd',
},
},
# FIXME: broken since debian 12
'clamav-clamonacc': {
'enabled': False,
'running': False,
'needs': {
'pkg_apt:clamav',
},
},
}
actions = {

View file

@ -21,3 +21,4 @@ ClientAliveInterval 30
ClientAliveCountMax 5
AcceptEnv LANG
Subsystem sftp /usr/lib/openssh/sftp-server
HostKey /etc/ssh/ssh_host_managed_key

View file

@ -51,14 +51,14 @@ files = {
],
'skip': dont_touch_sshd,
},
'/etc/ssh/ssh_host_ed25519_key': {
'/etc/ssh/ssh_host_managed_key': {
'content': node.metadata.get('ssh/host_key/private') + '\n',
'mode': '0600',
'triggers': [
'svc_systemd:ssh:restart'
],
},
'/etc/ssh/ssh_host_ed25519_key.pub': {
'/etc/ssh/ssh_host_managed_key.pub': {
'content': node.metadata.get('ssh/host_key/public') + '\n',
'mode': '0644',
'triggers': [
@ -66,12 +66,13 @@ files = {
],
},
'/etc/ssh/ssh_known_hosts': {
'content': '\n'.join(
repo.libs.ssh.known_hosts_entry_for(other_node)
for other_node in sorted(repo.nodes)
'content': '\n'.join(sorted(
line
for other_node in repo.nodes
if other_node != node
and other_node.has_bundle('ssh')
) + '\n',
for line in other_node.metadata.get('ssh/is_known_as')
)) + '\n',
},
}

View file

@ -4,6 +4,7 @@ from base64 import b64decode
defaults = {
'ssh': {
'multiplex_incoming': True,
'is_known_as': set(), # known_hosts for other nodes
},
}
@ -35,7 +36,7 @@ def host_key(metadata):
'ssh': {
'host_key': {
'private': private + '\n',
'public': public + f' root@{node.name}',
'public': f'{public} {node.name}',
}
},
}
@ -47,7 +48,7 @@ def host_key(metadata):
def hostnames(metadata):
ips = set()
for network in node.metadata.get('network').values():
for network in metadata.get('network').values():
if network.get('ipv4', None):
ips.add(str(ip_interface(network['ipv4']).ip))
if network.get('ipv6', None):
@ -55,7 +56,7 @@ def hostnames(metadata):
domains = {
domain
for domain, records in node.metadata.get('dns').items()
for domain, records in metadata.get('dns').items()
for type, values in records.items()
if type in {'A', 'AAAA'}
and set(values) & ips
@ -70,3 +71,18 @@ def hostnames(metadata):
}
},
}
@metadata_reactor.provides(
'ssh/is_known_as',
)
def is_known_as(metadata):
return {
'ssh': {
'is_known_as': repo.libs.ssh.known_hosts_entry_for(
node_id=metadata.get('id'),
hostnames=tuple(sorted(metadata.get('ssh/hostnames'))),
pubkey=metadata.get('ssh/host_key/public'),
),
},
}

View file

@ -34,18 +34,19 @@ defaults = {
)
def systemd_timer(metadata):
return {
'systemd-timers': {
f'steam-chat-logger': {
'command': '/opt/steam_chat_logger/steam_chat_logger.py',
'when': 'hourly',
'user': 'steam_chat_logger',
'env': {
'DB_NAME': 'steam_chat_logger',
'DB_USER': 'steam_chat_logger',
'DB_PASSWORD': metadata.get('postgresql/roles/steam_chat_logger/password'),
**metadata.get('steam_chat_logger'),
},
'working_dir': '/var/lib/steam_chat_logger',
},
},
# steam python login is broken: https://github.com/ValvePython/steam/issues/442
# 'systemd-timers': {
# f'steam-chat-logger': {
# 'command': '/opt/steam_chat_logger/steam_chat_logger.py',
# 'when': 'hourly',
# 'user': 'steam_chat_logger',
# 'env': {
# 'DB_NAME': 'steam_chat_logger',
# 'DB_USER': 'steam_chat_logger',
# 'DB_PASSWORD': metadata.get('postgresql/roles/steam_chat_logger/password'),
# **metadata.get('steam_chat_logger'),
# },
# 'working_dir': '/var/lib/steam_chat_logger',
# },
# },
}

View file

@ -1,7 +1,7 @@
files = {
'/etc/systemd/journald.conf.d/managed.conf': {
'content': repo.libs.systemd.generate_unitfile({
'Jorunal': node.metadata.get('systemd-journald'),
'Journal': node.metadata.get('systemd-journald'),
}),
'triggers': {
'svc_systemd:systemd-journald:restart',

View file

@ -22,7 +22,6 @@ actions = {
'action:remove_swapfile',
},
'triggers': {
'action:initialize_swapfile',
'svc_systemd:swapfile.swap:restart',
},
},
@ -38,7 +37,7 @@ actions = {
},
'initialize_swapfile': {
'command': f'mkswap /swapfile',
'triggered': True,
'unless': 'blkid -o value -s TYPE /swapfile | grep -q "^swap$"',
'needs': {
'action:swapfile_mode',
}
@ -47,9 +46,6 @@ actions = {
svc_systemd = {
'swapfile.swap': {
'preceded_by': {
'action:initialize_swapfile',
},
'needs': {
'action:initialize_swapfile',
'action:systemd-reload',

View file

@ -6,6 +6,11 @@ defaults = {
'Swap': {
'What': '/swapfile',
},
'Install': {
'WantedBy': {
'swap.target',
},
},
},
},
},

View file

@ -42,6 +42,8 @@ def systemd(metadata):
units[f'{name}.service']['Service']['SuccessExitStatus'] = config['success_exit_status']
if config.get('kill_mode'):
units[f'{name}.service']['Service']['KillMode'] = config['kill_mode']
if config.get('RuntimeMaxSec'):
units[f'{name}.service']['Service']['RuntimeMaxSec'] = config['RuntimeMaxSec']
services[f'{name}.timer'] = {}

View file

@ -9,7 +9,7 @@ files = {
node.metadata.get('telegraf/config'),
cls=MetadataJSONEncoder,
)),
sort_keys=True
sort_keys=True,
),
'triggers': [
'svc_systemd:telegraf:restart',

View file

@ -7,6 +7,21 @@ defaults = {
# needed by crystal plugins:
'libgc-dev': {},
'libevent-dev': {},
# crystal based (procio, pressure_stall):
'libpcre3': {},
},
'sources': {
'influxdata': {
'urls': {
'https://repos.influxdata.com/debian',
},
'suites': {
'stable',
},
'components': {
'main',
},
},
},
},
'telegraf': {
@ -43,7 +58,7 @@ defaults = {
'procstat': {h({
'interval': '60s',
'pattern': '.',
'fieldpass': [
'fieldinclude': [
'cpu_usage',
'memory_rss',
],
@ -91,28 +106,6 @@ defaults = {
}
@metadata_reactor.provides(
'apt/sources',
)
def apt(metadata):
codename = {
'buster': 'buster',
'bullseye': 'bullseye',
'bookworm': 'bullseye',
}[metadata.get('os_codename')]
return {
'apt': {
'packages': {
'telegraf': {},
},
'sources': {
f"deb https://repos.influxdata.com/debian {codename} stable",
},
},
}
@metadata_reactor.provides(
'telegraf/config/outputs/influxdb_v2',
)

Some files were not shown because too many files have changed in this diff Show more