Compare commits: 1ac8a53a9a ... aa40654863 (2 commits)

Commits: 5bbe47cada, aa40654863

154 changed files with 1642 additions and 2008 deletions
@@ -30,7 +30,6 @@ Rule of thumb: keep ports below 10000 free for stuff that reserves ports.
| 20010 | mautrix-telegram | Bridge |
| 20020 | mautrix-whatsapp | Bridge |
| 20030 | matrix-dimension | Matrix Integrations Manager |
| 20070 | matrix-synapse | sliding-sync |
| 20080 | matrix-synapse | client, federation |
| 20081 | matrix-synapse | prometheus metrics |
| 20090 | matrix-media-repo | media_repo |
@@ -6,9 +6,9 @@ apt-get update

DEBIAN_FRONTEND=noninteractive apt-get -y -q -o Dpkg::Options::=--force-confold dist-upgrade

DEBIAN_FRONTEND=noninteractive apt-get -y -q autoremove
DEBIAN_FRONTEND=noninteractive apt-get -y -q autoclean

DEBIAN_FRONTEND=noninteractive apt-get -y -q clean
DEBIAN_FRONTEND=noninteractive apt-get -y -q autoremove

% if clean_old_kernels:
existing=$(dpkg --get-selections | grep -E '^linux-(image|headers)-[0-9]' || true)
@@ -153,9 +153,6 @@ pkg_apt = {
'popularity-contest': {
'installed': False,
},
'python3-packaging': {
'installed': False,
},
'unattended-upgrades': {
'installed': False,
},
@@ -1,5 +0,0 @@
context.exec = [
{ path = "pactl" args = "load-module module-native-protocol-tcp" }
{ path = "pactl" args = "load-module module-zeroconf-discover" }
{ path = "pactl" args = "load-module module-zeroconf-publish" }
]
@@ -44,11 +44,6 @@ directories = {
}

svc_systemd = {
'avahi-daemon': {
'needs': {
'pkg_pacman:avahi',
},
},
'sddm': {
'needs': {
'pkg_pacman:sddm',
@@ -66,8 +61,6 @@ git_deploy = {
},
}

files['/etc/pipewire/pipewire-pulse.conf.d/50-network.conf'] = {}

for filename in listdir(join(repo.path, 'data', 'arch-with-gui', 'files', 'fonts')):
if filename.startswith('.'):
continue
@@ -9,14 +9,6 @@ defaults = {
'icinga_options': {
'exclude_from_monitoring': True,
},
'nftables': {
'input': {
'50-avahi': {
'udp dport 5353 accept',
'udp sport 5353 accept',
},
},
},
'pacman': {
'packages': {
# fonts
@@ -31,7 +23,6 @@ defaults = {
'sddm': {},

# networking
'avahi': {},
'netctl': {},
'rfkill': {},
'wpa_supplicant': {},
@@ -54,7 +45,6 @@ defaults = {
'pipewire': {},
'pipewire-jack': {},
'pipewire-pulse': {},
'pipewire-zeroconf': {},
'qpwgraph': {},

# window management
@@ -35,15 +35,8 @@ def get_my_clients(metadata):
continue

my_clients[rnode.name] = {
'exclude_from_monitoring': rnode.metadata.get(
'backup-client/exclude_from_monitoring',
rnode.metadata.get(
'icinga_options/exclude_from_monitoring',
False,
),
),
'one_backup_every_hours': rnode.metadata.get('backup-client/one_backup_every_hours', 24),
'user': rnode.metadata.get('backup-client/user-name'),
'one_backup_every_hours': rnode.metadata.get('backup-client/one_backup_every_hours', 24),
'retain': {
'daily': rnode.metadata.get('backups/retain/daily', retain_defaults['daily']),
'weekly': rnode.metadata.get('backups/retain/weekly', retain_defaults['weekly']),
@@ -29,19 +29,8 @@ files = {
},
}

if node.has_any_bundle([
'dovecot',
'nginx',
'postfix',
]):
actions['generate-dhparam'] = {
'command': 'openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048',
'unless': 'test -f /etc/ssl/certs/dhparam.pem',
}


locale_needs = set()
for locale in sorted(node.metadata.get('locale/installed')):
for locale in sorted(node.metadata['locale']['installed']):
actions[f'ensure_locale_{locale}_is_enabled'] = {
'command': f"sed -i '/{locale}/s/^# *//g' /etc/locale.gen",
'unless': f"grep -e '^{locale}' /etc/locale.gen",
@@ -52,9 +41,11 @@ for locale in sorted(node.metadata.get('locale/installed')):
}
locale_needs = {f'action:ensure_locale_{locale}_is_enabled'}

actions['locale-gen'] = {
'triggered': True,
'command': 'locale-gen',
actions = {
'locale-gen': {
'triggered': True,
'command': 'locale-gen',
},
}

description = []
@@ -24,7 +24,7 @@ defaults = {
},
'sysctl': {
'options': {
'net.ipv4.conf.all.forwarding': '1',
'net.ipv4.ip_forward': '1',
'net.ipv6.conf.all.forwarding': '1',
},
},
@@ -65,10 +65,8 @@ def neighbor_info_from_wireguard(metadata):
)
def my_ip(metadata):
if node.has_bundle('wireguard'):
wg_ifaces = sorted({iface for iface in metadata.get('interfaces').keys() if iface.startswith('wg_')})
if not wg_ifaces:
return {}
my_ip = sorted(metadata.get(f'interfaces/{wg_ifaces[0]}/ips'))[0].split('/')[0]
wg_iface = sorted({iface for iface in metadata.get('interfaces').keys() if iface.startswith('wg_')})[0]
my_ip = sorted(metadata.get(f'interfaces/{wg_iface}/ips'))[0].split('/')[0]
else:
my_ip = str(sorted(repo.libs.tools.resolve_identifier(repo, node.name))[0])

@@ -3,4 +3,3 @@ driver = pgsql
default_pass_scheme = MD5-CRYPT
password_query = SELECT username as user, password FROM mailbox WHERE username = '%u' AND active = true
user_query = SELECT '/var/mail/vmail/' || maildir as home, 65534 as uid, 65534 as gid FROM mailbox WHERE username = '%u' AND active = true
iterate_query = SELECT username as user FROM mailbox WHERE active = true
@@ -28,19 +28,19 @@ namespace inbox {
mail_location = maildir:/var/mail/vmail/%d/%n
protocols = imap lmtp sieve

ssl = required
ssl = yes
ssl_cert = </var/lib/dehydrated/certs/${node.metadata.get('postfix/myhostname', node.metadata['hostname'])}/fullchain.pem
ssl_key = </var/lib/dehydrated/certs/${node.metadata.get('postfix/myhostname', node.metadata['hostname'])}/privkey.pem
ssl_dh = </etc/ssl/certs/dhparam.pem
ssl_dh = </etc/dovecot/ssl/dhparam.pem
ssl_min_protocol = TLSv1.2
ssl_cipher_list = ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-CHACHA20-POLY1305
ssl_prefer_server_ciphers = no
ssl_cipher_list = EECDH+AESGCM:EDH+AESGCM
ssl_prefer_server_ciphers = yes

login_greeting = IMAPd ready
auth_mechanisms = plain login
first_valid_uid = 65534
disable_plaintext_auth = yes
mail_plugins = $mail_plugins zlib old_stats fts fts_xapian
mail_plugins = $mail_plugins zlib old_stats

plugin {
zlib_save_level = 6
@@ -56,15 +56,6 @@ plugin {
old_stats_refresh = 30 secs
old_stats_track_cmds = yes

fts = xapian
fts_xapian = partial=3 full=20

fts_autoindex = yes
fts_enforced = yes

# Index attachements
fts_decoder = decode2text

% if node.has_bundle('rspamd'):
sieve_before = /var/mail/vmail/sieve/global/spam-global.sieve

@@ -95,19 +86,14 @@ service auth {
}
}

service decode2text {
executable = script /usr/lib/dovecot/decode2text.sh
user = dovecot
unix_listener decode2text {
mode = 0666
service lmtp {
unix_listener /var/spool/postfix/private/dovecot-lmtp {
group = postfix
mode = 0600
user = postfix
}
}

service indexer-worker {
vsz_limit = 0
process_limit = 0
}

service imap {
executable = imap
}
@@ -118,14 +104,6 @@ service imap-login {
vsz_limit = 64M
}

service lmtp {
unix_listener /var/spool/postfix/private/dovecot-lmtp {
group = postfix
mode = 0600
user = postfix
}
}

service managesieve-login {
inet_listener sieve {
port = 4190
@@ -2,6 +2,10 @@
# by this bundle
repo.libs.tools.require_bundle(node, 'postfix')

directories = {
'/etc/dovecot/ssl': {},
}

files = {
'/etc/dovecot/dovecot.conf': {
'content_type': 'mako',
@@ -45,17 +49,25 @@ files = {
},
}

symlinks['/usr/lib/dovecot/decode2text.sh'] = {
'target': '/usr/share/doc/dovecot-core/examples/decode2text.sh',
'before': {
'svc_systemd:dovecot',
actions = {
'dovecot_generate_dhparam': {
'command': 'openssl dhparam -out /etc/dovecot/ssl/dhparam.pem 2048',
'unless': 'test -f /etc/dovecot/ssl/dhparam.pem',
'cascade_skip': False,
'needs': {
'directory:/etc/dovecot/ssl',
'pkg_apt:'
},
'triggers': {
'svc_systemd:dovecot:restart',
},
},
}

svc_systemd = {
'dovecot': {
'needs': {
'action:generate-dhparam',
'action:dovecot_generate_dhparam',
'file:/etc/dovecot/dovecot.conf',
'file:/etc/dovecot/dovecot-sql.conf',
},
@@ -3,7 +3,6 @@ from bundlewrap.metadata import atomic
defaults = {
'apt': {
'packages': {
'dovecot-fts-xapian': {},
'dovecot-imapd': {},
'dovecot-lmtpd': {},
'dovecot-managesieved': {},
@@ -36,16 +35,6 @@ defaults = {
'dovecot',
},
},
'systemd-timers': {
'timers': {
'dovecot_fts_optimize': {
'command': [
'/usr/bin/doveadm fts optimize -A',
],
'when': '02:{}:00'.format(node.magic_number % 60),
},
},
},
}

if node.has_bundle('postfixadmin'):
33 bundles/gce-workaround/items.py Normal file

@@ -0,0 +1,33 @@
svc_systemd = {}
pkg_apt = {}

for i in {
'gce-disk-expand',
'google-cloud-packages-archive-keyring',
'google-cloud-sdk',
'google-compute-engine',
'google-compute-engine-oslogin',
'google-guest-agent',
'google-osconfig-agent',
}:
pkg_apt[i] = {
'installed': False,
}

for i in {
'google-accounts-daemon.service',
'google-accounts-manager.service',
'google-clock-skew-daemon.service',
'google-clock-sync-manager.service',
'google-guest-agent.service',
'google-osconfig-agent.service',
'google-shutdown-scripts.service',
'google-startup-scripts.service',
'sshguard.service',

'google-oslogin-cache.timer',
}:
svc_systemd[i] = {
'enabled': False,
'running': False,
}
@ -1,9 +1,3 @@
|
|||
if node.has_bundle('pyenv'):
|
||||
python_version = sorted(node.metadata.get('pyenv/python_versions'))[-1]
|
||||
python_path = f'/opt/pyenv/versions/{python_version}/bin/python'
|
||||
else:
|
||||
python_path = '/usr/bin/python3'
|
||||
|
||||
users = {
|
||||
'homeassistant': {
|
||||
'home': '/var/opt/homeassistant',
|
||||
|
@ -38,7 +32,7 @@ files = {
|
|||
|
||||
actions = {
|
||||
'homeassistant_create_virtualenv': {
|
||||
'command': f'sudo -u homeassistant virtualenv -p {python_path} /opt/homeassistant/venv/',
|
||||
'command': 'sudo -u homeassistant /usr/bin/python3 -m virtualenv -p python3 /opt/homeassistant/venv/',
|
||||
'unless': 'test -d /opt/homeassistant/venv/',
|
||||
'needs': {
|
||||
'directory:/opt/homeassistant',
|
||||
|
|
|
@ -4,7 +4,6 @@ defaults = {
|
|||
'autoconf': {},
|
||||
'bluez': {},
|
||||
'build-essential': {},
|
||||
'ffmpeg': {},
|
||||
'libffi-dev': {},
|
||||
'libjpeg-dev': {},
|
||||
'libopenjp2-7': {},
|
||||
|
|
|
@ -13,6 +13,7 @@ BLOCKLISTS = {
|
|||
'dnsbl-1.uceprotect.net': set(),
|
||||
'l2.spews.dnsbl.sorbs.net': set(),
|
||||
'list.dsbl.org': set(),
|
||||
'map.spam-rbl.com': set(),
|
||||
'multihop.dsbl.org': set(),
|
||||
'ns1.unsubscore.com': set(),
|
||||
'opm.blitzed.org': set(),
|
||||
|
|
|
@ -9,11 +9,6 @@ app = Flask(__name__)
|
|||
@app.route('/status')
|
||||
def statuspage():
|
||||
everything_fine = True
|
||||
try:
|
||||
check_output(['/usr/local/share/icinga/plugins/check_mounts'])
|
||||
except:
|
||||
everything_fine = False
|
||||
|
||||
try:
|
||||
check_output(['/usr/lib/nagios/plugins/check_procs', '-C', 'icinga2', '-c', '1:'])
|
||||
except:
|
||||
|
|
|
@ -3,6 +3,8 @@ Description=Icinga2 Statusmonitor
|
|||
After=network.target
|
||||
|
||||
[Service]
|
||||
User=nagios
|
||||
Group=nagios
|
||||
Environment="FLASK_APP=/etc/icinga2/icinga_statusmonitor.py"
|
||||
ExecStart=/usr/bin/python3 -m flask run
|
||||
WorkingDirectory=/tmp
|
||||
|
|
|
@ -113,14 +113,9 @@ def notify_per_ntfy():
|
|||
else:
|
||||
subject = '[ICINGA] {}'.format(args.host_name)
|
||||
|
||||
if args.notification_type.lower() == 'recovery':
|
||||
priority = 'default'
|
||||
else:
|
||||
priority = 'urgent'
|
||||
|
||||
headers = {
|
||||
'Title': subject,
|
||||
'Priority': priority,
|
||||
'Priority': 'urgent',
|
||||
}
|
||||
|
||||
try:
|
||||
|
@ -199,7 +194,6 @@ if __name__ == '__main__':
|
|||
notify_per_mail()
|
||||
|
||||
if args.sms:
|
||||
if args.service_name:
|
||||
notify_per_sms()
|
||||
notify_per_sms()
|
||||
if CONFIG['ntfy']['user']:
|
||||
notify_per_ntfy()
|
||||
|
|
|
@ -275,27 +275,6 @@ files = {
|
|||
'mode': '0660',
|
||||
'group': 'icingaweb2',
|
||||
},
|
||||
|
||||
# monitoring
|
||||
'/etc/icinga2/icinga_statusmonitor.py': {
|
||||
'triggers': {
|
||||
'svc_systemd:icinga_statusmonitor:restart',
|
||||
},
|
||||
},
|
||||
'/usr/local/lib/systemd/system/icinga_statusmonitor.service': {
|
||||
'triggers': {
|
||||
'action:systemd-reload',
|
||||
'svc_systemd:icinga_statusmonitor:restart',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
svc_systemd['icinga_statusmonitor'] = {
|
||||
'needs': {
|
||||
'file:/etc/icinga2/icinga_statusmonitor.py',
|
||||
'file:/usr/local/lib/systemd/system/icinga_statusmonitor.service',
|
||||
'pkg_apt:python3-flask',
|
||||
},
|
||||
}
|
||||
|
||||
actions = {
|
||||
|
@ -337,12 +316,15 @@ for name in files:
|
|||
for name in symlinks:
|
||||
icinga_run_deps.add(f'symlink:{name}')
|
||||
|
||||
svc_systemd['icinga2'] = {
|
||||
'needs': icinga_run_deps,
|
||||
svc_systemd = {
|
||||
'icinga2': {
|
||||
'needs': icinga_run_deps,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
# The actual hosts and services management starts here
|
||||
bundles = set()
|
||||
downtimes = []
|
||||
|
@ -353,14 +335,19 @@ for rnode in sorted(repo.nodes):
|
|||
host_ips = repo.libs.tools.resolve_identifier(repo, rnode.name, only_physical=True)
|
||||
icinga_ips = {}
|
||||
|
||||
for ip_type in ('ipv4', 'ipv6'):
|
||||
for ip in sorted(host_ips[ip_type]):
|
||||
if ip.is_private and not ip.is_link_local:
|
||||
icinga_ips[ip_type] = str(ip)
|
||||
break
|
||||
else:
|
||||
if host_ips[ip_type]:
|
||||
icinga_ips[ip_type] = sorted(host_ips[ip_type])[0]
|
||||
# XXX for the love of god, PLEASE remove this once DNS is no longer
|
||||
# hosted at GCE
|
||||
if rnode.in_group('gce'):
|
||||
icinga_ips['ipv4'] = rnode.metadata.get('external_ipv4')
|
||||
else:
|
||||
for ip_type in ('ipv4', 'ipv6'):
|
||||
for ip in sorted(host_ips[ip_type]):
|
||||
if ip.is_private and not ip.is_link_local:
|
||||
icinga_ips[ip_type] = str(ip)
|
||||
break
|
||||
else:
|
||||
if host_ips[ip_type]:
|
||||
icinga_ips[ip_type] = sorted(host_ips[ip_type])[0]
|
||||
|
||||
if not icinga_ips:
|
||||
raise ValueError(f'{rnode.name} requests monitoring, but has neither IPv4 nor IPv6 addresses!')
|
||||
|
@ -398,7 +385,7 @@ for rnode in sorted(repo.nodes):
|
|||
'host': rnode.name,
|
||||
'comment': f'Downtime for upgrade-and-reboot of node {rnode.name}',
|
||||
'times': {
|
||||
DAYS_TO_STRING[day%7]: f'{hour}:{minute}-{hour}:{minute+15}',
|
||||
DAYS_TO_STRING[day%7]: f'{hour}:{minute}-{hour}:{minute+30}',
|
||||
},
|
||||
})
|
||||
elif (
|
||||
|
@ -414,7 +401,7 @@ for rnode in sorted(repo.nodes):
|
|||
'host': rnode.name,
|
||||
'comment': f'Downtime for upgrade-and-reboot of node {rnode.name}',
|
||||
'times': {
|
||||
DAYS_TO_STRING[day%7]: f'{hour}:{minute}-{hour}:{minute+15}',
|
||||
DAYS_TO_STRING[day%7]: f'{hour}:{minute}-{hour}:{minute+30}',
|
||||
},
|
||||
})
|
||||
|
||||
|
|
|
@ -19,7 +19,6 @@ defaults = {
|
|||
'icingaweb2': {},
|
||||
'icingaweb2-module-monitoring': {},
|
||||
'python3-easysnmp': {},
|
||||
'python3-flask': {},
|
||||
'snmp': {},
|
||||
}
|
||||
},
|
||||
|
@ -132,9 +131,6 @@ def nginx(metadata):
|
|||
'/api/': {
|
||||
'target': 'https://127.0.0.1:5665/',
|
||||
},
|
||||
'/statusmonitor/': {
|
||||
'target': 'http://127.0.0.1:5000/',
|
||||
},
|
||||
},
|
||||
'extras': True,
|
||||
},
|
||||
|
|
|
@ -4,8 +4,7 @@ After=network.target
|
|||
Requires=infobeamer-cms.service
|
||||
|
||||
[Service]
|
||||
Environment=SETTINGS=/opt/infobeamer-cms/settings.toml
|
||||
WorkingDirectory=/opt/infobeamer-cms/src
|
||||
User=infobeamer-cms
|
||||
Group=infobeamer-cms
|
||||
ExecStart=/opt/infobeamer-cms/venv/bin/python syncer.py
|
||||
WorkingDirectory=/opt/infobeamer-cms
|
||||
ExecStart=curl -s -H "Host: ${domain}" http://127.0.0.1:8000/sync
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
Description=Run infobeamer-cms sync
|
||||
|
||||
[Timer]
|
||||
OnCalendar=minutely
|
||||
OnCalendar=*:0/5
|
||||
Persistent=true
|
||||
|
||||
[Install]
|
||||
|
|
4 bundles/infobeamer-cms/files/settings.toml Normal file
|
@ -0,0 +1,4 @@
|
|||
<%
|
||||
from tomlkit import dumps as toml_dumps
|
||||
from bundlewrap.utils.text import toml_clean
|
||||
%>${toml_clean(toml_dumps(repo.libs.faults.resolve_faults(config), sort_keys=True))}
|
|
@ -1,4 +1,8 @@
|
|||
actions = {
|
||||
'infobeamer-cms_set_directory_permissions': {
|
||||
'triggered': True,
|
||||
'command': 'chown -R infobeamer-cms:infobeamer-cms /opt/infobeamer-cms/src/static/'
|
||||
},
|
||||
'infobeamer-cms_create_virtualenv': {
|
||||
'command': '/usr/bin/python3 -m virtualenv -p python3 /opt/infobeamer-cms/venv/',
|
||||
'unless': 'test -d /opt/infobeamer-cms/venv/',
|
||||
|
@ -8,11 +12,7 @@ actions = {
|
|||
},
|
||||
},
|
||||
'infobeamer-cms_install_requirements': {
|
||||
'command': ' && '.join([
|
||||
'cd /opt/infobeamer-cms/src',
|
||||
'/opt/infobeamer-cms/venv/bin/pip install --upgrade pip gunicorn -r requirements.txt',
|
||||
'rsync /opt/infobeamer-cms/src/static/* /opt/infobeamer-cms/static/',
|
||||
]),
|
||||
'command': 'cd /opt/infobeamer-cms/src && /opt/infobeamer-cms/venv/bin/pip install --upgrade pip gunicorn -r requirements.txt',
|
||||
'needs': {
|
||||
'action:infobeamer-cms_create_virtualenv',
|
||||
},
|
||||
|
@ -29,6 +29,7 @@ git_deploy = {
|
|||
},
|
||||
'triggers': {
|
||||
'svc_systemd:infobeamer-cms:restart',
|
||||
'action:infobeamer-cms_set_directory_permissions',
|
||||
'action:infobeamer-cms_install_requirements',
|
||||
},
|
||||
},
|
||||
|
@ -36,9 +37,6 @@ git_deploy = {
|
|||
|
||||
directories = {
|
||||
'/opt/infobeamer-cms/src': {},
|
||||
'/opt/infobeamer-cms/static': {
|
||||
'owner': 'infobeamer-cms',
|
||||
},
|
||||
}
|
||||
|
||||
config = node.metadata.get('infobeamer-cms/config', {})
|
||||
|
@ -68,7 +66,10 @@ for room, device_id in sorted(node.metadata.get('infobeamer-cms/rooms', {}).item
|
|||
|
||||
files = {
|
||||
'/opt/infobeamer-cms/settings.toml': {
|
||||
'content': repo.libs.faults.dict_as_toml(config),
|
||||
'content_type': 'mako',
|
||||
'context': {
|
||||
'config': config,
|
||||
},
|
||||
'triggers': {
|
||||
'svc_systemd:infobeamer-cms:restart',
|
||||
},
|
||||
|
@ -108,7 +109,7 @@ svc_systemd = {
|
|||
'infobeamer-cms': {
|
||||
'needs': {
|
||||
'action:infobeamer-cms_install_requirements',
|
||||
'directory:/opt/infobeamer-cms/static',
|
||||
'action:infobeamer-cms_set_directory_permissions',
|
||||
'file:/etc/systemd/system/infobeamer-cms.service',
|
||||
'file:/opt/infobeamer-cms/settings.toml',
|
||||
'git_deploy:/opt/infobeamer-cms/src',
|
||||
|
@ -116,12 +117,8 @@ svc_systemd = {
|
|||
},
|
||||
'infobeamer-cms-runperiodic.timer': {
|
||||
'needs': {
|
||||
'action:infobeamer-cms_install_requirements',
|
||||
'directory:/opt/infobeamer-cms/static',
|
||||
'file:/etc/systemd/system/infobeamer-cms-runperiodic.service',
|
||||
'file:/etc/systemd/system/infobeamer-cms-runperiodic.timer',
|
||||
'file:/opt/infobeamer-cms/settings.toml',
|
||||
'git_deploy:/opt/infobeamer-cms/src',
|
||||
'file:/etc/systemd/system/infobeamer-cms-runperiodic.service',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
|
@ -6,7 +6,6 @@ defaults = {
|
|||
'MAX_UPLOADS': 5,
|
||||
'PREFERRED_URL_SCHEME': 'https',
|
||||
'SESSION_COOKIE_NAME': '__Host-sess',
|
||||
'STATIC_PATH': '/opt/infobeamer-cms/static',
|
||||
'URL_KEY': repo.vault.password_for(f'{node.name} infobeamer-cms url key'),
|
||||
'VERSION': 1,
|
||||
},
|
||||
|
@ -30,13 +29,15 @@ def nginx(metadata):
|
|||
'/': {
|
||||
'target': 'http://127.0.0.1:8000',
|
||||
},
|
||||
'/sync': {
|
||||
'return': 403,
|
||||
},
|
||||
'/static': {
|
||||
'alias': '/opt/infobeamer-cms/static',
|
||||
'alias': '/opt/infobeamer-cms/src/static',
|
||||
},
|
||||
},
|
||||
'website_check_path': '/',
|
||||
'website_check_string': 'Share your projects',
|
||||
'do_not_set_content_security_headers': True,
|
||||
},
|
||||
},
|
||||
},
|
||||
|
@ -44,7 +45,6 @@ def nginx(metadata):
|
|||
|
||||
|
||||
@metadata_reactor.provides(
|
||||
'infobeamer-cms/config/DOMAIN',
|
||||
'infobeamer-cms/config/TIME_MAX',
|
||||
'infobeamer-cms/config/TIME_MIN',
|
||||
)
|
||||
|
@ -57,7 +57,6 @@ def event_times(metadata):
|
|||
return {
|
||||
'infobeamer-cms': {
|
||||
'config': {
|
||||
'DOMAIN': metadata.get('infobeamer-cms/domain'),
|
||||
'TIME_MAX': int(event_end.timestamp()),
|
||||
'TIME_MIN': int(event_start.timestamp()),
|
||||
},
|
||||
|
|
|
@ -1,15 +0,0 @@
|
|||
[Unit]
|
||||
Description=infobeamer-monitor
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=exec
|
||||
Restart=always
|
||||
RestartSec=5s
|
||||
ExecStart=/opt/infobeamer-cms/venv/bin/python monitor.py
|
||||
User=infobeamer-cms
|
||||
Group=infobeamer-cms
|
||||
WorkingDirectory=/opt/infobeamer-monitor/
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -1,185 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from json import dumps
|
||||
from time import sleep
|
||||
|
||||
import paho.mqtt.client as mqtt
|
||||
from requests import RequestException, get
|
||||
|
||||
try:
|
||||
# python 3.11
|
||||
from tomllib import loads as toml_load
|
||||
except ImportError:
|
||||
from rtoml import load as toml_load
|
||||
|
||||
with open("config.toml") as f:
|
||||
CONFIG = toml_load(f.read())
|
||||
|
||||
|
||||
logging.basicConfig(
|
||||
format="[%(levelname)s %(name)s] %(message)s",
|
||||
level=logging.INFO,
|
||||
)
|
||||
|
||||
LOG = logging.getLogger("main")
|
||||
MLOG = logging.getLogger("mqtt")
|
||||
|
||||
state = None
|
||||
|
||||
client = mqtt.Client()
|
||||
client.username_pw_set(CONFIG["mqtt"]["user"], CONFIG["mqtt"]["password"])
|
||||
client.connect(CONFIG["mqtt"]["host"], 1883, 60)
|
||||
client.loop_start()
|
||||
|
||||
|
||||
def mqtt_out(message, level="INFO", device=None):
|
||||
key = "infobeamer"
|
||||
if device:
|
||||
key += f"/{device['id']}"
|
||||
message = f"[{device['description']}] {message}"
|
||||
|
||||
client.publish(
|
||||
CONFIG["mqtt"]["topic"],
|
||||
dumps(
|
||||
{
|
||||
"level": level,
|
||||
"component": key,
|
||||
"msg": message,
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def mqtt_dump_state(device):
|
||||
if not device["is_online"]:
|
||||
return
|
||||
|
||||
out = []
|
||||
if device["location"]:
|
||||
out.append("Location: {}".format(device["location"]))
|
||||
out.append("Setup: {} ({})".format(device["setup"]["name"], device["setup"]["id"]))
|
||||
out.append("Resolution: {}".format(device["run"].get("resolution", "unknown")))
|
||||
if not device["is_synced"]:
|
||||
out.append("syncing ...")
|
||||
|
||||
mqtt_out(
|
||||
" - ".join(out),
|
||||
device=device,
|
||||
)
|
||||
|
||||
|
||||
mqtt_out("Monitor starting up")
|
||||
while True:
|
||||
try:
|
||||
try:
|
||||
r = get(
|
||||
"https://info-beamer.com/api/v1/device/list",
|
||||
auth=("", CONFIG["api_key"]),
|
||||
)
|
||||
r.raise_for_status()
|
||||
ib_state = r.json()["devices"]
|
||||
except RequestException as e:
|
||||
LOG.exception("Could not get data from info-beamer")
|
||||
mqtt_out(
|
||||
f"Could not get data from info-beamer: {e!r}",
|
||||
level="WARN",
|
||||
)
|
||||
else:
|
||||
new_state = {}
|
||||
online_devices = set()
|
||||
for device in ib_state:
|
||||
did = str(device["id"])
|
||||
|
||||
if did in new_state:
|
||||
mqtt_out("DUPLICATE DETECTED!", level="ERROR", device=device)
|
||||
continue
|
||||
|
||||
new_state[did] = device
|
||||
must_dump_state = False
|
||||
|
||||
if state is not None:
|
||||
if did not in state:
|
||||
LOG.info(
|
||||
"new device found: {} [{}]".format(
|
||||
did,
|
||||
device["description"],
|
||||
)
|
||||
)
|
||||
mqtt_out(
|
||||
"new device found!",
|
||||
device=device,
|
||||
)
|
||||
must_dump_state = True
|
||||
|
||||
else:
|
||||
if device["is_online"] != state[did]["is_online"]:
|
||||
online_status = (
|
||||
"online from {}".format(device["run"]["public_addr"])
|
||||
if device["is_online"]
|
||||
else "offline"
|
||||
)
|
||||
|
||||
LOG.info("device {} is now {}".format(did, online_status))
|
||||
mqtt_out(
|
||||
f"status changed to {online_status}",
|
||||
level="INFO" if device["is_online"] else "WARN",
|
||||
device=device,
|
||||
)
|
||||
must_dump_state = True
|
||||
|
||||
if device["description"] != state[did]["description"]:
|
||||
LOG.info(
|
||||
"device {} changed name to {}".format(
|
||||
did, device["description"]
|
||||
)
|
||||
)
|
||||
must_dump_state = True
|
||||
|
||||
if device["is_online"]:
|
||||
if device["maintenance"]:
|
||||
mqtt_out(
|
||||
"maintenance required: {}".join(
|
||||
sorted(device["maintenance"])
|
||||
),
|
||||
level="WARN",
|
||||
device=device,
|
||||
)
|
||||
must_dump_state = True
|
||||
|
||||
if (
|
||||
device["is_synced"] != state[did]["is_synced"]
|
||||
or device["location"] != state[did]["location"]
|
||||
or device["setup"]["id"] != state[did]["setup"]["id"]
|
||||
or device["run"].get("resolution")
|
||||
!= state[did]["run"].get("resolution")
|
||||
):
|
||||
must_dump_state = True
|
||||
|
||||
if must_dump_state:
|
||||
mqtt_dump_state(device)
|
||||
else:
|
||||
LOG.info("adding device {} to empty state".format(device["id"]))
|
||||
|
||||
if device["is_online"]:
|
||||
online_devices.add(
|
||||
"{} ({})".format(
|
||||
device["id"],
|
||||
device["description"],
|
||||
)
|
||||
)
|
||||
|
||||
state = new_state
|
||||
|
||||
if (
|
||||
datetime.now(timezone.utc).strftime("%H%M") == "1312"
|
||||
and online_devices
|
||||
and int(datetime.now(timezone.utc).strftime("%S")) < 30
|
||||
):
|
||||
mqtt_out("Online Devices: {}".format(", ".join(sorted(online_devices))))
|
||||
sleep(30)
|
||||
except KeyboardInterrupt:
|
||||
break
|
||||
|
||||
mqtt_out("Monitor exiting")
|
|
@ -1,30 +0,0 @@
|
|||
assert node.has_bundle('infobeamer-cms') # uses same venv
|
||||
|
||||
files['/opt/infobeamer-monitor/config.toml'] = {
|
||||
'content': repo.libs.faults.dict_as_toml(node.metadata.get('infobeamer-monitor')),
|
||||
'triggers': {
|
||||
'svc_systemd:infobeamer-monitor:restart',
|
||||
},
|
||||
}
|
||||
|
||||
files['/opt/infobeamer-monitor/monitor.py'] = {
|
||||
'mode': '0755',
|
||||
'triggers': {
|
||||
'svc_systemd:infobeamer-monitor:restart',
|
||||
},
|
||||
}
|
||||
|
||||
files['/usr/local/lib/systemd/system/infobeamer-monitor.service'] = {
|
||||
'triggers': {
|
||||
'action:systemd-reload',
|
||||
'svc_systemd:infobeamer-monitor:restart',
|
||||
},
|
||||
}
|
||||
|
||||
svc_systemd['infobeamer-monitor'] = {
|
||||
'needs': {
|
||||
'file:/opt/infobeamer-monitor/config.toml',
|
||||
'file:/opt/infobeamer-monitor/monitor.py',
|
||||
'file:/usr/local/lib/systemd/system/infobeamer-monitor.service',
|
||||
},
|
||||
}
|
|
@ -55,15 +55,3 @@ def nginx(metadata):
|
|||
},
|
||||
},
|
||||
}
|
||||
|
||||
@metadata_reactor.provides(
|
||||
'firewall/port_rules',
|
||||
)
|
||||
def firewall(metadata):
|
||||
return {
|
||||
'firewall': {
|
||||
'port_rules': {
|
||||
'8096/tcp': atomic(metadata.get('jellyfin/restrict-to', {'*'})),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
|
@ -1,15 +0,0 @@
|
|||
actions['modprobe_jool'] = {
|
||||
'command': 'modprobe jool',
|
||||
'unless': 'lsmod | grep -F jool',
|
||||
}
|
||||
|
||||
actions['jool_add_nat64_instance'] = {
|
||||
'command': 'jool instance add "nat64" --netfilter --pool6 64:ff9b::/96',
|
||||
'unless': 'jool instance display --no-headers --csv | grep -E ",nat64,netfilter$"',
|
||||
'needs': {
|
||||
'action:modprobe_jool',
|
||||
'pkg_apt:jool-dkms',
|
||||
'pkg_apt:jool-tools',
|
||||
'pkg_apt:linux-headers-amd64',
|
||||
},
|
||||
}
|
|
@ -1,14 +0,0 @@
|
|||
defaults = {
|
||||
'apt': {
|
||||
'packages': {
|
||||
'jool-dkms': {},
|
||||
'jool-tools': {},
|
||||
'linux-headers-amd64': {},
|
||||
},
|
||||
},
|
||||
'modules': {
|
||||
'jool': [
|
||||
'jool',
|
||||
],
|
||||
},
|
||||
}
|
4 bundles/jugendhackt_tools/files/config.toml Normal file
|
@ -0,0 +1,4 @@
|
|||
<%
|
||||
from tomlkit import dumps as toml_dumps
|
||||
from bundlewrap.utils.text import toml_clean
|
||||
%>${toml_clean(toml_dumps(repo.libs.faults.resolve_faults(node.metadata.get('jugendhackt_tools')), sort_keys=True))}
|
|
@ -1,7 +1,7 @@
|
|||
directories['/opt/jugendhackt_tools/src'] = {}
|
||||
|
||||
git_deploy['/opt/jugendhackt_tools/src'] = {
|
||||
'repo': 'https://github.com/sophieschi/jugendhackt_schedule.git',
|
||||
'repo': 'https://github.com/kunsi/jugendhackt_schedule.git',
|
||||
'rev': 'main',
|
||||
'triggers': {
|
||||
'action:jugendhackt_tools_install',
|
||||
|
@ -49,7 +49,7 @@ actions['jugendhackt_tools_migrate'] = {
|
|||
}
|
||||
|
||||
files['/opt/jugendhackt_tools/config.toml'] = {
|
||||
'content': repo.libs.faults.dict_as_toml(node.metadata.get('jugendhackt_tools')),
|
||||
'content_type': 'mako',
|
||||
'triggers': {
|
||||
'svc_systemd:jugendhackt_tools:restart',
|
||||
},
|
||||
|
|
|
@ -1,37 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
from csv import DictReader
|
||||
from datetime import datetime, timezone
|
||||
from os import scandir
|
||||
from os.path import join
|
||||
|
||||
|
||||
def parse():
|
||||
NOW = datetime.now()
|
||||
active_leases = {}
|
||||
for file in scandir("/var/lib/kea/"):
|
||||
with open(file.path) as f:
|
||||
for row in DictReader(f):
|
||||
expires = datetime.fromtimestamp(int(row["expire"]))
|
||||
|
||||
if expires >= NOW:
|
||||
if (
|
||||
row["address"] not in active_leases
|
||||
or active_leases[row["address"]]["expires_dt"] < expires
|
||||
):
|
||||
row["expires_dt"] = expires
|
||||
active_leases[row["address"]] = row
|
||||
return active_leases.values()
|
||||
|
||||
|
||||
def print_table(leases):
|
||||
print(""" address | MAC | expires | hostname
|
||||
-----------------+-------------------+---------+----------""")
|
||||
for lease in sorted(leases, key=lambda r: r["address"]):
|
||||
print(
|
||||
f' {lease["address"]:<15} | {lease["hwaddr"].lower()} | {lease["expires_dt"]:%H:%M} | {lease["hostname"]}'
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print_table(parse())
|
|
@ -44,10 +44,6 @@ files['/etc/kea/kea-dhcp4.conf'] = {
|
|||
},
|
||||
}
|
||||
|
||||
files['/usr/local/bin/kea-lease-list'] = {
|
||||
'mode': '0500',
|
||||
}
|
||||
|
||||
svc_systemd['kea-dhcp4-server'] = {
|
||||
'needs': {
|
||||
'file:/etc/kea/kea-dhcp4.conf',
|
||||
|
|
|
@ -1,8 +0,0 @@
|
|||
# This file is managed using bundlewrap
|
||||
% for identifier, modules in sorted(node.metadata.get('modules', {}).items()):
|
||||
|
||||
# ${identifier}
|
||||
% for module in modules:
|
||||
${module}
|
||||
% endfor
|
||||
% endfor
|
|
@ -1,3 +0,0 @@
|
|||
files['/etc/modules'] = {
|
||||
'content_type': 'mako',
|
||||
}
|
|
@ -39,7 +39,6 @@ def cron(metadata):
|
|||
'/usr/bin/dehydrated --cleanup',
|
||||
],
|
||||
'when': '04:{}:00'.format(node.magic_number % 60),
|
||||
'exclude_from_monitoring': True,
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
|
@ -1,27 +0,0 @@
|
|||
<%
|
||||
database = node.metadata.get('matrix-synapse/database')
|
||||
db_string = 'postgresql://{}:{}@{}/{}?sslmode=disable'.format(
|
||||
database['user'],
|
||||
database['password'],
|
||||
database.get('host', 'localhost'),
|
||||
database['database'],
|
||||
)
|
||||
%>\
|
||||
[Unit]
|
||||
Description=matrix-org sliding-sync proxy
|
||||
After=network.target
|
||||
Requires=postgresql.service
|
||||
|
||||
[Service]
|
||||
User=matrix-synapse
|
||||
Group=matrix-synapse
|
||||
Environment=SYNCV3_SERVER=https://${node.metadata.get('matrix-synapse/baseurl')}
|
||||
Environment=SYNCV3_DB=${db_string}
|
||||
Environment=SYNCV3_SECRET=${node.metadata.get('matrix-synapse/sliding_sync/secret')}
|
||||
Environment=SYNCV3_BINDADDR=127.0.0.1:20070
|
||||
ExecStart=/usr/local/bin/matrix-sliding-sync
|
||||
Restart=always
|
||||
RestartSec=10s
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -57,32 +57,3 @@ svc_systemd = {
|
|||
},
|
||||
},
|
||||
}
|
||||
|
||||
if node.metadata.get('matrix-synapse/sliding_sync/version', None):
|
||||
files['/usr/local/bin/matrix-sliding-sync'] = {
|
||||
'content_type': 'download',
|
||||
'source': 'https://github.com/matrix-org/sliding-sync/releases/download/{}/syncv3_linux_amd64'.format(
|
||||
node.metadata.get('matrix-synapse/sliding_sync/version'),
|
||||
),
|
||||
'content_hash': node.metadata.get('matrix-synapse/sliding_sync/sha1', None),
|
||||
'mode': '0755',
|
||||
'triggers': {
|
||||
'svc_systemd:matrix-sliding-sync:restart',
|
||||
},
|
||||
}
|
||||
|
||||
files['/usr/local/lib/systemd/system/matrix-sliding-sync.service'] = {
|
||||
'content_type': 'mako',
|
||||
'triggers': {
|
||||
'action:systemd-reload',
|
||||
'svc_systemd:matrix-sliding-sync:restart',
|
||||
},
|
||||
}
|
||||
|
||||
svc_systemd['matrix-sliding-sync'] = {
|
||||
'needs': {
|
||||
'file:/usr/local/bin/matrix-sliding-sync',
|
||||
'file:/usr/local/lib/systemd/system/matrix-sliding-sync.service',
|
||||
'postgres_db:synapse',
|
||||
},
|
||||
}
|
||||
|
|
|
@ -88,14 +88,6 @@ def nginx(metadata):
|
|||
if not node.has_bundle('nginx'):
|
||||
raise DoNotRunAgain
|
||||
|
||||
wellknown_client_sliding_sync = {}
|
||||
if metadata.get('matrix-synapse/sliding_sync/version', None):
|
||||
wellknown_client_sliding_sync = {
|
||||
'org.matrix.msc3575.proxy': {
|
||||
'url': 'https://{}'.format(metadata.get('matrix-synapse/baseurl')),
|
||||
},
|
||||
}
|
||||
|
||||
wellknown = {
|
||||
'/.well-known/matrix/client': {
|
||||
'content': dumps({
|
||||
|
@ -105,7 +97,6 @@ def nginx(metadata):
|
|||
'm.identity_server': {
|
||||
'base_url': metadata.get('matrix-synapse/identity_server', 'https://matrix.org'),
|
||||
},
|
||||
**wellknown_client_sliding_sync,
|
||||
**metadata.get('matrix-synapse/additional_client_config', {}),
|
||||
}, sort_keys=True),
|
||||
'return': 200,
|
||||
|
@ -127,16 +118,10 @@ def nginx(metadata):
|
|||
}
|
||||
|
||||
locations = {
|
||||
'/_client/': {
|
||||
'target': 'http://127.0.0.1:20070',
|
||||
},
|
||||
'/_matrix': {
|
||||
'target': 'http://[::1]:20080',
|
||||
'max_body_size': '50M',
|
||||
},
|
||||
'/_matrix/client/unstable/org.matrix.msc3575/sync': {
|
||||
'target': 'http://127.0.0.1:20070',
|
||||
},
|
||||
'/_synapse': {
|
||||
'target': 'http://[::1]:20080',
|
||||
},
|
||||
|
|
|
@ -21,7 +21,7 @@ pip install --upgrade pip yt-dlp
|
|||
|
||||
errors=0
|
||||
|
||||
for i in Neosignal tasmo starkato b4m ProjectPoltergeist jakehunnter davem_dokebi El1s4
|
||||
for i in Neosignal tasmo starkato b4m ProjectPoltergeist jakehunnter davem_dokebi
|
||||
do
|
||||
echo "> mixcloud $i" >&2
|
||||
if ! [[ -d "/storage/nas/Musik/mixcloud/$i" ]]
|
||||
|
@ -53,7 +53,7 @@ do
|
|||
) || errors=1
|
||||
done
|
||||
|
||||
for i in tschunkelmusik zotanmew
|
||||
for i in tschunkelmusik
|
||||
do
|
||||
echo "> soundcloud $i" >&2
|
||||
if ! [[ -d "/storage/nas/Musik/mixcloud/$i" ]]
|
||||
|
|
|
@ -1,138 +1,124 @@
|
|||
users['netbox'] = {
|
||||
'home': '/opt/netbox',
|
||||
}
|
||||
|
||||
directories['/opt/netbox/src'] = {}
|
||||
|
||||
directories['/opt/netbox/media'] = {
|
||||
'owner': 'netbox',
|
||||
}
|
||||
|
||||
directories['/opt/netbox/scripts'] = {
|
||||
'owner': 'netbox',
|
||||
}
|
||||
|
||||
git_deploy['/opt/netbox/src'] = {
|
||||
'repo': 'https://github.com/netbox-community/netbox.git',
|
||||
'rev': node.metadata.get('netbox/version'),
|
||||
'triggers': {
|
||||
'action:netbox_install',
|
||||
'svc_systemd:netbox-web:restart',
|
||||
'svc_systemd:netbox-worker:restart',
|
||||
users = {
|
||||
'netbox': {
|
||||
'home': '/opt/netbox',
|
||||
},
|
||||
'tags': {
|
||||
'netbox-install',
|
||||
}
|
||||
|
||||
directories = {
|
||||
'/opt/netbox/src': {},
|
||||
'/opt/netbox/media': {
|
||||
'owner': 'netbox',
|
||||
},
|
||||
'/opt/netbox/scripts': {
|
||||
'owner': 'netbox',
|
||||
},
|
||||
}
|
||||
|
||||
git_deploy = {
|
||||
'/opt/netbox/src': {
|
||||
'repo': 'https://github.com/netbox-community/netbox.git',
|
||||
'rev': node.metadata.get('netbox/version'),
|
||||
'triggers': {
|
||||
'action:netbox_install',
|
||||
'action:netbox_upgrade',
|
||||
'svc_systemd:netbox-web:restart',
|
||||
'svc_systemd:netbox-worker:restart',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
# This is a recreation of https://github.com/netbox-community/netbox/blob/develop/upgrade.sh
|
||||
actions['netbox_create_virtualenv'] = {
|
||||
'command': '/usr/bin/python3 -m virtualenv -p python3 /opt/netbox/venv',
|
||||
'unless': 'test -d /opt/netbox/venv/',
|
||||
'needed_by': {
|
||||
'action:netbox_install',
|
||||
actions = {
|
||||
'netbox_create_virtualenv': {
|
||||
'command': '/usr/bin/python3 -m virtualenv -p python3 /opt/netbox/venv',
|
||||
'unless': 'test -d /opt/netbox/venv/',
|
||||
'needed_by': {
|
||||
'action:netbox_install',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
actions['netbox_install'] = {
|
||||
'triggered': True,
|
||||
'command': ' && '.join([
|
||||
'cd /opt/netbox/src',
|
||||
'/opt/netbox/venv/bin/pip install --upgrade pip wheel setuptools django-auth-ldap gunicorn',
|
||||
'/opt/netbox/venv/bin/pip install --upgrade -r requirements.txt',
|
||||
]),
|
||||
'needs': {
|
||||
'pkg_apt:build-essential',
|
||||
'pkg_apt:graphviz',
|
||||
'pkg_apt:libffi-dev',
|
||||
'pkg_apt:libldap2-dev',
|
||||
'pkg_apt:libpq-dev',
|
||||
'pkg_apt:libsasl2-dev',
|
||||
'pkg_apt:libssl-dev',
|
||||
'pkg_apt:libxml2-dev',
|
||||
'pkg_apt:libxslt1-dev',
|
||||
'pkg_apt:python3-dev',
|
||||
'pkg_apt:zlib1g-dev',
|
||||
},
|
||||
'tags': {
|
||||
'netbox-install',
|
||||
},
|
||||
}
|
||||
|
||||
last_action = 'netbox_install'
|
||||
for upgrade_command in (
|
||||
'migrate',
|
||||
'trace_paths --no-input',
|
||||
'collectstatic --no-input',
|
||||
'remove_stale_contenttypes --no-input',
|
||||
'reindex --lazy',
|
||||
'clearsessions',
|
||||
):
|
||||
actions[f'netbox_upgrade_{upgrade_command.split()[0]}'] = {
|
||||
'netbox_install': {
|
||||
'triggered': True,
|
||||
'command': f'/opt/netbox/venv/bin/python /opt/netbox/src/netbox/manage.py {upgrade_command}',
|
||||
'command': ' && '.join([
|
||||
'cd /opt/netbox/src',
|
||||
'/opt/netbox/venv/bin/pip install --upgrade pip wheel setuptools django-auth-ldap gunicorn',
|
||||
'/opt/netbox/venv/bin/pip install --upgrade -r requirements.txt',
|
||||
]),
|
||||
'needs': {
|
||||
f'action:{last_action}',
|
||||
'pkg_apt:build-essential',
|
||||
'pkg_apt:graphviz',
|
||||
'pkg_apt:libffi-dev',
|
||||
'pkg_apt:libldap2-dev',
|
||||
'pkg_apt:libpq-dev',
|
||||
'pkg_apt:libsasl2-dev',
|
||||
'pkg_apt:libssl-dev',
|
||||
'pkg_apt:libxml2-dev',
|
||||
'pkg_apt:libxslt1-dev',
|
||||
'pkg_apt:python3-dev',
|
||||
'pkg_apt:zlib1g-dev',
|
||||
}
|
||||
},
|
||||
'netbox_upgrade': {
|
||||
'triggered': True,
|
||||
'command': ' && '.join([
|
||||
'/opt/netbox/venv/bin/python /opt/netbox/src/netbox/manage.py migrate',
|
||||
'/opt/netbox/venv/bin/python /opt/netbox/src/netbox/manage.py collectstatic --no-input',
|
||||
'/opt/netbox/venv/bin/python /opt/netbox/src/netbox/manage.py remove_stale_contenttypes --no-input',
|
||||
'/opt/netbox/venv/bin/python /opt/netbox/src/netbox/manage.py clearsessions',
|
||||
]),
|
||||
'needs': {
|
||||
'action:netbox_install',
|
||||
'file:/opt/netbox/src/netbox/netbox/configuration.py',
|
||||
},
|
||||
'tags': {
|
||||
'netbox-upgrade',
|
||||
},
|
||||
}
|
||||
|
||||
files = {
|
||||
'/usr/local/lib/systemd/system/netbox-web.service': {
|
||||
'triggers': {
|
||||
'action:systemd-reload',
|
||||
'svc_systemd:netbox-web:restart',
|
||||
},
|
||||
'triggered_by': {
|
||||
'tag:netbox-install',
|
||||
},
|
||||
'/usr/local/lib/systemd/system/netbox-worker.service': {
|
||||
'triggers': {
|
||||
'action:systemd-reload',
|
||||
'svc_systemd:netbox-worker:restart',
|
||||
},
|
||||
},
|
||||
'/opt/netbox/src/netbox/netbox/configuration.py': {
|
||||
'content_type': 'mako',
|
||||
'triggers': {
|
||||
'svc_systemd:netbox-web:restart',
|
||||
'svc_systemd:netbox-worker:restart',
|
||||
},
|
||||
'needs': {
|
||||
'git_deploy:/opt/netbox/src',
|
||||
},
|
||||
},
|
||||
'/opt/netbox/gunicorn_config.py': {
|
||||
'content_type': 'mako',
|
||||
'triggers': {
|
||||
'svc_systemd:netbox-web:restart',
|
||||
},
|
||||
}
|
||||
last_action = f'netbox_upgrade_{upgrade_command.split()[0]}'
|
||||
|
||||
files['/usr/local/lib/systemd/system/netbox-web.service'] = {
|
||||
'triggers': {
|
||||
'action:systemd-reload',
|
||||
'svc_systemd:netbox-web:restart',
|
||||
},
|
||||
}
|
||||
|
||||
files['/usr/local/lib/systemd/system/netbox-worker.service'] = {
|
||||
'triggers': {
|
||||
'action:systemd-reload',
|
||||
'svc_systemd:netbox-worker:restart',
|
||||
},
|
||||
}
|
||||
|
||||
files['/opt/netbox/src/netbox/netbox/configuration.py'] = {
|
||||
'content_type': 'mako',
|
||||
'triggers': {
|
||||
'svc_systemd:netbox-web:restart',
|
||||
'svc_systemd:netbox-worker:restart',
|
||||
},
|
||||
'needs': {
|
||||
'git_deploy:/opt/netbox/src',
|
||||
},
|
||||
'tags': {
|
||||
'netbox-install',
|
||||
},
|
||||
}
|
||||
|
||||
files['/opt/netbox/gunicorn_config.py'] = {
|
||||
'content_type': 'mako',
|
||||
'triggers': {
|
||||
'svc_systemd:netbox-web:restart',
|
||||
},
|
||||
}
|
||||
|
||||
svc_systemd['netbox-web'] = {
|
||||
'needs': {
|
||||
'file:/usr/local/lib/systemd/system/netbox-web.service',
|
||||
'file:/opt/netbox/gunicorn_config.py',
|
||||
'file:/opt/netbox/src/netbox/netbox/configuration.py',
|
||||
'tag:netbox-install',
|
||||
'tag:netbox-upgrade',
|
||||
},
|
||||
}
|
||||
|
||||
svc_systemd['netbox-worker'] = {
|
||||
'needs': {
|
||||
'file:/usr/local/lib/systemd/system/netbox-worker.service',
|
||||
'file:/opt/netbox/src/netbox/netbox/configuration.py',
|
||||
'tag:netbox-install',
|
||||
'tag:netbox-upgrade',
|
||||
svc_systemd = {
|
||||
'netbox-web': {
|
||||
'needs': {
|
||||
'action:netbox_install',
|
||||
'action:netbox_upgrade',
|
||||
'file:/usr/local/lib/systemd/system/netbox-web.service',
|
||||
'file:/opt/netbox/gunicorn_config.py',
|
||||
'file:/opt/netbox/src/netbox/netbox/configuration.py',
|
||||
},
|
||||
},
|
||||
'netbox-worker': {
|
||||
'needs': {
|
||||
'action:netbox_install',
|
||||
'action:netbox_upgrade',
|
||||
'file:/usr/local/lib/systemd/system/netbox-worker.service',
|
||||
'file:/opt/netbox/src/netbox/netbox/configuration.py',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
|
@ -14,13 +14,6 @@ table inet filter {
|
|||
|
||||
iif lo accept
|
||||
|
||||
% for address in sorted(blocked_v4):
|
||||
ip saddr ${address} drop
|
||||
% endfor
|
||||
% for address in sorted(blocked_v6):
|
||||
ip6 saddr ${address} drop
|
||||
% endfor
|
||||
|
||||
icmp type timestamp-request drop
|
||||
icmp type timestamp-reply drop
|
||||
ip protocol icmp accept
|
||||
|
|
|
@ -17,8 +17,6 @@ files = {
|
|||
'/etc/nftables.conf': {
|
||||
'content_type': 'mako',
|
||||
'context': {
|
||||
'blocked_v4': node.metadata.get('nftables/blocked_v4', set()),
|
||||
'blocked_v6': node.metadata.get('nftables/blocked_v6', set()),
|
||||
'forward': node.metadata.get('nftables/forward', {}),
|
||||
'input': node.metadata.get('nftables/input', {}),
|
||||
'postrouting': node.metadata.get('nftables/postrouting', {}),
|
||||
|
|
|
@ -6,10 +6,6 @@ defaults = {
|
|||
'nftables': {},
|
||||
},
|
||||
},
|
||||
'nftables': {
|
||||
'blocked_v4': repo.libs.firewall.global_ip4_blocklist,
|
||||
'blocked_v6': repo.libs.firewall.global_ip6_blocklist,
|
||||
},
|
||||
'pacman': {
|
||||
'packages': {
|
||||
'nftables': {},
|
||||
|
|
|
@ -10,9 +10,6 @@ events {
|
|||
|
||||
http {
|
||||
include /etc/nginx/mime.types;
|
||||
types {
|
||||
application/javascript js mjs;
|
||||
}
|
||||
default_type application/octet-stream;
|
||||
charset UTF-8;
|
||||
override_charset on;
|
||||
|
|
|
@ -12,26 +12,27 @@ server {
|
|||
|
||||
% if ssl:
|
||||
location / {
|
||||
return 301 https://${domain}$request_uri;
|
||||
return 308 https://$host$request_uri;
|
||||
}
|
||||
|
||||
% if ssl == 'letsencrypt':
|
||||
% if ssl == 'letsencrypt':
|
||||
location /.well-known/acme-challenge/ {
|
||||
alias /var/lib/dehydrated/acme-challenges/;
|
||||
}
|
||||
% endif
|
||||
% endif
|
||||
}
|
||||
|
||||
% if domain_aliases and force_domain:
|
||||
server {
|
||||
server_name ${' '.join(sorted(domain_aliases))};
|
||||
|
||||
% if domain_aliases:
|
||||
server_name ${domain} ${' '.join(sorted(domain_aliases))};
|
||||
% else:
|
||||
server_name ${domain};
|
||||
% endif
|
||||
root ${webroot if webroot else '/var/www/{}/'.format(vhost)};
|
||||
index ${' '.join(index)};
|
||||
|
||||
listen 443 ssl;
|
||||
listen [::]:443 ssl;
|
||||
http2 on;
|
||||
listen 443 ssl http2;
|
||||
listen [::]:443 ssl http2;
|
||||
|
||||
% if ssl == 'letsencrypt':
|
||||
ssl_certificate /var/lib/dehydrated/certs/${domain}/fullchain.pem;
|
||||
|
@ -47,49 +48,6 @@ server {
|
|||
ssl_session_cache shared:SSL:10m;
|
||||
ssl_session_tickets off;
|
||||
|
||||
add_header Strict-Transport-Security "max-age=63072000; includeSubDomains";
|
||||
|
||||
% if ssl == 'letsencrypt':
|
||||
location /.well-known/acme-challenge/ {
|
||||
alias /var/lib/dehydrated/acme-challenges/;
|
||||
}
|
||||
% endif
|
||||
|
||||
location / {
|
||||
return 301 https://${domain}$request_uri;
|
||||
}
|
||||
}
|
||||
|
||||
% endif
|
||||
server {
|
||||
% if domain_aliases and not force_domain:
|
||||
server_name ${domain} ${' '.join(sorted(domain_aliases))};
|
||||
% else:
|
||||
server_name ${domain};
|
||||
% endif
|
||||
|
||||
root ${webroot if webroot else '/var/www/{}/'.format(vhost)};
|
||||
index ${' '.join(index)};
|
||||
|
||||
listen 443 ssl;
|
||||
listen [::]:443 ssl;
|
||||
http2 on;
|
||||
|
||||
% if ssl == 'letsencrypt':
|
||||
ssl_certificate /var/lib/dehydrated/certs/${domain}/fullchain.pem;
|
||||
ssl_certificate_key /var/lib/dehydrated/certs/${domain}/privkey.pem;
|
||||
% else:
|
||||
ssl_certificate /etc/nginx/ssl/${vhost}.crt;
|
||||
ssl_certificate_key /etc/nginx/ssl/${vhost}.key;
|
||||
% endif
|
||||
ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-CHACHA20-POLY1305;
|
||||
ssl_dhparam /etc/ssl/certs/dhparam.pem;
|
||||
ssl_prefer_server_ciphers off;
|
||||
ssl_protocols TLSv1.2 TLSv1.3;
|
||||
ssl_session_cache shared:SSL:10m;
|
||||
ssl_session_tickets off;
|
||||
ssl_session_timeout 1d;
|
||||
|
||||
add_header Strict-Transport-Security "max-age=63072000; includeSubDomains";
|
||||
% endif
|
||||
|
||||
|
|
|
@ -78,10 +78,17 @@ if node.has_bundle('pacman'):
|
|||
},
|
||||
}
|
||||
|
||||
actions = {
|
||||
'nginx-generate-dhparam': {
|
||||
'command': 'openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048',
|
||||
'unless': 'test -f /etc/ssl/certs/dhparam.pem',
|
||||
},
|
||||
}
|
||||
|
||||
svc_systemd = {
|
||||
'nginx': {
|
||||
'needs': {
|
||||
'action:generate-dhparam',
|
||||
'action:nginx-generate-dhparam',
|
||||
'directory:/var/log/nginx-timing',
|
||||
package,
|
||||
},
|
||||
|
|
|
@ -81,7 +81,6 @@ def letsencrypt(metadata):
|
|||
domains[domain] = config.get('domain_aliases', set())
|
||||
vhosts[vhost] = {
|
||||
'ssl': 'letsencrypt',
|
||||
'force_domain': True,
|
||||
}
|
||||
|
||||
return {
|
||||
|
@ -200,8 +199,8 @@ def telegraf_anon_timing(metadata):
|
|||
result[f'nginx-{vname}'] = {
|
||||
'files': [f'/var/log/nginx-timing/{vname}.log'],
|
||||
'from_beginning': False,
|
||||
'grok_patterns': [r'%{LOGPATTERN}'],
|
||||
'grok_custom_patterns': r'LOGPATTERN \[%{HTTPDATE:ts:ts-httpd}\] %{NUMBER:request_time:float} (?:%{NUMBER:upstream_response_time:float}|-) "%{WORD:verb:tag} %{NOTSPACE:request} HTTP/%{NUMBER:http_version:float}" %{NUMBER:resp_code:tag}',
|
||||
'grok_patterns': ['%{LOGPATTERN}'],
|
||||
'grok_custom_patterns': 'LOGPATTERN \[%{HTTPDATE:ts:ts-httpd}\] %{NUMBER:request_time:float} (?:%{NUMBER:upstream_response_time:float}|-) "%{WORD:verb:tag} %{NOTSPACE:request} HTTP/%{NUMBER:http_version:float}" %{NUMBER:resp_code:tag}',
|
||||
'data_format': 'grok',
|
||||
'name_override': 'nginx_timing',
|
||||
}
|
||||
|
|
|
@ -2,8 +2,8 @@ actions = {
|
|||
'nodejs_install_yarn': {
|
||||
'command': 'npm install -g yarn@latest',
|
||||
'unless': 'test -e /usr/lib/node_modules/yarn',
|
||||
'after': {
|
||||
'pkg_apt:',
|
||||
'needs': {
|
||||
'pkg_apt:nodejs',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
|
@ -2,6 +2,7 @@ defaults = {
|
|||
'apt': {
|
||||
'additional_update_commands': {
|
||||
# update npm to latest version
|
||||
'npm install -g npm@latest',
|
||||
'npm install -g yarn@latest',
|
||||
},
|
||||
'packages': {
|
||||
|
@ -13,42 +14,20 @@ defaults = {
|
|||
},
|
||||
}
|
||||
|
||||
VERSIONS_SHIPPED_BY_DEBIAN = {
|
||||
10: 10,
|
||||
11: 12,
|
||||
12: 18,
|
||||
13: 18,
|
||||
}
|
||||
|
||||
@metadata_reactor.provides(
|
||||
'apt/repos/nodejs/items',
|
||||
'apt/additional_update_commands',
|
||||
)
|
||||
def nodejs_from_version(metadata):
|
||||
version = metadata.get('nodejs/version')
|
||||
|
||||
if version != VERSIONS_SHIPPED_BY_DEBIAN[node.os_version[0]]:
|
||||
return {
|
||||
'apt': {
|
||||
'additional_update_commands': {
|
||||
# update npm to latest version
|
||||
'npm install -g npm@latest',
|
||||
},
|
||||
'repos': {
|
||||
'nodejs': {
|
||||
'items': {
|
||||
f'deb https://deb.nodesource.com/node_{version}.x {{os_release}} main',
|
||||
f'deb-src https://deb.nodesource.com/node_{version}.x {{os_release}} main',
|
||||
},
|
||||
return {
|
||||
'apt': {
|
||||
'repos': {
|
||||
'nodejs': {
|
||||
'items': {
|
||||
f'deb https://deb.nodesource.com/node_{version}.x {{os_release}} main',
|
||||
f'deb-src https://deb.nodesource.com/node_{version}.x {{os_release}} main',
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
else:
|
||||
return {
|
||||
'apt': {
|
||||
'packages': {
|
||||
'npm': {},
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
|
25 bundles/openvpn-client/items.py Normal file
|
@ -0,0 +1,25 @@
|
|||
from os.path import join
|
||||
|
||||
directories = {
|
||||
'/etc/openvpn/client': {
|
||||
'mode': '0750',
|
||||
'owner': 'openvpn',
|
||||
'group': None,
|
||||
'purge': True,
|
||||
},
|
||||
}
|
||||
|
||||
for fname, config in node.metadata.get('openvpn-client/configs', {}).items():
|
||||
files[f'/etc/openvpn/client/{fname}.conf'] = {
|
||||
'content': repo.vault.decrypt_file(join('openvpn-client', f'{fname}.conf.vault')),
|
||||
'triggers': {
|
||||
f'svc_systemd:openvpn-client@{config}:restart',
|
||||
} if config.get('running', True) else set(),
|
||||
}
|
||||
|
||||
svc_systemd[f'openvpn-client@{fname}'] = {
|
||||
'needs': {
|
||||
f'file:/etc/openvpn/client/{fname}.conf',
|
||||
},
|
||||
**config,
|
||||
}
|
20 bundles/openvpn-client/metadata.py Normal file
|
@ -0,0 +1,20 @@
|
|||
defaults = {
|
||||
'apt': {
|
||||
'packages': {
|
||||
'openvpn': {
|
||||
'needed_by': {
|
||||
'directory:/etc/openvpn/client',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
'pacman': {
|
||||
'packages': {
|
||||
'openvpn': {
|
||||
'needed_by': {
|
||||
'directory:/etc/openvpn/client',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
|
@ -36,13 +36,13 @@ pkg_pacman = {
|
|||
'at': {},
|
||||
'autoconf': {},
|
||||
'automake': {},
|
||||
'bind': {},
|
||||
'binutils': {},
|
||||
'bison': {},
|
||||
'bzip2': {},
|
||||
'curl': {},
|
||||
'dialog': {},
|
||||
'diffutils': {},
|
||||
'dnsutils': {},
|
||||
'fakeroot': {},
|
||||
'file': {},
|
||||
'findutils': {},
|
||||
|
|
|
@ -5,12 +5,8 @@ Requires=redis.service
|
|||
[Service]
|
||||
User=paperless
|
||||
Group=paperless
|
||||
Environment=PAPERLESS_CONFIGURATION_PATH=/opt/paperless/paperless.conf
|
||||
WorkingDirectory=/opt/paperless/src/paperless-ngx/src
|
||||
WorkingDirectory=/opt/paperless/src/src
|
||||
ExecStart=/opt/paperless/venv/bin/python manage.py document_consumer
|
||||
Restart=always
|
||||
RestartSec=10
|
||||
SyslogIdentifier=paperless-consumer
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
|
|
@ -5,12 +5,8 @@ Requires=redis.service
|
|||
[Service]
|
||||
User=paperless
|
||||
Group=paperless
|
||||
Environment=PAPERLESS_CONFIGURATION_PATH=/opt/paperless/paperless.conf
|
||||
WorkingDirectory=/opt/paperless/src/paperless-ngx/src
|
||||
ExecStart=/opt/paperless/venv/bin/celery --app paperless beat --loglevel INFO
|
||||
Restart=always
|
||||
RestartSec=10
|
||||
SyslogIdentifier=paperless-scheduler
|
||||
WorkingDirectory=/opt/paperless/src/src
|
||||
ExecStart=/opt/paperless/venv/bin/python manage.py qcluster
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
|
|
@ -1,16 +0,0 @@
|
|||
[Unit]
|
||||
Description=Paperless task queue
|
||||
Requires=redis.service
|
||||
|
||||
[Service]
|
||||
User=paperless
|
||||
Group=paperless
|
||||
Environment=PAPERLESS_CONFIGURATION_PATH=/opt/paperless/paperless.conf
|
||||
WorkingDirectory=/opt/paperless/src/paperless-ngx/src
|
||||
ExecStart=/opt/paperless/venv/bin/celery --app paperless worker --loglevel INFO
|
||||
Restart=always
|
||||
RestartSec=10
|
||||
SyslogIdentifier=paperless-taskqueue
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -7,12 +7,8 @@ Requires=redis.service
|
|||
[Service]
|
||||
User=paperless
|
||||
Group=paperless
|
||||
Environment=PAPERLESS_CONFIGURATION_PATH=/opt/paperless/paperless.conf
|
||||
WorkingDirectory=/opt/paperless/src/paperless-ngx/src
|
||||
ExecStart=/opt/paperless/venv/bin/gunicorn -c /opt/paperless/src/paperless-ngx/gunicorn.conf.py -b 127.0.0.1:22070 paperless.asgi:application
|
||||
Restart=always
|
||||
RestartSec=10
|
||||
SyslogIdentifier=paperless-webserver
|
||||
WorkingDirectory=/opt/paperless/src/src
|
||||
ExecStart=/opt/paperless/venv/bin/gunicorn -c /opt/paperless/src/gunicorn.conf.py -b 127.0.0.1:22070 paperless.asgi:application
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
|
|
@ -11,14 +11,14 @@ PAPERLESS_DBSSLMODE=disable
|
|||
PAPERLESS_CONSUMPTION_DIR=/mnt/paperless/consume
PAPERLESS_DATA_DIR=/mnt/paperless/data
PAPERLESS_MEDIA_ROOT=/mnt/paperless/media
PAPERLESS_STATICDIR=/opt/paperless/src/paperless-ngx/static
PAPERLESS_STATICDIR=/opt/paperless/static
PAPERLESS_FILENAME_FORMAT={created_year}/{created_month}/{correspondent}/{asn}_{title}

# Security and hosting

PAPERLESS_SECRET_KEY=${repo.vault.random_bytes_as_base64_for(f'{node.name} paperless secret key')}
PAPERLESS_ALLOWED_HOSTS=${node.metadata.get('paperless/domain')}
PAPERLESS_CORS_ALLOWED_HOSTS=http://${node.metadata.get('paperless/domain')},https://${node.metadata.get('paperless/domain')}
PAPERLESS_ALLOWED_HOSTS=${node.metadata.get('nginx/vhosts/paperless/domain', '127.0.0.1')}
PAPERLESS_CORS_ALLOWED_HOSTS=http://${node.metadata.get('nginx/vhosts/paperless/domain', '127.0.0.1')},https://${node.metadata.get('nginx/vhosts/paperless/domain', '127.0.0.1')}
#PAPERLESS_FORCE_SCRIPT_NAME=
#PAPERLESS_STATIC_URL=/static/
#PAPERLESS_AUTO_LOGIN_USERNAME=
|
@ -28,10 +28,7 @@ PAPERLESS_CORS_ALLOWED_HOSTS=http://${node.metadata.get('paperless/domain')},htt
|
|||
# OCR settings

PAPERLESS_OCR_LANGUAGE=${'+'.join(sorted(node.metadata.get('paperless/ocr_languages', {'deu', 'eng'})))}
PAPERLESS_OCR_MODE=skip
PAPERLESS_OCR_SKIP_ARCHIVE_FILE=never
PAPERLESS_OCR_USER_ARGS='{"invalidate_digital_signatures": true}'
PAPERLESS_PRE_CONSUME_SCRIPT=/opt/paperless/pre-consume.sh
PAPERLESS_OCR_MODE=skip_noarchive
#PAPERLESS_OCR_OUTPUT_TYPE=pdfa
#PAPERLESS_OCR_PAGES=1
#PAPERLESS_OCR_IMAGE_DPI=300
|
|
|
@ -1,11 +0,0 @@
|
|||
#!/bin/bash

[[ -n "$DEBUG" ]] && set -x
set -euo pipefail

pdfinfo "${DOCUMENT_WORKING_PATH}" | grep -q "Encrypted:"

if pdfinfo "${DOCUMENT_WORKING_PATH}" | grep -q "Encrypted: yes"
then
qpdf --replace-input --decrypt "${DOCUMENT_WORKING_PATH}"
fi
|
|
@ -1,97 +1,146 @@
|
|||
version = node.metadata.get('paperless/version')
workers = ('consumer', 'scheduler', 'taskqueue', 'webserver')

users['paperless'] = {
'home': '/opt/paperless',
}

directories['/opt/paperless'] = {}

files['/opt/paperless/paperless.conf'] = {
'content_type': 'mako',
'triggers': {
f'svc_systemd:paperless-{worker}:restart'
for worker in workers
users = {
'paperless': {
'home': '/opt/paperless',
},
}

files['/opt/paperless/pre-consume.sh'] = {
'mode': '0755',
}

actions['paperless_create_virtualenv'] = {
'command': '/usr/bin/python3 -m virtualenv -p python3 /opt/paperless/venv/',
'unless': 'test -d /opt/paperless/venv/',
'needs': {
'directory:/opt/paperless',
'pkg_apt:python3',
'pkg_apt:python3-pip',
'pkg_apt:python3-virtualenv',
directories = {
'/opt/paperless/src': {},
'/opt/paperless/static': {
'owner': 'paperless',
},
}

actions['paperless_install'] = {
'command': ' && '.join([
f'wget -qO /opt/paperless/{version}.tar.xz https://github.com/paperless-ngx/paperless-ngx/releases/download/{version}/paperless-ngx-{version}.tar.xz',
'rm -rf /opt/paperless/src/',
'mkdir -p /opt/paperless/src/',
f'tar -C /opt/paperless/src -xf /opt/paperless/{version}.tar.xz',
f'rm /opt/paperless/{version}.tar.xz',
'cd /opt/paperless/src/paperless-ngx',
'/opt/paperless/venv/bin/pip install --upgrade pip',
'/opt/paperless/venv/bin/pip install --upgrade -r requirements.txt',
f'echo "{version}" > /opt/paperless/version',
]),
'unless': f'''bash -c '[[ "$(cat /opt/paperless/version)" == "{version}" ]]' ''',
'after': {
'pkg_apt:',
},
'needs': {
'action:paperless_create_virtualenv',
},
'triggers': {
'action:paperless_migrate_database',
*{
f'svc_systemd:paperless-{worker}:restart' for worker in workers
}
},
}

actions['paperless_migrate_database'] = {
'command': ' && '.join([
'cd /opt/paperless/src/paperless-ngx/src',
'sudo -Hu paperless PAPERLESS_CONFIGURATION_PATH=/opt/paperless/paperless.conf /opt/paperless/venv/bin/python manage.py migrate',
]),
'triggered': True,
'needs': {
# /mnt/paperless is NOT created by this bundle.
'action:paperless_install',
'directory:/mnt/paperless',
'file:/opt/paperless/paperless.conf',
'user:paperless',
'postgres_db:paperless',
},
}

for worker in workers:
files[f'/etc/systemd/system/paperless-{worker}.service'] = {
'delete': True,
git_deploy = {
'/opt/paperless/src': {
'repo': 'https://github.com/paperless-ngx/paperless-ngx.git',
'rev': node.metadata.get('paperless/version'),
'triggers': {
'action:systemd-reload',
},
}

files[f'/usr/local/lib/systemd/system/paperless-{worker}.service'] = {
'triggers': {
'action:systemd-reload',
f'svc_systemd:paperless-{worker}:restart',
},
}

svc_systemd[f'paperless-{worker}'] = {
'needs': {
'action:paperless_install',
'action:paperless_collectstatic',
'action:paperless_compile_frontend',
'action:paperless_install_deps',
'action:paperless_migrate_database',
f'file:/usr/local/lib/systemd/system/paperless-{worker}.service',
'svc_systemd:paperless-consumer:restart',
'svc_systemd:paperless-scheduler:restart',
'svc_systemd:paperless-webserver:restart',
},
}
},
}

files = {
'/etc/systemd/system/paperless-consumer.service': {
'triggers': {
'action:systemd-reload',
'svc_systemd:paperless-consumer:restart',
},
},
'/etc/systemd/system/paperless-scheduler.service': {
'triggers': {
'action:systemd-reload',
'svc_systemd:paperless-scheduler:restart',
},
},
'/etc/systemd/system/paperless-webserver.service': {
'triggers': {
'action:systemd-reload',
'svc_systemd:paperless-webserver:restart',
},
},
'/opt/paperless/src/paperless.conf': {
'content_type': 'mako',
'needs': {
'git_deploy:/opt/paperless/src',
},
'triggers': {
'svc_systemd:paperless-consumer:restart',
'svc_systemd:paperless-scheduler:restart',
'svc_systemd:paperless-webserver:restart',
},
},
}

actions = {
'paperless_create_virtualenv': {
'command': '/usr/bin/python3 -m virtualenv -p python3 /opt/paperless/venv/',
'unless': 'test -d /opt/paperless/venv/',
'needs': {
# actually /opt/paperless, but we don't create that
'directory:/opt/paperless/src',
'pkg_apt:python3',
'pkg_apt:python3-pip',
'pkg_apt:python3-virtualenv',
},
},
'paperless_install_deps': {
'command':
'cd /opt/paperless/src && '
'/opt/paperless/venv/bin/pip install --upgrade pip && '
'/opt/paperless/venv/bin/pip install --upgrade -r requirements.txt',
'triggered': True,
'needs': {
'action:paperless_create_virtualenv',
},
},
'paperless_migrate_database': {
'command':
'cd /opt/paperless/src/src && '
'sudo -Hu paperless /opt/paperless/venv/bin/python manage.py migrate',
'triggered': True,
'needs': {
# /mnt/paperless is NOT created by this bundle.
'action:paperless_install_deps',
'directory:/mnt/paperless',
'directory:/opt/paperless/static',
'file:/opt/paperless/src/paperless.conf',
'user:paperless',
'postgres_db:paperless',
},
},
'paperless_compile_frontend': {
'command':
'cd /opt/paperless/src/src-ui && '
'npm install && '
'node_modules/.bin/ng build',
'triggered': True,
'needs': {
'file:/opt/paperless/src/paperless.conf',
'pkg_apt:nodejs',
},
},
'paperless_collectstatic': {
'command':
'cd /opt/paperless/src/src && '
'sudo -Hu paperless /opt/paperless/venv/bin/python manage.py collectstatic',
'triggered': True,
'needs': {
'directory:/opt/paperless/static',
'file:/opt/paperless/src/paperless.conf',
'action:paperless_install_deps',
},
},
}

svc_systemd = {
'paperless-consumer': {
'needs': {
'action:paperless_migrate_database',
'file:/etc/systemd/system/paperless-consumer.service',
'git_deploy:/opt/paperless/src',
},
},
'paperless-scheduler': {
'needs': {
'action:paperless_migrate_database',
'file:/etc/systemd/system/paperless-scheduler.service',
'git_deploy:/opt/paperless/src',
},
},
'paperless-webserver': {
'needs': {
'action:paperless_compile_frontend',
'action:paperless_migrate_database',
'file:/etc/systemd/system/paperless-webserver.service',
'git_deploy:/opt/paperless/src',
},
},
}
|
|
|
@ -6,12 +6,9 @@ defaults = {
|
|||
'gnupg': {},
'imagemagick': {},
'libmagic-dev': {},
'default-libmysqlclient-dev': {},
'libpq-dev': {},
'mariadb-client': {},
'mime-support': {},
'optipng': {},
'poppler-utils': {},
'python3-wheel': {},

# for OCRmyPDF
|
@ -22,8 +19,6 @@ defaults = {
|
|||
'pngquant': {},
'qpdf': {},
'tesseract-ocr': {},
'tesseract-ocr-deu': {},
'tesseract-ocr-eng': {},
'unpaper': {},
'zlib1g': {},
},
|
@ -80,36 +75,3 @@ def icinga_check_for_new_release(metadata):
|
|||
},
},
}


@metadata_reactor.provides(
'nginx/vhosts/paperless',
)
def nginx(metadata):
if not node.has_bundle('nginx'):
raise DoNotRunAgain

return {
'nginx': {
'vhosts': {
'paperless': {
'domain': metadata.get('paperless/domain'),
'locations': {
'/': {
'target': 'http://127.0.0.1:22070',
'websockets': True,
'proxy_set_header': {
'X-Forwarded-Host': '$server_name',
},
},
'/static/': {
'alias': '/opt/paperless/src/paperless-ngx/static/',
},
},
'max_body_size': '100M',
'website_check_path': '/accounts/login/',
'website_check_string': 'Paperless-ngx',
},
},
},
}
|
|
|
@ -1,3 +0,0 @@
|
|||
% for address in sorted(blocked):
${address} REJECT
% endfor
|
|
@ -48,18 +48,17 @@ smtpd_client_restrictions = permit_mynetworks permit_sasl_authenticated
|
|||
smtpd_helo_required = yes
smtpd_helo_restrictions = permit_mynetworks reject_invalid_helo_hostname
smtpd_data_restrictions = reject_unauth_pipelining
smtpd_recipient_restrictions = permit_mynetworks, check_recipient_access hash:/etc/postfix/blocked_recipients
smtpd_relay_before_recipient_restrictions = yes

# https://ssl-config.mozilla.org/#server=postfix&version=3.7.10&config=intermediate&openssl=3.0.11&guideline=5.7
# generated using mozilla ssl generator, using "old" configuration.
# we need this to support CentOS 7 systems, sadly ...
# https://ssl-config.mozilla.org/#server=postfix&version=3.5.13&config=old&openssl=1.1.1k&guideline=5.6
smtpd_tls_security_level = may
smtpd_tls_auth_only = yes
smtpd_tls_mandatory_protocols = !SSLv2, !SSLv3, !TLSv1, !TLSv1.1
smtpd_tls_protocols = !SSLv2, !SSLv3, !TLSv1, !TLSv1.1
smtpd_tls_mandatory_protocols = !SSLv2, !SSLv3
smtpd_tls_protocols = !SSLv2, !SSLv3
smtpd_tls_mandatory_ciphers = medium
smtpd_tls_dh1024_param_file = /etc/ssl/certs/dhparam.pem;
tls_medium_cipherlist = ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-CHACHA20-POLY1305
tls_preempt_cipherlist = no
tls_medium_cipherlist = ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES256-SHA256:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA
tls_preempt_cipherlist = yes
</%text>

relay_domains = $mydestination, pgsql:/etc/postfix/pgsql/relay_domains.cf
|
|
|
@ -39,16 +39,6 @@ files = {
|
|||
'action:postfix_newaliases',
},
},
'/etc/postfix/blocked_recipients': {
'content_type': 'mako',
'context': {
'blocked': node.metadata.get('postfix/blocked_recipients', set()),
},
'triggers': {
'action:postfix_postmap_blocked_recipients',
'svc_systemd:postfix:restart',
},
},
'/etc/postfix/master.cf': {
'content_type': 'mako',
'triggers': {
|
@ -84,19 +74,6 @@ actions = {
|
|||
'needs': {
my_package,
},
'before': {
'svc_systemd:postfix',
},
},
'postfix_postmap_blocked_recipients': {
'command': 'postmap hash:/etc/postfix/blocked_recipients',
'triggered': True,
'needs': {
my_package,
},
'before': {
'svc_systemd:postfix',
},
},
}
|
|
|
@ -143,14 +143,11 @@ def generate_dns_entries_for_nodes(metadata):
|
|||
if not ip6 and not ip.is_private:
ip6 = ip

if not (ip4 or ip6) and (found_ips['ipv4'] or found_ips['ipv6']):
if not (ip4 or ip6) and found_ips['ipv4']:
# do it again, but do not filter out private addresses
for ip in sorted(found_ips['ipv4']):
if not ip4:
ip4 = ip
for ip in sorted(found_ips['ipv6']):
if not ip6:
ip6 = ip

if ip4:
results.add('{} IN A {}'.format(dns_name, ip4))
|
|
|
@ -1,28 +0,0 @@
|
|||
from shlex import quote

directories = {
'/opt/pyenv': {},
'/opt/pyenv/install': {},
}

git_deploy = {
'/opt/pyenv/install': {
'repo': 'https://github.com/pyenv/pyenv.git',
'rev': node.metadata.get('pyenv/version'),
'needs': {
'directory:/opt/pyenv/install',
},
},
}

for version in node.metadata.get('pyenv/python_versions', set()):
actions[f'pyenv_install_{version}'] = {
'command': f'PYENV_ROOT=/opt/pyenv /opt/pyenv/install/bin/pyenv install {quote(version)}',
'unless': f'PYENV_ROOT=/opt/pyenv /opt/pyenv/install/bin/pyenv versions --bare | grep -E "^{quote(version)}$"',
'needs': {
'git_deploy:/opt/pyenv/install',
},
'after': {
'pkg_apt:',
},
}
|
|
@ -1,20 +0,0 @@
|
|||
defaults = {
'apt': {
'packages': {
'build-essential': {},
'curl': {},
'libbz2-dev': {},
'libffi-dev': {},
'liblzma-dev': {},
'libncurses-dev': {},
'libreadline-dev': {},
'libsqlite3-dev': {},
'libssl-dev': {},
'libxml2-dev': {},
'libxmlsec1-dev': {},
'tk-dev': {},
'xz-utils': {},
'zlib1g-dev': {},
},
},
}
|
|
@ -2,8 +2,8 @@
|
|||
interface ${interface}
{
AdvSendAdvert on;
MinRtrAdvInterval 60;
MaxRtrAdvInterval 300;
MinRtrAdvInterval 10;
MaxRtrAdvInterval 30;
MinDelayBetweenRAs 10;
prefix ${config.get('prefix', '::/64')}
{
|
@ -11,7 +11,7 @@ interface ${interface}
|
|||
AdvAutonomous on;
AdvRouterAddr on;
};
% if config.get('rdnss'):
% if 'rdnss' in config:
RDNSS ${' '.join(sorted(config['rdnss']))}
{
AdvRDNSSLifetime 900;
|
|
|
@ -101,7 +101,7 @@ if 'dkim' in node.metadata.get('rspamd', {}):
|
|||
actions = {
'rspamd_assure_dkim_key_permissions': {
'command': 'chown _rspamd:_rspamd /var/lib/rspamd/dkim/*.key',
'unless': r'test -z "$(find /var/lib/rspamd/ -iname \"*.key\" \! -user _rspamd)"',
'unless': 'test -z "$(find /var/lib/rspamd/ -iname \"*.key\" \! -user _rspamd)"',
'needs': {
'action:rspamd_generate_dkim_key',
'directory:/var/lib/rspamd/dkim',
|
|
13
bundles/seafile/files/seafile.service
Normal file
|
@ -0,0 +1,13 @@
|
|||
[Unit]
Description=Seafile
After=network.target mysql.service

[Service]
Type=forking
ExecStart=/opt/seafile/seafile-server-latest/seafile.sh start
ExecStop=/opt/seafile/seafile-server-latest/seafile.sh stop
User=seafile
Group=seafile

[Install]
WantedBy=multi-user.target
|
13
bundles/seafile/files/seahub.service
Normal file
|
@ -0,0 +1,13 @@
|
|||
[Unit]
Description=Seafile hub
After=network.target seafile.service

[Service]
Type=forking
ExecStart=/opt/seafile/seafile-server-latest/seahub.sh start
ExecStop=/opt/seafile/seafile-server-latest/seahub.sh stop
User=seafile
Group=seafile

[Install]
WantedBy=multi-user.target
|
73
bundles/seafile/items.py
Normal file
|
@ -0,0 +1,73 @@
|
|||
users = {
'seafile': {
'home': '/opt/seafile',
},
}

directories = {
'/opt/seafile': {
'mode': '0755',
'owner': 'seafile',
'group': 'seafile',
},
}

files = {
'/etc/systemd/system/seafile.service': {
'needed_by': {
'svc_systemd:seafile',
},
'triggers': {
'action:systemd-reload',
},
},
'/etc/systemd/system/seahub.service': {
'needed_by': {
'svc_systemd:seafile',
},
'triggers': {
'action:systemd-reload',
},
},
}

svc_systemd = {
'seafile': {
'needs': {
'pkg_pip:',
},
},
'seahub': {
'needs': {
'svc_systemd:seafile',
'pkg_pip:',
},
},
}

for pkg in (
'django==3.2.19',
'future==0.18.3',
'mysqlclient==2.1.1',
'pymysql',
'pillow==9.3.0',
'pylibmc',
'captcha==0.4',
'markupsafe==2.0.1',
'jinja2',
'sqlalchemy==1.4.3',
'psd-tools',
'django-pylibmc',
'django_simple_captcha==0.5.17',
'djangosaml2==1.5.7',
'pysaml2==7.2.1',
'pycryptodome==3.16.0',
'cffi==1.15.1',
'lxml',
):
if '==' in pkg:
pkg, version = pkg.split('==', 1)
else:
version = None

pkg_pip[pkg.replace('_', '-')] = {'version': version}
|
28
bundles/seafile/metadata.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
defaults = {
'apt': {
'packages': {
'mariadb-server': {},
'python3': {},
'python3-setuptools': {},
'python3-pip': {},
'default-libmysqlclient-dev': {},
},
},
'backups': {
'paths': {
'/opt/seafile',
},
},
'icinga2_api': {
'seafile': {
'services': {
'SEAFILE PROCESS': {
'command_on_monitored_host': '/usr/local/share/icinga/plugins/check_systemd_unit seafile',
},
'SEAHUB PROCESS': {
'command_on_monitored_host': '/usr/local/share/icinga/plugins/check_systemd_unit seahub',
},
},
},
},
}
|
4
bundles/simple-icinga-dashboard/files/config.toml
Normal file
|
@ -0,0 +1,4 @@
|
|||
<%
from tomlkit import dumps as toml_dumps
from bundlewrap.utils.text import toml_clean
%>${toml_clean(toml_dumps(repo.libs.faults.resolve_faults(node.metadata['simple-icinga-dashboard']), sort_keys=True))}
|
|
@ -44,7 +44,7 @@ git_deploy = {
|
|||
|
||||
files = {
'/opt/simple-icinga-dashboard/config.toml': {
'content': repo.libs.faults.dict_as_toml(node.metadata.get('simple-icinga-dashboard')),
'content_type': 'mako',
'needs': {
'git_deploy:/opt/simple-icinga-dashboard/src',
},
|
|
|
@ -55,7 +55,7 @@ def zfs_disks_to_metadata(metadata):
|
|||
continue

for disk in option['devices']:
if search(r'p([0-9]+)$', disk) or disk.startswith('/dev/mapper/'):
if search(r'p([0-9]+)$', disk):
continue

disks.add(disk)
|
|
|
@ -1,5 +1,9 @@
|
|||
% for user, config in sorted(node.metadata['users'].items()):
% for p in sorted(config.get('sudo_commands', [])):
% if config.get('is_admin', False):
${user} ALL=(ALL) NOPASSWD:ALL
% else:
% for p in sorted(config.get('sudo_commands', [])):
${user} ALL=(ALL) NOPASSWD:${p}
% endfor
% endfor
% endif
% endfor
|
|
|
@ -1,9 +1,3 @@
|
|||
<%
if config.get('exclude_from_monitoring', False):
monitored = ''
else:
monitored = f'/usr/local/sbin/systemd-timer-monitored {timer} '
%>\
[Unit]
Description=Service for Timer ${timer}
After=network.target
|
@ -21,8 +15,10 @@ WorkingDirectory=${config.get('pwd', '/')}
|
|||
Type=oneshot
% if isinstance(config['command'], list):
% for command in config['command']:
ExecStart=${monitored}${command}
ExecStart=/usr/local/sbin/systemd-timer-monitored ${timer} ${command}
% endfor
% elif config.get('exclude_from_monitoring', False):
ExecStart=${config['command']}
% else:
ExecStart=${monitored}${config['command']}
ExecStart=/usr/local/sbin/systemd-timer-monitored ${timer} ${config['command']}
% endif
|
|
4
bundles/telegraf/files/telegraf.conf
Normal file
|
@ -0,0 +1,4 @@
|
|||
<%
from tomlkit import dumps as toml_dumps
from bundlewrap.utils.text import toml_clean
%>${toml_clean(toml_dumps(repo.libs.faults.resolve_faults(config), sort_keys=True))}
|
|
@ -93,7 +93,10 @@ for name, config in sorted(node.metadata.get('telegraf/input_plugins/prometheus'
|
|||
|
||||
files = {
'/etc/telegraf/telegraf.conf': {
'content': repo.libs.faults.dict_as_toml(telegraf_config),
'content_type': 'mako',
'context': {
'config': telegraf_config,
},
'triggers': {
'svc_systemd:telegraf:restart',
},
|
|
|
@ -1,54 +0,0 @@
|
|||
#!/usr/bin/python3

from logging import basicConfig, getLogger
from sys import argv

from requests import get

basicConfig(level="INFO")
L = getLogger(__name__)


def out(keys, values):
print(
"airgradient,{} {}".format(
",".join([f"{k}={v}" for k, v in keys.items()]),
",".join([f"{k}={v}" for k, v in values.items()]),
),
flush=True,
)


try:
r = get(
f"https://api.airgradient.com/public/api/v1/locations/measures/current?token={argv[2]}"
)
L.debug(r.status_code)
L.info(r.text)
r.raise_for_status()
for location in r.json():
L.debug(location)
out(
{
"place": argv[1],
"location": location["locationName"],
},
{
k: location[k]
for k in (
"atmp",
"noxIndex",
"pm003Count",
"pm01",
"pm02",
"pm10",
"rco2",
"rhum",
"tvoc",
"tvocIndex",
"wifi",
)
},
)
except Exception:
L.exception("fail!")
|
|
@ -1,3 +0,0 @@
|
|||
files['/usr/local/bin/airgradient_telegraf'] = {
'mode': '0755',
}
|
|
@ -1,19 +0,0 @@
|
|||
@metadata_reactor.provides(
'telegraf/input_plugins/exec',
)
def telegraf(metadata):
result = {}
for location, api_key in metadata.get('telegraf_airgradient', {}).items():
result[f'airgradient_{location}'] = {
'commands': [f'/usr/local/bin/airgradient_telegraf {location} {api_key}'],
'data_format': 'influx',
'timeout': '10s',
}

return {
'telegraf': {
'input_plugins': {
'exec': result,
},
},
}
|
|
@ -1,11 +1,6 @@
|
|||
server:
# provided by pkg_apt:unbound-anchor
auto-trust-anchor-file: "/var/lib/unbound/root.key"
% if node.metadata.get('unbound/dns64', node.has_bundle('jool')):
module-config: "dns64 validator iterator"
% else:
module-config: "validator iterator"
% endif

verbosity: 0
|
@ -28,6 +23,10 @@ server:
|
|||
access-control: ::1 allow
% endif

% if node.has_bundle('pppd'):
prefer-ip4: yes
% endif

msg-cache-size: ${cache_size}
msg-cache-slabs: ${cache_slabs}
rrset-cache-size: ${cache_size}
|
|
|
@ -64,8 +64,3 @@ ${k}() {
|
|||
${v}
}
% endfor

if [[ -f "/etc/bashrc_bundlewrap/$(logname)" ]]
then
source "/etc/bashrc_bundlewrap/$(logname)"
fi
|
|
|
@ -1,9 +1,5 @@
|
|||
from os.path import exists, join

directories['/etc/bashrc_bundlewrap'] = {
'purge': True,
}

files = {
'/etc/bash.bashrc': {
'source': 'bashrc',
|
@ -68,13 +64,14 @@ for username, attrs in node.metadata['users'].items():
|
|||
}

if exists(join(repo.path, 'data', 'users', 'files', 'bash', '{}.bashrc'.format(username))):
files[f'/etc/bashrc_bundlewrap/{username}'] = {
files[home + '/.bashrc'] = {
'content_type': 'mako',
'source': 'bash/{}.bashrc'.format(username),
}
files[f"{home}/.bashrc"] = {
'delete': True,
}
else:
files[home + '/.bashrc'] = {
'delete': True,
}

if attrs.get('enable_linger', False):
linger_test = ''
|
|
|
@ -36,7 +36,7 @@ def add_users_from_json(metadata):
|
|||
if config.get('is_admin', False) or uname in metadata_users:
users[uname] = {
'ssh_pubkey': set(config['ssh_pubkey']),
'sudo_commands': ['ALL'],
'is_admin': config.get('is_admin', False),
}

# Then, run again to get all 'to be deleted' users
|
|
|
@ -52,7 +52,7 @@ if node.has_bundle('arch-with-gui'):
|
|||
def libvirt_group_for_admins(metadata):
result = {}
for user, config in metadata.get('users', {}).items():
if 'ALL' in config.get('sudo_commands', set()):
if config.get('is_admin', False):
result[user] = {
'groups': {
'libvirt',
|
|
10
bundles/wireguard/files/pppd-ip-up
Normal file
|
@ -0,0 +1,10 @@
|
|||
#!/bin/bash

# We need to send some traffic over the wireguard tunnel to make sure
# it gets connected. Easiest way is to simply send some pings to the
# other side.

% for peer, config in sorted(node.metadata.get('wireguard/peers', {}).items()):
# refresh connection to ${peer}
/usr/bin/ping -c 4 ${config['their_ip']}
% endfor
|
|
@ -10,12 +10,8 @@ ListenPort=${port}
|
|||
[WireGuardPeer]
PublicKey=${pubkey}
AllowedIPs=0.0.0.0/0
% if psk:
PresharedKey=${psk}
% endif
% if endpoint:
Endpoint=${endpoint}
% endif
% if specials.get('persistent_keepalive', True):
PersistentKeepalive=30
% endif
|
|
46
bundles/wireguard/files/wg_health_check
Normal file
|
@ -0,0 +1,46 @@
|
|||
#!/bin/bash

if [[ -e "/var/lib/bundlewrap/hard-${node.name}/info" ]]
then
# make sure we're not restarting during bw apply
echo "bw apply running"
exit 0
fi

now="$(date +%s)"

everything_up=1

% for peer, ip in sorted(peers.items()):
# ${peer}
if ! /usr/bin/ping -c 4 ${ip} >/dev/null 2>&1
then
echo "${peer} was not reachable!"
everything_up=0
fi

% endfor
if [[ "$everything_up" -eq 1 ]]
then
echo "Everything is up as expected"
echo "$now" > /var/tmp/wg_all_reached
exit 0
fi

five_min_ago="$(expr $now - 300)"
last_reached="$(cat /var/tmp/wg_all_reached)"

if [[ "$last_reached" -lt "$five_min_ago" ]]
then
echo "RESTART"

systemctl restart systemd-networkd

# only restart once an hour
echo "$(expr $now + 3300)" > /var/tmp/wg_all_reached
elif [[ "$last_reached" -gt "$now" ]]
then
echo "Something's broken, but we have recently restarted"
else
echo "Something's broken, but still in grace time"
fi
|
|
@ -25,12 +25,26 @@ for peer, config in sorted(node.metadata.get('wireguard/peers', {}).items()):
|
|||
'peer': peer,
'port': config['my_port'],
'privatekey': node.metadata.get('wireguard/privatekey'),
'psk': config.get('psk'),
'psk': config['psk'],
'pubkey': config['pubkey'],
'specials': repo.libs.s2s.WG_AUTOGEN_SETTINGS.get(peer, {}),
},
'needs': deps,
'triggers': {
'svc_systemd:systemd-networkd:restart',
},
}

files['/usr/local/bin/wg_health_check'] = {
'content_type': 'mako',
'context': {
'peers': node.metadata.get('wireguard/health_checks'),
},
'mode': '0755',
}

if node.has_bundle('pppd'):
files['/etc/ppp/ip-up.d/reconnect-wireguard'] = {
'source': 'pppd-ip-up',
'content_type': 'mako',
'mode': '0755',
}
|
|
|
@ -83,15 +83,10 @@ def peer_psks(metadata):
|
|||
'iface': sub('[^a-z0-9-_]+', '_', peer_name)[:12],
}

try:
repo.get_node(peer_name)

if node.name < peer_name:
peers[peer_name]['psk'] = repo.vault.random_bytes_as_base64_for(f'{node.name} wireguard {peer_name}')
else:
peers[peer_name]['psk'] = repo.vault.random_bytes_as_base64_for(f'{peer_name} wireguard {node.name}')
except NoSuchNode:
pass
if node.name < peer_name:
peers[peer_name]['psk'] = repo.vault.random_bytes_as_base64_for(f'{node.name} wireguard {peer_name}')
else:
peers[peer_name]['psk'] = repo.vault.random_bytes_as_base64_for(f'{peer_name} wireguard {node.name}')

return {
'wireguard': {
|
@ -180,13 +175,11 @@ def peer_endpoints(metadata):
|
|||
except NoSuchNode:
continue

if repo.libs.s2s.WG_AUTOGEN_SETTINGS.get(name, {}).get('no_autoconnect'):
continue

peers[rnode.name] = {
'endpoint': '{}:{}'.format(
rnode.hostname,
rnode.metadata.get(f'wireguard/peers/{node.name}/my_port'),
rnode.metadata.get('wireguard/external_hostname', rnode.hostname),
rnode.metadata.get(f'wireguard/peers/{node.name}/my_port', 51820),
),
}
|
@ -231,9 +224,7 @@ def firewall(metadata):
|
|||
except NoSuchNode: # roadwarrior
ports['{}/udp'.format(config['my_port'])] = atomic(set(metadata.get('wireguard/restrict-to', set())))
else:
ports['{}/udp'.format(config['my_port'])] = atomic(
set(repo.libs.s2s.WG_AUTOGEN_SETTINGS.get(name, {}).get('firewall', set())) | {name}
)
ports['{}/udp'.format(config['my_port'])] = atomic({name})

return {
'firewall': {
|
@ -258,7 +249,7 @@ def interface_ips(metadata):
|
|||
my_ip = '{}/31'.format(config['my_ip'])

ips = {my_ip}
if snat_ip and peer in repo.libs.s2s.WG_AUTOGEN_NODES:
if snat_ip:
ips.add(snat_ip)

their_ip = config['their_ip']
|
@ -294,14 +285,12 @@ def snat(metadata):
|
|||
forward.add(f'iifname wg_{config["iface"]} accept')
forward.add(f'oifname wg_{config["iface"]} accept')

if snat_ip and peer in repo.libs.s2s.WG_AUTOGEN_NODES:
if snat_ip:
postrouting.add('ip saddr {} ip daddr != {} snat to {}'.format(
config['my_ip'],
config['their_ip'],
snat_ip,
))
elif config.get('masquerade', False):
postrouting.add(f'oifname wg_{peer} masquerade')

return {
'nftables': {
|
@ -313,3 +302,40 @@ def snat(metadata):
|
|||
},
},
}


@metadata_reactor.provides(
'wireguard/health_checks',
'systemd-timers/timers/wg-health-check',
)
def health_checks(metadata):
checks = {}

for peer, config in metadata.get('wireguard/peers', {}).items():
if (
config.get('exclude_from_monitoring', False)
or not config.get('auto_connection', True)
or 'endpoint' not in config
):
continue

checks[peer] = config['their_ip']

if checks:
timer = {
'wg-health-check': {
'command': '/usr/local/bin/wg_health_check',
'when': 'minutely',
},
}
else:
timer = {}

return {
'systemd-timers': {
'timers': timer,
},
'wireguard': {
'health_checks': checks,
},
}
|
|
|
@ -1,5 +1,10 @@
|
|||
109.160.36.0/24
|
||||
109.160.37.0/24
|
||||
109.160.38.0/24
|
||||
109.160.39.0/24
|
||||
109.160.40.0/24
|
||||
109.160.41.0/24
|
||||
109.237.176.0/20
|
||||
116.50.16.0/21
|
||||
129.181.208.0/21
|
||||
129.181.216.0/22
|
||||
137.170.112.0/24
|
||||
|
@ -13,12 +18,19 @@
|
|||
139.12.255.0/24
|
||||
139.12.3.0/24
|
||||
139.12.4.0/24
|
||||
141.11.17.0/24
|
||||
141.11.18.0/24
|
||||
141.11.247.0/24
|
||||
141.169.240.0/20
|
||||
141.77.0.0/16
|
||||
141.98.44.0/24
|
||||
143.99.213.0/24
|
||||
145.225.1.0/24
|
||||
145.225.148.0/22
|
||||
145.225.152.0/24
|
||||
145.225.16.0/23
|
||||
146.247.58.0/24
|
||||
145.225.2.0/24
|
||||
147.136.68.0/22
|
||||
147.136.76.0/22
|
||||
147.136.84.0/22
|
||||
147.161.22.0/24
|
||||
147.78.17.0/24
|
||||
|
@ -43,10 +55,8 @@
|
|||
153.17.255.0/24
|
||||
153.96.218.0/24
|
||||
153.96.22.0/24
|
||||
153.97.32.0/24
|
||||
158.116.231.0/24
|
||||
160.211.126.0/24
|
||||
163.5.168.0/24
|
||||
164.133.10.0/24
|
||||
164.133.11.0/24
|
||||
164.133.150.0/24
|
||||
|
@ -55,21 +65,18 @@
|
|||
164.133.91.0/24
|
||||
164.133.98.0/24
|
||||
164.133.99.0/24
|
||||
168.199.128.0/22
|
||||
168.199.160.0/22
|
||||
168.199.192.0/22
|
||||
168.199.212.0/22
|
||||
164.18.96.0/21
|
||||
170.237.92.0/23
|
||||
171.25.178.0/24
|
||||
176.221.24.0/24
|
||||
176.221.25.0/24
|
||||
176.53.136.0/24
|
||||
176.53.137.0/24
|
||||
179.61.160.0/22
|
||||
185.100.160.0/22
|
||||
185.101.244.0/23
|
||||
185.101.246.0/23
|
||||
185.101.4.0/22
|
||||
185.109.108.0/22
|
||||
185.112.249.0/24
|
||||
185.114.200.0/22
|
||||
185.124.48.0/24
|
||||
185.126.168.0/22
|
||||
|
@ -90,31 +97,33 @@
|
|||
185.172.38.0/24
|
||||
185.172.39.0/24
|
||||
185.180.224.0/24
|
||||
185.183.212.0/23
|
||||
185.183.214.0/23
|
||||
185.188.64.0/24
|
||||
185.198.13.0/24
|
||||
185.202.32.0/21
|
||||
185.203.148.0/22
|
||||
185.206.69.0/24
|
||||
185.207.46.0/24
|
||||
185.235.71.0/24
|
||||
185.215.183.0/24
|
||||
185.230.136.0/24
|
||||
185.237.0.0/24
|
||||
185.237.1.0/24
|
||||
185.237.2.0/24
|
||||
185.240.85.0/24
|
||||
185.242.224.0/24
|
||||
185.243.44.0/22
|
||||
185.243.44.0/24
|
||||
185.243.45.0/24
|
||||
185.243.46.0/24
|
||||
185.243.47.0/24
|
||||
185.250.42.0/23
|
||||
185.28.208.0/22
|
||||
185.39.12.0/22
|
||||
185.48.0.0/22
|
||||
185.57.24.0/24
|
||||
185.82.160.0/23
|
||||
185.91.204.0/22
|
||||
185.95.156.0/24
|
||||
185.95.157.0/24
|
||||
185.95.158.0/24
|
||||
185.95.159.0/24
|
||||
188.208.103.0/24
|
||||
192.109.121.0/24
|
||||
192.109.122.0/24
|
||||
192.109.124.0/24
|
||||
|
@ -132,8 +141,10 @@
|
|||
192.109.209.0/24
|
||||
192.109.54.0/24
|
||||
192.109.96.0/24
|
||||
192.124.252.0/24
|
||||
192.129.58.0/24
|
||||
192.145.8.0/22
|
||||
192.166.146.0/23
|
||||
192.166.253.0/24
|
||||
192.166.49.0/24
|
||||
192.166.52.0/24
|
||||
|
@ -141,6 +152,7 @@
|
|||
192.31.102.0/24
|
||||
192.54.39.0/24
|
||||
192.54.48.0/24
|
||||
192.54.66.0/24
|
||||
192.54.73.0/24
|
||||
192.54.79.0/24
|
||||
192.67.167.0/24
|
||||
|
@ -176,21 +188,19 @@
|
|||
193.110.102.0/23
|
||||
193.110.102.0/24
|
||||
193.110.103.0/24
|
||||
193.124.35.0/24
|
||||
193.138.91.0/24
|
||||
193.141.143.0/24
|
||||
193.141.180.0/23
|
||||
193.141.91.0/24
|
||||
193.143.24.0/22
|
||||
193.151.248.0/22
|
||||
193.158.0.0/15
|
||||
193.16.184.0/23
|
||||
193.16.235.0/24
|
||||
193.163.15.0/24
|
||||
193.168.0.0/24
|
||||
193.168.232.0/22
|
||||
193.168.234.0/23
|
||||
193.169.204.0/23
|
||||
193.178.226.0/23
|
||||
193.188.196.0/24
|
||||
193.201.170.0/24
|
||||
193.201.206.0/24
|
||||
|
@ -198,7 +208,6 @@
|
|||
193.22.110.0/24
|
||||
193.22.111.0/24
|
||||
193.22.16.0/22
|
||||
193.22.164.0/24
|
||||
193.22.174.0/24
|
||||
193.22.205.0/24
|
||||
193.22.29.0/24
|
||||
|
@ -224,10 +233,12 @@
|
|||
193.28.34.0/23
|
||||
193.28.48.0/23
|
||||
193.28.50.0/24
|
||||
193.28.64.0/21
|
||||
193.29.112.0/24
|
||||
193.29.115.0/24
|
||||
193.29.116.0/24
|
||||
193.29.126.0/24
|
||||
193.29.152.0/21
|
||||
193.29.158.0/24
|
||||
193.3.240.0/24
|
||||
193.30.136.0/22
|
||||
|
@ -243,10 +254,8 @@
|
|||
193.41.10.0/23
|
||||
193.47.164.0/24
|
||||
193.53.93.0/24
|
||||
193.56.21.0/24
|
||||
193.58.253.0/24
|
||||
193.84.136.0/22
|
||||
193.96.230.0/24
|
||||
193.96.232.0/23
|
||||
193.97.238.0/24
|
||||
193.98.181.0/24
|
||||
|
@ -263,8 +272,6 @@
|
|||
194.115.120.0/24
|
||||
194.115.163.0/24
|
||||
194.115.182.0/23
|
||||
194.115.182.0/24
|
||||
194.115.183.0/24
|
||||
194.115.52.0/24
|
||||
194.115.66.0/24
|
||||
194.115.88.0/21
|
||||
|
@ -286,7 +293,6 @@
|
|||
194.127.182.0/24
|
||||
194.127.195.0/24
|
||||
194.127.208.0/22
|
||||
194.127.242.0/23
|
||||
194.127.254.0/24
|
||||
194.145.252.0/24
|
||||
194.15.194.0/24
|
||||
|
@ -294,7 +300,6 @@
|
|||
194.15.61.0/24
|
||||
194.15.64.0/21
|
||||
194.15.72.0/22
|
||||
194.150.228.0/23
|
||||
194.153.86.0/24
|
||||
194.156.128.0/22
|
||||
194.156.148.0/24
|
||||
|
@ -317,7 +322,6 @@
|
|||
194.25.0.0/16
|
||||
194.25.1.5/32
|
||||
194.26.191.0/24
|
||||
194.31.142.0/24
|
||||
194.31.208.0/24
|
||||
194.31.209.0/24
|
||||
194.31.210.0/24
|
||||
|
@ -332,7 +336,6 @@
|
|||
194.39.48.0/20
|
||||
194.39.48.0/21
|
||||
194.39.56.0/21
|
||||
194.39.61.0/24
|
||||
194.39.62.0/24
|
||||
194.39.63.0/24
|
||||
194.39.88.0/21
|
||||
|
@ -355,8 +358,6 @@
|
|||
194.55.63.0/24
|
||||
194.55.64.0/20
|
||||
194.55.87.0/24
|
||||
194.58.40.0/24
|
||||
194.58.56.0/23
|
||||
194.59.143.0/24
|
||||
194.59.150.0/24
|
||||
194.59.151.0/24
|
||||
|
@ -382,27 +383,15 @@
|
|||
194.76.52.0/24
|
||||
194.77.41.0/24
|
||||
194.77.42.0/24
|
||||
194.85.248.0/24
|
||||
194.85.251.0/24
|
||||
194.87.10.0/24
|
||||
194.87.17.0/24
|
||||
194.87.255.0/24
|
||||
194.87.77.0/24
|
||||
194.88.112.0/20
|
||||
194.88.16.0/21
|
||||
194.88.24.0/23
|
||||
194.88.26.0/24
|
||||
194.88.28.0/23
|
||||
194.88.96.0/21
|
||||
194.99.118.0/24
|
||||
194.99.34.0/24
|
||||
194.99.76.0/23
|
||||
194.99.83.0/24
|
||||
194.99.92.0/22
|
||||
195.133.20.0/24
|
||||
195.133.64.0/22
|
||||
195.133.7.0/24
|
||||
195.133.76.0/24
|
||||
195.137.216.0/23
|
||||
195.138.223.0/24
|
||||
195.144.15.0/24
|
||||
|
@ -412,8 +401,8 @@
|
|||
195.178.132.0/22
|
||||
195.190.2.0/24
|
||||
195.192.254.0/24
|
||||
195.20.114.0/23
|
||||
195.200.207.0/24
|
||||
195.226.200.0/24
|
||||
195.230.116.0/24
|
||||
195.234.133.0/24
|
||||
195.243.0.0/16
|
||||
|
@ -422,13 +411,13 @@
|
|||
195.248.140.0/23
|
||||
195.248.144.0/23
|
||||
195.248.89.0/24
|
||||
195.250.48.0/24
|
||||
195.250.50.0/24
|
||||
195.250.57.0/24
|
||||
195.36.64.0/18
|
||||
195.36.81.0/24
|
||||
195.36.90.0/24
|
||||
195.36.91.0/24
|
||||
195.66.83.0/24
|
||||
195.68.204.0/23
|
||||
195.74.94.0/24
|
||||
195.78.249.0/24
|
||||
|
@ -436,18 +425,12 @@
|
|||
198.40.90.0/24
|
||||
198.57.10.0/24
|
||||
2.160.0.0/12
|
||||
2.58.102.0/24
|
||||
204.69.32.0/24
|
||||
205.142.63.0/24
|
||||
212.102.107.0/24
|
||||
212.184.0.0/15
|
||||
212.185.0.0/16
|
||||
212.87.217.0/24
|
||||
213.145.90.0/23
|
||||
213.145.92.0/23
|
||||
213.173.0.0/19
|
||||
213.209.136.0/24
|
||||
213.209.149.0/24
|
||||
213.209.156.0/24
|
||||
217.0.0.0/13
|
||||
217.117.96.0/24
|
||||
217.224.0.0/11
|
||||
|
@ -457,60 +440,42 @@
|
|||
217.80.0.0/12
|
||||
31.212.0.0/15
|
||||
31.224.0.0/11
|
||||
31.6.56.0/23
|
||||
37.143.0.0/22
|
||||
37.230.56.0/24
|
||||
37.230.57.0/24
|
||||
37.230.58.0/23
|
||||
37.230.60.0/24
|
||||
37.230.63.0/24
|
||||
31.6.52.0/22
|
||||
37.46.11.0/24
|
||||
37.50.0.0/15
|
||||
37.80.0.0/12
|
||||
45.128.14.0/23
|
||||
45.132.217.0/24
|
||||
45.10.157.0/24
|
||||
45.128.158.0/23
|
||||
45.132.80.0/22
|
||||
45.140.208.0/24
|
||||
45.141.130.0/24
|
||||
45.142.236.0/24
|
||||
45.145.241.0/24
|
||||
45.145.243.0/24
|
||||
45.147.227.0/24
|
||||
45.81.255.0/24
|
||||
45.83.136.0/22
|
||||
45.84.214.0/24
|
||||
45.93.186.0/23
|
||||
46.20.216.0/21
|
||||
46.250.224.0/21
|
||||
46.250.232.0/21
|
||||
45.140.8.0/23
|
||||
45.141.232.0/24
|
||||
45.141.62.0/23
|
||||
45.151.112.0/23
|
||||
45.151.114.0/23
|
||||
45.154.238.0/23
|
||||
45.157.202.0/23
|
||||
45.157.32.0/23
|
||||
45.90.184.0/22
|
||||
46.78.0.0/15
|
||||
46.80.0.0/12
|
||||
5.10.208.0/24
|
||||
5.10.209.0/24
|
||||
5.10.220.0/24
|
||||
5.133.112.0/24
|
||||
5.249.188.0/22
|
||||
5.35.192.0/21
|
||||
62.153.0.0/16
|
||||
62.154.0.0/15
|
||||
62.155.0.0/16
|
||||
62.156.0.0/14
|
||||
62.156.153.0/24
|
||||
62.156.168.0/24
|
||||
62.192.152.0/24
|
||||
62.224.0.0/14
|
||||
62.56.208.0/21
|
||||
62.68.73.0/24
|
||||
64.137.119.0/24
|
||||
64.137.125.0/24
|
||||
64.137.127.0/24
|
||||
77.242.149.0/24
|
||||
62.76.229.0/24
|
||||
77.47.152.0/22
|
||||
77.83.136.0/23
|
||||
77.83.138.0/23
|
||||
77.83.32.0/22
|
||||
77.90.156.0/24
|
||||
77.90.184.0/24
|
||||
79.139.52.0/22
|
||||
78.159.131.0/24
|
||||
79.192.0.0/10
|
||||
80.128.0.0/11
|
||||
80.128.0.0/12
|
||||
|
@ -523,49 +488,40 @@
|
|||
80.187.0.0/16
|
||||
80.187.160.0/20
|
||||
80.64.240.0/22
|
||||
80.71.231.0/24
|
||||
80.71.233.0/24
|
||||
80.71.235.0/24
|
||||
80.71.236.0/24
|
||||
80.71.238.0/24
|
||||
81.201.32.0/20
|
||||
81.30.96.0/20
|
||||
82.152.178.0/24
|
||||
82.163.60.0/22
|
||||
82.206.32.0/21
|
||||
82.206.40.0/21
|
||||
82.215.70.0/24
|
||||
83.136.208.0/22
|
||||
83.147.36.0/22
|
||||
83.147.40.0/22
|
||||
83.243.48.0/21
|
||||
83.243.55.0/24
|
||||
84.128.0.0/10
|
||||
84.234.16.0/20
|
||||
84.246.108.0/24
|
||||
84.32.108.0/22
|
||||
84.32.20.0/22
|
||||
84.32.48.0/22
|
||||
85.116.28.0/24
|
||||
85.116.29.0/24
|
||||
85.116.30.0/24
|
||||
85.116.31.0/24
|
||||
84.32.56.0/22
|
||||
84.46.240.0/20
|
||||
85.119.160.0/23
|
||||
85.204.160.0/22
|
||||
85.208.248.0/24
|
||||
85.208.249.0/24
|
||||
85.208.250.0/24
|
||||
85.208.251.0/24
|
||||
85.237.76.0/22
|
||||
85.239.148.0/24
|
||||
85.239.149.0/24
|
||||
85.239.150.0/24
|
||||
85.239.151.0/24
|
||||
86.38.156.0/24
|
||||
86.38.248.0/21
|
||||
86.38.37.0/24
|
||||
87.128.0.0/10
|
||||
87.128.0.0/11
|
||||
87.237.240.0/21
|
||||
88.128.0.0/16
|
||||
88.135.96.0/20
|
||||
88.216.60.0/22
|
||||
88.216.208.0/24
|
||||
89.116.248.0/24
|
||||
89.116.64.0/22
|
||||
89.213.186.0/23
|
||||
89.117.172.0/22
|
||||
89.35.127.0/24
|
||||
89.43.34.0/24
|
||||
89.35.72.0/24
|
||||
91.0.0.0/10
|
||||
91.103.240.0/21
|
||||
91.189.192.0/21
|
||||
|
@ -587,38 +543,42 @@
|
|||
91.212.130.0/24
|
||||
91.212.243.0/24
|
||||
91.213.116.0/24
|
||||
91.214.10.0/24
|
||||
91.215.116.0/22
|
||||
91.216.242.0/24
|
||||
91.216.45.0/24
|
||||
91.217.214.0/24
|
||||
91.222.232.0/22
|
||||
91.227.98.0/23
|
||||
91.232.136.0/22
|
||||
91.232.54.0/24
|
||||
92.114.44.0/22
|
||||
92.119.164.0/22
|
||||
91.92.33.0/24
|
||||
91.92.34.0/24
|
||||
91.92.35.0/24
|
||||
91.92.49.0/24
|
||||
92.118.161.0/24
|
||||
92.119.208.0/24
|
||||
92.119.209.0/24
|
||||
92.119.210.0/24
|
||||
92.119.211.0/24
|
||||
93.119.184.0/21
|
||||
93.152.205.0/24
|
||||
93.152.207.0/24
|
||||
93.152.209.0/24
|
||||
93.152.215.0/24
|
||||
93.152.219.0/24
|
||||
93.152.221.0/24
|
||||
93.152.223.0/24
|
||||
93.152.224.0/24
|
||||
93.152.225.0/24
|
||||
93.192.0.0/10
|
||||
93.95.119.0/24
|
||||
94.126.98.0/24
|
||||
94.26.110.0/23
|
||||
94.26.64.0/23
|
||||
95.178.8.0/21
|
||||
94.26.90.0/24
|
||||
2001:650:cc02::/48
|
||||
2001:678:184::/48
|
||||
2001:678:36c::/48
|
||||
2001:678:480::/48
|
||||
2001:678:5b0::/48
|
||||
2001:678:5d4::/48
|
||||
2001:678:a04::/48
|
||||
2001:678:adc::/48
|
||||
2001:678:b38::/48
|
||||
2001:678:bdc::/48
|
||||
2001:678:d4c::/48
|
||||
2001:678:e9c::/48
|
||||
2001:678:ff0::/48
|
||||
|
@ -638,28 +598,10 @@
|
|||
2001:67c:764::/48
|
||||
2001:67c:94c::/48
|
||||
2001:67c:a34::/48
|
||||
2001:67c:b80::/48
|
||||
2001:67c:c84::/48
|
||||
2001:67c:c9c::/48
|
||||
2003:3c0::/28
|
||||
2003:3e0::/28
|
||||
2003:8:1800::/48
|
||||
2003:8:1803::/48
|
||||
2003:8:f400::/48
|
||||
2003:8:f401::/48
|
||||
2003:8:f402::/48
|
||||
2003:8:f403::/48
|
||||
2003:8:f404::/48
|
||||
2003:8:f405::/48
|
||||
2003:8:f406::/48
|
||||
2003:8:f407::/48
|
||||
2003:8:f408::/48
|
||||
2003:8:f409::/48
|
||||
2003:8:f40a::/48
|
||||
2003:8:f40b::/48
|
||||
2003:8:f40c::/48
|
||||
2003:8:f40d::/48
|
||||
2003:8:f40e::/48
|
||||
2003::/19
|
||||
2003::/20
|
||||
2003::/23
|
||||
|
@ -676,10 +618,12 @@
|
|||
2a06:1800::/29
|
||||
2a06:1a80::/29
|
||||
2a06:7180::/29
|
||||
2a07:b982:c000::/48
|
||||
2a09:6f80::/29
|
||||
2a09:8180::/30
|
||||
2a0a:5340:ffff::/48
|
||||
2a0a:a3c0:b0::/44
|
||||
2a0b:3c41:1::/48
|
||||
2a0b:3c41:2::/48
|
||||
2a0c:9e02:1000::/40
|
||||
2a0c:9e02:100::/40
|
||||
|
@ -695,8 +639,5 @@
|
|||
2a0d:480::/30
|
||||
2a0d:484::/30
|
||||
2a0e:eb40::/32
|
||||
2a0f:15c0::/32
|
||||
2a10:cd80::/29
|
||||
2a11:7400:d1::/48
|
||||
2a12:6900:1000::/40
|
||||
2a13:9500:2::/48
|
||||
|
|
|
@ -6,7 +6,8 @@
|
|||
109.250.160.0/19
|
||||
109.250.192.0/19
|
||||
109.250.224.0/19
|
||||
109.250.32.0/19
|
||||
109.250.32.0/20
|
||||
109.250.48.0/20
|
||||
109.250.64.0/19
|
||||
109.250.80.0/22
|
||||
109.250.84.0/22
|
||||
|
@ -66,22 +67,16 @@
|
|||
193.22.3.0/24
|
||||
193.28.72.0/21
|
||||
193.29.240.0/24
|
||||
193.29.241.0/24
|
||||
193.29.242.0/24
|
||||
193.29.243.0/24
|
||||
193.29.244.0/24
|
||||
193.29.245.0/24
|
||||
193.29.246.0/24
|
||||
193.29.247.0/24
|
||||
193.30.132.0/24
|
||||
193.30.140.0/24
|
||||
193.96.238.0/24
|
||||
193.98.229.0/24
|
||||
193.98.40.0/22
|
||||
193.99.160.0/21
|
||||
194.115.182.0/23
|
||||
194.115.182.0/24
|
||||
194.115.183.0/24
|
||||
194.113.252.0/23
|
||||
194.113.253.0/24
|
||||
194.115.26.0/24
|
||||
194.120.182.0/23
|
||||
194.120.182.0/24
|
||||
|
@ -97,11 +92,9 @@
|
|||
194.156.232.0/23
|
||||
194.156.233.0/24
|
||||
194.174.168.0/22
|
||||
194.180.18.0/24
|
||||
194.180.53.0/24
|
||||
194.180.64.0/20
|
||||
194.187.112.0/24
|
||||
194.30.180.0/24
|
||||
194.31.92.0/24
|
||||
194.39.185.0/24
|
||||
194.39.87.0/24
|
||||
|
@ -113,6 +106,7 @@
|
|||
194.88.25.0/24
|
||||
194.9.190.0/24
|
||||
194.99.0.0/21
|
||||
194.99.113.0/24
|
||||
195.149.80.0/23
|
||||
195.167.208.0/20
|
||||
195.191.20.0/23
|
||||
|
@ -209,7 +203,8 @@
|
|||
83.135.0.0/16
|
||||
83.135.0.0/22
|
||||
83.135.112.0/20
|
||||
83.135.128.0/19
|
||||
83.135.128.0/20
|
||||
83.135.144.0/20
|
||||
83.135.16.0/22
|
||||
83.135.160.0/21
|
||||
83.135.164.0/22
|
||||
|
@ -250,6 +245,9 @@
|
|||
83.135.64.0/19
|
||||
83.135.8.0/21
|
||||
83.135.96.0/20
|
||||
83.243.48.0/21
|
||||
83.243.48.0/22
|
||||
83.243.52.0/22
|
||||
84.19.192.0/19
|
||||
84.19.192.0/20
|
||||
84.19.208.0/20
|
||||
|
@ -259,7 +257,7 @@
|
|||
87.122.128.0/21
|
||||
87.122.136.0/22
|
||||
87.122.144.0/20
|
||||
87.122.16.0/20
|
||||
87.122.16.0/22
|
||||
87.122.160.0/20
|
||||
87.122.176.0/21
|
||||
87.122.184.0/24
|
||||
|
@ -270,15 +268,21 @@
|
|||
87.122.189.0/24
|
||||
87.122.190.0/24
|
||||
87.122.191.0/24
|
||||
87.122.192.0/19
|
||||
87.122.192.0/20
|
||||
87.122.20.0/22
|
||||
87.122.208.0/20
|
||||
87.122.224.0/19
|
||||
87.122.24.0/21
|
||||
87.122.32.0/19
|
||||
87.122.64.0/19
|
||||
87.122.64.0/20
|
||||
87.122.80.0/20
|
||||
87.122.96.0/19
|
||||
87.123.0.0/16
|
||||
87.123.0.0/19
|
||||
87.123.0.0/20
|
||||
87.123.112.0/20
|
||||
87.123.128.0/19
|
||||
87.123.128.0/20
|
||||
87.123.144.0/20
|
||||
87.123.16.0/20
|
||||
87.123.160.0/20
|
||||
87.123.176.0/20
|
||||
87.123.192.0/20
|
||||
|
@ -292,13 +296,13 @@
|
|||
87.123.253.0/24
|
||||
87.123.254.0/24
|
||||
87.123.255.0/24
|
||||
87.123.32.0/19
|
||||
87.123.48.0/20
|
||||
87.123.64.0/20
|
||||
87.123.80.0/20
|
||||
87.123.96.0/19
|
||||
87.123.96.0/20
|
||||
88.130.0.0/16
|
||||
88.130.0.0/19
|
||||
88.130.112.0/20
|
||||
88.130.130.0/23
|
||||
88.130.132.0/22
|
||||
88.130.136.0/21
|
||||
|
@ -354,16 +358,17 @@
|
|||
88.130.62.0/24
|
||||
88.130.63.0/24
|
||||
88.130.64.0/19
|
||||
88.130.96.0/19
|
||||
88.130.96.0/20
|
||||
89.244.0.0/14
|
||||
89.244.0.0/16
|
||||
89.244.112.0/21
|
||||
89.244.120.0/21
|
||||
89.244.120.0/22
|
||||
89.244.124.0/24
|
||||
89.244.125.0/24
|
||||
89.244.126.0/24
|
||||
89.244.127.0/24
|
||||
89.244.160.0/21
|
||||
89.244.160.0/20
|
||||
89.244.164.0/22
|
||||
89.244.168.0/21
|
||||
89.244.176.0/20
|
||||
|
@ -372,6 +377,7 @@
|
|||
89.244.240.0/20
|
||||
89.244.64.0/21
|
||||
89.244.72.0/22
|
||||
89.244.76.0/22
|
||||
89.244.80.0/20
|
||||
89.244.96.0/20
|
||||
89.245.0.0/16
|
||||
|
@ -389,13 +395,13 @@
|
|||
89.245.191.0/24
|
||||
89.245.192.0/19
|
||||
89.245.224.0/19
|
||||
89.245.32.0/19
|
||||
89.245.32.0/20
|
||||
89.245.48.0/20
|
||||
89.245.64.0/20
|
||||
89.245.80.0/20
|
||||
89.245.96.0/20
|
||||
89.246.0.0/16
|
||||
89.246.0.0/19
|
||||
89.246.0.0/20
|
||||
89.246.104.0/23
|
||||
89.246.106.0/24
|
||||
89.246.107.0/24
|
||||
|
@ -410,8 +416,8 @@
|
|||
89.246.122.0/24
|
||||
89.246.123.0/24
|
||||
89.246.124.0/22
|
||||
89.246.16.0/20
|
||||
89.246.160.0/20
|
||||
89.246.160.0/21
|
||||
89.246.176.0/22
|
||||
89.246.180.0/22
|
||||
89.246.184.0/21
|
||||
|
@ -421,20 +427,21 @@
|
|||
89.246.56.0/21
|
||||
89.246.96.0/21
|
||||
89.247.0.0/16
|
||||
89.247.0.0/19
|
||||
89.247.0.0/20
|
||||
89.247.112.0/21
|
||||
89.247.120.0/22
|
||||
89.247.124.0/24
|
||||
89.247.125.0/24
|
||||
89.247.126.0/24
|
||||
89.247.127.0/24
|
||||
89.247.144.0/20
|
||||
89.247.144.0/22
|
||||
89.247.152.0/21
|
||||
89.247.16.0/20
|
||||
89.247.160.0/20
|
||||
89.247.192.0/20
|
||||
89.247.208.0/21
|
||||
89.247.216.0/22
|
||||
89.247.224.0/21
|
||||
89.247.232.0/21
|
||||
89.247.232.0/22
|
||||
89.247.236.0/22
|
||||
89.247.240.0/21
|
||||
|
@ -443,14 +450,15 @@
|
|||
89.247.253.0/24
|
||||
89.247.254.0/24
|
||||
89.247.255.0/24
|
||||
89.247.32.0/19
|
||||
89.247.32.0/20
|
||||
89.247.48.0/20
|
||||
89.247.64.0/20
|
||||
89.247.80.0/20
|
||||
89.247.96.0/20
|
||||
89.27.128.0/17
|
||||
89.27.153.0/24
|
||||
91.194.180.0/23
|
||||
91.195.104.0/23
|
||||
91.198.67.0/24
|
||||
91.199.158.0/24
|
||||
91.201.128.0/22
|
||||
|
@ -461,6 +469,8 @@
|
|||
91.208.212.0/24
|
||||
91.217.145.0/24
|
||||
91.220.125.0/24
|
||||
91.223.2.0/24
|
||||
91.223.41.0/24
|
||||
91.229.3.0/24
|
||||
92.116.0.0/15
|
||||
92.116.0.0/20
|
||||
|
@ -469,8 +479,10 @@
|
|||
92.116.128.0/18
|
||||
92.116.16.0/20
|
||||
92.116.192.0/19
|
||||
92.116.224.0/19
|
||||
92.116.32.0/19
|
||||
92.116.224.0/20
|
||||
92.116.240.0/20
|
||||
92.116.32.0/20
|
||||
92.116.48.0/20
|
||||
92.116.64.0/18
|
||||
92.116.96.0/19
|
||||
92.117.0.0/19
|
||||
|
@ -486,6 +498,7 @@
|
|||
94.134.0.0/15
|
||||
94.134.0.0/18
|
||||
94.134.100.0/22
|
||||
94.134.104.0/21
|
||||
94.134.112.0/21
|
||||
94.134.120.0/24
|
||||
94.134.121.0/24
|
||||
|
@ -496,7 +509,9 @@
|
|||
94.134.126.0/24
|
||||
94.134.127.0/24
|
||||
94.134.128.0/20
|
||||
94.134.144.0/20
|
||||
94.134.144.0/22
|
||||
94.134.148.0/22
|
||||
94.134.152.0/21
|
||||
94.134.160.0/21
|
||||
94.134.168.0/22
|
||||
94.134.172.0/22
|
||||
|
@ -520,7 +535,6 @@
|
|||
94.134.93.0/24
|
||||
94.134.94.0/24
|
||||
94.134.95.0/24
|
||||
94.134.96.0/20
|
||||
94.134.96.0/22
|
||||
2001:1438:1000::/36
|
||||
2001:1438:2000::/36
|
||||
|
@ -550,7 +564,6 @@
|
|||
2001:16b8:1200::/40
|
||||
2001:16b8:1300::/40
|
||||
2001:16b8:1400::/40
|
||||
2001:16b8:2000::/35
|
||||
2001:16b8:2000::/40
|
||||
2001:16b8:200::/40
|
||||
2001:16b8:2100::/40
|
||||
|
@ -568,10 +581,14 @@
|
|||
2001:16b8:2d00::/40
|
||||
2001:16b8:2e00::/40
|
||||
2001:16b8:300::/40
|
||||
2001:16b8:4000::/35
|
||||
2001:16b8:4000::/40
|
||||
2001:16b8:400::/40
|
||||
2001:16b8:4100::/40
|
||||
2001:16b8:4200::/40
|
||||
2001:16b8:4300::/40
|
||||
2001:16b8:4500::/40
|
||||
2001:16b8:4600::/40
|
||||
2001:16b8:500::/40
|
||||
2001:16b8:6000::/35
|
||||
2001:16b8:6000::/40
|
||||
2001:16b8:600::/40
|
||||
2001:16b8:6100::/40
|
||||
|
@ -583,16 +600,13 @@
|
|||
2001:16b8:6700::/40
|
||||
2001:16b8:6800::/40
|
||||
2001:16b8:700::/40
|
||||
2001:16b8:8000::/36
|
||||
2001:16b8:800::/40
|
||||
2001:16b8:9000::/36
|
||||
2001:16b8:900::/40
|
||||
2001:16b8::/32
|
||||
2001:16b8::/35
|
||||
2001:16b8::/40
|
||||
2001:16b8:a000::/35
|
||||
2001:16b8:a00::/40
|
||||
2001:16b8:b00::/40
|
||||
2001:678:274::/48
|
||||
2001:678:c74::/48
|
||||
2001:67c:27ac::/48
|
||||
2001:67c:2878::/48
|
||||
|
|
|
@ -91,7 +91,7 @@
|
|||
"untagged_vlan": "home.clients"
|
||||
},
|
||||
"ether19": {
|
||||
"description": "home.lgtv-wohnzimmer",
|
||||
"description": "home.kodi-wohnzimmer",
|
||||
"enabled": true,
|
||||
"ips": [],
|
||||
"mode": "ACCESS",
|
||||
|
@ -159,8 +159,7 @@
|
|||
"ips": [],
|
||||
"mode": "TAGGED",
|
||||
"tagged_vlans": [
|
||||
"ffwi.client",
|
||||
"home.v6only"
|
||||
"ffwi.client"
|
||||
],
|
||||
"type": "A_1000BASE_T",
|
||||
"untagged_vlan": "home.clients"
|
||||
|
@ -171,8 +170,7 @@
|
|||
"ips": [],
|
||||
"mode": "TAGGED",
|
||||
"tagged_vlans": [
|
||||
"ffwi.client",
|
||||
"home.v6only"
|
||||
"ffwi.client"
|
||||
],
|
||||
"type": "A_1000BASE_T",
|
||||
"untagged_vlan": "home.clients"
|
||||
|
@ -192,8 +190,7 @@
|
|||
"ips": [],
|
||||
"mode": "TAGGED",
|
||||
"tagged_vlans": [
|
||||
"ffwi.client",
|
||||
"home.v6only"
|
||||
"ffwi.client"
|
||||
],
|
||||
"type": "A_1000BASE_T",
|
||||
"untagged_vlan": "home.clients"
|
||||
|
@ -268,10 +265,6 @@
|
|||
"name": "home.dmz",
|
||||
"vid": 1139
|
||||
},
|
||||
{
|
||||
"name": "home.v6only",
|
||||
"vid": 2000
|
||||
},
|
||||
{
|
||||
"name": "ffwi.mesh",
|
||||
"vid": 3000
|
||||
|
|
Some files were not shown because too many files have changed in this diff.