Compare commits


1 Commits

Commit 22bd213509 (2024-06-01 14:58:18 +10:00)
feat: moved puppetdb profiles
- move puppetdb profiles to profiles::puppetdb namespace
- add profile to manage puppetdb api ssl certificates
526 changed files with 1092 additions and 20057 deletions


@ -1,24 +0,0 @@
name: Build
on:
pull_request:
jobs:
precommit:
runs-on: almalinux-8
container:
image: git.unkin.net/unkin/almalinux9-actionsdind:latest
options: --privileged
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Install requirements
run: |
dnf groupinstall -y "Development Tools" -y
dnf install rubygems ruby-devel gcc make redhat-rpm-config glibc-headers glibc-devel -y
- name: Pre-Commit All Files
run: |
uvx pre-commit run --all-files


@ -3,8 +3,3 @@
detectors:
FeatureEnvy:
enabled: false
TooManyStatements:
enabled: false
UncommunicativeVariableName:
accept:
- e


@ -8,6 +8,3 @@ Style/Documentation:
Layout/LineLength:
Max: 140
Metrics/BlockNesting:
Max: 4


@ -2,68 +2,50 @@ forge 'forge.puppetlabs.com'
moduledir 'external_modules'
# puppetlabs
mod 'puppetlabs-stdlib', '9.7.0'
mod 'puppetlabs-inifile', '6.2.0'
mod 'puppetlabs-concat', '9.1.0'
mod 'puppetlabs-vcsrepo', '7.0.0'
mod 'puppetlabs-yumrepo_core', '2.1.0'
mod 'puppetlabs-apt', '10.0.1'
mod 'puppetlabs-lvm', '3.0.1'
mod 'puppetlabs-puppetdb', '7.14.0'
mod 'puppetlabs-postgresql', '9.2.0'
mod 'puppetlabs-firewall', '8.1.4'
mod 'puppetlabs-accounts', '8.2.2'
mod 'puppetlabs-mysql', '16.2.0'
mod 'puppetlabs-stdlib', '9.1.0'
mod 'puppetlabs-inifile', '6.0.0'
mod 'puppetlabs-concat', '9.0.0'
mod 'puppetlabs-vcsrepo', '6.1.0'
mod 'puppetlabs-yumrepo_core', '2.0.0'
mod 'puppetlabs-apt', '9.4.0'
mod 'puppetlabs-lvm', '2.1.0'
mod 'puppetlabs-puppetdb', '7.13.0'
mod 'puppetlabs-postgresql', '9.1.0'
mod 'puppetlabs-firewall', '6.0.0'
mod 'puppetlabs-accounts', '8.1.0'
mod 'puppetlabs-mysql', '15.0.0'
mod 'puppetlabs-xinetd', '3.4.1'
mod 'puppetlabs-haproxy', '8.2.0'
mod 'puppetlabs-java', '11.1.0'
mod 'puppetlabs-reboot', '5.1.0'
mod 'puppetlabs-docker', '10.2.0'
mod 'puppetlabs-mailalias_core', '1.2.0'
mod 'puppetlabs-haproxy', '8.0.0'
mod 'puppetlabs-java', '10.1.2'
mod 'puppetlabs-reboot', '5.0.0'
# puppet
mod 'puppet-python', '7.4.0'
mod 'puppet-systemd', '8.1.0'
mod 'puppet-yum', '7.2.0'
mod 'puppet-archive', '7.1.0'
mod 'puppet-chrony', '3.0.0'
mod 'puppet-puppetboard', '11.0.0'
mod 'puppet-nginx', '6.0.1'
mod 'puppet-selinux', '5.0.0'
mod 'puppet-prometheus', '16.0.0'
mod 'puppet-grafana', '14.1.0'
mod 'puppet-consul', '9.1.0'
mod 'puppet-vault', '4.1.1'
mod 'puppet-python', '7.0.0'
mod 'puppet-systemd', '5.1.0'
mod 'puppet-yum', '7.0.0'
mod 'puppet-archive', '7.0.0'
mod 'puppet-chrony', '2.6.0'
mod 'puppet-puppetboard', '9.0.0'
mod 'puppet-nginx', '5.0.0'
mod 'puppet-selinux', '4.1.0'
mod 'puppet-prometheus', '13.4.0'
mod 'puppet-grafana', '13.1.0'
mod 'puppet-consul', '8.0.0'
mod 'puppet-vault', '4.1.0'
mod 'puppet-dhcp', '6.1.0'
mod 'puppet-keepalived', '5.1.0'
mod 'puppet-extlib', '7.5.1'
mod 'puppet-network', '2.2.1'
mod 'puppet-kmod', '4.1.0'
mod 'puppet-filemapper', '4.0.0'
mod 'puppet-letsencrypt', '11.1.0'
mod 'puppet-rundeck', '9.2.0'
mod 'puppet-redis', '11.1.0'
mod 'puppet-nodejs', '11.0.0'
mod 'puppet-postfix', '5.1.0'
mod 'puppet-alternatives', '6.0.0'
mod 'puppet-keepalived', '3.6.0'
mod 'puppet-extlib', '7.0.0'
# other
mod 'saz-sudo', '9.0.2'
mod 'saz-ssh', '13.1.0'
mod 'saz-limits', '5.0.0'
mod 'ghoneycutt-timezone', '4.0.0'
mod 'ghoneycutt-puppet', '3.3.0'
mod 'saz-sudo', '8.0.0'
mod 'ghoneycutt-timezone', '4.0.0'
mod 'dalen-puppetdbquery', '3.0.1'
mod 'markt-galera', '3.1.0'
mod 'kogitoapp-minio', '1.1.4'
mod 'broadinstitute-certs', '3.0.1'
mod 'stm-file_capability', '6.0.0'
mod 'h0tw1r3-gitea', '3.2.0'
mod 'rehan-mkdir', '2.0.0'
mod 'tailoredautomation-patroni', '2.0.0'
mod 'ssm-crypto_policies', '0.3.3'
mod 'thias-sysctl', '1.0.8'
mod 'cirrax-dovecot', '1.3.3'
mod 'bind',
:git => 'https://git.service.au-syd1.consul/unkinben/puppet-bind.git',


@ -1,9 +0,0 @@
# Group administration
This page lists all of the locally managed groups, their GIDs, and their general purpose.
## List of groups
| name | gid | purpose |
|-------------|-------------|-------------|
| admin | 10000 | admin group designed for system admins |
| media | 20000 | group permissions to manage media (*arrs) |
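As a quick sanity check on a host, the managed groups and their GIDs can be confirmed with `getent` (a minimal sketch; group names and GIDs taken from the table above):
```
# confirm the managed groups exist with the expected gids
getent group admin media
# list the members of the admin group
getent group admin | cut -d: -f4
```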


@ -1,60 +0,0 @@
# managing ceph
Always refer back to the official documentation at https://docs.ceph.com/en/latest
## adding new cephfs
- create an erasure code profile, which allows you to customise the raid level
- raid5 with 3 disks? k=2,m=1
- raid5 with 6 disks? k=5,m=1
- raid6 with 4 disks? k=2,m=2, etc
- create osd pool using custom profile for data
- create osd pool using default replicated profile for metadata
- enable ec_overwrites for the data pool
- create the ceph fs volume using data/metadata pools
- set ceph fs settings
- specify minimum number of metadata servers (mds)
- set fs to be for bulk data
- set mds fast failover with standby replay
```
sudo ceph osd erasure-code-profile set ec_4_1 k=4 m=1
sudo ceph osd pool create media_data 128 erasure ec_4_1
sudo ceph osd pool create media_metadata 32 replicated_rule
sudo ceph osd pool set media_data allow_ec_overwrites true
sudo ceph osd pool set media_data bulk true
sudo ceph fs new mediafs media_metadata media_data --force
sudo ceph fs set mediafs allow_standby_replay true
sudo ceph fs set mediafs max_mds 2
```
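Before handing the filesystem out, the profile, pools and mds layout can be verified; a short sketch using the names created above:
```
# show the erasure code profile parameters (k, m, plugin)
sudo ceph osd erasure-code-profile get ec_4_1
# confirm both pools and their flags (ec overwrites, bulk)
sudo ceph osd pool ls detail | grep media_
# check filesystem health, active mds count and standby-replay
sudo ceph fs status mediafs
```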
## creating authentication tokens
- this will create a client keyring named media
- this client will have the following capabilities:
- mon: read
- mds:
- read /
- read/write /media
- read/write /common
- osd: read/write to cephfs_data pool
```
sudo ceph auth get-or-create client.media \
mon 'allow r' \
mds 'allow r path=/, allow rw path=/media, allow rw path=/common' \
osd 'allow rw pool=cephfs_data'
```
## list the authentication tokens and permissions
```
ceph auth ls
```
## change the capabilities of a token
this will overwrite the current capabilities of a given client.user
```
sudo ceph auth caps client.media \
mon 'allow r' \
mds 'allow rw path=/' \
osd 'allow rw pool=media_data'
```
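To consume the keyring from a client host, export the key and mount the filesystem; a rough sketch assuming the client.media keyring and mediafs volume from above (the monitor address and mount point are placeholders, and older kernels use mds_namespace= instead of fs=):
```
# copy just the secret key onto the client
sudo ceph auth get-key client.media | sudo tee /etc/ceph/media.secret
# mount the /media directory of mediafs as the media client
sudo mkdir -p /mnt/media
sudo mount -t ceph 198.18.23.9:6789:/media /mnt/media \
-o name=media,fs=mediafs,secretfile=/etc/ceph/media.secret
```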


@ -1,49 +0,0 @@
# add additional master
these steps are required when adding an additional puppet master, as the subject alternative names on the certificate need to change. this requires the old certificate to be revoked and cleaned up, and a new certificate to be generated and signed.
## prepare a new node
- deploy a new node, or identify a spare host with the base role
- change the host's class to roles::infra::puppet::master
- apply puppet until there are no more changes
## revoke the current certificate on the puppet master
```
sudo puppetserver ca clean --certname ausyd1nxvm1023.main.unkin.net
```
## stop the new puppetserver and cleanup revoked certificates
```
sudo systemctl stop puppetserver
sudo rm -f /etc/puppetlabs/puppet/ssl/certs/$(hostname -f).pem
sudo rm -f /etc/puppetlabs/puppet/ssl/private_keys/$(hostname -f).pem
```
## copy the current crl.pem, as puppetserver will overwrite it when starting
```
sudo cp /etc/puppetlabs/puppet/ssl/crl.pem /root/current_crl.pem
```
## request new puppet agent certificate
```
sudo puppet ssl bootstrap
```
## start the puppetserver service and move the crl.pem back in place
```
sudo systemctl start puppetserver
sudo cp /root/current_crl.pem /etc/puppetlabs/puppet/ssl/crl.pem
```
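At this point it is worth confirming the CA still sees the expected certificates and that the new master can run cleanly against itself; a quick check sketch:
```
# list all certificates known to the CA
sudo puppetserver ca list --all
# make sure an agent run completes without ssl errors
sudo puppet agent -t
```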
## troubleshooting
### Issue 1:
```
[sysadmin@ausyd1nxvm2056 ~]$ sudo puppet agent -t
Error: The CRL issued by 'CN=Puppet CA: prodinf01n01.main.unkin.net' is missing
```
Find another puppetserver that IS working, copy its `/etc/puppetlabs/puppet/ssl/crl.pem` to this host, then run puppet again.
### Issue 2:
```
[sysadmin@ausyd1nxvm2097 ~]$ sudo puppet agent -t
Error: Failed to parse CA certificates as PEM
```
The puppet agent's CA cert `/etc/puppetlabs/puppet/ssl/certs/ca.pem` is empty or missing. Grab it from any other host, then run puppet again.
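Both fixes come down to copying the missing PKI file from a known-good host; a sketch for illustration (the source host and copy mechanism are placeholders, use whatever access is available):
```
# issue 1: replace the missing crl.pem from a working puppetserver
sudo scp root@prodinf01n01.main.unkin.net:/etc/puppetlabs/puppet/ssl/crl.pem \
/etc/puppetlabs/puppet/ssl/crl.pem
# issue 2: replace the empty/missing ca.pem from any healthy agent
sudo scp root@prodinf01n01.main.unkin.net:/etc/puppetlabs/puppet/ssl/certs/ca.pem \
/etc/puppetlabs/puppet/ssl/certs/ca.pem
sudo puppet agent -t
```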


@ -1,123 +0,0 @@
# PKI
## root ca
vault secrets enable -path=pki_root pki
vault secrets tune -max-lease-ttl=87600h pki_root
vault write -field=certificate pki_root/root/generate/internal \
common_name="unkin.net" \
issuer_name="UNKIN_ROOTCA_2024" \
ttl=87600h > unkinroot_2024_ca.crt
vault read pki_root/issuer/$(vault list -format=json pki_root/issuers/ | jq -r '.[]') | tail -n 6
vault write pki_root/roles/2024-servers allow_any_name=true
vault write pki_root/config/urls \
issuing_certificates="$VAULT_ADDR/v1/pki_root/ca" \
crl_distribution_points="$VAULT_ADDR/v1/pki_root/crl"
## intermediate
vault secrets enable -path=pki_int pki
vault secrets tune -max-lease-ttl=43800h pki_int
vault write -format=json pki_int/intermediate/generate/internal \
common_name="unkin.net Intermediate Authority" \
issuer_name="UNKIN_VAULTCA_2024" \
| jq -r '.data.csr' > pki_intermediate.csr
vault write -format=json pki_root/root/sign-intermediate \
issuer_ref="UNKIN_ROOTCA_2024" \
csr=@pki_intermediate.csr \
format=pem_bundle ttl="43800h" \
| jq -r '.data.certificate' > intermediate.cert.pem
vault write pki_int/intermediate/set-signed certificate=@intermediate.cert.pem
## create role
vault write pki_int/roles/servers_default \
issuer_ref="$(vault read -field=default pki_int/config/issuers)" \
allow_ip_sans=true \
allowed_domains="unkin.net, *.unkin.net, localhost" \
allow_subdomains=true \
allow_glob_domains=true \
allow_bare_domains=true \
enforce_hostnames=true \
allow_any_name=true \
max_ttl="2160h" \
key_bits=4096 \
country="Australia"
## test generating a domain cert
vault write pki_int/issue/servers_default common_name="test.unkin.net" ttl="24h"
vault write pki_int/issue/servers_default common_name="test.main.unkin.net" ttl="24h"
vault write pki_int/issue/servers_default common_name="*.test.main.unkin.net" ttl="24h"
## remove expired certificates
vault write pki_int/tidy tidy_cert_store=true tidy_revoked_certs=true
# AUTH
## enable approles
vault auth enable approle
# CERTMANAGER
## create certmanager policy and token, limit to puppetmaster
cat <<EOF > certmanager.hcl
path "pki_int/issue/*" {
capabilities = ["create", "update", "read"]
}
path "pki_int/renew/*" {
capabilities = ["update"]
}
path "pki_int/cert/*" {
capabilities = ["read"]
}
EOF
vault policy write certmanager certmanager.hcl
vault write auth/approle/role/certmanager \
bind_secret_id=false \
token_policies="certmanager" \
token_ttl=30s \
token_max_ttl=30s \
token_bound_cidrs="198.18.17.3/32,198.18.13.32/32,198.18.13.33/32,198.18.13.34/32"
## get the certmanager approle id
vault read -field=role_id auth/approle/role/certmanager/role-id
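Since bind_secret_id=false, a client inside the allowed CIDRs only needs the role_id to log in; a hedged sketch of how certmanager-style issuance works end to end with that 30 second token (the common_name is an example):
```
# log in with the approle and capture the short-lived token
ROLE_ID=$(vault read -field=role_id auth/approle/role/certmanager/role-id)
TOKEN=$(vault write -field=token auth/approle/login role_id="$ROLE_ID")
# issue a certificate from the intermediate with that token
VAULT_TOKEN="$TOKEN" vault write pki_int/issue/servers_default \
common_name="host.main.unkin.net" ttl="720h"
```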
# SSH Hostkey Signing
## create ssh engine, key, set ttl
vault secrets enable -path=ssh-host-signer ssh
vault write ssh-host-signer/config/ca generate_signing_key=true
vault secrets tune -max-lease-ttl=87600h ssh-host-signer
## create role
vault write ssh-host-signer/roles/hostrole \
key_type=ca \
algorithm_signer=rsa-sha2-256 \
ttl=87600h \
allow_host_certificates=true \
allowed_domains="unkin.net" \
allow_subdomains=true \
allow_bare_domains=true
## create policy to use hostrole
cat <<EOF > sshsign-host.hcl
path "ssh-host-signer/sign/hostrole" {
capabilities = ["create", "update"]
}
EOF
vault policy write sshsign-host-policy sshsign-host.hcl
vault write auth/approle/role/sshsign-host-role \
bind_secret_id=false \
token_policies="sshsign-host-policy" \
token_ttl=30s \
token_max_ttl=30s \
token_bound_cidrs="198.18.17.3/32,198.18.13.32/32,198.18.13.33/32,198.18.13.34/32"
## get the sshsign-host-role approle id
vault read -field=role_id auth/approle/role/sshsign-host-role/role-id
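A host then logs in with this role, has its public host key signed, and points sshd at the resulting certificate; a sketch using the stock OpenSSH paths (which match the HostCertificate set elsewhere in hiera):
```
# get a short-lived token for the signing role
TOKEN=$(vault write -field=token auth/approle/login \
role_id="$(vault read -field=role_id auth/approle/role/sshsign-host-role/role-id)")
# sign the rsa host key and store the certificate
VAULT_TOKEN="$TOKEN" vault write -field=signed_key ssh-host-signer/sign/hostrole \
cert_type=host public_key=@/etc/ssh/ssh_host_rsa_key.pub \
| sudo tee /etc/ssh/ssh_host_rsa_key-cert.pem
# inspect the certificate and reload sshd
ssh-keygen -L -f /etc/ssh/ssh_host_rsa_key-cert.pem
sudo systemctl reload sshd
```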

doc/vault/setup.md (new file, 48 lines)

@ -0,0 +1,48 @@
# root ca
vault secrets enable -path=pki_root pki
vault write -field=certificate pki_root/root/generate/internal \
common_name="unkin.net" \
issuer_name="unkinroot-2024" \
ttl=87600h > unkinroot_2024_ca.crt
vault read pki_root/issuer/$(vault list -format=json pki_root/issuers/ | jq -r '.[]') | tail -n 6
vault write pki_root/roles/2024-servers allow_any_name=true
vault write pki_root/config/urls \
issuing_certificates="$VAULT_ADDR/v1/pki_root/ca" \
crl_distribution_points="$VAULT_ADDR/v1/pki_root/crl"
# intermediate
vault secrets enable -path=pki_int pki
vault secrets tune -max-lease-ttl=43800h pki_int
vault write -format=json pki_int/intermediate/generate/internal \
common_name="unkin.net Intermediate Authority" \
issuer_name="unkin-dot-net-intermediate" \
| jq -r '.data.csr' > pki_intermediate.csr
vault write -format=json pki_root/root/sign-intermediate \
issuer_ref="unkinroot-2024" \
csr=@pki_intermediate.csr \
format=pem_bundle ttl="43800h" \
| jq -r '.data.certificate' > intermediate.cert.pem
vault write pki_int/intermediate/set-signed certificate=@intermediate.cert.pem
# create role
vault write pki_int/roles/unkin-dot-net \
issuer_ref="$(vault read -field=default pki_int/config/issuers)" \
allowed_domains="unkin.net" \
allow_subdomains=true \
max_ttl="2160h"
# test generating a domain cert
vault write pki_int/issue/unkin-dot-net common_name="test.unkin.net" ttl="24h"
vault write pki_int/issue/unkin-dot-net common_name="test.main.unkin.net" ttl="24h"
vault write pki_int/issue/unkin-dot-net common_name="*.test.main.unkin.net" ttl="24h"
# remove expired certificates
vault write pki_int/tidy tidy_cert_store=true tidy_revoked_certs=true
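To confirm the chain is wired up correctly, issue a throwaway certificate and inspect it; a short sketch using the unkin-dot-net role defined above:
```
# issue a 24h cert and check subject, issuer and validity with openssl
vault write -format=json pki_int/issue/unkin-dot-net \
common_name="test.main.unkin.net" ttl="24h" \
| jq -r '.data.certificate' | openssl x509 -noout -subject -issuer -dates
# fetch the intermediate CA in pem form for distribution to clients
curl -s "$VAULT_ADDR/v1/pki_int/ca/pem" -o unkin_intermediate_ca.pem
```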


@ -1,6 +1,6 @@
---
profiles::accounts::sysadmin::password: ENC[PKCS7,MIIBqQYJKoZIhvcNAQcDoIIBmjCCAZYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAoS7GyofFaXBNTWU+GtSiz4eCX/9j/sh3fDDRgOgNv1qpcQ87ZlTTenbHo9lxeURxKQ2HVVt7IsrBo/SC/WgipAKnliRkkIvo7nfAs+i+kEE8wakjAs0DcB4mhqtIZRuBkLG2Nay//DcG6cltVkbKEEKmKLMkDFZgTWreOZal8nDljpVe1S8QwtwP4/6hKTef5xsOnrisxuffWTXvwYJhj/VXrjdoH7EhtHGLybzEalglkVHEGft/WrrD/0bwJpmR0RegWI4HTsSvGiHgvf5DZJx8fXPZNPnicGtlfA9ccQPuVo17bY4Qf/WIc1A8Ssv4kHSbNIYJKRymI3UFb0Z4wzBsBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBBBxDLb6pCGbittkcX6asd/gEBmMcUNupDjSECq5H09YA70eVwWWe0fBqxTxrr2cXCXtRKFvOk8SJmL0xHAWodaLN9+krTWHJcWbAK8JXEPC7rn]
profiles::accounts::root::password: ENC[PKCS7,MIIB2gYJKoZIhvcNAQcDoIIByzCCAccCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAIgzGQLoHrm7JSnWG4vdAtxSuETmnqbV7kUsQS8WCRUwFGenDFkps+OGMnOGEHLzMJzXihLfgfdWwTAI4fp48M+zhTMo9TQkdzZqtbFk3+RjV2jDF0wfe4kVUIpReOq+EkaDSkoRSG8V6hWvszhDHUrJBC9eDhomL0f3xNAWxmy5EIX/uMEvg9Ux5YX+E6k2pEIKnHNoVIaWDojlofSIzIqTSS7l3jQtJhs3YqBzLL1DsoF1kdn+Rwl5kcsKkhV+vzl76wEbpYVZW8lu4bFfP6QHMLPcep2tuUDMCDvARRXD7YyZcAtS7aMuqll+BLAszpWxAA7EU2hgvdr6t2uyVCTCBnAYJKoZIhvcNAQcBMB0GCWCGSAFlAwQBKgQQ4D5oDoyE6LPdjpVtGPoJD4BwfnQ9ORjYFPvHQmt+lgU4jMqh6BhqP0VN3lqVfUpOmiVMIqkO/cYtlwVLKEg36TPCHBSpqvhuahSF5saCVr8JY3xWOAmTSgnNjQOPlGrPnYWYbuRLxVRsU+KUkpAzR0c6VN0wYi6bI85Pcv8yHF3UYA==]
profiles::accounts::root::password: ENC[PKCS7,MIIBeQYJKoZIhvcNAQcDoIIBajCCAWYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAM79PRxeAZHrDcSm4eSFqU94/LjuSbdUmJWivX/Pa8GumoW2e/PT9nGHW3p98zHthMgCglk52PECQ+TBKjxr+9dTyNK5ePG6ZJEqSHNRqsPGm+kfQj/hlTmq8vOBaFM5GapD1iTHs5JFbGngI56swKBEVXW9+Z37BjQb2xJuyLsu5Bo/tA0BaOKuCtjq1a6E38bOX+nJ+YF1uZgV9ofAEh1YvkcTmnEWYXFRPWd7AaNcWn03V2pfhGqxc+xydak620I47P+FE+qIY72+aQ6tmLU3X9vyA1HLF2Tv572l4a2i+YIk6nAgQdi+hQKznqNL9M9YV+s1AcmcKLT7cfLrjsjA8BgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCMWrdCWBQgtW3NOEpERwP+gBA3KDiqe4pQq6DwRfsEXQNZ]
profiles::consul::client::secret_id_salt: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAS7pNFRX4onccFaR87zB/eFFORuF22j6xqjyeAqqjgEduYhkt6w5kkz+YfUoHUesU0Q6F2p6HrCSZ8yAsx5M25NCiud9P4hIpjKmOZ4zCNO7uhATh4AQDYw3BrdRwfO+c6jOl5wOiNLCfDBJ0sFT3akCvcuPS1xIoRJq4Gyn+uCbOsMbvSl25ld2xKt1/cqs8gc1d8mkpjwWto7t+qZSUFMCehTbehH3G4a3Q5rvfBoNwv42Wbs676BDcCurDaAzHNqE7pDbOWhGuVOBl+q+BU0Ri/CRkGcTViN9fr8Dc9SveVC6EPsMbw+05/8/NlfzQse3KAwQ34nR9tR2PQw5qEzBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBB7LywscQtF7cG2nomfEsu9gDDVqJBFP1jAX2eGZ2crYS5gnBcsRwhc0HNo2/WWdhZprMW+vEJOOGXDelI53NxA3o0=]
profiles::consul::token::node_editor::secret_id: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAO8IIF2r18dFf0bVKEwjJUe1TmXHH0AIzsQHxkHwV7d37kvH1cY9rYw0TtdHn7GTxvotJG7GZbWvbunpBs1g2p2RPADiM6TMhbO8mJ0tAWLnMk7bQ221xu8Pc7KceqWmU17dmgNhVCohyfwJNqbA756TlHVgxGA0LtNrKoLOmgKGXAL1VYZoKEQnWq7xOpO+z3e1UfjoO6CvX/Od2hGYfUkHdro8mwRw4GFKzU7XeKFdAMUGpn5rVmY3xe+1ARXwGFaSrTHzk2n85pvwhPRlQ+OwqzyT19Qo2FNeAO6RoCRIFTtqbsjTWPUlseHIhw4Q5bHO1I0Mrlm5IHDESw/22IzBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCEe9wD72qxnpeq5nCi/d7BgDCP29sDFObkFTabt2uZ/nF9MT1g+QOrrdFKgnG6ThnwH1hwpZPsSVgIs+yRQH8laB4=]
profiles::consul::server::acl_tokens_initial_management: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAi1UH7AZirJ1PdxWy+KEgS5ufm0wbn2xy9rkg14hKYpcVjBa4pOZpSLMGMiiUpBIqBytDMZM4ezYa/luktpkBImJbM/TE16beGtsacQGA+9eZk2Tihs9GR2qbAQiu5lLITiDlwNnf0GeWdqHM8CTeD68DczQF320d9U14/k6pG/7z+w/MGLcjsQoSuOFTm42JVn1BI46t1CYSCHMXQc/9Tfs+FzI+vumohI8DxAYBIuyzU5HBX/MntAsvD/yixMJS1pZL9WwgqZJC/wK34rVRB39DpxWf/WROrI+WLuSJwr7WBjaeF9Ju+89WKCgsI53EWhFTj8GgDZm/jqPoE478NjBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBAoACRzJdQKNYXZv6cghFIIgDAzB81DMcuY815nb8POtZpiA06jT/068AoZmSctHoFK/zW9tY229N5r1Tb+WHElqLk=]


@ -3,10 +3,16 @@ lookup_options:
hiera_classes:
merge:
strategy: deep
profiles::packages::include:
profiles::packages::install:
merge:
strategy: deep
profiles::packages::exclude:
profiles::packages::install_exclude:
merge:
strategy: deep
profiles::packages::remove:
merge:
strategy: deep
profiles::packages::remove_exclude:
merge:
strategy: deep
profiles::pki::vault::alt_names:
@ -36,12 +42,6 @@ lookup_options:
profiles::haproxy::server::listeners:
merge:
strategy: deep
profiles::accounts::root::sshkeys:
merge:
strategy: deep
profiles::accounts::sysadmin::sshkeys:
merge:
strategy: deep
haproxy::backend:
merge:
strategy: deep
@ -108,108 +108,26 @@ lookup_options:
profiles::nginx::simpleproxy::nginx_aliases:
merge:
strategy: deep
networking::interfaces:
merge:
strategy: deep
networking::interface_defaults:
merge:
strategy: deep
networking::routes:
merge:
strategy: deep
networking::route_defaults:
merge:
strategy: deep
ssh::server::options:
merge:
strategy: deep
mysql::db:
merge:
strategy: deep
profiles::ceph::client::keyrings:
merge:
strategy: deep
profiles::ceph::conf::config:
merge:
strategy: deep
profiles::nginx::simpleproxy::locations:
merge:
strategy: deep
certbot::client::domains:
merge:
strategy: deep
keepalived::vrrp_script:
merge:
strategy: deep
keepalived::vrrp_instance:
merge:
strategy: deep
profiles::etcd::node::initial_cluster_token:
convert_to: Sensitive
sysctl::base::values:
merge:
strategy: deep
limits::entries:
merge:
strategy: deep
zfs::zpools:
merge:
strategy: deep
zfs::datasets:
merge:
strategy: deep
rke2::config_hash:
merge:
strategy: deep
postfix::configs:
merge:
strategy: deep
postfix::maps:
merge:
strategy: deep
postfix::virtuals:
merge:
strategy: deep
stalwart::postgresql_password:
convert_to: Sensitive
stalwart::s3_secret_key:
convert_to: Sensitive
stalwart::fallback_admin_password:
convert_to: Sensitive
facts_path: '/opt/puppetlabs/facter/facts.d'
hiera_include:
hiera_classes:
- timezone
- networking
- ssh::server
- profiles::accounts::rundeck
- limits
- sysctl::base
- exporters::node_exporter
profiles::ntp::client::ntp_role: 'roles::infra::ntp::server'
profiles::ntp::client::use_ntp: 'region'
profiles::ntp::client::peers:
- 0.au.pool.ntp.org
- 1.au.pool.ntp.org
- 2.au.pool.ntp.org
- 3.au.pool.ntp.org
- 0.pool.ntp.org
- 1.pool.ntp.org
- 2.pool.ntp.org
- 3.pool.ntp.org
consul::install_method: 'package'
consul::manage_repo: false
consul::bin_dir: /usr/bin
profiles::base::puppet_servers:
- 'prodinf01n01.main.unkin.net'
vault::install_method: 'repo'
vault::manage_repo: false
vault::bin_dir: /usr/bin
vault::manage_service_file: true
vault::manage_config_dir: true
vault::disable_mlock: false
profiles::dns::base::nameservers:
- 198.18.19.16
profiles::dns::master::basedir: '/var/named/sources'
#profiles::dns::base::ns_role: 'roles::infra::dns::resolver'
#profiles::dns::base::use_ns: 'region'
profiles::dns::base::ns_role: 'roles::infra::dns::resolver'
profiles::dns::base::use_ns: 'region'
profiles::consul::server::members_role: roles::infra::storage::consul
profiles::consul::token::node_editor::accessor_id: '024e27bd-c5bb-41e7-a578-b766509e11bc'
profiles::consul::client::members_lookup: true
@ -224,74 +142,58 @@ profiles::consul::client::node_rules:
- resource: node
segment: ''
disposition: read
- resource: service
segment: node_exporter
disposition: write
profiles::packages::include:
bash-completion: {}
bzip2: {}
ccze: {}
curl: {}
dstat: {}
expect: {}
gzip: {}
git: {}
htop: {}
inotify-tools: {}
iotop: {}
jq: {}
lz4: {}
mtr: {}
ncdu: {}
neovim: {}
p7zip: {}
pbzip2: {}
pigz: {}
pv: {}
python3.11: {}
rsync: {}
screen: {}
socat: {}
strace: {}
sysstat: {}
tar: {}
tmux: {}
traceroute: {}
unzip: {}
vim: {}
vnstat: {}
wget: {}
zsh: {}
zstd: {}
iwl100-firmware:
ensure: absent
iwl1000-firmware:
ensure: absent
iwl105-firmware:
ensure: absent
iwl135-firmware:
ensure: absent
iwl2000-firmware:
ensure: absent
iwl2030-firmware:
ensure: absent
iwl3160-firmware:
ensure: absent
iwl5000-firmware:
ensure: absent
iwl5150-firmware:
ensure: absent
iwl6000-firmware:
ensure: absent
iwl6000g2a-firmware:
ensure: absent
iwl6050-firmware:
ensure: absent
iwl7260-firmware:
ensure: absent
puppet7-release:
ensure: absent
profiles::packages::install:
- bash-completion
- bzip2
- ccze
- curl
- dstat
- expect
- gzip
- git
- htop
- inotify-tools
- iotop
- jq
- lz4
- mtr
- ncdu
- neovim
- p7zip
- pbzip2
- pigz
- pv
- python3.11
- rsync
- screen
- socat
- strace
- sysstat
- tmux
- traceroute
- unzip
- vim
- vnstat
- wget
- zsh
- zstd
profiles::packages::remove:
- iwl100-firmware
- iwl1000-firmware
- iwl105-firmware
- iwl135-firmware
- iwl2000-firmware
- iwl2030-firmware
- iwl3160-firmware
- iwl5000-firmware
- iwl5150-firmware
- iwl6000-firmware
- iwl6000g2a-firmware
- iwl6050-firmware
- iwl7260-firmware
- puppet7-release
profiles::base::scripts::scripts:
puppet: puppetwrapper.py
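The profiles::packages::install and profiles::packages::remove keys above are plain arrays that are deep-merged across the hierarchy (per the lookup_options earlier in this file), so the effective list for a node can be previewed from the puppetserver; a sketch (the node name is an example):
```
# show the merged install list a given node would receive
puppet lookup profiles::packages::install --merge deep \
--node ausyd1nxvm1023.main.unkin.net --environment production
# explain which hiera layer contributes each entry of the remove list
puppet lookup profiles::packages::remove --merge deep --explain \
--node ausyd1nxvm1023.main.unkin.net
```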
@ -310,42 +212,9 @@ profiles::puppet::client::dns_alt_names:
puppetdbapi: puppetdbapi.query.consul
puppetdbsql: puppetdbsql.service.au-syd1.consul
exporters::node_exporter::enable: true
exporters::node_exporter::cleanup_old_node_exporter: true
prometheus::node_exporter::export_scrape_job: true
prometheus::systemd_exporter::export_scrape_job: true
ssh::server::storeconfigs_enabled: false
ssh::server::options:
Protocol: '2'
ListenAddress:
- '127.0.0.1'
- '%{facts.networking.ip}'
SyslogFacility: 'AUTHPRIV'
HostKey:
- /etc/ssh/ssh_host_rsa_key
- /etc/ssh/ssh_host_ecdsa_key
- /etc/ssh/ssh_host_ed25519_key
HostCertificate: /etc/ssh/ssh_host_rsa_key-cert.pem
AuthorizedKeysFile: .ssh/authorized_keys
PermitRootLogin: no
PasswordAuthentication: no
ChallengeResponseAuthentication: no
PubkeyAuthentication: yes
GSSAPIAuthentication: yes
GSSAPICleanupCredentials: yes
UsePAM: yes
X11Forwarding: no
PrintMotd: no
AcceptEnv:
- LANG LC_CTYPE LC_NUMERIC LC_TIME LC_COLLATE LC_MONETARY LC_MESSAGES
- LC_PAPER LC_NAME LC_ADDRESS LC_TELEPHONE LC_MEASUREMENT
- LC_IDENTIFICATION LC_ALL LANGUAGE
- XMODIFIERS
Subsystem: sftp /usr/libexec/openssh/sftp-server
profiles::ssh::knownhosts::lines:
- '@cert-authority * ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC1HD97vYxLTniE4qNpGuftUlvmkEXIuX8+7nbENv/IzsGUghEDRtyThjQ7ojNKIsQ7f8wXr0gMcI+fAPfrbcOMHCAoYMomikwL0b3h95SZI40q3CyM+0DMnwiVVDX6C1QxkO2Rv9cszSkCa85NotJhXiUuTBI9BFcRPy+mAhbpAru+bfypYofI0wW97XNTl8Jgwmni5MgutBIQAokFIn5ux8iWxndCH3AqDtmkwC5DfQeQ+wZx7rkwqJEpJffQzrjb1gIM6P9hDCVBBVPh/3o80IJ69rFWrJAZUb+JpG4cXJH0NcSW+wqc3JCT/x3q8VlHwOTXSlNNKtOJCRx73mB8e1XTTy2a9FgpKDDg5XQXWHAViJDz1RTRL9gRefMylRgKz4bXoTuY9kJWM8hPTyUejtukbJThlBJc3OmDxBZBF7F0iqB11pHexok43OCEiANodVa36eWu9/5X032Vm48fZ1/akDPY/NSy3wAn7kwut+A0/JAHFHASrq+1mt9YurkJegI+YHXO6eEWpBIpmI7ORHJbGL4MhkHrxYzVamuP8CkU7tXzsv138+wpOcRHNp9yJY4PT40BZkRf/O3O+jt3pj9Dj8rvgywF2W6hFzywh3Y78upOprRkQlQtHfsI8EyrYI8/hUw2u3H+3yPXh3YjWfqvWVG1BRLRHBV7m90uaw=='
profiles::base::groups::local:
admins:
ensure: present
@ -361,109 +230,39 @@ sudo::configs:
profiles::accounts::sysadmin::sshkeys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDZ8SRLlPiDylBpdWR9LpvPg4fDVD+DZst4yRPFwMMhta4mnB1H9XuvZkptDhXywWQ7QIcqa2WbhCen0OQJCtwn3s7EYtacmF5MxmwBYocPoK2AArGuh6NA9rwTdLrPdzhZ+gwe88PAzRLNzjm0ZBR+mA9saMbPJdqpKp0AWeAM8QofRQAWuCzQg9i0Pn1KDMvVDRHCZof4pVlHSTyHNektq4ifovn0zhKC8jD/cYu95mc5ftBbORexpGiQWwQ3HZw1IBe0ZETB1qPIPwsoJpt3suvMrL6T2//fcIIUE3TcyJKb/yhztja4TZs5jT8370G/vhlT70He0YPxqHub8ZfBv0khlkY93VBWYpNGJwM1fVqlw7XbfBNdOuJivJac8eW317ZdiDnKkBTxapThpPG3et9ib1HoPGKRsd/fICzNz16h2R3tddSdihTFL+bmTCa6Lo+5t5uRuFjQvhSLSgO2/gRAprc3scYOB4pY/lxOFfq3pU2VvSJtRgLNEYMUYKk= ben@unkin.net
profiles::accounts::rundeck::sshkeys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQD4F7VcorbGpyZzBFexz7c/o1JBscrl7hZU0UkWV7fq6YLizW0r6fOzD99hMwu1kdYCjPxbvuUSDEHfyBIp2EgLWU6wFVoufQqlMyOV85+ivQZUc1VNV+X9T+U4v3u/01hkAmlpXtbkwhMSR4Wi+tdABd04+D3CuMDM37mvnFmBBmi41X4Mr1rJhOQumn1XHQ7EYbsdw2mxfEVVeWpZIHz5BjNKSGzEIAYZbFt6s0Y7X3J5RT+Gjqmu043Tc8nNIUFlR9E10qd3Euf9RiBYxBx3z+yfOzJPBzWNBSHv1+PIbO5Mq+z5JaAfoFZO41L7nw+FjV6JJUCVLr6Vq+bCxyA7LW4Oq9ZahSrt/vrT0kTa0tA5U9bqK6e7pB//dm7PzoROtTq0XksV8RseA/fvIje20uaN1z9dynx+UcbszXu9pQ5GIg1o7b5DEi3OZHJwpgdudiCyEeR4+00G0z4PjpEMnTSMHAJ53WxtjzrPAOBnAmPE7hPu4coU+XrCWEXAvRMloJmca68e+zFX7VvFK82KVDuQ99vQ6w4X73IESKoLzyAVxpelwHaDG4fN+zqYfqubVQU1L5cUeYKxqm5r3Us6VvMaYs1ZMUmDGXHOq4FNhGUJYxSjkLvunM6qyAAJQCd6Pw/2TV3UQVerbouGOZaeBLvRguHWSbDrO99Zu+t87w== rundeck_runner
networking::interface_defaults:
ensure: present
family: inet
method: static
netmask: 255.255.255.0
onboot: true
networking::route_defaults:
ensure: present
interface: eth0
netmask: 0.0.0.0
network: default
# logging:
victorialogs::client::journald::enable: true
victorialogs::client::journald::inserturl: https://vlinsert.service.consul:9428/insert/journald
# FIXME these are for the proxmox ceph cluster
profiles::ceph::client::fsid: 7f7f00cb-95de-498c-8dcc-14b54e4e9ca8
profiles::ceph::client::mons:
- 10.18.15.1
- 10.18.15.2
- 10.18.15.3
profiles::ceph::conf::config:
global:
auth_client_required: 'cephx'
auth_cluster_required: 'cephx'
auth_service_required: 'cephx'
fsid: 'de96a98f-3d23-465a-a899-86d3d67edab8'
mon_allow_pool_delete: true
mon_initial_members: 'prodnxsr0009,prodnxsr0010,prodnxsr0011,prodnxsr0012,prodnxsr0013'
mon_host: '198.18.23.9,198.18.23.10,198.18.23.11,198.18.23.12,198.18.23.13'
ms_bind_ipv4: true
ms_bind_ipv6: false
osd_crush_chooseleaf_type: 1
osd_pool_default_min_size: 2
osd_pool_default_size: 3
osd_pool_default_pg_num: 128
public_network: >
198.18.23.1/32,198.18.23.2/32,198.18.23.3/32,198.18.23.4/32,
198.18.23.5/32,198.18.23.6/32,198.18.23.7/32,198.18.23.8/32,
198.18.23.9/32,198.18.23.10/32,198.18.23.11/32,198.18.23.12/32,
198.18.23.13/32
client.rgw.ausyd1nxvm2115:
rgw_realm: unkin
rgw_zonegroup: au
rgw_zone: syd1
client.rgw.ausyd1nxvm2116:
rgw_realm: unkin
rgw_zonegroup: au
rgw_zone: syd1
client.rgw.ausyd1nxvm2117:
rgw_realm: unkin
rgw_zonegroup: au
rgw_zone: syd1
client.rgw.ausyd1nxvm2118:
rgw_realm: unkin
rgw_zonegroup: au
rgw_zone: syd1
client.rgw.ausyd1nxvm2119:
rgw_realm: unkin
rgw_zonegroup: au
rgw_zone: syd1
mds:
keyring: /var/lib/ceph/mds/ceph-$id/keyring
mds_standby_replay: true
mds.prodnxsr0009-1:
host: prodnxsr0009
mds.prodnxsr0009-2:
host: prodnxsr0009
mds.prodnxsr0010-1:
host: prodnxsr0010
mds.prodnxsr0010-2:
host: prodnxsr0010
mds.prodnxsr0011-1:
host: prodnxsr0011
mds.prodnxsr0011-2:
host: prodnxsr0011
mds.prodnxsr0012-1:
host: prodnxsr0012
mds.prodnxsr0012-2:
host: prodnxsr0012
mds.prodnxsr0013-1:
host: prodnxsr0013
mds.prodnxsr0013-2:
host: prodnxsr0013
#profiles::base::hosts::additional_hosts:
# - ip: 198.18.17.9
# hostname: prodinf01n09.main.unkin.net
# aliases:
# - prodinf01n09
# - ntp01.main.unkin.net
# - ip: 198.18.17.10
# hostname: prodinf01n10.main.unkin.net
# aliases:
# - prodinf01n10
# - ntp02.main.unkin.net
# - ip: 198.18.17.22
# hostname: prodinf01n22.main.unkin.net
# aliases:
# - prodinf01n22
# - repos.main.unkin.net
profiles::base::hosts::additional_hosts:
- ip: 198.18.17.3
hostname: prodinf01n01.main.unkin.net
aliases:
- prodinf01n01
- puppet
- puppetmaster
- puppetca
- ip: 198.18.17.4
hostname: prodinf01n04.main.unkin.net
aliases:
- prodinf01n04
- ip: 198.18.17.5
hostname: prodinf01n05.main.unkin.net
aliases:
- prodinf01n05
- ip: 198.18.17.6
hostname: prodinf01n06.main.unkin.net
aliases:
- prodinf01n06
- ip: 198.18.17.9
hostname: prodinf01n09.main.unkin.net
aliases:
- prodinf01n09
- ntp01.main.unkin.net
- ip: 198.18.17.10
hostname: prodinf01n10.main.unkin.net
aliases:
- prodinf01n10
- ntp02.main.unkin.net
- ip: 198.18.17.22
hostname: prodinf01n22.main.unkin.net
aliases:
- prodinf01n22
- repos.main.unkin.net


@ -1,9 +1,2 @@
---
timezone::timezone: 'Australia/Darwin'
profiles_dns_upstream_forwarder_unkin:
- 198.18.17.23
- 198.18.17.24
profiles_dns_upstream_forwarder_consul:
- 198.18.17.34
- 198.18.17.35
- 198.18.17.36


@ -1 +1,52 @@
---
profiles::dns::resolver::zones:
main.unkin.net-forward:
domain: 'main.unkin.net'
zone_type: 'forward'
forwarders:
- 198.18.17.23
- 198.18.17.24
forward: 'only'
13.18.198.in-addr.arpa-forward:
domain: '13.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders:
- 198.18.17.23
- 198.18.17.24
forward: 'only'
14.18.198.in-addr.arpa-forward:
domain: '14.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders:
- 198.18.17.23
- 198.18.17.24
forward: 'only'
15.18.198.in-addr.arpa-forward:
domain: '15.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders:
- 198.18.17.23
- 198.18.17.24
forward: 'only'
16.18.198.in-addr.arpa-forward:
domain: '16.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders:
- 198.18.17.23
- 198.18.17.24
forward: 'only'
17.18.198.in-addr.arpa-forward:
domain: '17.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders:
- 198.18.17.23
- 198.18.17.24
forward: 'only'
consul-forward:
domain: 'consul'
zone_type: 'forward'
forwarders:
- 198.18.17.34
- 198.18.17.35
- 198.18.17.36
forward: 'only'
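These are forward-only stub zones, so once bind reloads, resolution can be spot-checked against the resolver; a sketch assuming named answers on localhost (names and addresses taken from elsewhere in this change):
```
# forward zone should follow the 198.18.17.23/24 forwarders
dig @127.0.0.1 +short prodinf01n01.main.unkin.net
# reverse zone lookup for the same host
dig @127.0.0.1 +short -x 198.18.17.3
# the consul zone is forwarded to the consul servers
dig @127.0.0.1 +short puppetdbapi.query.consul
```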


@ -1,4 +1,3 @@
---
certmanager::vault_token: ENC[PKCS7,MIIBygYJKoZIhvcNAQcDoIIBuzCCAbcCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAWh7bsttz/JCBo/CPoCgA2doo3jO6jT6NsOoE3/06W2IW+Ij6KHKYILMkG3tS4NAegMI48QR9n++4Xa7u+97w1HO4ENpfLrkuKUcWUFCxxb2OdWhxucIlt3Ay/2+tofOSvqiRKeEISBtOK//Q1a4Iu5GwEP+lvDQ5rcoS0dryNie/okXaLratWOsmctJ6LFuUw5siCcFyUzfvr2ROsB14YoF989np+X1dJqBWxcLmbVNKx766GrRhb1WGeF0qxounCmWEKGt0zY4Zk27KNFlFu7XByDWZoSCVCMvkQaRKhvdNA39Y9vscZJGPGFhz+qKPoeqwUidz0IY51CaFSXewmzCBjAYJKoZIhvcNAQcBMB0GCWCGSAFlAwQBKgQQC+e2iOlFLlr9inVU8nEVWIBgqb0u/ICsLtxZqOpN9OIFWl+4hVrvTo24JzTc1jMSCONeL4Ab7jtTMbsweE9zUf6XlwhHLXfxfg7FL3WBsOWCUBXIAh338cZCXUGX7m0Qvtgg3VTEbTNDJhZle8Sjo6Gl]
certmanager::role_id: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAJuE+uzgQaBRUXBCigckEo1j+UxxbiUGrzdf/B9K7XPdVxZh6TzYLpBgNnyaT6vLo0boX4uRD/By0gT5R/2qcXD6d/j+fh517Ctk4d2uO64f0vH3PzyyOBalsNtcCdPiV3q/xGqzQSHhPiNkFEjDvMBz5p53UjfKA6gAiPrLklp4rN/NVyiLBw20NeIqbL25VdkQa13ViS0Gm/eUQu7a2xQ1dvQFWWfuLaQxO0dh8L0ynkfmWKIjaiD5412Z8hYURu0otxbqVDdIbEMx5xQsXnFKeN93yHmgs7a7M6fLdp9jh+G8B+IlK1W7/9v2+RT0/yI3ZgWHVTvDRhMHuPGBjfTBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBC5avtOPp9N65U1ILQENnvAgDBqI8XAjqbWIvXHqOEiKYdu+co0EEtsHR1v5xAeCmj/ZA6MLeKFlAVJbvpyCpzjons=]
sshsignhost::role_id: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAT86C/InXrgDtXCc9NFze91YMvjTNDqWgv4uzPFI48clOeQyD6x+vOHWP2yNp1OyNHcYLCiLyrv+rSIQyXlLnbeyZWV+7kXIon057Tp7l0BxWtd0hjQEcyWzqQQE7R264C8/qKRak81LIu6RshWZAchYo/BMPuOqVr0m+1zDwOV9JwZc3bpexzsl57CK5pesOrpfdvnd/xrOoEMR+P+C5PC6QLtQl3zkOD3N9kP6HqwbhWH5ZBPy88Kc+5kYM6QVpQSjFIIHK1SWsN0VZoxpkuFlFXB5KHDgZtg3kxrofzjQghl41zJBCDq9Z5oZ+2b1p/j/9jCASyp/ju68H5WXzbzBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCf4Nqp6SAl/XjmhPDnTvVJgDCdDhxWaChhjJ3eRcW4NTFgf3zm7Bu65za0li26FKuKks00duF4zebfNw7ZUVsYtIU=]


@ -1,9 +1,2 @@
---
timezone::timezone: 'Australia/Sydney'
certbot::client::webserver: ausyd1nxvm2057.main.unkin.net
profiles_dns_upstream_forwarder_unkin:
- 198.18.19.15
profiles_dns_upstream_forwarder_consul:
- 198.18.19.14
profiles_dns_upstream_forwarder_k8s:
- 198.18.19.20


@ -1 +1,52 @@
---
profiles::dns::resolver::zones:
main.unkin.net-forward:
domain: 'main.unkin.net'
zone_type: 'forward'
forwarders:
- 198.18.13.14
- 198.18.13.15
forward: 'only'
13.18.198.in-addr.arpa-forward:
domain: '13.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders:
- 198.18.13.14
- 198.18.13.15
forward: 'only'
14.18.198.in-addr.arpa-forward:
domain: '14.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders:
- 198.18.13.14
- 198.18.13.15
forward: 'only'
15.18.198.in-addr.arpa-forward:
domain: '15.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders:
- 198.18.13.14
- 198.18.13.15
forward: 'only'
16.18.198.in-addr.arpa-forward:
domain: '16.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders:
- 198.18.13.14
- 198.18.13.15
forward: 'only'
17.18.198.in-addr.arpa-forward:
domain: '17.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders:
- 198.18.13.14
- 198.18.13.15
forward: 'only'
consul-forward:
domain: 'consul'
zone_type: 'forward'
forwarders:
- 198.18.13.19
- 198.18.13.20
- 198.18.13.21
forward: 'only'


@ -1,31 +1,4 @@
---
hiera_include:
- keepalived
# keepalived
profiles::haproxy::dns::ipaddr: '198.18.13.250'
profiles::haproxy::dns::vrrp_cnames:
- sonarr.main.unkin.net
- radarr.main.unkin.net
- lidarr.main.unkin.net
- readarr.main.unkin.net
- prowlarr.main.unkin.net
- nzbget.main.unkin.net
keepalived::vrrp_script:
check_haproxy:
script: '/usr/bin/killall -0 haproxy'
keepalived::vrrp_instance:
VI_250:
interface: 'eth0'
virtual_router_id: 250
auth_type: 'PASS'
auth_pass: 'quiiK7oo'
virtual_ipaddress: '198.18.13.250/32'
track_script:
- check_haproxy
# mappings
profiles::haproxy::mappings:
fe_http:
@ -33,27 +6,11 @@ profiles::haproxy::mappings:
mappings:
- 'au-syd1-pve.main.unkin.net be_ausyd1pve_web'
- 'au-syd1-pve-api.main.unkin.net be_ausyd1pve_api'
- 'sonarr.main.unkin.net be_sonarr'
- 'radarr.main.unkin.net be_radarr'
- 'lidarr.main.unkin.net be_lidarr'
- 'readarr.main.unkin.net be_readarr'
- 'prowlarr.main.unkin.net be_prowlarr'
- 'nzbget.main.unkin.net be_nzbget'
- 'jellyfin.main.unkin.net be_jellyfin'
- 'fafflix.unkin.net be_jellyfin'
fe_https:
ensure: present
mappings:
- 'au-syd1-pve.main.unkin.net be_ausyd1pve_web'
- 'au-syd1-pve-api.main.unkin.net be_ausyd1pve_api'
- 'sonarr.main.unkin.net be_sonarr'
- 'radarr.main.unkin.net be_radarr'
- 'lidarr.main.unkin.net be_lidarr'
- 'readarr.main.unkin.net be_readarr'
- 'prowlarr.main.unkin.net be_prowlarr'
- 'nzbget.main.unkin.net be_nzbget'
- 'jellyfin.main.unkin.net be_jellyfin'
- 'fafflix.unkin.net be_jellyfin'
profiles::haproxy::frontends:
fe_http:
@ -63,15 +20,7 @@ profiles::haproxy::frontends:
fe_https:
options:
acl:
- 'acl_ausyd1pve req.hdr(host) -i au-syd1-pve.main.unkin.net'
- 'acl_sonarr req.hdr(host) -i sonarr.main.unkin.net'
- 'acl_radarr req.hdr(host) -i radarr.main.unkin.net'
- 'acl_lidarr req.hdr(host) -i lidarr.main.unkin.net'
- 'acl_readarr req.hdr(host) -i readarr.main.unkin.net'
- 'acl_prowlarr req.hdr(host) -i prowlarr.main.unkin.net'
- 'acl_nzbget req.hdr(host) -i nzbget.main.unkin.net'
- 'acl_jellyfin req.hdr(host) -i jellyfin.main.unkin.net'
- 'acl_fafflix req.hdr(host) -i fafflix.unkin.net'
- 'acl_ausyd1pve req.hdr(host) -i https://au-syd1-pve.main.unkin.net'
- 'acl_internalsubnets src 198.18.0.0/16 10.10.12.0/24'
use_backend:
- "%[req.hdr(host),lower,map(/etc/haproxy/fe_https.map,be_default)]"
@ -79,14 +28,6 @@ profiles::haproxy::frontends:
- 'deny if { hdr_dom(host) -i au-syd1-pve.main.unkin.net } !acl_internalsubnets'
http-response:
- 'set-header X-Frame-Options DENY if acl_ausyd1pve'
- 'set-header X-Frame-Options DENY if acl_sonarr'
- 'set-header X-Frame-Options DENY if acl_radarr'
- 'set-header X-Frame-Options DENY if acl_lidarr'
- 'set-header X-Frame-Options DENY if acl_readarr'
- 'set-header X-Frame-Options DENY if acl_prowlarr'
- 'set-header X-Frame-Options DENY if acl_nzbget'
- 'set-header X-Frame-Options DENY if acl_jellyfin'
- 'set-header X-Frame-Options DENY if acl_fafflix'
- 'set-header X-Content-Type-Options nosniff'
- 'set-header X-XSS-Protection 1;mode=block'
@ -122,152 +63,17 @@ profiles::haproxy::backends:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_sonarr:
description: Backend for au-syd1 sonarr
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /consul/health
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_radarr:
description: Backend for au-syd1 radarr
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /consul/health
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_lidarr:
description: Backend for au-syd1 lidarr
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /consul/health
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_readarr:
description: Backend for au-syd1 readarr
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /consul/health
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_prowlarr:
description: Backend for au-syd1 prowlarr
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /consul/health
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_nzbget:
description: Backend for au-syd1 nzbget
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /consul/health
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_jellyfin:
description: Backend for au-syd1 jellyfin
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
profiles::haproxy::certlist::enabled: true
profiles::haproxy::certlist::certificates:
- /etc/pki/tls/letsencrypt/au-syd1-pve.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/au-syd1-pve-api.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/sonarr.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/radarr.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/lidarr.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/readarr.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/prowlarr.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/nzbget.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/fafflix.unkin.net/fullchain_combined.pem
- /etc/pki/tls/vault/certificate.pem
# additional altnames
profiles::pki::vault::alt_names:
- au-syd1-pve.main.unkin.net
- au-syd1-pve-api.main.unkin.net
- jellyfin.main.unkin.net
# additional cnames
profiles::haproxy::dns::cnames:
- au-syd1-pve.main.unkin.net
- au-syd1-pve-api.main.unkin.net
# letsencrypt certificates
certbot::client::service: haproxy
certbot::client::domains:
- au-syd1-pve.main.unkin.net
- au-syd1-pve-api.main.unkin.net
- sonarr.main.unkin.net
- radarr.main.unkin.net
- lidarr.main.unkin.net
- readarr.main.unkin.net
- prowlarr.main.unkin.net
- nzbget.main.unkin.net
- fafflix.unkin.net


@ -1,424 +0,0 @@
---
profiles::haproxy::dns::ipaddr: "%{hiera('anycast_ip')}"
profiles::haproxy::dns::vrrp_cnames:
- sonarr.main.unkin.net
- radarr.main.unkin.net
- lidarr.main.unkin.net
- readarr.main.unkin.net
- prowlarr.main.unkin.net
- nzbget.main.unkin.net
- git.unkin.net
- fafflix.unkin.net
- grafana.unkin.net
- dashboard.ceph.unkin.net
- mail-webadmin.main.unkin.net
- mail-in.main.unkin.net
- mail.main.unkin.net
- autoconfig.main.unkin.net
- autodiscover.main.unkin.net
profiles::haproxy::mappings:
fe_http:
ensure: present
mappings:
- 'au-syd1-pve.main.unkin.net be_ausyd1pve_web'
- 'au-syd1-pve-api.main.unkin.net be_ausyd1pve_api'
- 'sonarr.main.unkin.net be_sonarr'
- 'radarr.main.unkin.net be_radarr'
- 'lidarr.main.unkin.net be_lidarr'
- 'readarr.main.unkin.net be_readarr'
- 'prowlarr.main.unkin.net be_prowlarr'
- 'nzbget.main.unkin.net be_nzbget'
- 'jellyfin.main.unkin.net be_jellyfin'
- 'fafflix.unkin.net be_jellyfin'
- 'git.unkin.net be_gitea'
- 'grafana.unkin.net be_grafana'
- 'dashboard.ceph.unkin.net be_ceph_dashboard'
- 'mail-webadmin.main.unkin.net be_stalwart_webadmin'
- 'autoconfig.main.unkin.net be_stalwart_webadmin'
- 'autodiscovery.main.unkin.net be_stalwart_webadmin'
fe_https:
ensure: present
mappings:
- 'au-syd1-pve.main.unkin.net be_ausyd1pve_web'
- 'au-syd1-pve-api.main.unkin.net be_ausyd1pve_api'
- 'sonarr.main.unkin.net be_sonarr'
- 'radarr.main.unkin.net be_radarr'
- 'lidarr.main.unkin.net be_lidarr'
- 'readarr.main.unkin.net be_readarr'
- 'prowlarr.main.unkin.net be_prowlarr'
- 'nzbget.main.unkin.net be_nzbget'
- 'jellyfin.main.unkin.net be_jellyfin'
- 'fafflix.unkin.net be_jellyfin'
- 'git.unkin.net be_gitea'
- 'grafana.unkin.net be_grafana'
- 'dashboard.ceph.unkin.net be_ceph_dashboard'
- 'mail-webadmin.main.unkin.net be_stalwart_webadmin'
- 'autoconfig.main.unkin.net be_stalwart_webadmin'
- 'autodiscovery.main.unkin.net be_stalwart_webadmin'
profiles::haproxy::frontends:
fe_http:
options:
use_backend:
- "%[req.hdr(host),lower,map(/etc/haproxy/fe_http.map,be_default)]"
fe_https:
options:
acl:
- 'acl_ausyd1pve req.hdr(host) -i au-syd1-pve.main.unkin.net'
- 'acl_sonarr req.hdr(host) -i sonarr.main.unkin.net'
- 'acl_radarr req.hdr(host) -i radarr.main.unkin.net'
- 'acl_lidarr req.hdr(host) -i lidarr.main.unkin.net'
- 'acl_readarr req.hdr(host) -i readarr.main.unkin.net'
- 'acl_prowlarr req.hdr(host) -i prowlarr.main.unkin.net'
- 'acl_nzbget req.hdr(host) -i nzbget.main.unkin.net'
- 'acl_jellyfin req.hdr(host) -i jellyfin.main.unkin.net'
- 'acl_fafflix req.hdr(host) -i fafflix.unkin.net'
- 'acl_gitea req.hdr(host) -i git.unkin.net'
- 'acl_grafana req.hdr(host) -i grafana.unkin.net'
- 'acl_ceph_dashboard req.hdr(host) -i dashboard.ceph.unkin.net'
- 'acl_stalwart_webadmin req.hdr(host) -i mail-webadmin.main.unkin.net'
- 'acl_stalwart_webadmin req.hdr(host) -i autoconfig.main.unkin.net'
- 'acl_stalwart_webadmin req.hdr(host) -i autodiscovery.main.unkin.net'
- 'acl_internalsubnets src 198.18.0.0/16 10.10.12.0/24'
use_backend:
- "%[req.hdr(host),lower,map(/etc/haproxy/fe_https.map,be_default)]"
http-request:
- 'deny if { hdr_dom(host) -i au-syd1-pve.main.unkin.net } !acl_internalsubnets'
http-response:
- 'set-header X-Frame-Options DENY if acl_ausyd1pve'
- 'set-header X-Frame-Options DENY if acl_sonarr'
- 'set-header X-Frame-Options DENY if acl_radarr'
- 'set-header X-Frame-Options DENY if acl_lidarr'
- 'set-header X-Frame-Options DENY if acl_readarr'
- 'set-header X-Frame-Options DENY if acl_prowlarr'
- 'set-header X-Frame-Options DENY if acl_nzbget'
- 'set-header X-Frame-Options DENY if acl_jellyfin'
- 'set-header X-Frame-Options DENY if acl_fafflix'
- 'set-header X-Frame-Options DENY if acl_gitea'
- 'set-header X-Frame-Options DENY if acl_grafana'
- 'set-header X-Frame-Options DENY if acl_ceph_dashboard'
- 'set-header X-Frame-Options DENY if acl_stalwart_webadmin'
- 'set-header X-Content-Type-Options nosniff'
- 'set-header X-XSS-Protection 1;mode=block'
profiles::haproxy::backends:
be_ausyd1pve_web:
description: Backend for au-syd1 pve cluster (Web)
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_ausyd1pve_api:
description: Backend for au-syd1 pve cluster (API only)
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /
- forwardfor
- http-keep-alive
- prefer-last-server
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_sonarr:
description: Backend for au-syd1 sonarr
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /consul/health
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_radarr:
description: Backend for au-syd1 radarr
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /consul/health
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_lidarr:
description: Backend for au-syd1 lidarr
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /consul/health
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_readarr:
description: Backend for au-syd1 readarr
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /consul/health
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_prowlarr:
description: Backend for au-syd1 prowlarr
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /consul/health
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_nzbget:
description: Backend for au-syd1 nzbget
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /consul/health
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_jellyfin:
description: Backend for au-syd1 jellyfin
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
be_gitea:
description: Backend for gitea cluster
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
stick-table: 'type ip size 200k expire 30m'
stick: 'on src'
be_grafana:
description: Backend for grafana nodes
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
redirect: 'scheme https if !{ ssl_fc }'
stick-table: 'type ip size 200k expire 30m'
stick: 'on src'
be_ceph_dashboard:
description: Backend for Ceph Dashboard from Mgr instances
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-check:
- expect status 200
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 9443 }
redirect: 'scheme https if !{ ssl_fc }'
stick-table: 'type ip size 200k expire 30m'
be_stalwart_webadmin:
description: Backend for Stalwart Webadmin
collect_exported: false # handled in custom function
options:
balance: roundrobin
option:
- httpchk GET /
- forwardfor
- http-keep-alive
- prefer-last-server
cookie: SRVNAME insert indirect nocache
http-reuse: always
http-check:
- expect status 200
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 9443 }
redirect: 'scheme https if !{ ssl_fc }'
stick-table: 'type ip size 200k expire 30m'
be_stalwart_imap:
description: Backend for Stalwart IMAP (STARTTLS)
collect_exported: false
options:
mode: tcp
balance: roundrobin
option:
- tcp-check
- prefer-last-server
stick-table: 'type ip size 200k expire 30m'
stick: 'on src'
tcp-check:
- connect port 143 send-proxy
- expect string "* OK"
- send "A001 STARTTLS\r\n"
- expect rstring "A001 (OK|2.0.0)"
be_stalwart_imaps:
description: Backend for Stalwart IMAPS (implicit TLS)
collect_exported: false
options:
mode: tcp
balance: roundrobin
option:
- tcp-check
- prefer-last-server
stick-table: 'type ip size 200k expire 30m'
stick: 'on src'
tcp-check:
- connect ssl send-proxy
- expect string "* OK"
be_stalwart_smtp:
description: Backend for Stalwart SMTP
collect_exported: false
options:
mode: tcp
balance: roundrobin
option:
- tcp-check
- prefer-last-server
stick-table: 'type ip size 200k expire 30m'
stick: 'on src'
tcp-check:
- connect port 25 send-proxy
- expect string "220 "
be_stalwart_submission:
description: Backend for Stalwart SMTP Submission
collect_exported: false
options:
mode: tcp
balance: roundrobin
option:
- tcp-check
- prefer-last-server
stick-table: 'type ip size 200k expire 30m'
stick: 'on src'
tcp-check:
- connect port 587 send-proxy
- expect string "220 "
profiles::haproxy::certlist::enabled: true
profiles::haproxy::certlist::certificates:
- /etc/pki/tls/letsencrypt/au-syd1-pve.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/au-syd1-pve-api.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/sonarr.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/radarr.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/lidarr.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/readarr.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/prowlarr.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/nzbget.main.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/fafflix.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/git.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/grafana.unkin.net/fullchain_combined.pem
- /etc/pki/tls/letsencrypt/dashboard.ceph.unkin.net/fullchain_combined.pem
- /etc/pki/tls/vault/certificate.pem
# additional altnames
profiles::pki::vault::alt_names:
- au-syd1-pve.main.unkin.net
- au-syd1-pve-api.main.unkin.net
- jellyfin.main.unkin.net
- mail-webadmin.main.unkin.net
# additional cnames
profiles::haproxy::dns::cnames:
- au-syd1-pve.main.unkin.net
- au-syd1-pve-api.main.unkin.net
# letsencrypt certificates
certbot::client::service: haproxy
certbot::client::domains:
- au-syd1-pve.main.unkin.net
- au-syd1-pve-api.main.unkin.net
- sonarr.main.unkin.net
- radarr.main.unkin.net
- lidarr.main.unkin.net
- readarr.main.unkin.net
- prowlarr.main.unkin.net
- nzbget.main.unkin.net
- fafflix.unkin.net
- git.unkin.net
- grafana.unkin.net
- dashboard.ceph.unkin.net
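For reference, the Stalwart tcp-check sequences defined above can be exercised by hand against a backend node (ignoring the PROXY protocol lines haproxy itself sends), and the rendered config validated before a reload; a rough sketch with a placeholder hostname:
```
# validate the generated haproxy configuration
sudo haproxy -c -f /etc/haproxy/haproxy.cfg
# smtp / submission checks expect a banner starting with "220 "
printf 'QUIT\r\n' | nc -w 3 mail.main.unkin.net 25
# imaps (implicit tls) check expects the "* OK" greeting
openssl s_client -connect mail.main.unkin.net:993 -quiet </dev/null
# imap starttls handshake
openssl s_client -connect mail.main.unkin.net:143 -starttls imap -quiet </dev/null
```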


@ -1,4 +1,3 @@
---
certmanager::vault_token: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAJuE+uzgQaBRUXBCigckEo1j+UxxbiUGrzdf/B9K7XPdVxZh6TzYLpBgNnyaT6vLo0boX4uRD/By0gT5R/2qcXD6d/j+fh517Ctk4d2uO64f0vH3PzyyOBalsNtcCdPiV3q/xGqzQSHhPiNkFEjDvMBz5p53UjfKA6gAiPrLklp4rN/NVyiLBw20NeIqbL25VdkQa13ViS0Gm/eUQu7a2xQ1dvQFWWfuLaQxO0dh8L0ynkfmWKIjaiD5412Z8hYURu0otxbqVDdIbEMx5xQsXnFKeN93yHmgs7a7M6fLdp9jh+G8B+IlK1W7/9v2+RT0/yI3ZgWHVTvDRhMHuPGBjfTBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBC5avtOPp9N65U1ILQENnvAgDBqI8XAjqbWIvXHqOEiKYdu+co0EEtsHR1v5xAeCmj/ZA6MLeKFlAVJbvpyCpzjons=]
certmanager::role_id: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAJuE+uzgQaBRUXBCigckEo1j+UxxbiUGrzdf/B9K7XPdVxZh6TzYLpBgNnyaT6vLo0boX4uRD/By0gT5R/2qcXD6d/j+fh517Ctk4d2uO64f0vH3PzyyOBalsNtcCdPiV3q/xGqzQSHhPiNkFEjDvMBz5p53UjfKA6gAiPrLklp4rN/NVyiLBw20NeIqbL25VdkQa13ViS0Gm/eUQu7a2xQ1dvQFWWfuLaQxO0dh8L0ynkfmWKIjaiD5412Z8hYURu0otxbqVDdIbEMx5xQsXnFKeN93yHmgs7a7M6fLdp9jh+G8B+IlK1W7/9v2+RT0/yI3ZgWHVTvDRhMHuPGBjfTBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBC5avtOPp9N65U1ILQENnvAgDBqI8XAjqbWIvXHqOEiKYdu+co0EEtsHR1v5xAeCmj/ZA6MLeKFlAVJbvpyCpzjons=]
sshsignhost::role_id: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAT86C/InXrgDtXCc9NFze91YMvjTNDqWgv4uzPFI48clOeQyD6x+vOHWP2yNp1OyNHcYLCiLyrv+rSIQyXlLnbeyZWV+7kXIon057Tp7l0BxWtd0hjQEcyWzqQQE7R264C8/qKRak81LIu6RshWZAchYo/BMPuOqVr0m+1zDwOV9JwZc3bpexzsl57CK5pesOrpfdvnd/xrOoEMR+P+C5PC6QLtQl3zkOD3N9kP6HqwbhWH5ZBPy88Kc+5kYM6QVpQSjFIIHK1SWsN0VZoxpkuFlFXB5KHDgZtg3kxrofzjQghl41zJBCDq9Z5oZ+2b1p/j/9jCASyp/ju68H5WXzbzBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCf4Nqp6SAl/XjmhPDnTvVJgDCdDhxWaChhjJ3eRcW4NTFgf3zm7Bu65za0li26FKuKks00duF4zebfNw7ZUVsYtIU=]


@ -1,3 +0,0 @@
---
mysql::db::grafana::pass: ENC[PKCS7,MIIBiQYJKoZIhvcNAQcDoIIBejCCAXYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAozi1vFf9KfwsGSdxXvhOWRn9Vqbr9AZ+cVJuIOmmLVZMRP4AJkkNgCCly+o1pxtMiU8EISUQuBWCBHUi+u2Y1YAjIABzTa2K+PCU7qqkdrJaxvKqmmKS7C/CWFKp1stYvYIvIofyvl9Q854nMcpvxQKL0PBGh3rIaUHHV3L0PH1UDLqL5j6OPXaNvKL4UzwMDDIVAd7F5yMbtSx2PT23pBLbPYsbG1jRx7pUS/d1ZE9E8h3OA2jxW/CBf68Sb2Xlrh7IAB/6RiLLpIsUyRMv9zrbPrMuKg/q5lzO0i9fejh5z3Z3YkdnhH61doUT/RSjAjz3JrWDGxomOSK0y8TxjjBMBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCg1kVqFs/mf4r+6EI32b8qgCBm6cL3JhVzj3Btit9D8VYU8mQcrIaXJZ3qF31zJMzCkw==]
mysql::db::rundeck::pass: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAcWmZuTro0DNX8X/6DCJdmxm85hawng2cjSm/M26/sAzlr7i3XLIjg5TQc3BpeiKWZvQ2XZWygOEcW7g0bHH7FBS6XTXswDiLCf7ssd0DYL+eQbh4p6VijBKObug33fp4+YJaqGV7YRUNqBjXQv/SSmxFqbNaRahUqwbMidJCyjGNmfCfbSd9WxI4/8j0L38rjXR3/i+/xzgVIhgz/qymmw0rky6jN14YrwRIkdW6loMFzVd12tqdX9kh7UBdE7j58ntQgJSilQn2pLmQs6dgcXSOeIi8Sln4R0MfAtOQ1c6LoKMUdb7k8xEszpGbhX7sw51kpwvnL1LS6PQ+T8T9wDBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBDm1sAUc6LFtslrIuwk1JlJgDAngDM/0g4dpgyNOZDsvAU8OualEL6HZ2RFGfibteUc11wZzHkdFZlvHz2JZdO7Huo=]


@ -2,23 +2,3 @@
profiles::sql::galera_member::cluster_name: au-syd1
profiles::sql::galera_member::galera_master: ausyd1nxvm1027.main.unkin.net
profiles::sql::galera_member::innodb_buffer_pool_size: 256M
mysql::db:
grafana:
name: grafana
user: grafana
password: "%{alias('mysql::db::grafana::pass')}"
grant:
- SELECT
- INSERT
- UPDATE
- DELETE
rundeck:
name: rundeck
user: rundeck
password: "%{alias('mysql::db::rundeck::pass')}"
grant:
- SELECT
- INSERT
- UPDATE
- DELETE


@ -5,11 +5,5 @@ profiles::puppet::server::dns_alt_names:
- puppetca.query.consul
- puppetca
profiles::ssh::sign::principals:
- puppetca.main.unkin.net
- puppetca.service.consul
- puppetca.query.consul
- puppetca
profiles::puppet::puppetca::is_puppetca: true
profiles::puppet::puppetca::allow_subject_alt_names: true

View File

@ -0,0 +1,9 @@
---
profiles::puppet::server::dns_alt_names:
- puppetca.main.unkin.net
- puppetca.service.consul
- puppetca.query.consul
- puppetca
profiles::puppet::puppetca::is_puppetca: false
profiles::puppet::puppetca::allow_subject_alt_names: true
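
The dns_alt_names list has to reach puppet.conf on the CA host before its certificate is issued (hence allow_subject_alt_names). A hedged sketch of that plumbing with puppetlabs-inifile (the resource title and single-setting layout are assumptions):

$alt_names = lookup('profiles::puppet::server::dns_alt_names', Array[String], 'unique', [])

ini_setting { 'puppet dns_alt_names':
  ensure  => present,
  path    => '/etc/puppetlabs/puppet/puppet.conf',
  section => 'main',
  setting => 'dns_alt_names',
  value   => join($alt_names, ','),
}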

View File

@ -1,13 +1,5 @@
---
networking_loopback0_ip: 198.18.19.1 # management loopback
networking_loopback1_ip: 198.18.22.1 # ceph-cluster loopback
networking_loopback2_ip: 198.18.23.1 # ceph-public loopback
networking_1000_ip: 198.18.15.1 # 1gbe network
networking_2500_ip: 198.18.21.1 # 2.5gbe network
networking_1000_iface: enp2s0
networking_2500_iface: enp3s0
networking::interfaces:
"%{hiera('networking_1000_iface')}":
mac: d8:9e:f3:75:c3:60
"%{hiera('networking_2500_iface')}":
mac: 00:ac:d0:00:00:50
profiles::proxmox::params::pve_clusterinit_master: true
profiles::proxmox::params::pve_ceph_mon: true
profiles::proxmox::params::pve_ceph_mgr: true
profiles::proxmox::params::pve_ceph_osd: true

View File

@ -1,13 +1,4 @@
---
networking_loopback0_ip: 198.18.19.2 # management loopback
networking_loopback1_ip: 198.18.22.2 # ceph-cluster loopback
networking_loopback2_ip: 198.18.23.2 # ceph-public loopback
networking_1000_ip: 198.18.15.2 # 1gbe network
networking_2500_ip: 198.18.21.2 # 2.5gbe network
networking_1000_iface: enp2s0
networking_2500_iface: enp3s0
networking::interfaces:
"%{hiera('networking_1000_iface')}":
mac: d8:9e:f3:74:b6:08
"%{hiera('networking_2500_iface')}":
mac: 00:e0:4c:68:08:43
profiles::proxmox::params::pve_ceph_mon: true
profiles::proxmox::params::pve_ceph_mgr: true
profiles::proxmox::params::pve_ceph_osd: true

View File

@ -1,13 +1,4 @@
---
networking_loopback0_ip: 198.18.19.3 # management loopback
networking_loopback1_ip: 198.18.22.3 # ceph-cluster loopback
networking_loopback2_ip: 198.18.23.3 # ceph-public loopback
networking_1000_ip: 198.18.15.3 # 1gbe network
networking_2500_ip: 198.18.21.3 # 2.5gbe network
networking_1000_iface: enp2s0
networking_2500_iface: enp3s0
networking::interfaces:
"%{hiera('networking_1000_iface')}":
mac: b8:85:84:a3:25:c5
"%{hiera('networking_2500_iface')}":
mac: 00:e0:4c:68:07:82
profiles::proxmox::params::pve_ceph_mon: true
profiles::proxmox::params::pve_ceph_mgr: true
profiles::proxmox::params::pve_ceph_osd: true

View File

@ -1,13 +1,2 @@
---
networking_loopback0_ip: 198.18.19.4 # management loopback
networking_loopback1_ip: 198.18.22.4 # ceph-cluster loopback
networking_loopback2_ip: 198.18.23.4 # ceph-public loopback
networking_1000_ip: 198.18.15.4 # 1gbe network
networking_2500_ip: 198.18.21.4 # 2.5gbe network
networking_1000_iface: enp2s0
networking_2500_iface: enp3s0
networking::interfaces:
"%{hiera('networking_1000_iface')}":
mac: d8:9e:f3:75:d5:00
"%{hiera('networking_2500_iface')}":
mac: 00:ac:d0:00:00:43
profiles::proxmox::params::pve_ceph_osd: true

View File

@ -1,13 +1,2 @@
---
networking_loopback0_ip: 198.18.19.5 # management loopback
networking_loopback1_ip: 198.18.22.5 # ceph-cluster loopback
networking_loopback2_ip: 198.18.23.5 # ceph-public loopback
networking_1000_ip: 198.18.15.5 # 1gbe network
networking_2500_ip: 198.18.21.5 # 2.5gbe network
networking_1000_iface: enp1s0
networking_2500_iface: enp3s0
networking::interfaces:
"%{hiera('networking_1000_iface')}":
mac: 54:bf:64:a0:08:64
"%{hiera('networking_2500_iface')}":
mac: 00:e0:4c:68:07:79
profiles::proxmox::params::pve_ceph_osd: true

View File

@ -1,13 +1,2 @@
---
networking_loopback0_ip: 198.18.19.6 # management loopback
networking_loopback1_ip: 198.18.22.6 # ceph-cluster loopback
networking_loopback2_ip: 198.18.23.6 # ceph-public loopback
networking_1000_ip: 198.18.15.6 # 1gbe network
networking_2500_ip: 198.18.21.6 # 2.5gbe network
networking_1000_iface: enp2s0
networking_2500_iface: enp3s0
networking::interfaces:
"%{hiera('networking_1000_iface')}":
mac: d8:9e:f3:75:10:8d
"%{hiera('networking_2500_iface')}":
mac: 00:ac:d0:00:00:53
profiles::proxmox::params::pve_ceph_osd: true

View File

@ -1,13 +1,2 @@
---
networking_loopback0_ip: 198.18.19.7 # management loopback
networking_loopback1_ip: 198.18.22.7 # ceph-cluster loopback
networking_loopback2_ip: 198.18.23.7 # ceph-public loopback
networking_1000_ip: 198.18.15.7 # 1gbe network
networking_2500_ip: 198.18.21.7 # 2.5gbe network
networking_1000_iface: enp2s0
networking_2500_iface: enp3s0
networking::interfaces:
"%{hiera('networking_1000_iface')}":
mac: d8:9e:f3:74:b4:27
"%{hiera('networking_2500_iface')}":
mac: 00:ac:d0:00:00:5b
profiles::proxmox::params::pve_ceph_osd: true

View File

@ -1,13 +1,2 @@
---
networking_loopback0_ip: 198.18.19.8 # management loopback
networking_loopback1_ip: 198.18.22.8 # ceph-cluster loopback
networking_loopback2_ip: 198.18.23.8 # ceph-public loopback
networking_1000_ip: 198.18.15.8 # 1gbe network
networking_2500_ip: 198.18.21.8 # 2.5gbe network
networking_1000_iface: enp2s0
networking_2500_iface: enp3s0
networking::interfaces:
"%{hiera('networking_1000_iface')}":
mac: d8:9e:f3:75:06:18
"%{hiera('networking_2500_iface')}":
mac: 00:e0:4c:68:08:4b
profiles::proxmox::params::pve_ceph_osd: true

View File

@ -1,18 +0,0 @@
---
networking_loopback0_ip: 198.18.19.9 # management loopback
networking_loopback1_ip: 198.18.22.9 # ceph-cluster loopback
networking_loopback2_ip: 198.18.23.9 # ceph-public loopback
networking_br10_ip: 198.18.25.254
networking::interfaces:
enp2s0:
mac: 70:b5:e8:38:e9:8d
ipaddress: 198.18.15.9
gateway: 198.18.15.254
enp3s0:
mac: 00:e0:4c:68:0f:5d
ipaddress: 198.18.21.9
#zfs::zpools:
# fastpool:
# ensure: present
# disk: /dev/nvme0n1
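
Illustrative only: one way a networking profile could turn the per-host networking::interfaces hash above into ifcfg files. The repo's real module (puppet-network/puppet-filemapper) almost certainly differs, and ipaddress/gateway handling is elided:

$ifaces = lookup('networking::interfaces', Hash, 'deep', {})
$ifaces.each |String $name, Hash $opts| {
  # Pin each interface name to its MAC address, as the hieradata above does.
  file { "/etc/sysconfig/network-scripts/ifcfg-${name}":
    ensure  => file,
    content => join([
      "DEVICE=${name}",
      "HWADDR=${opts['mac']}",
      'ONBOOT=yes',
      '',
    ], "\n"),
  }
}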

View File

@ -1,13 +0,0 @@
---
networking_loopback0_ip: 198.18.19.10 # management loopback
networking_loopback1_ip: 198.18.22.10 # ceph-cluster loopback
networking_loopback2_ip: 198.18.23.10 # ceph-public loopback
networking_br10_ip: 198.18.26.254
networking::interfaces:
enp2s0:
mac: 70:b5:e8:38:e9:37
ipaddress: 198.18.15.10
gateway: 198.18.15.254
enp3s0:
mac: 00:e0:4c:68:0f:de
ipaddress: 198.18.21.10

View File

@ -1,13 +0,0 @@
---
networking_loopback0_ip: 198.18.19.11 # management loopback
networking_loopback1_ip: 198.18.22.11 # ceph-cluster loopback
networking_loopback2_ip: 198.18.23.11 # ceph-public loopback
networking_br10_ip: 198.18.27.254
networking::interfaces:
enp2s0:
mac: 70:b5:e8:38:e9:0f
ipaddress: 198.18.15.11
gateway: 198.18.15.254
enp3s0:
mac: 00:e0:4c:68:0f:55
ipaddress: 198.18.21.11

View File

@ -1,13 +0,0 @@
---
networking_loopback0_ip: 198.18.19.12 # management loopback
networking_loopback1_ip: 198.18.22.12 # ceph-cluster loopback
networking_loopback2_ip: 198.18.23.12 # ceph-public loopback
networking_br10_ip: 198.18.28.254
networking::interfaces:
enp2s0:
mac: 70:b5:e8:4f:05:1e
ipaddress: 198.18.15.12
gateway: 198.18.15.254
enp3s0:
mac: 00:e0:4c:68:0f:e5
ipaddress: 198.18.21.12

View File

@ -1,13 +0,0 @@
---
networking_loopback0_ip: 198.18.19.13 # management loopback
networking_loopback1_ip: 198.18.22.13 # ceph-cluster loopback
networking_loopback2_ip: 198.18.23.13 # ceph-public loopback
networking_br10_ip: 198.18.29.254
networking::interfaces:
enp2s0:
mac: 70:b5:e8:4f:04:b0
ipaddress: 198.18.15.13
gateway: 198.18.15.254
enp3s0:
mac: 00:e0:4c:68:0f:36
ipaddress: 198.18.21.13

View File

@ -1,23 +1,2 @@
# hieradata/os/AlmaLinux/AlmaLinux8.yaml
---
crypto_policies::policy: 'DEFAULT'
profiles::packages::include:
network-scripts: {}
profiles::yum::global::repos:
powertools:
name: powertools
descr: powertools repository
target: /etc/yum.repos.d/powertools.repo
baseurl: https://edgecache.query.consul/almalinux/%{facts.os.release.full}/PowerTools/%{facts.os.architecture}/os
gpgkey: http://edgecache.query.consul/almalinux/RPM-GPG-KEY-AlmaLinux-%{facts.os.release.major}
mirrorlist: absent
unkin:
name: unkin
descr: unkin repository
target: /etc/yum.repos.d/unkin.repo
baseurl: https://git.query.consul/api/packages/unkin/rpm/almalinux/el8
gpgkey: https://git.query.consul/api/packages/unkin/rpm/repository.key
gpgcheck: false
mirrorlist: absent

View File

@ -1,36 +1,2 @@
# hieradata/os/AlmaLinux/AlmaLinux9.yaml
---
crypto_policies::policy: 'DEFAULT:SHA1'
profiles::yum::global::repos:
baseos:
baseurl: https://packagerepo.service.consul/almalinux/%{facts.os.release.full}/baseos-daily/%{facts.os.architecture}/os/
gpgkey: https://packagerepo.service.consul/almalinux/%{facts.os.release.full}/baseos-daily/%{facts.os.architecture}/os//RPM-GPG-KEY-AlmaLinux-9
mirrorlist: absent
extras:
baseurl: https://packagerepo.service.consul/almalinux/%{facts.os.release.full}/extras-daily/%{facts.os.architecture}/os/
gpgkey: https://packagerepo.service.consul/almalinux/%{facts.os.release.full}/extras-daily/%{facts.os.architecture}/os//RPM-GPG-KEY-AlmaLinux-9
mirrorlist: absent
appstream:
baseurl: https://packagerepo.service.consul/almalinux/%{facts.os.release.full}/appstream-daily/%{facts.os.architecture}/os/
gpgkey: https://packagerepo.service.consul/almalinux/%{facts.os.release.full}/appstream-daily/%{facts.os.architecture}/os//RPM-GPG-KEY-AlmaLinux-9
mirrorlist: absent
highavailability:
baseurl: https://packagerepo.service.consul/almalinux/%{facts.os.release.full}/ha-daily/%{facts.os.architecture}/os/
gpgkey: https://packagerepo.service.consul/almalinux/%{facts.os.release.full}/ha-daily/%{facts.os.architecture}/os//RPM-GPG-KEY-AlmaLinux-9
mirrorlist: absent
crb:
name: crb
descr: crb repository
target: /etc/yum.repos.d/crb.repo
baseurl: https://packagerepo.service.consul/almalinux/%{facts.os.release.full}/crb-daily/%{facts.os.architecture}/os/
gpgkey: https://packagerepo.service.consul/almalinux/%{facts.os.release.full}/crb-daily/%{facts.os.architecture}/os//RPM-GPG-KEY-AlmaLinux-9
mirrorlist: absent
unkin:
name: unkin
descr: unkin repository
target: /etc/yum.repos.d/unkin.repo
baseurl: https://git.query.consul/api/packages/unkin/rpm/almalinux/el9
gpgkey: https://git.query.consul/api/packages/unkin/rpm/repository.key
gpgcheck: false
mirrorlist: absent
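
Each entry under profiles::yum::global::repos mirrors the attributes of Puppet's yumrepo type, so the consuming profile can splat the hashes straight into resources. A sketch under that assumption (the actual profile code is not part of this diff):

$repos = lookup('profiles::yum::global::repos', Hash, 'deep', {})
$repos.each |String $title, Hash $attrs| {
  yumrepo { $title:
    ensure => present,
    *      => $attrs,
  }
}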

View File

@ -3,18 +3,12 @@
profiles::firewall::firewalld::ensure_package: 'absent'
profiles::firewall::firewalld::ensure_service: 'stopped'
profiles::firewall::firewalld::enable_service: false
profiles::puppet::agent::version: '7.37.2'
profiles::puppet::agent::openvox_enable: true
profiles::puppet::agent::puppet_version: '7.26.0'
hiera_include:
- profiles::almalinux::base
profiles::packages::include:
crypto-policies-scripts: {}
lzo: {}
policycoreutils: {}
unar: {}
xz: {}
profiles::packages::install:
- lzo
- xz
- policycoreutils
lm-sensors::package: lm_sensors
@ -25,40 +19,44 @@ profiles::yum::global::repos:
target: /etc/yum.repos.d/baseos.repo
baseurl: https://edgecache.query.consul/almalinux/%{facts.os.release.full}/BaseOS/%{facts.os.architecture}/os
gpgkey: http://edgecache.query.consul/almalinux/RPM-GPG-KEY-AlmaLinux-%{facts.os.release.major}
mirrorlist: absent
extras:
name: extras
descr: extras repository
target: /etc/yum.repos.d/extras.repo
baseurl: https://edgecache.query.consul/almalinux/%{facts.os.release.full}/extras/%{facts.os.architecture}/os
gpgkey: http://edgecache.query.consul/almalinux/RPM-GPG-KEY-AlmaLinux-%{facts.os.release.major}
mirrorlist: absent
appstream:
name: appstream
descr: appstream repository
target: /etc/yum.repos.d/appstream.repo
baseurl: https://edgecache.query.consul/almalinux/%{facts.os.release.full}/AppStream/%{facts.os.architecture}/os
gpgkey: http://edgecache.query.consul/almalinux/RPM-GPG-KEY-AlmaLinux-%{facts.os.release.major}
mirrorlist: absent
powertools:
name: powertools
descr: powertools repository
target: /etc/yum.repos.d/powertools.repo
baseurl: https://edgecache.query.consul/almalinux/%{facts.os.release.full}/PowerTools/%{facts.os.architecture}/os
gpgkey: http://edgecache.query.consul/almalinux/RPM-GPG-KEY-AlmaLinux-%{facts.os.release.major}
highavailability:
name: highavailability
descr: highavailability repository
target: /etc/yum.repos.d/highavailability.repo
baseurl: https://edgecache.query.consul/almalinux/%{facts.os.release.full}/HighAvailability/%{facts.os.architecture}/os
gpgkey: http://edgecache.query.consul/almalinux/RPM-GPG-KEY-AlmaLinux-%{facts.os.release.major}
mirrorlist: absent
epel:
name: epel
descr: epel repository
target: /etc/yum.repos.d/epel.repo
baseurl: https://packagerepo.service.consul/epel/%{facts.os.release.major}/everything-daily/%{facts.os.architecture}/os/
gpgkey: https://packagerepo.service.consul/epel/%{facts.os.release.major}/everything-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-EPEL-%{facts.os.release.major}
mirrorlist: absent
unkinben:
name: unkinben
descr: unkinben repository
baseurl: https://edgecache.query.consul/epel/%{facts.os.release.major}/Everything/%{facts.os.architecture}
gpgkey: http://edgecache.query.consul/epel/RPM-GPG-KEY-EPEL-%{facts.os.release.major}
puppet:
name: puppet
descr: puppet repository
target: /etc/yum.repos.d/puppet.repo
baseurl: https://yum.puppet.com/puppet7/el/%{facts.os.release.major}/%{facts.os.architecture}
gpgkey: https://yum.puppet.com/RPM-GPG-KEY-puppet-20250406
unkin:
name: unkin
descr: unkin repository
target: /etc/yum.repos.d/unkin.repo
baseurl: https://git.query.consul/api/packages/unkinben/rpm/el%{facts.os.release.major}
gpgkey: https://git.query.consul/api/packages/unkinben/rpm/repository.key
gpgcheck: false
mirrorlist: absent
baseurl: https://repos.main.unkin.net/unkin/%{facts.os.release.major}/%{facts.os.architecture}/os
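
The hiera_include key drives node classification. A sketch of the site.pp pattern it implies, also honouring the hiera_exclude key used elsewhere in this hieradata (the exact wiring is an assumption):

$includes = lookup('hiera_include', Array[String], 'unique', [])
$excludes = lookup('hiera_exclude', Array[String], 'unique', [])

# Include everything the hierarchy asked for, minus any explicit exclusions.
($includes - $excludes).include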

View File

@ -11,4 +11,4 @@ profiles::apt::components:
- main
- non-free
profiles::puppet::agent::version: '7.25.0-1bullseye'
profiles::puppet::agent::puppet_version: '7.25.0-1bullseye'

View File

@ -12,4 +12,4 @@ profiles::apt::components:
- non-free
- non-free-firmware
profiles::puppet::agent::version: 'latest'
profiles::puppet::agent::puppet_version: 'latest'

View File

@ -1,19 +1,14 @@
# hieradata/os/debian/all_releases.yaml
---
profiles::apt::base::mirrorurl: http://edgecache.query.consul/debian/
profiles::apt::base::mirrorurl: http://repos.main.unkin.net/debian
profiles::apt::base::secureurl: http://security.debian.org/debian-security
profiles::apt::puppet7::mirror: http://apt.puppetlabs.com
profiles::apt::puppet7::repo: puppet7
profiles::pki::vaultca::ca_cert_path: /usr/local/share/ca-certificates/
profiles::packages::include:
lzop: {}
python3.11-venv: {}
xz-utils: {}
profiles::packages::install:
- lzop
- python3.11-venv
- xz-utils
lm-sensors::package: lm-sensors
networking::nwmgr_dns_none: false
consul::install_method: 'url'
consul::manage_repo: false
consul::bin_dir: /usr/local/bin
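
The empty-hash values under profiles::packages::include suggest per-package attribute hashes, which is the shape stdlib's ensure_packages accepts; the consuming profile is presumably close to this sketch (not the actual code):

$packages = lookup('profiles::packages::include', Hash, 'deep', {})
ensure_packages($packages)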

View File

@ -1 +0,0 @@
profiles::jupyter::jupyterhub::ldap_bind_pass: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAJspN3e2WzA0uZaLgFZ0Ewqii9dY0tTgbirsW70M2VZtLY+s+C6HE8ZZUtpfnRsFwUUhOj7s25X9xVOZNTpZIGPyfx9MWlSyFw2RFuXSEwaydf1DcBbg8261YrTTysA4Jsa1L4DLsX55q+XJUyeUbimVQkIacVIvzTdnZCBKnVNUh3U2PNAmV7SOL2KH8Jpbfs/EQfBt8XuGMCg3I/4RDyoNERqthW6W2KiMX2Gmd8iQ5+W9udH0lEAMx415oyImmN+dEuThcx9FGMi8BWYtnxH96yWafpT5qltwW6EVzIGWuLhiD1LcWYc5RB8jc3DhbeouChpKsN6c4EHoKt3aWsTBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBC8jcnqilJgY1/AnHWHfX4bgDCi2a3Rj43Z0dgfB5HaHdpfked3Cx+u94r2S5+Cg3QogU1AIF04rjzOL+bD2HdaMfo=]

View File

@ -1,74 +0,0 @@
---
profiles::packages::include:
python3.12: {}
python3.12-pip: {}
hiera_include:
- docker
- profiles::nginx::simpleproxy
# manage docker
docker::version: latest
docker::curl_ensure: false
docker::root_dir: /data/docker
# manage a simple nginx reverse proxy
profiles::nginx::simpleproxy::nginx_vhost: 'jupyterhub.query.consul'
profiles::nginx::simpleproxy::nginx_aliases:
- jupyterhub.service.consul
- jupyterhub.query.consul
- "jupyterhub.service.%{facts.country}-%{facts.region}.consul"
profiles::nginx::simpleproxy::proxy_host: 127.0.0.1
profiles::nginx::simpleproxy::proxy_port: 8000
profiles::nginx::simpleproxy::proxy_path: '/'
profiles::nginx::simpleproxy::use_default_location: false
nginx::client_max_body_size: 20M
profiles::nginx::simpleproxy::locations:
# authorised access from external
default:
ensure: 'present'
server: "%{lookup('profiles::nginx::simpleproxy::nginx_vhost')}"
ssl_only: true
location: '/'
proxy: "http://%{lookup('profiles::nginx::simpleproxy::proxy_host')}:%{lookup('profiles::nginx::simpleproxy::proxy_port')}"
proxy_set_header:
- 'Host $host'
- 'X-Real-IP $remote_addr'
- 'X-Forwarded-For $proxy_add_x_forwarded_for'
- 'X-Forwarded-Host $host'
- 'X-Forwarded-Proto $scheme'
- 'Upgrade $http_upgrade'
- 'Connection $http_connection'
- 'X-Scheme $scheme'
proxy_redirect: 'off'
proxy_http_version: '1.1'
proxy_buffering: 'off'
# additional altnames
profiles::pki::vault::alt_names:
- jupyterhub.service.consul
- jupyterhub.query.consul
- "jupyterhub.service.%{facts.country}-%{facts.region}.consul"
# configure consul service
consul::services:
jupyterhub:
service_name: 'jupyterhub'
tags:
- 'jupyterhub'
address: "%{facts.networking.ip}"
port: 443
checks:
- id: 'jupyterhub_http_check'
name: 'jupyterhub HTTP Check'
http: "https://%{facts.networking.fqdn}"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: jupyterhub
disposition: write
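
The consul::services hash is puppet-consul's standard way of registering services: the consul class declares one consul::service per entry. For the data above that is roughly equivalent to the explicit resource below (treat the exact parameter mapping as an assumption):

consul::service { 'jupyterhub':
  address => $facts['networking']['ip'],
  port    => 443,
  tags    => ['jupyterhub'],
  checks  => [{
    'id'              => 'jupyterhub_http_check',
    'name'            => 'jupyterhub HTTP Check',
    'http'            => "https://${facts['networking']['fqdn']}",
    'method'          => 'GET',
    'tls_skip_verify' => true,
    'interval'        => '10s',
    'timeout'         => '1s',
  }],
}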

View File

@ -1,2 +0,0 @@
---
ceph::key::media: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAEBANgP2ifU7NbuMs+kWpeg1tchR5IMD7Z7kMpRBejgCMHludTYGf/BzxTe36YjpwLsuUd658QK5vE4EYpM1MuzqfuNiWJa5ec1IR/AgWQUMZcpjEDEqpHTb2qygmpc+jb3vW1EMBleZL2Z4GrgJ00gWO/EvukBSPgyxBsFe4Bb/L3aK6xiucG3JA9A7qA6cS4Oz5pf8dfC0FBjsc+XN7++bJN5pWUgMcEDgiyCy3bkL2gWfPKOWfabTRwuC3qd6SihZMg/tY8uoDfYoI8jHkjU07/mhC6AD930wgcFG+xJwNAX7FxLvLyJ8iN/648LVoZFuszYiTwPib1CszksdYBjBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBBSGXrbrl4FisZN5FT1hfmrgDBnV2SVfCJIYYyZ9+Vo1ykNmzUypJdJ+4llyXA7FOuH90xVZvLZMjNMhVCxP48CiYI=]

View File

@ -1,100 +0,0 @@
---
hiera_include:
- profiles::nginx::simpleproxy
profiles::yum::global::repos:
ceph-reef:
name: ceph-reef
descr: ceph reef repository
target: /etc/yum.repos.d/ceph-reef.repo
baseurl: https://edgecache.query.consul/ceph/yum/el%{facts.os.release.major}/%{facts.os.architecture}
    gpgcheck: 0
mirrorlist: absent
profiles::ceph::client::keyrings:
media:
key: "%{hiera('ceph::key::media')}"
profiles::base::groups::local:
media:
ensure: present
gid: 20000
allowdupe: false
forcelocal: true
ldap_host: 'ldap.service.consul'
ldap_basedn: 'dc=main,dc=unkin,dc=net'
profiles::nginx::simpleproxy::locations:
# authentication proxy
authproxy:
ensure: 'present'
server: "%{lookup('profiles::nginx::simpleproxy::nginx_vhost')}"
ssl_only: true
internal: true
location: '= /auth-proxy'
proxy: "http://%{lookup('profiles::nginx::simpleproxy::proxy_host')}:8888"
proxy_set_header:
- 'Content-Length ""'
- "X-Ldap-URL ldap://%{lookup('ldap_host')}"
- 'X-Ldap-Starttls "false"'
- "X-Ldap-BaseDN %{lookup('ldap_basedn')}"
- "X-Ldap-BindDN %{lookup('ldap_binddn')}"
- "X-Ldap-BindPass %{lookup('ldap_bindpass')}"
- 'X-CookieName "nginxauth"'
- 'Cookie nginxauth=$cookie_nginxauth'
- "X-Ldap-Template %{lookup('ldap_template')}"
- 'X-Ldap-Realm "Restricted"'
proxy_cache: 'cache'
proxy_cache_valid: '200 10m'
proxy_cache_key: '"$http_authorization$cookie_nginxauth"'
location_cfg_append:
proxy_pass_request_body: 'off'
# health checks by consul/haproxy
arrstack_web_healthcheck:
ensure: 'present'
server: "%{lookup('profiles::nginx::simpleproxy::nginx_vhost')}"
ssl_only: true
location: '/consul/health'
proxy: "http://%{lookup('profiles::nginx::simpleproxy::proxy_host')}:%{lookup('profiles::nginx::simpleproxy::proxy_port')}"
proxy_set_header:
- 'Host $host'
- 'X-Forwarded-For $proxy_add_x_forwarded_for'
- 'X-Forwarded-Host $host'
- 'X-Forwarded-Proto $scheme'
- 'Upgrade $http_upgrade'
- 'Connection $http_connection'
proxy_redirect: 'off'
proxy_http_version: '1.1'
location_allow:
- 127.0.0.1
- "%{facts.networking.ip}"
- 198.18.24.0/24
location_deny:
- all
# authorised access from external
arrstack_web_external:
ensure: 'present'
server: "%{lookup('profiles::nginx::simpleproxy::nginx_vhost')}"
ssl_only: true
location: '/'
auth_request: '/auth-proxy'
proxy: "http://%{lookup('profiles::nginx::simpleproxy::proxy_host')}:%{lookup('profiles::nginx::simpleproxy::proxy_port')}"
proxy_set_header:
- 'Host $host'
- 'X-Forwarded-For $proxy_add_x_forwarded_for'
- 'X-Forwarded-Host $host'
- 'X-Forwarded-Proto $scheme'
- 'Upgrade $http_upgrade'
- 'Connection $http_connection'
proxy_redirect: 'off'
proxy_http_version: '1.1'
# location for api, which should be accessible without authentication
arrstack_api:
ensure: 'present'
server: "%{lookup('profiles::nginx::simpleproxy::nginx_vhost')}"
ssl_only: true
location: '~ /api'
proxy: "http://%{lookup('profiles::nginx::simpleproxy::proxy_host')}:%{lookup('profiles::nginx::simpleproxy::proxy_port')}"
location_cfg_append:
client_max_body_size: '20m'
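
The location hashes above already use puppet-nginx parameter names (ssl_only, auth_request, location_allow, ...), so a simpleproxy profile can splat them into nginx::resource::location resources. A sketch under that assumption:

$locations = lookup('profiles::nginx::simpleproxy::locations', Hash, 'deep', {})
$locations.each |String $title, Hash $params| {
  nginx::resource::location { $title:
    * => $params,
  }
}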

View File

@ -1,77 +0,0 @@
---
hiera_include:
- jellyfin
profiles::packages::include:
intel-media-driver: {}
libva-intel-driver: {}
libva-intel-hybrid-driver: {}
intel-mediasdk: {}
# manage jellyfin
jellyfin::params::service_enable: true
# additional altnames
profiles::pki::vault::alt_names:
- jellyfin.main.unkin.net
- jellyfin.service.consul
- jellyfin.query.consul
- "jellyfin.service.%{facts.country}-%{facts.region}.consul"
# manage a simple nginx reverse proxy
profiles::nginx::simpleproxy::nginx_vhost: 'jellyfin.query.consul'
profiles::nginx::simpleproxy::nginx_aliases:
- jellyfin.main.unkin.net
- jellyfin.service.consul
- jellyfin.query.consul
- "jellyfin.service.%{facts.country}-%{facts.region}.consul"
profiles::nginx::simpleproxy::proxy_port: 8096
profiles::nginx::simpleproxy::proxy_host: 127.0.0.1
profiles::nginx::simpleproxy::proxy_path: '/'
# configure consul service
nginx::client_max_body_size: 10M
consul::services:
jellyfin:
service_name: 'jellyfin'
tags:
- 'media'
- 'jellyfin'
address: "%{facts.networking.ip}"
port: 443
checks:
- id: 'jellyfin_http_check'
name: 'jellyfin HTTP Check'
http: "https://%{facts.networking.fqdn}:443"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: jellyfin
disposition: write
profiles::yum::global::repos:
rpmfusion-free:
name: rpmfusion-free
descr: rpmfusion-free repository
target: /etc/yum.repos.d/rpmfusion.repo
baseurl: https://download1.rpmfusion.org/free/el/updates/%{facts.os.release.major}/%{facts.os.architecture}
gpgkey: https://download1.rpmfusion.org/free/el/RPM-GPG-KEY-rpmfusion-free-el-%{facts.os.release.major}
mirrorlist: absent
rpmfusion-nonfree:
name: rpmfusion-nonfree
descr: rpmfusion-nonfree repository
target: /etc/yum.repos.d/rpmfusion.repo
baseurl: https://download1.rpmfusion.org/nonfree/el/updates/%{facts.os.release.major}/%{facts.os.architecture}
gpgkey: https://download1.rpmfusion.org/nonfree/el/RPM-GPG-KEY-rpmfusion-nonfree-el-%{facts.os.release.major}
mirrorlist: absent
unkinben:
name: unkinben
descr: unkinben repository
target: /etc/yum.repos.d/unkin.repo
baseurl: https://git.query.consul/api/packages/unkinben/rpm/el8
gpgkey: https://git.query.consul/api/packages/unkinben/rpm/repository.key
gpgcheck: false
mirrorlist: absent

View File

@ -1,3 +0,0 @@
---
lidarr::api_key: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAeIT5i5yJ/KCmEBEgF8r36dl2RK/0/LQWPl6bgth7KOdtfNynhH4bCxembrJwzXasT1KBrPWYmTc2IObBz2tqu7BIHoioI2y+GVs2ulhx63lrfeDI/I4QFs5EOh9fIoyOxlIkvKm+p0WVfaegKOKM63XHHvG2TmBwTypEHB1IXaCMVl87tY+3xmMEaiqVPik3llqLCog1rmRLbIQx+whAFPtlhHur0ozfdYLKiM57YHAsQpGgASYkAAjvZuKabOrRZsIhhsHCb4JQ/evvIrhkviK7nP4xHdeqRSJgdEDmIldr2FW3uHCzuq033K3T7HNc3HbUM/5lC0ygP8sZnnM8rDBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBAyfQkaBPJJWVsc2FGiyCyMgDAYuYDAwBBAJzfVZ4RFrQyi48VZeS8MTjf2HNAXBYoYgTtdZAk9i+pIV22p9ee+KsU=]
ldap_bindpass: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAEDEyk6fBBnrjZvfK8MnUVOTWxhFGtgY34/2CuIq55MoVLsk2ZgVrL7Kt+94bqFhwEB67kuNpMGXqTgW5ose2yWs5iVSJLECsf9C+tvGBGwaV35LNwP5S3aQmFagyTpZZz9QlGKC7818jlXz7vZWDtiUhy5TGMHeyS0fdjCveavtZR28A+ZrvWjJeLdN47mmvYwYfFnQBs3kSgkl5KyMVhFWSFOSLeHsuEzCVXHoQ1jQG+2TV5m18wV0RR/sOju2E+vsulqlDgCyifgoiry4GzJeKNrNDI2bifzHCAi6yZqHL/klyqbGTnKLlA4xKoXsHF+xEwcoq4S9JDLAdWeH1SDBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCdvh4yn8knozcYhinybRq3gDAwTKv8VakQG7XK/mcEplwtoiKqLnj9IIGdIUh1zPi2Sg48ET5rfZyl0p7ddIYoHjU=]

View File

@ -1,57 +0,0 @@
---
hiera_include:
- lidarr
- profiles::nginx::ldapauth
- profiles::media::lidarr
# manage lidarr
lidarr::params::user: lidarr
lidarr::params::group: media
lidarr::params::manage_group: false
lidarr::params::archive_version: 2.3.3
lidarr::params::port: 8000
# additional altnames
profiles::pki::vault::alt_names:
- lidarr.main.unkin.net
- lidarr.service.consul
- lidarr.query.consul
- "lidarr.service.%{facts.country}-%{facts.region}.consul"
# manage a simple nginx reverse proxy
profiles::nginx::simpleproxy::nginx_vhost: 'lidarr.query.consul'
profiles::nginx::simpleproxy::nginx_aliases:
- lidarr.main.unkin.net
- lidarr.service.consul
- lidarr.query.consul
- "lidarr.service.%{facts.country}-%{facts.region}.consul"
profiles::nginx::simpleproxy::proxy_port: 8000
profiles::nginx::simpleproxy::proxy_host: 127.0.0.1
profiles::nginx::simpleproxy::proxy_path: '/'
profiles::nginx::simpleproxy::use_default_location: false
nginx::client_max_body_size: 20M
ldap_binddn: 'cn=svc_lidarr,ou=services,ou=users,dc=main,dc=unkin,dc=net'
ldap_template: '(&(uid=%(username)s)(memberOf=ou=lidarr_access,ou=groups,dc=main,dc=unkin,dc=net))'
# configure consul service
consul::services:
lidarr:
service_name: 'lidarr'
tags:
- 'media'
- 'lidarr'
address: "%{facts.networking.ip}"
port: 443
checks:
- id: 'lidarr_http_check'
name: 'Lidarr HTTP Check'
http: "https://%{facts.networking.fqdn}:443/consul/health"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: lidarr
disposition: write

View File

@ -1,2 +0,0 @@
---
ldap_bindpass: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAPomn4iZbT0JEysvDo7OgblpoQLFp9DzryY558UfVWQq6HDAkgoSC42cbgZGBPFclCgLaO/LfBrFpRXkafEVV33Vg2AmP/FiS9SmmwREc3t/ZTvENlDIgasY3pDIph0/i5u0S45mjyzzciBK0KY6cMZvPDVRvU+d0SyVnbSBlef6VmyZOhUk6ILpaYTGu+suVR/BAL/DTKsmmY7iTotTWN+IW/1cY3vprvBMJQVftaO1WSqKftmX29/PAsxbQo6AMpuQFx/dMcMe3d5JTB0mgzIhAFaKmSC8vJFqe21Nrr8F+PxJMSEl1saBJTwJc5RyPVm9ejVKfcPhDfWK5stNNvjBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBAo205Hvo/Z+rhnSGgkTS2YgDB7pTHdgnQz1UOK323DRljWcqx+SnCA7izyF1SNMlzlCck79Fr4zKh0qnbYsMZDWZU=]

View File

@ -1,87 +0,0 @@
---
hiera_include:
- nzbget
- profiles::media::nzbget
- profiles::nginx::ldapauth
- exporters::nzbget_exporter
profiles::packages::include:
unrar: {}
# manage nzbget
nzbget::params::user: nzbget
nzbget::params::group: media
nzbget::params::manage_group: false
# nzbget_exporter
exporters::nzbget_exporter::enable: true
# additional altnames
profiles::pki::vault::alt_names:
- nzbget.main.unkin.net
- nzbget.service.consul
- nzbget.query.consul
- "nzbget.service.%{facts.country}-%{facts.region}.consul"
# manage a simple nginx reverse proxy
profiles::nginx::simpleproxy::nginx_vhost: 'nzbget.query.consul'
profiles::nginx::simpleproxy::nginx_aliases:
- nzbget.main.unkin.net
- nzbget.service.consul
- nzbget.query.consul
- "nzbget.service.%{facts.country}-%{facts.region}.consul"
profiles::nginx::simpleproxy::proxy_port: 6789
profiles::nginx::simpleproxy::proxy_host: 127.0.0.1
profiles::nginx::simpleproxy::proxy_path: '/'
profiles::nginx::simpleproxy::use_default_location: false
nginx::client_max_body_size: 20M
ldap_binddn: 'cn=svc_nzbget,ou=services,ou=users,dc=main,dc=unkin,dc=net'
ldap_template: '(&(uid=%(username)s)(memberOf=ou=nzbget_access,ou=groups,dc=main,dc=unkin,dc=net))'
profiles::nginx::simpleproxy::locations:
arrstack_web_healthcheck:
location_cfg_append:
rewrite: '/consul/health / break'
# configure consul service
consul::services:
nzbget:
service_name: 'nzbget'
tags:
- 'media'
- 'nzbget'
address: "%{facts.networking.ip}"
port: 443
checks:
- id: 'nzbget_http_check'
name: 'nzbget HTTP Check'
http: "https://%{facts.networking.fqdn}:443/consul/health"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: nzbget
disposition: write
- resource: service
segment: nzbget_exporter
disposition: write
profiles::yum::global::repos:
rpmfusion-free:
name: rpmfusion-free
descr: rpmfusion-free repository
target: /etc/yum.repos.d/rpmfusion.repo
baseurl: https://download1.rpmfusion.org/free/el/updates/%{facts.os.release.major}/%{facts.os.architecture}
gpgkey: https://download1.rpmfusion.org/free/el/RPM-GPG-KEY-rpmfusion-free-el-%{facts.os.release.major}
mirrorlist: absent
rpmfusion-nonfree:
name: rpmfusion-nonfree
descr: rpmfusion-nonfree repository
target: /etc/yum.repos.d/rpmfusion.repo
baseurl: https://download1.rpmfusion.org/nonfree/el/updates/%{facts.os.release.major}/%{facts.os.architecture}
gpgkey: https://download1.rpmfusion.org/nonfree/el/RPM-GPG-KEY-rpmfusion-nonfree-el-%{facts.os.release.major}
mirrorlist: absent
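
The partial arrstack_web_healthcheck entry above only carries a location_cfg_append rewrite, which only works if the profile deep-merges it with the full location definition from the shared arrstack hieradata. A sketch of the lookup that implies (merge strategy assumed):

# Deep merge across the hierarchy: this host's rewrite is layered on top of the
# proxy settings defined in the role-level locations hash.
$locations = lookup('profiles::nginx::simpleproxy::locations', Hash, 'deep', {})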

View File

@ -1,3 +0,0 @@
---
prowlarr::api_key: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAdAzvi5Z2cX7KWdMlMfR5N+Jz9Pmh3k9yvPgM1JnTM8ZODs5VyQf/d3goWJ5Fn+jcjVqQ+aBga2CHfbdjgg5dGC19Jr8CmxVkYpMVb+e6Md4LEglUD6g70LK8JHB1FAM0fqW82/zqBL73KFKcu71Hpbf9YylJD4LXCr/k4D7hPX3tgEOzFn1iGl/DqxJFWnorj0btk3/2AmA3AMjvFy4r39PwbMfr2jNFSmAdJa7j7W+ESyE08Cc795VORIa/lbrT0ZfBMGXqzNTIpcdJ7uabcrH0qHNM8FPh4eHBzGMqLvIba487bs2TUb8eIivwT2EAwmGDWX1QkG2o6lGyO8PyqzBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBBO8BQpHvHYOA2tjyxpjGw4gDATwt1wP0aPFPnbRoqPdwClfOzbWmtbT/rCBmCQH0HkyA8sqr2I2qlOsuJukCjBDHo=]
ldap_bindpass: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAhduPAqoZuq/xeRs4f/KX4r88evPMogQX79yofLAB5Qqdr48s2X0BAa1iiw0vMdL6Tf0uc794WJN5MP2Yp365Vk1yhwgqH92rt5hKPI+wBN5uak2iLgLzLWsp0HOx7d1ukDWBbj0lI6G5LiofsL3KJbbTnkovn06L4PRJXgn44+ynfywiCl2tPy2294DhfooeM6/Cy+t9lA6blzHLCOHtt/rBKmk1GT2y3YBCPhRfOumWXQWnv4Q+f6KkQkvpfPyAFYNiQxQYBv5bGwLnwiDk3xQnPM4FfcutVuAOKjsoeMa+K1KShDFyEfBxIER8JSpigj2/khstyihcVW0Xrod3uDBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCRqqRwMThwn1F/6byFhTWxgDAfucfkFhmqxBv/u5H+wWnjvK5EH7eU/fECrajYPBW/cmsYjLgXlwrAzFGqWze3AZc=]

View File

@ -1,76 +0,0 @@
---
hiera_include:
- prowlarr
- profiles::nginx::ldapauth
- profiles::media::prowlarr
- exporters::exportarr
# manage prowlarr
prowlarr::params::user: prowlarr
prowlarr::params::group: media
prowlarr::params::manage_group: false
prowlarr::params::archive_version: 1.19.0
prowlarr::params::port: 8000
# exportarr
exporters::exportarr::enable: true
exporters::exportarr::app: prowlarr
exporters::exportarr::api_key: "%{hiera('prowlarr::api_key')}"
exporters::exportarr::backfill: true
# additional altnames
profiles::pki::vault::alt_names:
- prowlarr.main.unkin.net
- prowlarr.service.consul
- prowlarr.query.consul
- "prowlarr.service.%{facts.country}-%{facts.region}.consul"
# manage a simple nginx reverse proxy
profiles::nginx::simpleproxy::nginx_vhost: 'prowlarr.query.consul'
profiles::nginx::simpleproxy::nginx_aliases:
- prowlarr.main.unkin.net
- prowlarr.service.consul
- prowlarr.query.consul
- "prowlarr.service.%{facts.country}-%{facts.region}.consul"
profiles::nginx::simpleproxy::proxy_port: 8000
profiles::nginx::simpleproxy::proxy_host: 127.0.0.1
profiles::nginx::simpleproxy::proxy_path: '/'
profiles::nginx::simpleproxy::use_default_location: false
nginx::client_max_body_size: 20M
ldap_binddn: 'cn=svc_prowlarr,ou=services,ou=users,dc=main,dc=unkin,dc=net'
ldap_template: '(&(uid=%(username)s)(memberOf=ou=prowlarr_access,ou=groups,dc=main,dc=unkin,dc=net))'
# configure consul service
consul::services:
prowlarr:
service_name: 'prowlarr'
tags:
- 'media'
- 'prowlarr'
address: "%{facts.networking.ip}"
port: 443
checks:
- id: 'prowlarr_http_check'
name: 'Prowlarr HTTP Check'
http: "https://%{facts.networking.fqdn}:443/consul/health"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: prowlarr
disposition: write
- resource: service
segment: exportarr
disposition: write
profiles::nginx::simpleproxy::locations:
arrstack_web_external:
location_satisfy: any
location_allow:
- 198.18.26.161
- 198.18.27.131
- 198.18.28.165
- 198.18.29.32

View File

@ -1,3 +0,0 @@
---
radarr::api_key: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEALtNnNr2N7DpP9zx5anmQavFmsTLIyPkpJGCkJpUTHMYFSScS/3FOUuufajk4Cmu4FbPswp/N/U1nHO8oLF6xNQ+H77+xXuKPalW/3R1IRqGoczwsAfstJ6nYF+PLjjeK2TDP+KMs3Eg2+nrXB7NOVOP88RvDLyZq93Wn9qR+1VG6Y2gLqGSJArZpNilV5ygUYRgbMeckjqfLynYBXtgDQQLYNhxDO6WGRRv+0X773nmOdrWFAUjqF6/K+Ejjk5ZbaqnGyjljMstSrhg7NWxtMRbCjeMpjUjUS4Hn/Vayg2M2Ag2s87gsE1e4QFa6KP7GVRu3swvyZ3D54Ba/xrebxzBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBDD6gIEfNGPXA8zv/vysgxJgDADMi7Fx5q+aqTMeqcKLg1AukTlCnJ62zykm6RNGdS0KlpJsvTSmWF4So3v/9BsKdk=]
ldap_bindpass: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAf6LbAXy3EsJqkf4KO8xikpXfSVIVOD2obnwZLPEiCfbC0iJyZ2Uc/RbHM9NnIpg8dgEnYGgaBM9HC+kFKYFP4skLsPQ0mpqOv13WbOAHOphhyBxWc/n4Eg4HqMAiHsC4qA9Sj5j4F29tNiMmpNM0eqDXD6YIrajV5IF+SKQQzZrNSrcDrzjqENaMnJWVc2gj7Zb9kPB1LdM5ZfljpPtfQHXHhh9ShTDKuWMx46nc+UYaRcJqHEns7OExwpsEAtA1SSunD81PTijJ6Bn1Y6cAsZoBot5YgFGaWtgQ4jvEqj7EbG8gDJuHMSwa9RzA9SMsVwEh2g1pko7wYbBR/arV7jBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBAhZlVaEdaLNh60gAY0KwEKgDAHPoXfFRq0I2K2J+NFgNFm/A00jYGVRfuPsX6VZfPaJ499dtMiJifrK3Kfk1oXpvk=]

View File

@ -1,66 +0,0 @@
---
hiera_include:
- radarr
- profiles::nginx::ldapauth
- profiles::media::radarr
- exporters::exportarr
# manage radarr
radarr::params::user: radarr
radarr::params::group: media
radarr::params::manage_group: false
radarr::params::archive_version: 5.7.0
radarr::params::port: 8000
# exportarr
exporters::exportarr::enable: true
exporters::exportarr::app: radarr
exporters::exportarr::api_key: "%{hiera('radarr::api_key')}"
# additional altnames
profiles::pki::vault::alt_names:
- radarr.main.unkin.net
- radarr.service.consul
- radarr.query.consul
- "radarr.service.%{facts.country}-%{facts.region}.consul"
# manage a simple nginx reverse proxy
profiles::nginx::simpleproxy::nginx_vhost: 'radarr.query.consul'
profiles::nginx::simpleproxy::nginx_aliases:
- radarr.main.unkin.net
- radarr.service.consul
- radarr.query.consul
- "radarr.service.%{facts.country}-%{facts.region}.consul"
profiles::nginx::simpleproxy::proxy_port: 8000
profiles::nginx::simpleproxy::proxy_host: 127.0.0.1
profiles::nginx::simpleproxy::proxy_path: '/'
profiles::nginx::simpleproxy::use_default_location: false
nginx::client_max_body_size: 20M
ldap_binddn: 'cn=svc_radarr,ou=services,ou=users,dc=main,dc=unkin,dc=net'
ldap_template: '(&(uid=%(username)s)(memberOf=ou=radarr_access,ou=groups,dc=main,dc=unkin,dc=net))'
# configure consul service
consul::services:
radarr:
service_name: 'radarr'
tags:
- 'media'
- 'radarr'
address: "%{facts.networking.ip}"
port: 443
checks:
- id: 'radarr_http_check'
name: 'radarr HTTP Check'
http: "https://%{facts.networking.fqdn}:443/consul/health"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: radarr
disposition: write
- resource: service
segment: exportarr
disposition: write

View File

@ -1,3 +0,0 @@
---
readarr::api_key: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAlJ5RLp6pVTGQgtbzO5cQSrHBMg80S1ImFprHDeWC3GPN2KbheM80b1FKxvN+oVUJ8/kfiV6zstLOoYPUJQfmJNa/Xe95W/5+9hH2IS/oQ0yVdfLOjRq//qp+mVvSJ7JrtOyYSIrU3HjxaD+eXTPYp4UEJKfdSmGyDr7XuCOVIZe0Lu7OHczs8VKrowN99RJZ589HoMqrqCZWPlx14l/uNFjYdK/w6VcUWoo9y/5z1jtsNIObV8kSAYQQLwSr3tmjJdEE3au4sjeMOOJDpGcd5aJRWpKp12+8oHdVR5BV5326aCb13tkp6Td0jq/W9J2Jyv05vUdpP3PnVH9mHPDh6TBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBDA+2mMNGwfYM+mRoVTQiZMgDBanhVFmpYe42vZgMBKpNcNRjTnoCl27RpxD3KnjYwkE1zw/NeEOLoSZ1Try3GrlaA=]
ldap_bindpass: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAcOegaqGEsivQAlhSYvaiVUij4QJ/kterSg+wX/7P/oWjSN1oSNAFfco6lg5fYUsR7HDKZ4IwYuO1Q/q8hOxYkqzYrH/MIAhZatfQxyxriKuekxqOMuXgwrWCzAexQL0Fb4s5gcHQ4fwy5OxsM1CxFXnSSm1eYNXl5IERd//c0dFoIcshiGlOCFsj8Ne9mookFTJQDZrxM4VMXaVb+Fl9mOyy1ppDBKHTP/1ise/6LIUi+9YngamAWLIrsur5KvR45kvRoxDNLfJZasAqhD/5QLceDdSxqKBDur57QzmoPo2lFdT4WlzphKOdyHZtYOYr7BPdbtCqkXTWdkXvqFlRxjBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCXhYe6zoRW7/OxVrZnAEoTgDDPFTz5S4nZWiwzjdT7Yd88Ii6I/v6ckaKTx0gd0pZKsVZkFYQBBhIfqFS2ho0UG3Y=]

View File

@ -1,57 +0,0 @@
---
hiera_include:
- readarr
- profiles::nginx::ldapauth
- profiles::media::readarr
# manage readarr
readarr::params::user: readarr
readarr::params::group: media
readarr::params::manage_group: false
readarr::params::archive_version: 0.3.28
readarr::params::port: 8000
# additional altnames
profiles::pki::vault::alt_names:
- readarr.main.unkin.net
- readarr.service.consul
- readarr.query.consul
- "readarr.service.%{facts.country}-%{facts.region}.consul"
# manage a simple nginx reverse proxy
profiles::nginx::simpleproxy::nginx_vhost: 'readarr.query.consul'
profiles::nginx::simpleproxy::nginx_aliases:
- readarr.main.unkin.net
- readarr.service.consul
- readarr.query.consul
- "readarr.service.%{facts.country}-%{facts.region}.consul"
profiles::nginx::simpleproxy::proxy_port: 8000
profiles::nginx::simpleproxy::proxy_host: 127.0.0.1
profiles::nginx::simpleproxy::proxy_path: '/'
profiles::nginx::simpleproxy::use_default_location: false
nginx::client_max_body_size: 20M
ldap_binddn: 'cn=svc_readarr,ou=services,ou=users,dc=main,dc=unkin,dc=net'
ldap_template: '(&(uid=%(username)s)(memberOf=ou=readarr_access,ou=groups,dc=main,dc=unkin,dc=net))'
# configure consul service
consul::services:
readarr:
service_name: 'readarr'
tags:
- 'media'
- 'readarr'
address: "%{facts.networking.ip}"
port: 443
checks:
- id: 'readarr_http_check'
name: 'Readarr HTTP Check'
http: "https://%{facts.networking.fqdn}:443/consul/health"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: readarr
disposition: write

View File

@ -1,2 +0,0 @@
sonarr::api_key: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEANhwc0Vk0htkacwqGA/ZEVR7hpC1V2+nyP3lxyqkVNWERdcIsoMjlfwp2QNoLVirALY10Kon1wKXiRLT+QOqF9aTapS2Vb2YH3ZujR5yT6T1z4e0o4EA3IlNJZemVIziIqrK8+8zZzVafYdOYwMwpbc5EzoJPBtdqNHbDaQu4bRTCp2yMISTOzFjZpOoEJlRyC8YrffNU2xzA5mh5Cw+00MdIfPd8enrCnA4b1ddqP/IsfEYRt91ANQBULwKC5wenKdJAN5qKSKW6KU9TUM5YvGvUPgTvcYgXNf3/INL6G3/HwISTE5A7S6lqwYLyRTT1fMHtgDIqS936DPqCdAZtzjBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBBZ4e3jAM0zvV43IrCzQOGIgDBwOWgm+wJG+jAU8r1awWDvzQXgF3h65nAKfoOuGEztsjeBEruU4EmZy6llLbfSSb8=]
ldap_bindpass: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAF5/sP43SJX/FB6cAS0GPqxC78JS7jSNKoUqWB/IXkv8uXYiClqk+Xw4nFx8EtknNn628DHBY3vCLQ59Xk89p0fyimP70m3BM6or5iRGdCqEAOzL399GbYX8WFHjyQRBmGdaLR5h2r5UnmjuPpDtV+fgsqxo4gNpXnuQ+46ZZPQce/dzHzux+4aEK4Q6UbD/ZZSQklD6zEUV+Agj6E9cQlJPBiHtTyUdXOYHYlJN1HhFPXu3C6KIz63YioVCah1n6T/rJtPQ07pVUfizIaJiPpzcUN91+ZWyyjUPo0bRGZtYKVs/uCAYXht4F5ttKQDaGa3wd4a5IdtacmEWQWFqk3TBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBDtB/68xkjxK3nrNh0MilACgDDt85aBvSp/Oj9EY67eNPr+JcnQ7WfyuqAYAnmYqfbQI8MDtYAx7br0+inJXvQ1BvI=]

View File

@ -1,66 +0,0 @@
---
hiera_include:
- sonarr
- profiles::nginx::ldapauth
- profiles::media::sonarr
- exporters::exportarr
# manage sonarr
sonarr::params::user: sonarr
sonarr::params::group: media
sonarr::params::manage_group: false
sonarr::params::archive_version: 4.0.5
sonarr::params::port: 8000
# exportarr
exporters::exportarr::enable: true
exporters::exportarr::app: sonarr
exporters::exportarr::api_key: "%{hiera('sonarr::api_key')}"
# additional altnames
profiles::pki::vault::alt_names:
- sonarr.main.unkin.net
- sonarr.service.consul
- sonarr.query.consul
- "sonarr.service.%{facts.country}-%{facts.region}.consul"
# manage a simple nginx reverse proxy
profiles::nginx::simpleproxy::nginx_vhost: 'sonarr.query.consul'
profiles::nginx::simpleproxy::nginx_aliases:
- sonarr.main.unkin.net
- sonarr.service.consul
- sonarr.query.consul
- "sonarr.service.%{facts.country}-%{facts.region}.consul"
profiles::nginx::simpleproxy::proxy_port: 8000
profiles::nginx::simpleproxy::proxy_host: 127.0.0.1
profiles::nginx::simpleproxy::proxy_path: '/'
profiles::nginx::simpleproxy::use_default_location: false
nginx::client_max_body_size: 20M
ldap_binddn: 'cn=svc_sonarr,ou=services,ou=users,dc=main,dc=unkin,dc=net'
ldap_template: '(&(uid=%(username)s)(memberOf=ou=sonarr_access,ou=groups,dc=main,dc=unkin,dc=net))'
# configure consul service
consul::services:
sonarr:
service_name: 'sonarr'
tags:
- 'media'
- 'sonarr'
address: "%{facts.networking.ip}"
port: 443
checks:
- id: 'sonarr_http_check'
name: 'Sonarr HTTP Check'
http: "https://%{facts.networking.fqdn}:443/consul/health"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: sonarr
disposition: write
- resource: service
segment: exportarr
disposition: write

View File

@ -1,60 +0,0 @@
---
hiera_include:
- frrouting
# networking
systemd::manage_networkd: true
systemd::manage_all_network_files: true
networking::interfaces:
eth0:
type: physical
forwarding: true
dhcp: true
loopback0:
type: dummy
ipaddress: "%{hiera('networking_loopback0_ip')}" # ceph public network
netmask: 255.255.255.255
mtu: 1500
# frrouting
frrouting::ospfd_router_id: "%{facts.networking.ip}"
frrouting::ospfd_redistribute:
- connected
frrouting::ospfd_interfaces:
eth0:
area: 0.0.0.0
loopback0:
area: 0.0.0.0
frrouting::daemons:
ospfd: true
# additional repos
profiles::yum::global::repos:
ceph:
name: ceph
descr: ceph repository
target: /etc/yum.repos.d/ceph.repo
baseurl: https://edgecache.query.consul/ceph/yum/el%{facts.os.release.major}/%{facts.os.architecture}
gpgkey: https://download.ceph.com/keys/release.asc
mirrorlist: absent
ceph-noarch:
name: ceph-noarch
descr: ceph-noarch repository
target: /etc/yum.repos.d/ceph-noarch.repo
baseurl: https://edgecache.query.consul/ceph/yum/el%{facts.os.release.major}/noarch
gpgkey: https://download.ceph.com/keys/release.asc
mirrorlist: absent
frr-extras:
name: frr-extras
descr: frr-extras repository
target: /etc/yum.repos.d/frr-extras.repo
baseurl: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
frr-stable:
name: frr-stable
descr: frr-stable repository
target: /etc/yum.repos.d/frr-stable.repo
baseurl: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent

View File

@ -1,6 +1,5 @@
---
profiles::packages::include:
policycoreutils: {}
profiles::packages::install:
- policycoreutils
puppetdb::master::config::create_puppet_service_resource: false
#puppetdb::master::config::puppetdb_host: "%{lookup('profiles::puppet::puppetdb::puppetdb_host')}"

View File

@ -1,369 +0,0 @@
---
hiera_include:
- glauth
# additional altnames
profiles::pki::vault::alt_names:
- ldap.main.unkin.net
- ldap.service.consul
- ldap.query.consul
- "ldap.service.%{facts.country}-%{facts.region}.consul"
glauth::params::download_version: 2.3.2
glauth::params::ldap_enabled: true
glauth::params::ldaps_enabled: true
glauth::params::basedn: 'dc=main,dc=unkin,dc=net'
glauth::params::behaviors_ignorecapabilities: true
glauth::params::ldap_tlscertpath: /etc/pki/tls/vault/certificate.crt
glauth::params::ldap_tlskeypath: /etc/pki/tls/vault/private.key
glauth::params::ldaps_cert: /etc/pki/tls/vault/certificate.crt
glauth::params::ldaps_key: /etc/pki/tls/vault/private.key
glauth::params::api_cert: /etc/pki/tls/vault/certificate.crt
glauth::params::api_key: /etc/pki/tls/vault/private.key
# configure consul service
consul::services:
ldap:
service_name: 'ldap'
tags:
- 'media'
- 'ldap'
address: "%{facts.networking.ip}"
port: 636
checks:
- id: 'glauth_http_check'
name: 'glauth HTTP Check'
http: "https://%{facts.networking.fqdn}:5555"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: ldap
disposition: write
glauth::users:
benvin:
user_name: 'benvin'
givenname: 'Ben'
sn: 'Vincent'
mail: 'benvin@users.main.unkin.net'
uidnumber: 20000
primarygroup: 20000
othergroups:
- 20010
- 20011
- 20012
- 20013
- 20014
- 20015
- 20016
- 20017
- 20018
- 20023
- 20024
- 20025 # jupyterhub_admin
- 20026 # jupyterhub_user
- 20027 # grafana_user
loginshell: '/bin/bash'
homedir: '/home/benvin'
passsha256: 'd2434f6b4764ef75d5b7b96a876a32deedbd6aa726a109c3f32e823ca66f604a'
sshkeys:
- 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDZ8SRLlPiDylBpdWR9LpvPg4fDVD+DZst4yRPFwMMhta4mnB1H9XuvZkptDhXywWQ7QIcqa2WbhCen0OQJCtwn3s7EYtacmF5MxmwBYocPoK2AArGuh6NA9rwTdLrPdzhZ+gwe88PAzRLNzjm0ZBR+mA9saMbPJdqpKp0AWeAM8QofRQAWuCzQg9i0Pn1KDMvVDRHCZof4pVlHSTyHNektq4ifovn0zhKC8jD/cYu95mc5ftBbORexpGiQWwQ3HZw1IBe0ZETB1qPIPwsoJpt3suvMrL6T2//fcIIUE3TcyJKb/yhztja4TZs5jT8370G/vhlT70He0YPxqHub8ZfBv0khlkY93VBWYpNGJwM1fVqlw7XbfBNdOuJivJac8eW317ZdiDnKkBTxapThpPG3et9ib1HoPGKRsd/fICzNz16h2R3tddSdihTFL+bmTCa6Lo+5t5uRuFjQvhSLSgO2/gRAprc3scYOB4pY/lxOFfq3pU2VvSJtRgLNEYMUYKk= ben@unkin.net'
matsol:
user_name: 'matsol'
givenname: 'Matt'
sn: 'Solomon'
mail: 'matsol@users.main.unkin.net'
uidnumber: 20001
primarygroup: 20000
othergroups:
- 20010
- 20011
- 20012
- 20013
- 20014
- 20015
- 20016
- 20027 # grafana user
loginshell: '/bin/bash'
homedir: '/home/matsol'
passsha256: '369263e2455a57c8c21388860c417b640fcf045a303cfc88def18c5197493600'
seablo:
user_name: 'seablo'
givenname: 'Sean'
sn: 'Bloomfield'
mail: 'seablo@users.main.unkin.net'
uidnumber: 20002
primarygroup: 20000
othergroups:
- 20010 # jelly
- 20011 # sonarr
- 20012 # radarr
- 20013 # lidarr
- 20014 # readarr
- 20016 # nzbget
- 20027 # grafana user
loginshell: '/bin/bash'
homedir: '/home/seablo'
passsha256: '2db12484b2b5fdae7f3a1f9f870143c363af14bf2c31a415a9a7afcb02520df2'
marbal:
user_name: 'marbal'
givenname: 'Mark'
sn: 'Balch'
mail: 'marbal@users.main.unkin.net'
uidnumber: 20003
primarygroup: 20000
othergroups:
- 20010 # jelly
- 20011 # sonarr
- 20012 # radarr
- 20013 # lidarr
- 20014 # readarr
- 20016 # nzbget
- 20027 # grafana user
loginshell: '/bin/bash'
homedir: '/home/marbal'
passsha256: 'cc20cee6269b9970a76549c66b51d0c543352796180d4122260a47f0f7a442a9'
kelren:
user_name: 'kelren'
givenname: 'Kelly'
sn: 'Rennie'
mail: 'kelren@users.main.unkin.net'
uidnumber: 20004
primarygroup: 20000
othergroups:
- 20010 # jelly
- 20011 # sonarr
- 20012 # radarr
- 20013 # lidarr
- 20014 # readarr
- 20016 # nzbget
- 20027 # grafana user
loginshell: '/bin/bash'
homedir: '/home/kelren'
passsha256: '5b01659bca1ecb27847d2f746fab03eb169879ebcc86547024753dac7cb184c4'
ryadun:
user_name: 'ryadun'
givenname: 'Ryan'
sn: 'Dunbar'
mail: 'ryadun@users.main.unkin.net'
uidnumber: 20005
primarygroup: 20000
othergroups:
- 20010 # jelly
- 20011 # sonarr
- 20012 # radarr
- 20013 # lidarr
- 20014 # readarr
- 20016 # nzbget
- 20027 # grafana user
loginshell: '/bin/bash'
homedir: '/home/ryadun'
passsha256: 'ee17174d49545f6f7257ae79eb173de4acf2b2edf55e181de90decd0e4b4e617'
margol:
user_name: 'margol'
givenname: 'Maree'
sn: 'Goldsworthy'
mail: 'margol@users.main.unkin.net'
uidnumber: 20006
primarygroup: 20000
othergroups:
- 20010 # jelly
- 20011 # sonarr
- 20012 # radarr
- 20013 # lidarr
- 20014 # readarr
- 20016 # nzbget
- 20027 # grafana user
loginshell: '/bin/bash'
homedir: '/home/margol'
passsha256: '31a66085fb7eaeb059e51d1376233db72b54f96a6c45947aafbb350c83e618ef'
sudobo:
user_name: 'sudobo'
givenname: 'Sudaporn'
sn: 'Obom'
mail: 'sudobo@users.main.unkin.net'
uidnumber: 20007
primarygroup: 20000
othergroups:
- 20010 # jelly
- 20011 # sonarr
- 20012 # radarr
- 20013 # lidarr
- 20014 # readarr
- 20016 # nzbget
- 20026 # jupyterhub_user
- 20027 # grafana user
loginshell: '/bin/bash'
homedir: '/home/sudobo'
passsha256: 'a326e049c2a615226877946220a978a0a8247c569be1adcd73539b09b14136d0'
waewak:
user_name: 'waewak'
givenname: 'Waew'
sn: 'Wakul'
mail: 'waewak@users.main.unkin.net'
uidnumber: 20008
primarygroup: 20000
othergroups:
- 20010 # jelly
loginshell: '/bin/bash'
homedir: '/home/waewak'
passsha256: 'd9bb99634215fe031c3bdca94149a165192fe8384ecaa238a19354c2f760a811'
debvin:
user_name: 'debvin'
givenname: 'Debbie'
sn: 'Vincent'
mail: 'debvin@users.main.unkin.net'
uidnumber: 20009
primarygroup: 20000
othergroups:
- 20010 # jelly
loginshell: '/bin/bash'
homedir: '/home/debvin'
passsha256: 'cdac05ddb02e665d4ea65a974995f38a10236bc158731d92d78f6cde89b294a1'
glauth::services:
svc_jellyfin:
service_name: 'svc_jellyfin'
mail: 'jellyfin@service.main.unkin.net'
uidnumber: 30000
primarygroup: 20001
passsha256: '97f7b1eb24deb0a86e812d79c56f4901d39a24128dc9f6fde033e7195f7d0739'
svc_sonarr:
service_name: 'svc_sonarr'
mail: 'sonarr@service.main.unkin.net'
uidnumber: 30001
primarygroup: 20001
passsha256: '2c32d4cb831183cfbef15835cc76f99b401d0159621bc580e852253d4d8f8722'
svc_radarr:
service_name: 'svc_radarr'
mail: 'radarr@service.main.unkin.net'
uidnumber: 30002
primarygroup: 20001
passsha256: '805b0182d90c2b5b3ba43e50988447a0bff0115eb5fedd8eeae8eac00ba53025'
svc_lidarr:
service_name: 'svc_lidarr'
mail: 'lidarr@service.main.unkin.net'
uidnumber: 30003
primarygroup: 20001
passsha256: '6d04cd2a45784bacbd50e6714710b55805c7e9886665a6d7790e6d8712b67aff'
svc_readarr:
service_name: 'svc_readarr'
mail: 'readarr@service.main.unkin.net'
uidnumber: 30004
primarygroup: 20001
passsha256: '751f22fbd9c052b2cd0c1cb4be514d8710f1a51f84ce44f607ab3a5591162f8c'
svc_prowlarr:
service_name: 'svc_prowlarr'
mail: 'prowlarr@service.main.unkin.net'
uidnumber: 30005
primarygroup: 20001
passsha256: 'd1e6bcc4a9f2d15b6e3c349155a88e433902dfe765e57bf3c10e6830f151a043'
svc_nzbget:
service_name: 'svc_nzbget'
mail: 'nzbget@service.main.unkin.net'
uidnumber: 30006
primarygroup: 20001
passsha256: 'c9d38f687fcbea754a9f78675d89276d2347f9d15190fff267c3ae1a75f61be6'
svc_nzbsubmit:
service_name: 'svc_nzbsubmit'
mail: 'nzbsubmit@service.main.unkin.net'
uidnumber: 30007
primarygroup: 20001
othergroups:
- 20016
passsha256: '7af7e12fdc56e9050d16c167f4e34091ad3cf938283e13451b35f9b3d212bfa2'
svc_rundeck:
service_name: 'svc_rundeck'
mail: 'rundeck@service.main.unkin.net'
uidnumber: 30007
primarygroup: 20001
passsha256: 'b27786b22c5938d24ffc9be049de366b055c9f054bf38fb73bbd6fba9e1bd525'
svc_terraform:
service_name: 'svc_terraform'
mail: 'terraform@service.main.unkin.net'
uidnumber: 30008
primarygroup: 20001
passsha256: 'b27786b22c5938d24ffc9be049de366b055c9f054bf38fb73bbd6fba9e1bd525'
svc_vault:
service_name: 'svc_vault'
mail: 'vault@service.main.unkin.net'
uidnumber: 30009
primarygroup: 20001
passsha256: 'd63b04884d5c7d630b0c06896046065a0926ac5c3d6177ef85320e5fa1be00b9'
svc_jupyterhub:
service_name: 'svc_jupyterhub'
mail: 'jupyterhub@service.main.unkin.net'
uidnumber: 30010
primarygroup: 20001
passsha256: '09db1e0c2498214da35f3f2ed46a90a7b90635c207f8725e7abf76b48345a39b'
svc_grafana:
service_name: 'svc_grafana'
mail: 'grafana@service.main.unkin.net'
uidnumber: 30011
primarygroup: 20001
passsha256: '1ca9db29c964011d7e535a5c1a5ea2caa6a4c674bd7242974d9f95b24f49f98a'
glauth::groups:
users:
group_name: 'people'
gidnumber: 20000
services:
group_name: 'services'
gidnumber: 20001
jellyfin_access:
group_name: 'jellyfin_access'
gidnumber: 20010
sonarr_access:
group_name: 'sonarr_access'
gidnumber: 20011
radarr_access:
group_name: 'radarr_access'
gidnumber: 20012
lidarr_access:
group_name: 'lidarr_access'
gidnumber: 20013
readarr_access:
group_name: 'readarr_access'
gidnumber: 20014
prowlarr_access:
group_name: 'prowlarr_access'
gidnumber: 20015
nzbget_access:
group_name: 'nzbget_access'
gidnumber: 20016
rundeck_access:
group_name: 'rundeck_access'
gidnumber: 20017
rundeck_globaladmin:
group_name: 'rundeck_globaladmin'
gidnumber: 20018
rundeck_selfservice_admin:
group_name: 'rundeck_selfservice_admin'
gidnumber: 20019
rundeck_selfservice_user:
group_name: 'rundeck_selfservice_user'
gidnumber: 20020
rundeck_infrastructure_admin:
group_name: 'rundeck_infrastructure_admin'
gidnumber: 20021
rundeck_infrastructure_user:
group_name: 'rundeck_infrastructure_user'
gidnumber: 20022
vault_access:
group_name: 'vault_access'
gidnumber: 20023
vault_admin:
group_name: 'vault_admin'
gidnumber: 20024
jupyterhub_admin:
group_name: 'jupyterhub_admin'
gidnumber: 20025
jupyterhub_user:
group_name: 'jupyterhub_user'
gidnumber: 20026
grafana_user:
group_name: 'grafana_user'
gidnumber: 20027
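
Illustrative only: glauth's passsha256 fields are plain SHA-256 hex digests of the account password, so a new entry can be generated in a one-off manifest with stdlib's sha256() (assuming the pinned stdlib release ships that function):

# Hypothetical password, for illustration only.
$digest = sha256('example-password')
notice("passsha256: ${digest}")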

File diff suppressed because one or more lines are too long

View File

@ -1,205 +0,0 @@
---
hiera_include:
- profiles::rundeck::server
- profiles::nginx::simpleproxy
hiera_exclude:
- profiles::accounts::rundeck
profiles::packages::exclude:
- jq
profiles::ssh::sign::principals:
- rundeck.main.unkin.net
- rundeck.service.consul
- rundeck.query.consul
# manage a simple nginx reverse proxy
profiles::nginx::simpleproxy::nginx_vhost: 'rundeck.query.consul'
profiles::nginx::simpleproxy::nginx_aliases:
- rundeck.main.unkin.net
- rundeck.service.consul
- rundeck.query.consul
- "rundeck.service.%{facts.country}-%{facts.region}.consul"
profiles::nginx::simpleproxy::proxy_port: 4440
profiles::nginx::simpleproxy::proxy_path: '/'
nginx::client_max_body_size: 20M
# additional altnames
profiles::pki::vault::alt_names:
- rundeck.main.unkin.net
- rundeck.service.consul
- rundeck.query.consul
- "rundeck.service.%{facts.country}-%{facts.region}.consul"
# configure consul service
consul::services:
rundeck:
service_name: 'rundeck'
tags:
- 'automation'
- 'rundeck'
address: "%{facts.networking.ip}"
port: 443
checks:
      - id: 'rundeck_http_check'
        name: 'Rundeck HTTP Check'
http: "http://%{facts.networking.fqdn}:4440"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: rundeck
disposition: write
profiles::rundeck::server::mysql_backend: true
profiles::rundeck::server::mysql_host: mariadb-prod.service.au-syd1.consul
profiles::rundeck::server::grails_server_url: https://rundeck.service.consul
profiles::rundeck::server::auth_config:
file:
auth_flag: 'sufficient'
jaas_config:
file: '/etc/rundeck/realm.properties'
realm_config:
admin_user: 'admin'
admin_password: "%{hiera('rundeck_admin_pass')}"
ldap:
jaas_config:
debug: 'true'
providerUrl: 'ldap://ldap.service.consul:389'
bindDn: 'cn=svc_rundeck,ou=services,ou=users,dc=main,dc=unkin,dc=net'
bindPassword: "%{hiera('ldap_bindpass')}"
authenticationMethod: 'simple'
forceBindingLogin: 'true'
userBaseDn: 'ou=people,ou=users,dc=main,dc=unkin,dc=net'
userRdnAttribute: 'uid'
userIdAttribute: 'uid'
userPasswordAttribute: 'userPassword'
userObjectClass: 'posixAccount'
roleBaseDn: 'ou=groups,dc=main,dc=unkin,dc=net'
roleNameAttribute: 'uid'
roleMemberAttribute: 'uniqueMember'
roleObjectClass: 'groupOfUniqueNames'
nestedGroups: 'true'
profiles::rundeck::server::key_storage_config:
- type: 'db'
path: 'keys'
- type: 'vault-storage'
path: 'vault'
config:
prefix: 'rundeck'
address: https://vault.service.consul:8200
storageBehaviour: 'vault'
secretBackend: rundeck
engineVersion: '2'
authBackend: approle
approleAuthMount: approle
approleId: "%{hiera('vault::roleid')}"
profiles::rundeck::server::cli_projects:
Self-Service:
update_method: 'set'
config:
project.description: 'self-service tasks'
project.disable.executions: 'false'
Infrastructure:
config:
project.description: 'infrastructure management'
project.disable.schedule: 'false'
profiles::rundeck::server::acl_policies:
global_admin_policy:
acl_policies:
- description: 'Global Admin, all access'
context:
application: "rundeck"
for:
project:
- allow: '*'
resource:
- allow: '*'
storage:
- allow: '*'
by:
- group: ['rundeck_globaladmin']
- description: 'Global Admin, all access'
context:
project: '.*'
for:
resource:
- allow: '*'
adhoc:
- allow: '*'
job:
- allow: '*'
node:
- allow: '*'
by:
- group: ['rundeck_globaladmin']
selfservice_admin_policy:
acl_policies:
- description: 'Admin, all access for Self-Service project'
context:
project: 'Self-Service'
for:
resource:
- allow: '*'
adhoc:
- allow: '*'
job:
- allow: '*'
node:
- allow: '*'
by:
        - group: ['rundeck_selfservice_admin']
selfservice_user_policy:
acl_policies:
- description: 'Users can execute tasks but not edit for Self-Service project'
context:
project: 'Self-Service'
for:
resource:
- allow: ['read']
adhoc:
- allow: ['run']
job:
- allow: ['read', 'run']
node:
- allow: ['read', 'run']
by:
        - group: ['rundeck_selfservice_user']
infrastructure_admin_policy:
acl_policies:
- description: 'Admin, all access for Infrastructure project'
context:
project: 'Infrastructure'
for:
resource:
- allow: '*'
adhoc:
- allow: '*'
job:
- allow: '*'
node:
- allow: '*'
by:
- group: ['rundeck_infrastructure_admin']
infrastructure_user_policy:
acl_policies:
- description: 'Users can execute tasks but not edit for Infrastructure project'
context:
project: 'Infrastructure'
for:
resource:
- allow: ['read']
adhoc:
- allow: ['run']
job:
- allow: ['read', 'run']
node:
- allow: ['read', 'run']
by:
- group: ['rundeck_infrastructure_user']

View File

@ -1,8 +0,0 @@
---
profiles::ceph::rgw::ceph_client_keys:
ausyd1nxvm2115: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAovOUUtTm/9nXWj6D+kVhmbZedVVkA5N80ULNYqISrv0A+KlXCx/V2sB56SpJ9eL6aPHdC+CKkrtrrdo1h2j2em7wTA5ghhbFVF0NIS7bbn0jzpl5YqbUrNxOtDSpjzX1aCGnyMvw69Lp+NcHwxIj+1XFgK6335138s9wbX3HmYF3jcsQkMqTzynELe1OQPWFXVKTjUfFFLdCQOFryp8UY8L9j/PpV6wd4w6p7R6eXhX21rjSaN4aqN1zjsnF2OVhL8Ge0QxMhePWKGOqsUfi72kh3II028DjqU0DcZQvoxnoqRPyUUjysH0nTKoLeXOGNgJdphY1dHBJ+SJnw2gqQDBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBB9wA3ZJjmU95W+9r8va5uagDAMAv2APfdzrPnAU7NJPL+IW08osGuQWWamqF+XGVeHRoBmoFKwZ7grYRV2e3aabyc=]
ausyd1nxvm2116: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAf5ksy/pyUZSwTh+HiKw+1Uhj16A0DVZEKAbkKUQzXVmc+QpL4Dn7YBoXlEwrY8CcsTrxTQjADvtu9FC3o34QIdh06noSgYYA+7fna2A+9+oYoNtgwC3b8LeglxO/SQ9dKoJ90jRtmlw5P/CtrxA2RelMK6FNRekp1CaWMM4q20fJGgr/E33vgx38UJyp4/q0bTu2lLehCuDUP80j3XGbSNZ2snfYdIo91Cl+nSxLSU2TdnFpWaabsH19HwDnkWGiILlLBVvvhY7copCxs5DS1ueoOTCsqnWSrTrBMJjnu7WZd/s4NLw/0q/UP5xcFA51caY3Kv+sI6bfIYkNoLazwDBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBDrCF16nrtukRPamx1VbGY+gDAK5dw0kV8MpATpwxTkJG6JtlFlwdpU9THs1bNwqSRD1ZhEWxQeWwsyyTtjUXi4bP8=]
ausyd1nxvm2117: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAKtvsgDk2QTmL8flBBE4nA43sRSnroq4I6T2CUAYv/lRdzCCWvE961o/51zyQEz8L5QyDo7L3gcGYIBqthYjRe9Gp5a5d4qds0qskupgQKnb0KR2RLFVUtH5vxHqyJZHjXaP+PQreyRoSIfRWXAdoZu544FeJ9DKKbmEzZaH5B2OdDMrf4Ufuud0maEAw0PJthS//ghCfGi74F1xlJnIWVvMhp66b0iMxC+ACClEHunG3oKx7M/w05HllG0wcxPTg4PFrbnFXjRuIxsykF9aVHJkRnCdgbMXRM4o6FrYyZRR74F1HKRujFCUA7kYWDKLxHxJpYCvCHp4HMhfzjs824zBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBBbaSig6kgVVNfmSI53sNimgDDQ5O10Dzfa7S7RdJVLLUFBaZ5TG2g3Bwmy0k3wKZvABYMuYyOxQdfk6eMsKC+sC5w=]
ausyd1nxvm2118: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAdKl0ude3ZxW0ihyA758mpQp43qZvzTI2Bp83WzCp2ifJCgAIjRWdd01P64rSHaa4lus/wqB9jxg38g6DrN4ejX56Y/CJ6GQKxz6b1BO5nDfsLx6QEzCt+cfg5d/PPoTtEpz2VSvDfxFUrHiABA6++Sqzb9Og+nQCFMYJD3NHCk67QpkjPGQ/ejZk4MNXZQVCfKOlFqay/fF0jEmQixFOlX/Fdm9UoKttbrKluUmzpaVUzfGRaTTFVgzc3x2t/z1q1k0P7ClI9Uu02kUXpFVs9LPX99Zc2GtrnP06mYqqARhWF1NMK0zlmxtKpfObahRP/HmtI3fgnQsU1Cpwah0emTBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBAILqpYx3FKY3xXLJRu2oDlgDCIOXeX6hxpu0qpj5c/9jMUSeV2DIydnxO+MiT3mceS50ip8B+zGQy5UedPmLt36Zs=]
ausyd1nxvm2119: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEASlZglUxazp+9azfV3QkgRv+ACo+MH0RO5b18blbelgdmr38iwK7MwwEFpEfVJEyc/ph9RunWwrMmofDQHj5bBribfzZ2pH2CGiOrR0i5lZMtN0yQXPBA/+jm1Pi1AWGJLtoquuhMbibuHOTiXwBCBVrHHHaFTR5Xt34ABN/p/mCaG+N9nWux93msHCCextCalKBMmPhmI2q6HodfjanEVgYAe3/5hRPnpsi6IGSDNGygsTC3MG+hjGMpNF8izbwk9Lpzn6kY51aeNxI2ed9Jm8UZ/k+8b+o7ZQyWIBbf7DTFpEzk4G46puaDbXIorBWQ4azCjN3gt8VB91hwihtzcDBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBB6U+9z4cSzMTA1z9bmoX82gDBfy5zbRPK8GxImJo6evecMOTtaY2c4aEnESXtGBCS02enmxljv9dv1UYQD0/a6S3A=]

View File

@ -1,60 +0,0 @@
---
hiera_include:
- profiles::ceph::rgw
- profiles::nginx::simpleproxy
profiles::ceph::rgw::enable: true
# FIXME: puppet-python tries to manage python-dev, which is required by the ceph package
python::manage_dev_package: false
# additional altnames
profiles::pki::vault::alt_names:
- radosgw.main.unkin.net
- radosgw.service.consul
- radosgw.query.consul
- "radosgw.service.%{facts.country}-%{facts.region}.consul"
# additional repos
profiles::yum::global::repos:
ceph:
name: ceph
descr: ceph repository
target: /etc/yum.repos.d/ceph.repo
baseurl: https://edgecache.query.consul/ceph/yum/el%{facts.os.release.major}/%{facts.os.architecture}
gpgkey: https://download.ceph.com/keys/release.asc
mirrorlist: absent
ceph-noarch:
name: ceph-noarch
descr: ceph-noarch repository
target: /etc/yum.repos.d/ceph-noarch.repo
baseurl: https://edgecache.query.consul/ceph/yum/el%{facts.os.release.major}/noarch
gpgkey: https://download.ceph.com/keys/release.asc
mirrorlist: absent
# manage a simple nginx reverse proxy
profiles::nginx::simpleproxy::nginx_vhost: 'radosgw.service.consul'
profiles::nginx::simpleproxy::nginx_aliases:
- radosgw.service.au-syd1.consul
profiles::nginx::simpleproxy::proxy_port: 7480
profiles::nginx::simpleproxy::proxy_path: '/'
nginx::client_max_body_size: 5000M
# manage consul service
consul::services:
radosgw:
service_name: 'radosgw'
address: "%{facts.networking.ip}"
port: 443
checks:
- id: 'radosgw_https_check'
name: 'RADOSGW HTTPS Check'
http: "https://%{facts.networking.fqdn}:443"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: radosgw
disposition: write

View File

@ -1,78 +1,21 @@
---
hiera_include:
- profiles::selinux::setenforce
- frrouting
- exporters::frr_exporter
profiles::packages::include:
cobbler: {}
cobbler3.2-web: {}
httpd: {}
syslinux: {}
dnf-plugins-core: {}
debmirror: {}
pykickstart: {}
fence-agents: {}
selinux-policy-devel: {}
ipxe-bootimgs: {}
unkin-undionly-kpxe: {}
profiles::packages::install:
- cobbler
- cobbler3.2-web
- httpd
- syslinux
- dnf-plugins-core
- debmirror
- pykickstart
- fence-agents
- selinux-policy-devel
- ipxe-bootimgs
profiles::pki::vault::alt_names:
- cobbler.main.unkin.net
# networking
anycast_ip: 198.18.19.19
systemd::manage_networkd: true
systemd::manage_all_network_files: true
networking::interfaces:
eth0:
type: physical
forwarding: true
dhcp: true
anycast0:
type: dummy
ipaddress: "%{hiera('anycast_ip')}"
netmask: 255.255.255.255
mtu: 1500
# frrouting
exporters::frr_exporter::enable: true
frrouting::ospfd_router_id: "%{facts.networking.ip}"
frrouting::ospfd_redistribute:
- connected
frrouting::ospfd_interfaces:
eth0:
area: 0.0.0.0
anycast0:
area: 0.0.0.0
frrouting::daemons:
ospfd: true
# consul
profiles::consul::client::node_rules:
- resource: service
segment: frr_exporter
disposition: write
# additional repos
profiles::yum::global::repos:
frr-extras:
name: frr-extras
descr: frr-extras repository
target: /etc/yum.repos.d/frr-extras.repo
baseurl: https://packagerepo.service.consul/frr/el%{facts.os.release.major}/extras-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el%{facts.os.release.major}/extras-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
frr-stable:
name: frr-stable
descr: frr-stable repository
target: /etc/yum.repos.d/frr-stable.repo
baseurl: https://packagerepo.service.consul/frr/el%{facts.os.release.major}/stable-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el%{facts.os.release.major}/stable-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
# cobbler settings
profiles::cobbler::params::service_cname: 'cobbler.main.unkin.net'
profiles::cobbler::params::next_server: "%{hiera('anycast_ip')}"
profiles::cobbler::params::server: "%{hiera('anycast_ip')}"
profiles::selinux::setenforce::mode: permissive
hiera_classes:
- profiles::selinux::setenforce

View File

@ -1,2 +0,0 @@
---
redisha::masterauth: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAV8znsSGAbPpPUhAcyOIWFltVyAcx3yVcIvC+JFndkkuVBT1813GSURrXIreXSilvJEHlwRC03A9NhjWJSsHBIS12uUb+7ap95oh2JJ7OHmeWSVD1GDDRpTQAgDEOikAnioRNJfQ83jUa11nJrsavt46hSq8vDq+rZ2P8ugiNk59mNX5vgYthCPXcEJd7UmpLZhgxZ8+42l4TKo7QpqKRcIMteJXk1NvyYfYGnGTZhknuyHM3xPGauGjKzamMlTzD9dGnn2K0/Q7I4PUyT24ZEG3kNVyDlVHTYcKnIT5q8qvmJdDQfZwOETF3SrzcqhQ2nqFvmI19sCTsQVmveb/2ITBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBC9KML3vzwF4vGZdtiu++jmgDAWPZspCckgoXPkgNRMePov3pXSlKUAGxwmdsuIM75rJMxlSTiil2lzMMBWXmUofys=]

View File

@ -1,67 +0,0 @@
---
# additional altnames
profiles::pki::vault::alt_names:
- redis.main.unkin.net
- redis.service.consul
- redis.query.consul
- "redis.service.%{facts.country}-%{facts.region}.consul"
profiles::ssh::sign::principals:
- redis.main.unkin.net
- redis.service.consul
- redis.query.consul
hiera_include:
- redisha
redisha::manage_repo: false
redisha::redisha_members_lookup: true
redisha::redisha_members_role: roles::infra::db::redis
redisha::redis::requirepass: "%{hiera('redisha::masterauth')}"
redisha::redis::masterauth: "%{hiera('redisha::masterauth')}"
redisha::sentinel::master_name: "%{facts.country}-%{facts.region}"
redisha::sentinel::requirepass: "%{hiera('redisha::masterauth')}"
redisha::sentinel::auth_pass: "%{hiera('redisha::masterauth')}"
redisha::tools::requirepass: "%{hiera('redisha::masterauth')}"
sudo::configs:
consul:
priority: 20
content: |
consul ALL=(ALL) NOPASSWD: /usr/local/sbin/sentineladm info
consul::services:
redis-replica:
service_name: "redis-replica-%{facts.environment}"
tags:
- 'redis'
- 'redis-replica'
address: "%{facts.networking.ip}"
port: 6379
checks:
- id: 'redis-replica_tcp_check'
name: 'Redis Replica TCP Check'
tcp: "%{facts.networking.ip}:6379"
interval: '10s'
timeout: '1s'
redis-master:
service_name: "redis-master-%{facts.environment}"
tags:
- 'redis'
- 'redis-master'
address: "%{facts.networking.ip}"
port: 6379
checks:
- id: 'redis-master_tcp_check'
name: "Redis Master Check"
args:
- '/usr/local/bin/check_redis_master'
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: "redis-replica-%{facts.environment}"
disposition: write
- resource: service
segment: "redis-master-%{facts.environment}"
disposition: write

View File

@ -1,65 +1,7 @@
---
hiera_include:
- frrouting
- profiles::dhcp::server
- exporters::frr_exporter
# networking
anycast_ip: 198.18.19.18
systemd::manage_networkd: true
systemd::manage_all_network_files: true
networking::interfaces:
eth0:
type: physical
forwarding: true
dhcp: true
anycast0:
type: dummy
ipaddress: "%{hiera('anycast_ip')}"
netmask: 255.255.255.255
mtu: 1500
# frrouting
exporters::frr_exporter::enable: true
frrouting::ospfd_router_id: "%{facts.networking.ip}"
frrouting::ospfd_redistribute:
- connected
frrouting::ospfd_interfaces:
eth0:
area: 0.0.0.0
anycast0:
area: 0.0.0.0
frrouting::daemons:
ospfd: true
# consul
profiles::consul::client::node_rules:
- resource: service
segment: frr_exporter
disposition: write
# additional repos
profiles::yum::global::repos:
frr-extras:
name: frr-extras
descr: frr-extras repository
target: /etc/yum.repos.d/frr-extras.repo
baseurl: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
frr-stable:
name: frr-stable
descr: frr-stable repository
target: /etc/yum.repos.d/frr-stable.repo
baseurl: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
profiles::dhcp::server::ntpservers:
- 0.au.pool.ntp.org
- 1.au.pool.ntp.org
- 2.au.pool.ntp.org
- 3.au.pool.ntp.org
- ntp01.main.unkin.net
- ntp02.main.unkin.net
profiles::dhcp::server::interfaces:
- eth0
profiles::dhcp::server::default_lease_time: 1200
@ -73,45 +15,55 @@ profiles::dhcp::server::pools:
range:
- '198.18.15.200 198.18.15.220'
gateway: 198.18.15.254
nameservers: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
nameservers:
- 198.18.13.12
- 198.18.13.13
domain_name: main.unkin.net
pxeserver: 198.18.19.19
pxeserver: 198.18.13.27
syd1-test:
network: 198.18.16.0
mask: 255.255.255.0
range:
- '198.18.16.200 198.18.16.220'
gateway: 198.18.16.254
nameservers: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
nameservers:
- 198.18.13.12
- 198.18.13.13
domain_name: main.unkin.net
pxeserver: 198.18.19.19
pxeserver: 198.18.13.27
syd1-prod1:
network: 198.18.13.0
mask: 255.255.255.0
range:
- '198.18.13.200 198.18.13.220'
gateway: 198.18.13.254
nameservers: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
nameservers:
- 198.18.13.12
- 198.18.13.13
domain_name: main.unkin.net
pxeserver: 198.18.19.19
pxeserver: 198.18.13.27
syd1-prod2:
network: 198.18.14.0
mask: 255.255.255.0
range:
- '198.18.14.200 198.18.14.220'
gateway: 198.18.14.254
nameservers: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
nameservers:
- 198.18.13.12
- 198.18.13.13
domain_name: main.unkin.net
pxeserver: 198.18.19.19
pxeserver: 198.18.13.27
drw1-prod:
network: 198.18.17.0
mask: 255.255.255.0
range:
- '198.18.17.200 198.18.17.220'
gateway: 198.18.17.1
nameservers: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
nameservers:
- 198.18.17.7
- 198.18.17.8
domain_name: main.unkin.net
pxeserver: 198.18.19.19
pxeserver: 198.18.13.27
# UEFI 64-bit
profiles::dhcp::server::classes:

View File

@ -1,2 +0,0 @@
---
externaldns::externaldns_key_secret: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEABqbZiK1NDTU+w2k7orz2HrB0EXwun7hn4pR6TeCHMp2IfrkPxlQT+f1J9c0PqJaAKvnyz+Cx0xNCrlnONqk+J57f48kYKYV+Vw+L0AYHYFj8/TizY5CwLpJS2XKyfRd4iEsWMonvfIYn71t3+YuXm4dkoEqGekW93qCr/KFtjAu0K3e+ypyl4EJqWokiUs7IbcSBNvrjUkP4yR8F/wHVKM1E5yfr+D1+nmMmt7Ob/J+am14492TppE2C7Xadg4us+kdYtuBsv9kTSi1GwwqUDjbeJVmfK3pKHjXdF+PI07AFLzo5bBZTJOzQfQ4SywpH8R5BDQoUCyHiaskB5wrmSDBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBB2LU9ZhefSg9PqqkwnfV65gDBvXuXco0moKCGjHqm5KcojWCK1BoS/+mltlr8kw9grZjN9jxHRLn1FjgBlq418c8w=]

View File

@ -1,65 +0,0 @@
---
hiera_include:
- externaldns
- frrouting
- exporters::frr_exporter
externaldns::bind_master_hostname: 'ausyd1nxvm2127.main.unkin.net'
externaldns::k8s_zones:
- 'k8s.syd1.au.unkin.net'
- '200.18.198.in-addr.arpa'
externaldns::slave_servers:
- 'ausyd1nxvm2128.main.unkin.net'
- 'ausyd1nxvm2129.main.unkin.net'
externaldns::externaldns_key_algorithm: 'hmac-sha256'
# networking
anycast_ip: 198.18.19.20
systemd::manage_networkd: true
systemd::manage_all_network_files: true
networking::interfaces:
eth0:
type: physical
forwarding: true
dhcp: true
anycast0:
type: dummy
ipaddress: "%{hiera('anycast_ip')}"
netmask: 255.255.255.255
mtu: 1500
# frrouting
exporters::frr_exporter::enable: true
frrouting::ospfd_router_id: "%{facts.networking.ip}"
frrouting::ospfd_redistribute:
- connected
frrouting::ospfd_interfaces:
eth0:
area: 0.0.0.0
anycast0:
area: 0.0.0.0
frrouting::daemons:
ospfd: true
# consul
profiles::consul::client::node_rules:
- resource: service
segment: frr_exporter
disposition: write
# additional repos
profiles::yum::global::repos:
frr-extras:
name: frr-extras
descr: frr-extras repository
target: /etc/yum.repos.d/frr-extras.repo
baseurl: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
frr-stable:
name: frr-stable
descr: frr-stable repository
target: /etc/yum.repos.d/frr-stable.repo
baseurl: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent

View File

@ -1,8 +1,4 @@
---
hiera_include:
- frrouting
- exporters::frr_exporter
profiles::dns::master::ns_role: roles::infra::dns::master
profiles::dns::master::use_ns: region
profiles::dns::master::acls:
@ -13,22 +9,8 @@ profiles::dns::master::acls:
- 198.18.15.0/24
- 198.18.16.0/24
- 198.18.17.0/24
- 198.18.19.0/24
- 198.18.20.0/24
- 198.18.24.0/24
- 198.18.25.0/24
- 198.18.26.0/24
- 198.18.27.0/24
- 198.18.28.0/24
- 198.18.29.0/24
profiles::dns::master::zones:
unkin.net:
domain: 'unkin.net'
zone_type: 'master'
dynamic: false
ns_notify: true
source: '/var/named/sources/unkin.net.conf'
main.unkin.net:
domain: 'main.unkin.net'
zone_type: 'master'
@ -65,95 +47,17 @@ profiles::dns::master::zones:
dynamic: false
ns_notify: true
source: '/var/named/sources/17.18.198.in-addr.arpa.conf'
19.18.198.in-addr.arpa:
domain: '19.18.198.in-addr.arpa'
zone_type: 'master'
dynamic: false
ns_notify: true
source: '/var/named/sources/19.18.198.in-addr.arpa.conf'
20.18.198.in-addr.arpa:
domain: '20.18.198.in-addr.arpa'
zone_type: 'master'
dynamic: false
ns_notify: true
source: '/var/named/sources/20.18.198.in-addr.arpa.conf'
21.18.198.in-addr.arpa:
domain: '21.18.198.in-addr.arpa'
zone_type: 'master'
dynamic: false
ns_notify: true
source: '/var/named/sources/21.18.198.in-addr.arpa.conf'
22.18.198.in-addr.arpa:
domain: '22.18.198.in-addr.arpa'
zone_type: 'master'
dynamic: false
ns_notify: true
source: '/var/named/sources/22.18.198.in-addr.arpa.conf'
23.18.198.in-addr.arpa:
domain: '23.18.198.in-addr.arpa'
zone_type: 'master'
dynamic: false
ns_notify: true
source: '/var/named/sources/23.18.198.in-addr.arpa.conf'
24.18.198.in-addr.arpa:
domain: '24.18.198.in-addr.arpa'
zone_type: 'master'
dynamic: false
ns_notify: true
source: '/var/named/sources/24.18.198.in-addr.arpa.conf'
25.18.198.in-addr.arpa:
domain: '25.18.198.in-addr.arpa'
zone_type: 'master'
dynamic: false
ns_notify: true
source: '/var/named/sources/25.18.198.in-addr.arpa.conf'
26.18.198.in-addr.arpa:
domain: '26.18.198.in-addr.arpa'
zone_type: 'master'
dynamic: false
ns_notify: true
source: '/var/named/sources/26.18.198.in-addr.arpa.conf'
27.18.198.in-addr.arpa:
domain: '27.18.198.in-addr.arpa'
zone_type: 'master'
dynamic: false
ns_notify: true
source: '/var/named/sources/27.18.198.in-addr.arpa.conf'
28.18.198.in-addr.arpa:
domain: '28.18.198.in-addr.arpa'
zone_type: 'master'
dynamic: false
ns_notify: true
source: '/var/named/sources/28.18.198.in-addr.arpa.conf'
29.18.198.in-addr.arpa:
domain: '29.18.198.in-addr.arpa'
zone_type: 'master'
dynamic: false
ns_notify: true
source: '/var/named/sources/29.18.198.in-addr.arpa.conf'
profiles::dns::master::views:
master-zones:
recursion: false
zones:
- unkin.net
- main.unkin.net
- 13.18.198.in-addr.arpa
- 14.18.198.in-addr.arpa
- 15.18.198.in-addr.arpa
- 16.18.198.in-addr.arpa
- 17.18.198.in-addr.arpa
- 19.18.198.in-addr.arpa
- 20.18.198.in-addr.arpa
- 21.18.198.in-addr.arpa
- 22.18.198.in-addr.arpa
- 23.18.198.in-addr.arpa
- 24.18.198.in-addr.arpa
- 25.18.198.in-addr.arpa
- 26.18.198.in-addr.arpa
- 27.18.198.in-addr.arpa
- 28.18.198.in-addr.arpa
- 29.18.198.in-addr.arpa
match_clients:
- acl-main.unkin.net
@ -162,54 +66,3 @@ profiles::dns::master::keys:
secret_bits: 512
algorithm: hmac-sha256
secret: "%{lookup('profiles::dns::master::secret')}"
# networking
anycast_ip: 198.18.19.15
systemd::manage_networkd: true
systemd::manage_all_network_files: true
networking::interfaces:
eth0:
type: physical
forwarding: true
dhcp: true
anycast0:
type: dummy
ipaddress: "%{hiera('anycast_ip')}"
netmask: 255.255.255.255
mtu: 1500
# frrouting
exporters::frr_exporter::enable: true
frrouting::ospfd_router_id: "%{facts.networking.ip}"
frrouting::ospfd_redistribute:
- connected
frrouting::ospfd_interfaces:
eth0:
area: 0.0.0.0
anycast0:
area: 0.0.0.0
frrouting::daemons:
ospfd: true
# consul
profiles::consul::client::node_rules:
- resource: service
segment: frr_exporter
disposition: write
# additional repos
profiles::yum::global::repos:
frr-extras:
name: frr-extras
descr: frr-extras repository
target: /etc/yum.repos.d/frr-extras.repo
baseurl: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
frr-stable:
name: frr-stable
descr: frr-stable repository
target: /etc/yum.repos.d/frr-stable.repo
baseurl: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent

View File

@ -1,8 +1,4 @@
---
hiera_include:
- frrouting
- exporters::frr_exporter
profiles::dns::resolver::acls:
acl-main.unkin.net:
addresses:
@ -14,30 +10,6 @@ profiles::dns::resolver::acls:
- 198.18.15.0/24
- 198.18.16.0/24
- 198.18.17.0/24
- 198.18.18.0/24
- 198.18.19.0/24
- 198.18.20.0/24
- 198.18.21.0/24
- 198.18.22.0/24
- 198.18.23.0/24
acl-dmz:
addresses:
- 198.18.24.0/24
acl-common:
addresses:
- 198.18.25.0/24
- 198.18.26.0/24
- 198.18.27.0/24
- 198.18.28.0/24
- 198.18.29.0/24
acl-nomad-jobs:
addresses:
- 198.18.64.0/24
- 198.18.65.0/24
- 198.18.66.0/24
- 198.18.67.0/24
- 198.18.68.0/24
- 198.18.69.0/24
profiles::dns::resolver::zones:
8.10.10.in-addr.arpa-forward:
@ -61,6 +33,13 @@ profiles::dns::resolver::zones:
- 10.10.16.32
- 10.10.16.33
forward: 'only'
unkin.net-forward:
domain: 'unkin.net'
zone_type: 'forward'
forwarders:
- 10.10.16.32
- 10.10.16.33
forward: 'only'
dmz.unkin.net-forward:
domain: 'dmz.unkin.net'
zone_type: 'forward'
@ -82,195 +61,24 @@ profiles::dns::resolver::zones:
- 10.10.16.32
- 10.10.16.33
forward: 'only'
k8s.syd1.au.unkin.net-forward:
domain: 'k8s.syd1.au.unkin.net'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_k8s')}"
forward: 'only'
unkin.net-forward:
domain: 'unkin.net'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
main.unkin.net-forward:
domain: 'main.unkin.net'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
13.18.198.in-addr.arpa-forward:
domain: '13.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
14.18.198.in-addr.arpa-forward:
domain: '14.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
15.18.198.in-addr.arpa-forward:
domain: '15.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
16.18.198.in-addr.arpa-forward:
domain: '16.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
17.18.198.in-addr.arpa-forward:
domain: '17.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
19.18.198.in-addr.arpa-forward:
domain: '19.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
20.18.198.in-addr.arpa-forward:
domain: '20.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
21.18.198.in-addr.arpa-forward:
domain: '21.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
22.18.198.in-addr.arpa-forward:
domain: '22.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
23.18.198.in-addr.arpa-forward:
domain: '23.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
24.18.198.in-addr.arpa-forward:
domain: '24.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
25.18.198.in-addr.arpa-forward:
domain: '25.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
26.18.198.in-addr.arpa-forward:
domain: '26.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
27.18.198.in-addr.arpa-forward:
domain: '27.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
28.18.198.in-addr.arpa-forward:
domain: '28.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
29.18.198.in-addr.arpa-forward:
domain: '29.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_unkin')}"
forward: 'only'
200.18.198.in-addr.arpa-forward:
domain: '200.18.198.in-addr.arpa'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_k8s')}"
forward: 'only'
consul-forward:
domain: 'consul'
zone_type: 'forward'
forwarders: "%{alias('profiles_dns_upstream_forwarder_consul')}"
forward: 'only'
profiles::dns::resolver::views:
openforwarder:
recursion: true
zones:
- unkin.net-forward
- main.unkin.net-forward
- unkin.net-forward
- dmz.unkin.net-forward
- network.unkin.net-forward
- prod.unkin.net-forward
- consul-forward
- k8s.syd1.au.unkin.net-forward
- 13.18.198.in-addr.arpa-forward
- 14.18.198.in-addr.arpa-forward
- 15.18.198.in-addr.arpa-forward
- 16.18.198.in-addr.arpa-forward
- 17.18.198.in-addr.arpa-forward
- 19.18.198.in-addr.arpa-forward
- 20.18.198.in-addr.arpa-forward
- 21.18.198.in-addr.arpa-forward
- 22.18.198.in-addr.arpa-forward
- 23.18.198.in-addr.arpa-forward
- 24.18.198.in-addr.arpa-forward
- 25.18.198.in-addr.arpa-forward
- 26.18.198.in-addr.arpa-forward
- 27.18.198.in-addr.arpa-forward
- 28.18.198.in-addr.arpa-forward
- 29.18.198.in-addr.arpa-forward
- 8.10.10.in-addr.arpa-forward
- 16.10.10.in-addr.arpa-forward
- 20.10.10.in-addr.arpa-forward
match_clients:
- acl-main.unkin.net
- acl-nomad-jobs
- acl-common
- acl-dmz
# networking
anycast_ip: 198.18.19.16
systemd::manage_networkd: true
systemd::manage_all_network_files: true
networking::interfaces:
eth0:
type: physical
forwarding: true
dhcp: true
anycast0:
type: dummy
ipaddress: "%{hiera('anycast_ip')}"
netmask: 255.255.255.255
mtu: 1500
# frrouting
exporters::frr_exporter::enable: true
frrouting::ospfd_router_id: "%{facts.networking.ip}"
frrouting::ospfd_redistribute:
- connected
frrouting::ospfd_interfaces:
eth0:
area: 0.0.0.0
anycast0:
area: 0.0.0.0
frrouting::daemons:
ospfd: true
# consul
profiles::consul::client::node_rules:
- resource: service
segment: frr_exporter
disposition: write
# additional repos
profiles::yum::global::repos:
frr-extras:
name: frr-extras
descr: frr-extras repository
target: /etc/yum.repos.d/frr-extras.repo
baseurl: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
frr-stable:
name: frr-stable
descr: frr-stable repository
target: /etc/yum.repos.d/frr-stable.repo
baseurl: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent

View File

@ -1,2 +0,0 @@
---
droneci_server::rpc_secret: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAF8cEANj72ACYYdilP52Oz4EXVt+stx838tFVV4SFukoCTmN+kJ3GUUEM9x+oDvo17CsZhDzx5dX0JGo7N9MCLHsR4XKx3GSoAKvaSiD73TbzbdTNJgTIE1tRP9HNbJKizwWLo4lo+OUNtwnu0Z5dkMLYRHyvZ1hG24qmdXVn9DI06gVGQw9YXGmNy8AvA0BKnaHvUnE5XoLNVKZ4g1yvc/nkYpK6nLB+X4sZ96PRig7khiuFVvAfpg5c2iWnF13ljIajnG9uY12RyGaAGfH4l/d3UEyuHyQL4zzT8N7gGt8fvg7eNFx51TyEpVRFLyQ7dqq/lzn0moXVT35PQr9K3DBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCGJ+rkxhqmyUUiIQLG7PnggDBPH9gyYAW5/XjDp5xd6GnuSt/WWOwOGxhg+1BoPzbV5o+Xufg5zyMVdYeI1MWD/u8=]

View File

@ -1,25 +0,0 @@
---
hiera_include:
- profiles::base::datavol
- docker
- droneci::runner
docker::version: latest
docker::curl_ensure: false
droneci::runner::ports:
- 3000:3000
droneci::runner::volumes:
- type=bind,source=/var/run/docker.sock,target=/var/run/docker.sock
- type=bind,source=/data,target=/data
- type=bind,source=/etc/pki/tls/vault/certificate.crt,target=/etc/pki/tls/vault/certificate.crt,readonly
- type=bind,source=/etc/pki/tls/vault/private.key,target=/etc/pki/tls/vault/private.key,readonly
- type=bind,source=/etc/pki/tls/certs/ca-bundle.crt,target=/etc/pki/tls/certs/ca-bundle.crt,readonly
- type=bind,source=/etc/pki/tls/certs/ca-bundle.crt,target=/etc/ssl/certs/ca-certificates.crt,readonly
droneci::runner::env_vars:
DRONE_RPC_PROTO: https
DRONE_RPC_HOST: droneci.query.consul
DRONE_RPC_SECRET: "%{hiera('droneci_server::rpc_secret')}"
DRONE_RUNNER_CAPACITY: 2
DRONE_RUNNER_NAME: "%{facts.networking.fqdn}"
DRONE_RUNNER_VOLUMES: /etc/pki/tls/certs/ca-bundle.crt:/etc/ssl/certs/ca-certificates.crt

View File

@ -1,6 +0,0 @@
---
droneci_server::gitea_client_secret: ENC[PKCS7,MIIBqQYJKoZIhvcNAQcDoIIBmjCCAZYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAJLcMKV4EevB8V/vjex+Srq47pWLvPozjSb+vjS/crFpKq3/dgtkJ8WkqF98fPnXxkMowP/BpNexJBCmPtTR2pPTKHIYkXjxGax0g3P2gcgeT7wsNl24mnUJDWZs1/z8ibwUXv4Zg0nbKi+YdomjsvT3vl2IPI88TKZDkq6HBoturwqx2l/edC5IeAi8vgPRXF8hn2TVuslkmoNXoI28Qp10b2XNOCss6KUJ4rbOwpIgdOVFbXqi7CFjhGz/3uS9xK+2m3iuE/gXi8KSKfiQ+QcGuXL9Zy5PH5rrNg4mbrFiaFfTevsHInl1wwRmULilQ0kEY3NW4b/URXqBBuwcN/TBsBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBDlv7tA+6OQwDuL7z3xGYZjgEBcEAj3TQ9R9TntxTyBa9mKFRXimFTDDxdZ2zD96qFNz+7ZZFGD6pFQNHSMC9AYlAue94UsIHJZaXwgAo5zc98Q]
droneci_server::cookie_secret: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAKTWrB7Cca8RgFEeP46puzhVWOAiI6nB+m5+/sgz1qNnn6VgNh9Q6D26p9HISrMp4k60KVuGXrIbZYpkvKZmv4zHgK2et+50sr/F1anhDRX4rsmGWVV9n8VhaIJFAyQkW4de9YxV9LAgk0tWs1BgfXv4cV4+sDBv0OSVIFJDst3LUWpBR6lsiV9IifvNNUrdOHkdt5XjuL4JVmc0nkjetAg9HSvJ9VHYLIQagFHnTQp0FWluE1/ibGNc2kz7D4K7cPz2bBxRJWomckSUwgQK0NrGID4D13haZXhKXAO/8QpQOwPra8vD9FYrFenMeFLwdw43NzSr2g/W1ss5PekbWGjBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBASshs2UwkGqK4ShFfXni7cgDCDCnEiqcfxIz4X/Bq71IpRKan3uQbHOewFqjNGqoR+1oWupjRxzNL39H9YF1a6i6s=]
droneci_server::database_secret: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEASu4C45TYWZKgIoyqC3YdwYYXn+T+ruP6oIvhYFJ5dxeZ+6HtWbRMViErvpPuWYfgs5qt6Zj9eLz2hqimFCvKiAvzANeZ9hkhw/jkpmvG8iXpDFw6x8QKcPJteRo896KSLiGiVlZfRbgQCGAqiEMw6y6M9CvfCLzE/mZ9gOKjSJVKiioAnXU2fyq0Y0M6g0iLRw0VXl2BVc4ORCnVECARQPo48T3U+TT39q0ar4mRO1AFO0VA5iDJ6/EMPBcH3ekKO/1dB1UbV6VkD3s9BAHGyL5a5Wr6ztg/5Yl6VBCXmECZqpCx8jx8KDUoaj1R/+I83YQxbw9ch76j79haIK6+jzBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBAh6MmfbUELaWbSNZ9/Dev2gDD/E3G/uYJGXNGl1+PIVwGmi0z2BTXNqg7ax/b/uF5Xc9ZtBSPiSxR6BPRXN3GleNo=]
droneci_server::postgres_password: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEANpDnrpratpuYheXFrN4nwRTauPm9rZz2ubDyJlcxmah+kOWqsWIeEkv5GuATlymfAx5UuHPOv3dJPCSK+YuyQY+kGW/8uEFM68QrNi38NdRqEpdXuPBe5+AmWxcjYK3mdJ4maEwsbbxtYJmD8TF6kskS2P/KhnIzYR5PPHZTaYbEf/W5Da3l+J5WnFYpStuLq+86yZokBAygFPI+y/Ic+zJIdhpzVdLyGuqxGLXZq7nNMrjuNyFPKkCj1BBpuJTMCS4oPKCUTlm5hIIeeC2pFREI0CMTV5siZB8NphobPNn/ZbJrcs9q75LtIa47pkFYRbmV4WPctCwZXg6jtMleuzBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBDHJaChyidZq/5FN5n+ASJWgDCqUcR/DG9e8AD7fRmTb5BZM8XQ77a1hUJoaCycnMQ/5UyKmqU/7fLPrsxCf2vZU1M=]
droneci_server::redis_password: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAYMoZlVXHC1qfkDptPy3PyWGrUs5y9M9AOv5Vn1AK6DYViixhjwPZUCfwTONmt7CiBuqmkDOpR5isWFiBo/+TMeMXlM9C/D+Svc9tpeHLpVaXAYeoz5um2InyBFkLWSaUaWSftF9U7O5Nv/OiLIsd7nn4T8Dd21rfUiRfUN/2HPLMCs+mW15Az9XNOcvfm+kPXDAcB+ukHde0vvtYTZEFWjMdwJjjE43DiCmAoLTcpvQxdlclojotBpFeBLZs/F21FDQ4hvBUvPBMuZ1o7ImSP5fuomYSFK8etbD7JO5gg5BN59lGl1ljyR83phv6wmmqlzB8wQl0pjEpni9o7fa9hTBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBCKJrkPA78qx3wAu1eroIjRgDA72N9U0YZf1MzACcGSOMU5RGO242RwlIKUrnuYcdC0dKkjLXtP+yVpSKkX5WxcsDQ=]

View File

@ -1,79 +0,0 @@
---
# additional altnames
profiles::pki::vault::alt_names:
- droneci.main.unkin.net
- droneci.service.consul
- droneci.query.consul
- "droneci.service.%{facts.country}-%{facts.region}.consul"
profiles::ssh::sign::principals:
- droneci.main.unkin.net
- droneci.service.consul
- droneci.query.consul
hiera_include:
- docker
- profiles::sql::postgresdb
- droneci
docker::version: latest
docker::curl_ensure: false
profiles::sql::postgresdb::dbname: droneci
profiles::sql::postgresdb::dbuser: droneci
profiles::sql::postgresdb::dbpass: "%{hiera('droneci_server::postgres_password')}"
profiles::sql::postgresdb::members_lookup: true
profiles::sql::postgresdb::members_role: roles::infra::droneci::server
droneci::ports:
- 80:80
- 443:443
droneci::volumes:
- type=bind,source=/var/lib/drone,target=/data
- type=bind,source=/etc/pki/tls/vault/certificate.crt,target=/etc/pki/tls/vault/certificate.crt,readonly
- type=bind,source=/etc/pki/tls/vault/private.key,target=/etc/pki/tls/vault/private.key,readonly
- type=bind,source=/etc/pki/tls/certs/ca-bundle.crt,target=/etc/pki/tls/certs/ca-bundle.crt,readonly
- type=bind,source=/etc/pki/tls/certs/ca-bundle.crt,target=/etc/ssl/certs/ca-certificates.crt,readonly
droneci::env_vars:
DRONE_GITEA_SERVER: https://git.query.consul
DRONE_GITEA_CLIENT_ID: dda67581-86df-4e65-88ae-1e505b849082
DRONE_USER_CREATE: username:unkinben,admin:true
DRONE_GITEA_CLIENT_SECRET: "%{hiera('droneci_server::gitea_client_secret')}"
DRONE_RPC_SECRET: "%{hiera('droneci_server::rpc_secret')}"
DRONE_SERVER_HOST: droneci.query.consul
DRONE_SERVER_PROTO: https
DRONE_TLS_CERT: /etc/pki/tls/vault/certificate.crt
DRONE_TLS_KEY: /etc/pki/tls/vault/private.key
DRONE_COOKIE_SECRET: "%{hiera('droneci_server::cookie_secret')}"
DRONE_COOKIE_TIMEOUT: 720h
DRONE_HTTP_SSL_REDIRECT: true
DRONE_HTTP_SSL_TEMPORARY_REDIRECT: true
DRONE_HTTP_SSL_HOST: droneci.query.consul
DRONE_LOGS_TEXT: true
DRONE_LOGS_PRETTY: true
DRONE_LOGS_COLOR: true
DRONE_DATABASE_SECRET: "%{hiera('droneci_server::database_secret')}"
DRONE_DATABASE_DRIVER: postgres
DRONE_DATABASE_DATASOURCE: "postgres://droneci:%{hiera('droneci_server::postgres_password')}@master.patroni-prod.service.au-syd1.consul:5432/droneci?sslmode=disable"
DRONE_REDIS_CONNECTION: "redis://%{hiera('droneci_server::redis_password')}@redis-master-prod.service.au-syd1.consul:6379/2"
consul::services:
droneci:
service_name: 'droneci'
tags:
- 'drone'
- 'droneci'
address: "%{facts.networking.ip}"
port: 443
checks:
- id: 'droneci_https_check'
name: 'droneci HTTPS Check'
http: "https://%{facts.networking.fqdn}:443"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: droneci
disposition: write

View File

@ -1,2 +0,0 @@
---
profiles::etcd::node::initial_cluster_token: ENC[PKCS7,MIIBiQYJKoZIhvcNAQcDoIIBejCCAXYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAhLyXszXUU6Dkiw9bEJTH0RXGaV2751NzvLH94i7QHfNukvOslF/kaDOA+FwqG06xSKSKo24Qyj4ewYA3BzhN8XLf2E9uW2LuDrUoA6aXUP2tYPqiTw8zmmgsVV5t7Y5PeNcleV3KmfcJZJKp33yGCKtGF7ggvNvnied5slO6E1BDkcVnqO7sdyI0MqSvsvH4IvEmeiSWAcBRBnwVLIwfn10frIvUg0fH4uZR7DASfO/HstYWKAEacz4xYBv74TtVVtYHlPvnVwC20YIYDMrgBsm3XngyWIQvruQCgyIkRzHjUKCpp76HpyEqzdJdEdaywkODYNOT6ab1B5uUu9WaMjBMBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBADXLPOqFHdnVgJW5+iXJYcgCDK1Eyr+RwvMA+3VszYALU5B6OCH5maplwC5aUgiQZ7ew==]

View File

@ -1,62 +0,0 @@
---
hiera_include:
- profiles::etcd::node
profiles::etcd::node::members_lookup: true
profiles::etcd::node::members_role: roles::infra::etcd::node
profiles::etcd::node::config:
data-dir: /data/etcd
client-cert-auth: false
client-transport-security:
cert-file: /etc/pki/tls/vault/certificate.crt
key-file: /etc/pki/tls/vault/private.key
client-cert-auth: false
auto-tls: false
peer-transport-security:
cert-file: /etc/pki/tls/vault/certificate.crt
key-file: /etc/pki/tls/vault/private.key
client-cert-auth: false
auto-tls: false
allowed-cn:
max-wals: 5
max-snapshots: 5
snapshot-count: 10000
heartbeat-interval: 100
election-timeout: 1000
cipher-suites: [
TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384
]
tls-min-version: 'TLS1.2'
tls-max-version: 'TLS1.3'
profiles::pki::vault::alt_names:
- etcd.service.consul
- etcd.query.consul
- "etcd.service.%{facts.country}-%{facts.region}.consul"
profiles::ssh::sign::principals:
- etcd.query.consul
- etcd.service.consul
- etcd.service.%{facts.country}-%{facts.region}.consul
consul::services:
etcd:
service_name: 'etcd'
tags:
- 'etcd'
address: "%{facts.networking.ip}"
port: 2379
checks:
- id: 'etcd_http_health_check'
name: 'ETCD HTTP Health Check'
http: "https://%{facts.networking.ip}:2379/health"
method: 'GET'
interval: '10s'
timeout: '1s'
tls_skip_verify: true
profiles::consul::client::node_rules:
- resource: service
segment: etcd
disposition: write

View File

@ -1,3 +1,3 @@
---
profiles::gitea::mysql_pass: ENC[PKCS7,MIIBiQYJKoZIhvcNAQcDoIIBejCCAXYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAjmMVHQcvy0PLruFWI6UmYqM2uEqXntV8HdA54RCTm7GaneXsW+rom+ibFVd0i9L+spQPQzcidh7FlzBRYgny8yH8TqZlh7XMraXSYG2EvrjwzNvgnwhY5mGEQNQcQkqN9Orfsf6HjXmXg2CxajYibKu0/belJZFffzPzzrn15wy3Cj5lDjAziqYoD+8Ko1zkF9lWz4ewVjll82yo8iSpidN+PyvoeWsi/eJ9cW72TgFLt/rvGquLq3MuW54J716hrR1Z37Uf0OO18AiKCVjoCi5Cf/k0VKRsXM8Myu2KInqrGcUHAO+fsOXBXnmU0MOxW0OIOmwxfwY6LJfN23arlDBMBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBB6GktEMe8gSTijJ/dIHC5/gCCblMojNKO1ig9fNsuT9I2u5Bt4iJrSMN+GBGnCzO1Bvw==]
profiles::gitea::init::mysql_pass: ENC[PKCS7,MIIBiQYJKoZIhvcNAQcDoIIBejCCAXYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAjmMVHQcvy0PLruFWI6UmYqM2uEqXntV8HdA54RCTm7GaneXsW+rom+ibFVd0i9L+spQPQzcidh7FlzBRYgny8yH8TqZlh7XMraXSYG2EvrjwzNvgnwhY5mGEQNQcQkqN9Orfsf6HjXmXg2CxajYibKu0/belJZFffzPzzrn15wy3Cj5lDjAziqYoD+8Ko1zkF9lWz4ewVjll82yo8iSpidN+PyvoeWsi/eJ9cW72TgFLt/rvGquLq3MuW54J716hrR1Z37Uf0OO18AiKCVjoCi5Cf/k0VKRsXM8Myu2KInqrGcUHAO+fsOXBXnmU0MOxW0OIOmwxfwY6LJfN23arlDBMBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBB6GktEMe8gSTijJ/dIHC5/gCCblMojNKO1ig9fNsuT9I2u5Bt4iJrSMN+GBGnCzO1Bvw==]
profiles::gitea::init::lfs_jwt_secret: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEACd6q4E/4l1EYD3SFjc1okibyJ13kcGGWU+ShbCgwLgkW7INkyCxhbNm69yPA7WcyuRhH/Lfz/XjJKd3BSCyRQPr5IUOIRINspx82tLBcaMzY/99GFrfyDnf3+SV/AxrPJ/zD5TGkKQP7uX6WjC9DXpHE+pFJa9wBAipmV439y0JDVt2gXFmhqBWThSjBDBfJ5X4zO5wY8CfBX4APOcD5hIQP/T4n04dQLNpigEKKy6B+GFuooTbdmMmFj3ZpT+cUS8Aw9mFkBwyyN1o+50XU3vW4eieUz8cYkzDPu574XfTunqD2jcvPiFjCla8G1SpKfHkruKnZWwgO0Ntw9td5QDBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBAIRVL5j4dzbYg6f2XjvkQ6gDAd2qUNzPn2flZgKwsjIZcYdmFMTn48hGPUFfVaMDeyzPoJi84CyRJl8cQvcAe52sw=]

View File

@ -6,11 +6,6 @@ profiles::pki::vault::alt_names:
- git.query.consul
- "git.service.%{facts.country}-%{facts.region}.consul"
profiles::ssh::sign::principals:
- git.main.unkin.net
- git.service.consul
- git.query.consul
consul::services:
git:
service_name: 'git'
@ -41,44 +36,4 @@ profiles::nginx::simpleproxy::nginx_aliases:
- "git.service.%{facts.country}-%{facts.region}.consul"
profiles::nginx::simpleproxy::proxy_port: 3000
profiles::nginx::simpleproxy::proxy_path: '/'
nginx::client_max_body_size: 1024M
profiles::gitea::init::root:
APP_NAME: 'Gitea'
RUN_USER: 'git'
RUN_MODE: 'prod'
profiles::gitea::init::repository:
ROOT: '/data/gitea/repos'
FORCE_PRIVATE: false
MAX_CREATION_LIMIT: -1
DISABLE_HTTP_GIT: false
DEFAULT_BRANCH: 'main'
DEFAULT_PRIVATE: 'last'
profiles::gitea::init::ui:
SHOW_USER_EMAIL: false
profiles::gitea::init::server:
PROTOCOL: 'http'
DOMAIN: 'git.query.consul'
ROOT_URL: 'https://git.query.consul'
HTTP_ADDR: '0.0.0.0'
HTTP_PORT: 3000
START_SSH_SERVER: false
SSH_DOMAIN: 'git.query.consul'
SSH_PORT: 2222
SSH_LISTEN_HOST: '0.0.0.0'
OFFLINE_MODE: true
APP_DATA_PATH: '/data/gitea'
SSH_LISTEN_PORT: 22
LFS_START_SERVER: true
profiles::gitea::init::database:
DB_TYPE: 'mysql'
HOST: 'mariadb-prod.service.au-syd1.consul:3306'
NAME: 'gitea'
USER: 'gitea'
PASSWD: "%{hiera('profiles::gitea::mysql_pass')}"
SSL_MODE: 'disable'
LOG_SQL: false
profiles::gitea::init::lfs:
PATH: '/data/gitea/lfs'
profiles::gitea::init::session:
PROVIDER: db
nginx::client_max_body_size: 250M

View File

@ -1,2 +0,0 @@
---
redisha::masterauth: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAV8znsSGAbPpPUhAcyOIWFltVyAcx3yVcIvC+JFndkkuVBT1813GSURrXIreXSilvJEHlwRC03A9NhjWJSsHBIS12uUb+7ap95oh2JJ7OHmeWSVD1GDDRpTQAgDEOikAnioRNJfQ83jUa11nJrsavt46hSq8vDq+rZ2P8ugiNk59mNX5vgYthCPXcEJd7UmpLZhgxZ8+42l4TKo7QpqKRcIMteJXk1NvyYfYGnGTZhknuyHM3xPGauGjKzamMlTzD9dGnn2K0/Q7I4PUyT24ZEG3kNVyDlVHTYcKnIT5q8qvmJdDQfZwOETF3SrzcqhQ2nqFvmI19sCTsQVmveb/2ITBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBC9KML3vzwF4vGZdtiu++jmgDAWPZspCckgoXPkgNRMePov3pXSlKUAGxwmdsuIM75rJMxlSTiil2lzMMBWXmUofys=]

View File

@ -1,60 +0,0 @@
---
# additional altnames
profiles::pki::vault::alt_names:
- "gitea-redis-replica-%{facts.environment}.main.unkin.net"
- "gitea-redis-replica-%{facts.environment}.service.consul"
- "gitea-redis-replica-%{facts.environment}.query.consul"
- "gitea-redis-replica-%{facts.environment}.service.%{facts.country}-%{facts.region}.consul"
profiles::ssh::sign::principals:
- "gitea-redis-replica-%{facts.environment}.main.unkin.net"
- "gitea-redis-replica-%{facts.environment}.service.consul"
- "gitea-redis-replica-%{facts.environment}.query.consul"
hiera_include:
- redisha
redisha::manage_repo: false
redisha::redisha_members_lookup: true
redisha::redisha_members_role: roles::infra::git::redis
redisha::redis::requirepass: "%{hiera('redisha::masterauth')}"
redisha::redis::masterauth: "%{hiera('redisha::masterauth')}"
redisha::sentinel::master_name: "%{facts.country}-%{facts.region}"
redisha::sentinel::requirepass: "%{hiera('redisha::masterauth')}"
redisha::sentinel::auth_pass: "%{hiera('redisha::masterauth')}"
redisha::tools::requirepass: "%{hiera('redisha::masterauth')}"
sudo::configs:
consul:
priority: 20
content: |
consul ALL=(ALL) NOPASSWD: /usr/local/sbin/sentineladm info
consul::services:
gitea-redis-replica:
service_name: "gitea-redis-replica-%{facts.environment}"
address: "%{facts.networking.ip}"
port: 6379
checks:
- id: 'redis-replica_tcp_check'
name: 'Redis Replica TCP Check'
tcp: "%{facts.networking.ip}:6379"
interval: '10s'
timeout: '1s'
gitea-redis-master:
service_name: "gitea-redis-master-%{facts.environment}"
address: "%{facts.networking.ip}"
port: 6379
checks:
- id: 'redis-master_tcp_check'
name: "Redis Master Check"
args:
- '/usr/local/bin/check_redis_master'
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: "gitea-redis-replica-%{facts.environment}"
disposition: write
- resource: service
segment: "gitea-redis-master-%{facts.environment}"
disposition: write

View File

@ -1 +0,0 @@
profiles::gitea::runner::registration_token: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAOL/ug4IPhEW1n+Lq+SsMSEJUYsDDK2s0+oNF3unxcbH3QDqWo7kuYKkDWQ+W3otcxvuRlbC8+0W2fO2udhF7sSGrF93INsTCDqWlLnaaAgxlgNSXthA4OCJlI8DCLeD/Sr0TTCchUdpQrIpDo6Gh0EUjgRv5574q26or7c/vvtQ4nfLVQOqEV9UpsCgEYiQvXVcf55LEpgaDp4mFL0qCnfzDnGNbZ0GUo6552ka19IocqOqILPnZO0qDcEoLbQ90sP197+5Jw611i1Akx1C4lFP81bazFMpbdiEP0V4Ax+33LfZEb0KnXuMbKOF23vIwwwfpFJaSOAjA5YehA3xM2zBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBDPJHB2uL+VEyntgZocyoMXgDBJ1dnRWiJM77XomzbNdDUO+ktIHLOTL5do0m4CkXZ1s42KtaAwWL+/EGdxg80UMC8=]

View File

@ -1,56 +0,0 @@
---
hiera_include:
- docker
- profiles::gitea::runner
- incus::client
docker::version: latest
docker::curl_ensure: false
docker::root_dir: /data/docker
profiles::gitea::runner::instance: https://git.unkin.net
profiles::gitea::runner::home: /data/runner
profiles::gitea::runner::version: '0.2.12'
profiles::gitea::runner::config:
log:
level: info
runner:
file: "%{hiera('profiles::gitea::runner::home')}/.runner"
capacity: 2
envs:
A_TEST_ENV_NAME_1: a_test_env_value_1
A_TEST_ENV_NAME_2: a_test_env_value_2
env_file: .env
timeout: 3h
insecure: false
fetch_timeout: 5s
fetch_interval: 2s
labels:
- "almalinux-latest"
- "almalinux-8:docker"
- "almalinux-8.10:docker"
cache:
enabled: true
dir: "%{hiera('profiles::gitea::runner::home')}/.cache/actcache"
host: ""
port: 0
external_server: ""
container:
network: ""
privileged: false
options:
workdir_parent: /workspace
valid_volumes:
- /etc/pki/tls/vault
docker_host: ""
force_pull: true
force_rebuild: false
host:
workdir_parent: "%{hiera('profiles::gitea::runner::home')}/.cache/act"
# enable ip forwarding for docker containers
sysctl::base::values:
net.ipv4.conf.all.forwarding:
value: '1'
net.ipv6.conf.all.forwarding:
value: '1'

View File

@ -1,3 +0,0 @@
---
profiles::sql::postgresdb::dbpass: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAhg1qvPHyqM3QXINxU8v0AX2e4rbfaHym0VGH3U1qoJTODYynHwhGDWzVhD9sZPej9VtqdRR1oi+PAD4VSZxFOON3y2HaPJ8nJMDLN3FeKhTK6nCNte00w8EeKXma+0fcur+wKQTSOem/yBBXt2wCxqGk1vdoHod/92wNDljUV0evrChhgjjiEc+WdScrwSl8SSxFzdRa76iahYikZC0aTZViWQwN1XoEuZW/EszmxgEfEwSygvon31JKuyfZTzZazCoUmPq9dvhNvhLbStT4USMopDYuiwrpHKdEzDe1r1Q/FOL5iDIrULAoSHRnBsqWTJ8a+HkSS2DsvOiAnQJjCDBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBAPcCq5Ob9rpxiqcVSBztk4gDCPmuAGDBAeUgFHheDJN6Eu+OSn9zA8kuHw0AVtPou1JERPjDIQ9QA0RQOtZATpybE=]
gitea_redis_pass: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEASaO/tL+NaCWexFbIWWSWilntnvRsmHRpyyDCTigHqY2sx3vlc2HCyxlZOpBEqNoF5tmF6xVJ4pjcVtmdajhvkg3YAjX2SybMM5PLw4WKfnZcAXR3fu87WysszA+lujNrlmGcnQh3j6bSqhBAcjMtW7HiRl6h+GB2UO6cFRTtAkaY2s8rTv67nO8CKbu6iDSJ+ocXkctsnFmkEVK9XBbRoz6ldBEEZM2ARd1LM4aNufu9l0GqM0sdUscgU6CRFYBUzwAa7eIVGJ1nPU+jZZF47pl1tQiaZ6+Gj+lHmmT4AR3SnEGjAclRvUTMf0W3g4oP3TxZvvRoYZHcwBuOsphuaTBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBBgcIuNOiIMZ1ZbLuryeubjgDCvDLMLDy/IgIhgxmI8EOtPMsMLIY66+ADsoagyxz6WSQW1ReD0k/RMpDvPL34fY44=]

View File

@ -1,141 +0,0 @@
---
hiera_include:
- profiles::sql::postgresdb
- profiles::nginx::simpleproxy
- profiles::gitea::user
- profiles::gitea::haproxy
- gitea
# additional altnames
profiles::pki::vault::alt_names:
- git.unkin.net
- git.main.unkin.net
- git.service.consul
- git.query.consul
- "git.service.%{facts.country}-%{facts.region}.consul"
profiles::ssh::sign::principals:
- git.unkin.net
- git.main.unkin.net
- git.service.consul
- git.query.consul
consul::services:
git:
service_name: 'git'
address: "%{facts.networking.ip}"
port: 443
checks:
- id: 'gitea_https_check'
name: 'Gitea HTTPS Check'
http: "https://%{facts.networking.fqdn}:443"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
gitea-metrics:
service_name: 'gitea-metrics'
address: "%{facts.networking.ip}"
port: 3000
tags:
- 'metrics'
- 'metrics_scheme=http'
- 'metrics_job=gitea'
checks:
- id: 'gitea_metrics_http_check'
name: 'Gitea Metrics HTTP Check'
http: "http://%{facts.networking.fqdn}:3000/metrics"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: git
disposition: write
- resource: service
segment: gitea-metrics
disposition: write
# manage the gitea user
profiles::gitea::user::manage: true
# manage a simple nginx reverse proxy
profiles::nginx::simpleproxy::nginx_vhost: 'git.query.consul'
profiles::nginx::simpleproxy::nginx_aliases:
- git.unkin.net
- git.main.unkin.net
- git.service.consul
- git.query.consul
- "git.service.%{facts.country}-%{facts.region}.consul"
profiles::nginx::simpleproxy::proxy_port: 3000
profiles::nginx::simpleproxy::proxy_path: '/'
nginx::client_max_body_size: 5144M
# enable external access via haproxy
profiles::gitea::haproxy::enable: true
# manage a pgsql database + user
profiles::sql::postgresdb::cluster_name: "patroni-shared-%{facts.environment}"
profiles::sql::postgresdb::dbname: gitea
profiles::sql::postgresdb::dbuser: gitea
# deploy gitea
gitea::ensure: '1.22.4'
gitea::checksum: 'd549104f55067e6fb156e7ba060c9af488f36e12d5e747db7563fcc99eaf8532'
gitea::manage_user: false
gitea::manage_group: false
gitea::manage_home: false
gitea::custom_configuration:
'':
APP_NAME: 'Gitea'
RUN_USER: 'git'
RUN_MODE: 'prod'
repository:
ROOT: '/shared/apps/gitea/repos'
FORCE_PRIVATE: false
MAX_CREATION_LIMIT: -1
DISABLE_HTTP_GIT: false
DEFAULT_BRANCH: 'main'
DEFAULT_PRIVATE: 'last'
ui:
SHOW_USER_EMAIL: false
server:
PROTOCOL: 'http'
DOMAIN: 'git.unkin.net'
ROOT_URL: 'https://git.unkin.net'
HTTP_ADDR: '0.0.0.0'
HTTP_PORT: 3000
START_SSH_SERVER: false
SSH_DOMAIN: 'git.unkin.net'
SSH_PORT: 2222
SSH_LISTEN_HOST: '0.0.0.0'
OFFLINE_MODE: true
APP_DATA_PATH: '/shared/apps/gitea'
SSH_LISTEN_PORT: 22
LFS_START_SERVER: true
database:
DB_TYPE: 'postgres'
HOST: "master.%{hiera('profiles::sql::postgresdb::cluster_name')}.service.%{facts.country}-%{facts.region}.consul:5432"
NAME: "%{hiera('profiles::sql::postgresdb::dbname')}"
USER: "%{hiera('profiles::sql::postgresdb::dbuser')}"
PASSWD: "%{hiera('profiles::sql::postgresdb::dbpass')}"
SSL_MODE: 'disable'
LOG_SQL: false
lfs:
PATH: '/shared/apps/gitea/lfs'
session:
PROVIDER: db
metrics:
ENABLED: true
ENABLED_ISSUE_BY_REPOSITORY: true
ENABLED_ISSUE_BY_LABEL: true
queue:
TYPE: redis
CONN_STR: "redis+sentinel://gitea-redis-replica-prod.service.consul:26379/0?master_name=au-syd1&password=%{hiera('gitea_redis_pass')}&sentinel_password=%{hiera('gitea_redis_pass')}"
cache:
ADAPTER: redis
HOST: "redis+sentinel://gitea-redis-replica-prod.service.consul:26379/1?master_name=au-syd1&password=%{hiera('gitea_redis_pass')}&sentinel_password=%{hiera('gitea_redis_pass')}"
indexer:
ISSUE_INDEXER_TYPE: db

View File

@ -15,7 +15,6 @@ profiles::haproxy::server::globals:
stats:
- timeout 30s
- socket /var/lib/haproxy/stats
- socket /var/lib/haproxy/admin.sock mode 660 level admin
ca-base: /etc/ssl/certs
crt-base: /etc/ssl/private
ssl-default-bind-ciphers: EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH
@ -53,8 +52,6 @@ profiles::haproxy::frontends:
options:
acl:
- 'acl-letsencrypt path_beg /.well-known/acme-challenge/'
use_backend:
- 'be_letsencrypt if acl-letsencrypt'
http-request:
- 'set-header X-Forwarded-Proto https'
- 'set-header X-Real-IP %[src]'
@ -70,8 +67,6 @@ profiles::haproxy::frontends:
options:
acl:
- 'acl-letsencrypt path_beg /.well-known/acme-challenge/'
use_backend:
- 'be_letsencrypt if acl-letsencrypt'
http-request:
- 'set-header X-Forwarded-Proto https'
- 'set-header X-Real-IP %[src]'
@ -94,6 +89,3 @@ profiles::haproxy::backends:
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
prometheus::haproxy_exporter::cnf_scrape_uri: unix:/var/lib/haproxy/stats
prometheus::haproxy_exporter::export_scrape_job: true

View File

@ -1,231 +0,0 @@
---
hiera_include:
- frrouting
- profiles::haproxy::server
- exporters::frr_exporter
# networking
anycast_ip: 198.18.19.17
systemd::manage_networkd: true
systemd::manage_all_network_files: true
networking::interfaces:
eth0:
type: physical
forwarding: true
dhcp: true
anycast0:
type: dummy
ipaddress: "%{hiera('anycast_ip')}"
netmask: 255.255.255.255
mtu: 1500
# frrouting
exporters::frr_exporter::enable: true
frrouting::ospfd_router_id: "%{facts.networking.ip}"
frrouting::ospfd_redistribute:
- connected
frrouting::ospfd_interfaces:
eth0:
area: 0.0.0.0
anycast0:
area: 0.0.0.0
frrouting::daemons:
ospfd: true
# additional repos
profiles::yum::global::repos:
frr-extras:
name: frr-extras
descr: frr-extras repository
target: /etc/yum.repos.d/frr-extras.repo
baseurl: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
frr-stable:
name: frr-stable
descr: frr-stable repository
target: /etc/yum.repos.d/frr-stable.repo
baseurl: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
# haproxy metrics
consul::services:
haproxy-metrics:
service_name: 'haproxy-metrics'
tags:
- 'metrics'
- 'metrics_scheme=https'
- 'metrics_job=haproxy'
address: "%{facts.networking.ip}"
port: 8405
checks:
- id: 'haproxy_metrics_https_check'
name: 'HAProxy Metrics Check'
http: "https://%{facts.networking.fqdn}:8405/metrics"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: haproxy-metrics
disposition: write
- resource: service
segment: frr_exporter
disposition: write
# haproxy
profiles::haproxy::peers::enable: true
profiles::haproxy::resolvers::enable: true
profiles::haproxy::ls_stats::port: 9090
profiles::haproxy::ls_stats::user: 'admin'
profiles::selinux::setenforce::mode: permissive
profiles::haproxy::server::globals:
log:
- /dev/log local0
- /dev/log local1 notice
stats:
- timeout 30s
- socket /var/lib/haproxy/stats
- socket /var/lib/haproxy/admin.sock mode 660 level admin
ca-base: /etc/ssl/certs
crt-base: /etc/ssl/private
ssl-default-bind-ciphers: EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH
ssl-default-bind-options: 'ssl-min-ver TLSv1.2 ssl-max-ver TLSv1.3'
ssl-default-server-ciphers: kEECDH+aRSA+AES:kRSA+AES:+AES256:RC4-SHA:!kEDH:!LOW:!EXP:!MD5:!aNULL:!eNULL
ssl-default-server-options: no-sslv3
tune.ssl.default-dh-param: 2048
profiles::haproxy::server::defaults:
mode: http
option:
- httplog
- dontlognull
- http-server-close
- forwardfor except 127.0.0.0/8
- redispatch
timeout:
- http-request 10s
- queue 1m
- connect 10s
- client 5m
- server 5m
- http-keep-alive 10s
- check 10s
retries: 3
maxconn: 5000
profiles::haproxy::frontends:
fe_http:
description: 'Global HTTP Frontend'
bind:
0.0.0.0:80:
- transparent
mode: 'http'
options:
acl:
- 'acl-letsencrypt path_beg /.well-known/acme-challenge/'
use_backend:
- 'be_letsencrypt if acl-letsencrypt'
http-request:
- 'set-header X-Forwarded-Proto https'
- 'set-header X-Real-IP %[src]'
fe_https:
description: 'Global HTTPS Frontend'
bind:
0.0.0.0:443:
- ssl
- crt-list /etc/haproxy/certificate.list
- ciphers EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH
- force-tlsv12
mode: 'http'
options:
acl:
- 'acl-letsencrypt path_beg /.well-known/acme-challenge/'
use_backend:
- 'be_letsencrypt if acl-letsencrypt'
http-request:
- 'set-header X-Forwarded-Proto https'
- 'set-header X-Real-IP %[src]'
fe_metrics:
description: 'Metrics Frontend'
bind:
0.0.0.0:8405:
- ssl
- crt /etc/pki/tls/vault/certificate.pem
- ciphers EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH
- force-tlsv12
mode: 'http'
options:
http-request:
- 'set-header X-Forwarded-Proto https'
- 'set-header X-Real-IP %[src]'
- 'use-service prometheus-exporter if { path /metrics }'
fe_imap:
description: 'Frontend for Stalwart IMAP (STARTTLS)'
bind:
0.0.0.0:143: []
mode: 'tcp'
options:
log: global
default_backend: be_stalwart_imap
tcp-request:
- inspect-delay 5s
- content accept if { req_len 0 }
fe_imaps:
description: 'Frontend for Stalwart IMAPS (implicit TLS)'
bind:
0.0.0.0:993: []
mode: 'tcp'
options:
log: global
default_backend: be_stalwart_imaps
tcp-request:
- inspect-delay 5s
- content accept if { req_len 0 }
fe_smtp:
description: 'Frontend for Stalwart SMTP'
bind:
0.0.0.0:25: []
mode: 'tcp'
options:
log: global
default_backend: be_stalwart_smtp
tcp-request:
- inspect-delay 5s
- content accept if { req_len 0 }
fe_submission:
description: 'Frontend for Stalwart SMTP Submission'
bind:
0.0.0.0:587: []
mode: 'tcp'
options:
log: global
default_backend: be_stalwart_submission
tcp-request:
- inspect-delay 5s
- content accept if { req_len 0 }
profiles::haproxy::backends:
be_letsencrypt:
description: Backend for LetsEncrypt Verifications
collect_exported: true
options:
balance: roundrobin
be_default:
description: Backend for unmatched HTTP traffic
collect_exported: true
options:
balance: roundrobin
option:
- httpchk GET /
- forwardfor
cookie: SRVNAME insert
http-request:
- set-header X-Forwarded-Port %[dst_port]
- add-header X-Forwarded-Proto https if { dst_port 443 }
prometheus::haproxy_exporter::cnf_scrape_uri: unix:/var/lib/haproxy/stats
prometheus::haproxy_exporter::export_scrape_job: true

View File

@ -1,65 +0,0 @@
---
hiera_include:
- incus
profiles::packages::include:
bridge-utils: {}
dnsmasq: {}
squashfs-tools: {}
iptables-nft: {}
profiles::pki::vault::alt_names:
- incus-images.service.consul
- incus-images.query.consul
- "incus-images.service.%{facts.country}-%{facts.region}.consul"
profiles::ssh::sign::principals:
- incus-images.service.consul
- incus-images.query.consul
- "incus-images.service.%{facts.country}-%{facts.region}.consul"
# configure consul service
consul::services:
incus-images:
service_name: 'incus-images'
tags:
- 'incus'
- 'images'
- 'container'
- 'lxd'
address: "%{facts.networking.ip}"
port: 8443
checks:
- id: 'incus_https_check'
name: 'incus HTTPS Check'
http: "https://%{facts.networking.fqdn}:8443"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: incus-images
disposition: write
# manage incus
incus::init: true
incus::server_port: 8443
# add sysadmin to incus-admin group
profiles::accounts::sysadmin::extra_groups:
- incus-admin
# sysctl recommendations
sysctl::base::values:
net.ipv4.conf.all.forwarding:
value: '1'
net.ipv6.conf.all.forwarding:
value: '1'
# limits.d recommendations
limits::entries:
'*/nofile':
both: 1048576
'root/nofile':
both: 1048576

View File

@ -1,2 +0,0 @@
ceph::key::media: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAX4pXMYx0Kms74s+CCT9EnuVSq3fvlcxpkezFt/VYR0cfo8K8Sm0CE0MFEQ/sdZJ+bnycjWhnkRrT0Wj2gCcqG5+KSJPVb6OOF/zkCMEjLu7xSatKakdOmnTxJEx47Lo84dGVk+YqiTT1E5XVoKlFUvZw7iIW6BgTSORo75fC2VE8MsV0GVMIusgjqMuWgyhVUGKkPGUsGAPs5Ky7uys9tlyH5s4FiZqUKvPdeKlX0HMH5XrHuKBSJg+Nju7GLNLB+/XjBsTZrY2OMC0fzczb1EQ5KL2Pl502sE8vr1VNbXqDg7vZGzWnI60Yv2t2GUxLpUjM741NP9jZ0ovGQT8I2DBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBDUnyCYnAG1HFiBAef1KYKsgDCtxa4fw0zb7DFzVxjpM1n5fAabbaAxIXaOxLC8u3XM3+5CYsG1dWTIDZOo+qkS9sY=]
ceph::key::apps: ENC[PKCS7,MIIBmQYJKoZIhvcNAQcDoIIBijCCAYYCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEANWnFzl98B+YVXXfj5zYF5XAFbZ8iGu2VZeF53QiYxuTmRCT2/3GI4h1WB/NYcGYgmxynEsdiJus7uRO8dplsJsm+lRNffTwjnC0oDxWI82C/QuccR+YxuywqnbHLFXxZecU6/joHewt8nIu9unmF92552pqOx30ashb30L0ptR3BYBy5e9eGjjWnMrKPM4wU5NJrpW8fb9OibMWWZ1JON9tq8QITm2USCHkVOAPCfo94Dlo+2mdQWtn0GhFnRaiTNNnKT1zqSdagOUOkkY6v3yDQdmOKtR77R8bJLo/WDxUodKOt6Oi8Iuvkkvjjk6t/u05eQTPNoVQo6blV13qoDzBcBgkqhkiG9w0BBwEwHQYJYIZIAWUDBAEqBBDWHi5X6rS0BOznTJZH1IfBgDBFQ7cIEZWzVqrEkKAb7FXdK9U+/Z2Coda3GTZ0FPJ2SzVoAV9CXNuw4808RMsS6RU=]
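These values are hiera-eyaml PKCS#7 blobs and are decrypted at lookup time. A minimal sketch of the hiera.yaml hierarchy level that would decode them, assuming the stock hiera-eyaml backend and its conventional key locations (the paths and layer name are assumptions, not taken from this diff):
# Illustrative hiera.yaml entry using the hiera-eyaml backend
- name: 'Per-node encrypted data'
  lookup_key: eyaml_lookup_key
  paths:
    - "nodes/%{trusted.certname}.yaml"
  options:
    pkcs7_private_key: /etc/puppetlabs/puppet/eyaml/private_key.pkcs7.pem
    pkcs7_public_key: /etc/puppetlabs/puppet/eyaml/public_key.pkcs7.pem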

View File

@ -1,303 +0,0 @@
---
hiera_include:
- profiles::selinux::frr
- frrouting
- incus
- zfs
- profiles::ceph::node
- profiles::ceph::client
- profiles::ceph::dashboard
- profiles::storage::cephfsvols
- exporters::frr_exporter
# FIXME: puppet-python tries to manage python-dev, which is required by the ceph package
python::manage_dev_package: false
profiles::packages::include:
bridge-utils: {}
cephadm: {}
ceph-common: {}
profiles::pki::vault::alt_names:
- incus.service.consul
- incus.query.consul
- "incus.service.%{facts.country}-%{facts.region}.consul"
profiles::pki::vault::ip_sans:
- "%{hiera('networking_loopback0_ip')}"
- "%{hiera('networking_loopback1_ip')}"
- "%{hiera('networking_loopback2_ip')}"
profiles::ssh::sign::principals:
- incus.service.consul
- incus.query.consul
- "incus.service.%{facts.country}-%{facts.region}.consul"
- "%{hiera('networking_loopback0_ip')}"
- "%{facts.networking.interfaces.enp2s0.ip}"
- "%{facts.networking.interfaces.enp3s0.ip}"
# configure consul service
profiles::consul::client::host_addr: "%{hiera('networking_loopback0_ip')}"
consul::services:
incus:
service_name: 'incus'
tags:
- 'incus'
- 'container'
- 'lxd'
address: "%{hiera('networking_loopback0_ip')}"
port: 8443
checks:
- id: 'incus_https_check'
name: 'incus HTTPS Check'
http: "https://%{hiera('networking_loopback0_ip')}:8443"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
cephmgr:
service_name: 'cephmgr'
tags:
- 'metrics'
- 'metrics_scheme=http'
- 'metrics_job=ceph'
address: "%{hiera('networking_loopback2_ip')}"
port: 9283
checks:
- id: 'cephmgr_metrics_http_check'
name: 'cephmgr metrics HTTP Check'
http: "http://%{hiera('networking_loopback2_ip')}:9283"
method: 'GET'
tls_skip_verify: true
interval: '10s'
timeout: '1s'
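The metrics_* tags on the cephmgr service suggest scrape targets are discovered through Consul. As an illustration only (this scrape configuration is an assumption, not part of the repository), a Prometheus job using Consul service discovery that keeps services carrying the metrics tag could look like:
# Hypothetical Prometheus scrape_config using Consul service discovery
- job_name: 'consul-services'
  consul_sd_configs:
    - server: 'localhost:8500'
  relabel_configs:
    - source_labels: [__meta_consul_tags]
      regex: '.*,metrics,.*'    # keep only services tagged 'metrics'
      action: keep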
profiles::consul::client::node_rules:
- resource: service
segment: incus
disposition: write
- resource: service
segment: cephmgr
disposition: write
- resource: service
segment: frr_exporter
disposition: write
# additional repos
profiles::yum::global::repos:
ceph:
name: ceph
descr: ceph repository
target: /etc/yum.repos.d/ceph.repo
baseurl: https://edgecache.query.consul/ceph/yum/el%{facts.os.release.major}/%{facts.os.architecture}
gpgkey: https://download.ceph.com/keys/release.asc
mirrorlist: absent
ceph-noarch:
name: ceph-noarch
descr: ceph-noarch repository
target: /etc/yum.repos.d/ceph-noarch.repo
baseurl: https://edgecache.query.consul/ceph/yum/el%{facts.os.release.major}/noarch
gpgkey: https://download.ceph.com/keys/release.asc
mirrorlist: absent
frr-extras:
name: frr-extras
descr: frr-extras repository
target: /etc/yum.repos.d/frr-extras.repo
baseurl: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
frr-stable:
name: frr-stable
descr: frr-stable repository
target: /etc/yum.repos.d/frr-stable.repo
baseurl: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
zfs-kmod:
name: zfs-kmod
descr: zfs-kmod repository
target: /etc/yum.repos.d/zfs-kmod.repo
baseurl: https://packagerepo.service.consul/zfs/rhel9/kmod-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/zfs/rhel9/kmod-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-openzfs-2022
mirrorlist: absent
# dns
profiles::dns::base::primary_interface: loopback0
# dashboard/haproxy
profiles::ceph::dashboard::ipaddress: "%{hiera('networking_loopback0_ip')}"
# networking
systemd::manage_networkd: true
systemd::manage_all_network_files: true
networking::interfaces:
enp2s0:
type: physical
txqueuelen: 10000
forwarding: true
enp3s0:
type: physical
mtu: 1500
txqueuelen: 10000
forwarding: true
loopback0:
type: dummy
ipaddress: "%{hiera('networking_loopback0_ip')}"
netmask: 255.255.255.255
mtu: 1500
loopback1:
type: dummy
ipaddress: "%{hiera('networking_loopback1_ip')}"
netmask: 255.255.255.255
mtu: 1500
loopback2:
type: dummy
ipaddress: "%{hiera('networking_loopback2_ip')}"
netmask: 255.255.255.255
mtu: 1500
# frrouting
exporters::frr_exporter::enable: true
frrouting::ospfd_router_id: "%{hiera('networking_loopback0_ip')}"
frrouting::ospf_preferred_source_enable: true
frrouting::ospf_preferred_source: "%{hiera('networking_loopback0_ip')}"
frrouting::ospfd_redistribute:
- connected
frrouting::ospfd_interfaces:
enp2s0:
area: 0.0.0.0
enp3s0:
area: 0.0.0.0
loopback0:
area: 0.0.0.0
loopback1:
area: 0.0.0.0
loopback2:
area: 0.0.0.0
brcom1:
area: 0.0.0.0
brdmz1:
area: 0.0.0.0
brwan1:
area: 0.0.0.0
frrouting::daemons:
ospfd: true
# add loopback interfaces to ssh list
ssh::server::options:
ListenAddress:
- "%{hiera('networking_loopback0_ip')}"
- "%{facts.networking.interfaces.enp2s0.ip}"
- "%{facts.networking.interfaces.enp3s0.ip}"
# zfs settings
zfs::manage_repo: false
zfs::zfs_arc_min: ~
zfs::zfs_arc_max: 4294967296 # 4GB
zfs::zpools:
fastpool:
ensure: present
disk: /dev/nvme1n1
ashift: 12
zfs::datasets:
fastpool:
canmount: 'off'
acltype: posix
atime: 'off'
relatime: 'off'
compression: 'zstd'
xattr: 'sa'
fastpool/data:
canmount: 'on'
mountpoint: '/data'
fastpool/data/incus:
canmount: 'on'
mountpoint: '/data/incus'
# manage incus
incus::init: true
incus::bridge: br10
incus::server_port: 8443
incus::server_addr: "%{hiera('networking_loopback0_ip')}"
# add sysadmin to incus-admin group
profiles::accounts::sysadmin::extra_groups:
- incus-admin
# manage cephfs mounts
profiles::ceph::client::manage_ceph_conf: false
profiles::ceph::client::manage_ceph_package: false
profiles::ceph::client::manage_ceph_paths: false
profiles::ceph::client::fsid: 'de96a98f-3d23-465a-a899-86d3d67edab8'
profiles::ceph::client::mons:
- 198.18.23.9
- 198.18.23.10
- 198.18.23.11
- 198.18.23.12
- 198.18.23.13
profiles::ceph::client::keyrings:
media:
key: "%{hiera('ceph::key::media')}"
apps:
key: "%{hiera('ceph::key::apps')}"
profiles::storage::cephfsvols::volumes:
cephfsvol_media:
mount: "/shared/media"
keyring: "/etc/ceph/ceph.client.media.keyring"
cephfs_name: "media"
cephfs_fs: "mediafs"
cephfs_mon: "%{alias('profiles::ceph::client::mons')}"
require: "Profiles::Ceph::Keyring[media]"
cephfsvol_apps:
mount: "/shared/apps"
keyring: "/etc/ceph/ceph.client.apps.keyring"
cephfs_name: "apps"
cephfs_fs: "appfs"
cephfs_mon: "%{alias('profiles::ceph::client::mons')}"
require: "Profiles::Ceph::Keyring[apps]"
# sysctl recommendations
sysctl::base::values:
fs.aio-max-nr:
value: '524288'
fs.inotify.max_queued_events:
value: '1048576'
fs.inotify.max_user_instances:
value: '1048576'
fs.inotify.max_user_watches:
value: '1048576'
kernel.dmesg_restrict:
value: '1'
kernel.keys.maxbytes:
value: '2000000'
kernel.keys.maxkeys:
value: '2000'
net.core.bpf_jit_limit:
value: '1000000000'
net.ipv4.neigh.default.gc_thresh3:
value: '8192'
net.ipv6.neigh.default.gc_thresh3:
value: '8192'
vm.max_map_count:
value: '262144'
net.ipv4.conf.all.forwarding:
value: '1'
net.ipv6.conf.all.forwarding:
value: '1'
net.ipv4.tcp_l3mdev_accept:
value: '0'
net.ipv4.conf.default.rp_filter:
value: '0'
net.ipv4.conf.all.rp_filter:
value: '0'
# limits.d recommendations
limits::entries:
'*/nofile':
both: 1048576
'root/nofile':
both: 1048576
'*/memlock':
both: unlimited
'root/memlock':
both: unlimited

View File

@ -1 +0,0 @@
rke2::node_token: ENC[PKCS7,MIIB2gYJKoZIhvcNAQcDoIIByzCCAccCAQAxggEhMIIBHQIBADAFMAACAQEwDQYJKoZIhvcNAQEBBQAEggEAOD+w5nJFqEYWFj+tZQ65Oi19eDhaWtpLQ0gwEdBtMmY9sPJ63l1q2qH933NH6TOd1UlMDvGfoDLCae+yt/MAW//cJ15X3QbiVQ23DdfCOlUEZN6fjVrveEt/yIeFQmWvnkMS4pRfPbgQu2OHm37PpuPE7s6dUyGItAYjchrRhtQ7ibhXDnN7miG+oVRXP2T8b/V5WPdmA222DSV6r/AnqaWkna9W/oh/I2sNKeEm5q3f8bh8Gxt1dDy3VwaZ3lAh3uR+SUm7P/6PTYw8opxiumFBvos0mRiXIdOwUuqrAS8hafWBhxnDLlTBfz62Nc4wQmQ8gz0bHJZSipH9G6mIEDCBnAYJKoZIhvcNAQcBMB0GCWCGSAFlAwQBKgQQ2WG6ROWFlQdXx0TuO5oABoBwTDtAXIj7y6I1B3zCnFoMHETf5d7ulPGdgwZsENf0UIHpg2l0w503MUHHbu6YDFiDiTE0oDNJPVHid7TO+XwWFgh5v1MWi/XeEBgCs6nMCW8qkX0Z3UXaZdSBUll1M4sRtuqscBnoD/LLs2kKfxrqQg==]

View File

@ -1,168 +0,0 @@
---
hiera_include:
- profiles::selinux::setenforce
- profiles::ceph::node
- profiles::ceph::client
- exporters::frr_exporter
- frrouting
- rke2
# manage rke2
rke2::bootstrap_node: prodnxsr0001.main.unkin.net
rke2::join_url: https://join-k8s.service.consul:9345
rke2::config_hash:
bind-address: "%{hiera('networking_loopback0_ip')}"
node-ip: "%{hiera('networking_loopback0_ip')}"
node-external-ip: "%{hiera('networking_loopback0_ip')}"
write-kubeconfig-mode: 644
kubelet-arg:
- '--node-status-update-frequency=4s'
- '--max-pods=100'
node-label:
- "region=%{facts.region}"
- "country=%{facts.country}"
- "asset=%{facts.dmi.product.serial_number}"
- "zone=%{zone}"
# FIXME: puppet-python tries to manage python-dev, which is required by the ceph package
python::manage_dev_package: false
profiles::packages::include:
bridge-utils: {}
cephadm: {}
profiles::selinux::setenforce::mode: disabled
profiles::ceph::client::manage_ceph_conf: false
profiles::ceph::client::manage_ceph_package: false
profiles::ceph::client::manage_ceph_paths: false
profiles::ceph::client::fsid: 'de96a98f-3d23-465a-a899-86d3d67edab8'
profiles::ceph::client::mons:
- 198.18.23.9
- 198.18.23.10
- 198.18.23.11
- 198.18.23.12
- 198.18.23.13
# additional repos
profiles::yum::global::repos:
ceph:
name: ceph
descr: ceph repository
target: /etc/yum.repos.d/ceph.repo
baseurl: https://edgecache.query.consul/ceph/yum/el%{facts.os.release.major}/%{facts.os.architecture}
gpgkey: https://download.ceph.com/keys/release.asc
mirrorlist: absent
ceph-noarch:
name: ceph-noarch
descr: ceph-noarch repository
target: /etc/yum.repos.d/ceph-noarch.repo
baseurl: https://edgecache.query.consul/ceph/yum/el%{facts.os.release.major}/noarch
gpgkey: https://download.ceph.com/keys/release.asc
mirrorlist: absent
frr-extras:
name: frr-extras
descr: frr-extras repository
target: /etc/yum.repos.d/frr-extras.repo
baseurl: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/extras-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
frr-stable:
name: frr-stable
descr: frr-stable repository
target: /etc/yum.repos.d/frr-stable.repo
baseurl: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os
gpgkey: https://packagerepo.service.consul/frr/el9/stable-daily/%{facts.os.architecture}/os/RPM-GPG-KEY-FRR
mirrorlist: absent
rancher-rke2-common-latest:
name: rancher-rke2-common-latest
descr: rancher-rke2-common-latest
target: /etc/yum.repos.d/rke2-common.repo
baseurl: https://packagerepo.service.consul/rke2/rhel%{facts.os.release.major}/common-daily/x86_64/os/
gpgkey: https://packagerepo.service.consul/rke2/rhel%{facts.os.release.major}/common-daily/x86_64/os/public.key
mirrorlist: absent
rancher-rke2-1-33-latest:
name: rancher-rke2-1-33-latest
descr: rancher-rke2-1-33-latest
target: /etc/yum.repos.d/rke2-1-33.repo
baseurl: https://packagerepo.service.consul/rke2/rhel%{facts.os.release.major}/1.33-daily/x86_64/os/
gpgkey: https://packagerepo.service.consul/rke2/rhel%{facts.os.release.major}/1.33-daily/x86_64/os/public.key
mirrorlist: absent
# dns
profiles::dns::base::primary_interface: loopback0
# networking
systemd::manage_networkd: true
systemd::manage_all_network_files: true
networking::interfaces:
"%{hiera('networking_1000_iface')}":
type: physical
ipaddress: "%{hiera('networking_1000_ip')}"
gateway: 198.18.15.254
txqueuelen: 10000
forwarding: true
"%{hiera('networking_2500_iface')}":
type: physical
ipaddress: "%{hiera('networking_2500_ip')}"
mtu: 1500
txqueuelen: 10000
forwarding: true
loopback0:
type: dummy
ipaddress: "%{hiera('networking_loopback0_ip')}"
netmask: 255.255.255.255
mtu: 1500
loopback1:
type: dummy
ipaddress: "%{hiera('networking_loopback1_ip')}"
netmask: 255.255.255.255
mtu: 1500
loopback2:
type: dummy
ipaddress: "%{hiera('networking_loopback2_ip')}"
netmask: 255.255.255.255
mtu: 1500
# configure consul service
profiles::consul::client::host_addr: "%{hiera('networking_loopback0_ip')}"
profiles::consul::client::node_rules:
- resource: service
segment: frr_exporter
disposition: write
# frrouting
exporters::frr_exporter::enable: true
frrouting::ospfd_router_id: "%{hiera('networking_loopback0_ip')}"
frrouting::ospf_preferred_source_enable: true
frrouting::ospf_preferred_source: "%{hiera('networking_loopback0_ip')}"
frrouting::ospfd_redistribute:
- connected
frrouting::ospfd_interfaces:
"%{hiera('networking_1000_iface')}":
area: 0.0.0.0
"%{hiera('networking_2500_iface')}":
area: 0.0.0.0
loopback0:
area: 0.0.0.0
loopback1:
area: 0.0.0.0
loopback2:
area: 0.0.0.0
frrouting::daemons:
ospfd: true
frrouting::ospf_exclude_k8s_enable: true
frrouting::k8s_cluster_cidr: '10.42.0.0/16' # RKE2 cluster-cidr (pods)
frrouting::k8s_service_cidr: '10.43.0.0/16' # RKE2 service-cidr
# add loopback interfaces to ssh list
ssh::server::options:
ListenAddress:
- "%{hiera('networking_loopback0_ip')}"
- "%{hiera('networking_1000_ip')}"
- "%{hiera('networking_2500_ip')}"
profiles::ssh::sign::principals:
- "%{hiera('networking_loopback0_ip')}"
- "%{hiera('networking_1000_ip')}"
- "%{hiera('networking_2500_ip')}"

View File

@ -1,3 +0,0 @@
---
# manage rke2
rke2::node_type: agent

View File

@ -1 +0,0 @@
---

View File

@ -1,78 +0,0 @@
---
# manage rke2
rke2::node_type: server
rke2::helm_install: true
rke2::helm_repos:
rancher-stable: https://releases.rancher.com/server-charts/stable
purelb: https://gitlab.com/api/v4/projects/20400619/packages/helm/stable
jetstack: https://charts.jetstack.io
harbor: https://helm.goharbor.io
traefik: https://traefik.github.io/charts
hashicorp: https://helm.releases.hashicorp.com
rke2::extra_config_files:
- rke2-canal-config
- rke2-nginx-ingress-config
rke2::config_hash:
advertise-address: "%{hiera('networking_loopback0_ip')}"
tls-san:
- "join-k8s.service.consul"
- "api-k8s.service.consul"
- "api.k8s.unkin.net"
- "join.k8s.unkin.net"
cni: canal
cluster-cidr: 10.42.0.0/16
service-cidr: 10.43.0.0/16
cluster-dns: 10.43.0.10
etcd-arg: "--quota-backend-bytes 2048000000"
etcd-snapshot-schedule-cron: "0 3 * * *"
etcd-snapshot-retention: 10
kube-apiserver-arg:
- '--default-not-ready-toleration-seconds=30'
- '--default-unreachable-toleration-seconds=30'
kube-controller-manager-arg:
- '--node-monitor-period=4s'
protect-kernel-defaults: true
disable-kube-proxy: false
# configure consul service
consul::services:
api-k8s:
service_name: 'api-k8s'
address: "%{facts.networking.fqdn}"
port: 6443
checks:
- id: 'api-k8s_livez_check'
name: 'api-k8s livez Check'
args:
- sudo
- /usr/local/bin/check_k8s_api.sh
interval: '10s'
timeout: '1s'
join-k8s:
service_name: 'join-k8s'
address: "%{facts.networking.fqdn}"
port: 9345
checks:
- id: 'rke2_tcp_check_9345'
name: 'rke2 TCP Check 9345'
tcp: "%{hiera('networking_loopback0_ip')}:9345"
interval: '10s'
timeout: '1s'
profiles::consul::client::node_rules:
- resource: service
segment: api-k8s
disposition: write
- resource: service
segment: join-k8s
disposition: write
profiles::pki::vault::alt_names:
- api-k8s.service.consul
- api-k8s.query.consul
- "api-k8s.service.%{facts.country}-%{facts.region}.consul"
sudo::configs:
consul-checks:
priority: 20
content: |
consul ALL=(ALL) NOPASSWD: /usr/local/bin/check_k8s_api.sh
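The rke2-canal-config and rke2-nginx-ingress-config entries under rke2::extra_config_files earlier in this file are not shown in this diff. As a sketch only, assuming they follow RKE2's standard HelmChartConfig pattern for overriding packaged chart values, such a file could look like the following; the path and chart values are illustrative, not taken from this repository:
# Hypothetical /var/lib/rancher/rke2/server/manifests/rke2-canal-config.yaml
apiVersion: helm.cattle.io/v1
kind: HelmChartConfig
metadata:
  name: rke2-canal
  namespace: kube-system
spec:
  valuesContent: |-
    flannel:
      iface: loopback0            # illustrative value only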

Some files were not shown because too many files have changed in this diff.