Compare commits


17 Commits

SHA1 Message Date
aa2a56230d new: add Umami 2024-06-23 01:52:05 +00:00
6494393821 new: add Whoogle Search 2024-06-23 01:10:55 +00:00
616f2afdfa new: add README.md 2024-06-13 20:45:15 +00:00
0706280a9d new: add victoria-metrics 2024-06-11 21:46:04 +00:00
041ee577f4 new: add Stirling-PDF 2024-06-10 00:55:07 +00:00
595a9bceba new: add Shiori 2024-06-10 00:54:48 +00:00
eb01a093b0 new: add Redlib 2024-06-09 23:14:08 +00:00
30fd28f54c fix: add default prometheus conf 2024-06-09 23:13:42 +00:00
4afa83f9bc new: add Prometheus 2024-06-09 01:02:06 +00:00
365382e7df new: add yarr 2024-06-09 00:52:31 +00:00
a8a09e7cee new: add Ntfy 2024-06-09 00:18:08 +00:00
b9896fe850 new: add lldap 2024-06-08 22:44:15 +00:00
22f4139a3b new: add IT-Tools 2024-06-08 22:14:11 +00:00
0238550f15 new: add Homepage 2024-06-08 22:12:57 +00:00
83cff7450a new: add Grafana 2024-06-08 21:56:04 +00:00
26bab08ea0 new: add Gitea 2024-06-08 21:42:50 +00:00
fd7a550dc5 new: add Excalidraw 2024-06-08 20:35:39 +00:00
23 changed files with 912 additions and 0 deletions

README.md (new file, 71 lines)

@@ -0,0 +1,71 @@
# Homelab
## Description
This repository lists all of my services, hosted on several machines in my homelab and run with Podman
## List of services (WIP)
Web services:
- Caddy
- Dockge
- Excalidraw
- It-tools
- Umami
- Hugo
- Whoogle
- Wikijs
- Stirling-pdf
- Redlib
- Homepage
- yarr
- Traefik
- Ntfy
- Pihole
Security and Authentication:
- Fail2ban
- Authelia
- Wireguard
- lldap
Development:
- Gitea
- Gitea Actions Runner
- code-server
Monitoring and Logging:
- Grafana
- Prometheus
- Prometheus Node Exporter
- Prometheus Blackbox Exporter
- cAdvisor
- victoria-metrics
- Alertmanager
- Vector
- Loki
- Diun
- Dozzle
File management and Backup:
- Filebrowser
- Syncthing
- Linkding
- shiori
Multimedia:
- jellyfin
- sonarr
- radarr
- lidarr
- bazarr
- jellyseerr
- jackett
- flaresolverr
- transmission
- joal
- pigallery2
## Roadmap
- Deploying containers with either Renovate or Ansible
- K8s, ArgoCD ?
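
Every service added in this change follows the same layout described above: one directory per service containing a compose.yml (plus a .env.example where secrets are needed), attached to a shared external network and carrying caddy-docker-proxy and Diun labels. A minimal sketch of that recurring pattern; "myservice", its image, and port 8000 are placeholders, not a real service from this change:

```yaml
# Sketch of the per-service pattern used throughout this change.
# "myservice", the image, and port 8000 are placeholders.
services:
  myservice:
    container_name: myservice
    image: example/myservice        # hypothetical image
    env_file: .env                  # optional, mirrors the .env.example
    restart: unless-stopped
    networks:
      - dmz
    labels:
      # Caddy (caddy-docker-proxy): route myservice.{INT_DOMAIN} to this container
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@myservice: host myservice.{$$INT_DOMAIN}
      caddy.1_handle: "@myservice"
      caddy.1_handle.reverse_proxy: "{{upstreams 8000}}"
      # Diun: watch the image for updates
      diun.enable: true
networks:
  dmz:
    external: true
```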


@@ -27,6 +27,7 @@ services:
      caddy.acme_dns.consumer_key: "{env.CONSUMER_KEY}"
      ## Debug
      # caddy.log.level: DEBUG
networks:
  dmz:
    external: true
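
The acme_dns label above references {env.CONSUMER_KEY}, which Caddy resolves from its own container environment at runtime. A hedged sketch of how that variable might be supplied to the Caddy service; the .env file and the exact variable set are assumptions, only CONSUMER_KEY appears in this hunk:

```yaml
# Hypothetical: provide the DNS-provider credential that the
# caddy.acme_dns.consumer_key label resolves via {env.CONSUMER_KEY}.
services:
  caddy:
    env_file: .env   # e.g. CONSUMER_KEY=<dns provider consumer key>
```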

excalidraw/compose.yml (new file, 21 lines)

@@ -0,0 +1,21 @@
services:
  excalidraw:
    container_name: excalidraw
    image: excalidraw/excalidraw
    restart: unless-stopped
    networks:
      - dmz
    healthcheck:
      disable: true
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@draw: host draw.{$$INT_DOMAIN}
      caddy.1_handle: "@draw"
      caddy.1_handle.reverse_proxy: "{{upstreams 80}}"
      # Diun
      diun.enable: true
networks:
  dmz:
    external: true

gitea/.env.example (new file, 3 lines)

@@ -0,0 +1,3 @@
# https://docs.gitea.com/administration/config-cheat-sheet
# GITEA____APP_NAME=
# GITEA__SERVER__DOMAIN=

gitea/compose.yml (new file, 29 lines)

@@ -0,0 +1,29 @@
services:
  gitea:
    container_name: gitea
    image: gitea/gitea:1
    networks:
      - dmz
      - ldap
    ports:
      - 2222:22
    volumes:
      - ./appdata:/data
      - /etc/timezone:/etc/timezone:ro
      - /etc/localtime:/etc/localtime:ro
    restart: unless-stopped
    env_file: .env
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@git: host git.{$$INT_DOMAIN}
      caddy.1_handle: "@git"
      caddy.1_handle.reverse_proxy: "{{upstreams 3000}}"
      # Diun
      diun.enable: true
networks:
  dmz:
    external: true
  ldap:
    external: true

grafana/.env.example (new file, 1 line)

@@ -0,0 +1 @@
# https://grafana.com/docs/grafana/latest/setup-grafana/configure-grafana/#override-configuration-with-environment-variables

grafana/compose.yml (new file, 26 lines)

@@ -0,0 +1,26 @@
services:
  grafana:
    container_name: grafana
    image: grafana/grafana
    user: 0:0 # Rootless Podman
    env_file: .env
    restart: unless-stopped
    volumes:
      - ./appdata:/var/lib/grafana
    networks:
      - dmz
      - monitoring
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@grafana: host grafana.{$$INT_DOMAIN}
      caddy.1_handle: "@grafana"
      caddy.1_handle.reverse_proxy: "{{upstreams 3000}}"
      # Diun
      diun.enable: true
networks:
  dmz:
    external: true
  monitoring:
    external: true

homepage/.env.example (new file, 1 line)

@@ -0,0 +1 @@
# https://gethomepage.dev/latest/installation/docker/#using-environment-secrets

homepage/compose.yml (new file, 23 lines)

@@ -0,0 +1,23 @@
services:
  homepage:
    image: ghcr.io/gethomepage/homepage
    container_name: homepage
    volumes:
      - ./appconf:/app/config
      - $SOCKET:/var/run/docker.sock
    env_file: .env
    restart: unless-stopped
    networks:
      - dmz
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@homepage: host homepage.{$$INT_DOMAIN}
      caddy.1_handle: "@homepage"
      caddy.1_handle.reverse_proxy: "{{upstreams 3000}}"
      # Diun
      diun.enable: true
networks:
  dmz:
    external: true
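
The $SOCKET variable in the volumes section is interpolated from the environment or the service's .env file; on a rootless Podman host it would typically point at the user's Podman API socket rather than the Docker one. A sketch under that assumption, where the UID 1000 path is a placeholder:

```yaml
# Hypothetical rootless-Podman socket mount for the homepage docker integration.
# The host path follows Podman's $XDG_RUNTIME_DIR/podman/podman.sock convention.
services:
  homepage:
    volumes:
      - /run/user/1000/podman/podman.sock:/var/run/docker.sock
```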

it-tools/compose.yml (new file, 19 lines)

@@ -0,0 +1,19 @@
services:
  it-tools:
    container_name: it-tools
    image: corentinth/it-tools
    restart: unless-stopped
    networks:
      - dmz
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@it-tools: "host it-tools.{$$INT_DOMAIN}"
      caddy.1_handle: "@it-tools"
      caddy.1_handle.reverse_proxy: "{{upstreams 80}}"
      # Diun
      diun.enable: true
networks:
  dmz:
    external: true

lldap/.env.example (new file, 6 lines)

@@ -0,0 +1,6 @@
UID=####
GID=####
TZ=####/####
LLDAP_JWT_SECRET=REPLACE_WITH_RANDOM
LLDAP_KEY_SEED=REPLACE_WITH_RANDOM
LLDAP_LDAP_BASE_DN=dc=example,dc=com

lldap/compose.yml (new file, 32 lines)

@@ -0,0 +1,32 @@
services:
  lldap:
    container_name: lldap
    image: lldap/lldap:stable
    ports:
      # For LDAP
      #- "3890:3890"
      # For LDAPS (LDAP over SSL), enable this port if LLDAP_LDAPS_OPTIONS__ENABLED is set to true (see env below)
      #- "6360:6360"
      # For the web front-end
      #- "17170:17170"
    volumes:
      - ./appdata:/data
    env_file: .env
    restart: unless-stopped
    networks:
      - dmz
      - ldap
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@lldap: "host lldap.{$$INT_DOMAIN}"
      caddy.1_handle: "@lldap"
      caddy.1_handle.reverse_proxy: "{{upstreams 17170}}"
      # Diun
      diun.enable: true
networks:
  dmz:
    external: true
  ldap:
    external: true
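
lldap joins the shared ldap network so other containers (here Gitea) can query it directly by container name. A hedged sketch of the client-side connection settings such a service would use, written as generic YAML rather than any specific tool's config format; the admin bind DN follows lldap's default layout and the base DN mirrors lldap/.env.example, none of which is part of this change itself:

```yaml
# Hypothetical LDAP client settings for a container on the "ldap" network.
ldap:
  url: ldap://lldap:3890                            # lldap's default LDAP port
  base_dn: dc=example,dc=com                        # from lldap/.env.example
  bind_user: uid=admin,ou=people,dc=example,dc=com  # lldap's default admin user
```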

ntfy/compose.yml (new file, 28 lines)

@@ -0,0 +1,28 @@
services:
  ntfy:
    container_name: ntfy
    image: binwiederhier/ntfy
    command: serve
    environment:
      - TZ=Europe/Paris
    volumes:
      - /var/cache/ntfy:/var/cache/ntfy
      - ./appconf:/etc/ntfy
    restart: unless-stopped
    networks:
      - dmz
      - monitoring
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@ntfy: "host ntfy.{$$INT_DOMAIN}"
      caddy.1_handle: "@ntfy"
      caddy.1_handle.reverse_proxy: "{{upstreams 80}}"
      # Diun
      diun.enable: true
networks:
  dmz:
    external: true
  monitoring:
    external: true
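
ntfy reads its server configuration from /etc/ntfy/server.yml, i.e. the mounted ./appconf directory. A minimal hedged sketch of what that file could look like behind the Caddy proxy above; the domain is a placeholder and the file itself is not part of this change:

```yaml
# Hypothetical ./appconf/server.yml for this reverse-proxied setup.
base-url: https://ntfy.example.lan     # placeholder; would match the Caddy host rule
behind-proxy: true                     # trust forwarded client IPs from Caddy
cache-file: /var/cache/ntfy/cache.db   # persisted via the /var/cache/ntfy volume
```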


@@ -0,0 +1,442 @@
# my global config
global:
  scrape_interval: 15s
  evaluation_interval: 30s
  body_size_limit: 15MB
  sample_limit: 1500
  target_limit: 30
  label_limit: 30
  label_name_length_limit: 200
  label_value_length_limit: 200
  # scrape_timeout is set to the global default (10s).
  external_labels:
    monitor: codelab
    foo: bar
rule_files:
  - "first.rules"
  - "my/*.rules"
remote_write:
  - url: http://remote1/push
    name: drop_expensive
    write_relabel_configs:
      - source_labels: [__name__]
        regex: expensive.*
        action: drop
    oauth2:
      client_id: "123"
      client_secret: "456"
      token_url: "http://remote1/auth"
      tls_config:
        cert_file: valid_cert_file
        key_file: valid_key_file
  - url: http://remote2/push
    name: rw_tls
    tls_config:
      cert_file: valid_cert_file
      key_file: valid_key_file
    headers:
      name: value
remote_read:
  - url: http://remote1/read
    read_recent: true
    name: default
    enable_http2: false
  - url: http://remote3/read
    read_recent: false
    name: read_special
    required_matchers:
      job: special
    tls_config:
      cert_file: valid_cert_file
      key_file: valid_key_file
scrape_configs:
  - job_name: prometheus
    honor_labels: true
    # scrape_interval is defined by the configured global (15s).
    # scrape_timeout is defined by the global default (10s).
    # metrics_path defaults to '/metrics'
    # scheme defaults to 'http'.
    file_sd_configs:
      - files:
          - foo/*.slow.json
          - foo/*.slow.yml
          - single/file.yml
        refresh_interval: 10m
      - files:
          - bar/*.yaml
    static_configs:
      - targets: ["localhost:9090", "localhost:9191"]
        labels:
          my: label
          your: label
    relabel_configs:
      - source_labels: [job, __meta_dns_name]
        regex: (.*)some-[regex]
        target_label: job
        replacement: foo-${1}
        # action defaults to 'replace'
      - source_labels: [abc]
        target_label: cde
      - replacement: static
        target_label: abc
      - regex:
        replacement: static
        target_label: abc
      - source_labels: [foo]
        target_label: abc
        action: keepequal
      - source_labels: [foo]
        target_label: abc
        action: dropequal
    authorization:
      credentials_file: valid_token_file
    tls_config:
      min_version: TLS10
  - job_name: service-x
    basic_auth:
      username: admin_name
      password: "multiline\nmysecret\ntest"
    scrape_interval: 50s
    scrape_timeout: 5s
    scrape_protocols: ["PrometheusText0.0.4"]
    body_size_limit: 10MB
    sample_limit: 1000
    target_limit: 35
    label_limit: 35
    label_name_length_limit: 210
    label_value_length_limit: 210
    metrics_path: /my_path
    scheme: https
    dns_sd_configs:
      - refresh_interval: 15s
        names:
          - first.dns.address.domain.com
          - second.dns.address.domain.com
      - names:
          - first.dns.address.domain.com
    relabel_configs:
      - source_labels: [job]
        regex: (.*)some-[regex]
        action: drop
      - source_labels: [__address__]
        modulus: 8
        target_label: __tmp_hash
        action: hashmod
      - source_labels: [__tmp_hash]
        regex: 1
        action: keep
      - action: labelmap
        regex: 1
      - action: labeldrop
        regex: d
      - action: labelkeep
        regex: k
    metric_relabel_configs:
      - source_labels: [__name__]
        regex: expensive_metric.*
        action: drop
  - job_name: service-y
    consul_sd_configs:
      - server: "localhost:1234"
        token: mysecret
        path_prefix: /consul
        services: ["nginx", "cache", "mysql"]
        tags: ["canary", "v1"]
        node_meta:
          rack: "123"
        allow_stale: true
        scheme: https
        tls_config:
          ca_file: valid_ca_file
          cert_file: valid_cert_file
          key_file: valid_key_file
          insecure_skip_verify: false
    relabel_configs:
      - source_labels: [__meta_sd_consul_tags]
        separator: ","
        regex: label:([^=]+)=([^,]+)
        target_label: ${1}
        replacement: ${2}
  - job_name: service-z
    tls_config:
      cert_file: valid_cert_file
      key_file: valid_key_file
    authorization:
      credentials: mysecret
  - job_name: service-kubernetes
    kubernetes_sd_configs:
      - role: endpoints
        api_server: "https://localhost:1234"
        tls_config:
          cert_file: valid_cert_file
          key_file: valid_key_file
        basic_auth:
          username: "myusername"
          password: "mysecret"
  - job_name: service-kubernetes-namespaces
    kubernetes_sd_configs:
      - role: endpoints
        api_server: "https://localhost:1234"
        namespaces:
          names:
            - default
    basic_auth:
      username: "myusername"
      password_file: valid_password_file
  - job_name: service-kuma
    kuma_sd_configs:
      - server: http://kuma-control-plane.kuma-system.svc:5676
        client_id: main-prometheus
  - job_name: service-marathon
    marathon_sd_configs:
      - servers:
          - "https://marathon.example.com:443"
        auth_token: "mysecret"
        tls_config:
          cert_file: valid_cert_file
          key_file: valid_key_file
  - job_name: service-nomad
    nomad_sd_configs:
      - server: 'http://localhost:4646'
  - job_name: service-ec2
    ec2_sd_configs:
      - region: us-east-1
        access_key: access
        secret_key: mysecret
        profile: profile
        filters:
          - name: tag:environment
            values:
              - prod
          - name: tag:service
            values:
              - web
              - db
  - job_name: service-lightsail
    lightsail_sd_configs:
      - region: us-east-1
        access_key: access
        secret_key: mysecret
        profile: profile
  - job_name: service-azure
    azure_sd_configs:
      - environment: AzurePublicCloud
        authentication_method: OAuth
        subscription_id: 11AAAA11-A11A-111A-A111-1111A1111A11
        resource_group: my-resource-group
        tenant_id: BBBB222B-B2B2-2B22-B222-2BB2222BB2B2
        client_id: 333333CC-3C33-3333-CCC3-33C3CCCCC33C
        client_secret: mysecret
        port: 9100
  - job_name: service-nerve
    nerve_sd_configs:
      - servers:
          - localhost
        paths:
          - /monitoring
  - job_name: 0123service-xxx
    metrics_path: /metrics
    static_configs:
      - targets:
          - localhost:9090
  - job_name: badfederation
    honor_timestamps: false
    metrics_path: /federate
    static_configs:
      - targets:
          - localhost:9090
  - job_name: 測試
    metrics_path: /metrics
    static_configs:
      - targets:
          - localhost:9090
  - job_name: httpsd
    http_sd_configs:
      - url: "http://example.com/prometheus"
  - job_name: service-triton
    triton_sd_configs:
      - account: "testAccount"
        dns_suffix: "triton.example.com"
        endpoint: "triton.example.com"
        port: 9163
        refresh_interval: 1m
        version: 1
        tls_config:
          cert_file: valid_cert_file
          key_file: valid_key_file
  - job_name: digitalocean-droplets
    digitalocean_sd_configs:
      - authorization:
          credentials: abcdef
  - job_name: docker
    docker_sd_configs:
      - host: unix:///var/run/docker.sock
  - job_name: dockerswarm
    dockerswarm_sd_configs:
      - host: http://127.0.0.1:2375
        role: nodes
  - job_name: service-openstack
    openstack_sd_configs:
      - role: instance
        region: RegionOne
        port: 80
        refresh_interval: 1m
        tls_config:
          ca_file: valid_ca_file
          cert_file: valid_cert_file
          key_file: valid_key_file
  - job_name: service-puppetdb
    puppetdb_sd_configs:
      - url: https://puppetserver/
        query: 'resources { type = "Package" and title = "httpd" }'
        include_parameters: true
        port: 80
        refresh_interval: 1m
        tls_config:
          ca_file: valid_ca_file
          cert_file: valid_cert_file
          key_file: valid_key_file
  - job_name: hetzner
    relabel_configs:
      - action: uppercase
        source_labels: [instance]
        target_label: instance
    hetzner_sd_configs:
      - role: hcloud
        authorization:
          credentials: abcdef
      - role: robot
        basic_auth:
          username: abcdef
          password: abcdef
  - job_name: service-eureka
    eureka_sd_configs:
      - server: "http://eureka.example.com:8761/eureka"
  - job_name: ovhcloud
    ovhcloud_sd_configs:
      - service: vps
        endpoint: ovh-eu
        application_key: testAppKey
        application_secret: testAppSecret
        consumer_key: testConsumerKey
        refresh_interval: 1m
      - service: dedicated_server
        endpoint: ovh-eu
        application_key: testAppKey
        application_secret: testAppSecret
        consumer_key: testConsumerKey
        refresh_interval: 1m
  - job_name: scaleway
    scaleway_sd_configs:
      - role: instance
        project_id: 11111111-1111-1111-1111-111111111112
        access_key: SCWXXXXXXXXXXXXXXXXX
        secret_key: 11111111-1111-1111-1111-111111111111
      - role: baremetal
        project_id: 11111111-1111-1111-1111-111111111112
        access_key: SCWXXXXXXXXXXXXXXXXX
        secret_key: 11111111-1111-1111-1111-111111111111
  - job_name: linode-instances
    linode_sd_configs:
      - authorization:
          credentials: abcdef
  - job_name: uyuni
    uyuni_sd_configs:
      - server: https://localhost:1234
        username: gopher
        password: hole
  - job_name: ionos
    ionos_sd_configs:
      - datacenter_id: 8feda53f-15f0-447f-badf-ebe32dad2fc0
        authorization:
          credentials: abcdef
  - job_name: vultr
    vultr_sd_configs:
      - authorization:
          credentials: abcdef
alerting:
  alertmanagers:
    - scheme: https
      static_configs:
        - targets:
            - "1.2.3.4:9093"
            - "1.2.3.5:9093"
            - "1.2.3.6:9093"
storage:
  tsdb:
    out_of_order_time_window: 30m
tracing:
  endpoint: "localhost:4317"
  client_type: "grpc"
  headers:
    foo: "bar"
  timeout: 5s
  compression: "gzip"
  tls_config:
    cert_file: valid_cert_file
    key_file: valid_key_file
    insecure_skip_verify: true

prometheus/compose.yml (new file, 30 lines)

@@ -0,0 +1,30 @@
services:
  prometheus:
    container_name: prometheus
    image: prom/prometheus
    volumes:
      - ./appconf:/etc/prometheus
    restart: unless-stopped
    command:
      - --config.file=/etc/prometheus/prometheus.yml
      - --storage.tsdb.path=/prometheus
      - --web.console.libraries=/usr/share/prometheus/console_libraries
      - --web.console.templates=/usr/share/prometheus/consoles
      - --web.enable-lifecycle
    networks:
      - dmz
      - monitoring
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@prometheus: host prometheus.{$$INT_DOMAIN}
      caddy.1_handle: "@prometheus"
      caddy.1_handle.reverse_proxy: "{{upstreams 9090}}"
      # Diun
      diun.enable: true
networks:
  dmz:
    external: true
  monitoring:
    external: true
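
The container mounts ./appconf at /etc/prometheus and loads prometheus.yml from there. For the exporters listed in the README (Node Exporter, cAdvisor) a much smaller scrape configuration would be enough; a hedged sketch, where the container hostnames and ports are assumptions rather than part of this change:

```yaml
# Hypothetical minimal scrape config for the exporters named in the README;
# the node-exporter and cadvisor hostnames/ports are assumptions.
global:
  scrape_interval: 15s
scrape_configs:
  - job_name: prometheus
    static_configs:
      - targets: ["localhost:9090"]
  - job_name: node
    static_configs:
      - targets: ["node-exporter:9100"]
  - job_name: cadvisor
    static_configs:
      - targets: ["cadvisor:8080"]
```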

redlib/compose.yml (new file, 24 lines)

@@ -0,0 +1,24 @@
services:
  redlib:
    container_name: redlib
    image: quay.io/redlib/redlib
    restart: unless-stopped
    networks:
      - dmz
    environment:
      - REDLIB_ROBOTS_DISABLE_INDEXING=on
      - REDLIB_DEFAULT_THEME=black
      - REDLIB_DEFAULT_LAYOUT=clean
      - REDLIB_DEFAULT_WIDE=on
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@redlib: host redlib.{$$INT_DOMAIN}
      caddy.1_handle: "@redlib"
      caddy.1_handle.reverse_proxy: "{{upstreams 8080}}"
      # Diun
      diun.enable: true
networks:
  dmz:
    external: true

shiori/compose.yml (new file, 26 lines)

@@ -0,0 +1,26 @@
services:
  shiori:
    container_name: shiori
    image: ghcr.io/go-shiori/shiori
    restart: unless-stopped
    user: 0:0 # Rootless Podman
    environment:
      SHIORI_DIR: /data
      SHIORI_HTTP_SECRET_KEY: $HTTP_SECRET_KEY
      TZ: Europe/Paris
    volumes:
      - ./appdata:/data
    networks:
      - dmz
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@shiori: "host shiori.{$$INT_DOMAIN}"
      caddy.1_handle: "@shiori"
      caddy.1_handle.reverse_proxy: "{{upstreams 8080}}"
      # Diun
      diun.enable: true
networks:
  dmz:
    external: true

stiling-pdf/compose.yml (new file, 19 lines)

@@ -0,0 +1,19 @@
services:
  s-pdf:
    container_name: s-pdf
    image: frooodle/s-pdf
    restart: unless-stopped
    networks:
      - dmz
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@spdf: "host spdf.{$$INT_DOMAIN}"
      caddy.1_handle: "@spdf"
      caddy.1_handle.reverse_proxy: "{{upstreams 8080}}"
      # Diun
      diun.enable: true
networks:
  dmz:
    external: true

umami/.env.example (new file, 4 lines)

@@ -0,0 +1,4 @@
SECRET=replace-me-with-a-random-string
DB=umami
USER=user
PASS=password

umami/compose.yml (new file, 42 lines)

@@ -0,0 +1,42 @@
services:
  umami:
    container_name: umami
    image: ghcr.io/umami-software/umami:postgresql-latest
    environment:
      DATABASE_URL: postgresql://$USER:$PASS@umami-db:5432/$DB
      DATABASE_TYPE: postgresql
      APP_SECRET: $SECRET
    depends_on:
      - umami-db
    restart: unless-stopped
    networks:
      - umami
      - proxy
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@umami: host umami.{$$INT_DOMAIN}
      caddy.1_handle: "@umami"
      caddy.1_handle.reverse_proxy: "{{upstreams 3000}}"
      # Get the real client public IP if behind another proxy
      # caddy.1_handle.reverse_proxy.header_up: X-Forwarded-For {header.X-Real-IP}
      # Diun
      diun.enable: true
  umami-db:
    container_name: umami-db
    image: postgres:15-alpine
    environment:
      POSTGRES_DB: $DB
      POSTGRES_USER: $USER
      POSTGRES_PASSWORD: $PASS
    volumes:
      - ./appdb:/var/lib/postgresql/data
    restart: unless-stopped
    networks:
      - umami
networks:
  umami:
    external: true
  proxy:
    external: true
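
depends_on in its list form only orders container start-up; it does not wait for Postgres to accept connections. A hedged sketch of a healthcheck-gated variant, where the pg_isready check is an assumption and not part of this change:

```yaml
# Hypothetical fragment to merge into the compose file above:
# gate umami start-up on the database being ready.
services:
  umami:
    depends_on:
      umami-db:
        condition: service_healthy
  umami-db:
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U $$POSTGRES_USER -d $$POSTGRES_DB"]
      interval: 10s
      timeout: 5s
      retries: 5
```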


@@ -0,0 +1,19 @@
services:
  victoria-metrics-single:
    container_name: victoria-metrics-single
    image: victoriametrics/victoria-metrics:stable
    volumes:
      - ./appdata:/victoria-metrics-data
    command:
      - -retentionPeriod=12
      - -maxLabelsPerTimeseries=50
    restart: unless-stopped
    networks:
      - monitoring
    labels:
      # Diun
      diun.enable: true
networks:
  monitoring:
    external: true
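
victoria-metrics-single only joins the monitoring network and carries no Caddy labels, so the natural way to feed it is Prometheus remote_write over that network (-retentionPeriod=12 is interpreted as 12 months by default). A hedged sketch of the corresponding prometheus.yml stanza, assuming the container name above and VictoriaMetrics' default listen port 8428:

```yaml
# Hypothetical remote_write target; the hostname follows the compose service above,
# the port and path are VictoriaMetrics defaults (:8428, /api/v1/write).
remote_write:
  - url: http://victoria-metrics-single:8428/api/v1/write
```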

whoogle/compose.yml (new file, 20 lines)

@@ -0,0 +1,20 @@
services:
  whoogle:
    container_name: whoogle
    image: benbusby/whoogle-search
    env_file: .env
    restart: unless-stopped
    networks:
      - dmz
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@search: host search.{$$INT_DOMAIN}
      caddy.1_handle: "@search"
      caddy.1_handle.reverse_proxy: "{{upstreams 5000}}"
      # Diun
      diun.enable: true
networks:
  dmz:
    external: true

yarr/compose.yml (new file, 25 lines)

@@ -0,0 +1,25 @@
services:
  yarr:
    container_name: yarr
    image: ghcr.io/wakeful-cloud/yarr
    volumes:
      - ./appdata:/data
    environment:
      # Rootless Podman
      - UID=0
      - GID=0
    networks:
      - dmz
    restart: unless-stopped
    labels:
      # Caddy
      caddy: "*.{$$INT_DOMAIN}"
      caddy.1_@rss: host rss.{$$INT_DOMAIN}
      caddy.1_handle: "@rss"
      caddy.1_handle.reverse_proxy: "{{upstreams 7070}}"
      # Diun
      diun.enable: true
networks:
  dmz:
    external: true