Decommission `decker` server

It's been replaced by Prometheus running internally, and UptimeRobot checking externally.
Jake Howard 2022-12-09 19:04:13 +00:00
parent 413400cbc6
commit e86ed81102
Signed by: jake
GPG Key ID: 57AFB45680EDD477
17 changed files with 0 additions and 170 deletions

View File

@@ -1,5 +1,4 @@
 "hosts":
   "casey_ip": "213.219.38.11"
-  "decker_ip": "192.46.233.9"
   "grimes_ip": "104.238.172.209"
   "walker_ip": "192.248.168.230"

View File

@@ -9,5 +9,3 @@ nebula:
       ip: 10.23.2.4
     ingress:
       ip: 10.23.2.5
-    decker:
-      ip: 10.23.2.6

View File

@@ -1,3 +0,0 @@
-restic_backup_locations:
-  - /opt
-restic_healthchecks_id: "{{ vault_restic_healthchecks_id }}"

View File

@@ -1,12 +0,0 @@
-$ANSIBLE_VAULT;1.1;AES256
-37326662353562626466613939643162346663306230333066323231346233633561363932313364
-6636326134326435356161653231643666343432373133380a623161326465613235626236623062
-63303436626538646432323337343062376235363734623935663135666531306562616630343835
-6537356330336261360a666166366663633937326534616534316531366136613237633035383738
-38333832653935623637333437386531353831616130656532356662363765306439633464626661
-66386538336266353538356431393162373763383734633638323866396434363465303866303163
-31366566316338636239313539343465343336376435633834396239643535663563373832303331
-35643966653666653538626236663437616164653764323562346238663538396233636233326165
-62373633383539353237376130363334373936623532653538326366366261613833383734376330
-34393234393461346137336561363264613139616161333239363334346465323234376661616166
-656331326539323739626633376662613564

View File

@@ -1,7 +1,6 @@
 casey
 walker
 grimes
-decker
 pve

View File

@@ -19,7 +19,6 @@
     - ingress
     - walker
     - grimes
-    - decker
   roles:
     - role: geerlingguy.ntp
       become: true
@@ -33,7 +32,6 @@
     - walker
     - pve-gitlab-runner
     - grimes
-    - decker
     - renovate
   roles:
     - role: geerlingguy.docker
@@ -50,14 +48,12 @@
     - forrest
     - walker
     - grimes
-    - decker
   roles:
     - db_auto_backup

 - hosts:
     - pve-docker
     - walker
-    - decker
   roles:
     - traefik

@@ -133,12 +129,6 @@
       become: true
     - restic
-
-- hosts: decker
-  roles:
-    - nebula
-    - restic
-    - uptime_kuma

 - hosts: renovate
   roles:
     - renovate

View File

@@ -22,7 +22,6 @@ scrape_configs:
     static_configs:
       - targets:
           - "{{ nebula.clients.walker.ip }}:8080"
-          - "{{ nebula.clients.decker.ip }}:8080"
           - "{{ pve_hosts.docker.ip }}:8080"

   - job_name: homeassistant

View File

@@ -1,20 +0,0 @@
-$ANSIBLE_VAULT;1.1;AES256
-65353231313330393239343839623361663961346636306236363938373037363538373338633731
-6430663764633362633565616462373066366234356463370a356462363864396134376338363936
-36646437363265306131643131353033613939363235643965333331633231653231366236393961
-6433386362653437650a323733353361343130306533623662323536653265306361393265393732
-39663236343530643835373132653664663661313731393433306635396639653635356531313365
-35656435353032333639366534386530363637643365356332663864323161383531316561376436
-62633036636432336434383461396564323536376238646161386562366338383734343462646631
-63636330393639303566376131643761613132346462366237623062383737663838393833383964
-30396661373738343536363831303939393738363866396364303236616262376337366637303632
-65393139623064613166353235343963653364333365323966373837373435303565343335356334
-66613963393339363638643931376434623333386133336638363336353334313835313961626235
-34306364393233663062636639396164303963303433353538386335383432376535383735646436
-65656436373234323936653263396363316432666666343536303537383032656462353761363464
-32396464646532356663346234623939656138343233353932333165623237353132633264333035
-64373134623863306564633738313233363835623733313766383761386230383033383232616137
-31363430303763656662363666646533316262646530306632613733363566366461666133623638
-64333330306637613730633733666561616331663463623739336263636637316463323061383735
-32323666383633656363643633386139613666366565356431393134356233343038663061353064
-303334396630656532363137383034323763

View File

@@ -1,11 +0,0 @@
-$ANSIBLE_VAULT;1.1;AES256
-38646235373534373165393032646530386230363864316439633366663962383432313931613265
-3565373033636239306139313166373264363366386539380a323933393234653565623633643065
-30643236313165396637326533343864336235393634663765626638623561303062343865323730
-3766373635363739620a366531363234393034613761303838373264383138303031313739393962
-63316132636264316334366661303830343961313561613038326134386134613565666336383065
-32626138356661343366643137363735306466333933306539633063663134616165363062303366
-39326133393439633330393762373637396465633337383861376138336362343365303065326431
-33613365303464633163646130336139306430346431313465323930653164323931656432386438
-33376165656635663335353263376635333262616263376132326362393434383830313434626237
-6664653033366130313861326163623532353363633364626433

View File

@@ -1,25 +0,0 @@
-version: "2.3"
-
-services:
-  uptime-kuma:
-    image: louislam/uptime-kuma:1.18.5-alpine
-    restart: unless-stopped
-    environment:
-      - PUID={{ docker_user.id }}
-      - PGID={{ docker_user.id }}
-      - TZ={{ timezone }}
-    networks:
-      - default
-      - traefik
-    volumes:
-      - ./data:/app/data
-    dns:
-      - 9.9.9.9
-      - 149.112.112.112
-    labels:
-      - traefik.enable=true
-      - traefik.http.routers.uptime-kuma.rule=Host(`status.theorangeone.net`)
-
-networks:
-  traefik:
-    external: true

View File

@@ -1,4 +0,0 @@
-- name: restart uptime-kuma
-  shell:
-    chdir: /opt/uptime-kuma
-    cmd: "{{ docker_update_command }}"

View File

@@ -1,17 +0,0 @@
-- name: Create install directory
-  file:
-    path: /opt/uptime-kuma
-    state: directory
-    owner: "{{ docker_user.name }}"
-    mode: "{{ docker_compose_directory_mask }}"
-  become: true
-
-- name: Install compose file
-  template:
-    src: files/docker-compose.yml
-    dest: /opt/uptime-kuma/docker-compose.yml
-    mode: "{{ docker_compose_file_mask }}"
-    owner: "{{ docker_user.name }}"
-    validate: docker-compose -f %s config
-  notify: restart uptime-kuma
-  become: true

View File

@@ -4,7 +4,6 @@ resource "local_file" "hosts" {
       casey_ip : linode_instance.casey.ip_address,
       walker_ip : vultr_instance.walker.main_ip,
       grimes_ip : vultr_instance.grimes.main_ip,
-      decker_ip : linode_instance.decker.ip_address,
     }
   })
   filename = "${path.module}/../ansible/group_vars/all/hosts.yml"
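
The opening of this resource sits outside the hunk. Given the `})` closer and the quoted keys in the generated group_vars file at the top of this diff, the content is plausibly built with Terraform's yamlencode; a minimal sketch of the whole resource under that assumption (only the interior map lines come from the diff, the wrapper is a guess):

resource "local_file" "hosts" {
  # yamlencode renders map keys quoted, which matches the
  # "casey_ip": style seen in hosts.yml in the first hunk.
  content = yamlencode({
    hosts : {
      casey_ip : linode_instance.casey.ip_address,
      walker_ip : vultr_instance.walker.main_ip,
      grimes_ip : vultr_instance.grimes.main_ip,
    }
  })

  filename = "${path.module}/../ansible/group_vars/all/hosts.yml"
}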

View File

@ -1,45 +0,0 @@
resource "linode_instance" "decker" {
label = "decker"
image = "linode/arch"
region = "eu-central"
type = "g6-nanode-1"
private_ip = true
}
resource "linode_firewall" "decker" {
label = "decker"
linodes = [linode_instance.decker.id]
outbound_policy = "ACCEPT"
inbound_policy = "DROP"
inbound {
label = "allow-ping"
action = "ACCEPT"
protocol = "ICMP"
ipv4 = ["0.0.0.0/0"]
ipv6 = ["::/0"]
}
inbound {
label = "allow-inbound-https"
action = "ACCEPT"
protocol = "TCP"
ports = "443"
ipv4 = ["0.0.0.0/0"]
ipv6 = ["::/0"]
}
inbound {
label = "allow-inbound-http"
action = "ACCEPT"
protocol = "TCP"
ports = "80"
ipv4 = ["0.0.0.0/0"]
ipv6 = ["::/0"]
}
}
resource "linode_rdns" "decker_reverse_ipv4" {
address = linode_instance.decker.ip_address
rdns = "decker.sys.theorangeone.net"
}

View File

@@ -21,11 +21,3 @@ resource "cloudflare_record" "sys_domain_grimes" {
   type    = "A"
   ttl     = 1
 }
-
-resource "cloudflare_record" "sys_domain_decker" {
-  zone_id = cloudflare_zone.theorangeonenet.id
-  name    = "decker.sys"
-  value   = linode_instance.decker.ip_address
-  type    = "A"
-  ttl     = 1
-}

View File

@@ -197,14 +197,6 @@ resource "cloudflare_record" "theorangeonenet_dokku_wildcard" {
   ttl     = 1
 }
-
-resource "cloudflare_record" "theorangeonenet_status" {
-  zone_id = cloudflare_zone.theorangeonenet.id
-  name    = "status"
-  value   = linode_instance.decker.ip_address
-  type    = "A"
-  ttl     = 1
-}

 resource "cloudflare_record" "theorangeonenet_google_site_verification" {
   zone_id = cloudflare_zone.theorangeonenet.id
   name    = "@"

View File

@@ -2,7 +2,6 @@ resource "uptimerobot_monitor" "vps_ping" {
   for_each = {
     casey  = linode_instance.casey.ip_address
     walker = vultr_instance.walker.main_ip
-    decker = linode_instance.decker.ip_address
     grimes = vultr_instance.grimes.main_ip
   }
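
Only the for_each map appears in this final hunk; the monitor body sits outside it. A minimal sketch of how such a ping monitor could look, assuming the community uptimerobot Terraform provider (the friendly_name, type, and url attributes are assumptions, not shown in this diff):

resource "uptimerobot_monitor" "vps_ping" {
  # One monitor per remaining VPS; decker is simply dropped from the map.
  for_each = {
    casey  = linode_instance.casey.ip_address
    walker = vultr_instance.walker.main_ip
    grimes = vultr_instance.grimes.main_ip
  }

  friendly_name = each.key   # e.g. "casey"
  type          = "ping"     # ICMP check against the instance IP
  url           = each.value
}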