Initial import
This commit is contained in:
commit
09f0b0672d
64 changed files with 3735 additions and 0 deletions
56
app/terraform/__init__.py
Normal file
56
app/terraform/__init__.py
Normal file
|
@ -0,0 +1,56 @@
|
|||
import json
|
||||
import os
|
||||
import subprocess
|
||||
from typing import Dict, Any
|
||||
|
||||
import jinja2
|
||||
|
||||
from app import app
|
||||
|
||||
|
||||
class BaseAutomation:
    """Base class for Terraform-driven automation tasks.

    Subclasses set :attr:`short_name` (used as the working directory name
    under ``TERRAFORM_DIRECTORY``) and use the ``terraform_*`` helpers to
    drive the Terraform CLI inside that directory.
    """

    # Directory name under TERRAFORM_DIRECTORY; falls back to the
    # lowercased class name when unset.
    short_name = None

    def working_directory(self, filename=None):
        """Return the automation's working directory path, or the path to
        *filename* inside it when given."""
        return os.path.join(
            app.config['TERRAFORM_DIRECTORY'],
            self.short_name or self.__class__.__name__.lower(),
            filename or ""
        )

    def write_terraform_config(self, template: str, **kwargs):
        """Render the Jinja2 *template* with *kwargs* and write the result
        to ``main.tf`` in the working directory."""
        tmpl = jinja2.Template(template)
        with open(self.working_directory("main.tf"), 'w') as tf:
            tf.write(tmpl.render(**kwargs))

    def terraform_init(self):
        """Run ``terraform init`` in the working directory."""
        subprocess.run(
            ['terraform', 'init'],
            cwd=self.working_directory())

    def terraform_plan(self):
        """Run ``terraform plan`` in the working directory.

        The plan is not captured or returned; the call is useful for its
        console output and side effects only.
        """
        # Fixed: the CompletedProcess was previously bound to an unused
        # local variable.
        subprocess.run(
            ['terraform', 'plan'],
            cwd=self.working_directory())

    def terraform_apply(self, refresh: bool = True, parallelism: int = 10):
        """Run ``terraform apply -auto-approve`` in the working directory.

        :param refresh: forwarded as ``-refresh=true``/``-refresh=false``
        :param parallelism: forwarded as ``-parallelism=N``
        """
        subprocess.run(
            ['terraform', 'apply', f'-refresh={str(refresh).lower()}', '-auto-approve',
             f'-parallelism={parallelism}'],  # str() inside f-string was redundant
            cwd=self.working_directory())

    def terraform_show(self) -> Dict[str, Any]:
        """Return the parsed JSON state from ``terraform show -json``."""
        terraform = subprocess.run(
            ['terraform', 'show', '-json'],
            # Fixed: dropped a redundant single-argument os.path.join().
            cwd=self.working_directory(),
            stdout=subprocess.PIPE)
        return json.loads(terraform.stdout)

    def terraform_output(self) -> Dict[str, Any]:
        """Return the parsed JSON outputs from ``terraform output -json``."""
        terraform = subprocess.run(
            ['terraform', 'output', '-json'],
            # Fixed: dropped a redundant single-argument os.path.join().
            cwd=self.working_directory(),
            stdout=subprocess.PIPE)
        return json.loads(terraform.stdout)
|
29
app/terraform/block_bridge_github.py
Normal file
29
app/terraform/block_bridge_github.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
import datetime
|
||||
|
||||
from dateutil.parser import isoparse
|
||||
from github import Github
|
||||
|
||||
from app import app
|
||||
from app.models import Bridge
|
||||
|
||||
|
||||
def check_blocks():
    """Scan the bridge status repository on GitHub and deprecate any bridge
    whose recent test result reports a score below 40."""
    github = Github(app.config['GITHUB_API_KEY'])
    repository = github.get_repo(app.config['GITHUB_BRIDGE_REPO'])
    for vantage_point in app.config['GITHUB_BRIDGE_VANTAGE_POINTS']:
        contents = repository.get_contents(f"recentResult_{vantage_point}")
        for line in contents.decoded_content.decode('utf-8').splitlines():
            fields = line.split("\t")
            # Skip results older than three days; they are stale.
            if isoparse(fields[2]) < (datetime.datetime.now(datetime.timezone.utc)
                                      - datetime.timedelta(days=3)):
                continue
            if int(fields[1]) < 40:
                bridge = Bridge.query.filter(
                    Bridge.nickname == fields[0]
                ).first()
                if bridge is not None:
                    bridge.deprecate()


if __name__ == "__main__":
    with app.app_context():
        check_blocks()
|
67
app/terraform/block_external.py
Normal file
67
app/terraform/block_external.py
Normal file
|
@ -0,0 +1,67 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
|
||||
from app import app
|
||||
from app.models import Proxy
|
||||
|
||||
|
||||
# Maps the CDN domain found in a blocked URL to the Proxy.provider value
# stored in the database.
_CDN_PROVIDERS = {
    "cloudfront.net": "cloudfront",
    "azureedge.net": "azure_cdn",
}


def _deprecate_blocked_proxy(provider: str, slug: str):
    """Look up the proxy identified by *provider*/*slug* and deprecate it,
    logging what happened (shared by all CDN providers)."""
    print(f"Found {slug} blocked")
    proxy = Proxy.query.filter(
        Proxy.provider == provider,
        Proxy.slug == slug
    ).first()
    if not proxy:
        print("Proxy not found")
        return
    if proxy.deprecated:
        print("Proxy already marked blocked")
        return
    proxy.deprecate()


def check_blocks():
    """Fetch the external block-check page and deprecate any of our CDN
    proxies that it reports as blocked.

    The page pairs each vantage point's <h2> heading with a <div> of
    blocked URLs; a nested <div> inside the container means "no results".
    """
    user_agent = {'User-agent': 'BypassCensorship/1.0 (contact@sr2.uk for info)'}
    page = requests.get(app.config['EXTERNAL_CHECK_URL'], headers=user_agent)
    soup = BeautifulSoup(page.content, 'html.parser')
    h2 = soup.find_all('h2')
    div = soup.find_all('div', class_="overflow-auto mb-5")

    # Build vantage point -> list of blocked URLs. (Previously done with
    # manual while-index loops; zip/enumerate is the idiomatic form.)
    results = {}
    for heading, container in zip(h2, div):
        if not container.div:
            results[heading.text] = [a.text for a in container.find_all('a')]
        else:
            results[heading.text] = []

    for vp, urls in results.items():
        if vp not in app.config['EXTERNAL_VANTAGE_POINTS']:
            continue
        for url in urls:
            # Previously two near-identical copy-paste branches for
            # cloudfront/azureedge; now a single table-driven pass.
            for domain, provider in _CDN_PROVIDERS.items():
                if domain in url:
                    slug = url[len('https://'):][:-len('.' + domain)]
                    _deprecate_blocked_proxy(provider, slug)


if __name__ == "__main__":
    with app.app_context():
        check_blocks()
|
77
app/terraform/bridge/__init__.py
Normal file
77
app/terraform/bridge/__init__.py
Normal file
|
@ -0,0 +1,77 @@
|
|||
import datetime
|
||||
|
||||
from app import app
|
||||
from app.extensions import db
|
||||
from app.models import BridgeConf, Bridge, Group
|
||||
from app.terraform import BaseAutomation
|
||||
|
||||
|
||||
class BridgeAutomation(BaseAutomation):
|
||||
def create_missing(self):
|
||||
bridgeconfs = BridgeConf.query.filter(
|
||||
BridgeConf.provider == self.provider
|
||||
).all()
|
||||
for bridgeconf in bridgeconfs:
|
||||
active_bridges = Bridge.query.filter(
|
||||
Bridge.conf_id == bridgeconf.id,
|
||||
Bridge.deprecated == None
|
||||
).all()
|
||||
if len(active_bridges) < bridgeconf.number:
|
||||
for i in range(bridgeconf.number - len(active_bridges)):
|
||||
bridge = Bridge()
|
||||
bridge.conf_id = bridgeconf.id
|
||||
bridge.added = datetime.datetime.utcnow()
|
||||
bridge.updated = datetime.datetime.utcnow()
|
||||
db.session.add(bridge)
|
||||
elif len(active_bridges) > bridgeconf.number:
|
||||
active_bridge_count = len(active_bridges)
|
||||
for bridge in active_bridges:
|
||||
bridge.deprecate()
|
||||
active_bridge_count -= 1
|
||||
if active_bridge_count == bridgeconf.number:
|
||||
break
|
||||
|
||||
def destroy_expired(self):
|
||||
cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=0)
|
||||
bridges = [b for b in Bridge.query.filter(
|
||||
Bridge.destroyed == None,
|
||||
Bridge.deprecated < cutoff
|
||||
).all() if b.conf.provider == self.provider]
|
||||
for bridge in bridges:
|
||||
bridge.destroy()
|
||||
|
||||
def generate_terraform(self):
|
||||
self.write_terraform_config(
|
||||
self.template,
|
||||
groups=Group.query.all(),
|
||||
bridgeconfs=BridgeConf.query.filter(
|
||||
BridgeConf.destroyed == None,
|
||||
BridgeConf.provider == self.provider
|
||||
).all(),
|
||||
global_namespace=app.config['GLOBAL_NAMESPACE'],
|
||||
**{
|
||||
k: app.config[k.upper()]
|
||||
for k in self.template_parameters
|
||||
}
|
||||
)
|
||||
|
||||
def import_terraform(self):
|
||||
outputs = self.terraform_output()
|
||||
for output in outputs:
|
||||
if output.startswith('bridge_hashed_fingerprint_'):
|
||||
parts = outputs[output]['value'].split(" ")
|
||||
if len(parts) < 2:
|
||||
continue
|
||||
bridge = Bridge.query.filter(Bridge.id == output[len('bridge_hashed_fingerprint_'):]).first()
|
||||
bridge.nickname = parts[0]
|
||||
bridge.hashed_fingerprint = parts[1]
|
||||
bridge.terraform_updated = datetime.datetime.utcnow()
|
||||
if output.startswith('bridge_bridgeline_'):
|
||||
parts = outputs[output]['value'].split(" ")
|
||||
if len(parts) < 4:
|
||||
continue
|
||||
bridge = Bridge.query.filter(Bridge.id == output[len('bridge_bridgeline_'):]).first()
|
||||
del(parts[3])
|
||||
bridge.bridgeline = " ".join(parts)
|
||||
bridge.terraform_updated = datetime.datetime.utcnow()
|
||||
db.session.commit()
|
84
app/terraform/bridge/aws.py
Normal file
84
app/terraform/bridge/aws.py
Normal file
|
@ -0,0 +1,84 @@
|
|||
from app import app
|
||||
from app.terraform.bridge import BridgeAutomation
|
||||
|
||||
|
||||
class BridgeAWSAutomation(BridgeAutomation):
    """Deploys Tor bridges on AWS via the sr2c/tor-bridge/aws module."""

    short_name = "bridge_aws"
    provider = "aws"

    # App config keys (upper-cased) substituted into the template below.
    template_parameters = [
        "aws_access_key",
        "aws_secret_key",
        "ssh_public_key_path"
    ]

    # Jinja2 template rendered to main.tf: one label module per group and
    # one bridge module (plus fingerprint/bridgeline outputs) per
    # non-destroyed bridge.
    template = """
terraform {
  required_providers {
    aws = {
      version = "~> 4.2.0"
    }
  }
}

provider "aws" {
  access_key = "{{ aws_access_key }}"
  secret_key = "{{ aws_secret_key }}"
  region = "us-east-1"
}

locals {
  ssh_key = file("{{ ssh_public_key_path }}")
}

{% for group in groups %}
module "label_{{ group.id }}" {
  source = "cloudposse/label/null"
  version = "0.25.0"
  namespace = "{{ global_namespace }}"
  tenant = "{{ group.group_name }}"
  label_order = ["namespace", "tenant", "name", "attributes"]
}
{% endfor %}

{% for bridgeconf in bridgeconfs %}
{% for bridge in bridgeconf.bridges %}
{% if not bridge.destroyed %}
module "bridge_{{ bridge.id }}" {
  source = "sr2c/tor-bridge/aws"
  version = "0.0.1"
  ssh_key = local.ssh_key
  contact_info = "hi"
  context = module.label_{{ bridgeconf.group.id }}.context
  name = "br"
  attributes = ["{{ bridge.id }}"]
  distribution_method = "{{ bridge.conf.method }}"
}

output "bridge_hashed_fingerprint_{{ bridge.id }}" {
  value = module.bridge_{{ bridge.id }}.hashed_fingerprint
}

output "bridge_bridgeline_{{ bridge.id }}" {
  value = module.bridge_{{ bridge.id }}.bridgeline
  sensitive = true
}
{% endif %}
{% endfor %}
{% endfor %}
"""
|
||||
|
||||
|
||||
def automate():
    """Run one full AWS bridge automation cycle: reconcile the database,
    regenerate the Terraform configuration, apply it, and import outputs."""
    automation = BridgeAWSAutomation()
    steps = (
        automation.destroy_expired,
        automation.create_missing,
        automation.generate_terraform,
        automation.terraform_init,
        automation.terraform_apply,
        automation.import_terraform,
    )
    for step in steps:
        step()


if __name__ == "__main__":
    with app.app_context():
        automate()
|
95
app/terraform/bridge/gandi.py
Normal file
95
app/terraform/bridge/gandi.py
Normal file
|
@ -0,0 +1,95 @@
|
|||
from app import app
|
||||
from app.terraform.bridge import BridgeAutomation
|
||||
|
||||
|
||||
class BridgeGandiAutomation(BridgeAutomation):
    """Deploys Tor bridges on Gandi's OpenStack cloud via the
    sr2c/tor-bridge/openstack module."""

    short_name = "bridge_gandi"
    provider = "gandi"

    # App config keys (upper-cased) substituted into the template below.
    template_parameters = [
        "gandi_openstack_user",
        "gandi_openstack_password",
        "gandi_openstack_tenant_name",
        "ssh_public_key_path"
    ]

    # Jinja2 template rendered to main.tf: one label module per group and
    # one bridge module (plus outputs) per non-destroyed bridge.
    template = """
terraform {
  required_providers {
    openstack = {
      source = "terraform-provider-openstack/openstack"
      version = "~> 1.42.0"
    }
  }
}

provider "openstack" {
  auth_url = "https://keystone.sd6.api.gandi.net:5000/v3"
  user_domain_name = "public"
  project_domain_name = "public"
  user_name = "{{ gandi_openstack_user }}"
  password = "{{ gandi_openstack_password }}"
  tenant_name = "{{ gandi_openstack_tenant_name }}"
  region = "FR-SD6"
}

locals {
  ssh_key = file("{{ ssh_public_key_path }}")
}

{% for group in groups %}
module "label_{{ group.id }}" {
  source = "cloudposse/label/null"
  version = "0.25.0"
  namespace = "{{ global_namespace }}"
  tenant = "{{ group.group_name }}"
  label_order = ["namespace", "tenant", "name", "attributes"]
}
{% endfor %}

{% for bridgeconf in bridgeconfs %}
{% for bridge in bridgeconf.bridges %}
{% if not bridge.destroyed %}
module "bridge_{{ bridge.id }}" {
  source = "sr2c/tor-bridge/openstack"
  version = "0.0.6"
  context = module.label_{{ bridgeconf.group.id }}.context
  name = "br"
  attributes = ["{{ bridge.id }}"]
  ssh_key = local.ssh_key
  contact_info = "hi"
  distribution_method = "{{ bridge.conf.method }}"

  image_name = "Debian 11 Bullseye"
  flavor_name = "V-R1"
  external_network_name = "public"
  require_block_device_creation = true
}

output "bridge_hashed_fingerprint_{{ bridge.id }}" {
  value = module.bridge_{{ bridge.id }}.hashed_fingerprint
}

output "bridge_bridgeline_{{ bridge.id }}" {
  value = module.bridge_{{ bridge.id }}.bridgeline
  sensitive = true
}
{% endif %}
{% endfor %}
{% endfor %}
"""
|
||||
|
||||
|
||||
def automate():
    """Run one full Gandi bridge automation cycle: reconcile the database,
    regenerate the Terraform configuration, apply it, and import outputs."""
    automation = BridgeGandiAutomation()
    steps = (
        automation.destroy_expired,
        automation.create_missing,
        automation.generate_terraform,
        automation.terraform_init,
        automation.terraform_apply,
        automation.import_terraform,
    )
    for step in steps:
        step()


if __name__ == "__main__":
    with app.app_context():
        automate()
|
98
app/terraform/bridge/hcloud.py
Normal file
98
app/terraform/bridge/hcloud.py
Normal file
|
@ -0,0 +1,98 @@
|
|||
from app import app
|
||||
from app.terraform.bridge import BridgeAutomation
|
||||
|
||||
|
||||
class BridgeHcloudAutomation(BridgeAutomation):
    """Deploys Tor bridges on Hetzner Cloud via the sr2c/tor-bridge/hcloud
    module, picking a random datacenter that offers the cx11 type."""

    short_name = "bridge_hcloud"
    provider = "hcloud"

    # App config keys (upper-cased) substituted into the template below.
    template_parameters = [
        "hcloud_token"
    ]

    # Jinja2 template rendered to main.tf: one label module per group and,
    # per non-destroyed bridge, a random datacenter pick plus a bridge
    # module and its outputs.
    template = """
terraform {
  required_providers {
    random = {
      source = "hashicorp/random"
      version = "3.1.0"
    }
    hcloud = {
      source = "hetznercloud/hcloud"
      version = "1.31.1"
    }
  }
}

provider "hcloud" {
  token = "{{ hcloud_token }}"
}

data "hcloud_datacenters" "ds" {
}

data "hcloud_server_type" "cx11" {
  name = "cx11"
}

{% for group in groups %}
module "label_{{ group.id }}" {
  source = "cloudposse/label/null"
  version = "0.25.0"
  namespace = "{{ global_namespace }}"
  tenant = "{{ group.group_name }}"
  label_order = ["namespace", "tenant", "name", "attributes"]
}
{% endfor %}

{% for bridgeconf in bridgeconfs %}
{% for bridge in bridgeconf.bridges %}
{% if not bridge.destroyed %}
resource "random_shuffle" "datacenter_{{ bridge.id }}" {
  input = [for s in data.hcloud_datacenters.ds.datacenters : s.name if contains(s.available_server_type_ids, data.hcloud_server_type.cx11.id)]
  result_count = 1

  lifecycle {
    ignore_changes = [input] # don't replace all the bridges if a new DC appears
  }
}

module "bridge_{{ bridge.id }}" {
  source = "sr2c/tor-bridge/hcloud"
  version = "0.0.2"
  datacenter = one(random_shuffle.datacenter_{{ bridge.id }}.result)
  context = module.label_{{ bridgeconf.group.id }}.context
  name = "br"
  attributes = ["{{ bridge.id }}"]
  ssh_key_name = "bc"
  contact_info = "hi"
  distribution_method = "{{ bridge.conf.method }}"
}

output "bridge_hashed_fingerprint_{{ bridge.id }}" {
  value = module.bridge_{{ bridge.id }}.hashed_fingerprint
}

output "bridge_bridgeline_{{ bridge.id }}" {
  value = module.bridge_{{ bridge.id }}.bridgeline
  sensitive = true
}
{% endif %}
{% endfor %}
{% endfor %}
"""
|
||||
|
||||
|
||||
def automate():
    """Run one full Hetzner Cloud bridge automation cycle: reconcile the
    database, regenerate Terraform configuration, apply, import outputs."""
    automation = BridgeHcloudAutomation()
    steps = (
        automation.destroy_expired,
        automation.create_missing,
        automation.generate_terraform,
        automation.terraform_init,
        automation.terraform_apply,
        automation.import_terraform,
    )
    for step in steps:
        step()


if __name__ == "__main__":
    with app.app_context():
        automate()
|
122
app/terraform/bridge/ovh.py
Normal file
122
app/terraform/bridge/ovh.py
Normal file
|
@ -0,0 +1,122 @@
|
|||
from app import app
|
||||
from app.terraform.bridge import BridgeAutomation
|
||||
|
||||
|
||||
class BridgeOvhAutomation(BridgeAutomation):
    """Deploys Tor bridges on OVH's OpenStack cloud via the
    sr2c/tor-bridge/openstack module, picking a random region with the
    instance service available."""

    short_name = "bridge_ovh"
    provider = "ovh"

    # App config keys (upper-cased) substituted into the template below.
    template_parameters = [
        "ovh_cloud_application_key",
        "ovh_cloud_application_secret",
        "ovh_cloud_consumer_key",
        "ovh_cloud_project_service",
        "ovh_openstack_user",
        "ovh_openstack_password",
        "ovh_openstack_tenant_id",
        "ssh_public_key_path"
    ]

    # Jinja2 template rendered to main.tf: one label module per group and,
    # per non-destroyed bridge, a random region pick plus a bridge module
    # and its outputs.
    template = """
terraform {
  required_providers {
    random = {
      source = "hashicorp/random"
      version = "3.1.0"
    }
    openstack = {
      source = "terraform-provider-openstack/openstack"
      version = "~> 1.42.0"
    }
    ovh = {
      source = "ovh/ovh"
      version = ">= 0.13.0"
    }
  }
}

provider "openstack" {
  auth_url = "https://auth.cloud.ovh.net/v3/"
  domain_name = "Default" # Domain name - Always at 'default' for OVHcloud
  user_name = "{{ ovh_openstack_user }}"
  password = "{{ ovh_openstack_password }}"
  tenant_id = "{{ ovh_openstack_tenant_id }}"
}

provider "ovh" {
  endpoint = "ovh-eu"
  application_key = "{{ ovh_cloud_application_key }}"
  application_secret = "{{ ovh_cloud_application_secret }}"
  consumer_key = "{{ ovh_cloud_consumer_key }}"
}

locals {
  ssh_key = file("{{ ssh_public_key_path }}")
}

data "ovh_cloud_project_regions" "regions" {
  service_name = "{{ ovh_cloud_project_service }}"
  has_services_up = ["instance"]
}

{% for group in groups %}
module "label_{{ group.id }}" {
  source = "cloudposse/label/null"
  version = "0.25.0"
  namespace = "{{ global_namespace }}"
  tenant = "{{ group.group_name }}"
  label_order = ["namespace", "tenant", "name", "attributes"]
}
{% endfor %}

{% for bridgeconf in bridgeconfs %}
{% for bridge in bridgeconf.bridges %}
{% if not bridge.destroyed %}
resource "random_shuffle" "region_{{ bridge.id }}" {
  input = data.ovh_cloud_project_regions.regions.names
  result_count = 1

  lifecycle {
    ignore_changes = [input] # don't replace all the bridges if a new region appears
  }
}

module "bridge_{{ bridge.id }}" {
  source = "sr2c/tor-bridge/openstack"
  version = "0.0.6"
  region = one(random_shuffle.region_{{ bridge.id }}.result)
  context = module.label_{{ bridgeconf.group.id }}.context
  name = "br"
  attributes = ["{{ bridge.id }}"]
  ssh_key = local.ssh_key
  contact_info = "hi"
  distribution_method = "{{ bridge.conf.method }}"
}

output "bridge_hashed_fingerprint_{{ bridge.id }}" {
  value = module.bridge_{{ bridge.id }}.hashed_fingerprint
}

output "bridge_bridgeline_{{ bridge.id }}" {
  value = module.bridge_{{ bridge.id }}.bridgeline
  sensitive = true
}
{% endif %}
{% endfor %}
{% endfor %}
"""
|
||||
|
||||
|
||||
def automate():
    """Run one full OVH bridge automation cycle: reconcile the database,
    regenerate the Terraform configuration, apply it, and import outputs."""
    automation = BridgeOvhAutomation()
    steps = (
        automation.destroy_expired,
        automation.create_missing,
        automation.generate_terraform,
        automation.terraform_init,
        automation.terraform_apply,
        automation.import_terraform,
    )
    for step in steps:
        step()


if __name__ == "__main__":
    with app.app_context():
        automate()
|
79
app/terraform/eotk.py
Normal file
79
app/terraform/eotk.py
Normal file
|
@ -0,0 +1,79 @@
|
|||
from app import app
|
||||
from app.models import Group
|
||||
from app.terraform import BaseAutomation
|
||||
|
||||
|
||||
class EotkAutomation(BaseAutomation):
    """Provisions per-group AWS infrastructure supporting EOTK
    deployments: an S3 logging bucket and an SNS topic for alarms."""

    short_name = "eotk"

    # App config keys (upper-cased) substituted into the template below.
    template_parameters = [
        "aws_access_key",
        "aws_secret_key"
    ]

    # Jinja2 template rendered to main.tf for every group with EOTK
    # enabled.
    template = """
terraform {
  required_providers {
    aws = {
      version = "~> 4.4.0"
    }
  }
}

provider "aws" {
  access_key = "{{ aws_access_key }}"
  secret_key = "{{ aws_secret_key }}"
  region = "us-east-1"
}

{% for group in groups %}
module "label_{{ group.id }}" {
  source = "cloudposse/label/null"
  version = "0.25.0"
  namespace = "{{ global_namespace }}"
  tenant = "{{ group.group_name }}"
  label_order = ["namespace", "tenant", "name", "attributes"]
}

module "bucket_{{ group.id }}" {
  source = "cloudposse/s3-bucket/aws"
  version = "0.49.0"
  acl = "private"
  enabled = true
  user_enabled = true
  versioning_enabled = false
  allowed_bucket_actions = [
    "s3:GetObject",
    "s3:PutObject",
    "s3:ListBucket",
    "s3:GetBucketLocation"
  ]
  context = module.label_{{ group.id }}.context
  name = "logs"
  attributes = ["eotk"]
}

resource "aws_sns_topic" "alarms_{{ group.id }}" {
  name = "${module.label_{{ group.id }}.id}-eotk-alarms"
}
{% endfor %}
"""

    def generate_terraform(self):
        """Render the template for all groups that have EOTK enabled."""
        self.write_terraform_config(
            self.template,
            groups=Group.query.filter(Group.eotk == True).all(),
            global_namespace=app.config['GLOBAL_NAMESPACE'],
            **{
                k: app.config[k.upper()]
                for k in self.template_parameters
            }
        )
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running the EOTK automation directly as a script.
    with app.app_context():
        automation = EotkAutomation()
        for step in (automation.generate_terraform,
                     automation.terraform_init,
                     automation.terraform_apply):
            step()
|
28
app/terraform/list/__init__.py
Normal file
28
app/terraform/list/__init__.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
import json
|
||||
|
||||
from app import app
|
||||
from app.mirror_sites import bridgelines, mirror_sites, mirror_mapping
|
||||
from app.models import MirrorList
|
||||
from app.terraform import BaseAutomation
|
||||
|
||||
|
||||
class ListAutomation(BaseAutomation):
    """Shared logic for automations that publish mirror lists.

    Subclasses define ``provider``, ``template`` and
    ``template_parameters``.
    """

    def generate_terraform(self):
        """Render the provider template to main.tf and write the three
        mirror-list JSON artifacts into the working directory."""
        config_args = {
            k: app.config[k.upper()]
            for k in self.template_parameters
        }
        self.write_terraform_config(
            self.template,
            lists=MirrorList.query.filter(
                MirrorList.destroyed == None,
                MirrorList.provider == self.provider,
            ).all(),
            global_namespace=app.config['GLOBAL_NAMESPACE'],
            **config_args
        )
        # One JSON artifact per list format, each freshly generated.
        artifacts = (
            ('bc2.json', mirror_sites),
            ('bca.json', mirror_mapping),
            ('bridgelines.json', bridgelines),
        )
        for filename, build in artifacts:
            with open(self.working_directory(filename), 'w') as out:
                json.dump(build(), out, indent=2, sort_keys=True)
|
55
app/terraform/list/github.py
Normal file
55
app/terraform/list/github.py
Normal file
|
@ -0,0 +1,55 @@
|
|||
from app import app
|
||||
from app.terraform.list import ListAutomation
|
||||
|
||||
|
||||
class ListGithubAutomation(ListAutomation):
    """Publishes mirror lists as files in GitHub repositories.

    Each list's ``container`` is an ``owner/repository`` string, split to
    configure a per-list provider alias.
    """

    short_name = "list_github"
    provider = "github"

    # App config keys (upper-cased) substituted into the template below.
    template_parameters = [
        "github_api_key"
    ]

    # Jinja2 template rendered to main.tf: per list, a provider alias, the
    # target repository lookup, and the managed file resource.
    template = """
terraform {
  required_providers {
    github = {
      source = "integrations/github"
      version = "~> 4.20.1"
    }
  }
}

{% for list in lists %}
provider "github" {
  alias = "list_{{ list.id }}"
  owner = "{{ list.container.split("/")[0] }}"
  token = "{{ github_api_key }}"
}

data "github_repository" "repository_{{ list.id }}" {
  provider = github.list_{{ list.id }}
  name = "{{ list.container.split("/")[1] }}"
}

resource "github_repository_file" "file_{{ list.id }}" {
  provider = github.list_{{ list.id }}
  repository = data.github_repository.repository_{{ list.id }}.name
  branch = "{{ list.branch }}"
  file = "{{ list.filename }}"
  content = file("{{ list.format }}.json")
  commit_message = "Managed by Terraform"
  commit_author = "Terraform User"
  commit_email = "terraform@api.otf.is"
  overwrite_on_create = true
}
{% endfor %}
"""
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running the GitHub list publication directly as a script.
    with app.app_context():
        automation = ListGithubAutomation()
        for step in (automation.generate_terraform,
                     automation.terraform_init,
                     automation.terraform_apply):
            step()
|
54
app/terraform/list/gitlab.py
Normal file
54
app/terraform/list/gitlab.py
Normal file
|
@ -0,0 +1,54 @@
|
|||
from app import app
|
||||
from app.terraform.list import ListAutomation
|
||||
|
||||
|
||||
class ListGitlabAutomation(ListAutomation):
    """Publishes mirror lists as files in GitLab projects.

    Each list's ``container`` is the GitLab project identifier.
    """

    short_name = "list_gitlab"
    provider = "gitlab"

    # App config keys (upper-cased) substituted into the template below.
    template_parameters = [
        "gitlab_token",
        "gitlab_author_email",
        "gitlab_author_name",
        "gitlab_commit_message"
    ]

    # Jinja2 template rendered to main.tf: per list, the target project
    # lookup and the managed (base64-encoded) repository file.
    template = """
terraform {
  required_providers {
    gitlab = {
      source = "gitlabhq/gitlab"
      version = "~> 3.12.0"
    }
  }
}

provider "gitlab" {
  token = "{{ gitlab_token }}"
}

{% for list in lists %}
data "gitlab_project" "project_{{ list.id }}" {
  id = "{{ list.container }}"
}

resource "gitlab_repository_file" "file_{{ list.id }}" {
  project = data.gitlab_project.project_{{ list.id }}.id
  file_path = "{{ list.filename }}"
  branch = "{{ list.branch }}"
  content = base64encode(file("{{ list.format }}.json"))
  author_email = "{{ gitlab_author_email }}"
  author_name = "{{ gitlab_author_name }}"
  commit_message = "{{ gitlab_commit_message }}"
}

{% endfor %}
"""
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running the GitLab list publication directly as a script.
    with app.app_context():
        automation = ListGitlabAutomation()
        for step in (automation.generate_terraform,
                     automation.terraform_init,
                     automation.terraform_apply):
            step()
|
46
app/terraform/list/s3.py
Normal file
46
app/terraform/list/s3.py
Normal file
|
@ -0,0 +1,46 @@
|
|||
from app import app
|
||||
from app.terraform.list import ListAutomation
|
||||
|
||||
|
||||
class ListS3Automation(ListAutomation):
    """Publishes mirror lists as objects in S3 buckets.

    Renamed from ``ListGithubAutomation`` — the original name was a
    copy/paste error from the GitHub list automation; an alias below keeps
    existing imports working.
    """

    short_name = "list_s3"
    provider = "s3"

    # App config keys (upper-cased) substituted into the template below.
    template_parameters = [
        "aws_access_key",
        "aws_secret_key"
    ]

    # Jinja2 template rendered to main.tf: one S3 object per list.
    template = """
terraform {
  required_providers {
    aws = {
      version = "~> 4.4.0"
    }
  }
}

provider "aws" {
  access_key = "{{ aws_access_key }}"
  secret_key = "{{ aws_secret_key }}"
  region = "us-east-1"
}

{% for list in lists %}
resource "aws_s3_object" "object_{{ list.id }}" {
  bucket = "{{ list.container }}"
  key = "{{ list.filename }}"
  source = "{{ list.format }}.json"
  content_type = "application/json"
  etag = filemd5("{{ list.format }}.json")
}
{% endfor %}
"""


# Backward-compatible alias for the original (mistaken) class name.
ListGithubAutomation = ListS3Automation
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run the S3 list publication end-to-end when invoked as a script.
    with app.app_context():
        auto = ListGithubAutomation()
        auto.generate_terraform()
        auto.terraform_init()
        auto.terraform_apply()
|
51
app/terraform/proxy/__init__.py
Normal file
51
app/terraform/proxy/__init__.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
import datetime
|
||||
|
||||
from app import app
|
||||
from app.extensions import db
|
||||
from app.models import Group, Origin, Proxy
|
||||
from app.terraform import BaseAutomation
|
||||
|
||||
|
||||
class ProxyAutomation(BaseAutomation):
    """Shared logic for provider-specific CDN proxy automations.

    Subclasses define ``provider``, ``template`` and
    ``template_parameters``.
    """

    def create_missing_proxies(self):
        """Create a proxy record for every origin that has no live proxy
        (neither deprecated nor destroyed) for this provider."""
        origins = Origin.query.all()
        for origin in origins:
            # NOTE(review): the name predates multi-provider support —
            # these are this provider's live proxies, not necessarily
            # CloudFront ones.
            cloudfront_proxies = [
                x for x in origin.proxies
                if x.provider == self.provider and x.deprecated is None and x.destroyed is None
            ]
            if not cloudfront_proxies:
                proxy = Proxy()
                proxy.origin_id = origin.id
                proxy.provider = self.provider
                proxy.added = datetime.datetime.utcnow()
                proxy.updated = datetime.datetime.utcnow()
                db.session.add(proxy)
                db.session.commit()

    def destroy_expired_proxies(self):
        """Mark as destroyed all proxies of this provider that were
        deprecated more than three days ago."""
        cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=3)
        proxies = Proxy.query.filter(
            Proxy.destroyed == None,
            Proxy.provider == self.provider,
            Proxy.deprecated < cutoff
        ).all()
        for proxy in proxies:
            proxy.destroyed = datetime.datetime.utcnow()
            proxy.updated = datetime.datetime.utcnow()
        db.session.commit()

    def generate_terraform(self):
        """Render this provider's Terraform template into main.tf."""
        self.write_terraform_config(
            self.template,
            groups=Group.query.all(),
            proxies=Proxy.query.filter(
                Proxy.provider == self.provider,
                Proxy.destroyed == None
            ).all(),
            global_namespace=app.config['GLOBAL_NAMESPACE'],
            **{
                k: app.config[k.upper()]
                for k in self.template_parameters
            }
        )
|
238
app/terraform/proxy/azure_cdn.py
Normal file
238
app/terraform/proxy/azure_cdn.py
Normal file
|
@ -0,0 +1,238 @@
|
|||
import datetime
|
||||
import string
|
||||
import random
|
||||
|
||||
from azure.identity import ClientSecretCredential
|
||||
from azure.mgmt.alertsmanagement import AlertsManagementClient
|
||||
import tldextract
|
||||
|
||||
from app import app
|
||||
from app.alarms import get_proxy_alarm
|
||||
from app.extensions import db
|
||||
from app.models import Group, Proxy, Alarm, AlarmState
|
||||
from app.terraform.proxy import ProxyAutomation
|
||||
|
||||
|
||||
class ProxyAzureCdnAutomation(ProxyAutomation):
    """Manages proxies deployed as Azure CDN endpoints via Terraform.

    One CDN profile is created per group, with one CDN endpoint per proxy.
    Diagnostic logs are shipped to a shared storage account and a metric
    alert fires when outbound response volume is too high.
    """

    short_name = "proxy_azure_cdn"
    provider = "azure_cdn"

    # app.config keys substituted into the Jinja2 template below.
    template_parameters = [
        "azure_resource_group_name",
        "azure_storage_account_name",
        "azure_location",
        "azure_client_id",
        "azure_client_secret",
        "azure_subscription_id",
        "azure_tenant_id"
    ]

    template = """
    terraform {
      required_providers {
        azurerm = {
          source = "hashicorp/azurerm"
          version = "=2.99.0"
        }
      }
    }

    provider "azurerm" {
      features {}

      client_id = "{{ azure_client_id }}"
      client_secret = "{{ azure_client_secret }}"
      subscription_id = "{{ azure_subscription_id }}"
      tenant_id = "{{ azure_tenant_id }}"
      skip_provider_registration = true
    }

    data "azurerm_resource_group" "this" {
      name = "{{ azure_resource_group_name }}"
    }

    resource "azurerm_storage_account" "this" {
      name = "{{ azure_storage_account_name }}"
      resource_group_name = data.azurerm_resource_group.this.name
      location = "{{ azure_location }}"
      account_tier = "Standard"
      account_replication_type = "RAGRS"
    }

    {% for group in groups %}
    module "label_{{ group.id }}" {
      source = "cloudposse/label/null"
      version = "0.25.0"
      namespace = "{{ global_namespace }}"
      tenant = "{{ group.group_name }}"
      label_order = ["namespace", "tenant", "name", "attributes"]
    }

    resource "azurerm_cdn_profile" "profile_{{ group.id }}" {
      name = module.label_{{ group.id }}.id
      location = "{{ azure_location }}"
      resource_group_name = data.azurerm_resource_group.this.name
      sku = "Standard_Microsoft"

      tags = module.label_{{ group.id }}.tags
    }

    resource "azurerm_monitor_diagnostic_setting" "profile_diagnostic_{{ group.id }}" {
      name = "cdn-diagnostics"
      target_resource_id = azurerm_cdn_profile.profile_{{ group.id }}.id
      storage_account_id = azurerm_storage_account.this.id

      log {
        category = "AzureCDNAccessLog"
        enabled = true

        retention_policy {
          enabled = true
          days = 90
        }
      }

      metric {
        category = "AllMetrics"
        enabled = true

        retention_policy {
          enabled = true
          days = 90
        }
      }
    }

    resource "azurerm_monitor_metric_alert" "response_alert_{{ group.id }}" {
      name = "bandwidth-out-high-${module.label_{{ group.id }}.id}"
      resource_group_name = data.azurerm_resource_group.this.name
      scopes = [azurerm_cdn_profile.profile_{{ group.id }}.id]
      description = "Action will be triggered when response size is too high."

      criteria {
        metric_namespace = "Microsoft.Cdn/profiles"
        metric_name = "ResponseSize"
        aggregation = "Total"
        operator = "GreaterThan"
        threshold = 21474836481
      }

      window_size = "PT1H"
    }
    {% endfor %}

    {% for proxy in proxies %}
    resource "azurerm_cdn_endpoint" "endpoint_{{ proxy.id }}" {
      name = "{{ proxy.slug }}"
      profile_name = azurerm_cdn_profile.profile_{{ proxy.origin.group.id }}.name
      location = "{{ azure_location }}"
      resource_group_name = data.azurerm_resource_group.this.name

      origin {
        name = "upstream"
        host_name = "{{ proxy.origin.domain_name }}"
      }

      global_delivery_rule {
        modify_request_header_action {
          action = "Overwrite"
          name = "User-Agent"
          value = "Amazon CloudFront"
        }
        modify_request_header_action {
          action = "Append"
          name = "X-Amz-Cf-Id"
          value = "dummystring"
        }
      }
    }

    resource "azurerm_monitor_diagnostic_setting" "diagnostic_{{ proxy.id }}" {
      name = "cdn-diagnostics"
      target_resource_id = azurerm_cdn_endpoint.endpoint_{{ proxy.id }}.id
      storage_account_id = azurerm_storage_account.this.id

      log {
        category = "CoreAnalytics"
        enabled = true

        retention_policy {
          enabled = true
          days = 90
        }
      }
    }
    {% endfor %}
    """

    def create_missing_proxies(self):
        """Create an azure_cdn proxy for every origin that lacks a live one.

        At most 25 active proxies are kept per group (each group maps to one
        CDN profile — presumably an Azure per-profile endpoint limit; confirm
        against the subscription's quota).
        """
        groups = Group.query.all()
        for group in groups:
            # Live proxy count for this group across all its origins.
            active_proxies = len([p for p in Proxy.query.filter(
                Proxy.provider == 'azure_cdn',
                Proxy.destroyed == None
            ).all() if p.origin.group_id == group.id])
            for origin in group.origins:
                # ">=" rather than "==" so the cap still holds if the count
                # already exceeds the limit when we start.
                if active_proxies >= 25:
                    break
                azure_cdn_proxies = [
                    x for x in origin.proxies
                    if x.provider == "azure_cdn" and x.deprecated is None and x.destroyed is None
                ]
                if not azure_cdn_proxies:
                    # Only count proxies we actually create; origins that
                    # already have a live proxy were counted by the query
                    # above, so incrementing for them would double-count.
                    active_proxies += 1
                    proxy = Proxy()
                    proxy.origin_id = origin.id
                    proxy.provider = "azure_cdn"
                    # Slug: first 5 chars of the registered domain plus a
                    # random lowercase suffix, forming the azureedge hostname.
                    proxy.slug = tldextract.extract(origin.domain_name).domain[:5] + ''.join(
                        random.choices(string.ascii_lowercase, k=random.randint(10, 15)))
                    proxy.url = f"https://{proxy.slug}.azureedge.net"
                    proxy.added = datetime.datetime.utcnow()
                    proxy.updated = datetime.datetime.utcnow()
                    db.session.add(proxy)
                    db.session.commit()
|
||||
|
||||
|
||||
def set_urls():
    """Populate the public URL of every live Azure CDN proxy from its slug."""
    live_proxies = Proxy.query.filter(
        Proxy.provider == 'azure_cdn',
        Proxy.destroyed == None
    ).all()
    for live_proxy in live_proxies:
        live_proxy.url = f"https://{live_proxy.slug}.azureedge.net"
    db.session.commit()
|
||||
|
||||
|
||||
def import_monitor_alerts():
    """Mirror Azure Monitor bandwidth alerts into local proxy alarms.

    Fetches all fired alerts whose name matches the Terraform-generated
    metric alert naming scheme, then marks each live azure_cdn proxy's
    "bandwidth-out-high" alarm CRITICAL or OK accordingly.
    """
    credential = ClientSecretCredential(
        tenant_id=app.config['AZURE_TENANT_ID'],
        client_id=app.config['AZURE_CLIENT_ID'],
        client_secret=app.config['AZURE_CLIENT_SECRET'])
    client = AlertsManagementClient(
        credential,
        app.config['AZURE_SUBSCRIPTION_ID']
    )
    # NOTE(review): the "bc-" segment looks like a hard-coded label
    # namespace — confirm it matches the configured global_namespace.
    prefix = "bandwidth-out-high-bc-"
    firing = [
        alert.name[len(prefix):]
        for alert in client.alerts.get_all()
        if alert.name.startswith(prefix)
        and alert.properties.essentials.monitor_condition == "Fired"
    ]
    live_proxies = Proxy.query.filter(
        Proxy.provider == "azure_cdn",
        Proxy.destroyed == None
    )
    for proxy in live_proxies:
        alarm = get_proxy_alarm(proxy.id, "bandwidth-out-high")
        if proxy.origin.group.group_name.lower() in firing:
            alarm.update_state(AlarmState.CRITICAL, "Azure monitor alert firing")
        else:
            alarm.update_state(AlarmState.OK, "Azure monitor alert not firing")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Scheduled-job entry point: reconcile the proxy fleet with the database,
    # apply the Terraform configuration, then sync URLs and alarm state back.
    with app.app_context():
        auto = ProxyAzureCdnAutomation()
        auto.create_missing_proxies()
        auto.destroy_expired_proxies()
        auto.generate_terraform()
        auto.terraform_init()
        # refresh=False and parallelism=1 to stay under Azure API rate limits.
        auto.terraform_apply(refresh=False, parallelism=1)
        set_urls()
        import_monitor_alerts()
|
156
app/terraform/proxy/cloudfront.py
Normal file
156
app/terraform/proxy/cloudfront.py
Normal file
|
@ -0,0 +1,156 @@
|
|||
import datetime
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
import boto3
|
||||
|
||||
from app import app
|
||||
from app.alarms import get_proxy_alarm
|
||||
from app.extensions import db
|
||||
from app.models import Proxy, Alarm, AlarmState
|
||||
from app.terraform.proxy import ProxyAutomation
|
||||
|
||||
|
||||
class ProxyCloudfrontAutomation(ProxyAutomation):
    """Manages proxies deployed as AWS CloudFront distributions via Terraform.

    One log bucket and SNS alarm topic are created per group; each proxy is a
    CloudFront distribution created by the sr2c/bc-proxy module.
    """

    short_name = "proxy_cloudfront"
    provider = "cloudfront"

    # app.config keys substituted into the Jinja2 template below.
    template_parameters = [
        "aws_access_key",
        "aws_secret_key"
    ]

    template = """
    terraform {
      required_providers {
        aws = {
          version = "~> 4.4.0"
        }
      }
    }

    provider "aws" {
      access_key = "{{ aws_access_key }}"
      secret_key = "{{ aws_secret_key }}"
      region = "us-east-1"
    }

    {% for group in groups %}
    module "label_{{ group.id }}" {
      source = "cloudposse/label/null"
      version = "0.25.0"
      namespace = "{{ global_namespace }}"
      tenant = "{{ group.group_name }}"
      label_order = ["namespace", "tenant", "name", "attributes"]
    }

    module "log_bucket_{{ group.id }}" {
      source = "cloudposse/s3-log-storage/aws"
      version = "0.28.0"
      context = module.label_{{ group.id }}.context
      name = "logs"
      attributes = ["cloudfront"]
      acl = "log-delivery-write"
      standard_transition_days = 30
      glacier_transition_days = 60
      expiration_days = 90
    }

    resource "aws_sns_topic" "alarms_{{ group.id }}" {
      name = "${module.label_{{ group.id }}.id}-cloudfront-alarms"
    }
    {% endfor %}

    {% for proxy in proxies %}
    module "cloudfront_{{ proxy.id }}" {
      source = "sr2c/bc-proxy/aws"
      version = "0.0.5"
      origin_domain = "{{ proxy.origin.domain_name }}"
      logging_bucket = module.log_bucket_{{ proxy.origin.group.id }}.bucket_domain_name
      sns_topic_arn = aws_sns_topic.alarms_{{ proxy.origin.group.id }}.arn
      low_bandwidth_alarm = false
      context = module.label_{{ proxy.origin.group.id }}.context
      name = "proxy"
      attributes = ["{{ proxy.origin.domain_name }}"]
    }
    {% endfor %}
    """
|
||||
|
||||
|
||||
def import_cloudfront_values():
    """Copy each distribution's domain name and id from the Terraform state
    back onto the matching Proxy row."""
    completed = subprocess.run(
        ['terraform', 'show', '-json'],
        cwd=os.path.join(
            app.config['TERRAFORM_DIRECTORY'],
            "proxy_cloudfront"),
        stdout=subprocess.PIPE)
    state = json.loads(completed.stdout)

    for child in state['values']['root_module']['child_modules']:
        address = child['address']
        if not address.startswith('module.cloudfront_'):
            continue
        for resource in child['resources']:
            if not resource['address'].endswith('aws_cloudfront_distribution.this'):
                continue
            # Module address suffix is the Proxy primary key.
            proxy = Proxy.query.filter(
                Proxy.id == address[len('module.cloudfront_'):]).first()
            proxy.url = "https://" + resource['values']['domain_name']
            proxy.slug = resource['values']['id']
            proxy.terraform_updated = datetime.datetime.utcnow()
            db.session.commit()
            break
|
||||
|
||||
|
||||
def import_cloudwatch_alarms():
    """Mirror CloudWatch bandwidth alarms into local proxy alarms and update
    the service-level CloudFront distribution quota alarm."""
    cloudwatch = boto3.client('cloudwatch',
                              aws_access_key_id=app.config['AWS_ACCESS_KEY'],
                              aws_secret_access_key=app.config['AWS_SECRET_KEY'],
                              region_name='us-east-1')
    dist_paginator = cloudwatch.get_paginator('describe_alarms')
    page_iterator = dist_paginator.paginate(AlarmNamePrefix="bandwidth-out-high-")
    for page in page_iterator:
        for cw_alarm in page['MetricAlarms']:
            # The alarm name suffix is the distribution id, stored as the
            # proxy's slug by import_cloudfront_values().
            dist_id = cw_alarm["AlarmName"][len("bandwidth-out-high-"):]
            proxy = Proxy.query.filter(Proxy.slug == dist_id).first()
            if proxy is None:
                print("Skipping unknown proxy " + dist_id)
                continue
            alarm = get_proxy_alarm(proxy.id, "bandwidth-out-high")
            if cw_alarm['StateValue'] == "OK":
                alarm.update_state(AlarmState.OK, "CloudWatch alarm OK")
            elif cw_alarm['StateValue'] == "ALARM":
                alarm.update_state(AlarmState.CRITICAL, "CloudWatch alarm ALARM")
            else:
                # e.g. INSUFFICIENT_DATA
                alarm.update_state(AlarmState.UNKNOWN, f"CloudWatch alarm {cw_alarm['StateValue']}")
    # Service-level alarm warning as we approach the CloudFront
    # distribution quota for the account.
    alarm = Alarm.query.filter(
        Alarm.alarm_type == "cloudfront-quota"
    ).first()
    if alarm is None:
        alarm = Alarm()
        alarm.target = "service/cloudfront"
        alarm.alarm_type = "cloudfront-quota"
        alarm.state_changed = datetime.datetime.utcnow()
        db.session.add(alarm)
    alarm.last_updated = datetime.datetime.utcnow()
    # count() lets the database count rows instead of materialising them all.
    # NOTE(review): this counts live proxies of every provider — confirm
    # whether a Proxy.provider == "cloudfront" filter is intended here.
    deployed_count = Proxy.query.filter(
        Proxy.destroyed == None).count()
    old_state = alarm.alarm_state
    if deployed_count > 370:
        alarm.alarm_state = AlarmState.CRITICAL
    elif deployed_count > 320:
        alarm.alarm_state = AlarmState.WARNING
    else:
        alarm.alarm_state = AlarmState.OK
    if alarm.alarm_state != old_state:
        alarm.state_changed = datetime.datetime.utcnow()
    db.session.commit()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Scheduled-job entry point: reconcile the proxy fleet with the database,
    # apply the Terraform configuration, then sync URLs and alarm state back.
    with app.app_context():
        auto = ProxyCloudfrontAutomation()
        auto.destroy_expired_proxies()
        auto.create_missing_proxies()
        auto.generate_terraform()
        auto.terraform_init()
        auto.terraform_apply()
        import_cloudfront_values()
        import_cloudwatch_alarms()
|
60
app/terraform/proxy_check.py
Normal file
60
app/terraform/proxy_check.py
Normal file
|
@ -0,0 +1,60 @@
|
|||
import requests
|
||||
|
||||
from app import app
|
||||
from app.extensions import db
|
||||
from app.models import AlarmState, Alarm, Proxy
|
||||
|
||||
|
||||
def set_http_alarm(proxy_id: int, state: AlarmState, text: str):
    """Record the "http-status" alarm state for a proxy.

    Reuses the proxy's existing alarm row when there is one, otherwise
    creates it before updating the state.
    """
    existing = Alarm.query.filter(
        Alarm.proxy_id == proxy_id,
        Alarm.alarm_type == "http-status"
    ).first()
    if existing is not None:
        alarm = existing
    else:
        alarm = Alarm()
        alarm.proxy_id = proxy_id
        alarm.alarm_type = "http-status"
        db.session.add(alarm)
    alarm.update_state(state, text)
|
||||
|
||||
|
||||
def check_http():
    """Probe every live proxy over HTTP and record an "http-status" alarm.

    A successful non-redirect response marks the alarm OK; redirects, HTTP
    error statuses, connection failures and timeouts mark it CRITICAL.
    """
    proxies = Proxy.query.filter(
        Proxy.destroyed == None
    )
    for proxy in proxies:
        if proxy.url is None:
            # Proxy has not been deployed yet, so there is nothing to probe.
            continue
        try:
            r = requests.get(proxy.url,
                             allow_redirects=False,
                             timeout=5)
            r.raise_for_status()
            status_text = f"{r.status_code} {r.reason}"
            if r.is_redirect:
                # With allow_redirects=False a 3xx lands here; an unexpected
                # redirect suggests the endpoint no longer fronts the origin.
                set_http_alarm(
                    proxy.id,
                    AlarmState.CRITICAL,
                    status_text
                )
            else:
                set_http_alarm(
                    proxy.id,
                    AlarmState.OK,
                    status_text
                )
        except (requests.ConnectionError, requests.Timeout):
            set_http_alarm(
                proxy.id,
                AlarmState.CRITICAL,
                "Connection failure")  # plain string: was an f-string with no placeholders
        except requests.HTTPError:
            # r is always bound here: raise_for_status() raised after the
            # assignment succeeded.
            set_http_alarm(
                proxy.id,
                AlarmState.CRITICAL,
                f"{r.status_code} {r.reason}"
            )
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Scheduled-job entry point: run the HTTP health check inside an
    # application context so the database session is available.
    with app.app_context():
        check_http()
|
Loading…
Add table
Add a link
Reference in a new issue