ci: add flake8

Iain Learmonth 2022-05-16 13:29:48 +01:00
parent 014596d271
commit dd501a6e4e
32 changed files with 170 additions and 171 deletions


@@ -28,11 +28,11 @@ class AlarmProxyAzureCdnAutomation(BaseAutomation):
if x.name.startswith("bandwidth-out-high-bc-") and x.properties.essentials.monitor_condition == "Fired"]
for proxy in Proxy.query.filter(
Proxy.provider == "azure_cdn",
-Proxy.destroyed == None
+Proxy.destroyed.is_(None)
):
alarm = get_proxy_alarm(proxy.id, "bandwidth-out-high")
if proxy.origin.group.group_name.lower() not in firing:
alarm.update_state(AlarmState.OK, "Azure monitor alert not firing")
else:
alarm.update_state(AlarmState.CRITICAL, "Azure monitor alert firing")
-return True, ""
+return True, ""
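
Most of the Python changes in this commit follow the pattern in the hunk above: flake8 reports E711 ("comparison to None should be 'if cond is None:'") for filters written as Proxy.destroyed == None, but the textbook "is None" rewrite would not work on a SQLAlchemy column expression, so the filters switch to the column operator .is_(None), which builds the same IS NULL clause without the warning. A minimal sketch with a hypothetical Proxy model (not the project's actual model):

# Minimal sketch; the Proxy model here is hypothetical.
from sqlalchemy import Column, DateTime, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Proxy(Base):
    __tablename__ = "proxy"
    id = Column(Integer, primary_key=True)
    destroyed = Column(DateTime, nullable=True)


# Old style: SQLAlchemy overloads == so this still builds "destroyed IS NULL",
# but flake8 reports E711 for it (suppressed here so the sketch itself passes).
old = Proxy.destroyed == None  # noqa: E711

# The usual E711 fix, "Proxy.destroyed is None", is not equivalent: it compares
# the Column object itself to None in Python, is always False, and never
# becomes part of the SQL query.

# New style: explicit operator, same SQL, no warning.
new = Proxy.destroyed.is_(None)

print(old)  # proxy.destroyed IS NULL
print(new)  # proxy.destroyed IS NULL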


@@ -47,7 +47,7 @@ class AlarmProxyCloudfrontAutomation(BaseAutomation):
db.session.add(alarm)
alarm.last_updated = datetime.datetime.utcnow()
deployed_count = len(Proxy.query.filter(
-Proxy.destroyed == None).all())
+Proxy.destroyed.is_(None)).all())
old_state = alarm.alarm_state
if deployed_count > 370:
alarm.alarm_state = AlarmState.CRITICAL


@@ -29,7 +29,7 @@ class AlarmProxyHTTPStatusAutomation(BaseAutomation):
def automate(self, full: bool = False) -> Tuple[bool, str]:
proxies = Proxy.query.filter(
-Proxy.destroyed == None
+Proxy.destroyed.is_(None)
)
for proxy in proxies:
try:


@@ -1,7 +1,7 @@
from collections import defaultdict
from datetime import datetime
from datetime import timedelta
-from typing import Dict, Tuple, Union, Any
+from typing import Dict, Tuple, Any
import requests
@@ -78,7 +78,7 @@ class BlockOONIAutomation(BaseAutomation):
frequency = 240
def automate(self, full: bool = False) -> Tuple[bool, str]:
-origins = Origin.query.filter(Origin.destroyed == None).all()
+origins = Origin.query.filter(Origin.destroyed.is_(None)).all()
for origin in origins:
ooni = threshold_origin(origin.domain_name)
for country in ooni:
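
The import change at the top of this file is a different flake8 check: F401, "imported but unused". Union is evidently no longer referenced anywhere in the module, so it is dropped from the typing import while the names still in use stay. A contrived reproduction (file and function names are made up):

# f401_demo.py -- contrived; flake8 reports
# "F401 'typing.Union' imported but unused" for the first line.
from typing import Any, Dict, Tuple, Union


def automate(full: bool = False) -> Tuple[bool, str]:
    results: Dict[str, Any] = {}
    return True, str(results)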


@@ -17,8 +17,8 @@ class BlockRoskomsvobodaAutomation(BaseAutomation):
def automate(self, full: bool = False) -> Tuple[bool, str]:
activities = []
proxies: List[Proxy] = Proxy.query.filter(
-Proxy.deprecated == None,
-Proxy.destroyed == None
+Proxy.deprecated.is_(None),
+Proxy.destroyed.is_(None)
).all()
patterns = requests.get("https://reestr.rublacklist.net/api/v2/domains/json").json()
for pattern in patterns:


@@ -23,12 +23,12 @@ class BridgeAutomation(TerraformAutomation):
def create_missing(self) -> None:
bridgeconfs: Iterable[BridgeConf] = BridgeConf.query.filter(
BridgeConf.provider == self.provider,
-BridgeConf.destroyed == None
+BridgeConf.destroyed.is_(None)
).all()
for bridgeconf in bridgeconfs:
active_bridges = Bridge.query.filter(
Bridge.conf_id == bridgeconf.id,
-Bridge.deprecated == None
+Bridge.deprecated.is_(None)
).all()
if len(active_bridges) < bridgeconf.number:
for i in range(bridgeconf.number - len(active_bridges)):
@@ -49,7 +49,7 @@ class BridgeAutomation(TerraformAutomation):
def destroy_expired(self) -> None:
cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=0)
bridges = [b for b in Bridge.query.filter(
-Bridge.destroyed == None,
+Bridge.destroyed.is_(None),
Bridge.deprecated < cutoff
).all() if b.conf.provider == self.provider]
for bridge in bridges:
@@ -66,7 +66,7 @@ class BridgeAutomation(TerraformAutomation):
self.template,
groups=Group.query.all(),
bridgeconfs=BridgeConf.query.filter(
-BridgeConf.destroyed == None,
+BridgeConf.destroyed.is_(None),
BridgeConf.provider == self.provider
).all(),
global_namespace=app.config['GLOBAL_NAMESPACE'],


@@ -20,17 +20,17 @@ class BridgeAWSAutomation(BridgeAutomation):
}
}
}
provider "aws" {
access_key = "{{ aws_access_key }}"
secret_key = "{{ aws_secret_key }}"
region = "us-east-1"
}
locals {
ssh_key = file("{{ ssh_public_key_path }}")
}
{% for group in groups %}
module "label_{{ group.id }}" {
source = "cloudposse/label/null"
@@ -40,7 +40,7 @@ class BridgeAWSAutomation(BridgeAutomation):
label_order = ["namespace", "tenant", "name", "attributes"]
}
{% endfor %}
{% for bridgeconf in bridgeconfs %}
{% for bridge in bridgeconf.bridges %}
{% if not bridge.destroyed %}
@@ -54,11 +54,11 @@ class BridgeAWSAutomation(BridgeAutomation):
attributes = ["{{ bridge.id }}"]
distribution_method = "{{ bridge.conf.method }}"
}
output "bridge_hashed_fingerprint_{{ bridge.id }}" {
value = module.bridge_{{ bridge.id }}.hashed_fingerprint
}
output "bridge_bridgeline_{{ bridge.id }}" {
value = module.bridge_{{ bridge.id }}.bridgeline
sensitive = true


@@ -34,7 +34,7 @@ class BridgeOvhAutomation(BridgeAutomation):
}
}
}
provider "openstack" {
auth_url = "https://auth.cloud.ovh.net/v3/"
domain_name = "Default" # Domain name - Always at 'default' for OVHcloud
@@ -42,24 +42,24 @@ class BridgeOvhAutomation(BridgeAutomation):
password = "{{ ovh_openstack_password }}"
tenant_id = "{{ ovh_openstack_tenant_id }}"
}
provider "ovh" {
endpoint = "ovh-eu"
application_key = "{{ ovh_cloud_application_key }}"
application_secret = "{{ ovh_cloud_application_secret }}"
consumer_key = "{{ ovh_cloud_consumer_key }}"
}
locals {
public_ssh_key = file("{{ ssh_public_key_path }}")
private_ssh_key = file("{{ ssh_private_key_path }}")
}
data "ovh_cloud_project_regions" "regions" {
service_name = "{{ ovh_openstack_tenant_id }}"
has_services_up = ["instance"]
}
{% for group in groups %}
module "label_{{ group.id }}" {
source = "cloudposse/label/null"
@@ -69,19 +69,19 @@ class BridgeOvhAutomation(BridgeAutomation):
label_order = ["namespace", "tenant", "name", "attributes"]
}
{% endfor %}
{% for bridgeconf in bridgeconfs %}
{% for bridge in bridgeconf.bridges %}
{% if not bridge.destroyed %}
resource "random_shuffle" "region_{{ bridge.id }}" {
input = data.ovh_cloud_project_regions.regions.names
result_count = 1
lifecycle {
ignore_changes = [input] # don't replace all the bridges if a new region appears
}
}
module "bridge_{{ bridge.id }}" {
source = "sr2c/tor-bridge/openstack"
version = "0.0.7"
@@ -94,11 +94,11 @@ class BridgeOvhAutomation(BridgeAutomation):
contact_info = "hi"
distribution_method = "{{ bridge.conf.method }}"
}
output "bridge_hashed_fingerprint_{{ bridge.id }}" {
value = module.bridge_{{ bridge.id }}.hashed_fingerprint
}
output "bridge_bridgeline_{{ bridge.id }}" {
value = module.bridge_{{ bridge.id }}.bridgeline
sensitive = true


@@ -15,7 +15,7 @@ def update_eotk_instance(group_id: int,
Eotk.group_id == group_id,
Eotk.region == region,
Eotk.provider == "aws",
-Eotk.destroyed == None
+Eotk.destroyed.is_(None)
).first()
if instance is None:
instance = Eotk()
@@ -45,20 +45,20 @@ class EotkAWSAutomation(TerraformAutomation):
}
}
}
provider "aws" {
access_key = "{{ aws_access_key }}"
secret_key = "{{ aws_secret_key }}"
region = "us-east-2"
}
provider "aws" {
access_key = "{{ aws_access_key }}"
secret_key = "{{ aws_secret_key }}"
region = "eu-central-1"
alias = "second_region"
}
{% for group in groups %}
module "eotk_{{ group.id }}" {
providers = {
@@ -80,8 +80,8 @@ class EotkAWSAutomation(TerraformAutomation):
self.tf_write(
self.template,
groups=Group.query.filter(
-Group.eotk == True,
-Group.destroyed == None
+Group.eotk.is_(True),
+Group.destroyed.is_(None)
).all(),
global_namespace=app.config['GLOBAL_NAMESPACE'],
**{
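
The Group filter above gets the boolean analogue of the same treatment: flake8's E712 flags comparisons written as == True, and on a SQLAlchemy column the replacement that keeps the test inside the generated SQL is .is_(True). A sketch in the same spirit as the earlier one, again with a hypothetical model:

# Hypothetical Group model, mirroring the eotk/destroyed columns filtered above.
from sqlalchemy import Boolean, Column, DateTime, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Group(Base):
    __tablename__ = "groups"
    id = Column(Integer, primary_key=True)
    eotk = Column(Boolean)
    destroyed = Column(DateTime, nullable=True)


# flake8 E712: "comparison to True should be 'if cond is True:' or 'if cond:'"
flagged = Group.eotk == True  # noqa: E712

# Column.is_(True) keeps the comparison in the generated SQL (rendered as
# IS true or an equivalent, depending on the dialect) and satisfies flake8.
clean = Group.eotk.is_(True)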


@@ -25,7 +25,7 @@ class ListAutomation(TerraformAutomation):
self.tf_write(
self.template,
lists=MirrorList.query.filter(
-MirrorList.destroyed == None,
+MirrorList.destroyed.is_(None),
MirrorList.provider == self.provider,
).all(),
global_namespace=app.config['GLOBAL_NAMESPACE'],


@@ -22,16 +22,16 @@ class ListGitlabAutomation(ListAutomation):
}
}
}
provider "gitlab" {
token = "{{ gitlab_token }}"
}
{% for list in lists %}
data "gitlab_project" "project_{{ list.id }}" {
id = "{{ list.container }}"
}
resource "gitlab_repository_file" "file_{{ list.id }}" {
project = data.gitlab_project.project_{{ list.id }}.id
file_path = "{{ list.filename }}"
@@ -41,6 +41,6 @@ class ListGitlabAutomation(ListAutomation):
author_name = "{{ gitlab_author_name }}"
commit_message = "{{ gitlab_commit_message }}"
}
{% endfor %}
"""


@@ -83,8 +83,8 @@ class ProxyAutomation(TerraformAutomation):
def deprecate_orphaned_proxies(self) -> None:
proxies = Proxy.query.filter(
-Proxy.deprecated == None,
-Proxy.destroyed == None,
+Proxy.deprecated.is_(None),
+Proxy.destroyed.is_(None),
Proxy.provider == self.provider
).all()
for proxy in proxies:
@@ -95,7 +95,7 @@ class ProxyAutomation(TerraformAutomation):
def destroy_expired_proxies(self) -> None:
cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=3)
proxies = Proxy.query.filter(
-Proxy.destroyed == None,
+Proxy.destroyed.is_(None),
Proxy.provider == self.provider,
Proxy.deprecated < cutoff
).all()
@@ -123,7 +123,7 @@ class ProxyAutomation(TerraformAutomation):
groups=Group.query.all(),
proxies=Proxy.query.filter(
Proxy.provider == self.provider,
-Proxy.destroyed == None
+Proxy.destroyed.is_(None)
).all(),
subgroups=self.get_subgroups(),
global_namespace=app.config['GLOBAL_NAMESPACE'],


@@ -31,21 +31,20 @@ class ProxyAzureCdnAutomation(ProxyAutomation):
}
}
}
provider "azurerm" {
features {}
client_id = "{{ azure_client_id }}"
client_secret = "{{ azure_client_secret }}"
subscription_id = "{{ azure_subscription_id }}"
tenant_id = "{{ azure_tenant_id }}"
skip_provider_registration = true
}
data "azurerm_resource_group" "this" {
name = "{{ azure_resource_group_name }}"
}
resource "azurerm_storage_account" "this" {
name = "{{ azure_storage_account_name }}"
resource_group_name = data.azurerm_resource_group.this.name
@@ -53,7 +52,7 @@ class ProxyAzureCdnAutomation(ProxyAutomation):
account_tier = "Standard"
account_replication_type = "RAGRS"
}
{% for group in groups %}
module "label_{{ group.id }}" {
source = "cloudposse/label/null"
@@ -62,49 +61,48 @@ class ProxyAzureCdnAutomation(ProxyAutomation):
tenant = "{{ group.group_name }}"
label_order = ["namespace", "tenant", "name", "attributes"]
}
{% for subgroup in subgroups[group.id] %}
resource "azurerm_cdn_profile" "profile_{{ group.id }}_{{ subgroup }}" {
name = "${module.label_{{ group.id }}.id}-sub{{ subgroup }}"
location = "{{ azure_location }}"
resource_group_name = data.azurerm_resource_group.this.name
sku = "Standard_Microsoft"
-tags = module.label_{{ group.id }}.tags
+tags = module.label_{{ group.id }}.tags
}
resource "azurerm_monitor_diagnostic_setting" "profile_diagnostic_{{ group.id }}_{{ subgroup }}" {
name = "cdn-diagnostics"
target_resource_id = azurerm_cdn_profile.profile_{{ group.id }}_{{ subgroup }}.id
storage_account_id = azurerm_storage_account.this.id
log {
category = "AzureCDNAccessLog"
enabled = true
retention_policy {
enabled = true
days = 90
}
}
metric {
category = "AllMetrics"
enabled = true
retention_policy {
enabled = true
days = 90
}
}
}
resource "azurerm_monitor_metric_alert" "response_alert_{{ group.id }}_{{ subgroup }}" {
name = "bandwidth-out-high-${module.label_{{ group.id }}.id}-sub{{ subgroup }}"
resource_group_name = data.azurerm_resource_group.this.name
scopes = [azurerm_cdn_profile.profile_{{ group.id }}_{{ subgroup }}.id]
description = "Action will be triggered when response size is too high."
criteria {
metric_namespace = "Microsoft.Cdn/profiles"
metric_name = "ResponseSize"
@@ -112,26 +110,26 @@ class ProxyAzureCdnAutomation(ProxyAutomation):
operator = "GreaterThan"
threshold = 21474836481
}
window_size = "PT1H"
}
{% endfor %}
{% endfor %}
{% for proxy in proxies %}
resource "azurerm_cdn_endpoint" "endpoint_{{ proxy.id }}" {
name = "{{ proxy.slug }}"
profile_name = azurerm_cdn_profile.profile_{{ proxy.origin.group.id }}_{{ proxy.psg }}.name
location = "{{ azure_location }}"
resource_group_name = data.azurerm_resource_group.this.name
origin_host_header = "{{ proxy.origin.domain_name }}"
origin {
name = "upstream"
host_name = "{{ proxy.origin.domain_name }}"
}
global_delivery_rule {
modify_request_header_action {
action = "Append"
@@ -140,16 +138,16 @@ class ProxyAzureCdnAutomation(ProxyAutomation):
}
}
}
resource "azurerm_monitor_diagnostic_setting" "diagnostic_{{ proxy.id }}" {
name = "cdn-diagnostics"
target_resource_id = azurerm_cdn_endpoint.endpoint_{{ proxy.id }}.id
storage_account_id = azurerm_storage_account.this.id
log {
category = "CoreAnalytics"
enabled = true
retention_policy {
enabled = true
days = 90
@@ -162,7 +160,7 @@ class ProxyAzureCdnAutomation(ProxyAutomation):
def import_state(self, state: Optional[Any]) -> None:
proxies = Proxy.query.filter(
Proxy.provider == self.provider,
-Proxy.destroyed == None
+Proxy.destroyed.is_(None)
).all()
for proxy in proxies:
proxy.url = f"https://{proxy.slug}.azureedge.net"


@@ -24,13 +24,13 @@ class ProxyCloudfrontAutomation(ProxyAutomation):
}
}
}
provider "aws" {
access_key = "{{ aws_access_key }}"
secret_key = "{{ aws_secret_key }}"
region = "us-east-2"
}
{% for group in groups %}
module "label_{{ group.id }}" {
source = "cloudposse/label/null"
@@ -39,7 +39,7 @@ class ProxyCloudfrontAutomation(ProxyAutomation):
tenant = "{{ group.group_name }}"
label_order = ["namespace", "tenant", "name", "attributes"]
}
module "log_bucket_{{ group.id }}" {
source = "cloudposse/s3-log-storage/aws"
version = "0.28.0"
@@ -51,12 +51,12 @@ class ProxyCloudfrontAutomation(ProxyAutomation):
glacier_transition_days = 60
expiration_days = 90
}
resource "aws_sns_topic" "alarms_{{ group.id }}" {
name = "${module.label_{{ group.id }}.id}-cloudfront-alarms"
}
{% endfor %}
{% for proxy in proxies %}
module "cloudfront_{{ proxy.id }}" {
source = "sr2c/bc-proxy/aws"


@@ -61,7 +61,7 @@ module "log_bucket_{{ group.id }}" {
{% if group.id == 3 %}
resource "fastly_service_vcl" "service_{{ group.id }}" {
name = module.label_{{ group.id }}.id
{% for origin in group.origins %}
{% for proxy in origin.proxies %}
{% if proxy.destroyed == None and proxy.provider == "fastly" %}
@@ -71,7 +71,7 @@ resource "fastly_service_vcl" "service_{{ group.id }}" {
}
{% endif %}
{% endfor %}
backend {
address = "{{ origin.domain_name }}"
name = "{{ origin.description }}"
@@ -110,7 +110,7 @@ def create_missing_proxies():
def destroy_expired_proxies():
cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=3)
proxies = Proxy.query.filter(
-Proxy.destroyed == None,
+Proxy.destroyed.is_(None),
Proxy.provider == "fastly",
Proxy.deprecated < cutoff
).all()