lint: reformat python code with black
parent 331beb01b4
commit a406a7974b
88 changed files with 2579 additions and 1608 deletions
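The diff below is a mechanical reformat: black normalises strings to double quotes, adds trailing commas to argument lists it explodes across lines, and rewraps anything longer than its default 88-character line length. As a hedged sketch (the exact commands are not recorded in this commit), the same result can typically be reproduced and verified locally with black's standard CLI, assuming a default configuration:

    pip install black
    black .          # rewrite the source tree in place
    black --check .  # exit non-zero if any file would still be reformatted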
@@ -15,15 +15,14 @@ from app.models.mirrors import Origin, Proxy, SmartProxy
 from app.terraform.terraform import TerraformAutomation


-def update_smart_proxy_instance(group_id: int,
-                                provider: str,
-                                region: str,
-                                instance_id: str) -> None:
+def update_smart_proxy_instance(
+    group_id: int, provider: str, region: str, instance_id: str
+) -> None:
     instance = SmartProxy.query.filter(
         SmartProxy.group_id == group_id,
         SmartProxy.region == region,
         SmartProxy.provider == provider,
-        SmartProxy.destroyed.is_(None)
+        SmartProxy.destroyed.is_(None),
     ).first()
     if instance is None:
         instance = SmartProxy()
@@ -93,16 +92,21 @@ class ProxyAutomation(TerraformAutomation):
             self.template,
             groups=groups,
             proxies=Proxy.query.filter(
-                Proxy.provider == self.provider, Proxy.destroyed.is_(None)).all(),
+                Proxy.provider == self.provider, Proxy.destroyed.is_(None)
+            ).all(),
             subgroups=self.get_subgroups(),
-            global_namespace=app.config['GLOBAL_NAMESPACE'], bypass_token=app.config['BYPASS_TOKEN'],
-            terraform_modules_path=os.path.join(*list(os.path.split(app.root_path))[:-1], 'terraform-modules'),
+            global_namespace=app.config["GLOBAL_NAMESPACE"],
+            bypass_token=app.config["BYPASS_TOKEN"],
+            terraform_modules_path=os.path.join(
+                *list(os.path.split(app.root_path))[:-1], "terraform-modules"
+            ),
             backend_config=f"""backend "http" {{
                 lock_address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
                 unlock_address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
                 address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
             }}""",
-            **{k: app.config[k.upper()] for k in self.template_parameters})
+            **{k: app.config[k.upper()] for k in self.template_parameters},
+        )
         if self.smart_proxies:
             for group in groups:
                 self.sp_config(group)
@@ -111,9 +115,11 @@ class ProxyAutomation(TerraformAutomation):
         group_origins: List[Origin] = Origin.query.filter(
             Origin.group_id == group.id,
             Origin.destroyed.is_(None),
-            Origin.smart.is_(True)
+            Origin.smart.is_(True),
         ).all()
-        self.tmpl_write(f"smart_proxy.{group.id}.conf", """
+        self.tmpl_write(
+            f"smart_proxy.{group.id}.conf",
+            """
 {% for origin in origins %}
 server {
     listen 443 ssl;
@@ -173,23 +179,28 @@ class ProxyAutomation(TerraformAutomation):
 }
 {% endfor %}
         """,
-                        provider=self.provider,
-                        origins=group_origins,
-                        smart_zone=app.config['SMART_ZONE'])
+            provider=self.provider,
+            origins=group_origins,
+            smart_zone=app.config["SMART_ZONE"],
+        )

     @classmethod
     def get_subgroups(cls) -> Dict[int, Dict[int, int]]:
         conn = db.engine.connect()
-        stmt = text("""
+        stmt = text(
+            """
         SELECT origin.group_id, proxy.psg, COUNT(proxy.id) FROM proxy, origin
         WHERE proxy.origin_id = origin.id
         AND proxy.destroyed IS NULL
         AND proxy.provider = :provider
         GROUP BY origin.group_id, proxy.psg;
-        """)
+        """
+        )
         stmt = stmt.bindparams(provider=cls.provider)
         result = conn.execute(stmt).all()
-        subgroups: Dict[int, Dict[int, int]] = defaultdict(lambda: defaultdict(lambda: 0))
+        subgroups: Dict[int, Dict[int, int]] = defaultdict(
+            lambda: defaultdict(lambda: 0)
+        )
         for row in result:
             subgroups[row[0]][row[1]] = row[2]
         return subgroups
@@ -21,7 +21,7 @@ class ProxyAzureCdnAutomation(ProxyAutomation):
         "azure_client_secret",
         "azure_subscription_id",
         "azure_tenant_id",
-        "smart_zone"
+        "smart_zone",
     ]

     template = """
@@ -162,8 +162,7 @@ class ProxyAzureCdnAutomation(ProxyAutomation):

     def import_state(self, state: Optional[Any]) -> None:
         proxies = Proxy.query.filter(
-            Proxy.provider == self.provider,
-            Proxy.destroyed.is_(None)
+            Proxy.provider == self.provider, Proxy.destroyed.is_(None)
         ).all()
         for proxy in proxies:
             proxy.url = f"https://{proxy.slug}.azureedge.net"
@@ -17,7 +17,7 @@ class ProxyCloudfrontAutomation(ProxyAutomation):
         "admin_email",
         "aws_access_key",
         "aws_secret_key",
-        "smart_zone"
+        "smart_zone",
     ]

     template = """
@@ -111,26 +111,35 @@ class ProxyCloudfrontAutomation(ProxyAutomation):
     def import_state(self, state: Any) -> None:
         if not isinstance(state, dict):
             raise RuntimeError("The Terraform state object returned was not a dict.")
-        if "child_modules" not in state['values']['root_module']:
+        if "child_modules" not in state["values"]["root_module"]:
             # There are no CloudFront proxies deployed to import state for
             return
         # CloudFront distributions (proxies)
-        for mod in state['values']['root_module']['child_modules']:
-            if mod['address'].startswith('module.cloudfront_'):
-                for res in mod['resources']:
-                    if res['address'].endswith('aws_cloudfront_distribution.this'):
-                        proxy = Proxy.query.filter(Proxy.id == mod['address'][len('module.cloudfront_'):]).first()
-                        proxy.url = "https://" + res['values']['domain_name']
-                        proxy.slug = res['values']['id']
+        for mod in state["values"]["root_module"]["child_modules"]:
+            if mod["address"].startswith("module.cloudfront_"):
+                for res in mod["resources"]:
+                    if res["address"].endswith("aws_cloudfront_distribution.this"):
+                        proxy = Proxy.query.filter(
+                            Proxy.id == mod["address"][len("module.cloudfront_") :]
+                        ).first()
+                        proxy.url = "https://" + res["values"]["domain_name"]
+                        proxy.slug = res["values"]["id"]
                         proxy.terraform_updated = datetime.now(tz=timezone.utc)
                         break
         # EC2 instances (smart proxies)
         for g in state["values"]["root_module"]["child_modules"]:
             if g["address"].startswith("module.smart_proxy_"):
-                group_id = int(g["address"][len("module.smart_proxy_"):])
+                group_id = int(g["address"][len("module.smart_proxy_") :])
                 for s in g["child_modules"]:
                     if s["address"].endswith(".module.instance"):
                         for x in s["resources"]:
-                            if x["address"].endswith(".module.instance.aws_instance.default[0]"):
-                                update_smart_proxy_instance(group_id, self.provider, "us-east-2a", x['values']['id'])
+                            if x["address"].endswith(
+                                ".module.instance.aws_instance.default[0]"
+                            ):
+                                update_smart_proxy_instance(
+                                    group_id,
+                                    self.provider,
+                                    "us-east-2a",
+                                    x["values"]["id"],
+                                )
         db.session.commit()
@@ -14,11 +14,7 @@ class ProxyFastlyAutomation(ProxyAutomation):
     subgroup_members_max = 20
     cloud_name = "fastly"

-    template_parameters = [
-        "aws_access_key",
-        "aws_secret_key",
-        "fastly_api_key"
-    ]
+    template_parameters = ["aws_access_key", "aws_secret_key", "fastly_api_key"]

     template = """
     terraform {
@@ -125,13 +121,14 @@ class ProxyFastlyAutomation(ProxyAutomation):
         Constructor method.
         """
         # Requires Flask application context to read configuration
-        self.subgroup_members_max = min(current_app.config.get("FASTLY_MAX_BACKENDS", 5), 20)
+        self.subgroup_members_max = min(
+            current_app.config.get("FASTLY_MAX_BACKENDS", 5), 20
+        )
         super().__init__(*args, **kwargs)

     def import_state(self, state: Optional[Any]) -> None:
         proxies = Proxy.query.filter(
-            Proxy.provider == self.provider,
-            Proxy.destroyed.is_(None)
+            Proxy.provider == self.provider, Proxy.destroyed.is_(None)
         ).all()
         for proxy in proxies:
             proxy.url = f"https://{proxy.slug}.global.ssl.fastly.net"
@@ -18,12 +18,16 @@ from app.terraform.proxy.azure_cdn import ProxyAzureCdnAutomation
 from app.terraform.proxy.cloudfront import ProxyCloudfrontAutomation
 from app.terraform.proxy.fastly import ProxyFastlyAutomation

-PROXY_PROVIDERS: Dict[str, Type[ProxyAutomation]] = {p.provider: p for p in [  # type: ignore[attr-defined]
-    # In order of preference
-    ProxyCloudfrontAutomation,
-    ProxyFastlyAutomation,
-    ProxyAzureCdnAutomation
-] if p.enabled}  # type: ignore[attr-defined]
+PROXY_PROVIDERS: Dict[str, Type[ProxyAutomation]] = {
+    p.provider: p  # type: ignore[attr-defined]
+    for p in [
+        # In order of preference
+        ProxyCloudfrontAutomation,
+        ProxyFastlyAutomation,
+        ProxyAzureCdnAutomation,
+    ]
+    if p.enabled  # type: ignore[attr-defined]
+}

 SubgroupCount = OrderedDictT[str, OrderedDictT[int, OrderedDictT[int, int]]]

@@ -61,8 +65,9 @@ def random_slug(origin_domain_name: str) -> str:
     "exampasdfghjkl"
     """
    # The random slug doesn't need to be cryptographically secure, hence the use of `# nosec`
-    return tldextract.extract(origin_domain_name).domain[:5] + ''.join(
-        random.choices(string.ascii_lowercase, k=12))  # nosec
+    return tldextract.extract(origin_domain_name).domain[:5] + "".join(
+        random.choices(string.ascii_lowercase, k=12)  # nosec: B311
+    )


 def calculate_subgroup_count(proxies: Optional[List[Proxy]] = None) -> SubgroupCount:
@@ -95,8 +100,13 @@ def calculate_subgroup_count(proxies: Optional[List[Proxy]] = None) -> SubgroupC
     return subgroup_count


-def next_subgroup(subgroup_count: SubgroupCount, provider: str, group_id: int, max_subgroup_count: int,
-                  max_subgroup_members: int) -> Optional[int]:
+def next_subgroup(
+    subgroup_count: SubgroupCount,
+    provider: str,
+    group_id: int,
+    max_subgroup_count: int,
+    max_subgroup_members: int,
+) -> Optional[int]:
     """
     Find the first available subgroup with less than the specified maximum count in the specified provider and group.
     If the last subgroup in the group is full, return the next subgroup number as long as it doesn't exceed
@@ -137,27 +147,36 @@ def auto_deprecate_proxies() -> None:
     - The "max_age_reached" reason means the proxy has been in use for longer than the maximum allowed period.
       The maximum age cutoff is randomly set to a time between 24 and 48 hours.
     """
-    origin_destroyed_proxies = (db.session.query(Proxy)
-                                .join(Origin, Proxy.origin_id == Origin.id)
-                                .filter(Proxy.destroyed.is_(None),
-                                        Proxy.deprecated.is_(None),
-                                        Origin.destroyed.is_not(None))
-                                .all())
+    origin_destroyed_proxies = (
+        db.session.query(Proxy)
+        .join(Origin, Proxy.origin_id == Origin.id)
+        .filter(
+            Proxy.destroyed.is_(None),
+            Proxy.deprecated.is_(None),
+            Origin.destroyed.is_not(None),
+        )
+        .all()
+    )
     logging.debug("Origin destroyed: %s", origin_destroyed_proxies)
     for proxy in origin_destroyed_proxies:
         proxy.deprecate(reason="origin_destroyed")
-    max_age_proxies = (db.session.query(Proxy)
-                       .join(Origin, Proxy.origin_id == Origin.id)
-                       .filter(Proxy.destroyed.is_(None),
-                               Proxy.deprecated.is_(None),
-                               Proxy.pool_id != -1,  # do not rotate hotspare proxies
-                               Origin.assets,
-                               Origin.auto_rotation)
-                       .all())
+    max_age_proxies = (
+        db.session.query(Proxy)
+        .join(Origin, Proxy.origin_id == Origin.id)
+        .filter(
+            Proxy.destroyed.is_(None),
+            Proxy.deprecated.is_(None),
+            Proxy.pool_id != -1,  # do not rotate hotspare proxies
+            Origin.assets,
+            Origin.auto_rotation,
+        )
+        .all()
+    )
     logging.debug("Max age: %s", max_age_proxies)
     for proxy in max_age_proxies:
         max_age_cutoff = datetime.now(timezone.utc) - timedelta(
-            days=1, seconds=86400 * random.random())  # nosec: B311
+            days=1, seconds=86400 * random.random()  # nosec: B311
+        )
         if proxy.added < max_age_cutoff:
             proxy.deprecate(reason="max_age_reached")

@@ -171,8 +190,7 @@ def destroy_expired_proxies() -> None:
     """
     expiry_cutoff = datetime.now(timezone.utc) - timedelta(days=4)
     proxies = Proxy.query.filter(
-        Proxy.destroyed.is_(None),
-        Proxy.deprecated < expiry_cutoff
+        Proxy.destroyed.is_(None), Proxy.deprecated < expiry_cutoff
     ).all()
     for proxy in proxies:
         logging.debug("Destroying expired proxy")
@@ -244,12 +262,17 @@ class ProxyMetaAutomation(BaseAutomation):
                 if origin.destroyed is not None:
                     continue
                 proxies = [
-                    x for x in origin.proxies
-                    if x.pool_id == pool.id and x.deprecated is None and x.destroyed is None
+                    x
+                    for x in origin.proxies
+                    if x.pool_id == pool.id
+                    and x.deprecated is None
+                    and x.destroyed is None
                 ]
                 logging.debug("Proxies for group %s: %s", group.group_name, proxies)
                 if not proxies:
-                    logging.debug("Creating new proxy for %s in pool %s", origin, pool)
+                    logging.debug(
+                        "Creating new proxy for %s in pool %s", origin, pool
+                    )
                     if not promote_hot_spare_proxy(pool.id, origin):
                         # No "hot spare" available
                         self.create_proxy(pool.id, origin)
@@ -270,8 +293,13 @@ class ProxyMetaAutomation(BaseAutomation):
         """
         for provider in PROXY_PROVIDERS.values():
             logging.debug("Looking at provider %s", provider.provider)
-            subgroup = next_subgroup(self.subgroup_count, provider.provider, origin.group_id,
-                                     provider.subgroup_members_max, provider.subgroup_count_max)
+            subgroup = next_subgroup(
+                self.subgroup_count,
+                provider.provider,
+                origin.group_id,
+                provider.subgroup_members_max,
+                provider.subgroup_count_max,
+            )
             if subgroup is None:
                 continue  # Exceeded maximum number of subgroups and last subgroup is full
             self.increment_subgroup(provider.provider, origin.group_id, subgroup)
@@ -317,9 +345,7 @@ class ProxyMetaAutomation(BaseAutomation):
         If an origin is not destroyed and lacks active proxies (not deprecated and not destroyed),
         a new 'hot spare' proxy for this origin is created in the reserve pool (with pool_id = -1).
         """
-        origins = Origin.query.filter(
-            Origin.destroyed.is_(None)
-        ).all()
+        origins = Origin.query.filter(Origin.destroyed.is_(None)).all()
         for origin in origins:
             if origin.countries:
                 risk_levels = origin.risk_level.items()
@@ -328,7 +354,10 @@ class ProxyMetaAutomation(BaseAutomation):
                 if highest_risk_level < 4:
                     for proxy in origin.proxies:
                         if proxy.destroyed is None and proxy.pool_id == -1:
-                            logging.debug("Destroying hot spare proxy for origin %s (low risk)", origin)
+                            logging.debug(
+                                "Destroying hot spare proxy for origin %s (low risk)",
+                                origin,
+                            )
                             proxy.destroy()
                 continue
             if origin.destroyed is not None: