automate: move working_dir to be set in constructor
parent efdaad977a
commit 109851745b
7 changed files with 59 additions and 43 deletions
@@ -38,7 +38,7 @@ from app.terraform.proxy.cloudfront import ProxyCloudfrontAutomation
 from app.terraform.proxy.fastly import ProxyFastlyAutomation
 
 jobs = {
-    x.short_name: x
+    x.short_name: x  # type: ignore[attr-defined]
     for x in [
         AlarmEotkAwsAutomation,
         AlarmProxyAzureCdnAutomation,
@@ -78,7 +78,7 @@ def run_all(**kwargs: bool) -> None:
     :return: None
     """
     for job in jobs.values():
-        run_job(job, **kwargs)  # type: ignore
+        run_job(job, **kwargs)
 
 
 def run_job(job_cls: Type[BaseAutomation], *,
@@ -107,10 +107,10 @@ def run_job(job_cls: Type[BaseAutomation], *,
         return
     automation.state = AutomationState.RUNNING
     db.session.commit()
+    job: BaseAutomation = job_cls()
     try:
-        job: BaseAutomation = job_cls()
-        tempdir_path = tempfile.mkdtemp()
-        success, logs = job.automate(tempdir_path)
+        success, logs = job.automate()
     # We want to catch any and all exceptions that would cause problems here, because
     # the error handling process isn't really handling the error, but rather causing it
     # to be logged for investigation. Catching more specific exceptions would just mean that
@@ -162,7 +162,7 @@ class AutomateCliHandler(BaseCliHandler):
     def run(self) -> None:
         with app.app_context():
             if self.args.job:
-                run_job(jobs[self.args.job],  # type: ignore
+                run_job(jobs[self.args.job],
                         force=self.args.force,
                         ignore_schedule=self.args.ignore_schedule)
             elif self.args.all:

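The runner now builds the job object before entering the try block and calls automate() with no arguments. The sketch below illustrates, under the new constructor-based contract, how a caller could supply a working directory at construction time; the helper function, the keyword usage, and the tempfile call are assumptions for illustration, not lines from this commit.

import tempfile


def run_one(job_cls):
    # Hypothetical runner (not code from this commit): the working directory is
    # supplied when the job object is constructed -- BaseAutomation.__init__ stores
    # it on self.working_dir -- and automate() is then called with no arguments.
    job = job_cls(working_dir=tempfile.mkdtemp())
    success, logs = job.automate()
    return success, logs
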
@@ -1,7 +1,6 @@
 # pylint: disable=too-few-public-methods
 
 import builtins
-import datetime
 from typing import Dict, List, Union
 
 from flask import current_app

@@ -1,27 +1,28 @@
-from abc import ABCMeta, abstractmethod
 import os
-from typing import Tuple, Optional, Any
+from typing import Tuple, Any, Optional
 
 import jinja2
 
 from app import app
 
 
-class BaseAutomation(metaclass=ABCMeta):
+class BaseAutomation():
     short_name: str = "base"
     description: str = "Abstract base automation."
     frequency: int
+    working_dir: Optional[str]
 
     """
     The short name of the automation provider. This is used as an opaque token throughout
     the portal system.
     """
 
-    @abstractmethod
-    def automate(self, working_dir: str, full: bool = False) -> Tuple[bool, str]:
+    def __init__(self, working_dir: Optional[str] = None):
+        super().__init__()
+        self.working_dir = working_dir
+
+    def automate(self, full: bool = False) -> Tuple[bool, str]:
         raise NotImplementedError()
 
-    def tmpl_write(self, filename: str, template: str, working_dir: str, **kwargs: Any) -> None:
+    def tmpl_write(self, filename: str, template: str, **kwargs: Any) -> None:
         """
         Write a Jinja2 template to the working directory for use by an automation module.
 
@@ -31,6 +32,8 @@ class BaseAutomation(metaclass=ABCMeta):
         :param kwargs: variables for use with the template
         :return: None
         """
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         tmpl = jinja2.Template(template)
-        with open(os.path.join(working_dir, filename), 'w', encoding="utf-8") as tfconf:
+        with open(os.path.join(self.working_dir, filename), 'w', encoding="utf-8") as tfconf:
             tfconf.write(tmpl.render(**kwargs))

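BaseAutomation now keeps the working directory as instance state: it is optionally accepted by __init__, stored on self.working_dir, and tmpl_write() resolves it internally, raising RuntimeError when it was never set. A minimal sketch of a subclass under this contract follows; the class name, template, and attribute values are invented for illustration, and it assumes the BaseAutomation patched above is in scope.

import tempfile
from typing import Tuple


class EchoAutomation(BaseAutomation):  # BaseAutomation as patched in the hunks above
    short_name = "echo"
    description = "Example automation that writes one templated file."
    frequency = 60

    def automate(self, full: bool = False) -> Tuple[bool, str]:
        # tmpl_write() now reads self.working_dir instead of taking a directory argument.
        self.tmpl_write("hello.txt", "Hello {{ name }}!", name="world")
        return True, "wrote hello.txt"


# Construct with a directory, then run without passing one:
job = EchoAutomation(working_dir=tempfile.mkdtemp())
job.automate()

# Constructing without a directory defers the error to first use:
try:
    EchoAutomation().automate()
except RuntimeError as err:
    print(err)  # "No working directory specified."
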
@@ -43,10 +43,11 @@ class ListAutomation(TerraformAutomation):
     in the templating of the Terraform configuration.
     """
 
-    def tf_generate(self, working_dir) -> None:
+    def tf_generate(self) -> None:
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         self.tf_write(
             self.template,
-            working_dir,
             lists=MirrorList.query.filter(
                 MirrorList.destroyed.is_(None),
                 MirrorList.provider == self.provider,
@@ -66,9 +67,9 @@ class ListAutomation(TerraformAutomation):
         for pool in Pool.query.filter(Pool.destroyed.is_(None)).all():
             for key, formatter in lists.items():
                 for obfuscate in [True, False]:
-                    with open(os.path.join(working_dir, f"{key}.{pool.pool_name}{'.jsno' if obfuscate else '.json'}"),
+                    with open(os.path.join(self.working_dir, f"{key}.{pool.pool_name}{'.jsno' if obfuscate else '.json'}"),
                               'w', encoding="utf-8") as out:
                         out.write(json_encode(formatter(pool), obfuscate))
-                    with open(os.path.join(working_dir, f"{key}.{pool.pool_name}{'.jso' if obfuscate else '.js'}"),
+                    with open(os.path.join(self.working_dir, f"{key}.{pool.pool_name}{'.jso' if obfuscate else '.js'}"),
                               'w', encoding="utf-8") as out:
                         out.write(javascript_encode(formatter(pool), obfuscate))

@@ -14,28 +14,29 @@ from app.terraform.proxy.azure_cdn import ProxyAzureCdnAutomation
 from app.terraform.proxy.cloudfront import ProxyCloudfrontAutomation
 from app.terraform.proxy.fastly import ProxyFastlyAutomation
 
-PROXY_PROVIDERS = {p.provider: p for p in [  # In order of preference
+PROXY_PROVIDERS = {p.provider: p for p in [  # type: ignore[attr-defined]
+    # In order of preference
     ProxyCloudfrontAutomation,
     ProxyFastlyAutomation,
     ProxyAzureCdnAutomation
-] if p.enabled}  # type: ignore[truthy-function]
+] if p.enabled}  # type: ignore[attr-defined]
 
 
 def create_proxy(pool: Pool, origin: Origin) -> bool:
     for desperate in [False, True]:
         for provider in PROXY_PROVIDERS.values():
-            if origin.smart and not provider.smart_proxies:
+            if origin.smart and not provider.smart_proxies:  # type: ignore[attr-defined]
                 continue  # This origin cannot be supported on this provider
-            if provider.smart_proxies and not (desperate or origin.smart):
+            if provider.smart_proxies and not (desperate or origin.smart):  # type: ignore[attr-defined]
                 continue
-            next_subgroup = provider.next_subgroup(origin.group_id)
+            next_subgroup = provider.next_subgroup(origin.group_id)  # type: ignore[attr-defined]
             if next_subgroup is None:
                 continue
             proxy = Proxy()
             proxy.pool_id = pool.id
             proxy.origin_id = origin.id
-            proxy.provider = provider.provider
-            proxy.psg = provider.next_subgroup(origin.group_id)
+            proxy.provider = provider.provider  # type: ignore[attr-defined]
+            proxy.psg = provider.next_subgroup(origin.group_id)  # type: ignore[attr-defined]
             # The random usage below is good enough for its purpose: to create a slug that
             # hasn't been used recently.
             proxy.slug = tldextract.extract(origin.domain_name).domain[:5] + ''.join(
@@ -71,7 +72,7 @@ class ProxyMetaAutomation(BaseAutomation):
         for proxy in proxies:
             if proxy.origin.destroyed is not None:
                 proxy.deprecate(reason="origin_destroyed")
-            if proxy.origin.smart and not PROXY_PROVIDERS[proxy.provider].smart_proxies:
+            if proxy.origin.smart and not PROXY_PROVIDERS[proxy.provider].smart_proxies:  # type: ignore[attr-defined]
                 proxy.deprecate(reason="not_smart_enough")
         # Create new proxies
         pools = Pool.query.all()

@@ -27,7 +27,7 @@ class TerraformAutomation(BaseAutomation):
     Short name for the provider used by this module.
     """
 
-    def automate(self, working_dir: str, full: bool = False) -> Tuple[bool, str]:
+    def automate(self, full: bool = False) -> Tuple[bool, str]:
         """
         Runs the Terraform automation module. The run will follow these steps:
 
@@ -46,10 +46,12 @@ class TerraformAutomation(BaseAutomation):
         :return: success status and Terraform apply logs
         """
 
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         prehook_result = self.tf_prehook()  # pylint: disable=assignment-from-no-return
-        self.tf_generate(working_dir)
-        self.tf_init(working_dir)
-        returncode, logs = self.tf_apply(working_dir, refresh=self.always_refresh or full)
+        self.tf_generate()
+        self.tf_init()
+        returncode, logs = self.tf_apply(self.working_dir, refresh=self.always_refresh or full)
         self.tf_posthook(prehook_result=prehook_result)
         return returncode == 0, logs
 
@@ -59,6 +61,8 @@ class TerraformAutomation(BaseAutomation):
                  lock_timeout: int = 15) -> Tuple[int, str]:
         if not parallelism:
             parallelism = self.parallelism
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         # The following subprocess call takes external input, but is providing
         # the argument list as an array such that argument injection would be
         # ineffective.
@@ -76,11 +80,12 @@ class TerraformAutomation(BaseAutomation):
         return tfcmd.returncode, tfcmd.stdout.decode('utf-8')
 
     @abstractmethod
-    def tf_generate(self, working_dir) -> None:
+    def tf_generate(self) -> None:
         raise NotImplementedError()
 
-    def tf_init(self, working_dir: str, *,
-                lock_timeout: int = 15) -> None:
+    def tf_init(self, *, lock_timeout: int = 15) -> None:
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         # The init command does not support JSON output.
         # The following subprocess call takes external input, but is providing
         # the argument list as an array such that argument injection would be
@@ -90,20 +95,24 @@ class TerraformAutomation(BaseAutomation):
                 'init',
                 f'-lock-timeout={str(lock_timeout)}m',
             ],
-            cwd=working_dir)
+            cwd=self.working_dir)
 
-    def tf_output(self, working_dir) -> Any:
+    def tf_output(self) -> Any:
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         # The following subprocess call does not take any user input.
         tfcmd = subprocess.run(  # nosec
             ['terraform', 'output', '-json'],
-            cwd=working_dir,
+            cwd=self.working_dir,
             stdout=subprocess.PIPE)
         return json.loads(tfcmd.stdout)
 
-    def tf_plan(self, working_dir: str, *,
+    def tf_plan(self, *,
                 refresh: bool = True,
                 parallelism: Optional[int] = None,
                 lock_timeout: int = 15) -> Tuple[int, str]:
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         # The following subprocess call takes external input, but is providing
         # the argument list as an array such that argument injection would be
         # ineffective.
@@ -115,7 +124,7 @@ class TerraformAutomation(BaseAutomation):
                 f'-parallelism={str(parallelism)}',
                 f'-lock-timeout={str(lock_timeout)}m',
             ],
-            cwd=working_dir)
+            cwd=self.working_dir)
         return tfcmd.returncode, tfcmd.stdout.decode('utf-8')
 
     def tf_posthook(self, *, prehook_result: Any = None) -> None:
@@ -140,13 +149,15 @@ class TerraformAutomation(BaseAutomation):
         :return: state that is useful to :func:`tf_posthook`, if required
         """
 
-    def tf_show(self, working_dir) -> Any:
+    def tf_show(self) -> Any:
         # This subprocess call doesn't take any user input.
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         terraform = subprocess.run(  # nosec
             ['terraform', 'show', '-json'],
-            cwd=working_dir,
+            cwd=self.working_dir,
             stdout=subprocess.PIPE)
         return json.loads(terraform.stdout)
 
-    def tf_write(self, template: str, working_dir: str, **kwargs: Any) -> None:
-        self.tmpl_write("main.tf", template, working_dir, **kwargs)
+    def tf_write(self, template: str, **kwargs: Any) -> None:
+        self.tmpl_write("main.tf", template, **kwargs)

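The Terraform helpers now read self.working_dir and guard against it being unset before shelling out, so a provider module implements the zero-argument tf_generate() hook and relies on the directory injected at construction. A minimal sketch under the new signatures follows; the class name, attribute values, and template are invented for illustration, other provider attributes are omitted, and it assumes the TerraformAutomation patched above is in scope.

import tempfile


class ExampleTerraformAutomation(TerraformAutomation):  # TerraformAutomation as patched above
    short_name = "example"
    description = "Illustrative Terraform provider module."
    provider = "example"
    frequency = 240
    template = 'output "greeting" { value = "hello" }'  # hypothetical Terraform config

    def tf_generate(self) -> None:
        # tf_write() no longer takes a directory argument; it delegates to
        # tmpl_write(), which renders main.tf into self.working_dir.
        self.tf_write(self.template)


# The working directory is injected once, at construction time:
job = ExampleTerraformAutomation(working_dir=tempfile.mkdtemp())
job.tf_generate()  # writes main.tf into the injected directory
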
@@ -3,3 +3,4 @@ types-flask-sqlalchemy
 types-requests
 types-PyYAML
 types-python-dateutil
+types-sqlalchemy