automate: move working_dir to be set in constructor

parent efdaad977a
commit 109851745b

7 changed files with 59 additions and 43 deletions
@@ -38,7 +38,7 @@ from app.terraform.proxy.cloudfront import ProxyCloudfrontAutomation
 from app.terraform.proxy.fastly import ProxyFastlyAutomation
 
 jobs = {
-    x.short_name: x
+    x.short_name: x  # type: ignore[attr-defined]
     for x in [
         AlarmEotkAwsAutomation,
         AlarmProxyAzureCdnAutomation,
@@ -78,7 +78,7 @@ def run_all(**kwargs: bool) -> None:
     :return: None
     """
     for job in jobs.values():
-        run_job(job, **kwargs)  # type: ignore
+        run_job(job, **kwargs)
 
 
 def run_job(job_cls: Type[BaseAutomation], *,
@@ -107,10 +107,10 @@ def run_job(job_cls: Type[BaseAutomation], *,
         return
     automation.state = AutomationState.RUNNING
     db.session.commit()
-    job: BaseAutomation = job_cls()
     try:
+        job: BaseAutomation = job_cls()
         tempdir_path = tempfile.mkdtemp()
-        success, logs = job.automate(tempdir_path)
+        success, logs = job.automate()
         # We want to catch any and all exceptions that would cause problems here, because
         # the error handling process isn't really handling the error, but rather causing it
         # to be logged for investigation. Catching more specific exceptions would just mean that
@@ -162,7 +162,7 @@ class AutomateCliHandler(BaseCliHandler):
     def run(self) -> None:
         with app.app_context():
             if self.args.job:
-                run_job(jobs[self.args.job],  # type: ignore
+                run_job(jobs[self.args.job],
                         force=self.args.force,
                         ignore_schedule=self.args.ignore_schedule)
             elif self.args.all:
@@ -1,7 +1,6 @@
 # pylint: disable=too-few-public-methods
 
 import builtins
-import datetime
 from typing import Dict, List, Union
 
 from flask import current_app
@@ -1,27 +1,28 @@
-from abc import ABCMeta, abstractmethod
 import os
-from typing import Tuple, Optional, Any
+from typing import Tuple, Any, Optional
 
 import jinja2
 
-from app import app
 
-class BaseAutomation(metaclass=ABCMeta):
+class BaseAutomation():
     short_name: str = "base"
     description: str = "Abstract base automation."
     frequency: int
+    working_dir: Optional[str]
 
     """
    The short name of the automation provider. This is used as an opaque token throughout
    the portal system.
    """
 
-    @abstractmethod
-    def automate(self, working_dir: str, full: bool = False) -> Tuple[bool, str]:
+    def __init__(self, working_dir: Optional[str] = None):
+        super().__init__()
+        self.working_dir = working_dir
+
+    def automate(self, full: bool = False) -> Tuple[bool, str]:
         raise NotImplementedError()
 
-    def tmpl_write(self, filename: str, template: str, working_dir: str, **kwargs: Any) -> None:
+    def tmpl_write(self, filename: str, template: str, **kwargs: Any) -> None:
         """
         Write a Jinja2 template to the working directory for use by an automation module.
 
@@ -31,6 +32,8 @@ class BaseAutomation(metaclass=ABCMeta):
         :param kwargs: variables for use with the template
         :return: None
         """
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         tmpl = jinja2.Template(template)
-        with open(os.path.join(working_dir, filename), 'w', encoding="utf-8") as tfconf:
+        with open(os.path.join(self.working_dir, filename), 'w', encoding="utf-8") as tfconf:
             tfconf.write(tmpl.render(**kwargs))
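
Taken together, the hunks above change the BaseAutomation contract: the working directory is now supplied once, to the constructor, and stored on the instance, while automate() and tmpl_write() no longer take it as a parameter and instead guard on self.working_dir. A minimal usage sketch of the new shape, using a hypothetical ExampleAutomation subclass and a throwaway temporary directory (neither appears in this commit; the import of BaseAutomation from its module is omitted because the module path is not shown in this diff):

import tempfile
from typing import Tuple


class ExampleAutomation(BaseAutomation):
    short_name = "example"
    description = "Hypothetical automation used only to illustrate the new API."

    def automate(self, full: bool = False) -> Tuple[bool, str]:
        # self.working_dir was set by BaseAutomation.__init__ and may be None,
        # in which case tmpl_write() raises RuntimeError.
        self.tmpl_write("example.conf", "value = {{ value }}", value=42)
        return True, "wrote example.conf"


# The working directory moves from automate() to the constructor:
job = ExampleAutomation(working_dir=tempfile.mkdtemp())
success, logs = job.automate()
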
@@ -43,10 +43,11 @@ class ListAutomation(TerraformAutomation):
     in the templating of the Terraform configuration.
     """
 
-    def tf_generate(self, working_dir) -> None:
+    def tf_generate(self) -> None:
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         self.tf_write(
             self.template,
-            working_dir,
             lists=MirrorList.query.filter(
                 MirrorList.destroyed.is_(None),
                 MirrorList.provider == self.provider,
@@ -66,9 +67,9 @@ class ListAutomation(TerraformAutomation):
         for pool in Pool.query.filter(Pool.destroyed.is_(None)).all():
             for key, formatter in lists.items():
                 for obfuscate in [True, False]:
-                    with open(os.path.join(working_dir, f"{key}.{pool.pool_name}{'.jsno' if obfuscate else '.json'}"),
+                    with open(os.path.join(self.working_dir, f"{key}.{pool.pool_name}{'.jsno' if obfuscate else '.json'}"),
                               'w', encoding="utf-8") as out:
                         out.write(json_encode(formatter(pool), obfuscate))
-                    with open(os.path.join(working_dir, f"{key}.{pool.pool_name}{'.jso' if obfuscate else '.js'}"),
+                    with open(os.path.join(self.working_dir, f"{key}.{pool.pool_name}{'.jso' if obfuscate else '.js'}"),
                               'w', encoding="utf-8") as out:
                         out.write(javascript_encode(formatter(pool), obfuscate))
@@ -14,28 +14,29 @@ from app.terraform.proxy.azure_cdn import ProxyAzureCdnAutomation
 from app.terraform.proxy.cloudfront import ProxyCloudfrontAutomation
 from app.terraform.proxy.fastly import ProxyFastlyAutomation
 
-PROXY_PROVIDERS = {p.provider: p for p in [  # In order of preference
+PROXY_PROVIDERS = {p.provider: p for p in [  # type: ignore[attr-defined]
+    # In order of preference
     ProxyCloudfrontAutomation,
     ProxyFastlyAutomation,
     ProxyAzureCdnAutomation
-] if p.enabled}  # type: ignore[truthy-function]
+] if p.enabled}  # type: ignore[attr-defined]
 
 
 def create_proxy(pool: Pool, origin: Origin) -> bool:
     for desperate in [False, True]:
         for provider in PROXY_PROVIDERS.values():
-            if origin.smart and not provider.smart_proxies:
+            if origin.smart and not provider.smart_proxies:  # type: ignore[attr-defined]
                 continue  # This origin cannot be supported on this provider
-            if provider.smart_proxies and not (desperate or origin.smart):
+            if provider.smart_proxies and not (desperate or origin.smart):  # type: ignore[attr-defined]
                 continue
-            next_subgroup = provider.next_subgroup(origin.group_id)
+            next_subgroup = provider.next_subgroup(origin.group_id)  # type: ignore[attr-defined]
             if next_subgroup is None:
                 continue
             proxy = Proxy()
             proxy.pool_id = pool.id
             proxy.origin_id = origin.id
-            proxy.provider = provider.provider
-            proxy.psg = provider.next_subgroup(origin.group_id)
+            proxy.provider = provider.provider  # type: ignore[attr-defined]
+            proxy.psg = provider.next_subgroup(origin.group_id)  # type: ignore[attr-defined]
             # The random usage below is good enough for its purpose: to create a slug that
             # hasn't been used recently.
             proxy.slug = tldextract.extract(origin.domain_name).domain[:5] + ''.join(
@@ -71,7 +72,7 @@ class ProxyMetaAutomation(BaseAutomation):
         for proxy in proxies:
             if proxy.origin.destroyed is not None:
                 proxy.deprecate(reason="origin_destroyed")
-            if proxy.origin.smart and not PROXY_PROVIDERS[proxy.provider].smart_proxies:
+            if proxy.origin.smart and not PROXY_PROVIDERS[proxy.provider].smart_proxies:  # type: ignore[attr-defined]
                 proxy.deprecate(reason="not_smart_enough")
         # Create new proxies
         pools = Pool.query.all()
@@ -27,7 +27,7 @@ class TerraformAutomation(BaseAutomation):
     Short name for the provider used by this module.
     """
 
-    def automate(self, working_dir: str, full: bool = False) -> Tuple[bool, str]:
+    def automate(self, full: bool = False) -> Tuple[bool, str]:
         """
         Runs the Terraform automation module. The run will follow these steps:
 
@@ -46,10 +46,12 @@ class TerraformAutomation(BaseAutomation):
         :return: success status and Terraform apply logs
         """
 
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         prehook_result = self.tf_prehook()  # pylint: disable=assignment-from-no-return
-        self.tf_generate(working_dir)
-        self.tf_init(working_dir)
-        returncode, logs = self.tf_apply(working_dir, refresh=self.always_refresh or full)
+        self.tf_generate()
+        self.tf_init()
+        returncode, logs = self.tf_apply(self.working_dir, refresh=self.always_refresh or full)
         self.tf_posthook(prehook_result=prehook_result)
         return returncode == 0, logs
 
@@ -59,6 +61,8 @@ class TerraformAutomation(BaseAutomation):
                  lock_timeout: int = 15) -> Tuple[int, str]:
         if not parallelism:
             parallelism = self.parallelism
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         # The following subprocess call takes external input, but is providing
         # the argument list as an array such that argument injection would be
         # ineffective.
@@ -76,11 +80,12 @@ class TerraformAutomation(BaseAutomation):
         return tfcmd.returncode, tfcmd.stdout.decode('utf-8')
 
     @abstractmethod
-    def tf_generate(self, working_dir) -> None:
+    def tf_generate(self) -> None:
         raise NotImplementedError()
 
-    def tf_init(self, working_dir: str, *,
-                lock_timeout: int = 15) -> None:
+    def tf_init(self, *, lock_timeout: int = 15) -> None:
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         # The init command does not support JSON output.
         # The following subprocess call takes external input, but is providing
         # the argument list as an array such that argument injection would be
@@ -90,20 +95,24 @@ class TerraformAutomation(BaseAutomation):
                 'init',
                 f'-lock-timeout={str(lock_timeout)}m',
             ],
-            cwd=working_dir)
+            cwd=self.working_dir)
 
-    def tf_output(self, working_dir) -> Any:
+    def tf_output(self) -> Any:
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         # The following subprocess call does not take any user input.
         tfcmd = subprocess.run(  # nosec
             ['terraform', 'output', '-json'],
-            cwd=working_dir,
+            cwd=self.working_dir,
             stdout=subprocess.PIPE)
         return json.loads(tfcmd.stdout)
 
-    def tf_plan(self, working_dir: str, *,
+    def tf_plan(self, *,
                 refresh: bool = True,
                 parallelism: Optional[int] = None,
                 lock_timeout: int = 15) -> Tuple[int, str]:
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         # The following subprocess call takes external input, but is providing
         # the argument list as an array such that argument injection would be
         # ineffective.
@@ -115,7 +124,7 @@ class TerraformAutomation(BaseAutomation):
                 f'-parallelism={str(parallelism)}',
                 f'-lock-timeout={str(lock_timeout)}m',
             ],
-            cwd=working_dir)
+            cwd=self.working_dir)
         return tfcmd.returncode, tfcmd.stdout.decode('utf-8')
 
     def tf_posthook(self, *, prehook_result: Any = None) -> None:
@@ -140,13 +149,15 @@ class TerraformAutomation(BaseAutomation):
         :return: state that is useful to :func:`tf_posthook`, if required
         """
 
-    def tf_show(self, working_dir) -> Any:
+    def tf_show(self) -> Any:
         # This subprocess call doesn't take any user input.
+        if not self.working_dir:
+            raise RuntimeError("No working directory specified.")
         terraform = subprocess.run(  # nosec
             ['terraform', 'show', '-json'],
-            cwd=working_dir,
+            cwd=self.working_dir,
             stdout=subprocess.PIPE)
         return json.loads(terraform.stdout)
 
-    def tf_write(self, template: str, working_dir: str, **kwargs: Any) -> None:
-        self.tmpl_write("main.tf", template, working_dir, **kwargs)
+    def tf_write(self, template: str, **kwargs: Any) -> None:
+        self.tmpl_write("main.tf", template, **kwargs)
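
The same pattern runs through TerraformAutomation above: each method that previously received working_dir now reads self.working_dir, guards on it with a RuntimeError when unset, and passes it as cwd to the terraform subprocess calls. A rough sketch of a concrete subclass under the new API, assuming a hypothetical ExampleTerraformAutomation (not part of this commit) and default values on TerraformAutomation for anything not shown in this diff, such as always_refresh and parallelism; the import of TerraformAutomation from its module is omitted because the module path is not shown here:

import tempfile


class ExampleTerraformAutomation(TerraformAutomation):
    short_name = "example"
    description = "Hypothetical Terraform automation used only to illustrate the new API."

    def tf_generate(self) -> None:
        # tf_write() renders a Jinja2 template into main.tf inside self.working_dir.
        self.tf_write('provider "null" {}')


# automate() now runs generate/init/apply entirely inside the directory given here
# (it shells out to the terraform binary, so this is illustrative only):
job = ExampleTerraformAutomation(working_dir=tempfile.mkdtemp())
success, logs = job.automate()
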
@@ -3,3 +3,4 @@ types-flask-sqlalchemy
 types-requests
 types-PyYAML
 types-python-dateutil
+types-sqlalchemy