Use disposable temporary directories to run automation jobs
parent 9e5280280f
commit 0ebfe28b89
5 changed files with 36 additions and 42 deletions
@@ -1,5 +1,7 @@
 import datetime
 import logging
+import shutil
+import tempfile
 from traceback import TracebackException
 from typing import Type
 
@@ -107,7 +109,8 @@ def run_job(job_cls: Type[BaseAutomation], *,
     db.session.commit()
     job: BaseAutomation = job_cls()
     try:
-        success, logs = job.automate()
+        tempdir_path = tempfile.mkdtemp()
+        success, logs = job.automate(tempdir_path)
     # We want to catch any and all exceptions that would cause problems here, because
     # the error handling process isn't really handling the error, but rather causing it
     # to be logged for investigation. Catching more specific exceptions would just mean that
@@ -120,6 +123,7 @@ def run_job(job_cls: Type[BaseAutomation], *,
         automation.state = AutomationState.IDLE
         automation.next_run = datetime.datetime.utcnow() + datetime.timedelta(
            minutes=getattr(job, "frequency", 7))
+        shutil.rmtree(tempdir_path)
     else:
         automation.state = AutomationState.ERROR
         automation.enabled = False
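
The run_job() change above creates a fresh directory with tempfile.mkdtemp() for every run, hands it to the automation, and removes it with shutil.rmtree() only when the run reports success, so the files from a failed run stay on disk for inspection. A minimal standalone sketch of that lifecycle (the job object here is hypothetical, not part of this commit):

import shutil
import tempfile

def run_once(job):
    # Create a private, disposable working directory for this run only.
    tempdir_path = tempfile.mkdtemp()
    success, logs = job.automate(tempdir_path)
    if success:
        # Remove the directory after a clean run; a failed run leaves its
        # files behind so they can be inspected.
        shutil.rmtree(tempdir_path)
    return success, logs

tempfile.TemporaryDirectory() would clean up unconditionally; the explicit mkdtemp()/rmtree() pair used in the diff preserves the directory whenever automate() fails or raises.
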
@@ -18,34 +18,19 @@ class BaseAutomation(metaclass=ABCMeta):
     """
 
     @abstractmethod
-    def automate(self, full: bool = False) -> Tuple[bool, str]:
+    def automate(self, working_dir: str, full: bool = False) -> Tuple[bool, str]:
         raise NotImplementedError()
 
-    def working_directory(self, filename: Optional[str] = None) -> str:
-        """
-        Provides a filesystem path that can be used during the automation run.
-        This is currently a persistent path, but this should not be relied upon
-        as future versions may use disposable temporary paths instead. State that
-        is needed in subsequent runs should be stored elsewhere.
-
-        :param filename: the filename inside the working directory to create a path for
-        :return: filesystem path for that filename
-        """
-        return os.path.join(
-            app.config['TERRAFORM_DIRECTORY'],
-            self.short_name or self.__class__.__name__.lower(),
-            filename or ""
-        )
-
-    def tmpl_write(self, filename: str, template: str, **kwargs: Any) -> None:
+    def tmpl_write(self, filename: str, template: str, working_dir: str, **kwargs: Any) -> None:
         """
         Write a Jinja2 template to the working directory for use by an automation module.
 
         :param filename: filename to write to
         :param template: Jinja2 template
+        :param working_dir: temporary directory for running the Terraform automation
         :param kwargs: variables for use with the template
         :return: None
         """
         tmpl = jinja2.Template(template)
-        with open(self.working_directory(filename), 'w', encoding="utf-8") as tfconf:
+        with open(os.path.join(working_dir, filename), 'w', encoding="utf-8") as tfconf:
             tfconf.write(tmpl.render(**kwargs))
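
BaseAutomation.working_directory() is removed entirely: modules no longer derive a persistent path from TERRAFORM_DIRECTORY, they render into whatever directory the caller passes in. A standalone sketch of the new tmpl_write() shape, assuming Jinja2 is installed (the template text and variables are made up for illustration):

import os
import tempfile

import jinja2

def tmpl_write(filename: str, template: str, working_dir: str, **kwargs) -> None:
    # Render the Jinja2 template and write it beneath the caller-supplied
    # directory, mirroring the new signature instead of a fixed per-class path.
    tmpl = jinja2.Template(template)
    with open(os.path.join(working_dir, filename), 'w', encoding="utf-8") as tfconf:
        tfconf.write(tmpl.render(**kwargs))

working_dir = tempfile.mkdtemp()
tmpl_write("main.tf", "# generated for {{ provider }}\n", working_dir, provider="example")
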
@@ -43,9 +43,10 @@ class ListAutomation(TerraformAutomation):
     in the templating of the Terraform configuration.
     """
 
-    def tf_generate(self) -> None:
+    def tf_generate(self, working_dir) -> None:
         self.tf_write(
             self.template,
+            working_dir,
             lists=MirrorList.query.filter(
                 MirrorList.destroyed.is_(None),
                 MirrorList.provider == self.provider,
@@ -65,9 +66,9 @@ class ListAutomation(TerraformAutomation):
         for pool in Pool.query.filter(Pool.destroyed.is_(None)).all():
             for key, formatter in lists.items():
                 for obfuscate in [True, False]:
-                    with open(self.working_directory(f"{key}.{pool.pool_name}{'.jsno' if obfuscate else '.json'}"),
+                    with open(os.path.join(working_dir, f"{key}.{pool.pool_name}{'.jsno' if obfuscate else '.json'}"),
                               'w', encoding="utf-8") as out:
                         out.write(json_encode(formatter(pool), obfuscate))
-                    with open(self.working_directory(f"{key}.{pool.pool_name}{'.jso' if obfuscate else '.js'}"),
+                    with open(os.path.join(working_dir, f"{key}.{pool.pool_name}{'.jso' if obfuscate else '.js'}"),
                               'w', encoding="utf-8") as out:
                         out.write(javascript_encode(formatter(pool), obfuscate))
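
ListAutomation now builds every output path with os.path.join(working_dir, ...) rather than self.working_directory(...); the .jsno/.jso names for the obfuscated variants are carried over unchanged. A small sketch of the same path pattern with made-up pool data (json_encode and the MirrorList/Pool models are not reproduced here):

import json
import os
import tempfile

working_dir = tempfile.mkdtemp()
pools = {"pool1": ["mirror-a.example.com", "mirror-b.example.com"]}  # hypothetical data

for pool_name, mirrors in pools.items():
    # One artefact per pool, written into the disposable per-run directory.
    with open(os.path.join(working_dir, f"mirrors.{pool_name}.json"), 'w', encoding="utf-8") as out:
        out.write(json.dumps(mirrors))
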
@@ -27,7 +27,7 @@ class TerraformAutomation(BaseAutomation):
     Short name for the provider used by this module.
     """
 
-    def automate(self, full: bool = False) -> Tuple[bool, str]:
+    def automate(self, working_dir: str, full: bool = False) -> Tuple[bool, str]:
         """
         Runs the Terraform automation module. The run will follow these steps:
 
@@ -41,17 +41,19 @@ class TerraformAutomation(BaseAutomation):
         5. The :func:`tf_posthook` hook is run.
         6. The logs from the apply step are returned as a string.
 
+        :param working_dir: temporary directory used to run the automation
         :param full: include a Terraform refresh in the automation module run
         :return: success status and Terraform apply logs
         """
+
         prehook_result = self.tf_prehook()  # pylint: disable=assignment-from-no-return
-        self.tf_generate()
-        self.tf_init()
-        returncode, logs = self.tf_apply(refresh=self.always_refresh or full)
+        self.tf_generate(working_dir)
+        self.tf_init(working_dir)
+        returncode, logs = self.tf_apply(working_dir, refresh=self.always_refresh or full)
         self.tf_posthook(prehook_result=prehook_result)
         return returncode == 0, logs
 
-    def tf_apply(self, *,
+    def tf_apply(self, working_dir: str, *,
                  refresh: bool = True,
                  parallelism: Optional[int] = None,
                  lock_timeout: int = 15) -> Tuple[int, str]:
@@ -69,15 +71,15 @@ class TerraformAutomation(BaseAutomation):
             f'-parallelism={str(parallelism)}',
             f'-lock-timeout={str(lock_timeout)}m',
         ],
-            cwd=self.working_directory(),
+            cwd=working_dir,
             stdout=subprocess.PIPE)
         return tfcmd.returncode, tfcmd.stdout.decode('utf-8')
 
     @abstractmethod
-    def tf_generate(self) -> None:
+    def tf_generate(self, working_dir) -> None:
         raise NotImplementedError()
 
-    def tf_init(self, *,
+    def tf_init(self, working_dir: str, *,
                 lock_timeout: int = 15) -> None:
         # The init command does not support JSON output.
         # The following subprocess call takes external input, but is providing
@@ -88,17 +90,17 @@ class TerraformAutomation(BaseAutomation):
             'init',
             f'-lock-timeout={str(lock_timeout)}m',
         ],
-            cwd=self.working_directory())
+            cwd=working_dir)
 
-    def tf_output(self) -> Any:
+    def tf_output(self, working_dir) -> Any:
         # The following subprocess call does not take any user input.
         tfcmd = subprocess.run(  # nosec
             ['terraform', 'output', '-json'],
-            cwd=self.working_directory(),
+            cwd=working_dir,
             stdout=subprocess.PIPE)
         return json.loads(tfcmd.stdout)
 
-    def tf_plan(self, *,
+    def tf_plan(self, working_dir: str, *,
                 refresh: bool = True,
                 parallelism: Optional[int] = None,
                 lock_timeout: int = 15) -> Tuple[int, str]:
@@ -113,7 +115,7 @@ class TerraformAutomation(BaseAutomation):
             f'-parallelism={str(parallelism)}',
             f'-lock-timeout={str(lock_timeout)}m',
         ],
-            cwd=self.working_directory())
+            cwd=working_dir)
         return tfcmd.returncode, tfcmd.stdout.decode('utf-8')
 
     def tf_posthook(self, *, prehook_result: Any = None) -> None:
@@ -138,13 +140,13 @@ class TerraformAutomation(BaseAutomation):
         :return: state that is useful to :func:`tf_posthook`, if required
         """
 
-    def tf_show(self) -> Any:
+    def tf_show(self, working_dir) -> Any:
         # This subprocess call doesn't take any user input.
         terraform = subprocess.run(  # nosec
             ['terraform', 'show', '-json'],
-            cwd=self.working_directory(),
+            cwd=working_dir,
             stdout=subprocess.PIPE)
         return json.loads(terraform.stdout)
 
-    def tf_write(self, template: str, **kwargs: Any) -> None:
-        self.tmpl_write("main.tf", template, **kwargs)
+    def tf_write(self, template: str, working_dir: str, **kwargs: Any) -> None:
+        self.tmpl_write("main.tf", template, working_dir, **kwargs)
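
Every Terraform invocation in TerraformAutomation now runs with cwd set to the per-run directory, so tf_init, tf_plan, tf_apply, tf_output and tf_show all operate on the freshly generated configuration instead of a shared persistent tree. A cut-down sketch of that subprocess pattern; terraform must be on PATH, and the flags shown are illustrative rather than the exact set the class builds:

import subprocess
import tempfile

working_dir = tempfile.mkdtemp()

# Both commands run inside the disposable directory; cwd= replaces the old
# self.working_directory() lookup.
subprocess.run(['terraform', 'init', '-lock-timeout=15m'], cwd=working_dir)
tfcmd = subprocess.run(['terraform', 'apply', '-auto-approve', '-lock-timeout=15m'],
                       cwd=working_dir, stdout=subprocess.PIPE)
print(tfcmd.returncode, tfcmd.stdout.decode('utf-8'))
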
@@ -15,8 +15,7 @@ SECRET_KEY: iechaj0mun6beih3rooga0mei7eo0iwoal1eeweN
 SSH_PRIVATE_KEY_PATH: /home/bc/.ssh/id_rsa
 SSH_PUBLIC_KEY_PATH: /home/bc/.ssh/id_rsa.pub
 
-# This directory must exist and be writable by the user running the portal.
-TERRAFORM_DIRECTORY: /home/bc/terraform
 TFSTATE_BACKEND: http://127.0.0.1:5000/tfstate
 
 ############################################################################
 # Provider configuration follows. You must activate at least one provider. #
@@ -47,6 +46,9 @@ TERRAFORM_DIRECTORY: /home/bc/terraform
 
 ## GitLab
 #GITLAB_ACTIVATED: true
+#GITLAB_AUTHOR_EMAIL:
+#GITLAB_AUTHOR_NAME:
+#GITLAB_COMMIT_MESSAGE:
 #GITLAB_TOKEN:
 
 ## Hetzner Cloud