lint
parent 6df0083842
commit 10ffdff2c3
14 changed files with 46 additions and 16 deletions
@@ -124,11 +124,11 @@ def run_job(job_cls: Type[BaseAutomation], *,
         trace = TracebackException.from_exception(exc)
         success = False
         logs = "\n".join(trace.format())
-    if success:
+    if job is not None and success:
         automation.state = AutomationState.IDLE
         automation.next_run = datetime.datetime.utcnow() + datetime.timedelta(
             minutes=getattr(job, "frequency", 7))
-    if 'TERRAFORM_DIRECTORY' not in app.config:
+    if 'TERRAFORM_DIRECTORY' not in app.config and working_dir is not None:
         # We used a temporary working directory
         shutil.rmtree(working_dir)
     else:
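The two added guards read like fixes for Optional types rather than behaviour changes: if job and working_dir can be None at this point, a type checker such as mypy will reject shutil.rmtree(working_dir) until the value has been narrowed. A minimal sketch of the narrowing pattern, using a hypothetical cleanup helper rather than the project's run_job:

import shutil
import tempfile
from typing import Optional


def cleanup(working_dir: Optional[str]) -> None:
    # shutil.rmtree() expects a path, so mypy rejects passing Optional[str]
    # directly; the explicit None check narrows the type and also avoids a
    # TypeError when no temporary directory was ever created.
    if working_dir is not None:
        shutil.rmtree(working_dir)


cleanup(tempfile.mkdtemp())  # removes the freshly created directory
cleanup(None)                # safe no-op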
@@ -32,7 +32,7 @@ class Bridgelines(BaseModel):
         title = "Bridgelines Version 1"


-def bridgelines(pool: Pool, *, distribution_method: Optional[str] = None) -> Dict[str, Any]:
+def bridgelines(_, *, distribution_method: Optional[str] = None) -> Dict[str, Any]:
     bridges: Iterable[Bridge] = Bridge.query.filter(
         Bridge.destroyed.is_(None),
         Bridge.deprecated.is_(None),
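Renaming the unused pool parameter to _ silences an unused-argument warning (pylint's W0613) while keeping the signature the callers presumably expect; the same rename appears in the next two hunks. A small illustration of the convention, with hypothetical formatter names:

from typing import Any, Callable, Dict


def mapping_with_pool(pool: Any) -> Dict[str, Any]:
    return {"pool": pool}


def mapping_without_pool(_: Any) -> Dict[str, Any]:
    # The argument is required by the calling convention but unused here;
    # naming it "_" keeps linters quiet without changing the call sites.
    return {"static": True}


formatters: Dict[str, Callable[[Any], Dict[str, Any]]] = {
    "with": mapping_with_pool,
    "without": mapping_without_pool,
}

for formatter in formatters.values():
    print(formatter("example-pool"))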
@@ -35,7 +35,7 @@ class MirrorMapping(BaseModel):
         title = "Mirror Mapping Version 1.1"


-def mirror_mapping(ignored_pool: Pool) -> Dict[str, Union[str, Dict[str, str]]]:
+def mirror_mapping(_) -> Dict[str, Union[str, Dict[str, str]]]:
     return MirrorMapping(
         version="1.1",
         mappings={
@@ -42,7 +42,7 @@ def redirector_pool(pool: Pool) -> RedirectorPool:
     )


-def redirector_data(ignored_pool: Optional[Pool]) -> Dict[str, Union[str, Dict[str, Union[Dict[str, str]]]]]:
+def redirector_data(_) -> Dict[str, Union[str, Dict[str, Union[Dict[str, str]]]]]:
     active_pools = Pool.query.filter(
         Pool.destroyed.is_(None)
     ).all()
@@ -57,7 +57,7 @@ class Webhook(AbstractConfiguration):
             product="notify",
             provider=self.format,
             resource_type="conf",
-            resource_id=self.id
+            resource_id=str(self.id)
         )

     def send(self, text: str) -> None:
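Wrapping the primary key in str() suggests that the BRN constructor annotates resource_id as str, so passing the integer column value fails type checking even if it happened to work at runtime; the same one-character fix recurs in several hunks below. A hedged sketch with a stand-in dataclass, not the project's real BRN from app.brm.brn:

from dataclasses import dataclass


@dataclass(frozen=True)
class Brn:  # stand-in for the real BRN class, which is not shown in this diff
    resource_type: str
    resource_id: str


record_id = 42  # e.g. an integer primary key from the database

# Brn(resource_type="conf", resource_id=record_id) would be flagged by mypy:
# "Argument 'resource_id' has incompatible type 'int'; expected 'str'".
brn = Brn(resource_type="conf", resource_id=str(record_id))
print(brn)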
@@ -51,5 +51,5 @@ class AutomationLogs(AbstractResource):
             product="core",
             provider="",
             resource_type="automationlog",
-            resource_id=self.id
+            resource_id=str(self.id)
         )
@@ -1,6 +1,7 @@
 from datetime import datetime
 from typing import List

+from app.brm.brn import BRN
 from app.extensions import db
 from app.models import AbstractConfiguration

@@ -22,6 +23,16 @@ class Group(AbstractConfiguration):
             "group_name", "eotk"
         ]

+    @property
+    def brn(self) -> BRN:
+        return BRN(
+            group_id=self.id,
+            product="group",
+            provider="",
+            resource_type="group",
+            resource_id=str(self.id)
+        )
+

 class Pool(AbstractConfiguration):
     pool_name = db.Column(db.String(80), unique=True, nullable=False)
@@ -38,6 +49,16 @@ class Pool(AbstractConfiguration):
     lists = db.relationship("MirrorList", back_populates="pool")
     groups = db.relationship("Group", secondary="pool_group", back_populates="pools")

+    @property
+    def brn(self) -> BRN:
+        return BRN(
+            group_id=0,
+            product="pool",
+            provider="",
+            resource_type="pool",
+            resource_id=str(self.pool_name)
+        )
+

 class PoolGroup(db.Model):  # type: ignore[name-defined,misc]
     pool_id = db.Column(db.Integer, db.ForeignKey("pool.id"), primary_key=True)
@@ -98,3 +119,13 @@ class MirrorList(AbstractConfiguration):
         return super().csv_header() + [
             "provider", "format", "container", "branch", "filename"
         ]
+
+    @property
+    def brn(self) -> BRN:
+        return BRN(
+            group_id=0,
+            product="list",
+            provider=self.provider,
+            resource_type="list",
+            resource_id=str(self.id)
+        )
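The brn properties added to Group, Pool and MirrorList in the three hunks above give the configuration models the same addressing interface the resources already expose. A hypothetical sketch of why a uniform property like this is convenient, again with stand-in classes rather than the project's models:

from dataclasses import dataclass
from typing import Protocol


@dataclass(frozen=True)
class Brn:  # stand-in; the real BRN lives in app.brm.brn and is not shown in this diff
    product: str
    resource_type: str
    resource_id: str


class HasBrn(Protocol):
    @property
    def brn(self) -> Brn: ...


def describe(resource: HasBrn) -> str:
    # Anything exposing a .brn property can be logged or audited the same way.
    brn = resource.brn
    return f"{brn.product}/{brn.resource_type}/{brn.resource_id}"


@dataclass
class FakePool:
    pool_name: str

    @property
    def brn(self) -> Brn:
        return Brn(product="pool", resource_type="pool", resource_id=self.pool_name)


print(describe(FakePool("example")))  # pool/pool/example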
@@ -67,7 +67,7 @@ class Proxy(AbstractResource):
             product="mirror",
             provider=self.provider,
             resource_type="proxy",
-            resource_id=self.id
+            resource_id=str(self.id)
         )

     @classmethod
@@ -95,10 +95,10 @@ def describe_brn(s: str) -> ResponseReturnValue:


 @portal.app_template_filter("pretty_json")
-def pretty_json(input: Optional[str]) -> str:
-    if not input:
+def pretty_json(json_str: Optional[str]) -> str:
+    if not json_str:
         return "None"
-    return json.dumps(json.loads(input), indent=2)
+    return json.dumps(json.loads(json_str), indent=2)


 def total_origins_blocked() -> int:
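The rename from input to json_str fixes pylint's redefined-builtin warning and reads better; behaviour is unchanged. A self-contained version of the filter after the change, runnable outside Flask (the @portal.app_template_filter registration is assumed from the decorator above):

import json
from typing import Optional


def pretty_json(json_str: Optional[str]) -> str:
    # "input" shadowed the builtin of the same name in the old version;
    # the logic is otherwise identical.
    if not json_str:
        return "None"
    return json.dumps(json.loads(json_str), indent=2)


print(pretty_json('{"a": [1, 2]}'))
print(pretty_json(None))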
@@ -61,7 +61,6 @@ class BaseAutomation:

         :param filename: filename to write to
         :param template: Jinja2 template
-        :param working_dir: temporary directory for running the Terraform automation
         :param kwargs: variables for use with the template
         :return: None
         """
@@ -1,4 +1,5 @@
 import datetime
+from abc import ABC
 from typing import List

 from dateutil.parser import isoparse
@@ -6,7 +7,7 @@ from dateutil.parser import isoparse
 from app.terraform.block.bridge import BlockBridgeAutomation


-class BlockBridgeReachabilityAutomation(BlockBridgeAutomation):
+class BlockBridgeReachabilityAutomation(BlockBridgeAutomation, ABC):

     _lines: List[str]

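Declaring the class as BlockBridgeAutomation, ABC marks this intermediate layer as deliberately abstract, which is presumably why the abc import was added in the previous hunk: concrete per-source subclasses still supply the missing pieces, and linters stop expecting this class to be complete on its own. A minimal sketch of the pattern with hypothetical class names:

from abc import ABC, abstractmethod
from typing import List


class AutomationSketch(ABC):
    @abstractmethod
    def fetch(self) -> List[str]:
        """Fetch raw reachability results."""

    def run(self) -> int:
        return len(self.fetch())


class ReachabilitySketch(AutomationSketch, ABC):
    # Shared parsing would live here; fetch() is still left to concrete
    # per-source subclasses, so the class is explicitly kept abstract.
    _lines: List[str]


class FileSourceSketch(ReachabilitySketch):
    def fetch(self) -> List[str]:
        return ["bridge-1", "bridge-2"]


print(FileSourceSketch().run())  # 2
# ReachabilitySketch() would raise TypeError because fetch() is abstract.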
@@ -67,7 +67,8 @@ class ListAutomation(TerraformAutomation):
         for pool in Pool.query.filter(Pool.destroyed.is_(None)).all():
             for key, formatter in lists.items():
                 for obfuscate in [True, False]:
-                    with open(os.path.join(self.working_dir, f"{key}.{pool.pool_name}{'.jsno' if obfuscate else '.json'}"),
+                    with open(os.path.join(
+                            self.working_dir, f"{key}.{pool.pool_name}{'.jsno' if obfuscate else '.json'}"),
                               'w', encoding="utf-8") as out:
                         out.write(json_encode(formatter(pool), obfuscate))
                     with open(os.path.join(self.working_dir, f"{key}.{pool.pool_name}{'.jso' if obfuscate else '.js'}"),
@@ -41,7 +41,6 @@ class TerraformAutomation(BaseAutomation):
         5. The :func:`tf_posthook` hook is run.
         6. The logs from the apply step are returned as a string.

-        :param working_dir: temporary directory used to run the automation
         :param full: include a Terraform refresh in the automation module run
         :return: success status and Terraform apply logs
         """
@@ -6,7 +6,6 @@ Create Date: 2022-12-20 18:10:19.540534

 """
 import secrets
-from datetime import datetime

 from alembic import op
 import sqlalchemy as sa