lint: reformat python code with black

Iain Learmonth 2024-12-06 18:15:47 +00:00
parent 331beb01b4
commit a406a7974b
88 changed files with 2579 additions and 1608 deletions
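Every hunk below is a mechanical black rewrite: quoting is normalised to double quotes, long calls are exploded to one argument per line with a trailing comma, and slices gain a space before the colon when the bound is an expression (e.g. `[len(prefix) :]`). As a minimal sketch of the before/after (a hypothetical function in the style of this codebase, not a file from this commit; the commit message does not record the exact invocation, but a plain `black .` over the tree produces rewrites of this shape):

import boto3
from flask import current_app as app  # stand-in for this app's own `app` import

# Before: single quotes, arguments aligned under the opening parenthesis.
def get_client(region: str):
    return boto3.client('cloudwatch',
                        aws_access_key_id=app.config['AWS_ACCESS_KEY'],
                        region_name=region)

# After black: double quotes, one argument per line, and a "magic trailing
# comma" that keeps the call exploded on future runs.
def get_client(region: str):
    return boto3.client(
        "cloudwatch",
        aws_access_key_id=app.config["AWS_ACCESS_KEY"],
        region_name=region,
    )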


@@ -12,6 +12,7 @@ class DeterministicZip
Heavily inspired by https://github.com/bboe/deterministic_zip.
"""
zipfile: ZipFile
def __init__(self, filename: str):
@@ -67,15 +68,22 @@ class BaseAutomation:
if not self.working_dir:
raise RuntimeError("No working directory specified.")
tmpl = jinja2.Template(template)
with open(os.path.join(self.working_dir, filename), 'w', encoding="utf-8") as tfconf:
with open(
os.path.join(self.working_dir, filename), "w", encoding="utf-8"
) as tfconf:
tfconf.write(tmpl.render(**kwargs))
def bin_write(self, filename: str, data: bytes, group_id: Optional[int] = None) -> None:
def bin_write(
self, filename: str, data: bytes, group_id: Optional[int] = None
) -> None:
if not self.working_dir:
raise RuntimeError("No working directory specified.")
try:
os.mkdir(os.path.join(self.working_dir, str(group_id)))
except FileExistsError:
pass
with open(os.path.join(self.working_dir, str(group_id) if group_id else "", filename), 'wb') as binfile:
with open(
os.path.join(self.working_dir, str(group_id) if group_id else "", filename),
"wb",
) as binfile:
binfile.write(data)


@@ -13,33 +13,38 @@ from app.terraform import BaseAutomation
def alarms_in_region(region: str, prefix: str, aspect: str) -> None:
cloudwatch = boto3.client('cloudwatch',
aws_access_key_id=app.config['AWS_ACCESS_KEY'],
aws_secret_access_key=app.config['AWS_SECRET_KEY'],
region_name=region)
dist_paginator = cloudwatch.get_paginator('describe_alarms')
cloudwatch = boto3.client(
"cloudwatch",
aws_access_key_id=app.config["AWS_ACCESS_KEY"],
aws_secret_access_key=app.config["AWS_SECRET_KEY"],
region_name=region,
)
dist_paginator = cloudwatch.get_paginator("describe_alarms")
page_iterator = dist_paginator.paginate(AlarmNamePrefix=prefix)
for page in page_iterator:
for cw_alarm in page['MetricAlarms']:
eotk_id = cw_alarm["AlarmName"][len(prefix):].split("-")
group: Optional[Group] = Group.query.filter(func.lower(Group.group_name) == eotk_id[1]).first()
for cw_alarm in page["MetricAlarms"]:
eotk_id = cw_alarm["AlarmName"][len(prefix) :].split("-")
group: Optional[Group] = Group.query.filter(
func.lower(Group.group_name) == eotk_id[1]
).first()
if group is None:
print("Unable to find group for " + cw_alarm['AlarmName'])
print("Unable to find group for " + cw_alarm["AlarmName"])
continue
eotk = Eotk.query.filter(
Eotk.group_id == group.id,
Eotk.region == region
Eotk.group_id == group.id, Eotk.region == region
).first()
if eotk is None:
print("Skipping unknown instance " + cw_alarm['AlarmName'])
print("Skipping unknown instance " + cw_alarm["AlarmName"])
continue
alarm = get_or_create_alarm(eotk.brn, aspect)
if cw_alarm['StateValue'] == "OK":
if cw_alarm["StateValue"] == "OK":
alarm.update_state(AlarmState.OK, "CloudWatch alarm OK")
elif cw_alarm['StateValue'] == "ALARM":
elif cw_alarm["StateValue"] == "ALARM":
alarm.update_state(AlarmState.CRITICAL, "CloudWatch alarm ALARM")
else:
alarm.update_state(AlarmState.UNKNOWN, f"CloudWatch alarm {cw_alarm['StateValue']}")
alarm.update_state(
AlarmState.UNKNOWN, f"CloudWatch alarm {cw_alarm['StateValue']}"
)
class AlarmEotkAwsAutomation(BaseAutomation):


@@ -16,20 +16,19 @@ class AlarmProxyAzureCdnAutomation(BaseAutomation):
def automate(self, full: bool = False) -> Tuple[bool, str]:
credential = ClientSecretCredential(
tenant_id=app.config['AZURE_TENANT_ID'],
client_id=app.config['AZURE_CLIENT_ID'],
client_secret=app.config['AZURE_CLIENT_SECRET'])
client = AlertsManagementClient(
credential,
app.config['AZURE_SUBSCRIPTION_ID']
tenant_id=app.config["AZURE_TENANT_ID"],
client_id=app.config["AZURE_CLIENT_ID"],
client_secret=app.config["AZURE_CLIENT_SECRET"],
)
firing = [x.name[len("bandwidth-out-high-bc-"):]
for x in client.alerts.get_all()
if x.name.startswith("bandwidth-out-high-bc-")
and x.properties.essentials.monitor_condition == "Fired"]
client = AlertsManagementClient(credential, app.config["AZURE_SUBSCRIPTION_ID"])
firing = [
x.name[len("bandwidth-out-high-bc-") :]
for x in client.alerts.get_all()
if x.name.startswith("bandwidth-out-high-bc-")
and x.properties.essentials.monitor_condition == "Fired"
]
for proxy in Proxy.query.filter(
Proxy.provider == "azure_cdn",
Proxy.destroyed.is_(None)
Proxy.provider == "azure_cdn", Proxy.destroyed.is_(None)
):
alarm = get_or_create_alarm(proxy.brn, "bandwidth-out-high")
if proxy.origin.group.group_name.lower() not in firing:


@@ -16,9 +16,8 @@ def _cloudfront_quota() -> None:
# It would be nice to learn this from the Service Quotas API, however
# at the time of writing this comment, the current value for this quota
# is not available from the API. It just doesn't return anything.
max_count = int(current_app.config.get('AWS_CLOUDFRONT_MAX_DISTRIBUTIONS', 200))
deployed_count = len(Proxy.query.filter(
Proxy.destroyed.is_(None)).all())
max_count = int(current_app.config.get("AWS_CLOUDFRONT_MAX_DISTRIBUTIONS", 200))
deployed_count = len(Proxy.query.filter(Proxy.destroyed.is_(None)).all())
message = f"{deployed_count} distributions deployed of {max_count} quota"
alarm = get_or_create_alarm(
BRN(
@@ -26,9 +25,9 @@ def _cloudfront_quota() -> None:
product="mirror",
provider="cloudfront",
resource_type="quota",
resource_id="distributions"
resource_id="distributions",
),
"quota-usage"
"quota-usage",
)
if deployed_count > max_count * 0.9:
alarm.update_state(AlarmState.CRITICAL, message)
@@ -39,26 +38,30 @@ def _cloudfront_quota() -> None:
def _proxy_alarms() -> None:
cloudwatch = boto3.client('cloudwatch',
aws_access_key_id=app.config['AWS_ACCESS_KEY'],
aws_secret_access_key=app.config['AWS_SECRET_KEY'],
region_name='us-east-2')
dist_paginator = cloudwatch.get_paginator('describe_alarms')
cloudwatch = boto3.client(
"cloudwatch",
aws_access_key_id=app.config["AWS_ACCESS_KEY"],
aws_secret_access_key=app.config["AWS_SECRET_KEY"],
region_name="us-east-2",
)
dist_paginator = cloudwatch.get_paginator("describe_alarms")
page_iterator = dist_paginator.paginate(AlarmNamePrefix="bandwidth-out-high-")
for page in page_iterator:
for cw_alarm in page['MetricAlarms']:
dist_id = cw_alarm["AlarmName"][len("bandwidth-out-high-"):]
for cw_alarm in page["MetricAlarms"]:
dist_id = cw_alarm["AlarmName"][len("bandwidth-out-high-") :]
proxy = Proxy.query.filter(Proxy.slug == dist_id).first()
if proxy is None:
print("Skipping unknown proxy " + dist_id)
continue
alarm = get_or_create_alarm(proxy.brn, "bandwidth-out-high")
if cw_alarm['StateValue'] == "OK":
if cw_alarm["StateValue"] == "OK":
alarm.update_state(AlarmState.OK, "CloudWatch alarm OK")
elif cw_alarm['StateValue'] == "ALARM":
elif cw_alarm["StateValue"] == "ALARM":
alarm.update_state(AlarmState.CRITICAL, "CloudWatch alarm ALARM")
else:
alarm.update_state(AlarmState.UNKNOWN, f"CloudWatch alarm {cw_alarm['StateValue']}")
alarm.update_state(
AlarmState.UNKNOWN, f"CloudWatch alarm {cw_alarm['StateValue']}"
)
class AlarmProxyCloudfrontAutomation(BaseAutomation):


@@ -16,39 +16,25 @@ class AlarmProxyHTTPStatusAutomation(BaseAutomation):
frequency = 45
def automate(self, full: bool = False) -> Tuple[bool, str]:
proxies = Proxy.query.filter(
Proxy.destroyed.is_(None)
)
proxies = Proxy.query.filter(Proxy.destroyed.is_(None))
for proxy in proxies:
try:
if proxy.url is None:
continue
r = requests.get(proxy.url,
allow_redirects=False,
timeout=5)
r = requests.get(proxy.url, allow_redirects=False, timeout=5)
r.raise_for_status()
alarm = get_or_create_alarm(proxy.brn, "http-status")
if r.is_redirect:
alarm.update_state(
AlarmState.CRITICAL,
f"{r.status_code} {r.reason}"
AlarmState.CRITICAL, f"{r.status_code} {r.reason}"
)
else:
alarm.update_state(
AlarmState.OK,
f"{r.status_code} {r.reason}"
)
alarm.update_state(AlarmState.OK, f"{r.status_code} {r.reason}")
except requests.HTTPError:
alarm = get_or_create_alarm(proxy.brn, "http-status")
alarm.update_state(
AlarmState.CRITICAL,
f"{r.status_code} {r.reason}"
)
alarm.update_state(AlarmState.CRITICAL, f"{r.status_code} {r.reason}")
except RequestException as e:
alarm = get_or_create_alarm(proxy.brn, "http-status")
alarm.update_state(
AlarmState.CRITICAL,
repr(e)
)
alarm.update_state(AlarmState.CRITICAL, repr(e))
db.session.commit()
return True, ""


@@ -13,33 +13,38 @@ from app.terraform import BaseAutomation
def alarms_in_region(region: str, prefix: str, aspect: str) -> None:
cloudwatch = boto3.client('cloudwatch',
aws_access_key_id=app.config['AWS_ACCESS_KEY'],
aws_secret_access_key=app.config['AWS_SECRET_KEY'],
region_name=region)
dist_paginator = cloudwatch.get_paginator('describe_alarms')
cloudwatch = boto3.client(
"cloudwatch",
aws_access_key_id=app.config["AWS_ACCESS_KEY"],
aws_secret_access_key=app.config["AWS_SECRET_KEY"],
region_name=region,
)
dist_paginator = cloudwatch.get_paginator("describe_alarms")
page_iterator = dist_paginator.paginate(AlarmNamePrefix=prefix)
for page in page_iterator:
for cw_alarm in page['MetricAlarms']:
smart_id = cw_alarm["AlarmName"][len(prefix):].split("-")
group: Optional[Group] = Group.query.filter(func.lower(Group.group_name) == smart_id[1]).first()
for cw_alarm in page["MetricAlarms"]:
smart_id = cw_alarm["AlarmName"][len(prefix) :].split("-")
group: Optional[Group] = Group.query.filter(
func.lower(Group.group_name) == smart_id[1]
).first()
if group is None:
print("Unable to find group for " + cw_alarm['AlarmName'])
print("Unable to find group for " + cw_alarm["AlarmName"])
continue
smart_proxy = SmartProxy.query.filter(
SmartProxy.group_id == group.id,
SmartProxy.region == region
SmartProxy.group_id == group.id, SmartProxy.region == region
).first()
if smart_proxy is None:
print("Skipping unknown instance " + cw_alarm['AlarmName'])
print("Skipping unknown instance " + cw_alarm["AlarmName"])
continue
alarm = get_or_create_alarm(smart_proxy.brn, aspect)
if cw_alarm['StateValue'] == "OK":
if cw_alarm["StateValue"] == "OK":
alarm.update_state(AlarmState.OK, "CloudWatch alarm OK")
elif cw_alarm['StateValue'] == "ALARM":
elif cw_alarm["StateValue"] == "ALARM":
alarm.update_state(AlarmState.CRITICAL, "CloudWatch alarm ALARM")
else:
alarm.update_state(AlarmState.UNKNOWN, f"CloudWatch alarm {cw_alarm['StateValue']}")
alarm.update_state(
AlarmState.UNKNOWN, f"CloudWatch alarm {cw_alarm['StateValue']}"
)
class AlarmSmartAwsAutomation(BaseAutomation):


@@ -16,7 +16,7 @@ def clean_json_response(raw_response: str) -> Dict[str, Any]:
"""
end_index = raw_response.rfind("}")
if end_index != -1:
raw_response = raw_response[:end_index + 1]
raw_response = raw_response[: end_index + 1]
response: Dict[str, Any] = json.loads(raw_response)
return response
@@ -27,20 +27,21 @@ def request_test_now(test_url: str) -> str:
"User-Agent": "bypasscensorship.org",
"Content-Type": "application/json;charset=utf-8",
"Pragma": "no-cache",
"Cache-Control": "no-cache"
"Cache-Control": "no-cache",
}
request_count = 0
while request_count < 180:
params = {
"url": test_url,
"timestamp": str(int(time.time())) # unix timestamp
}
response = requests.post(api_url, params=params, headers=headers, json={}, timeout=30)
params = {"url": test_url, "timestamp": str(int(time.time()))} # unix timestamp
response = requests.post(
api_url, params=params, headers=headers, json={}, timeout=30
)
response_data = clean_json_response(response.text)
print(f"Response: {response_data}")
if "url_test_id" in response_data.get("d", {}):
url_test_id: str = response_data["d"]["url_test_id"]
logging.debug("Test result for %s has test result ID %s", test_url, url_test_id)
logging.debug(
"Test result for %s has test result ID %s", test_url, url_test_id
)
return url_test_id
request_count += 1
time.sleep(2)
@@ -52,13 +53,19 @@ def request_test_result(url_test_id: str) -> int:
headers = {
"User-Agent": "bypasscensorship.org",
"Pragma": "no-cache",
"Cache-Control": "no-cache"
"Cache-Control": "no-cache",
}
response = requests.get(url, headers=headers, timeout=30)
response_data = response.json()
tests = response_data.get("d", [])
non_zero_curl_exit_count: int = sum(1 for test in tests if test.get("curl_exit_value") != "0")
logging.debug("Test result for %s has %s non-zero exit values", url_test_id, non_zero_curl_exit_count)
non_zero_curl_exit_count: int = sum(
1 for test in tests if test.get("curl_exit_value") != "0"
)
logging.debug(
"Test result for %s has %s non-zero exit values",
url_test_id,
non_zero_curl_exit_count,
)
return non_zero_curl_exit_count
@@ -81,7 +88,7 @@ class BlockBlockyAutomation(BlockMirrorAutomation):
Proxy.url.is_not(None),
Proxy.deprecated.is_(None),
Proxy.destroyed.is_(None),
Proxy.pool_id != -1
Proxy.pool_id != -1,
)
.all()
)


@@ -15,7 +15,8 @@ class BlockBridgeScriptzteamAutomation(BlockBridgelinesAutomation):
def fetch(self) -> None:
r = requests.get(
"https://raw.githubusercontent.com/scriptzteam/Tor-Bridges-Collector/main/bridges-obfs4",
timeout=60)
timeout=60,
)
r.encoding = "utf-8"
contents = r.text
self._lines = contents.splitlines()


@@ -24,8 +24,9 @@ class BlockBridgeAutomation(BaseAutomation):
self.hashed_fingerprints = []
super().__init__(*args, **kwargs)
def perform_deprecations(self, ids: List[str], bridge_select_func: Callable[[str], Optional[Bridge]]
) -> List[Tuple[Optional[str], Any, Any]]:
def perform_deprecations(
self, ids: List[str], bridge_select_func: Callable[[str], Optional[Bridge]]
) -> List[Tuple[Optional[str], Any, Any]]:
rotated = []
for id_ in ids:
bridge = bridge_select_func(id_)
@@ -37,7 +38,13 @@ class BlockBridgeAutomation(BaseAutomation):
continue
if bridge.deprecate(reason=self.short_name):
logging.info("Rotated %s", bridge.hashed_fingerprint)
rotated.append((bridge.fingerprint, bridge.cloud_account.provider, bridge.cloud_account.description))
rotated.append(
(
bridge.fingerprint,
bridge.cloud_account.provider,
bridge.cloud_account.description,
)
)
else:
logging.debug("Not rotating a bridge that is already deprecated")
return rotated
@@ -50,15 +57,28 @@ class BlockBridgeAutomation(BaseAutomation):
rotated = []
rotated.extend(self.perform_deprecations(self.ips, get_bridge_by_ip))
logging.debug("Blocked by IP")
rotated.extend(self.perform_deprecations(self.fingerprints, get_bridge_by_fingerprint))
rotated.extend(
self.perform_deprecations(self.fingerprints, get_bridge_by_fingerprint)
)
logging.debug("Blocked by fingerprint")
rotated.extend(self.perform_deprecations(self.hashed_fingerprints, get_bridge_by_hashed_fingerprint))
rotated.extend(
self.perform_deprecations(
self.hashed_fingerprints, get_bridge_by_hashed_fingerprint
)
)
logging.debug("Blocked by hashed fingerprint")
if rotated:
activity = Activity(
activity_type="block",
text=(f"[{self.short_name}] ♻ Rotated {len(rotated)} bridges: \n"
+ "\n".join([f"* {fingerprint} ({provider}: {provider_description})" for fingerprint, provider, provider_description in rotated]))
text=(
f"[{self.short_name}] ♻ Rotated {len(rotated)} bridges: \n"
+ "\n".join(
[
f"* {fingerprint} ({provider}: {provider_description})"
for fingerprint, provider, provider_description in rotated
]
)
),
)
db.session.add(activity)
activity.notify()
@@ -87,7 +107,7 @@ def get_bridge_by_ip(ip: str) -> Optional[Bridge]:
return Bridge.query.filter( # type: ignore[no-any-return]
Bridge.deprecated.is_(None),
Bridge.destroyed.is_(None),
Bridge.bridgeline.contains(f" {ip} ")
Bridge.bridgeline.contains(f" {ip} "),
).first()
@@ -95,7 +115,7 @@ def get_bridge_by_fingerprint(fingerprint: str) -> Optional[Bridge]:
return Bridge.query.filter( # type: ignore[no-any-return]
Bridge.deprecated.is_(None),
Bridge.destroyed.is_(None),
Bridge.fingerprint == fingerprint
Bridge.fingerprint == fingerprint,
).first()
@@ -103,5 +123,5 @@ def get_bridge_by_hashed_fingerprint(hashed_fingerprint: str) -> Optional[Bridge
return Bridge.query.filter( # type: ignore[no-any-return]
Bridge.deprecated.is_(None),
Bridge.destroyed.is_(None),
Bridge.hashed_fingerprint == hashed_fingerprint
Bridge.hashed_fingerprint == hashed_fingerprint,
).first()


@@ -17,6 +17,8 @@ class BlockBridgelinesAutomation(BlockBridgeAutomation, ABC):
fingerprint = parts[2]
self.ips.append(ip_address)
self.fingerprints.append(fingerprint)
logging.debug(f"Added blocked bridge with IP {ip_address} and fingerprint {fingerprint}")
logging.debug(
f"Added blocked bridge with IP {ip_address} and fingerprint {fingerprint}"
)
except IndexError:
logging.warning("A parsing error occurred.")


@@ -1,8 +1,7 @@
from flask import current_app
from github import Github
from app.terraform.block.bridge_reachability import \
BlockBridgeReachabilityAutomation
from app.terraform.block.bridge_reachability import BlockBridgeReachabilityAutomation
class BlockBridgeGitHubAutomation(BlockBridgeReachabilityAutomation):
@@ -15,12 +14,13 @@ class BlockBridgeGitHubAutomation(BlockBridgeReachabilityAutomation):
frequency = 30
def fetch(self) -> None:
github = Github(current_app.config['GITHUB_API_KEY'])
repo = github.get_repo(current_app.config['GITHUB_BRIDGE_REPO'])
for vantage_point in current_app.config['GITHUB_BRIDGE_VANTAGE_POINTS']:
github = Github(current_app.config["GITHUB_API_KEY"])
repo = github.get_repo(current_app.config["GITHUB_BRIDGE_REPO"])
for vantage_point in current_app.config["GITHUB_BRIDGE_VANTAGE_POINTS"]:
contents = repo.get_contents(f"recentResult_{vantage_point}")
if isinstance(contents, list):
raise RuntimeError(
f"Expected a file at recentResult_{vantage_point}"
" but got a directory.")
self._lines = contents.decoded_content.decode('utf-8').splitlines()
" but got a directory."
)
self._lines = contents.decoded_content.decode("utf-8").splitlines()


@@ -1,8 +1,7 @@
from flask import current_app
from gitlab import Gitlab
from app.terraform.block.bridge_reachability import \
BlockBridgeReachabilityAutomation
from app.terraform.block.bridge_reachability import BlockBridgeReachabilityAutomation
class BlockBridgeGitlabAutomation(BlockBridgeReachabilityAutomation):
@@ -16,15 +15,15 @@ class BlockBridgeGitlabAutomation(BlockBridgeReachabilityAutomation):
def fetch(self) -> None:
self._lines = list()
credentials = {"private_token": current_app.config['GITLAB_TOKEN']}
credentials = {"private_token": current_app.config["GITLAB_TOKEN"]}
if "GITLAB_URL" in current_app.config:
credentials['url'] = current_app.config['GITLAB_URL']
credentials["url"] = current_app.config["GITLAB_URL"]
gitlab = Gitlab(**credentials)
project = gitlab.projects.get(current_app.config['GITLAB_BRIDGE_PROJECT'])
for vantage_point in current_app.config['GITHUB_BRIDGE_VANTAGE_POINTS']:
project = gitlab.projects.get(current_app.config["GITLAB_BRIDGE_PROJECT"])
for vantage_point in current_app.config["GITHUB_BRIDGE_VANTAGE_POINTS"]:
contents = project.files.get(
file_path=f"recentResult_{vantage_point}",
ref=current_app.config["GITLAB_BRIDGE_BRANCH"]
ref=current_app.config["GITLAB_BRIDGE_BRANCH"],
)
# Decode the base64 first, then decode the UTF-8 string
self._lines.extend(contents.decode().decode('utf-8').splitlines())
self._lines.extend(contents.decode().decode("utf-8").splitlines())


@@ -14,8 +14,10 @@ class BlockBridgeReachabilityAutomation(BlockBridgeAutomation, ABC):
def parse(self) -> None:
for line in self._lines:
parts = line.split("\t")
if isoparse(parts[2]) < (datetime.datetime.now(datetime.timezone.utc)
- datetime.timedelta(days=3)):
if isoparse(parts[2]) < (
datetime.datetime.now(datetime.timezone.utc)
- datetime.timedelta(days=3)
):
# Skip results older than 3 days
continue
if int(parts[1]) < 40:


@@ -13,7 +13,9 @@ class BlockBridgeRoskomsvobodaAutomation(BlockBridgeAutomation):
_data: Any
def fetch(self) -> None:
self._data = requests.get("https://reestr.rublacklist.net/api/v3/ips/", timeout=180).json()
self._data = requests.get(
"https://reestr.rublacklist.net/api/v3/ips/", timeout=180
).json()
def parse(self) -> None:
self.ips.extend(self._data)


@@ -9,7 +9,7 @@ from app.terraform.block_mirror import BlockMirrorAutomation
def _trim_prefix(s: str, prefix: str) -> str:
if s.startswith(prefix):
return s[len(prefix):]
return s[len(prefix) :]
return s
@@ -20,30 +20,31 @@ def trim_http_https(s: str) -> str:
:param s: String to modify.
:return: Modified string.
"""
return _trim_prefix(
_trim_prefix(s, "https://"),
"http://")
return _trim_prefix(_trim_prefix(s, "https://"), "http://")
class BlockExternalAutomation(BlockMirrorAutomation):
"""
Automation task to import proxy reachability results from external source.
"""
short_name = "block_external"
description = "Import proxy reachability results from external source"
_content: bytes
def fetch(self) -> None:
user_agent = {'User-agent': 'BypassCensorship/1.0'}
check_urls_config = app.config.get('EXTERNAL_CHECK_URL', [])
user_agent = {"User-agent": "BypassCensorship/1.0"}
check_urls_config = app.config.get("EXTERNAL_CHECK_URL", [])
if isinstance(check_urls_config, dict):
# Config is already a dictionary, use as is.
check_urls = check_urls_config
elif isinstance(check_urls_config, list):
# Convert list of strings to a dictionary with "external_N" keys.
check_urls = {f"external_{i}": url for i, url in enumerate(check_urls_config)}
check_urls = {
f"external_{i}": url for i, url in enumerate(check_urls_config)
}
elif isinstance(check_urls_config, str):
# Single string, convert to a dictionary with key "external".
check_urls = {"external": check_urls_config}
@@ -53,9 +54,13 @@ class BlockExternalAutomation(BlockMirrorAutomation):
for source, check_url in check_urls.items():
if self._data is None:
self._data = defaultdict(list)
self._data[source].extend(requests.get(check_url, headers=user_agent, timeout=30).json())
self._data[source].extend(
requests.get(check_url, headers=user_agent, timeout=30).json()
)
def parse(self) -> None:
for source, patterns in self._data.items():
self.patterns[source].extend(["https://" + trim_http_https(pattern) for pattern in patterns])
self.patterns[source].extend(
["https://" + trim_http_https(pattern) for pattern in patterns]
)
logging.debug("Found URLs: %s", self.patterns)


@@ -52,8 +52,15 @@ class BlockMirrorAutomation(BaseAutomation):
if rotated:
activity = Activity(
activity_type="block",
text=(f"[{self.short_name}] ♻ Rotated {len(rotated)} proxies: \n"
+ "\n".join([f"* {proxy_domain} ({origin_domain})" for proxy_domain, origin_domain in rotated]))
text=(
f"[{self.short_name}] ♻ Rotated {len(rotated)} proxies: \n"
+ "\n".join(
[
f"* {proxy_domain} ({origin_domain})"
for proxy_domain, origin_domain in rotated
]
)
),
)
db.session.add(activity)
activity.notify()
@@ -79,15 +86,15 @@ class BlockMirrorAutomation(BaseAutomation):
def active_proxy_urls() -> List[str]:
return [proxy.url for proxy in Proxy.query.filter(
Proxy.deprecated.is_(None),
Proxy.destroyed.is_(None)
).all()]
return [
proxy.url
for proxy in Proxy.query.filter(
Proxy.deprecated.is_(None), Proxy.destroyed.is_(None)
).all()
]
def proxy_by_url(url: str) -> Optional[Proxy]:
return Proxy.query.filter( # type: ignore[no-any-return]
Proxy.deprecated.is_(None),
Proxy.destroyed.is_(None),
Proxy.url == url
Proxy.deprecated.is_(None), Proxy.destroyed.is_(None), Proxy.url == url
).first()


@@ -12,19 +12,23 @@ from app.terraform import BaseAutomation
def check_origin(domain_name: str) -> Dict[str, Any]:
start_date = (datetime.now(tz=timezone.utc) - timedelta(days=1)).strftime("%Y-%m-%dT%H%%3A%M")
start_date = (datetime.now(tz=timezone.utc) - timedelta(days=1)).strftime(
"%Y-%m-%dT%H%%3A%M"
)
end_date = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H%%3A%M")
api_url = f"https://api.ooni.io/api/v1/measurements?domain={domain_name}&since={start_date}&until={end_date}"
result: Dict[str, Dict[str, int]] = defaultdict(lambda: {"anomaly": 0, "confirmed": 0, "failure": 0, "ok": 0})
result: Dict[str, Dict[str, int]] = defaultdict(
lambda: {"anomaly": 0, "confirmed": 0, "failure": 0, "ok": 0}
)
return _check_origin(api_url, result)
def _check_origin(api_url: str, result: Dict[str, Any]) -> Dict[str, Any]:
print(f"Processing {api_url}")
req = requests.get(api_url, timeout=30).json()
if 'results' not in req or not req['results']:
if "results" not in req or not req["results"]:
return result
for r in req['results']:
for r in req["results"]:
not_ok = False
for status in ["anomaly", "confirmed", "failure"]:
if status in r and r[status]:
@@ -33,27 +37,28 @@ def _check_origin(api_url: str, result: Dict[str, Any]) -> Dict[str, Any]:
break
if not not_ok:
result[r["probe_cc"]]["ok"] += 1
if req['metadata']['next_url']:
return _check_origin(req['metadata']['next_url'], result)
if req["metadata"]["next_url"]:
return _check_origin(req["metadata"]["next_url"], result)
return result
def threshold_origin(domain_name: str) -> Dict[str, Any]:
ooni = check_origin(domain_name)
for country in ooni:
total = sum([
ooni[country]["anomaly"],
ooni[country]["confirmed"],
ooni[country]["failure"],
ooni[country]["ok"]
])
total_blocks = sum([
ooni[country]["anomaly"],
ooni[country]["confirmed"]
])
total = sum(
[
ooni[country]["anomaly"],
ooni[country]["confirmed"],
ooni[country]["failure"],
ooni[country]["ok"],
]
)
total_blocks = sum([ooni[country]["anomaly"], ooni[country]["confirmed"]])
block_perc = round((total_blocks / total * 100), 1)
ooni[country]["block_perc"] = block_perc
ooni[country]["state"] = AlarmState.WARNING if block_perc > 20 else AlarmState.OK
ooni[country]["state"] = (
AlarmState.WARNING if block_perc > 20 else AlarmState.OK
)
ooni[country]["message"] = f"Blocked in {block_perc}% of measurements"
return ooni
@@ -72,8 +77,9 @@ class BlockOONIAutomation(BaseAutomation):
for origin in origins:
ooni = threshold_origin(origin.domain_name)
for country in ooni:
alarm = get_or_create_alarm(origin.brn,
f"origin-block-ooni-{country.lower()}")
alarm = get_or_create_alarm(
origin.brn, f"origin-block-ooni-{country.lower()}"
)
alarm.update_state(ooni[country]["state"], ooni[country]["message"])
db.session.commit()
return True, ""


@@ -32,6 +32,7 @@ class BlockRoskomsvobodaAutomation(BlockMirrorAutomation):
Where proxies are found to be blocked they will be rotated.
"""
short_name = "block_roskomsvoboda"
description = "Import Russian blocklist from RosKomSvoboda"
frequency = 300
@@ -43,7 +44,11 @@ class BlockRoskomsvobodaAutomation(BlockMirrorAutomation):
try:
# This endpoint routinely has an expired certificate, and it's more useful that we are consuming the
# data than that we are verifying the certificate.
r = requests.get(f"https://dumps.rublacklist.net/fetch/{latest_rev}", timeout=180, verify=False) # nosec: B501
r = requests.get(
f"https://dumps.rublacklist.net/fetch/{latest_rev}",
timeout=180,
verify=False,
) # nosec: B501
r.raise_for_status()
zip_file = ZipFile(BytesIO(r.content))
self._data = zip_file.read("dump.xml")
@@ -51,26 +56,33 @@ class BlockRoskomsvobodaAutomation(BlockMirrorAutomation):
except requests.HTTPError:
activity = Activity(
activity_type="automation",
text=(f"[{self.short_name}] 🚨 Unable to download dump {latest_rev} due to HTTP error {r.status_code}. "
"The automation task has not been disabled and will attempt to download the next dump when the "
"latest dump revision is incremented at the server."))
text=(
f"[{self.short_name}] 🚨 Unable to download dump {latest_rev} due to HTTP error {r.status_code}. "
"The automation task has not been disabled and will attempt to download the next dump when the "
"latest dump revision is incremented at the server."
),
)
activity.notify()
db.session.add(activity)
db.session.commit()
except BadZipFile:
activity = Activity(
activity_type="automation",
text=(f"[{self.short_name}] 🚨 Unable to extract zip file from dump {latest_rev}. There was an error "
"related to the format of the zip file. "
"The automation task has not been disabled and will attempt to download the next dump when the "
"latest dump revision is incremented at the server."))
text=(
f"[{self.short_name}] 🚨 Unable to extract zip file from dump {latest_rev}. There was an error "
"related to the format of the zip file. "
"The automation task has not been disabled and will attempt to download the next dump when the "
"latest dump revision is incremented at the server."
),
)
activity.notify()
db.session.add(activity)
db.session.commit()
def fetch(self) -> None:
state: Optional[TerraformState] = TerraformState.query.filter(
TerraformState.key == "block_roskomsvoboda").first()
TerraformState.key == "block_roskomsvoboda"
).first()
if state is None:
state = TerraformState()
state.key = "block_roskomsvoboda"
@@ -80,8 +92,14 @@ class BlockRoskomsvobodaAutomation(BlockMirrorAutomation):
latest_metadata = json.loads(state.state)
# This endpoint routinely has an expired certificate, and it's more useful that we are consuming the
# data than that we are verifying the certificate.
latest_rev = requests.get("https://dumps.rublacklist.net/fetch/latest", timeout=30, verify=False).text.strip() # nosec: B501
logging.debug("Latest revision is %s, already got %s", latest_rev, latest_metadata["dump_rev"])
latest_rev = requests.get(
"https://dumps.rublacklist.net/fetch/latest", timeout=30, verify=False
).text.strip() # nosec: B501
logging.debug(
"Latest revision is %s, already got %s",
latest_rev,
latest_metadata["dump_rev"],
)
if latest_rev != latest_metadata["dump_rev"]:
state.state = json.dumps({"dump_rev": latest_rev})
db.session.commit()
@@ -94,18 +112,24 @@ class BlockRoskomsvobodaAutomation(BlockMirrorAutomation):
logging.debug("No new data to parse")
return
try:
for _event, element in lxml.etree.iterparse(BytesIO(self._data),
resolve_entities=False):
for _event, element in lxml.etree.iterparse(
BytesIO(self._data), resolve_entities=False
):
if element.tag == "domain":
self.patterns["roskomsvoboda"].append("https://" + element.text.strip())
self.patterns["roskomsvoboda"].append(
"https://" + element.text.strip()
)
except XMLSyntaxError:
activity = Activity(
activity_type="automation",
text=(f"[{self.short_name}] 🚨 Unable to parse XML file from dump. There was an error "
"related to the format of the XML file within the zip file. Interestingly we were able to "
"extract the file from the zip file fine. "
"The automation task has not been disabled and will attempt to download the next dump when the "
"latest dump revision is incremented at the server."))
text=(
f"[{self.short_name}] 🚨 Unable to parse XML file from dump. There was an error "
"related to the format of the XML file within the zip file. Interestingly we were able to "
"extract the file from the zip file fine. "
"The automation task has not been disabled and will attempt to download the next dump when the "
"latest dump revision is incremented at the server."
),
)
activity.notify()
db.session.add(activity)
db.session.commit()


@@ -16,20 +16,32 @@ BridgeResourceRow = Row[Tuple[AbstractResource, BridgeConf, CloudAccount]]
def active_bridges_by_provider(provider: CloudProvider) -> Sequence[BridgeResourceRow]:
stmt = select(Bridge, BridgeConf, CloudAccount).join_from(Bridge, BridgeConf).join_from(Bridge, CloudAccount).where(
CloudAccount.provider == provider,
Bridge.destroyed.is_(None),
stmt = (
select(Bridge, BridgeConf, CloudAccount)
.join_from(Bridge, BridgeConf)
.join_from(Bridge, CloudAccount)
.where(
CloudAccount.provider == provider,
Bridge.destroyed.is_(None),
)
)
bridges: Sequence[BridgeResourceRow] = db.session.execute(stmt).all()
return bridges
def recently_destroyed_bridges_by_provider(provider: CloudProvider) -> Sequence[BridgeResourceRow]:
def recently_destroyed_bridges_by_provider(
provider: CloudProvider,
) -> Sequence[BridgeResourceRow]:
cutoff = datetime.now(tz=timezone.utc) - timedelta(hours=72)
stmt = select(Bridge, BridgeConf, CloudAccount).join_from(Bridge, BridgeConf).join_from(Bridge, CloudAccount).where(
CloudAccount.provider == provider,
Bridge.destroyed.is_not(None),
Bridge.destroyed >= cutoff,
stmt = (
select(Bridge, BridgeConf, CloudAccount)
.join_from(Bridge, BridgeConf)
.join_from(Bridge, CloudAccount)
.where(
CloudAccount.provider == provider,
Bridge.destroyed.is_not(None),
Bridge.destroyed >= cutoff,
)
)
bridges: Sequence[BridgeResourceRow] = db.session.execute(stmt).all()
return bridges
@@ -60,35 +72,38 @@ class BridgeAutomation(TerraformAutomation):
self.template,
active_resources=active_bridges_by_provider(self.provider),
destroyed_resources=recently_destroyed_bridges_by_provider(self.provider),
global_namespace=app.config['GLOBAL_NAMESPACE'],
terraform_modules_path=os.path.join(*list(os.path.split(app.root_path))[:-1], 'terraform-modules'),
global_namespace=app.config["GLOBAL_NAMESPACE"],
terraform_modules_path=os.path.join(
*list(os.path.split(app.root_path))[:-1], "terraform-modules"
),
backend_config=f"""backend "http" {{
lock_address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
unlock_address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
}}""",
**{
k: app.config[k.upper()]
for k in self.template_parameters
}
**{k: app.config[k.upper()] for k in self.template_parameters},
)
def tf_posthook(self, *, prehook_result: Any = None) -> None:
outputs = self.tf_output()
for output in outputs:
if output.startswith('bridge_hashed_fingerprint_'):
parts = outputs[output]['value'].split(" ")
if output.startswith("bridge_hashed_fingerprint_"):
parts = outputs[output]["value"].split(" ")
if len(parts) < 2:
continue
bridge = Bridge.query.filter(Bridge.id == output[len('bridge_hashed_fingerprint_'):]).first()
bridge = Bridge.query.filter(
Bridge.id == output[len("bridge_hashed_fingerprint_") :]
).first()
bridge.nickname = parts[0]
bridge.hashed_fingerprint = parts[1]
bridge.terraform_updated = datetime.now(tz=timezone.utc)
if output.startswith('bridge_bridgeline_'):
parts = outputs[output]['value'].split(" ")
if output.startswith("bridge_bridgeline_"):
parts = outputs[output]["value"].split(" ")
if len(parts) < 4:
continue
bridge = Bridge.query.filter(Bridge.id == output[len('bridge_bridgeline_'):]).first()
bridge = Bridge.query.filter(
Bridge.id == output[len("bridge_bridgeline_") :]
).first()
del parts[3]
bridge.bridgeline = " ".join(parts)
bridge.terraform_updated = datetime.now(tz=timezone.utc)


@@ -7,10 +7,7 @@ class BridgeGandiAutomation(BridgeAutomation):
description = "Deploy Tor bridges on GandiCloud VPS"
provider = CloudProvider.GANDI
template_parameters = [
"ssh_public_key_path",
"ssh_private_key_path"
]
template_parameters = ["ssh_public_key_path", "ssh_private_key_path"]
template = """
terraform {


@@ -7,10 +7,7 @@ class BridgeHcloudAutomation(BridgeAutomation):
description = "Deploy Tor bridges on Hetzner Cloud"
provider = CloudProvider.HCLOUD
template_parameters = [
"ssh_private_key_path",
"ssh_public_key_path"
]
template_parameters = ["ssh_private_key_path", "ssh_public_key_path"]
template = """
terraform {


@@ -25,10 +25,17 @@ def active_bridges_in_account(account: CloudAccount) -> List[Bridge]:
return bridges
def create_bridges_in_account(bridgeconf: BridgeConf, account: CloudAccount, count: int) -> int:
def create_bridges_in_account(
bridgeconf: BridgeConf, account: CloudAccount, count: int
) -> int:
created = 0
while created < count and len(active_bridges_in_account(account)) < account.max_instances:
logging.debug("Creating bridge for configuration %s in account %s", bridgeconf.id, account)
while (
created < count
and len(active_bridges_in_account(account)) < account.max_instances
):
logging.debug(
"Creating bridge for configuration %s in account %s", bridgeconf.id, account
)
bridge = Bridge()
bridge.pool_id = bridgeconf.pool.id
bridge.conf_id = bridgeconf.id
@@ -45,16 +52,18 @@ def create_bridges_by_cost(bridgeconf: BridgeConf, count: int) -> int:
"""
Creates bridge resources for the given bridge configuration using the cheapest available provider.
"""
logging.debug("Creating %s bridges by cost for configuration %s", count, bridgeconf.id)
logging.debug(
"Creating %s bridges by cost for configuration %s", count, bridgeconf.id
)
created = 0
for provider in BRIDGE_PROVIDERS:
if created >= count:
break
logging.info("Creating bridges in %s accounts", provider.description)
for account in CloudAccount.query.filter(
CloudAccount.destroyed.is_(None),
CloudAccount.enabled.is_(True),
CloudAccount.provider == provider,
CloudAccount.destroyed.is_(None),
CloudAccount.enabled.is_(True),
CloudAccount.provider == provider,
).all():
logging.info("Creating bridges in %s", account)
created += create_bridges_in_account(bridgeconf, account, count - created)
@@ -78,7 +87,9 @@ def create_bridges_by_random(bridgeconf: BridgeConf, count: int) -> int:
"""
Creates bridge resources for the given bridge configuration using random providers.
"""
logging.debug("Creating %s bridges by random for configuration %s", count, bridgeconf.id)
logging.debug(
"Creating %s bridges by random for configuration %s", count, bridgeconf.id
)
created = 0
while candidate_accounts := _accounts_with_room():
# Not security-critical random number generation
@@ -97,16 +108,24 @@ def create_bridges(bridgeconf: BridgeConf, count: int) -> int:
return create_bridges_by_random(bridgeconf, count)
def deprecate_bridges(bridgeconf: BridgeConf, count: int, reason: str = "redundant") -> int:
logging.debug("Deprecating %s bridges (%s) for configuration %s", count, reason, bridgeconf.id)
def deprecate_bridges(
bridgeconf: BridgeConf, count: int, reason: str = "redundant"
) -> int:
logging.debug(
"Deprecating %s bridges (%s) for configuration %s", count, reason, bridgeconf.id
)
deprecated = 0
active_conf_bridges = iter(Bridge.query.filter(
Bridge.conf_id == bridgeconf.id,
Bridge.deprecated.is_(None),
Bridge.destroyed.is_(None),
).all())
active_conf_bridges = iter(
Bridge.query.filter(
Bridge.conf_id == bridgeconf.id,
Bridge.deprecated.is_(None),
Bridge.destroyed.is_(None),
).all()
)
while deprecated < count:
logging.debug("Deprecating bridge %s for configuration %s", deprecated + 1, bridgeconf.id)
logging.debug(
"Deprecating bridge %s for configuration %s", deprecated + 1, bridgeconf.id
)
bridge = next(active_conf_bridges)
logging.debug("Bridge %r", bridge)
bridge.deprecate(reason=reason)
@@ -129,7 +148,9 @@ class BridgeMetaAutomation(BaseAutomation):
for bridge in deprecated_bridges:
if bridge.deprecated is None:
continue # Possible due to SQLAlchemy lazy loading
cutoff = datetime.now(tz=timezone.utc) - timedelta(hours=bridge.conf.expiry_hours)
cutoff = datetime.now(tz=timezone.utc) - timedelta(
hours=bridge.conf.expiry_hours
)
if bridge.deprecated < cutoff:
logging.debug("Destroying expired bridge")
bridge.destroy()
@@ -146,7 +167,9 @@ class BridgeMetaAutomation(BaseAutomation):
activate_bridgeconfs = BridgeConf.query.filter(
BridgeConf.destroyed.is_(None),
).all()
logging.debug("Found %s active bridge configurations", len(activate_bridgeconfs))
logging.debug(
"Found %s active bridge configurations", len(activate_bridgeconfs)
)
for bridgeconf in activate_bridgeconfs:
active_conf_bridges = Bridge.query.filter(
Bridge.conf_id == bridgeconf.id,
@@ -157,16 +180,18 @@ class BridgeMetaAutomation(BaseAutomation):
Bridge.conf_id == bridgeconf.id,
Bridge.destroyed.is_(None),
).all()
logging.debug("Generating new bridges for %s (active: %s, total: %s, target: %s, max: %s)",
bridgeconf.id,
len(active_conf_bridges),
len(total_conf_bridges),
bridgeconf.target_number,
bridgeconf.max_number
)
logging.debug(
"Generating new bridges for %s (active: %s, total: %s, target: %s, max: %s)",
bridgeconf.id,
len(active_conf_bridges),
len(total_conf_bridges),
bridgeconf.target_number,
bridgeconf.max_number,
)
missing = min(
bridgeconf.target_number - len(active_conf_bridges),
bridgeconf.max_number - len(total_conf_bridges))
bridgeconf.max_number - len(total_conf_bridges),
)
if missing > 0:
create_bridges(bridgeconf, missing)
elif missing < 0:


@@ -7,10 +7,7 @@ class BridgeOvhAutomation(BridgeAutomation):
description = "Deploy Tor bridges on OVH Public Cloud"
provider = CloudProvider.OVH
template_parameters = [
"ssh_public_key_path",
"ssh_private_key_path"
]
template_parameters = ["ssh_public_key_path", "ssh_private_key_path"]
template = """
terraform {


@@ -11,14 +11,12 @@ from app.terraform.eotk import eotk_configuration
from app.terraform.terraform import TerraformAutomation
def update_eotk_instance(group_id: int,
region: str,
instance_id: str) -> None:
def update_eotk_instance(group_id: int, region: str, instance_id: str) -> None:
instance = Eotk.query.filter(
Eotk.group_id == group_id,
Eotk.region == region,
Eotk.provider == "aws",
Eotk.destroyed.is_(None)
Eotk.destroyed.is_(None),
).first()
if instance is None:
instance = Eotk()
@@ -35,10 +33,7 @@ class EotkAWSAutomation(TerraformAutomation):
short_name = "eotk_aws"
description = "Deploy EOTK instances to AWS"
template_parameters = [
"aws_access_key",
"aws_secret_key"
]
template_parameters = ["aws_access_key", "aws_secret_key"]
template = """
terraform {
@@ -81,32 +76,41 @@ class EotkAWSAutomation(TerraformAutomation):
self.tf_write(
self.template,
groups=Group.query.filter(
Group.eotk.is_(True),
Group.destroyed.is_(None)
Group.eotk.is_(True), Group.destroyed.is_(None)
).all(),
global_namespace=app.config['GLOBAL_NAMESPACE'],
terraform_modules_path=os.path.join(*list(os.path.split(app.root_path))[:-1], 'terraform-modules'),
global_namespace=app.config["GLOBAL_NAMESPACE"],
terraform_modules_path=os.path.join(
*list(os.path.split(app.root_path))[:-1], "terraform-modules"
),
backend_config=f"""backend "http" {{
lock_address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
unlock_address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
}}""",
**{
k: app.config[k.upper()]
for k in self.template_parameters
}
**{k: app.config[k.upper()] for k in self.template_parameters},
)
for group in Group.query.filter(
Group.eotk.is_(True),
Group.destroyed.is_(None)
).order_by(Group.id).all():
with DeterministicZip(os.path.join(self.working_dir, f"{group.id}.zip")) as dzip:
dzip.add_file("sites.conf", eotk_configuration(group).encode('utf-8'))
for group in (
Group.query.filter(Group.eotk.is_(True), Group.destroyed.is_(None))
.order_by(Group.id)
.all()
):
with DeterministicZip(
os.path.join(self.working_dir, f"{group.id}.zip")
) as dzip:
dzip.add_file("sites.conf", eotk_configuration(group).encode("utf-8"))
for onion in sorted(group.onions, key=lambda o: o.onion_name):
dzip.add_file(f"{onion.onion_name}.v3pub.key", onion.onion_public_key)
dzip.add_file(f"{onion.onion_name}.v3sec.key", onion.onion_private_key)
dzip.add_file(f"{onion.onion_name[:20]}-v3.cert", onion.tls_public_key)
dzip.add_file(f"{onion.onion_name[:20]}-v3.pem", onion.tls_private_key)
dzip.add_file(
f"{onion.onion_name}.v3pub.key", onion.onion_public_key
)
dzip.add_file(
f"{onion.onion_name}.v3sec.key", onion.onion_private_key
)
dzip.add_file(
f"{onion.onion_name[:20]}-v3.cert", onion.tls_public_key
)
dzip.add_file(
f"{onion.onion_name[:20]}-v3.pem", onion.tls_private_key
)
def tf_posthook(self, *, prehook_result: Any = None) -> None:
for e in Eotk.query.all():
@@ -115,9 +119,9 @@ class EotkAWSAutomation(TerraformAutomation):
for output in outputs:
if output.startswith("eotk_instances_"):
try:
group_id = int(output[len("eotk_instance_") + 1:])
for az in outputs[output]['value']:
update_eotk_instance(group_id, az, outputs[output]['value'][az])
group_id = int(output[len("eotk_instance_") + 1 :])
for az in outputs[output]["value"]:
update_eotk_instance(group_id, az, outputs[output]["value"][az])
except ValueError:
pass
db.session.commit()


@@ -55,26 +55,36 @@ class ListAutomation(TerraformAutomation):
MirrorList.destroyed.is_(None),
MirrorList.provider == self.provider,
).all(),
global_namespace=app.config['GLOBAL_NAMESPACE'],
terraform_modules_path=os.path.join(*list(os.path.split(app.root_path))[:-1], 'terraform-modules'),
global_namespace=app.config["GLOBAL_NAMESPACE"],
terraform_modules_path=os.path.join(
*list(os.path.split(app.root_path))[:-1], "terraform-modules"
),
backend_config=f"""backend "http" {{
lock_address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
unlock_address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
}}""",
**{
k: app.config[k.upper()]
for k in self.template_parameters
}
**{k: app.config[k.upper()] for k in self.template_parameters},
)
for pool in Pool.query.filter(Pool.destroyed.is_(None)).all():
for key, formatter in lists.items():
formatted_pool = formatter(pool)
for obfuscate in [True, False]:
with open(os.path.join(
self.working_dir, f"{key}.{pool.pool_name}{'.jsno' if obfuscate else '.json'}"),
'w', encoding="utf-8") as out:
with open(
os.path.join(
self.working_dir,
f"{key}.{pool.pool_name}{'.jsno' if obfuscate else '.json'}",
),
"w",
encoding="utf-8",
) as out:
out.write(json_encode(formatted_pool, obfuscate))
with open(os.path.join(self.working_dir, f"{key}.{pool.pool_name}{'.jso' if obfuscate else '.js'}"),
'w', encoding="utf-8") as out:
with open(
os.path.join(
self.working_dir,
f"{key}.{pool.pool_name}{'.jso' if obfuscate else '.js'}",
),
"w",
encoding="utf-8",
) as out:
out.write(javascript_encode(formatted_pool, obfuscate))


@@ -11,9 +11,7 @@ class ListGithubAutomation(ListAutomation):
# TODO: file an issue in the github about this, GitLab had a similar issue but fixed it
parallelism = 1
template_parameters = [
"github_api_key"
]
template_parameters = ["github_api_key"]
template = """
terraform {


@@ -15,7 +15,7 @@ class ListGitlabAutomation(ListAutomation):
"gitlab_token",
"gitlab_author_email",
"gitlab_author_name",
"gitlab_commit_message"
"gitlab_commit_message",
]
template = """
@@ -56,5 +56,5 @@ class ListGitlabAutomation(ListAutomation):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
if 'GITLAB_URL' in current_app.config:
if "GITLAB_URL" in current_app.config:
self.template_parameters.append("gitlab_url")


@@ -6,10 +6,7 @@ class ListS3Automation(ListAutomation):
description = "Update mirror lists in AWS S3 buckets"
provider = "s3"
template_parameters = [
"aws_access_key",
"aws_secret_key"
]
template_parameters = ["aws_access_key", "aws_secret_key"]
template = """
terraform {


@@ -15,15 +15,14 @@ from app.models.mirrors import Origin, Proxy, SmartProxy
from app.terraform.terraform import TerraformAutomation
def update_smart_proxy_instance(group_id: int,
provider: str,
region: str,
instance_id: str) -> None:
def update_smart_proxy_instance(
group_id: int, provider: str, region: str, instance_id: str
) -> None:
instance = SmartProxy.query.filter(
SmartProxy.group_id == group_id,
SmartProxy.region == region,
SmartProxy.provider == provider,
SmartProxy.destroyed.is_(None)
SmartProxy.destroyed.is_(None),
).first()
if instance is None:
instance = SmartProxy()
@@ -93,16 +92,21 @@ class ProxyAutomation(TerraformAutomation):
self.template,
groups=groups,
proxies=Proxy.query.filter(
Proxy.provider == self.provider, Proxy.destroyed.is_(None)).all(),
Proxy.provider == self.provider, Proxy.destroyed.is_(None)
).all(),
subgroups=self.get_subgroups(),
global_namespace=app.config['GLOBAL_NAMESPACE'], bypass_token=app.config['BYPASS_TOKEN'],
terraform_modules_path=os.path.join(*list(os.path.split(app.root_path))[:-1], 'terraform-modules'),
global_namespace=app.config["GLOBAL_NAMESPACE"],
bypass_token=app.config["BYPASS_TOKEN"],
terraform_modules_path=os.path.join(
*list(os.path.split(app.root_path))[:-1], "terraform-modules"
),
backend_config=f"""backend "http" {{
lock_address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
unlock_address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
address = "{app.config['TFSTATE_BACKEND']}/{self.short_name}"
}}""",
**{k: app.config[k.upper()] for k in self.template_parameters})
**{k: app.config[k.upper()] for k in self.template_parameters},
)
if self.smart_proxies:
for group in groups:
self.sp_config(group)
@@ -111,9 +115,11 @@ class ProxyAutomation(TerraformAutomation):
group_origins: List[Origin] = Origin.query.filter(
Origin.group_id == group.id,
Origin.destroyed.is_(None),
Origin.smart.is_(True)
Origin.smart.is_(True),
).all()
self.tmpl_write(f"smart_proxy.{group.id}.conf", """
self.tmpl_write(
f"smart_proxy.{group.id}.conf",
"""
{% for origin in origins %}
server {
listen 443 ssl;
@@ -173,23 +179,28 @@ class ProxyAutomation(TerraformAutomation):
}
{% endfor %}
""",
provider=self.provider,
origins=group_origins,
smart_zone=app.config['SMART_ZONE'])
provider=self.provider,
origins=group_origins,
smart_zone=app.config["SMART_ZONE"],
)
@classmethod
def get_subgroups(cls) -> Dict[int, Dict[int, int]]:
conn = db.engine.connect()
stmt = text("""
stmt = text(
"""
SELECT origin.group_id, proxy.psg, COUNT(proxy.id) FROM proxy, origin
WHERE proxy.origin_id = origin.id
AND proxy.destroyed IS NULL
AND proxy.provider = :provider
GROUP BY origin.group_id, proxy.psg;
""")
"""
)
stmt = stmt.bindparams(provider=cls.provider)
result = conn.execute(stmt).all()
subgroups: Dict[int, Dict[int, int]] = defaultdict(lambda: defaultdict(lambda: 0))
subgroups: Dict[int, Dict[int, int]] = defaultdict(
lambda: defaultdict(lambda: 0)
)
for row in result:
subgroups[row[0]][row[1]] = row[2]
return subgroups


@@ -21,7 +21,7 @@ class ProxyAzureCdnAutomation(ProxyAutomation):
"azure_client_secret",
"azure_subscription_id",
"azure_tenant_id",
"smart_zone"
"smart_zone",
]
template = """
@@ -162,8 +162,7 @@ class ProxyAzureCdnAutomation(ProxyAutomation):
def import_state(self, state: Optional[Any]) -> None:
proxies = Proxy.query.filter(
Proxy.provider == self.provider,
Proxy.destroyed.is_(None)
Proxy.provider == self.provider, Proxy.destroyed.is_(None)
).all()
for proxy in proxies:
proxy.url = f"https://{proxy.slug}.azureedge.net"


@@ -17,7 +17,7 @@ class ProxyCloudfrontAutomation(ProxyAutomation):
"admin_email",
"aws_access_key",
"aws_secret_key",
"smart_zone"
"smart_zone",
]
template = """
@@ -111,26 +111,35 @@ class ProxyCloudfrontAutomation(ProxyAutomation):
def import_state(self, state: Any) -> None:
if not isinstance(state, dict):
raise RuntimeError("The Terraform state object returned was not a dict.")
if "child_modules" not in state['values']['root_module']:
if "child_modules" not in state["values"]["root_module"]:
# There are no CloudFront proxies deployed to import state for
return
# CloudFront distributions (proxies)
for mod in state['values']['root_module']['child_modules']:
if mod['address'].startswith('module.cloudfront_'):
for res in mod['resources']:
if res['address'].endswith('aws_cloudfront_distribution.this'):
proxy = Proxy.query.filter(Proxy.id == mod['address'][len('module.cloudfront_'):]).first()
proxy.url = "https://" + res['values']['domain_name']
proxy.slug = res['values']['id']
for mod in state["values"]["root_module"]["child_modules"]:
if mod["address"].startswith("module.cloudfront_"):
for res in mod["resources"]:
if res["address"].endswith("aws_cloudfront_distribution.this"):
proxy = Proxy.query.filter(
Proxy.id == mod["address"][len("module.cloudfront_") :]
).first()
proxy.url = "https://" + res["values"]["domain_name"]
proxy.slug = res["values"]["id"]
proxy.terraform_updated = datetime.now(tz=timezone.utc)
break
# EC2 instances (smart proxies)
for g in state["values"]["root_module"]["child_modules"]:
if g["address"].startswith("module.smart_proxy_"):
group_id = int(g["address"][len("module.smart_proxy_"):])
group_id = int(g["address"][len("module.smart_proxy_") :])
for s in g["child_modules"]:
if s["address"].endswith(".module.instance"):
for x in s["resources"]:
if x["address"].endswith(".module.instance.aws_instance.default[0]"):
update_smart_proxy_instance(group_id, self.provider, "us-east-2a", x['values']['id'])
if x["address"].endswith(
".module.instance.aws_instance.default[0]"
):
update_smart_proxy_instance(
group_id,
self.provider,
"us-east-2a",
x["values"]["id"],
)
db.session.commit()


@@ -14,11 +14,7 @@ class ProxyFastlyAutomation(ProxyAutomation):
subgroup_members_max = 20
cloud_name = "fastly"
template_parameters = [
"aws_access_key",
"aws_secret_key",
"fastly_api_key"
]
template_parameters = ["aws_access_key", "aws_secret_key", "fastly_api_key"]
template = """
terraform {
@@ -125,13 +121,14 @@ class ProxyFastlyAutomation(ProxyAutomation):
Constructor method.
"""
# Requires Flask application context to read configuration
self.subgroup_members_max = min(current_app.config.get("FASTLY_MAX_BACKENDS", 5), 20)
self.subgroup_members_max = min(
current_app.config.get("FASTLY_MAX_BACKENDS", 5), 20
)
super().__init__(*args, **kwargs)
def import_state(self, state: Optional[Any]) -> None:
proxies = Proxy.query.filter(
Proxy.provider == self.provider,
Proxy.destroyed.is_(None)
Proxy.provider == self.provider, Proxy.destroyed.is_(None)
).all()
for proxy in proxies:
proxy.url = f"https://{proxy.slug}.global.ssl.fastly.net"


@@ -18,12 +18,16 @@ from app.terraform.proxy.azure_cdn import ProxyAzureCdnAutomation
from app.terraform.proxy.cloudfront import ProxyCloudfrontAutomation
from app.terraform.proxy.fastly import ProxyFastlyAutomation
PROXY_PROVIDERS: Dict[str, Type[ProxyAutomation]] = {p.provider: p for p in [ # type: ignore[attr-defined]
# In order of preference
ProxyCloudfrontAutomation,
ProxyFastlyAutomation,
ProxyAzureCdnAutomation
] if p.enabled} # type: ignore[attr-defined]
PROXY_PROVIDERS: Dict[str, Type[ProxyAutomation]] = {
p.provider: p # type: ignore[attr-defined]
for p in [
# In order of preference
ProxyCloudfrontAutomation,
ProxyFastlyAutomation,
ProxyAzureCdnAutomation,
]
if p.enabled # type: ignore[attr-defined]
}
SubgroupCount = OrderedDictT[str, OrderedDictT[int, OrderedDictT[int, int]]]
@@ -61,8 +65,9 @@ def random_slug(origin_domain_name: str) -> str:
"exampasdfghjkl"
"""
# The random slug doesn't need to be cryptographically secure, hence the use of `# nosec`
return tldextract.extract(origin_domain_name).domain[:5] + ''.join(
random.choices(string.ascii_lowercase, k=12)) # nosec
return tldextract.extract(origin_domain_name).domain[:5] + "".join(
random.choices(string.ascii_lowercase, k=12) # nosec: B311
)
def calculate_subgroup_count(proxies: Optional[List[Proxy]] = None) -> SubgroupCount:
@@ -95,8 +100,13 @@ def calculate_subgroup_count(proxies: Optional[List[Proxy]] = None) -> SubgroupC
return subgroup_count
def next_subgroup(subgroup_count: SubgroupCount, provider: str, group_id: int, max_subgroup_count: int,
max_subgroup_members: int) -> Optional[int]:
def next_subgroup(
subgroup_count: SubgroupCount,
provider: str,
group_id: int,
max_subgroup_count: int,
max_subgroup_members: int,
) -> Optional[int]:
"""
Find the first available subgroup with less than the specified maximum count in the specified provider and group.
If the last subgroup in the group is full, return the next subgroup number as long as it doesn't exceed
@@ -137,27 +147,36 @@ def auto_deprecate_proxies() -> None:
- The "max_age_reached" reason means the proxy has been in use for longer than the maximum allowed period.
The maximum age cutoff is randomly set to a time between 24 and 48 hours.
"""
origin_destroyed_proxies = (db.session.query(Proxy)
.join(Origin, Proxy.origin_id == Origin.id)
.filter(Proxy.destroyed.is_(None),
Proxy.deprecated.is_(None),
Origin.destroyed.is_not(None))
.all())
origin_destroyed_proxies = (
db.session.query(Proxy)
.join(Origin, Proxy.origin_id == Origin.id)
.filter(
Proxy.destroyed.is_(None),
Proxy.deprecated.is_(None),
Origin.destroyed.is_not(None),
)
.all()
)
logging.debug("Origin destroyed: %s", origin_destroyed_proxies)
for proxy in origin_destroyed_proxies:
proxy.deprecate(reason="origin_destroyed")
max_age_proxies = (db.session.query(Proxy)
.join(Origin, Proxy.origin_id == Origin.id)
.filter(Proxy.destroyed.is_(None),
Proxy.deprecated.is_(None),
Proxy.pool_id != -1, # do not rotate hotspare proxies
Origin.assets,
Origin.auto_rotation)
.all())
max_age_proxies = (
db.session.query(Proxy)
.join(Origin, Proxy.origin_id == Origin.id)
.filter(
Proxy.destroyed.is_(None),
Proxy.deprecated.is_(None),
Proxy.pool_id != -1, # do not rotate hotspare proxies
Origin.assets,
Origin.auto_rotation,
)
.all()
)
logging.debug("Max age: %s", max_age_proxies)
for proxy in max_age_proxies:
max_age_cutoff = datetime.now(timezone.utc) - timedelta(
days=1, seconds=86400 * random.random()) # nosec: B311
days=1, seconds=86400 * random.random() # nosec: B311
)
if proxy.added < max_age_cutoff:
proxy.deprecate(reason="max_age_reached")
@@ -171,8 +190,7 @@ def destroy_expired_proxies() -> None:
"""
expiry_cutoff = datetime.now(timezone.utc) - timedelta(days=4)
proxies = Proxy.query.filter(
Proxy.destroyed.is_(None),
Proxy.deprecated < expiry_cutoff
Proxy.destroyed.is_(None), Proxy.deprecated < expiry_cutoff
).all()
for proxy in proxies:
logging.debug("Destroying expired proxy")
@@ -244,12 +262,17 @@ class ProxyMetaAutomation(BaseAutomation):
if origin.destroyed is not None:
continue
proxies = [
x for x in origin.proxies
if x.pool_id == pool.id and x.deprecated is None and x.destroyed is None
x
for x in origin.proxies
if x.pool_id == pool.id
and x.deprecated is None
and x.destroyed is None
]
logging.debug("Proxies for group %s: %s", group.group_name, proxies)
if not proxies:
logging.debug("Creating new proxy for %s in pool %s", origin, pool)
logging.debug(
"Creating new proxy for %s in pool %s", origin, pool
)
if not promote_hot_spare_proxy(pool.id, origin):
# No "hot spare" available
self.create_proxy(pool.id, origin)
@@ -270,8 +293,13 @@ class ProxyMetaAutomation(BaseAutomation):
"""
for provider in PROXY_PROVIDERS.values():
logging.debug("Looking at provider %s", provider.provider)
subgroup = next_subgroup(self.subgroup_count, provider.provider, origin.group_id,
provider.subgroup_members_max, provider.subgroup_count_max)
subgroup = next_subgroup(
self.subgroup_count,
provider.provider,
origin.group_id,
provider.subgroup_members_max,
provider.subgroup_count_max,
)
if subgroup is None:
continue # Exceeded maximum number of subgroups and last subgroup is full
self.increment_subgroup(provider.provider, origin.group_id, subgroup)
@@ -317,9 +345,7 @@ class ProxyMetaAutomation(BaseAutomation):
If an origin is not destroyed and lacks active proxies (not deprecated and not destroyed),
a new 'hot spare' proxy for this origin is created in the reserve pool (with pool_id = -1).
"""
origins = Origin.query.filter(
Origin.destroyed.is_(None)
).all()
origins = Origin.query.filter(Origin.destroyed.is_(None)).all()
for origin in origins:
if origin.countries:
risk_levels = origin.risk_level.items()
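[Editor's note: a minimal sketch of the "lacks active proxies" condition the docstring above describes; attribute names are taken from this diff, the helper itself is hypothetical:]

    def needs_hot_spare(origin: "Origin") -> bool:
        # A live origin with no proxy that is both un-deprecated and
        # un-destroyed; the hot spare itself lives in the reserve pool
        # (pool_id == -1).
        return origin.destroyed is None and not any(
            p.deprecated is None and p.destroyed is None for p in origin.proxies
        )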
@@ -328,7 +354,10 @@ class ProxyMetaAutomation(BaseAutomation):
if highest_risk_level < 4:
for proxy in origin.proxies:
if proxy.destroyed is None and proxy.pool_id == -1:
logging.debug("Destroying hot spare proxy for origin %s (low risk)", origin)
logging.debug(
"Destroying hot spare proxy for origin %s (low risk)",
origin,
)
proxy.destroy()
continue
if origin.destroyed is not None:

View file

@@ -15,21 +15,26 @@ from app.terraform.terraform import TerraformAutomation
def import_state(state: Any) -> None:
if not isinstance(state, dict):
raise RuntimeError("The Terraform state object returned was not a dict.")
if "values" not in state or "child_modules" not in state['values']['root_module']:
if "values" not in state or "child_modules" not in state["values"]["root_module"]:
# There are no CloudFront origins deployed to import state for
return
# CloudFront distributions (origins)
for mod in state['values']['root_module']['child_modules']:
if mod['address'].startswith('module.static_'):
static_id = mod['address'][len('module.static_'):]
for mod in state["values"]["root_module"]["child_modules"]:
if mod["address"].startswith("module.static_"):
static_id = mod["address"][len("module.static_") :]
logging.debug("Found static module in state: %s", static_id)
for res in mod['resources']:
if res['address'].endswith('aws_cloudfront_distribution.this'):
for res in mod["resources"]:
if res["address"].endswith("aws_cloudfront_distribution.this"):
logging.debug("and found related cloudfront distribution")
static = StaticOrigin.query.filter(StaticOrigin.id == static_id).first()
static.origin_domain_name = res['values']['domain_name']
logging.debug("and found static origin: %s to update with domain name: %s", static.id,
static.origin_domain_name)
static = StaticOrigin.query.filter(
StaticOrigin.id == static_id
).first()
static.origin_domain_name = res["values"]["domain_name"]
logging.debug(
"and found static origin: %s to update with domain name: %s",
static.id,
static.origin_domain_name,
)
static.terraform_updated = datetime.now(tz=timezone.utc)
break
db.session.commit()
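[Editor's note: a trimmed sketch of the `terraform show -json` document the loop above walks; the key names come from the code, the values are invented:]

    {
      "values": {
        "root_module": {
          "child_modules": [
            {
              "address": "module.static_1234",
              "resources": [
                {
                  "address": "module.static_1234.aws_cloudfront_distribution.this",
                  "values": {"domain_name": "d111111abcdef8.cloudfront.net"}
                }
              ]
            }
          ]
        }
      }
    }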
@@ -128,14 +133,18 @@ class StaticAWSAutomation(TerraformAutomation):
groups=groups,
storage_cloud_accounts=storage_cloud_accounts,
source_cloud_accounts=source_cloud_accounts,
global_namespace=current_app.config['GLOBAL_NAMESPACE'], bypass_token=current_app.config['BYPASS_TOKEN'],
terraform_modules_path=os.path.join(*list(os.path.split(current_app.root_path))[:-1], 'terraform-modules'),
global_namespace=current_app.config["GLOBAL_NAMESPACE"],
bypass_token=current_app.config["BYPASS_TOKEN"],
terraform_modules_path=os.path.join(
*list(os.path.split(current_app.root_path))[:-1], "terraform-modules"
),
backend_config=f"""backend "http" {{
lock_address = "{current_app.config['TFSTATE_BACKEND']}/{self.short_name}"
unlock_address = "{current_app.config['TFSTATE_BACKEND']}/{self.short_name}"
address = "{current_app.config['TFSTATE_BACKEND']}/{self.short_name}"
}}""",
**{k: current_app.config[k.upper()] for k in self.template_parameters})
**{k: current_app.config[k.upper()] for k in self.template_parameters},
)
def tf_posthook(self, *, prehook_result: Any = None) -> None:
import_state(self.tf_show())
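[Editor's note: with a hypothetical TFSTATE_BACKEND of https://tfstate.example.com and a short name of static_aws, the backend_config fragment above would render as:]

    backend "http" {
        lock_address = "https://tfstate.example.com/static_aws"
        unlock_address = "https://tfstate.example.com/static_aws"
        address = "https://tfstate.example.com/static_aws"
    }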

View file

@@ -27,7 +27,9 @@ class StaticMetaAutomation(BaseAutomation):
if static_origin.origin_domain_name is not None:
try:
# Check if an Origin with the same domain name already exists
origin = Origin.query.filter_by(domain_name=static_origin.origin_domain_name).one()
origin = Origin.query.filter_by(
domain_name=static_origin.origin_domain_name
).one()
# Keep auto rotation value in sync
origin.auto_rotation = static_origin.auto_rotate
except NoResultFound:
@@ -35,17 +37,21 @@ class StaticMetaAutomation(BaseAutomation):
origin = Origin(
group_id=static_origin.group_id,
description=f"PORTAL !! DO NOT DELETE !! Automatically created web origin for static origin "
f"#{static_origin.id}",
f"#{static_origin.id}",
domain_name=static_origin.origin_domain_name,
auto_rotation=static_origin.auto_rotate,
smart=False,
assets=False,
)
db.session.add(origin)
logging.debug(f"Created Origin with domain name {origin.domain_name}")
logging.debug(
f"Created Origin with domain name {origin.domain_name}"
)
# Step 2: Remove Origins for StaticOrigins with non-null destroy value
static_origins_with_destroyed = StaticOrigin.query.filter(StaticOrigin.destroyed.isnot(None)).all()
static_origins_with_destroyed = StaticOrigin.query.filter(
StaticOrigin.destroyed.isnot(None)
).all()
for static_origin in static_origins_with_destroyed:
try:
origin = Origin.query.filter_by(

View file

@@ -51,14 +51,20 @@ class TerraformAutomation(BaseAutomation):
prehook_result = self.tf_prehook() # pylint: disable=assignment-from-no-return
self.tf_generate()
self.tf_init()
returncode, logs = self.tf_apply(self.working_dir, refresh=self.always_refresh or full)
returncode, logs = self.tf_apply(
self.working_dir, refresh=self.always_refresh or full
)
self.tf_posthook(prehook_result=prehook_result)
return returncode == 0, logs
def tf_apply(self, working_dir: str, *,
refresh: bool = True,
parallelism: Optional[int] = None,
lock_timeout: int = 15) -> Tuple[int, str]:
def tf_apply(
self,
working_dir: str,
*,
refresh: bool = True,
parallelism: Optional[int] = None,
lock_timeout: int = 15,
) -> Tuple[int, str]:
if not parallelism:
parallelism = self.parallelism
if not self.working_dir:
@@ -67,17 +73,19 @@ class TerraformAutomation(BaseAutomation):
# the argument list as an array such that argument injection would be
# ineffective.
tfcmd = subprocess.run( # nosec
['terraform',
'apply',
'-auto-approve',
'-json',
f'-refresh={str(refresh).lower()}',
f'-parallelism={str(parallelism)}',
f'-lock-timeout={str(lock_timeout)}m',
],
[
"terraform",
"apply",
"-auto-approve",
"-json",
f"-refresh={str(refresh).lower()}",
f"-parallelism={str(parallelism)}",
f"-lock-timeout={str(lock_timeout)}m",
],
cwd=working_dir,
stdout=subprocess.PIPE)
return tfcmd.returncode, tfcmd.stdout.decode('utf-8')
stdout=subprocess.PIPE,
)
return tfcmd.returncode, tfcmd.stdout.decode("utf-8")
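[Editor's note: for reference, with refresh=True, the default lock timeout, and a hypothetical parallelism of 10, the argument list above is equivalent to running:]

    terraform apply -auto-approve -json -refresh=true -parallelism=10 -lock-timeout=15m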
@abstractmethod
def tf_generate(self) -> None:
@@ -91,41 +99,49 @@ class TerraformAutomation(BaseAutomation):
# the argument list as an array such that argument injection would be
# ineffective.
subprocess.run( # nosec
['terraform',
'init',
f'-lock-timeout={str(lock_timeout)}m',
],
cwd=self.working_dir)
[
"terraform",
"init",
f"-lock-timeout={str(lock_timeout)}m",
],
cwd=self.working_dir,
)
def tf_output(self) -> Any:
if not self.working_dir:
raise RuntimeError("No working directory specified.")
# The following subprocess call does not take any user input.
tfcmd = subprocess.run( # nosec
['terraform', 'output', '-json'],
["terraform", "output", "-json"],
cwd=self.working_dir,
stdout=subprocess.PIPE)
stdout=subprocess.PIPE,
)
return json.loads(tfcmd.stdout)
def tf_plan(self, *,
refresh: bool = True,
parallelism: Optional[int] = None,
lock_timeout: int = 15) -> Tuple[int, str]:
def tf_plan(
self,
*,
refresh: bool = True,
parallelism: Optional[int] = None,
lock_timeout: int = 15,
) -> Tuple[int, str]:
if not parallelism:
    parallelism = self.parallelism
if not self.working_dir:
    raise RuntimeError("No working directory specified.")
# The following subprocess call takes external input, but is providing
# the argument list as an array such that argument injection would be
# ineffective.
tfcmd = subprocess.run( # nosec
['terraform',
'plan',
'-json',
f'-refresh={str(refresh).lower()}',
f'-parallelism={str(parallelism)}',
f'-lock-timeout={str(lock_timeout)}m',
],
cwd=self.working_dir)
return tfcmd.returncode, tfcmd.stdout.decode('utf-8')
[
"terraform",
"plan",
"-json",
f"-refresh={str(refresh).lower()}",
f"-parallelism={str(parallelism)}",
f"-lock-timeout={str(lock_timeout)}m",
],
cwd=self.working_dir,
stdout=subprocess.PIPE,  # capture plan output; otherwise tfcmd.stdout is None
)
return tfcmd.returncode, tfcmd.stdout.decode("utf-8")
def tf_posthook(self, *, prehook_result: Any = None) -> None:
"""
@@ -154,9 +170,8 @@ class TerraformAutomation(BaseAutomation):
raise RuntimeError("No working directory specified.")
# This subprocess call doesn't take any user input.
terraform = subprocess.run( # nosec
['terraform', 'show', '-json'],
cwd=self.working_dir,
stdout=subprocess.PIPE)
["terraform", "show", "-json"], cwd=self.working_dir, stdout=subprocess.PIPE
)
return json.loads(terraform.stdout)
def tf_write(self, template: str, **kwargs: Any) -> None: