resource pool system

parent dc989dd7cb
commit 16f7e2199d

19 changed files with 382 additions and 105 deletions
@@ -1,14 +1,11 @@
 import os.path
+import sys
 from abc import abstractmethod
-from collections import defaultdict
-import datetime
-import math
-import string
-import random
-from typing import Dict, Optional, Any, List
+from collections import defaultdict
+from typing import Optional, Any, List, Dict
 
 from sqlalchemy import text
 from tldextract import tldextract
 
 from app import app
 from app.extensions import db
@@ -44,13 +41,19 @@ def sp_trusted_prefixes() -> str:
 
 
 class ProxyAutomation(TerraformAutomation):
-    subgroup_max = math.inf
+    subgroup_members_max = sys.maxsize
     """
     Maximum number of proxies to deploy per sub-group. This is required for some providers
     where the number of origins per group may exceed the number of proxies that can be
     configured in a single "configuration block", e.g. Azure CDN's profiles.
     """
 
+    subgroup_count_max = sys.maxsize
+    """
+    Maximum number of subgroups that can be deployed. This is required for some providers where
+    the total number of subgroups is limited by a quota, e.g. Azure CDN's profiles.
+    """
+
     template: str
     """
     Terraform configuration template using Jinja 2.
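A minimal sketch (not part of this commit) of how a provider subclass might use these two limits; the class name and quota values below are invented for illustration:

class AzureCdnLikeAutomation(ProxyAutomation):
    # Hypothetical Azure-CDN-style quotas: 25 endpoints per profile and
    # 200 profiles per subscription. Both values are assumptions.
    subgroup_members_max = 25
    subgroup_count_max = 200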
@@ -67,83 +70,11 @@ class ProxyAutomation(TerraformAutomation):
     Whether this provider supports "smart" proxies.
     """
 
-    def get_subgroups(self) -> Dict[int, Dict[int, int]]:
-        conn = db.engine.connect()
-        result = conn.execute(text("""
-            SELECT origin.group_id, proxy.psg, COUNT(proxy.id) FROM proxy, origin
-            WHERE proxy.origin_id = origin.id
-            AND proxy.destroyed IS NULL
-            AND proxy.provider = :provider
-            GROUP BY origin.group_id, proxy.psg;
-        """), provider=self.provider)
-        subgroups: Dict[int, Dict[int, int]] = defaultdict(lambda: defaultdict(lambda: 0))
-        for row in result:
-            subgroups[row[0]][row[1]] = row[2]
-        return subgroups
-
-    def create_missing_proxies(self) -> None:
-        groups = Group.query.all()
-        subgroups = self.get_subgroups()
-        for group in groups:
-            subgroup = 0
-            for origin in group.origins:
-                if origin.destroyed is not None:
-                    continue
-                while True:
-                    if subgroups[group.id][subgroup] >= self.subgroup_max:
-                        subgroup += 1
-                    else:
-                        break
-                proxies = [
-                    x for x in origin.proxies
-                    if x.provider == self.provider and x.deprecated is None and x.destroyed is None
-                ]
-                if not proxies:
-                    subgroups[group.id][subgroup] += 1
-                    proxy = Proxy()
-                    proxy.origin_id = origin.id
-                    proxy.provider = self.provider
-                    proxy.psg = subgroup
-                    # The random usage below is good enough for its purpose: to create a slug that
-                    # hasn't been used before.
-                    proxy.slug = tldextract.extract(origin.domain_name).domain[:5] + ''.join(
-                        random.choices(string.ascii_lowercase, k=12))  # nosec
-                    proxy.added = datetime.datetime.utcnow()
-                    proxy.updated = datetime.datetime.utcnow()
-                    db.session.add(proxy)
-        db.session.commit()
-
-    def deprecate_orphaned_proxies(self) -> None:
-        proxies = Proxy.query.filter(
-            Proxy.deprecated.is_(None),
-            Proxy.destroyed.is_(None),
-            Proxy.provider == self.provider
-        ).all()
-        for proxy in proxies:
-            if proxy.origin.destroyed is not None:
-                proxy.deprecate(reason="origin_destroyed")
-        db.session.commit()
-
-    def destroy_expired_proxies(self) -> None:
-        cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=3)
-        proxies = Proxy.query.filter(
-            Proxy.destroyed.is_(None),
-            Proxy.provider == self.provider,
-            Proxy.deprecated < cutoff
-        ).all()
-        for proxy in proxies:
-            proxy.destroyed = datetime.datetime.utcnow()
-            proxy.updated = datetime.datetime.utcnow()
-        db.session.commit()
-
     @abstractmethod
     def import_state(self, state: Any) -> None:
         raise NotImplementedError()
 
     def tf_prehook(self) -> Optional[Any]:  # pylint: disable=useless-return
-        self.create_missing_proxies()
-        self.deprecate_orphaned_proxies()
-        self.destroy_expired_proxies()
         return None
 
     def tf_posthook(self, *, prehook_result: Any = None) -> None:
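The defaultdict-of-defaultdicts returned by get_subgroups() means callers can probe any (group, subgroup) pair without a KeyError; subgroups with no live proxies simply count as 0. A self-contained sketch of that structure, with invented row data standing in for the SQL result:

from collections import defaultdict
from typing import Dict

# Stand-in for the query result: (group_id, psg, live proxy count) tuples.
rows = [(1, 0, 25), (1, 1, 3), (2, 0, 7)]

subgroups: Dict[int, Dict[int, int]] = defaultdict(lambda: defaultdict(lambda: 0))
for group_id, psg, count in rows:
    subgroups[group_id][psg] = count

assert subgroups[1][1] == 3
assert subgroups[2][5] == 0  # an unseen subgroup defaults to 0 rather than raising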
@@ -223,3 +154,37 @@ class ProxyAutomation(TerraformAutomation):
             provider=self.provider,
             origins=group_origins,
             smart_zone=app.config['SMART_ZONE'])
+
+    @classmethod
+    def get_subgroups(cls) -> Dict[int, Dict[int, int]]:
+        conn = db.engine.connect()
+        result = conn.execute(text("""
+            SELECT origin.group_id, proxy.psg, COUNT(proxy.id) FROM proxy, origin
+            WHERE proxy.origin_id = origin.id
+            AND proxy.destroyed IS NULL
+            AND proxy.provider = :provider
+            GROUP BY origin.group_id, proxy.psg;
+        """), provider=cls.provider)
+        subgroups: Dict[int, Dict[int, int]] = defaultdict(lambda: defaultdict(lambda: 0))
+        for row in result:
+            subgroups[row[0]][row[1]] = row[2]
+        return subgroups
+
+    @classmethod
+    def next_subgroup(cls, group_id: int) -> Optional[int]:
+        conn = db.engine.connect()
+        result = conn.execute(text("""
+            SELECT proxy.psg, COUNT(proxy.id) FROM proxy, origin
+            WHERE proxy.origin_id = origin.id
+            AND proxy.destroyed IS NULL
+            AND origin.group_id = :group_id
+            AND proxy.provider = :provider
+            GROUP BY proxy.psg ORDER BY proxy.psg;
+        """), provider=cls.short_name, group_id=group_id)
+        subgroups = {
+            row[0]: row[1] for row in result
+        }
+        for subgroup in range(0, cls.subgroup_count_max):
+            if subgroups.get(subgroup, 0) < cls.subgroup_members_max:
+                return subgroup
+        return None
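The new next_subgroup() scans subgroup indices in ascending order and returns the first one with spare capacity, or None once every subgroup allowed by the quota is full. The same loop in a self-contained sketch, with invented limits and counts:

subgroup_members_max = 3   # assumed per-subgroup capacity
subgroup_count_max = 4     # assumed subgroup quota

# Stand-in for the aggregated query result: psg -> live proxy count.
counts = {0: 3, 1: 3, 2: 1}

def next_subgroup():
    for subgroup in range(subgroup_count_max):
        if counts.get(subgroup, 0) < subgroup_members_max:
            return subgroup  # first subgroup with room for another proxy
    return None  # provider quota exhausted

assert next_subgroup() == 2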