resource pool system
This commit is contained in:
parent
dc989dd7cb
commit
16f7e2199d
19 changed files with 382 additions and 105 deletions
|
@ -28,6 +28,7 @@ from app.terraform.bridge.ovh import BridgeOvhAutomation
|
|||
from app.terraform.list.github import ListGithubAutomation
|
||||
from app.terraform.list.gitlab import ListGitlabAutomation
|
||||
from app.terraform.list.s3 import ListS3Automation
|
||||
from app.terraform.proxy.meta import ProxyMetaAutomation
|
||||
from app.terraform.proxy.azure_cdn import ProxyAzureCdnAutomation
|
||||
from app.terraform.proxy.cloudfront import ProxyCloudfrontAutomation
|
||||
from app.terraform.proxy.fastly import ProxyFastlyAutomation
|
||||
|
@ -56,7 +57,8 @@ jobs = {
|
|||
ListS3Automation,
|
||||
ProxyAzureCdnAutomation,
|
||||
ProxyCloudfrontAutomation,
|
||||
ProxyFastlyAutomation
|
||||
ProxyFastlyAutomation,
|
||||
ProxyMetaAutomation
|
||||
]
|
||||
}
|
||||
|
||||
|
|
|
@ -14,6 +14,7 @@ class Group(AbstractConfiguration):
|
|||
eotks = db.relationship("Eotk", back_populates="group")
|
||||
onions = db.relationship("Onion", back_populates="group")
|
||||
smart_proxies = db.relationship("SmartProxy", back_populates="group")
|
||||
pools = db.relationship("Pool", secondary="pool_group", back_populates="groups")
|
||||
|
||||
@classmethod
|
||||
def csv_header(cls) -> List[str]:
|
||||
|
@ -22,7 +23,27 @@ class Group(AbstractConfiguration):
|
|||
]
|
||||
|
||||
|
||||
class Pool(AbstractConfiguration):
    """A named pool of resources shared between origin groups.

    A pool aggregates proxies and distribution lists, and is linked to
    groups via the pool_group association table.
    """

    # Short unique display name for the pool (shown in the portal UI).
    pool_name = db.Column(db.String(80), unique=True, nullable=False)

    @classmethod
    def csv_header(cls) -> List[str]:
        # Extend the base configuration's CSV columns with the pool name.
        return super().csv_header() + [
            "pool_name"
        ]

    # Resources assigned to this pool.
    proxies = db.relationship("Proxy", back_populates="pool")
    lists = db.relationship("MirrorList", back_populates="pool")
    # Many-to-many link to groups through the pool_group table.
    groups = db.relationship("Group", secondary="pool_group", back_populates="pools")
|
||||
|
||||
|
||||
class PoolGroup(db.Model):  # type: ignore[misc]
    """Association table for the many-to-many Pool <-> Group relationship."""
    # Composite primary key: one row per (pool, group) pairing.
    pool_id = db.Column(db.Integer, db.ForeignKey("pool.id"), primary_key=True)
    group_id = db.Column(db.Integer, db.ForeignKey("group.id"), primary_key=True)
|
||||
|
||||
|
||||
class MirrorList(AbstractConfiguration):
|
||||
pool_id = db.Column(db.Integer, db.ForeignKey("pool.id"))
|
||||
provider = db.Column(db.String(255), nullable=False)
|
||||
format = db.Column(db.String(20), nullable=False)
|
||||
encoding = db.Column(db.String(20), nullable=False)
|
||||
|
@ -31,6 +52,8 @@ class MirrorList(AbstractConfiguration):
|
|||
role = db.Column(db.String(255), nullable=True)
|
||||
filename = db.Column(db.String(255), nullable=False)
|
||||
|
||||
pool = db.relationship("Pool", back_populates="lists")
|
||||
|
||||
providers_supported = {
|
||||
"github": "GitHub",
|
||||
"gitlab": "GitLab",
|
||||
|
|
|
@ -50,6 +50,7 @@ class Origin(AbstractConfiguration):
|
|||
|
||||
class Proxy(AbstractResource):
|
||||
origin_id = db.Column(db.Integer, db.ForeignKey("origin.id"), nullable=False)
|
||||
pool_id = db.Column(db.Integer, db.ForeignKey("pool.id"))
|
||||
provider = db.Column(db.String(20), nullable=False)
|
||||
psg = db.Column(db.Integer, nullable=True)
|
||||
slug = db.Column(db.String(20), nullable=True)
|
||||
|
@ -57,6 +58,7 @@ class Proxy(AbstractResource):
|
|||
url = db.Column(db.String(255), nullable=True)
|
||||
|
||||
origin = db.relationship("Origin", back_populates="proxies")
|
||||
pool = db.relationship("Pool", back_populates="proxies")
|
||||
|
||||
@property
|
||||
def brn(self) -> BRN:
|
||||
|
|
|
@ -21,6 +21,7 @@ from app.portal.group import bp as group
|
|||
from app.portal.list import bp as list_
|
||||
from app.portal.origin import bp as origin
|
||||
from app.portal.onion import bp as onion
|
||||
from app.portal.pool import bp as pool
|
||||
from app.portal.proxy import bp as proxy
|
||||
from app.portal.smart_proxy import bp as smart_proxy
|
||||
from app.portal.webhook import bp as webhook
|
||||
|
@ -34,6 +35,7 @@ portal.register_blueprint(group, url_prefix="/group")
|
|||
portal.register_blueprint(list_, url_prefix="/list")
|
||||
portal.register_blueprint(origin, url_prefix="/origin")
|
||||
portal.register_blueprint(onion, url_prefix="/onion")
|
||||
portal.register_blueprint(pool, url_prefix="/pool")
|
||||
portal.register_blueprint(proxy, url_prefix="/proxy")
|
||||
portal.register_blueprint(smart_proxy, url_prefix="/smart")
|
||||
portal.register_blueprint(webhook, url_prefix="/webhook")
|
||||
|
|
|
@ -74,6 +74,7 @@ def automation_kick(automation_id: int) -> ResponseReturnValue:
|
|||
return view_lifecycle(
|
||||
header="Kick automation timer?",
|
||||
message=automation.description,
|
||||
section="automation",
|
||||
success_view="portal.automation.automation_list",
|
||||
success_message="This automation job will next run within 1 minute.",
|
||||
resource=automation,
|
||||
|
|
|
@ -6,8 +6,3 @@ class EditMirrorForm(FlaskForm): # type: ignore
|
|||
origin = SelectField('Origin')
|
||||
url = StringField('URL')
|
||||
submit = SubmitField('Save Changes')
|
||||
|
||||
|
||||
class EditProxyForm(FlaskForm): # type: ignore
|
||||
origin = SelectField('Origin')
|
||||
submit = SubmitField('Save Changes')
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import json
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from typing import Optional, Any
|
||||
|
||||
from flask import render_template, url_for, flash, redirect, Blueprint, Response
|
||||
from flask.typing import ResponseReturnValue
|
||||
|
@ -13,7 +13,7 @@ from app.extensions import db
|
|||
from app.lists.bc2 import mirror_sites
|
||||
from app.lists.bridgelines import bridgelines
|
||||
from app.lists.mirror_mapping import mirror_mapping
|
||||
from app.models.base import MirrorList
|
||||
from app.models.base import MirrorList, Pool
|
||||
from app.portal.util import response_404, view_lifecycle
|
||||
|
||||
bp = Blueprint("list", __name__)
|
||||
|
@ -96,6 +96,7 @@ def list_new(group_id: Optional[int] = None) -> ResponseReturnValue:
|
|||
form.encoding.choices = list(MirrorList.encodings_supported.items())
|
||||
if form.validate_on_submit():
|
||||
list_ = MirrorList()
|
||||
list_.pool_id = form.pool.data
|
||||
list_.provider = form.provider.data
|
||||
list_.format = form.format.data
|
||||
list_.encoding = form.encoding.data
|
||||
|
@ -122,6 +123,7 @@ def list_new(group_id: Optional[int] = None) -> ResponseReturnValue:
|
|||
|
||||
|
||||
class NewMirrorListForm(FlaskForm): # type: ignore
|
||||
pool = SelectField('Resource Pool', validators=[DataRequired()])
|
||||
provider = SelectField('Provider', validators=[DataRequired()])
|
||||
format = SelectField('Distribution Method', validators=[DataRequired()])
|
||||
encoding = SelectField('Encoding', validators=[DataRequired()])
|
||||
|
@ -136,6 +138,12 @@ class NewMirrorListForm(FlaskForm): # type: ignore
|
|||
filename = StringField('Filename', validators=[DataRequired()])
|
||||
submit = SubmitField('Save Changes')
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.pool.choices = [
|
||||
(pool.id, pool.pool_name) for pool in Pool.query.all()
|
||||
]
|
||||
|
||||
|
||||
@bp.route('/edit/<list_id>', methods=['GET', 'POST'])
|
||||
def list_edit(list_id: int) -> ResponseReturnValue:
|
||||
|
@ -160,6 +168,7 @@ def list_edit(list_id: int) -> ResponseReturnValue:
|
|||
form.format.choices = list(MirrorList.formats_supported.items())
|
||||
form.encoding.choices = list(MirrorList.encodings_supported.items())
|
||||
if form.validate_on_submit():
|
||||
list_.pool_id = form.pool.data
|
||||
list_.provider = form.provider.data
|
||||
list_.format = form.format.data
|
||||
list_.encoding = form.encoding.data
|
||||
|
|
78
app/portal/pool.py
Normal file
78
app/portal/pool.py
Normal file
|
@ -0,0 +1,78 @@
|
|||
from datetime import datetime
|
||||
|
||||
from flask import render_template, url_for, flash, redirect, Response, Blueprint
|
||||
from flask.typing import ResponseReturnValue
|
||||
from flask_wtf import FlaskForm
|
||||
import sqlalchemy
|
||||
from wtforms import StringField, SubmitField
|
||||
from wtforms.validators import DataRequired
|
||||
|
||||
from app.extensions import db
|
||||
from app.models.base import Pool
|
||||
|
||||
bp = Blueprint("pool", __name__)
|
||||
|
||||
|
||||
class NewPoolForm(FlaskForm):  # type: ignore
    """Form for creating a new resource pool."""
    # NOTE: despite the name, this field holds the pool's short name —
    # pool_new() copies form.group_name.data into Pool.pool_name.
    group_name = StringField("Short Name", validators=[DataRequired()])
    description = StringField("Description", validators=[DataRequired()])
    submit = SubmitField('Save Changes', render_kw={"class": "btn btn-success"})
|
||||
|
||||
|
||||
class EditPoolForm(FlaskForm):  # type: ignore
    """Form for editing an existing resource pool.

    Only the description is editable; the pool name is fixed at creation.
    """
    description = StringField("Description", validators=[DataRequired()])
    submit = SubmitField('Save Changes', render_kw={"class": "btn btn-success"})
|
||||
|
||||
|
||||
@bp.route("/list")
def pool_list() -> ResponseReturnValue:
    """Render the table of all resource pools, ordered by name."""
    all_pools = Pool.query.order_by(Pool.pool_name).all()
    context = {
        "section": "pool",
        "title": "Resource Pools",
        "item": "pool",
        "items": all_pools,
        "new_link": url_for("portal.pool.pool_new"),
    }
    return render_template("list.html.j2", **context)
|
||||
|
||||
|
||||
@bp.route("/new", methods=['GET', 'POST'])
def pool_new() -> ResponseReturnValue:
    """Create a new resource pool.

    On GET (or an invalid POST) render the creation form; on a valid POST
    persist the new pool and redirect to its edit page. Database failures
    flash an error and redirect back to the pool list.
    """
    form = NewPoolForm()
    if form.validate_on_submit():
        pool = Pool()
        # The form's "group_name" field holds the pool's short name.
        pool.pool_name = form.group_name.data
        pool.description = form.description.data
        # BUG FIX: the timestamp column is "added" (see the pool table
        # migration and e.g. proxy.added elsewhere), not "created";
        # assigning a non-column "created" attribute left the NOT NULL
        # "added" column unset.
        pool.added = datetime.utcnow()
        pool.updated = datetime.utcnow()
        try:
            db.session.add(pool)
            db.session.commit()
            flash(f"Created new pool {pool.pool_name}.", "success")
            return redirect(url_for("portal.pool.pool_edit", pool_id=pool.id))
        except sqlalchemy.exc.SQLAlchemyError:
            flash("Failed to create new pool.", "danger")
            return redirect(url_for("portal.pool.pool_list"))
    return render_template("new.html.j2", section="pool", form=form)
|
||||
|
||||
|
||||
@bp.route('/edit/<pool_id>', methods=['GET', 'POST'])
def pool_edit(pool_id: int) -> ResponseReturnValue:
    """Show and process the edit form for a single resource pool."""
    pool = Pool.query.filter(Pool.id == pool_id).first()
    if pool is None:
        # Unknown pool id: render the shared error page as a 404.
        error_page = render_template("error.html.j2",
                                     section="pool",
                                     header="404 Pool Not Found",
                                     message="The requested pool could not be found.")
        return Response(error_page, status=404)
    form = EditPoolForm(description=pool.description)
    if form.validate_on_submit():
        # Apply the (only) editable field and bump the update timestamp.
        pool.description = form.description.data
        pool.updated = datetime.utcnow()
        try:
            db.session.commit()
            flash("Saved changes to pool.", "success")
        except sqlalchemy.exc.SQLAlchemyError:
            flash("An error occurred saving the changes to the pool.", "danger")
    return render_template("pool.html.j2",
                           section="pool",
                           pool=pool, form=form)
|
|
@ -87,6 +87,12 @@
|
|||
{{ icon("collection") }} Groups
|
||||
</a>
|
||||
</li>
|
||||
<li class="nav-item">
|
||||
<a class="nav-link{% if section == "pool" %} active{% endif %}"
|
||||
href="{{ url_for("portal.pool.pool_list") }}">
|
||||
{{ icon("stack") }} Resource Pools
|
||||
</a>
|
||||
</li>
|
||||
<li class="nav-item">
|
||||
<a class="nav-link{% if section == "origin" %} active{% endif %}"
|
||||
href="{{ url_for("portal.origin.origin_list") }}">
|
||||
|
|
|
@ -98,5 +98,10 @@
|
|||
<path d="M1.333 6.334v3C1.333 10.805 4.318 12 8 12s6.667-1.194 6.667-2.667V6.334a6.51 6.51 0 0 1-1.458.79C11.81 7.684 9.967 8 8 8c-1.966 0-3.809-.317-5.208-.876a6.508 6.508 0 0 1-1.458-.79z"/>
|
||||
<path d="M14.667 11.668a6.51 6.51 0 0 1-1.458.789c-1.4.56-3.242.876-5.21.876-1.966 0-3.809-.316-5.208-.876a6.51 6.51 0 0 1-1.458-.79v1.666C1.333 14.806 4.318 16 8 16s6.667-1.194 6.667-2.667v-1.665z"/>
|
||||
</svg>
|
||||
{% elif i == "stack" %}
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-stack" viewBox="0 0 16 16">
|
||||
<path d="m14.12 10.163 1.715.858c.22.11.22.424 0 .534L8.267 15.34a.598.598 0 0 1-.534 0L.165 11.555a.299.299 0 0 1 0-.534l1.716-.858 5.317 2.659c.505.252 1.1.252 1.604 0l5.317-2.66zM7.733.063a.598.598 0 0 1 .534 0l7.568 3.784a.3.3 0 0 1 0 .535L8.267 8.165a.598.598 0 0 1-.534 0L.165 4.382a.299.299 0 0 1 0-.535L7.733.063z"/>
|
||||
<path d="m14.12 6.576 1.715.858c.22.11.22.424 0 .534l-7.568 3.784a.598.598 0 0 1-.534 0L.165 7.968a.299.299 0 0 1 0-.534l1.716-.858 5.317 2.659c.505.252 1.1.252 1.604 0l5.317-2.659z"/>
|
||||
</svg>
|
||||
{% endif %}
|
||||
{% endmacro %}
|
|
@ -1,7 +1,7 @@
|
|||
{% extends "base.html.j2" %}
|
||||
{% from "tables.html.j2" import alarms_table, automations_table, bridgeconfs_table, bridges_table, eotk_table,
|
||||
groups_table, instances_table, mirrorlists_table, origins_table, origin_onion_table, onions_table, proxies_table,
|
||||
webhook_table %}
|
||||
groups_table, instances_table, mirrorlists_table, origins_table, origin_onion_table, onions_table, pools_table,
|
||||
proxies_table, webhook_table %}
|
||||
|
||||
{% block content %}
|
||||
<h1 class="h2 mt-3">{{ title }}</h1>
|
||||
|
@ -33,6 +33,8 @@
|
|||
{% endif %}
|
||||
{% elif item == "origin" %}
|
||||
{{ origins_table(items) }}
|
||||
{% elif item == "pool" %}
|
||||
{{ pools_table(items) }}
|
||||
{% elif item == "proxy" %}
|
||||
{{ proxies_table(items) }}
|
||||
{% elif item == "smart proxy" %}
|
||||
|
|
21
app/portal/templates/pool.html.j2
Normal file
21
app/portal/templates/pool.html.j2
Normal file
|
@ -0,0 +1,21 @@
|
|||
{% extends "base.html.j2" %}
|
||||
{% from 'bootstrap5/form.html' import render_form %}
|
||||
{% from "tables.html.j2" import groups_table, mirrorlists_table, proxies_table %}
|
||||
|
||||
{% block content %}
|
||||
<h1 class="h2 mt-3">Resource Pool</h1>
|
||||
<h2 class="h3">{{ pool.pool_name }}</h2>
|
||||
|
||||
<div style="border: 1px solid #666;" class="p-3">
|
||||
{{ render_form(form) }}
|
||||
</div>
|
||||
|
||||
<h3>Groups</h3>
|
||||
{{ groups_table(pool.groups) }}
|
||||
|
||||
<h3>Distribution Lists</h3>
|
||||
{{ mirrorlists_table(pool.lists) }}
|
||||
|
||||
<h3>Simple Proxies</h3>
|
||||
{{ proxies_table(pool.proxies) }}
|
||||
{% endblock %}
|
|
@ -334,6 +334,29 @@
|
|||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
{% macro pools_table(pools) %}
{# Renders the resource pool listing (name, description, view/edit link).
   Expects an iterable of Pool model instances. #}
<div class="table-responsive">
  <table class="table table-striped table-sm">
    <thead>
      <tr>
        <th scope="col">Name</th>
        <th scope="col">Description</th>
        <th scope="col">Actions</th>
      </tr>
    </thead>
    <tbody>
    {% for pool in pools %}
      <tr>
        <td>{{ pool.pool_name }}</td>
        <td>{{ pool.description }}</td>
        <td><a href="{{ url_for("portal.pool.pool_edit", pool_id=pool.id) }}" class="btn btn-primary btn-sm">View/Edit</a></td>
      </tr>
    {% endfor %}
    </tbody>
  </table>
</div>
{% endmacro %}
|
||||
|
||||
{% macro proxies_table(proxies) %}
|
||||
<div class="table-responsive">
|
||||
<table class="table table-striped table-sm">
|
||||
|
|
|
@ -1,14 +1,11 @@
|
|||
import os.path
|
||||
import sys
|
||||
from abc import abstractmethod
|
||||
from collections import defaultdict
|
||||
import datetime
|
||||
import math
|
||||
import string
|
||||
import random
|
||||
from typing import Dict, Optional, Any, List
|
||||
from collections import defaultdict
|
||||
from typing import Optional, Any, List, Dict
|
||||
|
||||
from sqlalchemy import text
|
||||
from tldextract import tldextract
|
||||
|
||||
from app import app
|
||||
from app.extensions import db
|
||||
|
@ -44,13 +41,19 @@ def sp_trusted_prefixes() -> str:
|
|||
|
||||
|
||||
class ProxyAutomation(TerraformAutomation):
|
||||
subgroup_max = math.inf
|
||||
subgroup_members_max = sys.maxsize
|
||||
"""
|
||||
Maximum number of proxies to deploy per sub-group. This is required for some providers
|
||||
where the number origins per group may exceed the number of proxies that can be configured
|
||||
in a single "configuration block", e.g. Azure CDN's profiles.
|
||||
"""
|
||||
|
||||
subgroup_count_max = sys.maxsize
|
||||
"""
|
||||
Maximum number of subgroups that can be deployed. This is required for some providers where
|
||||
the total number of subgroups is limited by a quota, e.g. Azure CDN's profiles.
|
||||
"""
|
||||
|
||||
template: str
|
||||
"""
|
||||
Terraform configuration template using Jinja 2.
|
||||
|
@ -67,83 +70,11 @@ class ProxyAutomation(TerraformAutomation):
|
|||
Whether this provider supports "smart" proxies.
|
||||
"""
|
||||
|
||||
def get_subgroups(self) -> Dict[int, Dict[int, int]]:
|
||||
conn = db.engine.connect()
|
||||
result = conn.execute(text("""
|
||||
SELECT origin.group_id, proxy.psg, COUNT(proxy.id) FROM proxy, origin
|
||||
WHERE proxy.origin_id = origin.id
|
||||
AND proxy.destroyed IS NULL
|
||||
AND proxy.provider = :provider
|
||||
GROUP BY origin.group_id, proxy.psg;
|
||||
"""), provider=self.provider)
|
||||
subgroups: Dict[int, Dict[int, int]] = defaultdict(lambda: defaultdict(lambda: 0))
|
||||
for row in result:
|
||||
subgroups[row[0]][row[1]] = row[2]
|
||||
return subgroups
|
||||
|
||||
def create_missing_proxies(self) -> None:
|
||||
groups = Group.query.all()
|
||||
subgroups = self.get_subgroups()
|
||||
for group in groups:
|
||||
subgroup = 0
|
||||
for origin in group.origins:
|
||||
if origin.destroyed is not None:
|
||||
continue
|
||||
while True:
|
||||
if subgroups[group.id][subgroup] >= self.subgroup_max:
|
||||
subgroup += 1
|
||||
else:
|
||||
break
|
||||
proxies = [
|
||||
x for x in origin.proxies
|
||||
if x.provider == self.provider and x.deprecated is None and x.destroyed is None
|
||||
]
|
||||
if not proxies:
|
||||
subgroups[group.id][subgroup] += 1
|
||||
proxy = Proxy()
|
||||
proxy.origin_id = origin.id
|
||||
proxy.provider = self.provider
|
||||
proxy.psg = subgroup
|
||||
# The random usage below is good enough for its purpose: to create a slug that
|
||||
# hasn't been used before.
|
||||
proxy.slug = tldextract.extract(origin.domain_name).domain[:5] + ''.join(
|
||||
random.choices(string.ascii_lowercase, k=12)) # nosec
|
||||
proxy.added = datetime.datetime.utcnow()
|
||||
proxy.updated = datetime.datetime.utcnow()
|
||||
db.session.add(proxy)
|
||||
db.session.commit()
|
||||
|
||||
def deprecate_orphaned_proxies(self) -> None:
|
||||
proxies = Proxy.query.filter(
|
||||
Proxy.deprecated.is_(None),
|
||||
Proxy.destroyed.is_(None),
|
||||
Proxy.provider == self.provider
|
||||
).all()
|
||||
for proxy in proxies:
|
||||
if proxy.origin.destroyed is not None:
|
||||
proxy.deprecate(reason="origin_destroyed")
|
||||
db.session.commit()
|
||||
|
||||
def destroy_expired_proxies(self) -> None:
|
||||
cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=3)
|
||||
proxies = Proxy.query.filter(
|
||||
Proxy.destroyed.is_(None),
|
||||
Proxy.provider == self.provider,
|
||||
Proxy.deprecated < cutoff
|
||||
).all()
|
||||
for proxy in proxies:
|
||||
proxy.destroyed = datetime.datetime.utcnow()
|
||||
proxy.updated = datetime.datetime.utcnow()
|
||||
db.session.commit()
|
||||
|
||||
@abstractmethod
|
||||
def import_state(self, state: Any) -> None:
|
||||
raise NotImplementedError()
|
||||
|
||||
def tf_prehook(self) -> Optional[Any]: # pylint: disable=useless-return
|
||||
self.create_missing_proxies()
|
||||
self.deprecate_orphaned_proxies()
|
||||
self.destroy_expired_proxies()
|
||||
return None
|
||||
|
||||
def tf_posthook(self, *, prehook_result: Any = None) -> None:
|
||||
|
@ -223,3 +154,37 @@ class ProxyAutomation(TerraformAutomation):
|
|||
provider=self.provider,
|
||||
origins=group_origins,
|
||||
smart_zone=app.config['SMART_ZONE'])
|
||||
|
||||
@classmethod
def get_subgroups(cls) -> Dict[int, Dict[int, int]]:
    """Count live (not destroyed) proxies for this provider, keyed by
    origin group id and then by proxy subgroup (psg).

    Returns a nested defaultdict so absent groups/subgroups read as 0.
    """
    conn = db.engine.connect()
    result = conn.execute(text("""
        SELECT origin.group_id, proxy.psg, COUNT(proxy.id) FROM proxy, origin
        WHERE proxy.origin_id = origin.id
        AND proxy.destroyed IS NULL
        AND proxy.provider = :provider
        GROUP BY origin.group_id, proxy.psg;
    """), provider=cls.provider)
    subgroups: Dict[int, Dict[int, int]] = defaultdict(lambda: defaultdict(lambda: 0))
    for row in result:
        # row is (group_id, psg, count)
        subgroups[row[0]][row[1]] = row[2]
    return subgroups
|
||||
|
||||
@classmethod
def next_subgroup(cls, group_id: int) -> Optional[int]:
    """Return the lowest-numbered subgroup for this provider and group
    that still has capacity (fewer than ``subgroup_members_max`` live
    proxies), or None once ``subgroup_count_max`` subgroups are full.
    """
    conn = db.engine.connect()
    # BUG FIX: bind cls.provider, not cls.short_name — the proxy.provider
    # column stores the provider string (see get_subgroups and proxy
    # creation, which assign self.provider/provider.provider). Filtering
    # on short_name matched no rows, making every subgroup appear empty.
    result = conn.execute(text("""
        SELECT proxy.psg, COUNT(proxy.id) FROM proxy, origin
        WHERE proxy.origin_id = origin.id
        AND proxy.destroyed IS NULL
        AND origin.group_id = :group_id
        AND proxy.provider = :provider
        GROUP BY proxy.psg ORDER BY proxy.psg;
    """), provider=cls.provider, group_id=group_id)
    subgroups = {
        row[0]: row[1] for row in result
    }
    for subgroup in range(0, cls.subgroup_count_max):
        if subgroups.get(subgroup, 0) < cls.subgroup_members_max:
            return subgroup
    return None
|
||||
|
|
|
@ -9,7 +9,7 @@ class ProxyAzureCdnAutomation(ProxyAutomation):
|
|||
short_name = "proxy_azure_cdn"
|
||||
description = "Deploy proxies to Azure CDN"
|
||||
provider = "azure_cdn"
|
||||
subgroup_max = 25
|
||||
subgroup_members_max = 25
|
||||
parallelism = 1
|
||||
|
||||
template_parameters = [
|
||||
|
@ -125,21 +125,12 @@ class ProxyAzureCdnAutomation(ProxyAutomation):
|
|||
location = "{{ azure_location }}"
|
||||
resource_group_name = data.azurerm_resource_group.this.name
|
||||
|
||||
{% if proxy.origin.smart %}
|
||||
origin_host_header = "origin-{{ proxy.origin.id }}.cloudfront.smart.{{ smart_zone[:-1] }}"
|
||||
|
||||
origin {
|
||||
name = "upstream"
|
||||
host_name = "origin-{{ proxy.origin.id }}.cloudfront.smart.{{ smart_zone[:-1] }}"
|
||||
}
|
||||
{% else %}
|
||||
origin_host_header = "{{ proxy.origin.domain_name }}"
|
||||
|
||||
origin {
|
||||
name = "upstream"
|
||||
host_name = "{{ proxy.origin.domain_name }}"
|
||||
}
|
||||
{% endif %}
|
||||
|
||||
global_delivery_rule {
|
||||
modify_request_header_action {
|
||||
|
|
|
@ -11,6 +11,7 @@ class ProxyFastlyAutomation(ProxyAutomation):
|
|||
short_name = "proxy_fastly"
|
||||
description = "Deploy proxies to Fastly"
|
||||
provider = "fastly"
|
||||
subgroup_members_max = 20
|
||||
|
||||
template_parameters = [
|
||||
"aws_access_key",
|
||||
|
|
|
@ -64,10 +64,10 @@ def all_cdn_prefixes() -> Iterable[str]:
|
|||
aws = AWS()
|
||||
prefixes.update(aws.ipv4_ranges)
|
||||
prefixes.update(aws.ipv6_ranges)
|
||||
azure = AzureFrontDoorBackend()
|
||||
prefixes.update(azure.ipv4_ranges)
|
||||
prefixes.update(azure.ipv6_ranges)
|
||||
fastly = Fastly()
|
||||
prefixes.update(fastly.ipv4_ranges)
|
||||
prefixes.update(fastly.ipv6_ranges)
|
||||
# azure = AzureFrontDoorBackend()
|
||||
# prefixes.update(azure.ipv4_ranges)
|
||||
# prefixes.update(azure.ipv6_ranges)
|
||||
# fastly = Fastly()
|
||||
# prefixes.update(fastly.ipv4_ranges)
|
||||
# prefixes.update(fastly.ipv6_ranges)
|
||||
return [str(p) for p in prefixes]
|
||||
|
|
90
app/terraform/proxy/meta.py
Normal file
90
app/terraform/proxy/meta.py
Normal file
|
@ -0,0 +1,90 @@
|
|||
import datetime
|
||||
import logging
|
||||
import random
|
||||
import string
|
||||
from typing import Tuple, List
|
||||
|
||||
from tldextract import tldextract
|
||||
|
||||
from app import db
|
||||
from app.models.base import Pool
|
||||
from app.models.mirrors import Proxy, Origin
|
||||
from app.terraform import BaseAutomation
|
||||
from app.terraform.proxy.azure_cdn import ProxyAzureCdnAutomation
|
||||
from app.terraform.proxy.cloudfront import ProxyCloudfrontAutomation
|
||||
from app.terraform.proxy.fastly import ProxyFastlyAutomation
|
||||
|
||||
PROXY_PROVIDERS = {p.short_name: p for p in [ # In order of preference
|
||||
ProxyCloudfrontAutomation,
|
||||
ProxyFastlyAutomation,
|
||||
ProxyAzureCdnAutomation
|
||||
]}
|
||||
|
||||
|
||||
def create_proxy(pool: Pool, origin: Origin) -> bool:
    """Create (but do not commit) a Proxy for *origin* in *pool* on the
    first provider with spare capacity.

    Providers are tried in order of preference, in two passes: on the
    first pass smart-capable providers are reserved for smart origins;
    on the second ("desperate") pass any capable provider is used.

    Returns True if a proxy was added to the session, False if no
    provider had capacity. The caller is responsible for committing.
    """
    for desperate in [False, True]:
        for provider in PROXY_PROVIDERS.values():
            if origin.smart and not provider.smart_proxies:
                continue  # This origin cannot be supported on this provider
            if provider.smart_proxies and not (desperate or origin.smart):
                continue  # Keep smart-capable capacity in reserve unless desperate
            next_subgroup = provider.next_subgroup(origin.group_id)
            if next_subgroup is None:
                continue  # Provider's subgroup quota is exhausted for this group
            proxy = Proxy()
            proxy.pool_id = pool.id
            proxy.origin_id = origin.id
            proxy.provider = provider.provider
            # BUG FIX: reuse the subgroup computed above rather than calling
            # provider.next_subgroup() a second time — the original issued a
            # redundant, identical database query here.
            proxy.psg = next_subgroup
            # The random usage below is good enough for its purpose: to create a slug that
            # hasn't been used recently.
            proxy.slug = tldextract.extract(origin.domain_name).domain[:5] + ''.join(
                random.choices(string.ascii_lowercase, k=12))  # nosec
            proxy.added = datetime.datetime.utcnow()
            proxy.updated = datetime.datetime.utcnow()
            logging.debug("Creating proxy %s", proxy)
            db.session.add(proxy)
            return True
    return False
|
||||
|
||||
|
||||
class ProxyMetaAutomation(BaseAutomation):
    """Housekeeping job for proxies across all pools and providers.

    Each run destroys expired proxies, deprecates proxies whose origin is
    gone or whose provider can no longer serve it, and creates missing
    proxies for every live origin in every pool's groups.
    """
    short_name = "proxy_meta"
    description = "Housekeeping for proxies"

    def automate(self, full: bool = False) -> Tuple[bool, str]:
        """Run one housekeeping pass; returns (success, log output)."""
        # Destroy expired proxies
        # A proxy is expired once it has been deprecated for over 3 days.
        cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=3)
        proxies: List[Proxy] = Proxy.query.filter(
            Proxy.destroyed.is_(None),
            Proxy.deprecated < cutoff
        ).all()
        for proxy in proxies:
            logging.debug("Destroying expired proxy")
            proxy.destroy()
        # Deprecate orphaned proxies and mismatched proxies
        proxies = Proxy.query.filter(
            Proxy.deprecated.is_(None),
            Proxy.destroyed.is_(None),
        ).all()
        for proxy in proxies:
            if proxy.origin.destroyed is not None:
                proxy.deprecate(reason="origin_destroyed")
            # Smart origins need a smart-capable provider.
            if proxy.origin.smart and not PROXY_PROVIDERS[proxy.provider].smart_proxies:
                proxy.deprecate(reason="not_smart_enough")
        # Create new proxies
        pools = Pool.query.all()
        for pool in pools:
            for group in pool.groups:
                for origin in group.origins:
                    if origin.destroyed is not None:
                        continue
                    # Live proxies for this origin already in this pool.
                    proxies = [
                        x for x in origin.proxies
                        if x.pool_id == pool.id and x.deprecated is None and x.destroyed is None
                    ]
                    if not proxies:
                        logging.debug("Creating new proxy for %s in pool %s", origin, pool)
                        create_proxy(pool, origin)
        # Single commit covers destroys, deprecations and creations above.
        db.session.commit()
        return True, ""
|
61
migrations/versions/45fedef32318_.py
Normal file
61
migrations/versions/45fedef32318_.py
Normal file
|
@ -0,0 +1,61 @@
|
|||
"""empty message
|
||||
|
||||
Revision ID: 45fedef32318
|
||||
Revises: 665e340dbe09
|
||||
Create Date: 2022-09-07 16:20:04.603554
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '45fedef32318'
|
||||
down_revision = '665e340dbe09'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # New "pool" table backing the resource pool model.
    op.create_table('pool',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('description', sa.String(length=255), nullable=False),
    sa.Column('added', sa.DateTime(), nullable=False),
    sa.Column('updated', sa.DateTime(), nullable=False),
    sa.Column('destroyed', sa.DateTime(), nullable=True),
    sa.Column('pool_name', sa.String(length=80), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_pool')),
    sa.UniqueConstraint('pool_name', name=op.f('uq_pool_pool_name'))
    )
    # Association table for the many-to-many pool <-> group link.
    op.create_table('pool_group',
    sa.Column('pool_id', sa.Integer(), nullable=False),
    sa.Column('group_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['group_id'], ['group.id'], name=op.f('fk_pool_group_group_id_group')),
    sa.ForeignKeyConstraint(['pool_id'], ['pool.id'], name=op.f('fk_pool_group_pool_id_pool')),
    sa.PrimaryKeyConstraint('pool_id', 'group_id', name=op.f('pk_pool_group'))
    )
    # Attach existing mirror lists and proxies to pools via a nullable FK;
    # batch_alter_table is used for SQLite-compatible ALTERs.
    with op.batch_alter_table('mirror_list', schema=None) as batch_op:
        batch_op.add_column(sa.Column('pool_id', sa.Integer(), nullable=True))
        batch_op.create_foreign_key(batch_op.f('fk_mirror_list_pool_id_pool'), 'pool', ['pool_id'], ['id'])

    with op.batch_alter_table('proxy', schema=None) as batch_op:
        batch_op.add_column(sa.Column('pool_id', sa.Integer(), nullable=True))
        batch_op.create_foreign_key(batch_op.f('fk_proxy_pool_id_pool'), 'pool', ['pool_id'], ['id'])

    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Reverse of upgrade(): drop the pool FKs/columns first, then the
    # association table, then the pool table itself.
    with op.batch_alter_table('proxy', schema=None) as batch_op:
        batch_op.drop_constraint(batch_op.f('fk_proxy_pool_id_pool'), type_='foreignkey')
        batch_op.drop_column('pool_id')

    with op.batch_alter_table('mirror_list', schema=None) as batch_op:
        batch_op.drop_constraint(batch_op.f('fk_mirror_list_pool_id_pool'), type_='foreignkey')
        batch_op.drop_column('pool_id')

    op.drop_table('pool_group')
    op.drop_table('pool')
    # ### end Alembic commands ###
|
Loading…
Add table
Add a link
Reference in a new issue