From 09f0b0672de19d2759e2eebbd78bcf23fb1741f2 Mon Sep 17 00:00:00 2001 From: Iain Learmonth Date: Thu, 10 Mar 2022 14:26:22 +0000 Subject: [PATCH] Initial import --- .gitignore | 156 +++++++ .gitlab-ci.yml | 21 + app/__init__.py | 55 +++ app/alarms.py | 27 ++ app/extensions.py | 17 + app/mirror_sites.py | 52 +++ app/models.py | 238 +++++++++++ app/portal/__init__.py | 384 ++++++++++++++++++ app/portal/forms.py | 69 ++++ app/portal/static/portal.css | 100 +++++ app/portal/templates/alarms.html.j2 | 31 ++ app/portal/templates/base.html.j2 | 162 ++++++++ app/portal/templates/bridgeconf.html.j2 | 16 + app/portal/templates/bridgeconfs.html.j2 | 8 + app/portal/templates/bridges.html.j2 | 7 + app/portal/templates/error.html.j2 | 6 + app/portal/templates/group.html.j2 | 18 + app/portal/templates/groups.html.j2 | 30 ++ app/portal/templates/home.html.j2 | 6 + app/portal/templates/lifecycle.html.j2 | 8 + app/portal/templates/mirrorlists.html.j2 | 8 + app/portal/templates/new.html.j2 | 12 + app/portal/templates/origin.html.j2 | 20 + app/portal/templates/origins.html.j2 | 8 + app/portal/templates/proxies.html.j2 | 7 + app/portal/templates/search.html.j2 | 14 + app/portal/templates/tables.html.j2 | 253 ++++++++++++ app/static/.gitkeep | 0 app/terraform/__init__.py | 56 +++ app/terraform/block_bridge_github.py | 29 ++ app/terraform/block_external.py | 67 +++ app/terraform/bridge/__init__.py | 77 ++++ app/terraform/bridge/aws.py | 84 ++++ app/terraform/bridge/gandi.py | 95 +++++ app/terraform/bridge/hcloud.py | 98 +++++ app/terraform/bridge/ovh.py | 122 ++++++ app/terraform/eotk.py | 79 ++++ app/terraform/list/__init__.py | 28 ++ app/terraform/list/github.py | 55 +++ app/terraform/list/gitlab.py | 54 +++ app/terraform/list/s3.py | 46 +++ app/terraform/proxy/__init__.py | 51 +++ app/terraform/proxy/azure_cdn.py | 238 +++++++++++ app/terraform/proxy/cloudfront.py | 156 +++++++ app/terraform/proxy_check.py | 60 +++ config.yaml.example | 33 ++ docs/Makefile | 20 + docs/admin/index.rst 
| 3 + docs/conf.py | 41 ++ docs/index.rst | 35 ++ docs/make.bat | 35 ++ docs/tech/conf.rst | 8 + docs/tech/index.rst | 4 + docs/tech/resource.rst | 8 + docs/user/index.rst | 3 + migrations/README | 1 + migrations/alembic.ini | 50 +++ migrations/env.py | 91 +++++ migrations/script.py.mako | 24 ++ .../versions/07c4fb2af22c_initial_schema.py | 124 ++++++ ...5185e88_alarms_text_and_destroy_origins.py | 38 ++ .../5c69fe874e4d_add_bridge_nicknames.py | 32 ++ .../versions/e1332e4cb910_add_mirror_lists.py | 41 ++ requirements.txt | 16 + 64 files changed, 3735 insertions(+) create mode 100644 .gitignore create mode 100644 .gitlab-ci.yml create mode 100644 app/__init__.py create mode 100644 app/alarms.py create mode 100644 app/extensions.py create mode 100644 app/mirror_sites.py create mode 100644 app/models.py create mode 100644 app/portal/__init__.py create mode 100644 app/portal/forms.py create mode 100644 app/portal/static/portal.css create mode 100644 app/portal/templates/alarms.html.j2 create mode 100644 app/portal/templates/base.html.j2 create mode 100644 app/portal/templates/bridgeconf.html.j2 create mode 100644 app/portal/templates/bridgeconfs.html.j2 create mode 100644 app/portal/templates/bridges.html.j2 create mode 100644 app/portal/templates/error.html.j2 create mode 100644 app/portal/templates/group.html.j2 create mode 100644 app/portal/templates/groups.html.j2 create mode 100644 app/portal/templates/home.html.j2 create mode 100644 app/portal/templates/lifecycle.html.j2 create mode 100644 app/portal/templates/mirrorlists.html.j2 create mode 100644 app/portal/templates/new.html.j2 create mode 100644 app/portal/templates/origin.html.j2 create mode 100644 app/portal/templates/origins.html.j2 create mode 100644 app/portal/templates/proxies.html.j2 create mode 100644 app/portal/templates/search.html.j2 create mode 100644 app/portal/templates/tables.html.j2 create mode 100644 app/static/.gitkeep create mode 100644 app/terraform/__init__.py create mode 100644 
app/terraform/block_bridge_github.py create mode 100644 app/terraform/block_external.py create mode 100644 app/terraform/bridge/__init__.py create mode 100644 app/terraform/bridge/aws.py create mode 100644 app/terraform/bridge/gandi.py create mode 100644 app/terraform/bridge/hcloud.py create mode 100644 app/terraform/bridge/ovh.py create mode 100644 app/terraform/eotk.py create mode 100644 app/terraform/list/__init__.py create mode 100644 app/terraform/list/github.py create mode 100644 app/terraform/list/gitlab.py create mode 100644 app/terraform/list/s3.py create mode 100644 app/terraform/proxy/__init__.py create mode 100644 app/terraform/proxy/azure_cdn.py create mode 100644 app/terraform/proxy/cloudfront.py create mode 100644 app/terraform/proxy_check.py create mode 100644 config.yaml.example create mode 100644 docs/Makefile create mode 100644 docs/admin/index.rst create mode 100644 docs/conf.py create mode 100644 docs/index.rst create mode 100644 docs/make.bat create mode 100644 docs/tech/conf.rst create mode 100644 docs/tech/index.rst create mode 100644 docs/tech/resource.rst create mode 100644 docs/user/index.rst create mode 100644 migrations/README create mode 100644 migrations/alembic.ini create mode 100644 migrations/env.py create mode 100644 migrations/script.py.mako create mode 100644 migrations/versions/07c4fb2af22c_initial_schema.py create mode 100644 migrations/versions/59c9a5185e88_alarms_text_and_destroy_origins.py create mode 100644 migrations/versions/5c69fe874e4d_add_bridge_nicknames.py create mode 100644 migrations/versions/e1332e4cb910_add_mirror_lists.py create mode 100644 requirements.txt diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3843588 --- /dev/null +++ b/.gitignore @@ -0,0 +1,156 @@ +# Secrets +config.yaml +app/example.db* + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ 
+eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +.idea/ diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 0000000..519f1cc --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,21 @@ +image: python:3.8-alpine + +test: + stage: test + script: + - pip install -U sphinx sphinx-press-theme + - sphinx-build -b html docs public + rules: + - if: $CI_COMMIT_REF_NAME != $CI_DEFAULT_BRANCH + +pages: + stage: deploy + script: + - pip install -U sphinx sphinx-press-theme + - sphinx-build -b html docs public + artifacts: + paths: + - public + rules: + - if: $CI_COMMIT_REF_NAME == "docs" + diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..2983958 --- /dev/null +++ b/app/__init__.py @@ -0,0 +1,55 @@ +import boto3 as boto3 +from flask import Flask, jsonify, Response, redirect, url_for +import yaml + +from app.extensions import db +from app.extensions import migrate +from app.extensions import bootstrap +from app.mirror_sites import mirror_sites +from app.models import Group, Origin, Proxy, Mirror +from app.portal import portal + +app = Flask(__name__) +app.config.from_file("../config.yaml", load=yaml.safe_load) 
+db.init_app(app) +migrate.init_app(app, db, render_as_batch=True) +bootstrap.init_app(app) + +app.register_blueprint(portal, url_prefix="/portal") + + +@app.route('/') +def index(): + return redirect(url_for("portal.portal_home")) + + +@app.route('/import/cloudfront') +def import_cloudfront(): + a = "" + not_found = [] + cloudfront = boto3.client('cloudfront', + aws_access_key_id=app.config['AWS_ACCESS_KEY'], + aws_secret_access_key=app.config['AWS_SECRET_KEY']) + dist_paginator = cloudfront.get_paginator('list_distributions') + page_iterator = dist_paginator.paginate() + for page in page_iterator: + for dist in page['DistributionList']['Items']: + res = Proxy.query.all() + matches = [r for r in res if r.origin.domain_name == dist['Comment'][8:]] + if not matches: + not_found.append(dist['Comment'][8:]) + continue + a += f"# {dist['Comment'][8:]}\n" + a += f"terraform import module.cloudfront_{matches[0].id}.aws_cloudfront_distribution.this {dist['Id']}\n" + for n in not_found: + a += f"# Not found: {n}\n" + return Response(a, content_type="text/plain") + + +@app.route('/mirrorSites.json') +def json_mirror_sites(): + return jsonify(mirror_sites) + + +if __name__ == '__main__': + app.run() diff --git a/app/alarms.py b/app/alarms.py new file mode 100644 index 0000000..079f735 --- /dev/null +++ b/app/alarms.py @@ -0,0 +1,27 @@ +from app.extensions import db +from app.models import Alarm + + +def _get_alarm(target: str, + alarm_type: str, + proxy_id=None, + create_if_missing=True): + if target == "proxy": + alarm = Alarm.query.filter( + Alarm.target == "proxy", + Alarm.alarm_type == alarm_type, + Alarm.proxy_id == proxy_id + ).first() + if create_if_missing and alarm is None: + alarm = Alarm() + alarm.target = target + alarm.alarm_type = alarm_type + if target == "proxy": + alarm.proxy_id = proxy_id + db.session.add(alarm) + db.session.commit() + return alarm + + +def get_proxy_alarm(proxy_id: int, alarm_type: str): + return _get_alarm("proxy", "alarm_type", 
proxy_id=proxy_id) diff --git a/app/extensions.py b/app/extensions.py new file mode 100644 index 0000000..1653fdd --- /dev/null +++ b/app/extensions.py @@ -0,0 +1,17 @@ +from flask_migrate import Migrate +from flask_sqlalchemy import SQLAlchemy +from flask_bootstrap import Bootstrap5 +from sqlalchemy import MetaData + +convention = { + "ix": 'ix_%(column_0_label)s', + "uq": "uq_%(table_name)s_%(column_0_name)s", + "ck": "ck_%(table_name)s_%(constraint_name)s", + "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", + "pk": "pk_%(table_name)s" +} + +metadata = MetaData(naming_convention=convention) +db = SQLAlchemy(metadata=metadata) +migrate = Migrate() +bootstrap = Bootstrap5() diff --git a/app/mirror_sites.py b/app/mirror_sites.py new file mode 100644 index 0000000..e1c4d16 --- /dev/null +++ b/app/mirror_sites.py @@ -0,0 +1,52 @@ +from tldextract import extract + +from app.models import Origin, Bridge, Proxy + + +def mirror_sites(): + return { + "version": "2.0", + "sites": [{ + "main_domain": x.domain_name.replace("www.", ""), + "available_alternatives": [ + { + "proto": "tor" if ".onion" in a.url else "https", + "type": "eotk" if ".onion" in a.url else "mirror", + "created_at": str(a.added), + "updated_at": str(a.updated), + "url": a.url + } for a in x.mirrors if not a.deprecated and not a.destroyed + ] + [ + { + "proto": "https", + "type": "mirror", + "created_at": str(a.added), + "updated_at": str(a.updated), + "url": a.url + } for a in x.proxies if + a.url is not None and not a.deprecated and not a.destroyed and a.provider == "cloudfront" + ]} for x in Origin.query.order_by(Origin.domain_name).all() + ] + } + + +def bridgelines(): + return { + "version": "1.0", + "bridgelines": [ + b.bridgeline for b in Bridge.query.filter( + Bridge.destroyed == None, + Bridge.bridgeline != None + ) + ] + } + + +def mirror_mapping(): + return { + d.url.lstrip("https://"): { + "origin_domain": d.origin.domain_name, + "origin_domain_normalized": 
d.origin.domain_name.lstrip("www."), + "origin_domain_root": extract(d.origin.domain_name).registered_domain + } for d in Proxy.query.all() if d.url is not None + } diff --git a/app/models.py b/app/models.py new file mode 100644 index 0000000..a32dcc5 --- /dev/null +++ b/app/models.py @@ -0,0 +1,238 @@ +import enum +from datetime import datetime + +from app.extensions import db + + +class AbstractConfiguration(db.Model): + __abstract__ = True + + id = db.Column(db.Integer, primary_key=True) + description = db.Column(db.String(255), nullable=False) + added = db.Column(db.DateTime(), default=datetime.utcnow, nullable=False) + updated = db.Column(db.DateTime(), default=datetime.utcnow, nullable=False) + destroyed = db.Column(db.DateTime(), nullable=True) + + def destroy(self): + self.destroyed = datetime.utcnow() + self.updated = datetime.utcnow() + db.session.commit() + + +class AbstractResource(db.Model): + __abstract__ = True + + id = db.Column(db.Integer, primary_key=True) + added = db.Column(db.DateTime(), default=datetime.utcnow, nullable=False) + updated = db.Column(db.DateTime(), default=datetime.utcnow, nullable=False) + deprecated = db.Column(db.DateTime(), nullable=True) + destroyed = db.Column(db.DateTime(), nullable=True) + + def deprecate(self): + self.deprecated = datetime.utcnow() + self.updated = datetime.utcnow() + db.session.commit() + + def destroy(self): + if self.deprecated is None: + self.deprecated = datetime.utcnow() + self.destroyed = datetime.utcnow() + self.updated = datetime.utcnow() + db.session.commit() + + def __repr__(self): + return f"<{self.__class__.__name__} #{self.id}>" + + +class Group(db.Model): + id = db.Column(db.Integer, primary_key=True) + group_name = db.Column(db.String(80), unique=True, nullable=False) + description = db.Column(db.String(255), nullable=False) + eotk = db.Column(db.Boolean()) + added = db.Column(db.DateTime(), default=datetime.utcnow, nullable=False) + updated = db.Column(db.DateTime(), 
default=datetime.utcnow, nullable=False) + + origins = db.relationship("Origin", back_populates="group") + bridgeconfs = db.relationship("BridgeConf", back_populates="group") + alarms = db.relationship("Alarm", back_populates="group") + + def as_dict(self): + return { + "id": self.id, + "name": self.group_name, + "description": self.description, + "added": self.added, + "updated": self.updated + } + + def __repr__(self): + return '' % self.group_name + + +class Origin(AbstractConfiguration): + group_id = db.Column(db.Integer, db.ForeignKey("group.id"), nullable=False) + domain_name = db.Column(db.String(255), unique=True, nullable=False) + + group = db.relationship("Group", back_populates="origins") + mirrors = db.relationship("Mirror", back_populates="origin") + proxies = db.relationship("Proxy", back_populates="origin") + alarms = db.relationship("Alarm", back_populates="origin") + + def as_dict(self): + return { + "id": self.id, + "group_id": self.group.id, + "group_name": self.group.group_name, + "domain_name": self.domain_name, + "description": self.description, + "added": self.added, + "updated": self.updated + } + + def __repr__(self): + return '' % self.domain_name + + +class Proxy(AbstractResource): + id = db.Column(db.Integer, primary_key=True) + origin_id = db.Column(db.Integer, db.ForeignKey("origin.id"), nullable=False) + provider = db.Column(db.String(20), nullable=False) + slug = db.Column(db.String(20), nullable=True) + terraform_updated = db.Column(db.DateTime(), nullable=True) + url = db.Column(db.String(255), nullable=True) + + origin = db.relationship("Origin", back_populates="proxies") + alarms = db.relationship("Alarm", back_populates="proxy") + + def as_dict(self): + return { + "id": self.id, + "origin_id": self.origin.id, + "origin_domain_name": self.origin.domain_name, + "provider": self.provider, + "slug": self.slug, + "added": self.added, + "updated": self.updated + } + + +class Mirror(db.Model): + id = db.Column(db.Integer, 
primary_key=True) + origin_id = db.Column(db.Integer, db.ForeignKey("origin.id"), nullable=False) + url = db.Column(db.String(255), unique=True, nullable=False) + added = db.Column(db.DateTime(), default=datetime.utcnow, nullable=False) + updated = db.Column(db.DateTime(), default=datetime.utcnow, nullable=False) + deprecated = db.Column(db.DateTime(), nullable=True) + destroyed = db.Column(db.DateTime(), nullable=True) + + origin = db.relationship("Origin", back_populates="mirrors") + + def as_dict(self): + return { + "id": self.id, + "origin_id": self.origin_id, + "origin_domain_name": self.origin.domain_name, + "url": self.url, + "added": self.added, + "updated": self.updated + } + + def __repr__(self): + return '' % (self.origin.domain_name, self.id) + + +class AlarmState(enum.Enum): + UNKNOWN = 0 + OK = 1 + WARNING = 2 + CRITICAL = 3 + + +class Alarm(db.Model): + id = db.Column(db.Integer, primary_key=True) + target = db.Column(db.String(60), nullable=False) + group_id = db.Column(db.Integer, db.ForeignKey("group.id")) + origin_id = db.Column(db.Integer, db.ForeignKey("origin.id")) + proxy_id = db.Column(db.Integer, db.ForeignKey("proxy.id")) + bridge_id = db.Column(db.Integer, db.ForeignKey("bridge.id")) + alarm_type = db.Column(db.String(255), nullable=False) + alarm_state = db.Column(db.Enum(AlarmState), default=AlarmState.UNKNOWN, nullable=False) + state_changed = db.Column(db.DateTime(), nullable=False) + last_updated = db.Column(db.DateTime()) + text = db.Column(db.String(255)) + + group = db.relationship("Group", back_populates="alarms") + origin = db.relationship("Origin", back_populates="alarms") + proxy = db.relationship("Proxy", back_populates="alarms") + bridge = db.relationship("Bridge", back_populates="alarms") + + def update_state(self, state: AlarmState, text: str): + if self.state != state: + self.state_changed = datetime.utcnow() + self.alarm_state = state + self.text = text + self.last_updated = datetime.utcnow() + db.session.commit() + + 
+class BridgeConf(db.Model): + id = db.Column(db.Integer, primary_key=True) + group_id = db.Column(db.Integer, db.ForeignKey("group.id"), nullable=False) + provider = db.Column(db.String(20), nullable=False) + method = db.Column(db.String(20), nullable=False) + description = db.Column(db.String(255)) + number = db.Column(db.Integer()) + added = db.Column(db.DateTime(), default=datetime.utcnow, nullable=False) + updated = db.Column(db.DateTime(), default=datetime.utcnow, nullable=False) + destroyed = db.Column(db.DateTime(), nullable=True) + + group = db.relationship("Group", back_populates="bridgeconfs") + bridges = db.relationship("Bridge", back_populates="conf") + + def destroy(self): + self.destroyed = datetime.utcnow() + self.updated = datetime.utcnow() + for bridge in self.bridges: + if bridge.destroyed is None: + bridge.destroyed = datetime.utcnow() + bridge.updated = datetime.utcnow() + db.session.commit() + + +class Bridge(AbstractResource): + conf_id = db.Column(db.Integer, db.ForeignKey("bridge_conf.id"), nullable=False) + terraform_updated = db.Column(db.DateTime(), nullable=True) + nickname = db.Column(db.String(255), nullable=True) + fingerprint = db.Column(db.String(255), nullable=True) + hashed_fingerprint = db.Column(db.String(255), nullable=True) + bridgeline = db.Column(db.String(255), nullable=True) + + conf = db.relationship("BridgeConf", back_populates="bridges") + alarms = db.relationship("Alarm", back_populates="bridge") + + +class MirrorList(db.Model): + id = db.Column(db.Integer, primary_key=True) + provider = db.Column(db.String(255), nullable=False) + description = db.Column(db.String(255), nullable=False) + format = db.Column(db.String(20), nullable=False) + container = db.Column(db.String(255), nullable=False) + branch = db.Column(db.String(255), nullable=False) + filename = db.Column(db.String(255), nullable=False) + added = db.Column(db.DateTime(), default=datetime.utcnow, nullable=False) + updated = db.Column(db.DateTime(), 
default=datetime.utcnow, nullable=False) + deprecated = db.Column(db.DateTime(), nullable=True) + destroyed = db.Column(db.DateTime(), nullable=True) + + def destroy(self): + self.destroyed = datetime.utcnow() + self.updated = datetime.utcnow() + db.session.commit() + + def url(self): + if self.provider == "gitlab": + return f"https://gitlab.com/{self.container}/-/raw/{self.branch}/{self.filename}" + if self.provider == "github": + return f"https://raw.githubusercontent.com/{self.container}/{self.branch}/{self.filename}" + if self.provider == "s3": + return f"s3://{self.container}/{self.filename}" diff --git a/app/portal/__init__.py b/app/portal/__init__.py new file mode 100644 index 0000000..81227ef --- /dev/null +++ b/app/portal/__init__.py @@ -0,0 +1,384 @@ +from datetime import datetime, timedelta + +from flask import Blueprint, render_template, Response, flash, redirect, url_for, request +from sqlalchemy import exc, desc, or_ + +from app.extensions import db +from app.models import Group, Origin, Proxy, Alarm, BridgeConf, Bridge, MirrorList, AbstractResource +from app.portal.forms import EditGroupForm, NewGroupForm, NewOriginForm, EditOriginForm, LifecycleForm, \ + NewBridgeConfForm, EditBridgeConfForm, NewMirrorListForm + +portal = Blueprint("portal", __name__, template_folder="templates", static_folder="static") + + +@portal.app_template_filter("mirror_expiry") +def calculate_mirror_expiry(s): + expiry = s + timedelta(days=3) + countdown = expiry - datetime.utcnow() + if countdown.days == 0: + return f"{countdown.seconds // 3600} hours" + return f"{countdown.days} days" + + +@portal.route("/") +def portal_home(): + return render_template("home.html.j2", section="home") + + +@portal.route("/groups") +def view_groups(): + groups = Group.query.order_by(Group.group_name).all() + return render_template("groups.html.j2", section="group", groups=groups) + + +@portal.route("/group/new", methods=['GET', 'POST']) +def new_group(): + form = NewGroupForm() + if 
form.validate_on_submit(): + group = Group() + group.group_name = form.group_name.data + group.description = form.description.data + group.eotk = form.eotk.data + group.created = datetime.utcnow() + group.updated = datetime.utcnow() + try: + db.session.add(group) + db.session.commit() + flash(f"Created new group {group.group_name}.", "success") + return redirect(url_for("portal.edit_group", group_id=group.id)) + except exc.SQLAlchemyError as e: + print(e) + flash("Failed to create new group.", "danger") + return redirect(url_for("portal.view_groups")) + return render_template("new.html.j2", section="group", form=form) + + +@portal.route('/group/edit/', methods=['GET', 'POST']) +def edit_group(group_id): + group = Group.query.filter(Group.id == group_id).first() + if group is None: + return Response(render_template("error.html.j2", + section="group", + header="404 Group Not Found", + message="The requested group could not be found."), + status=404) + form = EditGroupForm(description=group.description, + eotk=group.eotk) + if form.validate_on_submit(): + group.description = form.description.data + group.eotk = form.eotk.data + group.updated = datetime.utcnow() + try: + db.session.commit() + flash("Saved changes to group.", "success") + except exc.SQLAlchemyError: + flash("An error occurred saving the changes to the group.", "danger") + return render_template("group.html.j2", + section="group", + group=group, form=form) + + +@portal.route("/origin/new", methods=['GET', 'POST']) +@portal.route("/origin/new/", methods=['GET', 'POST']) +def new_origin(group_id=None): + form = NewOriginForm() + form.group.choices = [(x.id, x.group_name) for x in Group.query.all()] + if form.validate_on_submit(): + origin = Origin() + origin.group_id = form.group.data + origin.domain_name = form.domain_name.data + origin.description = form.description.data + origin.created = datetime.utcnow() + origin.updated = datetime.utcnow() + try: + db.session.add(origin) + db.session.commit() + 
flash(f"Created new origin {origin.domain_name}.", "success") + return redirect(url_for("portal.edit_origin", origin_id=origin.id)) + except exc.SQLAlchemyError as e: + print(e) + flash("Failed to create new origin.", "danger") + return redirect(url_for("portal.view_origins")) + if group_id: + form.group.data = group_id + return render_template("new.html.j2", section="origin", form=form) + + +@portal.route('/origin/edit/', methods=['GET', 'POST']) +def edit_origin(origin_id): + origin = Origin.query.filter(Origin.id == origin_id).first() + if origin is None: + return Response(render_template("error.html.j2", + section="origin", + header="404 Origin Not Found", + message="The requested origin could not be found."), + status=404) + form = EditOriginForm(group=origin.group_id, + description=origin.description) + form.group.choices = [(x.id, x.group_name) for x in Group.query.all()] + if form.validate_on_submit(): + origin.group_id = form.group.data + origin.description = form.description.data + origin.updated = datetime.utcnow() + try: + db.session.commit() + flash("Saved changes to group.", "success") + except exc.SQLAlchemyError: + flash("An error occurred saving the changes to the group.", "danger") + return render_template("origin.html.j2", + section="origin", + origin=origin, form=form) + + +@portal.route("/origins") +def view_origins(): + origins = Origin.query.order_by(Origin.domain_name).all() + return render_template("origins.html.j2", section="origin", origins=origins) + + +@portal.route("/proxies") +def view_proxies(): + proxies = Proxy.query.filter(Proxy.destroyed == None).order_by(desc(Proxy.updated)).all() + return render_template("proxies.html.j2", section="proxy", proxies=proxies) + + +@portal.route("/proxy/block/", methods=['GET', 'POST']) +def blocked_proxy(proxy_id): + proxy = Proxy.query.filter(Proxy.id == proxy_id, Proxy.destroyed == None).first() + if proxy is None: + return Response(render_template("error.html.j2", + header="404 Proxy Not 
Found", + message="The requested proxy could not be found.")) + form = LifecycleForm() + if form.validate_on_submit(): + proxy.deprecate() + flash("Proxy will be shortly replaced.", "success") + return redirect(url_for("portal.edit_origin", origin_id=proxy.origin.id)) + return render_template("lifecycle.html.j2", + header=f"Mark proxy for {proxy.origin.domain_name} as blocked?", + message=proxy.url, + section="proxy", + form=form) + + +@portal.route("/search") +def search(): + query = request.args.get("query") + proxies = Proxy.query.filter(or_(Proxy.url.contains(query)), Proxy.destroyed == None).all() + origins = Origin.query.filter(or_(Origin.description.contains(query), Origin.domain_name.contains(query))).all() + return render_template("search.html.j2", section="home", proxies=proxies, origins=origins) + + +@portal.route('/alarms') +def view_alarms(): + alarms = Alarm.query.order_by(Alarm.alarm_state, desc(Alarm.state_changed)).all() + return render_template("alarms.html.j2", section="alarm", alarms=alarms) + + +@portal.route('/lists') +def view_mirror_lists(): + mirrorlists = MirrorList.query.filter(MirrorList.destroyed == None).all() + return render_template("mirrorlists.html.j2", section="list", mirrorlists=mirrorlists) + + +@portal.route("/list/destroy/") +def destroy_mirror_list(list_id): + return "not implemented" + +@portal.route("/list/new", methods=['GET', 'POST']) +@portal.route("/list/new/", methods=['GET', 'POST']) +def new_mirror_list(group_id=None): + form = NewMirrorListForm() + form.provider.choices = [ + ("github", "GitHub"), + ("gitlab", "GitLab"), + ("s3", "AWS S3"), + ] + form.format.choices = [ + ("bc2", "Bypass Censorship v2"), + ("bc3", "Bypass Censorship v3"), + ("bca", "Bypass Censorship Analytics"), + ("bridgelines", "Tor Bridge Lines") + ] + form.container.description = "GitHub Project, GitLab Project or AWS S3 bucket name." + form.branch.description = "Ignored for AWS S3." 
+ if form.validate_on_submit(): + mirror_list = MirrorList() + mirror_list.provider = form.provider.data + mirror_list.format = form.format.data + mirror_list.description = form.description.data + mirror_list.container = form.container.data + mirror_list.branch = form.branch.data + mirror_list.filename = form.filename.data + mirror_list.created = datetime.utcnow() + mirror_list.updated = datetime.utcnow() + try: + db.session.add(mirror_list) + db.session.commit() + flash(f"Created new mirror list.", "success") + return redirect(url_for("portal.view_mirror_lists")) + except exc.SQLAlchemyError as e: + print(e) + flash("Failed to create new mirror list.", "danger") + return redirect(url_for("portal.view_mirror_lists")) + if group_id: + form.group.data = group_id + return render_template("new.html.j2", section="list", form=form) + + +@portal.route("/bridgeconfs") +def view_bridgeconfs(): + bridgeconfs = BridgeConf.query.filter(BridgeConf.destroyed == None).all() + return render_template("bridgeconfs.html.j2", section="bridgeconf", bridgeconfs=bridgeconfs) + + +@portal.route("/bridgeconf/new", methods=['GET', 'POST']) +@portal.route("/bridgeconf/new/", methods=['GET', 'POST']) +def new_bridgeconf(group_id=None): + form = NewBridgeConfForm() + form.group.choices = [(x.id, x.group_name) for x in Group.query.all()] + form.provider.choices = [ + ("aws", "AWS Lightsail"), + ("hcloud", "Hetzner Cloud"), + ("ovh", "OVH Public Cloud"), + ("gandi", "GandiCloud VPS") + ] + form.method.choices = [ + ("any", "Any (BridgeDB)"), + ("email", "E-Mail (BridgeDB)"), + ("moat", "Moat (BridgeDB)"), + ("https", "HTTPS (BridgeDB)"), + ("none", "None (Private)") + ] + if form.validate_on_submit(): + bridge_conf = BridgeConf() + bridge_conf.group_id = form.group.data + bridge_conf.provider = form.provider.data + bridge_conf.method = form.method.data + bridge_conf.description = form.description.data + bridge_conf.number = form.number.data + bridge_conf.created = datetime.utcnow() + 
bridge_conf.updated = datetime.utcnow() + try: + db.session.add(bridge_conf) + db.session.commit() + flash(f"Created new bridge configuration {bridge_conf.id}.", "success") + return redirect(url_for("portal.view_bridgeconfs")) + except exc.SQLAlchemyError as e: + print(e) + flash("Failed to create new bridge configuration.", "danger") + return redirect(url_for("portal.view_bridgeconfs")) + if group_id: + form.group.data = group_id + return render_template("new.html.j2", section="bridgeconf", form=form) + + +@portal.route("/bridges") +def view_bridges(): + bridges = Bridge.query.filter(Bridge.destroyed == None).all() + return render_template("bridges.html.j2", section="bridge", bridges=bridges) + + +@portal.route('/bridgeconf/edit/', methods=['GET', 'POST']) +def edit_bridgeconf(bridgeconf_id): + bridgeconf = BridgeConf.query.filter(BridgeConf.id == bridgeconf_id).first() + if bridgeconf is None: + return Response(render_template("error.html.j2", + section="origin", + header="404 Origin Not Found", + message="The requested origin could not be found."), + status=404) + form = EditBridgeConfForm(description=bridgeconf.description, + number=bridgeconf.number) + if form.validate_on_submit(): + bridgeconf.description = form.description.data + bridgeconf.number = form.number.data + bridgeconf.updated = datetime.utcnow() + try: + db.session.commit() + flash("Saved changes to bridge configuration.", "success") + except exc.SQLAlchemyError: + flash("An error occurred saving the changes to the bridge configuration.", "danger") + return render_template("bridgeconf.html.j2", + section="bridgeconf", + bridgeconf=bridgeconf, form=form) + + +@portal.route("/bridge/block/", methods=['GET', 'POST']) +def blocked_bridge(bridge_id): + bridge = Bridge.query.filter(Bridge.id == bridge_id, Bridge.destroyed == None).first() + if bridge is None: + return Response(render_template("error.html.j2", + header="404 Proxy Not Found", + message="The requested bridge could not be found.")) + form = 
LifecycleForm() + if form.validate_on_submit(): + bridge.deprecate() + flash("Bridge will be shortly replaced.", "success") + return redirect(url_for("portal.edit_bridgeconf", bridgeconf_id=bridge.conf_id)) + return render_template("lifecycle.html.j2", + header=f"Mark bridge {bridge.hashed_fingerprint} as blocked?", + message=bridge.hashed_fingerprint, + section="bridge", + form=form) + + +def response_404(message: str): + return Response(render_template("error.html.j2", + header="404 Not Found", + message=message)) + + +def view_lifecycle(*, + header: str, + message: str, + success_message: str, + success_view: str, + section: str, + resource: AbstractResource, + action: str): + form = LifecycleForm() + if form.validate_on_submit(): + if action == "destroy": + resource.destroy() + elif action == "deprecate": + resource.deprecate() + flash(success_message, "success") + return redirect(url_for(success_view)) + return render_template("lifecycle.html.j2", + header=header, + message=message, + section=section, + form=form) + + +@portal.route("/bridgeconf/destroy/", methods=['GET', 'POST']) +def destroy_bridgeconf(bridgeconf_id: int): + bridgeconf = BridgeConf.query.filter(BridgeConf.id == bridgeconf_id, BridgeConf.destroyed == None).first() + if bridgeconf is None: + return response_404("The requested bridge configuration could not be found.") + return view_lifecycle( + header=f"Destroy bridge configuration?", + message=bridgeconf.description, + success_view="portal.view_bridgeconfs", + success_message="All bridges from the destroyed configuration will shortly be destroyed at their providers.", + section="bridgeconf", + resource=bridgeconf, + action="destroy" + ) + + +@portal.route("/origin/destroy/", methods=['GET', 'POST']) +def destroy_origin(origin_id: int): + origin = Origin.query.filter(Origin.id == origin_id, Origin.destroyed == None).first() + if origin is None: + return response_404("The requested origin could not be found.") + return view_lifecycle( + 
header=f"Destroy origin {origin.domain_name}", + message=origin.description, + success_message="All proxies from the destroyed origin will shortly be destroyed at their providers.", + success_view="portal.view_origins", + section="origin", + resource=origin, + action="destroy" + ) diff --git a/app/portal/forms.py b/app/portal/forms.py new file mode 100644 index 0000000..d92151d --- /dev/null +++ b/app/portal/forms.py @@ -0,0 +1,69 @@ +from flask_wtf import FlaskForm +from wtforms import StringField, SubmitField, SelectField, BooleanField, IntegerField +from wtforms.validators import DataRequired, NumberRange + + +class NewGroupForm(FlaskForm): + group_name = StringField("Short Name", validators=[DataRequired()]) + description = StringField("Description", validators=[DataRequired()]) + eotk = BooleanField("Deploy EOTK instances?") + submit = SubmitField('Save Changes', render_kw={"class": "btn btn-success"}) + + +class EditGroupForm(FlaskForm): + description = StringField('Description', validators=[DataRequired()]) + eotk = BooleanField("Deploy EOTK instances?") + submit = SubmitField('Save Changes', render_kw={"class": "btn btn-success"}) + + +class NewOriginForm(FlaskForm): + domain_name = StringField('Domain Name', validators=[DataRequired()]) + description = StringField('Description', validators=[DataRequired()]) + group = SelectField('Group', validators=[DataRequired()]) + submit = SubmitField('Save Changes') + + +class EditOriginForm(FlaskForm): + description = StringField('Description', validators=[DataRequired()]) + group = SelectField('Group', validators=[DataRequired()]) + submit = SubmitField('Save Changes') + + +class EditMirrorForm(FlaskForm): + origin = SelectField('Origin') + url = StringField('URL') + submit = SubmitField('Save Changes') + + +class EditProxyForm(FlaskForm): + origin = SelectField('Origin') + submit = SubmitField('Save Changes') + + +class LifecycleForm(FlaskForm): + submit = SubmitField('Confirm') + + +class 
NewBridgeConfForm(FlaskForm): + provider = SelectField('Provider', validators=[DataRequired()]) + method = SelectField('Distribution Method', validators=[DataRequired()]) + description = StringField('Description') + group = SelectField('Group', validators=[DataRequired()]) + number = IntegerField('Number', validators=[NumberRange(1, message="One or more bridges must be created")]) + submit = SubmitField('Save Changes') + + +class EditBridgeConfForm(FlaskForm): + description = StringField('Description') + number = IntegerField('Number', validators=[NumberRange(1, message="One or more bridges must be created")]) + submit = SubmitField('Save Changes') + + +class NewMirrorListForm(FlaskForm): + provider = SelectField('Provider', validators=[DataRequired()]) + format = SelectField('Distribution Method', validators=[DataRequired()]) + description = StringField('Description', validators=[DataRequired()]) + container = StringField('Container', validators=[DataRequired()]) + branch = StringField('Branch') + filename = StringField('Filename', validators=[DataRequired()]) + submit = SubmitField('Save Changes') diff --git a/app/portal/static/portal.css b/app/portal/static/portal.css new file mode 100644 index 0000000..e1099fb --- /dev/null +++ b/app/portal/static/portal.css @@ -0,0 +1,100 @@ +body { + font-size: .875rem; +} + +.feather { + width: 16px; + height: 16px; + vertical-align: text-bottom; +} + +/* + * Sidebar + */ + +.sidebar { + position: fixed; + top: 0; + /* rtl:raw: + right: 0; + */ + bottom: 0; + /* rtl:remove */ + left: 0; + z-index: 100; /* Behind the navbar */ + padding: 48px 0 0; /* Height of navbar */ + box-shadow: inset -1px 0 0 rgba(0, 0, 0, .1); +} + +@media (max-width: 767.98px) { + .sidebar { + top: 5rem; + } +} + +.sidebar-sticky { + position: relative; + top: 0; + height: calc(100vh - 48px); + padding-top: .5rem; + overflow-x: hidden; + overflow-y: auto; /* Scrollable contents if viewport is shorter than content. 
*/ +} + +.sidebar .nav-link { + font-weight: 500; + color: #333; +} + +.sidebar .nav-link .feather { + margin-right: 4px; + color: #727272; +} + +.sidebar .nav-link.active { + color: #2470dc; +} + +.sidebar .nav-link:hover .feather, +.sidebar .nav-link.active .feather { + color: inherit; +} + +.sidebar-heading { + font-size: .75rem; + text-transform: uppercase; +} + +/* + * Navbar + */ + +.navbar-brand { + padding-top: .75rem; + padding-bottom: .75rem; + font-size: 1rem; + background-color: rgba(0, 0, 0, .25); + box-shadow: inset -1px 0 0 rgba(0, 0, 0, .25); +} + +.navbar .navbar-toggler { + top: .25rem; + right: 1rem; +} + +.navbar .form-control { + padding: .75rem 1rem; + border-width: 0; + border-radius: 0; +} + +.form-control-dark { + color: #fff; + background-color: rgba(255, 255, 255, .1); + border-color: rgba(255, 255, 255, .1); +} + +.form-control-dark:focus { + border-color: transparent; + box-shadow: 0 0 0 3px rgba(255, 255, 255, .25); +} diff --git a/app/portal/templates/alarms.html.j2 b/app/portal/templates/alarms.html.j2 new file mode 100644 index 0000000..b039b70 --- /dev/null +++ b/app/portal/templates/alarms.html.j2 @@ -0,0 +1,31 @@ +{% extends "base.html.j2" %} +{% from 'bootstrap5/form.html' import render_form %} + +{% block content %} +

Alarms

+

Proxies

+
+ + + + + + + + + + {% for alarm in alarms %} + + {% if alarm.target == "proxy" %} + + {% elif alarm.target == "service/cloudfront" %} + + {% endif %} + + + + {% endfor %} + +
ResourceTypeState
Proxy: {{ alarm.proxy.url }} ({{ alarm.proxy.origin.domain_name }})AWS CloudFront{{ alarm.alarm_type }}{{ alarm.alarm_state.name }}
+
+{% endblock %} diff --git a/app/portal/templates/base.html.j2 b/app/portal/templates/base.html.j2 new file mode 100644 index 0000000..4129be9 --- /dev/null +++ b/app/portal/templates/base.html.j2 @@ -0,0 +1,162 @@ + + + + {% block head %} + + + + + + {% block styles %} + + {{ bootstrap.load_css() }} + {% endblock %} + + Bypass Censorship Portal + + + + + + {% endblock %} + + + + + +
+
+ + +
+ + {% with messages = get_flashed_messages(with_categories=true) %} + {% for category, message in messages %} +
+ {{ message }} +
+ {% endfor %} + {% endwith %} + + {% block content %} + + {% endblock %} +
+
+
+ + +{% block scripts %} + + {{ bootstrap.load_js() }} +{% endblock %} + + + + diff --git a/app/portal/templates/bridgeconf.html.j2 b/app/portal/templates/bridgeconf.html.j2 new file mode 100644 index 0000000..2de1e81 --- /dev/null +++ b/app/portal/templates/bridgeconf.html.j2 @@ -0,0 +1,16 @@ +{% extends "base.html.j2" %} +{% from 'bootstrap5/form.html' import render_form %} +{% from "tables.html.j2" import bridges_table %} + +{% block content %} +

Tor Bridge Configuration

+

{{ bridgeconf.group.group_name }}: {{ bridgeconf.provider }}/{{ bridgeconf.method }}

+ +
+ {{ render_form(form) }} +
+ +

Bridges

+ {{ bridges_table(bridgeconf.bridges) }} + +{% endblock %} diff --git a/app/portal/templates/bridgeconfs.html.j2 b/app/portal/templates/bridgeconfs.html.j2 new file mode 100644 index 0000000..868d9f7 --- /dev/null +++ b/app/portal/templates/bridgeconfs.html.j2 @@ -0,0 +1,8 @@ +{% extends "base.html.j2" %} +{% from "tables.html.j2" import bridgeconfs_table %} + +{% block content %} +

Tor Bridge Configurations

+ Create new configuration + {{ bridgeconfs_table(bridgeconfs) }} +{% endblock %} diff --git a/app/portal/templates/bridges.html.j2 b/app/portal/templates/bridges.html.j2 new file mode 100644 index 0000000..3d38537 --- /dev/null +++ b/app/portal/templates/bridges.html.j2 @@ -0,0 +1,7 @@ +{% extends "base.html.j2" %} +{% from "tables.html.j2" import bridges_table %} + +{% block content %} +

Tor Bridges

+ {{ bridges_table(bridges) }} +{% endblock %} diff --git a/app/portal/templates/error.html.j2 b/app/portal/templates/error.html.j2 new file mode 100644 index 0000000..658d312 --- /dev/null +++ b/app/portal/templates/error.html.j2 @@ -0,0 +1,6 @@ +{% extends "base.html.j2" %} + +{% block content %} +

{{ header }}

+

{{ message }}

+{% endblock %} \ No newline at end of file diff --git a/app/portal/templates/group.html.j2 b/app/portal/templates/group.html.j2 new file mode 100644 index 0000000..0579ff9 --- /dev/null +++ b/app/portal/templates/group.html.j2 @@ -0,0 +1,18 @@ +{% extends "base.html.j2" %} +{% from 'bootstrap5/form.html' import render_form %} +{% from "tables.html.j2" import origins_table %} + +{% block content %} +

Groups

+

{{ group.group_name }}

+ +
+ {{ render_form(form) }} +
+ +

Origins

+ Create new origin + {% if group.origins %} + {{ origins_table(group.origins) }} + {% endif %} +{% endblock %} diff --git a/app/portal/templates/groups.html.j2 b/app/portal/templates/groups.html.j2 new file mode 100644 index 0000000..1c6881c --- /dev/null +++ b/app/portal/templates/groups.html.j2 @@ -0,0 +1,30 @@ +{% extends "base.html.j2" %} + +{% block content %} +

Groups

+ Create new group +
+ + + + + + + + + + + + {% for group in groups %} + + + + + + + + {% endfor %} + +
NameDescriptionEOTKSitesActions
{{ group.group_name }}{{ group.description }}{% if group.eotk %}√{% else %}x{% endif %}{{ group.origins | length }}View/Edit
+
+{% endblock %} \ No newline at end of file diff --git a/app/portal/templates/home.html.j2 b/app/portal/templates/home.html.j2 new file mode 100644 index 0000000..b392e13 --- /dev/null +++ b/app/portal/templates/home.html.j2 @@ -0,0 +1,6 @@ +{% extends "base.html.j2" %} + +{% block content %} +

Welcome

+

Welcome to the Bypass Censorship portal.

+{% endblock %} \ No newline at end of file diff --git a/app/portal/templates/lifecycle.html.j2 b/app/portal/templates/lifecycle.html.j2 new file mode 100644 index 0000000..baf874c --- /dev/null +++ b/app/portal/templates/lifecycle.html.j2 @@ -0,0 +1,8 @@ +{% extends "base.html.j2" %} +{% from 'bootstrap5/form.html' import render_form %} + +{% block content %} +

{{ header }}

+

{{ message }}

+ {{ render_form(form) }} +{% endblock %} diff --git a/app/portal/templates/mirrorlists.html.j2 b/app/portal/templates/mirrorlists.html.j2 new file mode 100644 index 0000000..a584091 --- /dev/null +++ b/app/portal/templates/mirrorlists.html.j2 @@ -0,0 +1,8 @@ +{% extends "base.html.j2" %} +{% from "tables.html.j2" import mirrorlists_table %} + +{% block content %} +

Mirror Lists

+ Create new mirror list + {{ mirrorlists_table(mirrorlists) }} +{% endblock %} diff --git a/app/portal/templates/new.html.j2 b/app/portal/templates/new.html.j2 new file mode 100644 index 0000000..bb8af2f --- /dev/null +++ b/app/portal/templates/new.html.j2 @@ -0,0 +1,12 @@ +{% extends "base.html.j2" %} +{% from 'bootstrap5/form.html' import render_form %} + +{% block content %} +

{{ (resource_p or (section + "s")).title() }}

+

New {{ section.lower() }}

+ +
+ {{ render_form(form) }} +
+ +{% endblock %} diff --git a/app/portal/templates/origin.html.j2 b/app/portal/templates/origin.html.j2 new file mode 100644 index 0000000..b5688d3 --- /dev/null +++ b/app/portal/templates/origin.html.j2 @@ -0,0 +1,20 @@ +{% extends "base.html.j2" %} +{% from 'bootstrap5/form.html' import render_form %} +{% from "tables.html.j2" import proxies_table %} + +{% block content %} +

Origins

+

+ {{ origin.group.group_name }}: {{ origin.domain_name }} + +

+ +
+ {{ render_form(form) }} +
+ +

Proxies

+ {{ proxies_table(origin.proxies) }} + +{% endblock %} diff --git a/app/portal/templates/origins.html.j2 b/app/portal/templates/origins.html.j2 new file mode 100644 index 0000000..ba0f1b6 --- /dev/null +++ b/app/portal/templates/origins.html.j2 @@ -0,0 +1,8 @@ +{% extends "base.html.j2" %} +{% from "tables.html.j2" import origins_table %} + +{% block content %} +

Origins

+ Create new origin + {{ origins_table(origins) }} +{% endblock %} diff --git a/app/portal/templates/proxies.html.j2 b/app/portal/templates/proxies.html.j2 new file mode 100644 index 0000000..c3441af --- /dev/null +++ b/app/portal/templates/proxies.html.j2 @@ -0,0 +1,7 @@ +{% extends "base.html.j2" %} +{% from "tables.html.j2" import proxies_table %} + +{% block content %} +

Proxies

+ {{ proxies_table(proxies) }} +{% endblock %} diff --git a/app/portal/templates/search.html.j2 b/app/portal/templates/search.html.j2 new file mode 100644 index 0000000..88bec71 --- /dev/null +++ b/app/portal/templates/search.html.j2 @@ -0,0 +1,14 @@ +{% extends "base.html.j2" %} +{% from "tables.html.j2" import origins_table %} +{% from "tables.html.j2" import proxies_table %} + +{% block content %} +

Search Results

+ {% if origins %} +

Origins

+ {{ origins_table(origins) }} + {% endif %}{% if proxies %} +

Proxies

+ {{ proxies_table(proxies) }} + {% endif %} +{% endblock %} diff --git a/app/portal/templates/tables.html.j2 b/app/portal/templates/tables.html.j2 new file mode 100644 index 0000000..0aad4f9 --- /dev/null +++ b/app/portal/templates/tables.html.j2 @@ -0,0 +1,253 @@ +{% macro origins_table(origins) %} +
+ + + + + + + + + + + + + {% for origin in origins %} + {% if not origin.destroyed %} + + + + + + + + + {% endif %} + {% endfor %} + +
NameDescriptionMirrorsProxiesGroupActions
+ + {{ origin.domain_name }} + {{ origin.description }}{{ origin.mirrors | length }}{{ origin.proxies | length }} + {{ origin.group.group_name }} + + View/Edit +
+
+{% endmacro %} + +{% macro proxies_table(proxies) %} +
+ + + + + + + + + + + + + {% for proxy in proxies %} + {% if not proxy.destroyed %} + + + + + + + + + {% endif %} + {% endfor %} + +
Origin Domain NameGroupProviderURLAlarmsActions
+ + {{ proxy.origin.domain_name }} + + {{ proxy.origin.group.group_name }} + {{ proxy.provider }} + + {{ proxy.url }} + + {% for alarm in proxy.alarms %} + + {% if alarm.alarm_state.name == "OK" %} + + + + + {% elif alarm.alarm_state.name == "UNKNOWN" %} + + + + + {% else %} + + + + + {% endif %} + + {% endfor %} + + {% if proxy.deprecated %} + Expiring + in {{ proxy.deprecated | mirror_expiry }} + {% else %} + Mark blocked + {% endif %} +
+
+{% endmacro %} + +{% macro bridgeconfs_table(bridgeconfs) %} +
+ + + + + + + + + + + + {% for bridgeconf in bridgeconfs %} + {% if not bridgeconf.destroyed %} + + + + + + + + {% endif %} + {% endfor %} + +
GroupProviderDistribution MethodNumberActions
+ {{ bridgeconf.group.group_name }} + {{ bridgeconf.provider }}{{ bridgeconf.method }}{{ bridgeconf.number }} + View/Edit + Destroy +
+
+{% endmacro %} + +{% macro bridges_table(bridges) %} +
+ + + + + + + + + + + + + {% for bridge in bridges %} + {% if not bridge.destroyed %} + + + + + + + + + {% endif %} + {% endfor %} + +
GroupConfigurationNicknameHashed FingerprintAlarmsActions
+ {{ bridge.conf.group.group_name }} + {{ bridge.conf.description }} ({{ bridge.conf.provider }}/{{ bridge.conf.method }}) + + {{ bridge.nickname }} + + + {{ bridge.hashed_fingerprint }} + + {% for alarm in bridge.alarms %} + + {% if alarm.alarm_state.name == "OK" %} + + + + + {% elif alarm.alarm_state.name == "UNKNOWN" %} + + + + + {% else %} + + + + + {% endif %} + + {% endfor %} + + {% if bridge.deprecated %} + Expiring + in {{ bridge.deprecated | mirror_expiry }} + {% else %} + Mark blocked + {% endif %} +
+
+{% endmacro %} + +{% macro mirrorlists_table(mirrorlists) %} +
+ + + + + + + + + + + + {% for list in mirrorlists %} + {% if not list.destroyed %} + + + + + + + + {% endif %} + {% endfor %} + +
ProviderFormatURIDescriptionActions
{{ list.provider }}{{ list.format }}{{ list.url() }}{{ list.description }} + Destroy +
+
+{% endmacro %} \ No newline at end of file diff --git a/app/static/.gitkeep b/app/static/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/app/terraform/__init__.py b/app/terraform/__init__.py new file mode 100644 index 0000000..76e4771 --- /dev/null +++ b/app/terraform/__init__.py @@ -0,0 +1,56 @@ +import json +import os +import subprocess +from typing import Dict, Any + +import jinja2 + +from app import app + + +class BaseAutomation: + short_name = None + + def working_directory(self, filename=None): + return os.path.join( + app.config['TERRAFORM_DIRECTORY'], + self.short_name or self.__class__.__name__.lower(), + filename or "" + ) + + def write_terraform_config(self, template: str, **kwargs): + tmpl = jinja2.Template(template) + with open(self.working_directory("main.tf"), 'w') as tf: + tf.write(tmpl.render(**kwargs)) + + def terraform_init(self): + subprocess.run( + ['terraform', 'init'], + cwd=self.working_directory()) + + def terraform_plan(self): + plan = subprocess.run( + ['terraform', 'plan'], + cwd=self.working_directory()) + + def terraform_apply(self, refresh: bool = True, parallelism: int = 10): + subprocess.run( + ['terraform', 'apply', f'-refresh={str(refresh).lower()}', '-auto-approve', + f'-parallelism={str(parallelism)}'], + cwd=self.working_directory()) + + def terraform_show(self) -> Dict[str, Any]: + terraform = subprocess.run( + ['terraform', 'show', '-json'], + cwd=os.path.join( + self.working_directory()), + stdout=subprocess.PIPE) + return json.loads(terraform.stdout) + + def terraform_output(self) -> Dict[str, Any]: + terraform = subprocess.run( + ['terraform', 'output', '-json'], + cwd=os.path.join( + self.working_directory()), + stdout=subprocess.PIPE) + return json.loads(terraform.stdout) diff --git a/app/terraform/block_bridge_github.py b/app/terraform/block_bridge_github.py new file mode 100644 index 0000000..7d11beb --- /dev/null +++ b/app/terraform/block_bridge_github.py @@ -0,0 +1,29 @@ +import datetime + +from 
dateutil.parser import isoparse +from github import Github + +from app import app +from app.models import Bridge + + +def check_blocks(): + g = Github(app.config['GITHUB_API_KEY']) + repo = g.get_repo(app.config['GITHUB_BRIDGE_REPO']) + for vp in app.config['GITHUB_BRIDGE_VANTAGE_POINTS']: + results = repo.get_contents(f"recentResult_{vp}").decoded_content.decode('utf-8').splitlines() + for result in results: + parts = result.split("\t") + if isoparse(parts[2]) < (datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=3)): + continue + if int(parts[1]) < 40: + bridge = Bridge.query.filter( + Bridge.nickname == parts[0] + ).first() + if bridge is not None: + bridge.deprecate() + + +if __name__ == "__main__": + with app.app_context(): + check_blocks() diff --git a/app/terraform/block_external.py b/app/terraform/block_external.py new file mode 100644 index 0000000..de79702 --- /dev/null +++ b/app/terraform/block_external.py @@ -0,0 +1,67 @@ +from bs4 import BeautifulSoup +import requests + +from app import app +from app.models import Proxy + + +def check_blocks(): + user_agent = {'User-agent': 'BypassCensorship/1.0 (contact@sr2.uk for info)'} + page = requests.get(app.config['EXTERNAL_CHECK_URL'], headers=user_agent) + soup = BeautifulSoup(page.content, 'html.parser') + h2 = soup.find_all('h2') + div = soup.find_all('div', class_="overflow-auto mb-5") + + results = {} + + i = 0 + while i < len(h2): + if not div[i].div: + urls = [] + a = div[i].find_all('a') + j = 0 + while j < len(a): + urls.append(a[j].text) + j += 1 + results[h2[i].text] = urls + else: + results[h2[i].text] = [] + i += 1 + + for vp in results: + if vp not in app.config['EXTERNAL_VANTAGE_POINTS']: + continue + for url in results[vp]: + if "cloudfront.net" in url: + slug = url[len('https://'):][:-len('.cloudfront.net')] + print(f"Found {slug} blocked") + proxy = Proxy.query.filter( + Proxy.provider == "cloudfront", + Proxy.slug == slug + ).first() + if not proxy: + print("Proxy not 
found") + continue + if proxy.deprecated: + print("Proxy already marked blocked") + continue + proxy.deprecate() + if "azureedge.net" in url: + slug = url[len('https://'):][:-len('.azureedge.net')] + print(f"Found {slug} blocked") + proxy = Proxy.query.filter( + Proxy.provider == "azure_cdn", + Proxy.slug == slug + ).first() + if not proxy: + print("Proxy not found") + continue + if proxy.deprecated: + print("Proxy already marked blocked") + continue + proxy.deprecate() + + +if __name__ == "__main__": + with app.app_context(): + check_blocks() diff --git a/app/terraform/bridge/__init__.py b/app/terraform/bridge/__init__.py new file mode 100644 index 0000000..5c15de5 --- /dev/null +++ b/app/terraform/bridge/__init__.py @@ -0,0 +1,77 @@ +import datetime + +from app import app +from app.extensions import db +from app.models import BridgeConf, Bridge, Group +from app.terraform import BaseAutomation + + +class BridgeAutomation(BaseAutomation): + def create_missing(self): + bridgeconfs = BridgeConf.query.filter( + BridgeConf.provider == self.provider + ).all() + for bridgeconf in bridgeconfs: + active_bridges = Bridge.query.filter( + Bridge.conf_id == bridgeconf.id, + Bridge.deprecated == None + ).all() + if len(active_bridges) < bridgeconf.number: + for i in range(bridgeconf.number - len(active_bridges)): + bridge = Bridge() + bridge.conf_id = bridgeconf.id + bridge.added = datetime.datetime.utcnow() + bridge.updated = datetime.datetime.utcnow() + db.session.add(bridge) + elif len(active_bridges) > bridgeconf.number: + active_bridge_count = len(active_bridges) + for bridge in active_bridges: + bridge.deprecate() + active_bridge_count -= 1 + if active_bridge_count == bridgeconf.number: + break + + def destroy_expired(self): + cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=0) + bridges = [b for b in Bridge.query.filter( + Bridge.destroyed == None, + Bridge.deprecated < cutoff + ).all() if b.conf.provider == self.provider] + for bridge in bridges: + 
bridge.destroy() + + def generate_terraform(self): + self.write_terraform_config( + self.template, + groups=Group.query.all(), + bridgeconfs=BridgeConf.query.filter( + BridgeConf.destroyed == None, + BridgeConf.provider == self.provider + ).all(), + global_namespace=app.config['GLOBAL_NAMESPACE'], + **{ + k: app.config[k.upper()] + for k in self.template_parameters + } + ) + + def import_terraform(self): + outputs = self.terraform_output() + for output in outputs: + if output.startswith('bridge_hashed_fingerprint_'): + parts = outputs[output]['value'].split(" ") + if len(parts) < 2: + continue + bridge = Bridge.query.filter(Bridge.id == output[len('bridge_hashed_fingerprint_'):]).first() + bridge.nickname = parts[0] + bridge.hashed_fingerprint = parts[1] + bridge.terraform_updated = datetime.datetime.utcnow() + if output.startswith('bridge_bridgeline_'): + parts = outputs[output]['value'].split(" ") + if len(parts) < 4: + continue + bridge = Bridge.query.filter(Bridge.id == output[len('bridge_bridgeline_'):]).first() + del(parts[3]) + bridge.bridgeline = " ".join(parts) + bridge.terraform_updated = datetime.datetime.utcnow() + db.session.commit() diff --git a/app/terraform/bridge/aws.py b/app/terraform/bridge/aws.py new file mode 100644 index 0000000..66f7834 --- /dev/null +++ b/app/terraform/bridge/aws.py @@ -0,0 +1,84 @@ +from app import app +from app.terraform.bridge import BridgeAutomation + + +class BridgeAWSAutomation(BridgeAutomation): + short_name = "bridge_aws" + provider = "aws" + + template_parameters = [ + "aws_access_key", + "aws_secret_key", + "ssh_public_key_path" + ] + + template = """ + terraform { + required_providers { + aws = { + version = "~> 4.2.0" + } + } + } + + provider "aws" { + access_key = "{{ aws_access_key }}" + secret_key = "{{ aws_secret_key }}" + region = "us-east-1" + } + + locals { + ssh_key = file("{{ ssh_public_key_path }}") + } + + {% for group in groups %} + module "label_{{ group.id }}" { + source = "cloudposse/label/null" + 
version = "0.25.0" + namespace = "{{ global_namespace }}" + tenant = "{{ group.group_name }}" + label_order = ["namespace", "tenant", "name", "attributes"] + } + {% endfor %} + + {% for bridgeconf in bridgeconfs %} + {% for bridge in bridgeconf.bridges %} + {% if not bridge.destroyed %} + module "bridge_{{ bridge.id }}" { + source = "sr2c/tor-bridge/aws" + version = "0.0.1" + ssh_key = local.ssh_key + contact_info = "hi" + context = module.label_{{ bridgeconf.group.id }}.context + name = "br" + attributes = ["{{ bridge.id }}"] + distribution_method = "{{ bridge.conf.method }}" + } + + output "bridge_hashed_fingerprint_{{ bridge.id }}" { + value = module.bridge_{{ bridge.id }}.hashed_fingerprint + } + + output "bridge_bridgeline_{{ bridge.id }}" { + value = module.bridge_{{ bridge.id }}.bridgeline + sensitive = true + } + {% endif %} + {% endfor %} + {% endfor %} + """ + + +def automate(): + auto = BridgeAWSAutomation() + auto.destroy_expired() + auto.create_missing() + auto.generate_terraform() + auto.terraform_init() + auto.terraform_apply() + auto.import_terraform() + + +if __name__ == "__main__": + with app.app_context(): + automate() diff --git a/app/terraform/bridge/gandi.py b/app/terraform/bridge/gandi.py new file mode 100644 index 0000000..11a28cb --- /dev/null +++ b/app/terraform/bridge/gandi.py @@ -0,0 +1,95 @@ +from app import app +from app.terraform.bridge import BridgeAutomation + + +class BridgeGandiAutomation(BridgeAutomation): + short_name = "bridge_gandi" + provider = "gandi" + + template_parameters = [ + "gandi_openstack_user", + "gandi_openstack_password", + "gandi_openstack_tenant_name", + "ssh_public_key_path" + ] + + template = """ + terraform { + required_providers { + openstack = { + source = "terraform-provider-openstack/openstack" + version = "~> 1.42.0" + } + } + } + + provider "openstack" { + auth_url = "https://keystone.sd6.api.gandi.net:5000/v3" + user_domain_name = "public" + project_domain_name = "public" + user_name = "{{ 
gandi_openstack_user }}" + password = "{{ gandi_openstack_password }}" + tenant_name = "{{ gandi_openstack_tenant_name }}" + region = "FR-SD6" + } + + locals { + ssh_key = file("{{ ssh_public_key_path }}") + } + + {% for group in groups %} + module "label_{{ group.id }}" { + source = "cloudposse/label/null" + version = "0.25.0" + namespace = "{{ global_namespace }}" + tenant = "{{ group.group_name }}" + label_order = ["namespace", "tenant", "name", "attributes"] + } + {% endfor %} + + {% for bridgeconf in bridgeconfs %} + {% for bridge in bridgeconf.bridges %} + {% if not bridge.destroyed %} + module "bridge_{{ bridge.id }}" { + source = "sr2c/tor-bridge/openstack" + version = "0.0.6" + context = module.label_{{ bridgeconf.group.id }}.context + name = "br" + attributes = ["{{ bridge.id }}"] + ssh_key = local.ssh_key + contact_info = "hi" + distribution_method = "{{ bridge.conf.method }}" + + image_name = "Debian 11 Bullseye" + flavor_name = "V-R1" + external_network_name = "public" + require_block_device_creation = true + } + + output "bridge_hashed_fingerprint_{{ bridge.id }}" { + value = module.bridge_{{ bridge.id }}.hashed_fingerprint + } + + output "bridge_bridgeline_{{ bridge.id }}" { + value = module.bridge_{{ bridge.id }}.bridgeline + sensitive = true + } + {% endif %} + {% endfor %} + {% endfor %} + """ + + +def automate(): + auto = BridgeGandiAutomation() + auto.destroy_expired() + auto.create_missing() + auto.generate_terraform() + auto.terraform_init() + auto.terraform_apply() + auto.import_terraform() + + +if __name__ == "__main__": + with app.app_context(): + automate() diff --git a/app/terraform/bridge/hcloud.py b/app/terraform/bridge/hcloud.py new file mode 100644 index 0000000..e3c06e4 --- /dev/null +++ b/app/terraform/bridge/hcloud.py @@ -0,0 +1,98 @@ +from app import app +from app.terraform.bridge import BridgeAutomation + + +class BridgeHcloudAutomation(BridgeAutomation): + short_name = "bridge_hcloud" + provider = "hcloud" + + 
template_parameters = [ + "hcloud_token" + ] + + template = """ + terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.1.0" + } + hcloud = { + source = "hetznercloud/hcloud" + version = "1.31.1" + } + } + } + + provider "hcloud" { + token = "{{ hcloud_token }}" + } + + data "hcloud_datacenters" "ds" { + } + + data "hcloud_server_type" "cx11" { + name = "cx11" + } + + {% for group in groups %} + module "label_{{ group.id }}" { + source = "cloudposse/label/null" + version = "0.25.0" + namespace = "{{ global_namespace }}" + tenant = "{{ group.group_name }}" + label_order = ["namespace", "tenant", "name", "attributes"] + } + {% endfor %} + + {% for bridgeconf in bridgeconfs %} + {% for bridge in bridgeconf.bridges %} + {% if not bridge.destroyed %} + resource "random_shuffle" "datacenter_{{ bridge.id }}" { + input = [for s in data.hcloud_datacenters.ds.datacenters : s.name if contains(s.available_server_type_ids, data.hcloud_server_type.cx11.id)] + result_count = 1 + + lifecycle { + ignore_changes = [input] # don't replace all the bridges if a new DC appears + } + } + + module "bridge_{{ bridge.id }}" { + source = "sr2c/tor-bridge/hcloud" + version = "0.0.2" + datacenter = one(random_shuffle.datacenter_{{ bridge.id }}.result) + context = module.label_{{ bridgeconf.group.id }}.context + name = "br" + attributes = ["{{ bridge.id }}"] + ssh_key_name = "bc" + contact_info = "hi" + distribution_method = "{{ bridge.conf.method }}" + } + + output "bridge_hashed_fingerprint_{{ bridge.id }}" { + value = module.bridge_{{ bridge.id }}.hashed_fingerprint + } + + output "bridge_bridgeline_{{ bridge.id }}" { + value = module.bridge_{{ bridge.id }}.bridgeline + sensitive = true + } + {% endif %} + {% endfor %} + {% endfor %} + """ + + +def automate(): + auto = BridgeHcloudAutomation() + auto.destroy_expired() + auto.create_missing() + auto.generate_terraform() + auto.terraform_init() + auto.terraform_apply() + auto.import_terraform() + + +if 
__name__ == "__main__": + with app.app_context(): + automate() diff --git a/app/terraform/bridge/ovh.py b/app/terraform/bridge/ovh.py new file mode 100644 index 0000000..df3f784 --- /dev/null +++ b/app/terraform/bridge/ovh.py @@ -0,0 +1,122 @@ +from app import app +from app.terraform.bridge import BridgeAutomation + + +class BridgeOvhAutomation(BridgeAutomation): + short_name = "bridge_ovh" + provider = "ovh" + + template_parameters = [ + "ovh_cloud_application_key", + "ovh_cloud_application_secret", + "ovh_cloud_consumer_key", + "ovh_cloud_project_service", + "ovh_openstack_user", + "ovh_openstack_password", + "ovh_openstack_tenant_id", + "ssh_public_key_path" + ] + + template = """ + terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.1.0" + } + openstack = { + source = "terraform-provider-openstack/openstack" + version = "~> 1.42.0" + } + ovh = { + source = "ovh/ovh" + version = ">= 0.13.0" + } + } + } + + provider "openstack" { + auth_url = "https://auth.cloud.ovh.net/v3/" + domain_name = "Default" # Domain name - Always at 'default' for OVHcloud + user_name = "{{ ovh_openstack_user }}" + password = "{{ ovh_openstack_password }}" + tenant_id = "{{ ovh_openstack_tenant_id }}" + } + + provider "ovh" { + endpoint = "ovh-eu" + application_key = "{{ ovh_cloud_application_key }}" + application_secret = "{{ ovh_cloud_application_secret }}" + consumer_key = "{{ ovh_cloud_consumer_key }}" + } + + locals { + ssh_key = file("{{ ssh_public_key_path }}") + } + + data "ovh_cloud_project_regions" "regions" { + service_name = "{{ ovh_cloud_project_service }}" + has_services_up = ["instance"] + } + + {% for group in groups %} + module "label_{{ group.id }}" { + source = "cloudposse/label/null" + version = "0.25.0" + namespace = "{{ global_namespace }}" + tenant = "{{ group.group_name }}" + label_order = ["namespace", "tenant", "name", "attributes"] + } + {% endfor %} + + {% for bridgeconf in bridgeconfs %} + {% for bridge in 
bridgeconf.bridges %} + {% if not bridge.destroyed %} + resource "random_shuffle" "region_{{ bridge.id }}" { + input = data.ovh_cloud_project_regions.regions.names + result_count = 1 + + lifecycle { + ignore_changes = [input] # don't replace all the bridges if a new region appears + } + } + + module "bridge_{{ bridge.id }}" { + source = "sr2c/tor-bridge/openstack" + version = "0.0.6" + region = one(random_shuffle.region_{{ bridge.id }}.result) + context = module.label_{{ bridgeconf.group.id }}.context + name = "br" + attributes = ["{{ bridge.id }}"] + ssh_key = local.ssh_key + contact_info = "hi" + distribution_method = "{{ bridge.conf.method }}" + } + + output "bridge_hashed_fingerprint_{{ bridge.id }}" { + value = module.bridge_{{ bridge.id }}.hashed_fingerprint + } + + output "bridge_bridgeline_{{ bridge.id }}" { + value = module.bridge_{{ bridge.id }}.bridgeline + sensitive = true + } + {% endif %} + {% endfor %} + {% endfor %} + """ + + +def automate(): + auto = BridgeOvhAutomation() + auto.destroy_expired() + auto.create_missing() + auto.generate_terraform() + auto.terraform_init() + auto.terraform_apply() + auto.import_terraform() + + +if __name__ == "__main__": + with app.app_context(): + automate() diff --git a/app/terraform/eotk.py b/app/terraform/eotk.py new file mode 100644 index 0000000..afd676c --- /dev/null +++ b/app/terraform/eotk.py @@ -0,0 +1,79 @@ +from app import app +from app.models import Group +from app.terraform import BaseAutomation + + +class EotkAutomation(BaseAutomation): + short_name = "eotk" + + template_parameters = [ + "aws_access_key", + "aws_secret_key" + ] + + template = """ + terraform { + required_providers { + aws = { + version = "~> 4.4.0" + } + } + } + + provider "aws" { + access_key = "{{ aws_access_key }}" + secret_key = "{{ aws_secret_key }}" + region = "us-east-1" + } + + {% for group in groups %} + module "label_{{ group.id }}" { + source = "cloudposse/label/null" + version = "0.25.0" + namespace = "{{ global_namespace 
}}" + tenant = "{{ group.group_name }}" + label_order = ["namespace", "tenant", "name", "attributes"] + } + + module "bucket_{{ group.id }}" { + source = "cloudposse/s3-bucket/aws" + version = "0.49.0" + acl = "private" + enabled = true + user_enabled = true + versioning_enabled = false + allowed_bucket_actions = [ + "s3:GetObject", + "s3:PutObject", + "s3:ListBucket", + "s3:GetBucketLocation" + ] + context = module.label_{{ group.id }}.context + name = "logs" + attributes = ["eotk"] + } + + resource "aws_sns_topic" "alarms_{{ group.id }}" { + name = "${module.label_{{ group.id }}.id}-eotk-alarms" + } + {% endfor %} + """ + + def generate_terraform(self): + self.write_terraform_config( + self.template, + groups=Group.query.filter(Group.eotk == True).all(), + global_namespace=app.config['GLOBAL_NAMESPACE'], + **{ + k: app.config[k.upper()] + for k in self.template_parameters + } + ) + + +if __name__ == "__main__": + with app.app_context(): + auto = EotkAutomation() + auto.generate_terraform() + auto.terraform_init() + auto.terraform_apply() diff --git a/app/terraform/list/__init__.py b/app/terraform/list/__init__.py new file mode 100644 index 0000000..d465cee --- /dev/null +++ b/app/terraform/list/__init__.py @@ -0,0 +1,28 @@ +import json + +from app import app +from app.mirror_sites import bridgelines, mirror_sites, mirror_mapping +from app.models import MirrorList +from app.terraform import BaseAutomation + + +class ListAutomation(BaseAutomation): + def generate_terraform(self): + self.write_terraform_config( + self.template, + lists=MirrorList.query.filter( + MirrorList.destroyed == None, + MirrorList.provider == self.provider, + ).all(), + global_namespace=app.config['GLOBAL_NAMESPACE'], + **{ + k: app.config[k.upper()] + for k in self.template_parameters + } + ) + with open(self.working_directory('bc2.json'), 'w') as out: + json.dump(mirror_sites(), out, indent=2, sort_keys=True) + with open(self.working_directory('bca.json'), 'w') as out: + 
json.dump(mirror_mapping(), out, indent=2, sort_keys=True) + with open(self.working_directory('bridgelines.json'), 'w') as out: + json.dump(bridgelines(), out, indent=2, sort_keys=True) diff --git a/app/terraform/list/github.py b/app/terraform/list/github.py new file mode 100644 index 0000000..852c70c --- /dev/null +++ b/app/terraform/list/github.py @@ -0,0 +1,55 @@ +from app import app +from app.terraform.list import ListAutomation + + +class ListGithubAutomation(ListAutomation): + short_name = "list_github" + provider = "github" + + template_parameters = [ + "github_api_key" + ] + + template = """ + terraform { + required_providers { + github = { + source = "integrations/github" + version = "~> 4.20.1" + } + } + } + + {% for list in lists %} + provider "github" { + alias = "list_{{ list.id }}" + owner = "{{ list.container.split("/")[0] }}" + token = "{{ github_api_key }}" + } + + data "github_repository" "repository_{{ list.id }}" { + provider = github.list_{{ list.id }} + name = "{{ list.container.split("/")[1] }}" + } + + resource "github_repository_file" "file_{{ list.id }}" { + provider = github.list_{{ list.id }} + repository = data.github_repository.repository_{{ list.id }}.name + branch = "{{ list.branch }}" + file = "{{ list.filename }}" + content = file("{{ list.format }}.json") + commit_message = "Managed by Terraform" + commit_author = "Terraform User" + commit_email = "terraform@api.otf.is" + overwrite_on_create = true + } + {% endfor %} + """ + + +if __name__ == "__main__": + with app.app_context(): + auto = ListGithubAutomation() + auto.generate_terraform() + auto.terraform_init() + auto.terraform_apply() diff --git a/app/terraform/list/gitlab.py b/app/terraform/list/gitlab.py new file mode 100644 index 0000000..db19ebc --- /dev/null +++ b/app/terraform/list/gitlab.py @@ -0,0 +1,54 @@ +from app import app +from app.terraform.list import ListAutomation + + +class ListGitlabAutomation(ListAutomation): + short_name = "list_gitlab" + provider = "gitlab" 
+
+    template_parameters = [
+        "gitlab_token",
+        "gitlab_author_email",
+        "gitlab_author_name",
+        "gitlab_commit_message"
+    ]
+
+    template = """
+    terraform {
+      required_providers {
+        gitlab = {
+          source = "gitlabhq/gitlab"
+          version = "~> 3.12.0"
+        }
+      }
+    }
+
+    provider "gitlab" {
+      token = "{{ gitlab_token }}"
+    }
+
+    {% for list in lists %}
+    data "gitlab_project" "project_{{ list.id }}" {
+      id = "{{ list.container }}"
+    }
+
+    resource "gitlab_repository_file" "file_{{ list.id }}" {
+      project = data.gitlab_project.project_{{ list.id }}.id
+      file_path = "{{ list.filename }}"
+      branch = "{{ list.branch }}"
+      content = base64encode(file("{{ list.format }}.json"))
+      author_email = "{{ gitlab_author_email }}"
+      author_name = "{{ gitlab_author_name }}"
+      commit_message = "{{ gitlab_commit_message }}"
+    }
+
+    {% endfor %}
+    """
+
+
+if __name__ == "__main__":
+    with app.app_context():
+        auto = ListGitlabAutomation()
+        auto.generate_terraform()
+        auto.terraform_init()
+        auto.terraform_apply()
diff --git a/app/terraform/list/s3.py b/app/terraform/list/s3.py
new file mode 100644
index 0000000..c3131e2
--- /dev/null
+++ b/app/terraform/list/s3.py
@@ -0,0 +1,51 @@
+from app import app
+from app.terraform.list import ListAutomation
+
+
+class ListS3Automation(ListAutomation):
+    short_name = "list_s3"
+    provider = "s3"
+
+    template_parameters = [
+        "aws_access_key",
+        "aws_secret_key"
+    ]
+
+    template = """
+    terraform {
+      required_providers {
+        aws = {
+          version = "~> 4.4.0"
+        }
+      }
+    }
+
+    provider "aws" {
+      access_key = "{{ aws_access_key }}"
+      secret_key = "{{ aws_secret_key }}"
+      region = "us-east-1"
+    }
+
+    {% for list in lists %}
+    resource "aws_s3_object" "object_{{ list.id }}" {
+      bucket = "{{ list.container }}"
+      key = "{{ list.filename }}"
+      source = "{{ list.format }}.json"
+      content_type = "application/json"
+      etag = filemd5("{{ list.format }}.json")
+    }
+    {% endfor %}
+    """
+
+
+# Backwards-compatible alias: the class was originally named after the GitHub
+# automation it was copied from. Prefer ListS3Automation in new code.
+ListGithubAutomation = ListS3Automation
+
+
+if __name__ == "__main__":
+    with app.app_context():
+        auto = 
ListGithubAutomation() + auto.generate_terraform() + auto.terraform_init() + auto.terraform_apply() diff --git a/app/terraform/proxy/__init__.py b/app/terraform/proxy/__init__.py new file mode 100644 index 0000000..d93d9b4 --- /dev/null +++ b/app/terraform/proxy/__init__.py @@ -0,0 +1,51 @@ +import datetime + +from app import app +from app.extensions import db +from app.models import Group, Origin, Proxy +from app.terraform import BaseAutomation + + +class ProxyAutomation(BaseAutomation): + def create_missing_proxies(self): + origins = Origin.query.all() + for origin in origins: + cloudfront_proxies = [ + x for x in origin.proxies + if x.provider == self.provider and x.deprecated is None and x.destroyed is None + ] + if not cloudfront_proxies: + proxy = Proxy() + proxy.origin_id = origin.id + proxy.provider = self.provider + proxy.added = datetime.datetime.utcnow() + proxy.updated = datetime.datetime.utcnow() + db.session.add(proxy) + db.session.commit() + + def destroy_expired_proxies(self): + cutoff = datetime.datetime.utcnow() - datetime.timedelta(days=3) + proxies = Proxy.query.filter( + Proxy.destroyed == None, + Proxy.provider == self.provider, + Proxy.deprecated < cutoff + ).all() + for proxy in proxies: + proxy.destroyed = datetime.datetime.utcnow() + proxy.updated = datetime.datetime.utcnow() + db.session.commit() + + def generate_terraform(self): + self.write_terraform_config( + self.template, + groups=Group.query.all(), + proxies=Proxy.query.filter( + Proxy.provider == self.provider, + Proxy.destroyed == None + ).all(), + global_namespace=app.config['GLOBAL_NAMESPACE'], + **{ + k: app.config[k.upper()] + for k in self.template_parameters + } + ) diff --git a/app/terraform/proxy/azure_cdn.py b/app/terraform/proxy/azure_cdn.py new file mode 100644 index 0000000..369f5ce --- /dev/null +++ b/app/terraform/proxy/azure_cdn.py @@ -0,0 +1,238 @@ +import datetime +import string +import random + +from azure.identity import ClientSecretCredential +from 
azure.mgmt.alertsmanagement import AlertsManagementClient +import tldextract + +from app import app +from app.alarms import get_proxy_alarm +from app.extensions import db +from app.models import Group, Proxy, Alarm, AlarmState +from app.terraform.proxy import ProxyAutomation + + +class ProxyAzureCdnAutomation(ProxyAutomation): + short_name = "proxy_azure_cdn" + provider = "azure_cdn" + + template_parameters = [ + "azure_resource_group_name", + "azure_storage_account_name", + "azure_location", + "azure_client_id", + "azure_client_secret", + "azure_subscription_id", + "azure_tenant_id" + ] + + template = """ + terraform { + required_providers { + azurerm = { + source = "hashicorp/azurerm" + version = "=2.99.0" + } + } + } + + provider "azurerm" { + features {} + + client_id = "{{ azure_client_id }}" + client_secret = "{{ azure_client_secret }}" + subscription_id = "{{ azure_subscription_id }}" + tenant_id = "{{ azure_tenant_id }}" + skip_provider_registration = true + } + + data "azurerm_resource_group" "this" { + name = "{{ azure_resource_group_name }}" + } + + resource "azurerm_storage_account" "this" { + name = "{{ azure_storage_account_name }}" + resource_group_name = data.azurerm_resource_group.this.name + location = "{{ azure_location }}" + account_tier = "Standard" + account_replication_type = "RAGRS" + } + + {% for group in groups %} + module "label_{{ group.id }}" { + source = "cloudposse/label/null" + version = "0.25.0" + namespace = "{{ global_namespace }}" + tenant = "{{ group.group_name }}" + label_order = ["namespace", "tenant", "name", "attributes"] + } + + resource "azurerm_cdn_profile" "profile_{{ group.id }}" { + name = module.label_{{ group.id }}.id + location = "{{ azure_location }}" + resource_group_name = data.azurerm_resource_group.this.name + sku = "Standard_Microsoft" + + tags = module.label_{{ group.id }}.tags + } + + resource "azurerm_monitor_diagnostic_setting" "profile_diagnostic_{{ group.id }}" { + name = "cdn-diagnostics" + 
target_resource_id = azurerm_cdn_profile.profile_{{ group.id }}.id + storage_account_id = azurerm_storage_account.this.id + + log { + category = "AzureCDNAccessLog" + enabled = true + + retention_policy { + enabled = true + days = 90 + } + } + + metric { + category = "AllMetrics" + enabled = true + + retention_policy { + enabled = true + days = 90 + } + } + } + + resource "azurerm_monitor_metric_alert" "response_alert_{{ group.id }}" { + name = "bandwidth-out-high-${module.label_{{ group.id }}.id}" + resource_group_name = data.azurerm_resource_group.this.name + scopes = [azurerm_cdn_profile.profile_{{ group.id }}.id] + description = "Action will be triggered when response size is too high." + + criteria { + metric_namespace = "Microsoft.Cdn/profiles" + metric_name = "ResponseSize" + aggregation = "Total" + operator = "GreaterThan" + threshold = 21474836481 + } + + window_size = "PT1H" + } + {% endfor %} + + {% for proxy in proxies %} + resource "azurerm_cdn_endpoint" "endpoint_{{ proxy.id }}" { + name = "{{ proxy.slug }}" + profile_name = azurerm_cdn_profile.profile_{{ proxy.origin.group.id }}.name + location = "{{ azure_location }}" + resource_group_name = data.azurerm_resource_group.this.name + + origin { + name = "upstream" + host_name = "{{ proxy.origin.domain_name }}" + } + + global_delivery_rule { + modify_request_header_action { + action = "Overwrite" + name = "User-Agent" + value = "Amazon CloudFront" + } + modify_request_header_action { + action = "Append" + name = "X-Amz-Cf-Id" + value = "dummystring" + } + } + } + + resource "azurerm_monitor_diagnostic_setting" "diagnostic_{{ proxy.id }}" { + name = "cdn-diagnostics" + target_resource_id = azurerm_cdn_endpoint.endpoint_{{ proxy.id }}.id + storage_account_id = azurerm_storage_account.this.id + + log { + category = "CoreAnalytics" + enabled = true + + retention_policy { + enabled = true + days = 90 + } + } + } + {% endfor %} + """ + + def create_missing_proxies(self): + groups = Group.query.all() + for 
group in groups:
+            active_proxies = len([p for p in Proxy.query.filter(
+                Proxy.provider == 'azure_cdn',
+                Proxy.destroyed == None
+            ).all() if p.origin.group_id == group.id])
+            for origin in group.origins:
+                if active_proxies >= 25:  # Azure CDN profile endpoint limit; ">=" also stops if already over
+                    break
+                active_proxies += 1
+                azure_cdn_proxies = [
+                    x for x in origin.proxies
+                    if x.provider == "azure_cdn" and x.deprecated is None and x.destroyed is None
+                ]
+                if not azure_cdn_proxies:
+                    proxy = Proxy()
+                    proxy.origin_id = origin.id
+                    proxy.provider = "azure_cdn"
+                    proxy.slug = tldextract.extract(origin.domain_name).domain[:5] + ''.join(
+                        random.choices(string.ascii_lowercase, k=random.randint(10, 15)))
+                    proxy.url = f"https://{proxy.slug}.azureedge.net"
+                    proxy.added = datetime.datetime.utcnow()
+                    proxy.updated = datetime.datetime.utcnow()
+                    db.session.add(proxy)
+        db.session.commit()
+
+
+def set_urls():
+    proxies = Proxy.query.filter(
+        Proxy.provider == 'azure_cdn',
+        Proxy.destroyed == None
+    ).all()
+    for proxy in proxies:
+        proxy.url = f"https://{proxy.slug}.azureedge.net"
+    db.session.commit()
+
+
+def import_monitor_alerts():
+    credential = ClientSecretCredential(
+        tenant_id=app.config['AZURE_TENANT_ID'],
+        client_id=app.config['AZURE_CLIENT_ID'],
+        client_secret=app.config['AZURE_CLIENT_SECRET'])
+    client = AlertsManagementClient(
+        credential,
+        app.config['AZURE_SUBSCRIPTION_ID']
+    )
+    firing = [x.name[len("bandwidth-out-high-bc-"):]
+              for x in client.alerts.get_all()
+              if x.name.startswith("bandwidth-out-high-bc-") and x.properties.essentials.monitor_condition == "Fired"]
+    for proxy in Proxy.query.filter(
+            Proxy.provider == "azure_cdn",
+            Proxy.destroyed == None
+    ):
+        alarm = get_proxy_alarm(proxy.id, "bandwidth-out-high")
+        if proxy.origin.group.group_name.lower() not in firing:
+            alarm.update_state(AlarmState.OK, "Azure monitor alert not firing")
+        else:
+            alarm.update_state(AlarmState.CRITICAL, "Azure monitor alert firing")
+
+
+if __name__ == "__main__":
+    with app.app_context():
+        auto = 
ProxyAzureCdnAutomation() + auto.create_missing_proxies() + auto.destroy_expired_proxies() + auto.generate_terraform() + auto.terraform_init() + auto.terraform_apply(refresh=False, parallelism=1) # Rate limits are problem + set_urls() + import_monitor_alerts() diff --git a/app/terraform/proxy/cloudfront.py b/app/terraform/proxy/cloudfront.py new file mode 100644 index 0000000..af19d64 --- /dev/null +++ b/app/terraform/proxy/cloudfront.py @@ -0,0 +1,156 @@ +import datetime +import json +import os +import subprocess + +import boto3 + +from app import app +from app.alarms import get_proxy_alarm +from app.extensions import db +from app.models import Proxy, Alarm, AlarmState +from app.terraform.proxy import ProxyAutomation + + +class ProxyCloudfrontAutomation(ProxyAutomation): + short_name = "proxy_cloudfront" + provider = "cloudfront" + + template_parameters = [ + "aws_access_key", + "aws_secret_key" + ] + + template = """ + terraform { + required_providers { + aws = { + version = "~> 4.4.0" + } + } + } + + provider "aws" { + access_key = "{{ aws_access_key }}" + secret_key = "{{ aws_secret_key }}" + region = "us-east-1" + } + + {% for group in groups %} + module "label_{{ group.id }}" { + source = "cloudposse/label/null" + version = "0.25.0" + namespace = "{{ global_namespace }}" + tenant = "{{ group.group_name }}" + label_order = ["namespace", "tenant", "name", "attributes"] + } + + module "log_bucket_{{ group.id }}" { + source = "cloudposse/s3-log-storage/aws" + version = "0.28.0" + context = module.label_{{ group.id }}.context + name = "logs" + attributes = ["cloudfront"] + acl = "log-delivery-write" + standard_transition_days = 30 + glacier_transition_days = 60 + expiration_days = 90 + } + + resource "aws_sns_topic" "alarms_{{ group.id }}" { + name = "${module.label_{{ group.id }}.id}-cloudfront-alarms" + } + {% endfor %} + + {% for proxy in proxies %} + module "cloudfront_{{ proxy.id }}" { + source = "sr2c/bc-proxy/aws" + version = "0.0.5" + origin_domain = "{{ 
proxy.origin.domain_name }}" + logging_bucket = module.log_bucket_{{ proxy.origin.group.id }}.bucket_domain_name + sns_topic_arn = aws_sns_topic.alarms_{{ proxy.origin.group.id }}.arn + low_bandwidth_alarm = false + context = module.label_{{ proxy.origin.group.id }}.context + name = "proxy" + attributes = ["{{ proxy.origin.domain_name }}"] + } + {% endfor %} + """ + + +def import_cloudfront_values(): + terraform = subprocess.run( + ['terraform', 'show', '-json'], + cwd=os.path.join( + app.config['TERRAFORM_DIRECTORY'], + "proxy_cloudfront"), + stdout=subprocess.PIPE) + state = json.loads(terraform.stdout) + + for mod in state['values']['root_module']['child_modules']: + if mod['address'].startswith('module.cloudfront_'): + for res in mod['resources']: + if res['address'].endswith('aws_cloudfront_distribution.this'): + proxy = Proxy.query.filter(Proxy.id == mod['address'][len('module.cloudfront_'):]).first() + proxy.url = "https://" + res['values']['domain_name'] + proxy.slug = res['values']['id'] + proxy.terraform_updated = datetime.datetime.utcnow() + db.session.commit() + break + + +def import_cloudwatch_alarms(): + cloudwatch = boto3.client('cloudwatch', + aws_access_key_id=app.config['AWS_ACCESS_KEY'], + aws_secret_access_key=app.config['AWS_SECRET_KEY'], + region_name='us-east-1') + dist_paginator = cloudwatch.get_paginator('describe_alarms') + page_iterator = dist_paginator.paginate(AlarmNamePrefix="bandwidth-out-high-") + for page in page_iterator: + for cw_alarm in page['MetricAlarms']: + dist_id = cw_alarm["AlarmName"][len("bandwidth-out-high-"):] + proxy = Proxy.query.filter(Proxy.slug == dist_id).first() + if proxy is None: + print("Skipping unknown proxy " + dist_id) + continue + alarm = get_proxy_alarm(proxy.id, "bandwidth-out-high") + if cw_alarm['StateValue'] == "OK": + alarm.update_state(AlarmState.OK, "CloudWatch alarm OK") + elif cw_alarm['StateValue'] == "ALARM": + alarm.update_state(AlarmState.CRITICAL, "CloudWatch alarm ALARM") + else: + 
alarm.update_state(AlarmState.UNKNOWN, f"CloudWatch alarm {cw_alarm['StateValue']}") + alarm = Alarm.query.filter( + Alarm.alarm_type == "cloudfront-quota" + ).first() + if alarm is None: + alarm = Alarm() + alarm.target = "service/cloudfront" + alarm.alarm_type = "cloudfront-quota" + alarm.state_changed = datetime.datetime.utcnow() + db.session.add(alarm) + alarm.last_updated = datetime.datetime.utcnow() + deployed_count = len(Proxy.query.filter( + Proxy.destroyed == None).all()) + old_state = alarm.alarm_state + if deployed_count > 370: + alarm.alarm_state = AlarmState.CRITICAL + elif deployed_count > 320: + alarm.alarm_state = AlarmState.WARNING + else: + alarm.alarm_state = AlarmState.OK + if alarm.alarm_state != old_state: + alarm.state_changed = datetime.datetime.utcnow() + db.session.commit() + + +if __name__ == "__main__": + with app.app_context(): + auto = ProxyCloudfrontAutomation() + auto.destroy_expired_proxies() + auto.create_missing_proxies() + auto.generate_terraform() + auto.terraform_init() + auto.terraform_apply() + import_cloudfront_values() + import_cloudwatch_alarms() diff --git a/app/terraform/proxy_check.py b/app/terraform/proxy_check.py new file mode 100644 index 0000000..06c0cf1 --- /dev/null +++ b/app/terraform/proxy_check.py @@ -0,0 +1,60 @@ +import requests + +from app import app +from app.extensions import db +from app.models import AlarmState, Alarm, Proxy + + +def set_http_alarm(proxy_id: int, state: AlarmState, text: str): + alarm = Alarm.query.filter( + Alarm.proxy_id == proxy_id, + Alarm.alarm_type == "http-status" + ).first() + if alarm is None: + alarm = Alarm() + alarm.proxy_id = proxy_id + alarm.alarm_type = "http-status" + db.session.add(alarm) + alarm.update_state(state, text) + + +def check_http(): + proxies = Proxy.query.filter( + Proxy.destroyed == None + ) + for proxy in proxies: + try: + if proxy.url is None: + continue + r = requests.get(proxy.url, + allow_redirects=False, + timeout=5) + r.raise_for_status() + if 
r.is_redirect:
+                set_http_alarm(
+                    proxy.id,
+                    AlarmState.CRITICAL,
+                    f"{r.status_code} {r.reason}"
+                )
+            else:
+                set_http_alarm(
+                    proxy.id,
+                    AlarmState.OK,
+                    f"{r.status_code} {r.reason}"
+                )
+        except (requests.ConnectionError, requests.Timeout):
+            set_http_alarm(
+                proxy.id,
+                AlarmState.CRITICAL,
+                "Connection failure")
+        except requests.HTTPError:
+            set_http_alarm(
+                proxy.id,
+                AlarmState.CRITICAL,
+                f"{r.status_code} {r.reason}"
+            )
+
+
+if __name__ == "__main__":
+    with app.app_context():
+        check_http()
diff --git a/config.yaml.example b/config.yaml.example
new file mode 100644
index 0000000..4e49bfa
--- /dev/null
+++ b/config.yaml.example
@@ -0,0 +1,33 @@
+---
+# Supports any backend supported by SQLAlchemy, but you may need additional
+# packages installed if you're not using SQLite.
+SQLALCHEMY_DATABASE_URI: sqlite:///example.db
+SQLALCHEMY_TRACK_MODIFICATIONS: true
+
+# You can just put whatever here, but you should change it!
+SECRET_KEY: iechaj0mun6beih3rooga0mei7eo0iwoal1eeweN
+
+# This directory must exist and be writable by the user running the portal.
+TERRAFORM_DIRECTORY: /home/bc/terraform + +# AWS (CloudFront) +AWS_ACCESS_KEY: accesskeygoeshere +AWS_SECRET_KEY: accesssecretgoeshere + +# Azure +AZURE_RESOURCE_GROUP_NAME: namegoeshere +AZURE_STORAGE_ACCOUNT_NAME: namegoeshere +AZURE_LOCATION: westcentralus +AZURE_SUBSCRIPTION_ID: subscriptionuuid +AZURE_TENANT_ID: tenantuuid +AZURE_CLIENT_ID: clientuuid +AZURE_CLIENT_SECRET: clientsecretgoeshere + +# GitHub +GITHUB_ORGANIZATION: exampleorg +GITHUB_REPOSITORY: example-repo +GITHUB_API_KEY: keygoeshere +GITHUB_FILE_V2: mirrorSites.json + +# Hetzner Cloud +HCLOUD_TOKEN: tokengoeshere \ No newline at end of file diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..d4bb2cb --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/admin/index.rst b/docs/admin/index.rst new file mode 100644 index 0000000..156f801 --- /dev/null +++ b/docs/admin/index.rst @@ -0,0 +1,3 @@ +Application Overview +==================== + diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..9989199 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,41 @@ +import os +import sys +sys.path.insert(0, os.path.abspath('..')) + + +# -- Project information ----------------------------------------------------- + +project = 'Bypass Censorship' +copyright = '2022, Bypass Censorship' +author = 'Bypass Censorship' + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'press' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..8b66bc6 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,35 @@ +.. 
Bypass Censorship documentation master file, created by + sphinx-quickstart on Fri Apr 8 12:02:43 2022. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Documentation Home +================== + +.. toctree:: + :maxdepth: 2 + :caption: User Guide: + + user/index.rst + +.. toctree:: + :maxdepth: 2 + :caption: Admin Guide: + + admin/index.rst + +.. toctree:: + :maxdepth: 2 + :caption: Technical Documentation: + + tech/index.rst + tech/conf.rst + tech/resource.rst + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..922152e --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/tech/conf.rst b/docs/tech/conf.rst new file mode 100644 index 0000000..6de800c --- /dev/null +++ b/docs/tech/conf.rst @@ -0,0 +1,8 @@ +Configuration Objects +===================== + +.. 
autoclass:: app.models.AbstractConfiguration + :members: + :undoc-members: + + diff --git a/docs/tech/index.rst b/docs/tech/index.rst new file mode 100644 index 0000000..5b83b69 --- /dev/null +++ b/docs/tech/index.rst @@ -0,0 +1,4 @@ +Technical Overview +================== + + diff --git a/docs/tech/resource.rst b/docs/tech/resource.rst new file mode 100644 index 0000000..52f8f24 --- /dev/null +++ b/docs/tech/resource.rst @@ -0,0 +1,8 @@ +Resource Objects +================ + +.. autoclass:: app.models.AbstractResource + :members: + :undoc-members: + + diff --git a/docs/user/index.rst b/docs/user/index.rst new file mode 100644 index 0000000..e8b5eb2 --- /dev/null +++ b/docs/user/index.rst @@ -0,0 +1,3 @@ +Introduction +============ + diff --git a/migrations/README b/migrations/README new file mode 100644 index 0000000..0e04844 --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Single-database configuration for Flask. diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 0000000..ec9d45c --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,50 @@ +# A generic, single database configuration. 
+ +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic,flask_migrate + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 0000000..68feded --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,91 @@ +from __future__ import with_statement + +import logging +from logging.config import fileConfig + +from flask import current_app + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +config.set_main_option( + 'sqlalchemy.url', + str(current_app.extensions['migrate'].db.get_engine().url).replace( + '%', '%%')) +target_metadata = current_app.extensions['migrate'].db.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. 
+ + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=target_metadata, literal_binds=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, 'autogenerate', False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info('No changes in schema detected.') + + connectable = current_app.extensions['migrate'].db.get_engine() + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata, + process_revision_directives=process_revision_directives, + **current_app.extensions['migrate'].configure_args + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 0000000..2c01563 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa 
+${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/07c4fb2af22c_initial_schema.py b/migrations/versions/07c4fb2af22c_initial_schema.py new file mode 100644 index 0000000..92e6e0a --- /dev/null +++ b/migrations/versions/07c4fb2af22c_initial_schema.py @@ -0,0 +1,124 @@ +"""initial schema + +Revision ID: 07c4fb2af22c +Revises: +Create Date: 2022-03-31 12:36:02.922753 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '07c4fb2af22c' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('group', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('group_name', sa.String(length=80), nullable=False), + sa.Column('description', sa.String(length=255), nullable=False), + sa.Column('eotk', sa.Boolean(), nullable=True), + sa.Column('added', sa.DateTime(), nullable=False), + sa.Column('updated', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_group')), + sa.UniqueConstraint('group_name', name=op.f('uq_group_group_name')) + ) + op.create_table('bridge_conf', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('group_id', sa.Integer(), nullable=False), + sa.Column('provider', sa.String(length=20), nullable=False), + sa.Column('method', sa.String(length=20), nullable=False), + sa.Column('description', sa.String(length=255), nullable=True), + sa.Column('number', sa.Integer(), nullable=True), + sa.Column('added', sa.DateTime(), nullable=False), + sa.Column('updated', sa.DateTime(), nullable=False), + sa.Column('destroyed', sa.DateTime(), 
nullable=True), + sa.ForeignKeyConstraint(['group_id'], ['group.id'], name=op.f('fk_bridge_conf_group_id_group')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_bridge_conf')) + ) + op.create_table('origin', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('group_id', sa.Integer(), nullable=False), + sa.Column('domain_name', sa.String(length=255), nullable=False), + sa.Column('description', sa.String(length=255), nullable=False), + sa.Column('added', sa.DateTime(), nullable=False), + sa.Column('updated', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['group_id'], ['group.id'], name=op.f('fk_origin_group_id_group')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_origin')), + sa.UniqueConstraint('domain_name', name=op.f('uq_origin_domain_name')) + ) + op.create_table('bridge', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('conf_id', sa.Integer(), nullable=False), + sa.Column('added', sa.DateTime(), nullable=False), + sa.Column('updated', sa.DateTime(), nullable=False), + sa.Column('deprecated', sa.DateTime(), nullable=True), + sa.Column('destroyed', sa.DateTime(), nullable=True), + sa.Column('terraform_updated', sa.DateTime(), nullable=True), + sa.Column('fingerprint', sa.String(length=255), nullable=True), + sa.Column('hashed_fingerprint', sa.String(length=255), nullable=True), + sa.Column('bridgeline', sa.String(length=255), nullable=True), + sa.ForeignKeyConstraint(['conf_id'], ['bridge_conf.id'], name=op.f('fk_bridge_conf_id_bridge_conf')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_bridge')) + ) + op.create_table('mirror', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('origin_id', sa.Integer(), nullable=False), + sa.Column('url', sa.String(length=255), nullable=False), + sa.Column('added', sa.DateTime(), nullable=False), + sa.Column('updated', sa.DateTime(), nullable=False), + sa.Column('deprecated', sa.DateTime(), nullable=True), + sa.Column('destroyed', sa.DateTime(), nullable=True), + 
sa.ForeignKeyConstraint(['origin_id'], ['origin.id'], name=op.f('fk_mirror_origin_id_origin')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_mirror')), + sa.UniqueConstraint('url', name=op.f('uq_mirror_url')) + ) + op.create_table('proxy', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('origin_id', sa.Integer(), nullable=False), + sa.Column('provider', sa.String(length=20), nullable=False), + sa.Column('slug', sa.String(length=20), nullable=True), + sa.Column('added', sa.DateTime(), nullable=False), + sa.Column('updated', sa.DateTime(), nullable=False), + sa.Column('deprecated', sa.DateTime(), nullable=True), + sa.Column('destroyed', sa.DateTime(), nullable=True), + sa.Column('terraform_updated', sa.DateTime(), nullable=True), + sa.Column('url', sa.String(length=255), nullable=True), + sa.ForeignKeyConstraint(['origin_id'], ['origin.id'], name=op.f('fk_proxy_origin_id_origin')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_proxy')) + ) + op.create_table('alarm', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('target', sa.String(length=60), nullable=False), + sa.Column('group_id', sa.Integer(), nullable=True), + sa.Column('origin_id', sa.Integer(), nullable=True), + sa.Column('proxy_id', sa.Integer(), nullable=True), + sa.Column('bridge_id', sa.Integer(), nullable=True), + sa.Column('alarm_type', sa.String(length=255), nullable=False), + sa.Column('alarm_state', sa.Enum('UNKNOWN', 'OK', 'WARNING', 'CRITICAL', name='alarmstate'), nullable=False), + sa.Column('state_changed', sa.DateTime(), nullable=False), + sa.Column('last_updated', sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint(['bridge_id'], ['bridge.id'], name=op.f('fk_alarm_bridge_id_bridge')), + sa.ForeignKeyConstraint(['group_id'], ['group.id'], name=op.f('fk_alarm_group_id_group')), + sa.ForeignKeyConstraint(['origin_id'], ['origin.id'], name=op.f('fk_alarm_origin_id_origin')), + sa.ForeignKeyConstraint(['proxy_id'], ['proxy.id'], name=op.f('fk_alarm_proxy_id_proxy')), 
+ sa.PrimaryKeyConstraint('id', name=op.f('pk_alarm')) + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('alarm') + op.drop_table('proxy') + op.drop_table('mirror') + op.drop_table('bridge') + op.drop_table('origin') + op.drop_table('bridge_conf') + op.drop_table('group') + # ### end Alembic commands ### diff --git a/migrations/versions/59c9a5185e88_alarms_text_and_destroy_origins.py b/migrations/versions/59c9a5185e88_alarms_text_and_destroy_origins.py new file mode 100644 index 0000000..743e287 --- /dev/null +++ b/migrations/versions/59c9a5185e88_alarms_text_and_destroy_origins.py @@ -0,0 +1,38 @@ +"""alarms text and destroy origins + +Revision ID: 59c9a5185e88 +Revises: 5c69fe874e4d +Create Date: 2022-04-07 16:30:27.888327 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '59c9a5185e88' +down_revision = '5c69fe874e4d' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('alarm', schema=None) as batch_op: + batch_op.add_column(sa.Column('text', sa.String(length=255), nullable=True)) + + with op.batch_alter_table('origin', schema=None) as batch_op: + batch_op.add_column(sa.Column('destroyed', sa.DateTime(), nullable=True)) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('origin', schema=None) as batch_op: + batch_op.drop_column('destroyed') + + with op.batch_alter_table('alarm', schema=None) as batch_op: + batch_op.drop_column('text') + + # ### end Alembic commands ### diff --git a/migrations/versions/5c69fe874e4d_add_bridge_nicknames.py b/migrations/versions/5c69fe874e4d_add_bridge_nicknames.py new file mode 100644 index 0000000..7ca8082 --- /dev/null +++ b/migrations/versions/5c69fe874e4d_add_bridge_nicknames.py @@ -0,0 +1,32 @@ +"""add bridge nicknames + +Revision ID: 5c69fe874e4d +Revises: e1332e4cb910 +Create Date: 2022-04-05 15:48:36.552558 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '5c69fe874e4d' +down_revision = 'e1332e4cb910' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('bridge', schema=None) as batch_op: + batch_op.add_column(sa.Column('nickname', sa.String(length=255), nullable=True)) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('bridge', schema=None) as batch_op: + batch_op.drop_column('nickname') + + # ### end Alembic commands ### diff --git a/migrations/versions/e1332e4cb910_add_mirror_lists.py b/migrations/versions/e1332e4cb910_add_mirror_lists.py new file mode 100644 index 0000000..c441911 --- /dev/null +++ b/migrations/versions/e1332e4cb910_add_mirror_lists.py @@ -0,0 +1,41 @@ +"""add mirror lists + +Revision ID: e1332e4cb910 +Revises: 07c4fb2af22c +Create Date: 2022-03-31 13:33:49.067575 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'e1332e4cb910' +down_revision = '07c4fb2af22c' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('mirror_list', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('provider', sa.String(length=255), nullable=False), + sa.Column('description', sa.String(length=255), nullable=False), + sa.Column('format', sa.String(length=20), nullable=False), + sa.Column('container', sa.String(length=255), nullable=False), + sa.Column('branch', sa.String(length=255), nullable=False), + sa.Column('filename', sa.String(length=255), nullable=False), + sa.Column('added', sa.DateTime(), nullable=False), + sa.Column('updated', sa.DateTime(), nullable=False), + sa.Column('deprecated', sa.DateTime(), nullable=True), + sa.Column('destroyed', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_mirror_list')) + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('mirror_list') + # ### end Alembic commands ### diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..2f3e956 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,16 @@ +flask~=2.0.2 +wtforms~=3.0.1 +boto3~=1.21.15 +alembic~=1.7.6 +sqlalchemy~=1.4.32 +pyyaml~=6.0 +jinja2~=3.0.2 +tldextract~=3.2.0 +requests~=2.27.1 +azure-identity +azure-mgmt-alertsmanagement +flask-migrate +flask-sqlalchemy +bootstrap-flask +flask-wtf +PyGithub \ No newline at end of file