feat: initial commit

This commit is contained in:
Iain Learmonth 2025-12-14 17:47:51 +00:00
commit 075939142f
63 changed files with 9494 additions and 0 deletions

12
.env.example Normal file
View file

@ -0,0 +1,12 @@
DATABASE_URL=postgresql://user@localhost:5432/lawa
DATABASE_ASYNC_URL=postgresql+asyncpg://user@localhost:5432/lawa
ENVIRONMENT=LOCAL
CORS_HEADERS=["*"]
CORS_ORIGINS=["http://localhost:3000"]
OIDC_ENDPOINT=https://www.example.com/.well-known/openid-configuration
OIDC_CLIENT_ID=6c1243fd-00c3-4a3b-9081-5d8403352612
OPENTOFU_PATH=/opt/homebrew/bin/tofu

143
.gitignore vendored Normal file
View file

@ -0,0 +1,143 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.env.prefect
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
postgres-data*
node_modules/
.DS_Store
# PyCharm
.idea/
backup.dump
backup.dump.gz
.vscode
# Terraform
.terraform
.terraform.lock.hcl

48
Dockerfile Normal file
View file

@ -0,0 +1,48 @@
FROM python:3.12-bullseye
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
PYTHONIOENCODING=utf-8 \
# pip
PIP_NO_CACHE_DIR=1 \
PIP_DISABLE_PIP_VERSION_CHECK=1 \
PIP_DEFAULT_TIMEOUT=100 \
PIP_ROOT_USER_ACTION=ignore \
# poetry
POETRY_VERSION=1.8.3 \
POETRY_NO_INTERACTION=1 \
POETRY_VIRTUALENVS_CREATE=false \
POETRY_CACHE_DIR='/var/cache/pypoetry' \
POETRY_HOME='/usr/local' \
# app
PROMETHEUS_MULTIPROC_DIR=/tmp/prometheus_multiproc_dir
SHELL ["/bin/bash", "-eo", "pipefail", "-c"]
# Install build/runtime system deps and Poetry, then clean apt caches to
# keep the layer small.
RUN apt-get update && \
    apt-get install -y gcc libpq-dev curl && \
    curl -sSL 'https://install.python-poetry.org' | python - && \
    poetry --version && \
    # bug fix: the original chained "poetry --version \ apt clean" with no
    # "&&", which ran "poetry --version apt clean" as one broken command
    apt-get clean && \
    rm -rf /var/cache/apt/*
COPY poetry.lock pyproject.toml /src/
WORKDIR /src
RUN --mount=type=cache,target="$POETRY_CACHE_DIR" \
echo "$ENVIRONMENT" \
# Install deps:
&& poetry run pip install -U pip \
&& poetry install \
--no-interaction --no-ansi --sync --with prod
COPY . .
RUN useradd -m -d /src -s /bin/bash app \
&& chown -R app:app /src/* && chown -R app:app /src \
&& chmod +x entrypoints/* \
&& rm -rf /tmp/prometheus_multiproc_dir && mkdir -p /tmp/prometheus_multiproc_dir \
&& chown -R app:app /tmp/prometheus_multiproc_dir
USER app

21
LICENSE Normal file
View file

@ -0,0 +1,21 @@
(C) SR2 Communications Limited 2021-2025.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

99
README.md Normal file
View file

@ -0,0 +1,99 @@
# ilo pali lili
Backend for orchestrating deployment of censorship circumvention resources.
## 🪓 Local Development
### Setup just
MacOS:
```shell
brew install just
```
Debian/Ubuntu:
```shell
apt install just
```
Others: [link](https://github.com/casey/just?tab=readme-ov-file#packages)
### Setup poetry
```shell
pip install poetry
```
Other ways: [link](https://python-poetry.org/docs/#installation)
### Setup Postgres (16.3)
```shell
just up
```
### Copy the environment file and install dependencies
1. `cp .env.example .env`
2. `poetry install`
### Run the uvicorn server
With default settings:
```shell
just run
```
With extra configs (e.g. logging file)
```shell
just run --log-config logging.ini
```
### 🔍 Linters
Format the code with `ruff --fix` and `ruff format`
```shell
just lint
```
### Migrations
- Create an automatic migration from changes in `src/database.py`
```shell
just mm *migration_name*
```
- Run migrations
```shell
just migrate
```
- Downgrade migrations
```shell
just downgrade -1 # or -2 or base or hash of the migration
```
## 🏃🏻‍♀️‍➡️ Deployment
Deployment is done with Docker and Gunicorn. The Dockerfile is optimized for small size and fast builds with a non-root user. The gunicorn configuration is set to use the number of workers based on the number of CPU cores.
Example of running the app with docker compose:
```shell
docker compose -f docker-compose.prod.yml up -d --build
```
## 📜 Conventions
This project uses the
[FastAPI best practices](https://github.com/zhanymkanov/fastapi-best-practices)
documented by GitHub user zhanymkanov and was originally created from their
[template](https://github.com/zhanymkanov/fastapi_production_template).
## 🖋️ Copyright
Copyright © 2025 SR2 Communications Limited.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

104
alembic.ini Normal file
View file

@ -0,0 +1,104 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
hooks = ruff_format,ruff
ruff_format.type = exec
ruff_format.executable = ruff
ruff_format.options = format REVISION_SCRIPT_FILENAME
ruff.type = exec
ruff.executable = ruff
ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

1
alembic/README Normal file
View file

@ -0,0 +1 @@
Generic single-database configuration.

Binary file not shown.

92
alembic/env.py Normal file
View file

@ -0,0 +1,92 @@
# Alembic migration environment: wires the project's settings and
# SQLAlchemy metadata into Alembic for autogenerate and upgrades.
from logging.config import fileConfig
from sqlalchemy import engine_from_config, pool
from alembic import context
# Imported only for its side effects: loading src.main should pull in the
# model modules so their tables are registered on `metadata` before
# autogenerate inspects it. TODO(review): confirm src.main imports all models.
import src.main as _
from src.config import settings
from src.database import metadata

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

# Alembic runs migrations synchronously: if the configured URL names an
# async dialect (e.g. "postgresql+asyncpg"), strip the "+driver" suffix so
# the URL resolves to the plain sync driver.
DATABASE_URL = str(settings.DATABASE_URL)
db_driver = settings.DATABASE_URL.scheme
db_driver_parts = db_driver.split("+")
if len(db_driver_parts) > 1:  # e.g. postgresql+asyncpg
    sync_scheme = db_driver_parts[0].strip()
    DATABASE_URL = DATABASE_URL.replace(  # replace with sync driver
        db_driver, sync_scheme
    )
config.set_main_option("sqlalchemy.url", DATABASE_URL)
# Make autogenerate also detect column type and server-default changes.
config.compare_type = True
config.compare_server_default = True
def run_migrations_offline() -> None:
    """Emit migrations as SQL script output ('offline' mode).

    Only a database URL is given to the context -- no Engine is built, so
    no DBAPI needs to be importable. Each context.execute() call writes
    the generated SQL to the script output instead of executing it.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations against a live database ('online' mode).

    Builds an Engine from the "sqlalchemy."-prefixed ini settings and
    applies the migrations over a real connection. NullPool avoids
    keeping connections open after the run.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Alembic decides offline vs. online mode from the command line (the
# --sql flag) before importing this module.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

24
alembic/script.py.mako Normal file
View file

@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View file

16
docker-compose.prod.yml Normal file
View file

@ -0,0 +1,16 @@
version: '3.7'
services:
app:
container_name: app
image: app
env_file:
- .env
ports:
- "8000:8000"
build:
context: .
dockerfile: Dockerfile
command:
- entrypoints/gunicorn.sh

19
docker-compose.yml Normal file
View file

@ -0,0 +1,19 @@
version: '3.7'
services:
app_db:
container_name: app_db
hostname: app_db
image: library/postgres:17.5
environment:
- POSTGRES_USER=app
- POSTGRES_PASSWORD=app
- POSTGRES_DB=app
volumes:
- app_db_data:/var/lib/postgresql/data
ports:
- "5432:5432"
volumes:
app_db_data:
driver: "local"

16
entrypoints/gunicorn.sh Executable file
View file

@ -0,0 +1,16 @@
#!/usr/bin/env bash
# Production entrypoint: starts the ASGI app under Gunicorn with Uvicorn
# workers. Every setting below can be overridden via environment
# variables; the defaults match the container layout built in Dockerfile.
set -e

# Dotted module path that contains the ASGI application object.
DEFAULT_MODULE_NAME=src.main
MODULE_NAME=${MODULE_NAME:-$DEFAULT_MODULE_NAME}
# Name of the application variable inside that module.
VARIABLE_NAME=${VARIABLE_NAME:-app}
export APP_MODULE=${APP_MODULE:-"$MODULE_NAME:$VARIABLE_NAME"}

# Gunicorn configuration file (worker count etc. are defined there).
DEFAULT_GUNICORN_CONF=/src/gunicorn/gunicorn_conf.py
export GUNICORN_CONF=${GUNICORN_CONF:-$DEFAULT_GUNICORN_CONF}
# Use async (ASGI) workers instead of Gunicorn's default sync workers.
export WORKER_CLASS=${WORKER_CLASS:-"uvicorn.workers.UvicornWorker"}

# Start Gunicorn
# --forwarded-allow-ips "*" trusts X-Forwarded-* headers from any peer;
# assumes the container is only reachable through a trusted reverse proxy.
gunicorn --forwarded-allow-ips "*" -k "$WORKER_CLASS" -c "$GUNICORN_CONF" "$APP_MODULE"

24
frontend/.gitignore vendored Normal file
View file

@ -0,0 +1,24 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist-ssr
*.local
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

73
frontend/README.md Normal file
View file

@ -0,0 +1,73 @@
# React + TypeScript + Vite
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
Currently, two official plugins are available:
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) (or [oxc](https://oxc.rs) when used in [rolldown-vite](https://vite.dev/guide/rolldown)) for Fast Refresh
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
## React Compiler
The React Compiler is not enabled on this template because of its impact on dev & build performances. To add it, see [this documentation](https://react.dev/learn/react-compiler/installation).
## Expanding the ESLint configuration
If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules:
```js
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
// Other configs...
// Remove tseslint.configs.recommended and replace with this
tseslint.configs.recommendedTypeChecked,
// Alternatively, use this for stricter rules
tseslint.configs.strictTypeChecked,
// Optionally, add this for stylistic rules
tseslint.configs.stylisticTypeChecked,
// Other configs...
],
languageOptions: {
parserOptions: {
project: ['./tsconfig.node.json', './tsconfig.app.json'],
tsconfigRootDir: import.meta.dirname,
},
// other options...
},
},
])
```
You can also install [eslint-plugin-react-x](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-x) and [eslint-plugin-react-dom](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-dom) for React-specific lint rules:
```js
// eslint.config.js
import reactX from 'eslint-plugin-react-x'
import reactDom from 'eslint-plugin-react-dom'
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
// Other configs...
// Enable lint rules for React
reactX.configs['recommended-typescript'],
// Enable lint rules for React DOM
reactDom.configs.recommended,
],
languageOptions: {
parserOptions: {
project: ['./tsconfig.node.json', './tsconfig.app.json'],
tsconfigRootDir: import.meta.dirname,
},
// other options...
},
},
])
```

23
frontend/eslint.config.js Normal file
View file

@ -0,0 +1,23 @@
import js from '@eslint/js'
import globals from 'globals'
import reactHooks from 'eslint-plugin-react-hooks'
import reactRefresh from 'eslint-plugin-react-refresh'
import tseslint from 'typescript-eslint'
import { defineConfig, globalIgnores } from 'eslint/config'
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
js.configs.recommended,
tseslint.configs.recommended,
reactHooks.configs.flat.recommended,
reactRefresh.configs.vite,
],
languageOptions: {
ecmaVersion: 2020,
globals: globals.browser,
},
},
])

13
frontend/index.html Normal file
View file

@ -0,0 +1,13 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>frontend</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

4217
frontend/package-lock.json generated Normal file

File diff suppressed because it is too large Load diff

32
frontend/package.json Normal file
View file

@ -0,0 +1,32 @@
{
"name": "frontend",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc -b && vite build",
"lint": "eslint .",
"preview": "vite preview"
},
"dependencies": {
"antd": "^6.1.0",
"react": "^19.2.0",
"react-dom": "^19.2.0",
"react-router": "^7.10.1"
},
"devDependencies": {
"@eslint/js": "^9.39.1",
"@types/node": "^24.10.1",
"@types/react": "^19.2.5",
"@types/react-dom": "^19.2.3",
"@vitejs/plugin-react": "^5.1.1",
"eslint": "^9.39.1",
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.4.24",
"globals": "^16.5.0",
"typescript": "~5.9.3",
"typescript-eslint": "^8.46.4",
"vite": "^7.2.4"
}
}

1
frontend/public/vite.svg Normal file
View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>

After

Width:  |  Height:  |  Size: 1.5 KiB

View file

@ -0,0 +1,67 @@
import {type Dispatch, type SetStateAction, useEffect} from 'react';
import {Button, message} from "antd";

interface ApiKeyProps {
    apiKey?: string;
    setApiKey: Dispatch<SetStateAction<string | undefined>>;
}

/**
 * Button that lets the user set or remove the API key used by the app.
 * A newly entered key is validated against the instances endpoint before
 * being persisted to localStorage; an existing key is restored on mount.
 */
const ApiKeyButton: React.FC<ApiKeyProps> = ({apiKey, setApiKey}) => {
    // Check if API key already exists in localStorage on component mount
    useEffect(() => {
        const savedApiKey = localStorage.getItem('apiKey');
        if (savedApiKey) {
            setApiKey(savedApiKey);
        }
    }, [setApiKey]);

    const handleSetApiKey = async () => {
        const key = prompt('Please enter your API key:');
        if (key) {
            try {
                // Validate the key against an authenticated endpoint
                // before persisting it.
                const response = await fetch(
                    "/api/v1/tofu/instances",
                    {
                        headers: {
                            "Authorization": `Bearer ${key}`,
                            "Content-Type": "application/json"
                        }
                    }
                );
                if (response.ok) {
                    // Store the API key in localStorage
                    localStorage.setItem('apiKey', key);
                    setApiKey(key);
                    message.info("API key added successfully");
                } else {
                    message.error("Invalid API key");
                }
            } catch (error) {
                // fetch() rejects on network failure; previously this was
                // an unhandled promise rejection.
                message.error("Could not validate API key: " + error);
            }
        } else if (key !== null) { // User clicked OK but left empty
            // Bug fix: prompt() returns "" for OK-with-empty-field and
            // null for Cancel (never undefined). The original compared
            // against undefined, so Cancel also showed this error.
            message.error('API key cannot be empty.');
        }
        // If user clicked Cancel (key === null), do nothing
    };

    const handleRemoveApiKey = () => {
        localStorage.removeItem('apiKey');
        setApiKey(undefined);
    };

    return (
        <>
            {apiKey !== undefined ? (
                <div>
                    <Button type="default" danger onClick={handleRemoveApiKey}>
                        Remove API Key
                    </Button>
                </div>
            ) : (
                <Button type='primary' onClick={handleSetApiKey}>
                    Set API Key
                </Button>
            )}
        </>
    );
};

export default ApiKeyButton;

12
frontend/src/App.css Normal file
View file

@ -0,0 +1,12 @@
body {
margin: 0;
}
.demo-logo {
width: 120px;
min-width: 120px;
height: 32px;
background: rgba(255, 255, 255, .2);
border-radius: 6px;
margin-inline-end: 24px;
}

84
frontend/src/App.tsx Normal file
View file

@ -0,0 +1,84 @@
import React, {useState} from 'react';
import {CloudServerOutlined, UnorderedListOutlined} from '@ant-design/icons';
import {Breadcrumb, Layout, Menu, type MenuProps, theme, Typography} from 'antd';
import "./App.css";
import ApiKeyButton from "./ApiKeyButton.tsx";
import TofuInstanceList from "./TofuInstanceList.tsx";
import {Navigate, Route, Routes, useNavigate} from "react-router";
import TofuInstanceDetail from "./TofuInstanceDetail.tsx";
const {Header, Content, Footer, Sider} = Layout;
const {Title, Paragraph} = Typography;
/**
 * Application shell: header with the API-key control, breadcrumb, side
 * navigation and routed content area. All routes render only once an
 * API key has been provided.
 */
const App: React.FC = () => {
    const {
        token: {colorBgContainer, borderRadiusLG},
    } = theme.useToken();
    // API key entered by the user; undefined until set (ApiKeyButton also
    // restores a saved key from localStorage on mount).
    const [apiKey, setApiKey] = useState<string | undefined>(undefined);
    // Breadcrumb items are owned here but populated by the routed child
    // components through setBreadcrumb.
    // NOTE(review): useState() has no type argument, so `breadcrumb` is
    // inferred from undefined while the children expect
    // Dispatch<SetStateAction<object[]>> -- confirm this type-checks.
    const [breadcrumb, setBreadcrumb] = useState();
    const navigate = useNavigate();
    // Sidebar: one "OpenTofu" group with an "Instances" entry that
    // navigates to the instance list.
    const sideMenu: MenuProps['items'] = [{
        key: 'tofu',
        label: 'OpenTofu',
        icon: React.createElement(CloudServerOutlined),
        children: [
            {
                key: 'instances',
                label: 'Instances',
                icon: React.createElement(UnorderedListOutlined),
                onClick: () => {
                    navigate("/tofu/instances/")
                }
            }
        ]
    }]
    return (
        <Layout>
            <Header style={{display: 'flex', alignItems: 'center'}}>
                <div className="demo-logo"/>
                <ApiKeyButton apiKey={apiKey} setApiKey={setApiKey}/>
            </Header>
            <div style={{padding: '0 48px'}}>
                {apiKey && <Breadcrumb
                    style={{margin: '16px 0'}}
                    items={breadcrumb}
                />}
                <Layout
                    style={{padding: '24px 0', background: colorBgContainer, borderRadius: borderRadiusLG}}
                >
                    {apiKey && <Sider style={{background: colorBgContainer}} width={200}>
                        <Menu
                            mode="inline"
                            defaultSelectedKeys={['instances']}
                            defaultOpenKeys={['tofu']}
                            style={{height: '100%'}}
                            items={sideMenu}
                        />
                    </Sider>}
                    <Content style={{padding: '0 24px', minHeight: 280}}>
                        {(apiKey && <Routes>
                            <Route index element={<Navigate to="/tofu/instances"/>}/>
                            <Route path="/tofu" element={<Navigate to="/tofu/instances"/>}/>
                            <Route path="/tofu/instances"
                                   element={<TofuInstanceList apiKey={apiKey} setBreadcrumb={setBreadcrumb}/>}/>
                            <Route path="/tofu/instances/:instanceId"
                                   element={<TofuInstanceDetail apiKey={apiKey} setBreadcrumb={setBreadcrumb}/>}/>
                        </Routes>) || <>
                            <Title>API Key Required</Title>
                            <Paragraph>Use the button above to enter your API key.</Paragraph>
                        </>}
                    </Content>
                </Layout>
            </div>
            <Footer style={{textAlign: 'center'}}>
                Copyright © 2021-{new Date().getFullYear()} SR2 Communications Limited.
            </Footer>
        </Layout>
    );
};
export default App;

View file

@ -0,0 +1,215 @@
import {Alert, Button, Descriptions, Input, message, Modal, Space, Table, Tooltip, Typography} from "antd";
import {type Dispatch, type SetStateAction, useEffect, useState} from "react";
import {deleteInstance, statusTag, updateInstance} from "./client.tsx";
import {useNavigate, useParams} from "react-router";
import {DeleteOutlined, EyeOutlined, ReloadOutlined, ToolOutlined} from "@ant-design/icons";
const {Title, Paragraph, Text} = Typography;
const {TextArea} = Input;
interface TofuInstanceDetailProps {
apiKey: string
setBreadcrumb: Dispatch<SetStateAction<object[]>>;
}
/**
 * Stream the NDJSON log output for a task into component state.
 *
 * Reads the response body incrementally, splits the decoded text on
 * newlines, parses each complete line as JSON and accumulates the
 * "@message" field (or the raw JSON when that field is absent). The
 * accumulated text is pushed via `setLogs` after every chunk so the
 * log modal updates while the stream is still running.
 */
const showLogs = async (apiKey: string, taskId: number, setLogs: Dispatch<SetStateAction<string>>) => {
    const response = await fetch("/api/v1/tofu/logs/" + taskId, {
        headers: {
            "Authorization": `Bearer ${apiKey}`, "Content-Type": "application/json"
        },
    });
    const reader = response.body?.getReader();
    if (!reader) {
        message.error("Could not open logs stream");
        return;
    }
    const decoder = new TextDecoder()
    // Decoded text not yet terminated by a newline (partial line).
    let buffer = "";
    // Human-readable log text accumulated across all chunks.
    let messages = "";
    while (true) {
        const {value, done} = await reader.read();
        // On the final read `value` is undefined; decode() then yields "".
        buffer += decoder.decode(value, {stream: true});
        let boundary;
        // Consume every complete line currently in the buffer.
        while ((boundary = buffer.indexOf('\n')) !== -1) {
            const line = buffer.slice(0, boundary).trim();
            buffer = buffer.slice(boundary + 1);
            if (line) {
                try {
                    const json = JSON.parse(line);
                    if (json["@message"] === undefined) {
                        // Not a structured log record -- show the raw JSON.
                        messages += JSON.stringify(json) + "\n";
                    } else {
                        messages += json["@message"] + "\n";
                    }
                } catch (e) {
                    console.error('Failed to parse JSON line:', e, 'Line:', line);
                }
            }
        }
        setLogs(messages);
        if (done) break;
    }
}
/**
 * Detail view for one OpenTofu instance: shows its properties, task
 * history (with streamed logs), outputs, and lets the user edit the
 * configuration or delete the instance.
 */
const TofuInstanceDetail: React.FC<TofuInstanceDetailProps> = ({apiKey, setBreadcrumb}) => {
    const [instance, setInstance] = useState(undefined);
    const [isLoading, setIsLoading] = useState(true);
    const [isConfigurationVisible, setIsConfigurationVisible] = useState(false);
    const [configuration, setConfiguration] = useState("");
    // Incremented by the Reload button to re-trigger the fetch effect.
    const [reloadCount, setReloadCount] = useState(0);
    const [errorMessage, setErrorMessage] = useState<string | undefined>(undefined);
    // Non-empty logs text doubles as the "logs modal open" flag.
    const [logs, setLogs] = useState("");
    const navigate = useNavigate();
    const {instanceId} = useParams();
    useEffect(() => {
        const fetchInstance = async () => {
            try {
                setIsLoading(true);
                const response = await fetch("/api/v1/tofu/instances/" + instanceId, {
                    headers: {
                        "Authorization": `Bearer ${apiKey}`, "Content-Type": "application/json"
                    }
                });
                if (response.ok) {
                    const data = await response.json();
                    // Give each task a stable table row key.
                    data.tasks = data.tasks.map((task, index: number) => ({
                        ...task, key: index + 1
                    }));
                    // Fix: transform outputs into table rows *before* handing
                    // the object to React state; the original mutated `data`
                    // after setInstance(data) had already been called.
                    if (data.outputs) {
                        data.outputs = Object.entries<object>(data.outputs).map(([key, value]) => ({
                            ...value,
                            output: key,
                            key: key,
                        }));
                    }
                    setInstance(data);
                    setConfiguration(JSON.stringify(data.configuration));
                    setErrorMessage(undefined);
                } else {
                    setErrorMessage("Failed to fetch instances:" + response.status);
                    setInstance(undefined);
                }
            } catch (error) {
                setErrorMessage("Error fetching instances:" + error);
                setInstance(undefined);
            } finally {
                setIsLoading(false);
            }
        };
        setBreadcrumb([
            {"title": "OpenTofu", href: "/tofu"},
            {"title": "Instances", href: "/tofu/instances"},
            {"title": "Instance " + instanceId},
        ])
        fetchInstance();
    }, [apiKey, instanceId, reloadCount, setBreadcrumb, setConfiguration, setInstance, setIsLoading]);
    const taskColumns = [{"title": "ID", "dataIndex": "id", "key": "id"}, {
        "title": "Status", "dataIndex": "status", "key": "state", render: statusTag
    }, {
        "title": "Created",
        dataIndex: "created_at",
        key: "created_at",
        sorter: (a, b) => a.created_at.localeCompare(b.created_at)
    }, {
        "title": "Started",
        dataIndex: "start_time",
        key: "start_time",
        render: (time) => (time || "Not yet"),
        // Fix: start_time is null until the task starts (the render above
        // shows "Not yet"), so guard before comparing -- calling
        // .localeCompare on null would throw when the column is sorted.
        sorter: (a, b) => (a.start_time || "").localeCompare(b.start_time || "")
    }, {
        "title": "Ended",
        dataIndex: "end_time",
        key: "end_time",
        render: (time) => (time || "Not yet"),
        // Same null guard as start_time above.
        sorter: (a, b) => (a.end_time || "").localeCompare(b.end_time || "")
    }, {
        title: 'Action', key: 'action', render: (_, record) => (<Space size="middle">
            <Button type="default" onClick={() => {
                showLogs(apiKey, record.id, setLogs)
            }}><EyeOutlined/> View Log</Button>
        </Space>),
    },]
    const outputColumns = [
        {"title": "Output", "dataIndex": "output", "key": "output"},
        {"title": "Value", "dataIndex": "value", "key": "value", render: (_, record) => {
            // Sensitive values are hidden behind a hover tooltip.
            if (record.sensitive) {
                return (<Tooltip placement="top" title={record.value}>
                    <Text type="secondary">Sensitive Value</Text>
                </Tooltip>);
            } else {
                return record.value;
            }
        }},
        {"title": "Type", "dataIndex": "type", "key": "type"}
    ]
    return (<>
        <Title>Instance {instanceId}</Title>
        <Paragraph>
            {/* NOTE(review): antd's Alert documents a `message` prop, not
                `title` -- verify the error text actually renders here. */}
            {errorMessage && <Alert title={errorMessage} type="error"/>}
        </Paragraph>
        <Paragraph>
            <Space size="middle">
                <Button type="default" onClick={() => {
                    setIsConfigurationVisible(true)
                }}><ToolOutlined/> Configuration</Button>
                <Button onClick={() => setReloadCount(reloadCount + 1)}
                        loading={isLoading}><ReloadOutlined/> Reload</Button>
                <Button type="default" danger={true} onClick={() => {
                    deleteInstance(apiKey, instanceId).then(() => {
                        navigate("/tofu/instances")
                    });
                }}><DeleteOutlined/> Delete</Button>
            </Space>
        </Paragraph>
        {instance && <>
            <Descriptions title="Properties">
                <Descriptions.Item label="ID">{instance.id}</Descriptions.Item>
                <Descriptions.Item label="Status">{statusTag(instance.status)}</Descriptions.Item>
                <Descriptions.Item label="Created">{instance.created_at}</Descriptions.Item>
                <Descriptions.Item label="Updated">{instance.updated_at}</Descriptions.Item>
                <Descriptions.Item label="Drift Checked">{instance.drift_checked_at || "Never"}</Descriptions.Item>
                <Descriptions.Item label="State Lock">
                    {(instance.state_lock && "Since " + instance.state_lock.Created) || "Unlocked"}
                </Descriptions.Item>
            </Descriptions>
            <Title level={2}>Instance Tasks</Title>
            <Table columns={taskColumns} dataSource={instance.tasks} loading={isLoading}/>
            {instance.outputs && <>
                <Title level={2}>Instance Outputs</Title>
                <Table columns={outputColumns} dataSource={instance.outputs} loading={isLoading}/>
            </>}
            <Modal
                title="Configuration"
                open={isConfigurationVisible}
                onCancel={() => setIsConfigurationVisible(false)}
                // Fix: both footer buttons previously used key="back",
                // giving React duplicate sibling keys.
                footer={[<Button key="update" danger onClick={() => {
                    updateInstance(apiKey, instanceId, configuration);
                    setIsConfigurationVisible(false);
                }}>Update</Button>,
                    <Button key="cancel" onClick={() => setIsConfigurationVisible(false)}>Cancel</Button>]}
                width="90%"
            >
                <TextArea rows={20} value={configuration} onChange={(e) => setConfiguration(e.target.value)}/>
            </Modal>
            <Modal
                title="Logs"
                footer={[<Button key="close" onClick={() => setLogs("")}>Close</Button>]}
                onCancel={() => setLogs("")}
                width="90%"
                open={logs !== ""}
            >
                <pre style={{"overflow": "auto"}}>{logs}</pre>
            </Modal>
        </>}
    </>)
};
export default TofuInstanceDetail;

View file

@ -0,0 +1,119 @@
import {Alert, Button, Input, Modal, Space, Table, Typography} from "antd";
import {type Dispatch, type SetStateAction, useEffect, useState} from "react";
import {createInstance, deleteInstance, statusTag} from "./client.tsx";
import {useNavigate} from "react-router";
import {PlusOutlined, ReloadOutlined} from "@ant-design/icons";
const {Title, Paragraph} = Typography;
const {TextArea} = Input;
const defaultNewConfiguration = "{\"resource\":{\"random_password\":{\"example\":{\"length\":20}}},\"output\":{\"example\":{\"value\":\"${random_password.example.result}\",\"sensitive\":true}}}"
interface TofuInstanceListProps {
apiKey?: string
setBreadcrumb: Dispatch<SetStateAction<object[]>>
}
// Lists OpenTofu instances with reload, create and delete actions.
// Fixes: unique React keys in the modal footer, antd Alert `message` prop,
// numeric Table sorter, accurate "Create" label, and no navigation when
// instance creation fails.
const TofuInstanceList: React.FC<TofuInstanceListProps> = ({apiKey, setBreadcrumb}) => {
    const [instances, setInstances] = useState([]);
    const [isLoading, setIsLoading] = useState(true);
    const [isNewModalVisible, setIsNewModalVisible] = useState(false);
    const [configuration, setConfiguration] = useState<string>(defaultNewConfiguration);
    const [reloadCount, setReloadCount] = useState(0);
    const [errorMessage, setErrorMessage] = useState<string | undefined>(undefined);
    const navigate = useNavigate();

    useEffect(() => {
        // Load the instance list; failures are surfaced via the Alert banner.
        const fetchInstances = async () => {
            try {
                setIsLoading(true);
                const response = await fetch("/api/v1/tofu/instances", {
                    headers: {
                        "Authorization": `Bearer ${apiKey}`, "Content-Type": "application/json"
                    }
                });
                if (response.ok) {
                    const data = await response.json();
                    // antd Table rows need a stable `key`.
                    setInstances((data.items || []).map((item: object, index: number) => ({
                        ...item, key: index + 1
                    })));
                    setErrorMessage(undefined);
                } else {
                    setErrorMessage("Failed to fetch deployments: " + response.status);
                    setInstances([]);
                }
            } catch (error) {
                setErrorMessage("Error fetching deployments: " + error);
                setInstances([]);
            } finally {
                setIsLoading(false);
            }
        };
        setBreadcrumb([{"title": "OpenTofu", href: "/tofu"}, {
            "title": "Instances",
            href: "/tofu/instances"
        }, {"title": "List"},])
        if (apiKey) fetchInstances();
    }, [apiKey, reloadCount, setBreadcrumb, setInstances, setIsLoading]); // Reload when apiKey or reloadCount changes

    if (!apiKey) return <></>;

    const columns = [{
        "title": "ID",
        "dataIndex": "id",
        "key": "id",
        // antd expects a numeric comparator; a boolean comparator breaks
        // ordering (notably the descending default).
        sorter: (a: { id: number }, b: { id: number }) => a.id - b.id,
        defaultSortOrder: 'descend'
    }, {
        "title": "Status", "dataIndex": "status", "key": "state", render: statusTag
    }, {
        "title": "Last Status Change",
        key: "status_changed_at",
        dataIndex: "status_changed_at"
    }, {"title": "Last Drift Check", key: "drift_checked_at", dataIndex: "drift_checked_at"}, {
        title: 'Action', key: 'action', render: (_, record: { id: string | number; }) => (<Space size="middle">
            <Button type="primary" onClick={() => {
                navigate("/tofu/instances/" + record.id)
            }}>Details</Button>
            <Button type="link" danger={true} onClick={() => {
                deleteInstance(apiKey, record.id).then(() => {
                    setReloadCount(reloadCount + 1)
                });
            }}>Delete</Button>
        </Space>),
    },]

    return (<>
        <Title>Instances</Title>
        <Paragraph>
            {/* antd Alert takes its text via the `message` prop, not `title`. */}
            {errorMessage && <Alert message={errorMessage} type="error"/>}
        </Paragraph>
        <Paragraph>
            <Space size="middle">
                <Button type="dashed" onClick={() => setIsNewModalVisible(true)}><PlusOutlined /> New</Button>
                <Button onClick={() => setReloadCount(reloadCount + 1)} loading={isLoading}><ReloadOutlined /> Reload</Button>
            </Space>
        </Paragraph>
        <Table columns={columns} dataSource={instances} loading={isLoading}/>
        <Modal
            title="New Instance Configuration"
            open={isNewModalVisible}
            onCancel={() => setIsNewModalVisible(false)}
            footer={[
                // Sibling elements in an array must have unique keys; both
                // buttons previously used key="back".
                <Button key="create" danger onClick={async () => {
                    const instanceId = await createInstance(apiKey, configuration);
                    setIsNewModalVisible(false);
                    // createInstance returns undefined on failure; stay on the list
                    // rather than navigating to "/tofu/instances/undefined".
                    if (instanceId !== undefined) navigate("/tofu/instances/" + instanceId)
                }}>Create</Button>,
                <Button key="cancel" onClick={() => setIsNewModalVisible(false)}>Cancel</Button>]}
            width="90%"
        >
            <TextArea rows={20} value={configuration} onChange={(e) => setConfiguration(e.target.value)}/>
        </Modal>
    </>)
};
export default TofuInstanceList;

View file

@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" role="img"
class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228">
<path fill="#00D8FF"
d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 
19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path>
</svg>

After

Width:  |  Height:  |  Size: 4 KiB

81
frontend/src/client.tsx Normal file
View file

@ -0,0 +1,81 @@
import {message, Tag} from "antd";
// Render an instance/task status as a coloured antd Tag.
// Unknown statuses are returned as plain text; underscores in multi-word
// statuses are shown as spaces.
const statusTag = (status: string) => {
    const colorByStatus: Record<string, string> = {
        ACTIVE: "green",
        COMPLETED: "green",
        PENDING: "blue",
        RUNNING: "blue",
        PENDING_DESTROY: "yellow",
        FAILED: "red",
        FAILED_DESTROY: "red",
    };
    const color = colorByStatus[status];
    if (!color) {
        return status;
    }
    return (<Tag color={color}>{status.replace("_", " ")}</Tag>);
}
// POST a new OpenTofu instance. Returns the new instance id on success
// (HTTP 202), or undefined on failure.
// Fixes: the error toast claimed an "update" and dereferenced `json.id`,
// which does not exist on error bodies; the body is now only parsed on
// the success path.
const createInstance = async (apiKey: string, configuration: string) => {
    const response = await fetch(
        "/api/v1/tofu/instances",
        {
            method: "POST",
            headers: {
                "Authorization": `Bearer ${apiKey}`,
                "Content-Type": "application/json"
            },
            body: JSON.stringify({"configuration": JSON.parse(configuration)})
        }
    );
    const status = response.status;
    if (status === 202) {
        const json = await response.json();
        message.info("OpenTofu instance " + json.id + " creation in progress!");
        return json.id;
    }
    message.error("Could not create OpenTofu instance: " + status);
}
// PUT an updated configuration to an existing OpenTofu instance and report
// the outcome via a toast. The API answers 202 while the change is applied
// asynchronously.
const updateInstance = async (apiKey: string, instanceId: number, configuration: string) => {
    const payload = {"configuration": JSON.parse(configuration)};
    const response = await fetch(`/api/v1/tofu/instances/${instanceId}`, {
        method: "PUT",
        headers: {
            "Authorization": `Bearer ${apiKey}`,
            "Content-Type": "application/json",
        },
        body: JSON.stringify(payload),
    });
    if (response.status === 202) {
        message.info(`OpenTofu instance ${instanceId} update in progress!`);
    } else {
        message.error(`Could not update OpenTofu instance ${instanceId}: ${response.status}`);
    }
}
// DELETE an OpenTofu instance and report the outcome via a toast.
// The API answers 202 while destruction proceeds asynchronously.
const deleteInstance = async (apiKey: string, instanceId: number) => {
    const response = await fetch(`/api/v1/tofu/instances/${instanceId}`, {
        method: "DELETE",
        headers: {
            "Authorization": `Bearer ${apiKey}`,
        },
    });
    const accepted = response.status === 202;
    if (accepted) {
        message.info(`OpenTofu instance ${instanceId} deletion in progress!`);
    } else {
        message.error(`Could not delete OpenTofu instance ${instanceId}: ${response.status}`);
    }
}
export {createInstance, deleteInstance, statusTag, updateInstance};

12
frontend/src/main.tsx Normal file
View file

@ -0,0 +1,12 @@
import {StrictMode} from 'react'
import {createRoot} from 'react-dom/client'
import App from './App.tsx'
import {BrowserRouter} from "react-router";
// Application entry point: mount the SPA under #root, with client-side
// routing and StrictMode's extra development checks enabled.
const rootElement = document.getElementById('root')!;
const root = createRoot(rootElement);
root.render(
    <StrictMode>
        <BrowserRouter>
            <App/>
        </BrowserRouter>
    </StrictMode>,
)

View file

@ -0,0 +1,28 @@
{
"compilerOptions": {
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
"target": "ES2022",
"useDefineForClassFields": true,
"lib": ["ES2022", "DOM", "DOM.Iterable"],
"module": "ESNext",
"types": ["vite/client"],
"skipLibCheck": true,
/* Bundler mode */
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"moduleDetection": "force",
"noEmit": true,
"jsx": "react-jsx",
/* Linting */
"strict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"erasableSyntaxOnly": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedSideEffectImports": true
},
"include": ["src"]
}

7
frontend/tsconfig.json Normal file
View file

@ -0,0 +1,7 @@
{
"files": [],
"references": [
{ "path": "./tsconfig.app.json" },
{ "path": "./tsconfig.node.json" }
]
}

View file

@ -0,0 +1,26 @@
{
"compilerOptions": {
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
"target": "ES2023",
"lib": ["ES2023"],
"module": "ESNext",
"types": ["node"],
"skipLibCheck": true,
/* Bundler mode */
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"moduleDetection": "force",
"noEmit": true,
/* Linting */
"strict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"erasableSyntaxOnly": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedSideEffectImports": true
},
"include": ["vite.config.ts"]
}

15
frontend/vite.config.ts Normal file
View file

@ -0,0 +1,15 @@
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'
// https://vite.dev/config/
export default defineConfig({
    plugins: [react()],
    server: {
        proxy: {
            // Forward API calls to the FastAPI dev server (uvicorn on :8000)
            // so the SPA and backend share an origin during development.
            '/api/': {
                target: 'http://localhost:8000',
                changeOrigin: true,
            }
        }
    }
})

65
gunicorn/gunicorn_conf.py Normal file
View file

@ -0,0 +1,65 @@
import multiprocessing
from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict
# Prometheus multiprocess mode must be told when a gunicorn worker dies so
# that the worker's metric files can be reaped. The hook is only defined
# when the optional prometheus_client dependency is installed.
try:
    from prometheus_client import multiprocess

    def child_exit(_, worker):
        # Gunicorn server hook, called in the master after a worker exits.
        multiprocess.mark_process_dead(worker.pid)
except ImportError:
    pass
class Settings(BaseSettings):
    """Gunicorn tuning knobs, loaded from the environment / .env file."""

    model_config = SettingsConfigDict(
        env_file=".env", env_file_encoding="utf-8", extra="ignore"
    )

    host: str = "0.0.0.0"
    port: int = 8000
    bind: str | None = None  # explicit bind address; overrides host:port
    workers_per_core: int = Field(1)
    max_workers: int | None = None  # hard cap on the computed worker count
    web_concurrency: int | None = None  # explicit worker count; overrides computation
    graceful_timeout: int = 120
    timeout: int = 120
    keepalive: int = 5
    log_level: str = "INFO"
    log_config: str = "/src/logging_production.ini"

    @property
    def computed_bind(self) -> str:
        """Address gunicorn binds to: ``bind`` if set, else ``host:port``."""
        return self.bind if self.bind else f"{self.host}:{self.port}"

    @property
    def computed_web_concurrency(self) -> int:
        """Worker count: explicit ``web_concurrency``, else derived from cores.

        Raises:
            ValueError: if ``web_concurrency`` is set to a negative value.
        """
        cores = multiprocessing.cpu_count()
        default_web_concurrency = self.workers_per_core * cores + 1
        if self.web_concurrency:
            # Raise a real exception instead of ``assert`` so the validation
            # survives running under ``python -O``.
            if self.web_concurrency <= 0:
                raise ValueError("web_concurrency must be a positive integer")
            return self.web_concurrency
        else:
            # At least two workers, capped by max_workers when configured.
            web_concurrency = max(default_web_concurrency, 2)
            if self.max_workers:
                return min(web_concurrency, self.max_workers)
            return web_concurrency
settings = Settings()

# Gunicorn config variables
# These module-level names are read by gunicorn when this file is passed via
# the --config option; the names must match gunicorn's setting names exactly.
loglevel = settings.log_level
workers = settings.computed_web_concurrency
bind = settings.computed_bind
worker_tmp_dir = "/dev/shm"  # tmpfs: avoids worker-heartbeat disk I/O stalls
graceful_timeout = settings.graceful_timeout
timeout = settings.timeout
keepalive = settings.keepalive
logconfig = settings.log_config

40
justfile Normal file
View file

@ -0,0 +1,40 @@
default:
just --list
run *args:
poetry run uvicorn src.main:app --reload {{args}}
mm *args:
poetry run alembic revision --autogenerate -m "{{args}}"
migrate:
poetry run alembic upgrade head
downgrade *args:
poetry run alembic downgrade {{args}}
black *args:
poetry run black {{args}} src
ruff *args:
poetry run ruff check {{args}} src
lint:
poetry run ruff format src
just ruff --fix
test:
PYTHONPATH=. pytest tests
# docker
up:
docker-compose up -d
kill *args:
docker-compose kill {{args}}
build:
docker-compose build
ps:
docker-compose ps

28
logging.ini Normal file
View file

@ -0,0 +1,28 @@
[loggers]
keys=root,uvicorn
[handlers]
keys=console
[formatters]
keys=standard
[logger_root]
level=INFO
handlers=console
formatter=standard
[logger_uvicorn]
propagate=0
handlers=console
formatter=standard
qualname=uvicorn
[handler_console]
class=logging.StreamHandler
level=DEBUG
formatter=standard
stream=ext://sys.stderr
[formatter_standard]
format=[%(asctime)s] [%(levelname)s] [%(name)s] %(message)s

44
logging_production.ini Normal file
View file

@ -0,0 +1,44 @@
[loggers]
keys=root,gunicorn.access,gunicorn.error,uvicorn
[handlers]
keys=console
[formatters]
keys=json
[logger_root]
level=INFO
handlers=console
formatter=json
[logger_gunicorn.access]
level=INFO
handlers=console
formatter=json
qualname=gunicorn.access
propagate=1
[logger_gunicorn.error]
level=INFO
handlers=console
formatter=json
qualname=gunicorn.error
propagate=0
[logger_uvicorn]
propagate=0
handlers=console
formatter=json
qualname=uvicorn
[handler_console]
class=logging.StreamHandler
level=INFO
formatter=json
stream=ext://sys.stderr
[formatter_json]
class=pythonjsonlogger.jsonlogger.JsonFormatter
format=[%(asctime)s] [%(levelname)s] [%(name)s] %(message)s
datefmt=%Y-%m-%dT%H:%M:%S

1782
poetry.lock generated Normal file

File diff suppressed because it is too large Load diff

48
pyproject.toml Normal file
View file

@ -0,0 +1,48 @@
[tool.poetry]
name = "pali-lili"
version = "0.0.0"
description = "Cloud orchestration microservice based on FastAPI."
authors = ["irl"]
readme = "README.md"
license = "BSD-2-Clause"
package-mode = false
[tool.poetry.dependencies]
python = "^3.12"
aiofiles = "^24.1.0"
bcrypt = "^5.0.0"
alembic = "^1.13.1"
asyncpg = "^0.29.0"
fastapi = "^0.115.12"
fastapi-pagination = "^0.13.1"
httpx = "^0.27.0"
psycopg2-binary = "^2.9.9"
pydantic = {extras = ["email"], version = "^2.11.4"}
pydantic-settings = "^2.9.1"
python-jose = {extras=["cryptography"], version = "^3.4.0"}
sqlalchemy = {extras = ["asyncio"], version = "^2.0.30"}
uvicorn = {extras = ["standard"], version = "^0.30.6"}
[tool.poetry.group.dev.dependencies]
black = "^25.1.0"
ruff = "^0.4.10"
pytest = "^8.3.5"
[tool.poetry.group.prod.dependencies]
gunicorn = "^22.0.0"
python-json-logger = "^2.0.7"
prometheus-client = "^0.20.0"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.black]
line-length = 92
[tool.pytest.ini_options]
asyncio_default_fixture_loop_scope = "module"
[tool.ruff]
target-version = "py312"
line-length = 92

17
scripts/postgres/backup Executable file
View file

@ -0,0 +1,17 @@
#!/bin/sh -e
# Dump the "app" database with pg_dump, compress it, and store it under
# /backups with a timestamped filename. Intended to run inside the
# postgres container (see the companion restore script).

echo "Backup process started."

# NOTE(review): exporting POSTGRES_USER to its own value only matters if the
# caller set it without exporting — confirm intent.
export POSTGRES_USER="${POSTGRES_USER}"

# Save the current date in YYYY-MM-DD format to a variable
current_datetime=$(date +%Y-%m-%d-%H%M%S)
backup_directory="/backups"
backup_filename="${backup_directory}/backup-${current_datetime}.dump.gz"

# Run pg_dump and compress its output, then save to /backups with the current date in the filename
pg_dump -Fc app -U "$POSTGRES_USER" | gzip > "$backup_filename"

echo "Backup has been created and saved to ${backup_filename}"

38
scripts/postgres/restore Executable file
View file

@ -0,0 +1,38 @@
#!/bin/sh -e
# Restore a compressed pg_dump backup (created by the companion backup
# script) into a freshly re-created database.
# Usage: restore <backup-file-name-inside-/backups>

# The directory where backups are stored
BACKUP_DIRECTORY="/backups"

# Check if a file name was provided as a parameter
if [ $# -eq 0 ]; then
    echo "No file name provided. Please provide a file name to check."
    exit 1
fi

# The file name is taken from the first argument provided to the script
file_name="$1"

# Full path to the file
full_file_path="${BACKUP_DIRECTORY}/${file_name}"

# Check if the file exists
if [ -f "$full_file_path" ]; then
    echo "File ${file_name} exists."
else
    echo "File ${file_name} does not exist."
    exit 1
fi

# NOTE(review): re-exporting these to their own values only matters if the
# caller set them without exporting — confirm intent.
export POSTGRES_USER="${POSTGRES_USER}"
export POSTGRES_DB="${POSTGRES_DB}"

# DESTRUCTIVE: the existing database is dropped before the restore begins.
echo "Dropping the database..."
dropdb "$POSTGRES_DB" -U "$POSTGRES_USER"

echo "Creating a new database..."
createdb "$POSTGRES_DB" --owner="$POSTGRES_USER" -U "$POSTGRES_USER"

echo "Applying the backup to the new database..."
gunzip -c "${full_file_path}" | pg_restore -U "$POSTGRES_USER" -d "$POSTGRES_DB"

echo "Backup applied successfully."

70
src/API.md Normal file
View file

@ -0,0 +1,70 @@
:::warning[Under construction]
This documentation is a work in progress. Please [get in touch](/contact) with us if you have any questions.
:::
Learn how to use the jasima.app API.
Everything that can be done within the portal can also be achieved using our API, documented in this section.
## Authentication
We use an API key for authentication.
This should be passed to the API in an `Authorization` header:
```http request
Authorization: Bearer <token>
```
## Data Format
The API uses JSON as the default format for both requests and responses, ensuring consistency and simplicity in data
handling:
* For GET requests, the API expects parameters to be passed via query strings.
* For other HTTP methods (e.g., POST, PUT), the request body (if required) should be formatted as JSON.
There is no need to explicitly specify content encoding in the headers, as the API assumes JSON by default for all
applicable requests and responses.
## Pagination
The API supports pagination to manage large datasets efficiently. Use the `page` and `size` query parameters to control the data returned in your requests.
Parameters:
* `page`: The page number to retrieve.
* `size`: The number of items to include per page.
Example request:
```http request
GET https://api.jasima.app/api/v1/widgets?page=1&size=10
```
Example response:
```json
{
"items": [
],
"total": 0,
"page": 1,
"size": 10,
"pages": 0
}
```
Bear in mind the following:
* The `page` field in the response indicates the current page being viewed.
* The `total` field shows the total number of items available.
* The `pages` field indicates the total number of pages, and can be used to determine whether there are additional pages
to fetch.
Use this structure to efficiently navigate through large datasets while ensuring optimal performance.
## Errors
:::info[TODO]
Document the common status codes, and the validation error response format.
:::
## Schemas

0
src/__init__.py Normal file
View file

56
src/config.py Normal file
View file

@ -0,0 +1,56 @@
from os.path import abspath, dirname, join
from typing import Any
from pydantic import PostgresDsn
from pydantic_settings import BaseSettings, SettingsConfigDict
from src.constants import Environment
# The API landing-page description is the bundled API.md, read once at import
# time so FastAPI can embed it into the generated OpenAPI document.
API_README_PATH = abspath(join(dirname(__file__), "API.md"))
with open(API_README_PATH, "r", encoding="utf-8") as f:
    API_README_MD = f.read()
class CustomBaseSettings(BaseSettings):
    """Base for all settings classes: reads a UTF-8 ``.env`` file and
    ignores unknown environment variables."""

    model_config = SettingsConfigDict(
        env_file=".env", env_file_encoding="utf-8", extra="ignore"
    )
class Config(CustomBaseSettings):
    """Application settings, loaded from the environment / .env file."""

    DATABASE_URL: PostgresDsn  # Used for alembic migrations
    DATABASE_ASYNC_URL: PostgresDsn  # asyncpg URL used by the application engine
    DATABASE_POOL_SIZE: int = 16
    DATABASE_POOL_TTL: int = 60 * 20  # 20 minutes
    DATABASE_POOL_PRE_PING: bool = True
    ENVIRONMENT: Environment = Environment.PRODUCTION
    CORS_ORIGINS: list[str] = ["*"]
    CORS_ORIGINS_REGEX: str | None = None
    CORS_HEADERS: list[str] = ["*"]
    # Required (no default) — startup fails without it.
    # NOTE(review): .env.example does not define API_KEY — confirm docs.
    API_KEY: str
    APP_VERSION: str = "0.0.0"
settings = Config()

# Metadata used to group endpoints in the generated OpenAPI docs.
tags_metadata = [
    {
        "name": "OpenTofu",
        "description": "Operations for managing OpenTofu deployments.",
    },
]

# Keyword arguments applied to the FastAPI() constructor in src.main.
app_configs: dict[str, Any] = {
    "title": "pali lili API",
    "version": settings.APP_VERSION,
    "description": API_README_MD,
    "openapi_tags": tags_metadata,
}
if not settings.ENVIRONMENT.is_debug:
    app_configs["openapi_url"] = None  # hide docs

32
src/constants.py Normal file
View file

@ -0,0 +1,32 @@
from enum import Enum
# SQLAlchemy constraint naming convention, so autogenerated Alembic migrations
# produce stable, predictable index/constraint names across databases.
DB_NAMING_CONVENTION = {
    "ix": "%(column_0_label)s_idx",
    "uq": "%(table_name)s_%(column_0_name)s_key",
    "ck": "%(table_name)s_%(constraint_name)s_check",
    "fk": "%(table_name)s_%(column_0_name)s_fkey",
    "pk": "%(table_name)s_pkey",
}
class Environment(str, Enum):
    """Deployment environment the application is running in.

    String-valued so it can be parsed directly from the ENVIRONMENT
    configuration variable. The helper properties previously mixed three
    comparison styles (``in``, ``is``, ``==``); they now consistently use
    identity-based enum comparison.
    """

    LOCAL = "LOCAL"
    TESTING = "TESTING"
    STAGING = "STAGING"
    PRODUCTION = "PRODUCTION"

    @property
    def is_debug(self) -> bool:
        """True for every non-production environment."""
        return self in (Environment.LOCAL, Environment.STAGING, Environment.TESTING)

    @property
    def is_local(self) -> bool:
        """True only on a developer machine."""
        return self is Environment.LOCAL

    @property
    def is_testing(self) -> bool:
        """True only under the test runner."""
        return self is Environment.TESTING

    @property
    def is_deployed(self) -> bool:
        """True for environments running on shared infrastructure."""
        return self in (Environment.STAGING, Environment.PRODUCTION)

57
src/database.py Normal file
View file

@ -0,0 +1,57 @@
import contextlib
from typing import AsyncIterator, AsyncGenerator, Annotated
from fastapi import Depends
from sqlalchemy import (
MetaData,
)
from sqlalchemy.ext.asyncio import (
AsyncConnection,
create_async_engine,
AsyncSession,
async_sessionmaker,
)
from src.config import settings
from src.constants import DB_NAMING_CONVENTION
# Engine and session factory are created once at import time and shared by
# the whole process; pooling behaviour comes from the application settings.
DATABASE_URL = str(settings.DATABASE_ASYNC_URL)

engine = create_async_engine(
    DATABASE_URL,
    pool_size=settings.DATABASE_POOL_SIZE,
    pool_recycle=settings.DATABASE_POOL_TTL,
    pool_pre_ping=settings.DATABASE_POOL_PRE_PING,
)
# Shared metadata carrying the project constraint-naming convention.
metadata = MetaData(naming_convention=DB_NAMING_CONVENTION)
sessionmaker = async_sessionmaker(autocommit=False, expire_on_commit=False, bind=engine)
@contextlib.asynccontextmanager
async def get_db_connection() -> AsyncIterator[AsyncConnection]:
    """Yield a connection inside a transaction.

    ``engine.begin()`` commits on clean exit; on error the transaction is
    rolled back explicitly before the exception is re-raised.
    """
    async with engine.begin() as connection:
        try:
            yield connection
        except Exception:
            await connection.rollback()
            raise
@contextlib.asynccontextmanager
async def get_db_session() -> AsyncIterator[AsyncSession]:
    """Yield an ORM session, rolling back on error and always closing it.

    Commits are the caller's responsibility; only the rollback-on-exception
    and close are handled here.
    """
    session = sessionmaker()
    try:
        yield session
    except Exception:
        await session.rollback()
        raise
    finally:
        await session.close()
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency: yield a per-request database session."""
    async with get_db_session() as session:
        yield session

# Annotated alias so route handlers can declare ``db: DbSession``.
DbSession = Annotated[AsyncSession, Depends(get_db)]

33
src/exceptions.py Normal file
View file

@ -0,0 +1,33 @@
from typing import Any
from fastapi import HTTPException, status
class DetailedHTTPException(HTTPException):
    """Base HTTP error carrying class-level status code and detail message.

    Subclasses override ``STATUS_CODE`` and ``DETAIL``; extra keyword
    arguments (e.g. ``headers``) are forwarded to ``HTTPException``.
    """

    STATUS_CODE = status.HTTP_500_INTERNAL_SERVER_ERROR
    DETAIL = "Server error"

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(status_code=self.STATUS_CODE, detail=self.DETAIL, **kwargs)
class PermissionDenied(DetailedHTTPException):
    """403 response for callers lacking permission for the operation."""

    STATUS_CODE = status.HTTP_403_FORBIDDEN
    DETAIL = "Permission denied"
class NotFound(DetailedHTTPException):
    """404 response for a resource that does not exist.

    Previously this class only overrode the status code, so responses
    carried the inherited detail "Server error" with a 404 — inconsistent
    with every sibling exception, which all override ``DETAIL``.
    """

    STATUS_CODE = status.HTTP_404_NOT_FOUND
    DETAIL = "Not Found"
class BadRequest(DetailedHTTPException):
    """400 response for requests the server cannot process."""

    STATUS_CODE = status.HTTP_400_BAD_REQUEST
    DETAIL = "Bad Request"
class NotAuthenticated(DetailedHTTPException):
    """401 response; advertises Bearer auth via the WWW-Authenticate header."""

    STATUS_CODE = status.HTTP_401_UNAUTHORIZED
    DETAIL = "User not authenticated"

    def __init__(self) -> None:
        # A 401 should tell the client which authentication scheme to use.
        super().__init__(headers={"WWW-Authenticate": "Bearer"})

44
src/main.py Normal file
View file

@ -0,0 +1,44 @@
from contextlib import asynccontextmanager
from typing import AsyncGenerator
from fastapi import FastAPI
from fastapi_pagination import add_pagination
from starlette.middleware.cors import CORSMiddleware
from src.config import app_configs, settings
from src.tofu.exceptions import TofuStateLocked
from src.tofu.router import router as tofu_router, tofu_state_locked_handler
from src.tofu.tasks import periodic_tofu_process_tasks
@asynccontextmanager
async def lifespan(_application: FastAPI) -> AsyncGenerator:
    """Application lifespan manager.

    Startup work runs before ``yield``; shutdown work would go after it.
    """
    # Startup: kick off the periodic OpenTofu task processor.
    await periodic_tofu_process_tasks()
    yield
    # Shutdown: nothing to clean up yet.
# Application assembly: FastAPI instance, pagination, CORS, error handlers
# and routers — executed once at import time.
app = FastAPI(**app_configs, lifespan=lifespan)
add_pagination(app)

app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.CORS_ORIGINS,
    allow_origin_regex=settings.CORS_ORIGINS_REGEX,
    allow_credentials=True,
    allow_methods=("GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"),
    allow_headers=settings.CORS_HEADERS,
)

# Translate OpenTofu state-lock conflicts into a structured HTTP response.
app.add_exception_handler(TofuStateLocked, tofu_state_locked_handler)
app.include_router(
    tofu_router,
    prefix="/api/v1/tofu",
)
@app.get("/healthcheck", include_in_schema=False)
async def healthcheck() -> dict[str, str]:
    """Liveness probe: always returns ``{"status": "ok"}``."""
    return {"status": "ok"}

50
src/models.py Normal file
View file

@ -0,0 +1,50 @@
from datetime import datetime
from typing import Any
from sqlalchemy import JSON, DateTime, func, ForeignKey
from sqlalchemy.orm import (
DeclarativeBase,
Mapped,
mapped_column,
relationship,
declared_attr,
)
from src.database import metadata
class CustomBase(DeclarativeBase):
    """Declarative base for all ORM models.

    Maps ``datetime`` annotations to timezone-aware columns and
    ``dict[str, Any]`` to JSON, and shares the metadata configured with the
    project naming convention.
    """

    type_annotation_map = {
        datetime: DateTime(timezone=True),
        dict[str, Any]: JSON,
    }
    metadata = metadata
class ActivatedMixin:
    """Adds an ``active`` flag column (soft enable/disable), defaulting to True."""

    active: Mapped[bool] = mapped_column(default=True)
class DeletedTimestampMixin:
    """Adds a soft-delete timestamp column; NULL means not deleted."""

    deleted_at: Mapped[datetime | None] = mapped_column(nullable=True)
class DescriptionMixin:
    """Adds a required free-text description column."""

    description: Mapped[str]
class IdMixin:
    """Adds an autoincrementing integer primary key."""

    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
class TimestampMixin:
    """Adds row creation/update timestamps maintained by the database."""

    # Set once on insert.
    created_at: Mapped[datetime] = mapped_column(default=func.now())
    # Refreshed on every UPDATE via onupdate.
    updated_at: Mapped[datetime] = mapped_column(default=func.now(), onupdate=func.now())
class TofuInstanceMixin:
    """Adds a foreign key and relationship to ``TofuInstance``."""

    tofu_instance_id: Mapped[int] = mapped_column(ForeignKey("tofu_instance.id"))

    @declared_attr
    def tofu_instance(cls):
        # Relationship resolved by class name to avoid an import cycle.
        return relationship("TofuInstance")

91
src/schemas.py Normal file
View file

@ -0,0 +1,91 @@
from datetime import datetime, timezone
from typing import Annotated, Literal
from uuid import UUID
from email_validator.rfc_constants import DOMAIN_NAME_REGEX
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel, ConfigDict, PlainSerializer, RootModel, StringConstraints
def datetime_to_utc_str(dt: datetime) -> str:
    """Serialize *dt* as an ISO-8601 UTC string with a trailing ``Z``.

    Naive datetimes are assumed to already be in UTC; aware ones are
    converted to UTC before formatting.
    """
    aware = dt.astimezone(timezone.utc) if dt.tzinfo else dt.replace(tzinfo=timezone.utc)
    return aware.strftime("%Y-%m-%dT%H:%M:%SZ")
# datetime that serializes to an ISO-8601 UTC string ("...Z") in responses.
CustomDatetime = Annotated[datetime, PlainSerializer(datetime_to_utc_str, return_type=str)]
class CustomModelMixin:
    """Shared configuration and helpers for the project's pydantic models."""

    model_config = ConfigDict(
        populate_by_name=True,
    )

    def serializable_dict(self, **kwargs):
        """Return a dict which contains only serializable fields."""
        # NOTE(review): kwargs is accepted but unused — confirm before removing.
        default_dict = self.model_dump()
        # UUIDs become plain strings so the dict is JSON-ready.
        return jsonable_encoder(default_dict, custom_encoder={UUID: str})
class CustomModel(BaseModel, CustomModelMixin):
    """Project-wide base model; all schema classes should extend this."""

    pass
class CustomRootModel(RootModel, CustomModelMixin):
    """Project-wide base for root models (single-value payloads)."""

    pass
class OkResponse(CustomModel):
    """
    A generic payload to respond with to indicate success where no other information is required or available.

    Pairs well with HTTP status code 200.
    """

    # Always True; Literal makes it a constant in the OpenAPI schema.
    ok: Literal[True]
class AcceptedResponse(CustomModel):
    """
    A generic payload to respond with where a request has been accepted but perhaps has not yet been processed.

    Pairs well with HTTP status code 202 (Accepted).
    """

    id: int
class CreatedResponse(CustomModel):
    """
    A generic payload to respond with where a request has resulted in a thing being created.

    Pairs well with HTTP status code 201 (Created).
    """

    id: int
class ActivatedMixin:
    """Adds the serialized ``active`` flag."""

    active: bool
class DescriptionMixin:
    """Adds the serialized free-text description."""

    description: str
class DeletedTimestampMixin:
    """Adds the serialized soft-delete timestamp."""

    # NOTE(review): the ORM column is nullable, so this may need to be
    # ``CustomDatetime | None`` for rows that are not deleted — confirm.
    deleted_at: CustomDatetime
class IdMixin:
    """Adds the serialized integer primary key."""

    id: int
class TimestampMixin:
    """Adds the serialized creation/update timestamps (UTC 'Z' strings)."""

    created_at: CustomDatetime
    updated_at: CustomDatetime
# Normalized domain name: whitespace-stripped, lower-cased, validated against
# the email-validator domain-name grammar.
DomainName = Annotated[
    str, StringConstraints(strip_whitespace=True, to_lower=True, pattern=DOMAIN_NAME_REGEX)
]

0
src/tofu/__init__.py Normal file
View file

165
src/tofu/client.py Normal file
View file

@ -0,0 +1,165 @@
import asyncio
import json
import os
import shutil
import tempfile
from datetime import datetime, timezone
from json import JSONDecodeError
from typing import Any, TYPE_CHECKING
import aiofiles
from sqlalchemy.ext.asyncio import AsyncSession
from src.tofu.config import settings
from src.tofu.exceptions import TofuExecutionError, TofuTypeError
from src.tofu.models import TofuInstanceTask, TofuInstanceTaskLog
from src.tofu.security import generate_password, generate_password_hash
def _convert_python_to_tf(value: Any) -> Any:
"""Convert Python types to Terraform-compatible types."""
if isinstance(value, bool):
return value
elif isinstance(value, (int, float)):
return value
elif isinstance(value, str):
return value
elif isinstance(value, list):
return [_convert_python_to_tf(item) for item in value]
elif isinstance(value, dict):
return {str(k): _convert_python_to_tf(v) for k, v in value.items()}
elif value is None:
return None
else:
raise TofuTypeError(f"Unsupported type for Terraform conversion: {type(value)}")
class TofuManager:
    """Drives the OpenTofu CLI for a single instance task.

    Use as an async context manager: on exit the instance's one-shot HTTP
    state-backend password is cleared, and a temporary working directory is
    removed only on success (kept after an exception for debugging).
    """

    def __init__(
        self,
        db: AsyncSession,
        instance_task: TofuInstanceTask,
        working_dir: str | None = None,
    ):
        self.db = db
        self.instance_task = instance_task
        # Remember whether we own the working directory so __aexit__ knows
        # whether it may delete it.
        self._is_temp_dir = working_dir is None
        self._working_dir = working_dir
        self.tofu_path = settings.OPENTOFU_PATH

    async def __aenter__(self) -> "TofuManager":
        return self

    async def __aexit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        # Invalidate the one-shot state-backend credential.
        self.instance_task.instance.state_password = None
        await self.db.commit()
        # Only clean up temporary directories if there was no exception
        if self._is_temp_dir and self._working_dir and exc_type is None:
            shutil.rmtree(self._working_dir)

    async def _create_config(self) -> None:
        """Write main.tf.json, injecting the HTTP state backend and a fresh
        one-shot password (only its hash is stored on the instance row)."""
        config = self.instance_task.instance.configuration
        self.password = generate_password()
        self.instance_task.instance.state_password = generate_password_hash(self.password)
        await self.db.commit()
        if "terraform" not in config:
            config["terraform"] = {}
        # NOTE(review): backend address is hard-coded to localhost:8000 —
        # confirm this matches the deployed API binding.
        config["terraform"]["backend"] = {
            "http": {
                "address": f"http://localhost:8000/api/v1/tofu/instances/{self.instance_task.instance_id}/state",
                "lock_address": f"http://localhost:8000/api/v1/tofu/instances/{self.instance_task.instance_id}/state",
                "unlock_address": f"http://localhost:8000/api/v1/tofu/instances/{self.instance_task.instance_id}/state",
            }
        }
        async with aiofiles.open(await self.config_file(), "w") as f:
            await f.write(json.dumps(config, indent=2))

    async def _process_output_line(self, line: str) -> None:
        """Persist one line of tofu output as a task log row.

        Lines that are not valid JSON are wrapped in a synthetic record
        tagged ``type: decode-error`` using the current UTC time.
        """
        try:
            data = json.loads(line)
        except json.decoder.JSONDecodeError:
            data = {
                "@level": "info",
                "@timestamp": datetime.now(tz=timezone.utc).strftime(
                    "%Y-%m-%dT%H:%M:%S.%f%z"
                ),
                "@module": "manager",
                "@message": line,
                "type": "decode-error",
            }
        log = TofuInstanceTaskLog(
            instance_task_id=self.instance_task.id,
            # Format mirrors the synthetic timestamp written above.
            timestamp=datetime.strptime(data["@timestamp"], "%Y-%m-%dT%H:%M:%S.%f%z"),
            log=data,
        )
        self.db.add(log)
        await self.db.commit()

    async def _run_command(
        self,
        command: str,
        *args: str,
        json_output: bool = False,
        log_output: bool = True,
        return_output: bool = False,
    ) -> str | None:
        """Run a tofu subcommand in the working directory.

        Stdout is streamed line by line and optionally logged and/or
        collected; raises ``TofuExecutionError`` on a non-zero exit status.

        NOTE(review): reads ``self.password``, which is only set by
        ``_create_config()`` — every public operation calls that first.
        """
        cmd = [self.tofu_path, command] + list(args)
        if json_output:
            cmd.append("-json")
        process = await asyncio.create_subprocess_exec(
            *cmd,
            cwd=await self.working_dir(),
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.DEVNULL,
            env={
                # Credentials for the HTTP state backend injected by
                # _create_config().
                "TF_HTTP_USERNAME": "tofu",
                "TF_HTTP_PASSWORD": self.password,
            },
        )
        if TYPE_CHECKING:
            assert process.stdout is not None
        result = []
        async for line in process.stdout:
            if return_output:
                # NOTE(review): decoded lines keep their trailing newline, so
                # the final join double-spaces them — harmless for JSON parsing.
                result.append(line.decode("utf-8"))
            if log_output:
                await self._process_output_line(line.decode("utf-8"))
        await process.wait()
        if process.returncode != 0:
            raise TofuExecutionError(f"Tofu command failed: {process.returncode}")
        return "\n".join(result) if return_output else None

    async def config_file(self) -> str:
        """Path of the generated main.tf.json inside the working directory."""
        return os.path.join(await self.working_dir(), "main.tf.json")

    async def working_dir(self) -> str:
        """Return the working directory, creating a temp one on first use."""
        if self._working_dir:
            return self._working_dir
        self._working_dir = await asyncio.to_thread(tempfile.mkdtemp, prefix="tofu-")
        return self._working_dir

    async def init(self, upgrade: bool = True) -> None:
        """Run ``tofu init`` (with ``-upgrade`` by default)."""
        await self._create_config()
        args = []
        if upgrade:
            args.append("-upgrade")
        await self._run_command("init", *args, json_output=True)

    async def apply(self) -> None:
        """Run ``tofu apply -auto-approve``."""
        await self._create_config()
        await self._run_command("apply", "-auto-approve", json_output=True)

    async def destroy(self) -> None:
        """Run ``tofu destroy -auto-approve``."""
        await self._create_config()
        await self._run_command("destroy", "-auto-approve", json_output=True)

    async def output(self) -> None:
        """Run ``tofu output`` and store the parsed JSON on the instance."""
        await self._create_config()
        try:
            outputs = await self._run_command(
                "output", json_output=True, log_output=False, return_output=True
            )
            self.instance_task.instance.outputs = json.loads(outputs)
            await self.db.commit()
        except JSONDecodeError:
            raise TofuExecutionError("Could not parse JSON output")

13
src/tofu/config.py Normal file
View file

@ -0,0 +1,13 @@
from os.path import abspath, dirname, join
from src.config import CustomBaseSettings
class Config(CustomBaseSettings):
    """Settings for the OpenTofu integration (overridable via environment)."""

    # Filesystem path to the OpenTofu binary used by TofuManager.
    OPENTOFU_PATH: str = "/usr/bin/tofu"
    # Root directory containing the bundled OpenTofu modules.
    OPENTOFU_MODULES_PATH: str = abspath(
        join(dirname(__file__), "..", "..", "tofu")
    )  # no trailing slash from abspath


settings = Config()

39
src/tofu/exceptions.py Normal file
View file

@ -0,0 +1,39 @@
from typing import Any
from starlette import status
from starlette.exceptions import HTTPException
class TofuExecutionError(RuntimeError):
    """Raised when an OpenTofu command fails or its output cannot be used."""
class TofuTypeError(TypeError):
    """Raised when a value's type cannot be converted to an OpenTofu type."""
class TofuStateLocked(RuntimeError):
    """Raised when the state is locked and the requestor does not hold the lock."""

    def __init__(self, lock: dict[str, Any]) -> None:
        # Keep the lock document so HTTP handlers can echo it back to clients.
        self.lock = lock
class TofuForbiddenException(HTTPException):
    """403 response raised while a client host is brute-force blocked."""

    def __init__(
        self,
        **kwargs: Any,
    ) -> None:
        # kwargs are accepted for call-site compatibility but intentionally unused.
        super().__init__(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
class TofuUnauthorizedException(HTTPException):
    """401 response that also advertises the HTTP Basic challenge."""

    def __init__(
        self,
        **kwargs: Any,
    ) -> None:
        # kwargs are accepted for call-site compatibility but intentionally unused.
        super().__init__(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Unauthorized",
            headers={"WWW-Authenticate": 'Basic realm="jasima"'},
        )

107
src/tofu/models.py Normal file
View file

@ -0,0 +1,107 @@
from datetime import datetime
from enum import Enum
from typing import Any
from sqlalchemy import ForeignKey, func, text
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import Mapped, mapped_column, relationship
from src.models import CustomBase, TimestampMixin, IdMixin, DeletedTimestampMixin
class TofuInstanceStatus(Enum):
    """Lifecycle states of a TofuInstance (values mirror the member names)."""

    ACTIVE = "ACTIVE"
    DEPLOYING = "DEPLOYING"
    DESTROYED = "DESTROYED"
    DESTROYING = "DESTROYING"
    DRIFTED = "DRIFTED"
    FAILED = "FAILED"
    FAILED_DESTROY = "FAILED_DESTROY"
    PENDING = "PENDING"
    PENDING_DESTROY = "PENDING_DESTROY"
    PENDING_DRIFT_CHECK = "PENDING_DRIFT_CHECK"
class TofuInstance(CustomBase, IdMixin, TimestampMixin, DeletedTimestampMixin):
    """A deployed (or deployable) OpenTofu configuration and its state."""

    __tablename__ = "tofu_instance"

    status: Mapped[TofuInstanceStatus] = mapped_column(default=TofuInstanceStatus.PENDING)
    # Raw Terraform JSON configuration written to main.tf.json at deploy time.
    configuration: Mapped[dict[str, Any]]
    # Captured from `tofu output -json` after a successful deploy.
    outputs: Mapped[dict[str, Any] | None]
    plan: Mapped[dict[str, Any] | None]
    # Terraform state document managed via the HTTP backend endpoints.
    state: Mapped[dict[str, Any] | None]
    # bcrypt hash of the per-run HTTP state backend password (cleared after runs).
    state_password: Mapped[bytes | None]
    # Lock document held by the Terraform HTTP backend LOCK verb, if any.
    state_lock: Mapped[dict[str, Any] | None]
    status_changed_at: Mapped[datetime] = mapped_column(default=func.now())
    drift_checked_at: Mapped[datetime | None]

    tasks = relationship("TofuInstanceTask", back_populates="instance")
    status_changes = relationship("TofuInstanceStatusChange", back_populates="instance")
class TofuInstanceStatusChange(CustomBase, IdMixin):
    """Audit record of a single status transition of an instance."""

    __tablename__ = "tofu_instance_status_change"

    instance_id: Mapped[int] = mapped_column(ForeignKey("tofu_instance.id"))
    # Task that triggered the transition.
    instance_task_id: Mapped[int] = mapped_column(ForeignKey("tofu_instance_task.id"))
    timestamp: Mapped[datetime] = mapped_column(default=func.now())
    old_status: Mapped[TofuInstanceStatus]
    new_status: Mapped[TofuInstanceStatus]

    instance = relationship("TofuInstance", back_populates="status_changes")
class TofuInstanceTaskType(Enum):
    """Kinds of background work that can be queued against an instance."""

    CHECK_DRIFT = "CHECK_DRIFT"
    DEPLOY = "DEPLOY"
    DESTROY = "DESTROY"
class TofuInstanceTaskStatus(Enum):
    """Lifecycle states of a queued task."""

    CANCELED = "CANCELED"
    COMPLETED = "COMPLETED"
    FAILED = "FAILED"
    PENDING = "PENDING"
    RUNNING = "RUNNING"
class TofuInstanceTask(CustomBase, IdMixin, TimestampMixin):
    """A queued unit of work (deploy/destroy/drift-check) for an instance."""

    __tablename__ = "tofu_instance_task"

    instance_id: Mapped[int] = mapped_column(ForeignKey("tofu_instance.id"))
    task: Mapped[TofuInstanceTaskType]
    status: Mapped[TofuInstanceTaskStatus] = mapped_column(
        default=TofuInstanceTaskStatus.PENDING
    )
    # Set when a worker picks the task up / finishes it.
    start_time: Mapped[datetime | None]
    end_time: Mapped[datetime | None]

    instance = relationship("TofuInstance", back_populates="tasks")
class TofuInstanceTaskLog(CustomBase, IdMixin):
    """One JSON log record emitted by a tofu run for a task."""

    __tablename__ = "tofu_instance_task_log"

    instance_task_id: Mapped[int] = mapped_column(ForeignKey("tofu_instance_task.id"))
    timestamp: Mapped[datetime] = mapped_column(default=func.now())
    # Parsed tofu JSON log line (or a synthetic record for non-JSON output).
    log: Mapped[dict[str, Any]]
class TofuBruteForce(CustomBase, IdMixin, TimestampMixin):
    """Temporary block entry recorded after failed authentication from a host."""

    __tablename__ = "tofu_brute_force"

    host: Mapped[str]
    # Expiry is computed server-side: one hour after insertion.
    expiry: Mapped[datetime] = mapped_column(default=func.now() + text("INTERVAL '1 hour'"))
def update_tofu_instance_status(
    db: AsyncSession, instance: TofuInstance, task_id: int, new_status: TofuInstanceStatus
) -> None:
    """Record and apply a status transition for *instance*.

    Adds a TofuInstanceStatusChange audit row, updates the instance's status
    and status_changed_at, and stamps deleted_at when the instance reaches
    DESTROYED. The caller is responsible for committing the session.
    """
    db.add(
        TofuInstanceStatusChange(
            instance_id=instance.id,
            instance_task_id=task_id,
            old_status=instance.status,
            new_status=new_status,
        )
    )
    instance.status = new_status
    instance.status_changed_at = func.now()
    if new_status == TofuInstanceStatus.DESTROYED:
        instance.deleted_at = func.now()

356
src/tofu/router.py Normal file
View file

@ -0,0 +1,356 @@
import asyncio
import json
from datetime import datetime, timezone, timedelta
from typing import Any, Annotated, AsyncIterator
from fastapi import APIRouter, HTTPException, Request, Query
from fastapi_pagination import Page
from fastapi_pagination.ext.sqlalchemy import apaginate
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from starlette import status
from starlette.responses import JSONResponse, Response, StreamingResponse
from src.database import DbSession
from src.schemas import AcceptedResponse, OkResponse
from src.tofu.exceptions import TofuStateLocked, TofuUnauthorizedException
from src.tofu.models import (
TofuInstance,
TofuInstanceTask,
TofuInstanceTaskType,
TofuInstanceStatus,
TofuInstanceTaskLog,
TofuInstanceTaskStatus,
update_tofu_instance_status,
)
from src.tofu.schemas import (
TofuInstanceDetail,
TofuInstanceCreate,
TofuInstanceSummary,
TofuInstanceUpdate,
TofuInstanceState,
TofuInstanceStateLock,
)
from src.tofu.security import (
Password,
validate_password,
generate_password_hash,
block_host_brute_force,
APIKey,
)
router = APIRouter()

# Terraform's HTTP state backend sends the lock ID as the "ID" query parameter.
TofuLockID = Annotated[str | None, Query(alias="ID", description="State lock ID")]
async def tofu_state_locked_handler(request: Request, exc: Exception) -> Response:
    """Exception handler translating TofuStateLocked into a 423 Locked response."""
    if not isinstance(exc, TofuStateLocked):
        # Should be unreachable: this handler is registered for TofuStateLocked.
        raise RuntimeError(exc)
    return JSONResponse(status_code=status.HTTP_423_LOCKED, content=exc.lock)
@router.get(
    "/instances",
    response_model=Page[TofuInstanceSummary],
    tags=["OpenTofu"],
)
async def list_tofu_instances(db: DbSession, _: APIKey) -> Page[TofuInstance]:
    """Return a paginated list of instances that have not been soft-deleted."""
    return await apaginate(
        db, select(TofuInstance).filter(TofuInstance.deleted_at == None)  # noqa: E711
    )
@router.post(
    "/instances",
    response_model=AcceptedResponse,
    status_code=status.HTTP_202_ACCEPTED,
    tags=["OpenTofu"],
)
async def create_tofu_instance(
    instance_data: TofuInstanceCreate, db: DbSession, _: APIKey
) -> JSONResponse:
    """Create an instance plus its initial DEPLOY task; returns 202 with the id."""
    # bcrypt hash of the optional state password (generate_password_hash
    # returns bytes, matching the state_password column).
    hashed_password: bytes | None = (
        generate_password_hash(instance_data.password) if instance_data.password else None
    )
    instance = TofuInstance(
        configuration=instance_data.configuration, state_password=hashed_password
    )
    db.add(instance)
    await db.flush()  # populate instance.id for the task FK
    task = TofuInstanceTask(instance_id=instance.id, task=TofuInstanceTaskType.DEPLOY)
    db.add(task)
    await db.commit()
    await db.refresh(instance)
    return JSONResponse(status_code=status.HTTP_202_ACCEPTED, content={"id": instance.id})
async def _get_tofu_instance(
    db: AsyncSession,
    instance_id: int,
    password: str | None = None,
    host: str | None = None,
    *,
    detail: bool = False,
    password_required: bool = True,
) -> TofuInstance:
    """Load an instance by id, optionally enforcing state-password auth.

    With detail=True the tasks and status_changes collections are eagerly
    loaded. When password_required, a missing host or failed password check
    raises 401; failed checks also record the host for brute-force blocking.
    """
    if detail:
        query = (
            select(TofuInstance)
            .filter(TofuInstance.id == instance_id)
            .options(
                selectinload(TofuInstance.status_changes), selectinload(TofuInstance.tasks)
            )
        )
    else:
        query = select(TofuInstance).filter(TofuInstance.id == instance_id)
    row = (await db.execute(query)).first()
    if not row:
        raise HTTPException(status_code=404, detail="No instance found")
    if password_required:
        if not host:
            raise TofuUnauthorizedException
        if not validate_password(password, row[0].state_password):
            await block_host_brute_force(db, host)
            raise TofuUnauthorizedException
    return row[0]
@router.get(
    "/instances/{instance_id}",
    response_model=TofuInstanceDetail,
    tags=["OpenTofu"],
)
async def get_tofu_instance(instance_id: int, db: DbSession, _: APIKey) -> TofuInstance:
    """Return full instance detail (tasks and status history included)."""
    return await _get_tofu_instance(db, instance_id, detail=True, password_required=False)
@router.put(
    "/instances/{instance_id}",
    response_model=AcceptedResponse,
    status_code=status.HTTP_202_ACCEPTED,
    response_description="Update accepted (deployment task pending)",
    responses={
        status.HTTP_200_OK: {"description": "Update Successful", "model": AcceptedResponse},
        status.HTTP_404_NOT_FOUND: {"description": "Instance not found"},
        status.HTTP_412_PRECONDITION_FAILED: {
            "description": "Update failed due to current status"
        },
    },
    tags=["OpenTofu"],
)
async def update_tofu_instance(
    instance_id: int, instance_data: TofuInstanceUpdate, db: DbSession, _: APIKey
) -> JSONResponse:
    """Replace an instance's configuration/password and queue a new DEPLOY task.

    Only ACTIVE or DRIFTED instances may be updated; the row is held
    FOR UPDATE for the duration of the status transition.
    """
    query = select(TofuInstance).with_for_update().filter(TofuInstance.id == instance_id)
    row = (await db.execute(query)).first()
    if not row:
        raise HTTPException(status_code=404, detail="Instance not found")
    instance = row[0]
    if instance.status not in [TofuInstanceStatus.ACTIVE, TofuInstanceStatus.DRIFTED]:
        raise HTTPException(
            status_code=412, detail="Updates only allowed for active instances"
        )
    if instance_data.configuration:
        instance.configuration = instance_data.configuration
    task = TofuInstanceTask(instance_id=instance.id, task=TofuInstanceTaskType.DEPLOY)
    db.add(task)
    await db.flush()  # populate task.id for the status-change audit row
    update_tofu_instance_status(db, instance, task.id, TofuInstanceStatus.PENDING)
    if instance_data.password:
        # Bug fix: the hash was previously assigned to a non-existent
        # ``password`` attribute; the model column is ``state_password``.
        instance.state_password = generate_password_hash(instance_data.password)
    await db.commit()  # release FOR UPDATE lock
    return JSONResponse(status_code=status.HTTP_202_ACCEPTED, content={"id": instance.id})
@router.delete(
    "/instances/{instance_id}",
    status_code=202,
    response_model=AcceptedResponse,
    tags=["OpenTofu"],
)
async def destroy_tofu_instance(instance_id: int, db: DbSession, _: APIKey) -> Response:
    """Queue a DESTROY task for the instance; returns 202 with the id.

    Allowed from ACTIVE, DRIFTED or FAILED; 412 otherwise.
    """
    query = select(TofuInstance).with_for_update().filter(TofuInstance.id == instance_id)
    row = (await db.execute(query)).first()
    if not row:
        raise HTTPException(status_code=404, detail="Resource not found")
    instance = row[0]
    if instance.status not in [
        TofuInstanceStatus.ACTIVE,
        TofuInstanceStatus.DRIFTED,
        TofuInstanceStatus.FAILED,
    ]:
        raise HTTPException(
            status_code=412,
            detail="Instance cannot be destroyed currently as it is pending update",
        )
    task = TofuInstanceTask(instance_id=instance.id, task=TofuInstanceTaskType.DESTROY)
    db.add(task)
    await db.flush()  # populate task.id for the status-change audit row
    update_tofu_instance_status(db, instance, task.id, TofuInstanceStatus.PENDING_DESTROY)
    await db.commit()  # release FOR UPDATE lock
    return JSONResponse(status_code=status.HTTP_202_ACCEPTED, content={"id": instance_id})
async def _get_tofu_instance_state(
    db: AsyncSession,
    instance_id: int,
    password: str | None = None,
    host: str | None = None,
    *,
    password_required: bool = True,
) -> dict[str, Any]:
    """Fetch the stored Terraform state for an instance; 404 when absent."""
    instance = await _get_tofu_instance(
        db, instance_id, password, host, password_required=password_required
    )
    if instance.state:
        return instance.state
    raise HTTPException(status_code=404, detail="No state found")
@router.get(
    "/instances/{instance_id}/state",
    response_model=TofuInstanceState,
    tags=["OpenTofu"],
)
async def get_tofu_instance_state_route(
    instance_id: int, db: DbSession, password: Password, request: Request
) -> dict[str, Any]:
    """Return the stored state document (Terraform HTTP backend GET)."""
    return await _get_tofu_instance_state(db, instance_id, password, request.client.host)
@router.post(
    "/instances/{instance_id}/state",
    response_model=None,
    tags=["OpenTofu"],
)
async def update_tofu_instance_state(
    instance_id: int,
    new_state: TofuInstanceState,
    db: DbSession,
    password: Password,
    request: Request,
    lock_id: TofuLockID = None,
) -> Response:
    """Store a new state document (Terraform HTTP backend POST).

    Rejected with 423 (via TofuStateLocked) when another holder's lock is active.
    """
    instance = await _get_tofu_instance(db, instance_id, password, request.client.host)
    if instance.state_lock and instance.state_lock["ID"] != lock_id:
        raise TofuStateLocked(instance.state_lock)
    instance.state = new_state.model_dump()
    await db.commit()
    return Response(status_code=200)
@router.api_route(
    "/instances/{instance_id}/state",
    methods=["LOCK"],
    response_model=OkResponse,
    tags=["OpenTofu"],
    include_in_schema=False,
)
async def lock_tofu_instance_state(
    instance_id: int,
    lock: TofuInstanceStateLock,
    db: DbSession,
    password: Password,
    request: Request,
) -> JSONResponse:
    """Acquire the state lock (Terraform HTTP backend LOCK verb).

    Returns 423 with the current lock document if a lock is already held.
    """
    instance = await _get_tofu_instance(db, instance_id, password, request.client.host)
    if instance.state_lock:
        return JSONResponse(status_code=423, content=instance.state_lock)
    instance.state_lock = lock.serializable_dict()
    await db.commit()
    return JSONResponse(
        status_code=200, content={"result": "ok"}
    )  # A 204 would make sense here but the spec needs 200
@router.api_route(
    "/instances/{instance_id}/state",
    response_model=OkResponse,
    methods=["UNLOCK"],
    tags=["OpenTofu"],
    include_in_schema=False,
)
async def unlock_tofu_instance_state(
    instance_id: int,
    db: DbSession,
    password: Password,
    request: Request,
    lock_id: TofuLockID = None,
) -> JSONResponse:
    """Release the state lock (Terraform HTTP backend UNLOCK verb).

    Succeeds when there is no lock, when the supplied lock ID matches, or
    when no ID is supplied (force-unlock); otherwise raises TofuStateLocked.
    """
    instance = await _get_tofu_instance(db, instance_id, password, request.client.host)
    if not instance.state_lock:
        # Bug fix: this response was previously constructed but not returned,
        # so execution fell through and crashed subscripting a None lock.
        return JSONResponse(
            status_code=200, content={"result": "ok"}
        )  # A 204 would make sense here but the spec needs 200
    if lock_id is None or instance.state_lock["ID"] == lock_id:
        # force-unlock seems to not give an ID to verify so accept no ID being present.
        # This may not be great for robustness but it's not a security issue as the protocol
        # requires returning the lock ID to clients anyway when the lock ID is not correct.
        instance.state_lock = None
        await db.commit()
        return JSONResponse(
            status_code=200, content={"result": "ok"}
        )  # A 204 would make sense here but the spec needs 200
    raise TofuStateLocked(instance.state_lock)
@router.delete(
    "/instances/{instance_id}/state",
    response_model=OkResponse,
    tags=["OpenTofu"],
)
async def purge_tofu_instance_state(
    instance_id: int,
    db: DbSession,
    password: Password,
    request: Request,
    lock_id: TofuLockID = None,
) -> JSONResponse:
    """Delete the stored state and any lock (Terraform HTTP backend DELETE).

    Refused with TofuStateLocked when a lock held by someone else is active.
    """
    instance = await _get_tofu_instance(db, instance_id, password, request.client.host)
    if instance.state_lock:
        if instance.state_lock["ID"] != lock_id:
            raise TofuStateLocked(instance.state_lock)
    instance.state = None
    instance.state_lock = None
    await db.commit()
    return JSONResponse(status_code=200, content={"result": "ok"})
@router.get("/logs/{task_id}", tags=["OpenTofu"])
async def get_logs(task_id: int, db: DbSession, _: APIKey) -> StreamingResponse:
    """Stream a task's log records as NDJSON until the task reaches a
    terminal status or five minutes have elapsed.

    TODO: This could definitely be optimised but I don't think it's going
    to see heavy usage, just for debugging.
    """

    async def stream_log() -> AsyncIterator[str]:
        start = datetime.now(tz=timezone.utc)
        seen_log_id = 0  # highest log id already emitted
        task_query = select(TofuInstanceTask).where(TofuInstanceTask.id == task_id)
        row = (await db.execute(task_query)).first()
        if not row:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        task = row[0]
        while True:
            # Hard cap on streaming time.
            if datetime.now(tz=timezone.utc) - start > timedelta(minutes=5):
                return
            result = await db.execute(
                select(TofuInstanceTaskLog)
                .where(
                    TofuInstanceTaskLog.instance_task_id == task_id,
                    TofuInstanceTaskLog.id > seen_log_id,
                )
                .order_by(TofuInstanceTaskLog.timestamp)
            )
            for log in result.all():
                seen_log_id = log[0].id
                yield json.dumps(log[0].log) + "\n"
            # Re-read the task so status changes made by the worker are seen.
            await db.refresh(task)
            # Stop once the task can produce no further logs. CANCELED is now
            # treated as terminal too (previously such streams ran until the
            # 5-minute timeout); the leftover debug print() has been removed.
            # NOTE(review): PENDING was terminal in the original as well —
            # presumably to avoid waiting on tasks that never started; confirm.
            if task.status in [
                TofuInstanceTaskStatus.PENDING,
                TofuInstanceTaskStatus.CANCELED,
                TofuInstanceTaskStatus.FAILED,
                TofuInstanceTaskStatus.COMPLETED,
            ]:
                return
            await asyncio.sleep(1)

    return StreamingResponse(stream_log(), media_type="application/x-ndjson")

76
src/tofu/schemas.py Normal file
View file

@ -0,0 +1,76 @@
from datetime import datetime
from enum import Enum
from typing import Any
from uuid import UUID
from pydantic import ConfigDict
from src.schemas import CustomModel, TimestampMixin, CustomDatetime, IdMixin
from src.tofu.models import TofuInstanceStatus, TofuInstanceTaskType, TofuInstanceTaskStatus
class TofuOperationType(Enum):
    """Operation names used by the Terraform/OpenTofu backend lock protocol.

    See https://github.com/opentofu/opentofu/blob/main/internal/backend/operation_type.go
    """

    INVALID = "OperationTypeInvalid"
    REFRESH = "OperationTypeRefresh"
    PLAN = "OperationTypePlan"
    APPLY = "OperationTypeApply"
class TofuInstanceState(CustomModel):
    """Opaque Terraform state document; all keys are accepted as-is."""

    # TODO: Do better
    model_config = ConfigDict(extra="allow")
class TofuInstanceStateLock(CustomModel):
    """Lock document sent by the Terraform HTTP backend (extra keys kept).

    Field names are capitalized to match the backend's JSON wire format.
    """

    model_config = ConfigDict(extra="allow")

    ID: UUID
    Operation: TofuOperationType
    Info: str
    Who: str
    Version: str
    Created: str
    Path: str
class TofuInstanceSummary(CustomModel, IdMixin, TimestampMixin):
    """Condensed instance representation used by the list endpoint."""

    status: TofuInstanceStatus
    status_changed_at: CustomDatetime
    drift_checked_at: CustomDatetime | None
class TofuInstanceStatusChange(CustomModel):
    """One status transition, as embedded in TofuInstanceDetail."""

    instance_task_id: int
    timestamp: datetime
    old_status: TofuInstanceStatus
    new_status: TofuInstanceStatus
class TofuInstanceTask(CustomModel, TimestampMixin):
    """One queued/executed task, as embedded in TofuInstanceDetail."""

    id: int
    task: TofuInstanceTaskType
    status: TofuInstanceTaskStatus
    start_time: datetime | None
    end_time: datetime | None
class TofuInstanceDetail(CustomModel, IdMixin, TimestampMixin):
    """Full instance view including configuration, outputs, tasks and history."""

    status: TofuInstanceStatus
    configuration: dict[str, Any]
    outputs: dict[str, Any] | None
    plan: dict[str, Any] | None
    status_changed_at: CustomDatetime
    drift_checked_at: CustomDatetime | None
    state_lock: TofuInstanceStateLock | None
    tasks: list[TofuInstanceTask]
    status_changes: list[TofuInstanceStatusChange]
class TofuInstanceCreate(CustomModel):
    """Request body for creating an instance."""

    configuration: dict[str, Any]
    # Optional cleartext state password; hashed before storage.
    password: str | None = None
class TofuInstanceUpdate(CustomModel):
    """Request body for updating an instance; both fields are optional."""

    configuration: dict[str, Any] | None = None
    password: str | None = None

93
src/tofu/security.py Normal file
View file

@ -0,0 +1,93 @@
from datetime import datetime, timezone
from typing import Annotated
import bcrypt
from fastapi import Depends
from fastapi.security import (
HTTPBasic,
HTTPBasicCredentials,
HTTPBearer,
HTTPAuthorizationCredentials,
)
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from starlette.requests import Request
from src.config import settings
from src.database import DbSession
from src.tofu.exceptions import TofuForbiddenException, TofuUnauthorizedException
from src.tofu.models import TofuBruteForce
from src.utils import generate_random_alphanum
# HTTP Basic is what the Terraform HTTP state backend speaks: the username is
# ignored and the password is checked against the instance's state password.
basic = HTTPBasic()
Credentials = Annotated[HTTPBasicCredentials, Depends(basic)]
async def block_host_brute_force(db: AsyncSession, host: str) -> None:
    """Record a brute-force block for *host* (expiry set by the model default)."""
    entry = TofuBruteForce(host=host)
    db.add(entry)
    await db.commit()
async def is_blocked_brute_force(request: Request, db: DbSession) -> None:
    """FastAPI dependency that raises 403 while the client host is blocked.

    Expired blocks are deleted and the check re-runs in case further block
    rows exist for the same host.
    """
    query = select(TofuBruteForce).where(TofuBruteForce.host == request.client.host)
    row = (await db.execute(query)).first()
    block: TofuBruteForce | None = row[0] if row else None
    if block:
        # NOTE(review): block.expiry must be timezone-aware for this
        # comparison with an aware "now" — confirm the column is timestamptz.
        if block.expiry < datetime.now(tz=timezone.utc):
            # Bug fix: AsyncSession.delete is a coroutine and was previously
            # called without await, so expired blocks were never removed.
            await db.delete(block)
            await db.commit()
            return await is_blocked_brute_force(
                request, db
            )  # Just in case there's another block in the table
        raise TofuForbiddenException
    return None
def generate_password() -> str:
    """Return a fresh 40-character alphanumeric state-backend password."""
    return generate_random_alphanum(40)
def generate_password_hash(password_str: str) -> bytes:
    """Hash *password_str* with bcrypt using a freshly generated salt."""
    return bcrypt.hashpw(password_str.encode("utf-8"), bcrypt.gensalt())
def validate_password(password_str: str | None, password_hash: bytes | None) -> bool:
    """Check a cleartext password against a stored bcrypt hash.

    Returns False when either side is missing (no state password stored, or
    no password supplied) — previously a None password_str crashed on
    .encode() instead of failing authentication cleanly.
    """
    if password_hash is None or password_str is None:
        return False
    password = password_str.encode("utf-8")
    return bcrypt.checkpw(password, password_hash)
async def get_password(
    db: DbSession, credentials: Credentials, request: Request, _: BruteForceProtection
) -> str:
    """FastAPI dependency extracting the HTTP Basic password for state auth.

    Records a brute-force block and raises 401 when no password is supplied.
    Made async (FastAPI supports both) so the block is actually awaited —
    previously the coroutine was created in a sync function and never ran.
    """
    if not credentials or not credentials.password:
        await block_host_brute_force(db, request.client.host)
        raise TofuUnauthorizedException
    return credentials.password
# Dependency yielding the validated HTTP Basic password for state endpoints.
Password = Annotated[str, Depends(get_password)]

# Bearer scheme used for the management API key.
bearer = HTTPBearer()
async def api_key(
    db: DbSession,
    request: Request,
    _: BruteForceProtection,
    credentials: HTTPAuthorizationCredentials = Depends(bearer),
) -> None:
    """FastAPI dependency validating the bearer token against settings.API_KEY.

    Records a brute-force block and raises 401 on mismatch.
    """
    api_key = credentials.credentials
    # Security fix: the presented key was previously printed to stdout,
    # leaking credentials into process logs.
    if api_key != settings.API_KEY:
        await block_host_brute_force(db, request.client.host)
        raise TofuUnauthorizedException


APIKey = Annotated[None, Depends(api_key)]

80
src/tofu/service.py Normal file
View file

@ -0,0 +1,80 @@
from typing import Any
from sqlalchemy.ext.asyncio import AsyncSession
from starlette.background import BackgroundTask
from starlette.exceptions import HTTPException
from src.tofu.models import (
TofuInstance,
TofuInstanceTask,
TofuInstanceTaskType,
TofuInstanceStatus,
update_tofu_instance_status,
)
from src.tofu.tasks import process_tasks
async def create_tofu_instance(
    configuration: dict[str, Any],
    db: AsyncSession,
    *,
    commit: bool = False,
    create_task: bool = True
) -> tuple[int, BackgroundTask | None]:
    """Create a TofuInstance row and optionally queue its DEPLOY task.

    Returns the new instance id and, when a task was created, a
    BackgroundTask that runs the task processor (else None).
    """
    instance = TofuInstance(configuration=configuration)
    db.add(instance)
    await db.flush()  # populate instance.id
    if create_task:
        task = TofuInstanceTask(instance_id=instance.id, task=TofuInstanceTaskType.DEPLOY)
        db.add(task)
    if commit:
        await db.commit()
    # The conditional binds tighter than the comma: the second element is
    # BackgroundTask(process_tasks) when a task was created, otherwise None.
    return instance.id, BackgroundTask(process_tasks) if create_task else None
async def create_empty_tofu_instance(db: AsyncSession) -> int:
    """Create an instance with an empty configuration and no deploy task."""
    instance_id, _ = await create_tofu_instance({}, db, create_task=False)
    return instance_id
async def update_tofu_instance(
    instance: TofuInstance,
    configuration: dict[str, Any],
    db: AsyncSession,
    *,
    commit: bool = False,
    allow_pending: bool = False
) -> BackgroundTask:
    """Replace an instance's configuration and queue a DEPLOY task.

    Only ACTIVE/DRIFTED instances may be updated (plus PENDING when
    allow_pending). Returns a BackgroundTask running the task processor.
    """
    allowed_status = [TofuInstanceStatus.ACTIVE, TofuInstanceStatus.DRIFTED]
    if allow_pending:
        allowed_status.append(TofuInstanceStatus.PENDING)
    if instance.status not in allowed_status:
        raise HTTPException(
            status_code=412, detail="Updates only allowed for active instances"
        )
    instance.configuration = configuration
    task = TofuInstanceTask(instance_id=instance.id, task=TofuInstanceTaskType.DEPLOY)
    db.add(task)
    await db.flush()  # populate task.id for the status-change audit row
    update_tofu_instance_status(db, instance, task.id, TofuInstanceStatus.PENDING)
    if commit:
        await db.commit()
    return BackgroundTask(process_tasks)
async def destroy_tofu_instance(
    instance: TofuInstance, db: AsyncSession, *, commit: bool = False
) -> BackgroundTask:
    """Queue a DESTROY task for an ACTIVE/DRIFTED instance.

    NOTE(review): the router's delete endpoint additionally allows FAILED
    instances to be destroyed — confirm whether this helper should too.
    """
    if instance.status not in [TofuInstanceStatus.ACTIVE, TofuInstanceStatus.DRIFTED]:
        raise HTTPException(
            status_code=412,
            detail="Instance cannot be destroyed currently as it is pending update",
        )
    task = TofuInstanceTask(instance_id=instance.id, task=TofuInstanceTaskType.DESTROY)
    db.add(task)
    await db.flush()  # populate task.id for the status-change audit row
    update_tofu_instance_status(db, instance, task.id, TofuInstanceStatus.PENDING_DESTROY)
    if commit:
        await db.commit()
    return BackgroundTask(process_tasks)

86
src/tofu/tasks.py Normal file
View file

@ -0,0 +1,86 @@
from datetime import datetime, timezone
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from src.database import get_db_session
from src.tofu.client import TofuManager
from src.tofu.exceptions import TofuExecutionError
from src.tofu.models import (
TofuInstanceStatus,
TofuInstanceTask,
TofuInstanceTaskType,
TofuInstanceTaskStatus,
update_tofu_instance_status,
)
from src.utils import repeat_every
async def deploy_tofu_instance(db: AsyncSession, task: TofuInstanceTask) -> None:
    """Run init/apply/output for *task* and mark the instance ACTIVE.

    On TofuExecutionError the instance is marked FAILED and the error is
    re-raised so the task processor can record the task failure.
    """
    try:
        async with TofuManager(db, task) as tofu:
            await tofu.init()
            await tofu.apply()
            await tofu.output()
        update_tofu_instance_status(db, task.instance, task.id, TofuInstanceStatus.ACTIVE)
        task.instance.drift_checked_at = task.end_time = datetime.now(tz=timezone.utc)
    except TofuExecutionError as exc:
        update_tofu_instance_status(db, task.instance, task.id, TofuInstanceStatus.FAILED)
        raise exc
async def destroy_tofu_instance(db: AsyncSession, task: TofuInstanceTask) -> None:
    """Run init/destroy for *task* and mark the instance DESTROYED.

    On TofuExecutionError the instance is marked FAILED_DESTROY and the
    error is re-raised so the task processor can record the task failure.
    """
    try:
        async with TofuManager(db, task) as tofu:
            await tofu.init()
            await tofu.destroy()
        update_tofu_instance_status(
            db, task.instance, task.id, TofuInstanceStatus.DESTROYED
        )
        task.instance.outputs = {}
        task.instance.drift_checked_at = task.end_time = datetime.now(tz=timezone.utc)
    except TofuExecutionError as exc:
        update_tofu_instance_status(
            db, task.instance, task.id, TofuInstanceStatus.FAILED_DESTROY
        )
        raise exc
async def process_tasks() -> None:
async with get_db_session() as db:
while True:
query = (
select(TofuInstanceTask)
.with_for_update(skip_locked=True)
.options(selectinload(TofuInstanceTask.instance))
.filter(TofuInstanceTask.status == TofuInstanceTaskStatus.PENDING)
.limit(1)
)
row = (await db.execute(query)).first()
if not row:
break
task = row[0]
task.status = TofuInstanceTaskStatus.RUNNING
task.start_time = datetime.now(tz=timezone.utc)
await db.commit() # release FOR UPDATE lock
try:
match task.task:
case TofuInstanceTaskType.DEPLOY:
await deploy_tofu_instance(db, task)
case TofuInstanceTaskType.DESTROY:
await destroy_tofu_instance(db, task)
case TofuInstanceTaskType.CHECK_DRIFT:
print("NOT IMPLEMMENTED") # TODO: Implement
task.status = TofuInstanceTaskStatus.COMPLETED
task.end_time = datetime.now(tz=timezone.utc)
except TofuExecutionError:
task.status = TofuInstanceTaskStatus.FAILED
task.end_time = datetime.now(tz=timezone.utc)
finally:
await db.commit()
@repeat_every(seconds=5, wait_first=0)
async def periodic_tofu_process_tasks() -> None:
    """Poll for pending OpenTofu tasks every five seconds."""
    await process_tasks()

199
src/utils.py Normal file
View file

@ -0,0 +1,199 @@
import asyncio
import logging
import random
import string
from datetime import datetime, timedelta, timezone
from functools import wraps
from traceback import format_exception
from typing import Callable, Coroutine, Any
from starlette.concurrency import run_in_threadpool
logger = logging.getLogger(__name__)

# Character pools for random token generation.
ALPHA_NUM = string.ascii_letters + string.digits
LOWER_NUM = string.ascii_lowercase + string.digits

# Common subdomain labels used when generating plausible random subdomains.
TOP_SUBDOMAINS = [
    "admin",
    "api",
    "app",
    "apps",
    "autodiscover",
    "baidu",
    "bbs",
    "beta",
    "blog",
    "cdn",
    "citrix",
    "cloud",
    "demo",
    "dev",
    "email",
    "en",
    "exchange",
    "forum",
    "ftp",
    "gateway",
    "gov",
    "gw",
    "home",
    "host",
    "images",
    "img",
    "info",
    "intranet",
    "login",
    "m",
    "mail",
    "mail1",
    "mail2",
    "mail3",
    "media",
    "mobile",
    "news",
    "office",
    "owa",
    "portal",
    "remote",
    "secure",
    "server",
    "server1",
    "shop",
    "ssl",
    "stage",
    "staging",
    "start",
    "static",
    "store",
    "support",
    "test",
    "web",
    "webmail",
    "wiki",
    "www1",
    "www2",
]

# Callable type aliases used by repeat_every and its helpers.
NoArgsNoReturnFuncT = Callable[[], None]
NoArgsNoReturnAsyncFuncT = Callable[[], Coroutine[Any, Any, None]]
ExcArgNoReturnFuncT = Callable[[Exception], None]
ExcArgNoReturnAsyncFuncT = Callable[[Exception], Coroutine[Any, Any, None]]
NoArgsNoReturnAnyFuncT = NoArgsNoReturnFuncT | NoArgsNoReturnAsyncFuncT
ExcArgNoReturnAnyFuncT = ExcArgNoReturnFuncT | ExcArgNoReturnAsyncFuncT
NoArgsNoReturnDecorator = Callable[[NoArgsNoReturnAnyFuncT], NoArgsNoReturnAsyncFuncT]
async def _handle_repeat_func(func: NoArgsNoReturnAnyFuncT) -> None:
    """Invoke *func*, awaiting coroutines and off-loading sync callables."""
    if not asyncio.iscoroutinefunction(func):
        await run_in_threadpool(func)
    else:
        await func()
async def _handle_repeat_exc(
    exc: Exception, on_exception: ExcArgNoReturnAnyFuncT | None
) -> None:
    """Pass *exc* to the optional callback, sync or async."""
    if on_exception is None:
        return
    if asyncio.iscoroutinefunction(on_exception):
        await on_exception(exc)
    else:
        await run_in_threadpool(on_exception, exc)
def repeat_every(
    *,
    seconds: float,
    wait_first: float | None = None,
    max_repetitions: int | None = None,
    on_complete: NoArgsNoReturnAnyFuncT | None = None,
    on_exception: ExcArgNoReturnAnyFuncT | None = None,
) -> NoArgsNoReturnDecorator:
    """
    This function returns a decorator that modifies a function so it is periodically re-executed after its first call.

    The function it decorates should accept no arguments and return nothing. If necessary, this can be accomplished
    by using `functools.partial` or otherwise wrapping the target function prior to decoration.

    Parameters
    ----------
    seconds: float
        The number of seconds to wait between repeated calls
    wait_first: float (default None)
        If not None, the function will wait for the given duration before the first call
    max_repetitions: Optional[int] (default None)
        The maximum number of times to call the repeated function. If `None`, the function is repeated forever.
    on_complete: Optional[Callable[[], None]] (default None)
        A function to call after the final repetition of the decorated function.
    on_exception: Optional[Callable[[Exception], None]] (default None)
        A function to call when an exception is raised by the decorated function.
    """

    def decorator(func: NoArgsNoReturnAnyFuncT) -> NoArgsNoReturnAsyncFuncT:
        """
        Converts the decorated function into a repeated, periodically-called version of itself.
        """

        @wraps(func)
        async def wrapped() -> None:
            async def loop() -> None:
                if wait_first is not None:
                    await asyncio.sleep(wait_first)
                repetitions = 0
                while max_repetitions is None or repetitions < max_repetitions:
                    try:
                        await _handle_repeat_func(func)
                    except Exception as exc:
                        # Log the full traceback but keep the loop alive.
                        formatted_exception = "".join(
                            format_exception(type(exc), exc, exc.__traceback__)
                        )
                        logger.error(formatted_exception)
                        await _handle_repeat_exc(exc, on_exception)
                    repetitions += 1
                    await asyncio.sleep(seconds)
                if on_complete:
                    await _handle_repeat_func(on_complete)

            # Fire-and-forget: the loop is scheduled on the running event loop.
            asyncio.ensure_future(loop())

        return wrapped

    return decorator
def perishable_cache(expires: int, minimum: int):
    """Decorator caching an async function's result for *expires* seconds.

    When the wrapped function is called with retry=True the cached value is
    only reused while younger than *minimum* seconds, letting callers force
    an earlier refresh.

    NOTE(review): the cache key ignores *args/**kwargs — all call signatures
    share a single cached result; confirm callers only ever use one signature.
    """

    def decorator(func):
        cache_data: dict = {}  # holds "cached_result" and "cached_time"

        @wraps(func)
        async def wrapper(*args, retry: bool = False, **kwargs):
            timeout = minimum if retry else expires
            if "cached_time" in cache_data:
                if datetime.now(tz=timezone.utc) - cache_data["cached_time"] < timedelta(
                    seconds=timeout
                ):
                    return cache_data["cached_result"]
            result = await func(*args, retry=retry, **kwargs)
            cache_data["cached_result"] = result
            cache_data["cached_time"] = datetime.now(tz=timezone.utc)
            return result

        return wrapper

    return decorator
def generate_random_alphanum(length: int = 20, prefix: str = "") -> str:
    """Return *prefix* padded with random [A-Za-z0-9] chars to *length* total."""
    suffix = "".join(random.choices(ALPHA_NUM, k=length - len(prefix)))
    return prefix + suffix
def generate_random_lowernum(length: int = 20, prefix: str = "") -> str:
    """Return *prefix* padded with random [a-z0-9] chars to *length* total."""
    suffix = "".join(random.choices(LOWER_NUM, k=length - len(prefix)))
    return prefix + suffix
def generate_random_subdomain() -> str:
    """Pick a random entry from the list of common subdomain labels."""
    return random.choice(TOP_SUBDOMAINS)

View file

@ -0,0 +1,136 @@
from typing import AsyncGenerator
import pytest
from httpx import AsyncClient, ASGITransport
from src.auth.service import get_admin
from src.main import app
# Credentials used for HTTP basic auth against the app under test.
PASSWORD="password123"
AUTH=("tofu", PASSWORD)

# Minimal OpenTofu configuration that only uses the hashicorp/random
# provider, so the tests need no real cloud credentials.
# suppress_deployment prevents the API from actually running a deployment.
CREATE_INSTANCE_PAYLOAD = {
    "suppress_deployment": True,
    "password": PASSWORD,
    "configuration": {
        "terraform": {"required_providers": {"random": {"source": "hashicorp/random", "version": ">= 3.0.0"}}},
        "provider": {"random": {}},
        "variable": {
            "password_length": {"description": "Length of the random password", "type": "number", "default": 16}
        },
        "resource": {"random_password": {"example": {"length": "${var.password_length}", "special": True}}},
        "output": {"generated_password": {"value": "${random_password.example.result}", "sensitive": True}},
    },
}
# State payloads: the update payload deliberately differs from the initial
# one so the update round-trip assertions can actually detect a failed
# overwrite (previously both were identical, making the check vacuous).
INITIAL_STATE_PAYLOAD = {"key": "value"}
UPDATE_STATE_PAYLOAD = {"key": "updated-value"}

# Two lock payloads with distinct IDs, in the shape the OpenTofu HTTP state
# backend sends on LOCK/UNLOCK requests.
STATE_LOCK_PAYLOAD_1 = {
    "ID": "bd812a7e-2297-4b70-acc9-d51015a9172c",
    "Operation": "OperationTypeInvalid",
    "Info": "",
    "Who": "",
    "Version": "",
    "Created": "1990-01-01T12:00:00Z",
    "Path": "",
}
STATE_LOCK_PAYLOAD_2 = {
    "ID": "ab0eb55f-2f00-4e02-9bf5-e2c6658ab8af",
    "Operation": "OperationTypeInvalid",
    "Info": "",
    "Who": "",
    "Version": "",
    "Created": "1990-01-01T12:00:00Z",
    "Path": "",
}
@pytest.fixture(scope="module")
async def client() -> AsyncGenerator[AsyncClient, None]:
    """HTTP client wired to the ASGI app with the admin check bypassed.

    Fix: the get_admin dependency override was previously installed but
    never removed, leaking the admin bypass into any test module that runs
    afterwards; it is now cleaned up when the fixture is torn down.
    """
    # NOTE(review): the ASGI spec models the client port as an int — the
    # string "9000" works with httpx but consider using 9000.
    host, port = "127.0.0.1", "9000"

    async def override_get_admin():
        # Pretend there is no authenticated admin requirement.
        return None

    app.dependency_overrides[get_admin] = override_get_admin
    try:
        async with AsyncClient(
            transport=ASGITransport(app=app, client=(host, port)),
            base_url="http://test",
            auth=AUTH,
        ) as client:
            yield client
    finally:
        # Don't leak the override into other test modules.
        app.dependency_overrides.pop(get_admin, None)
@pytest.fixture
async def instance_id(client: AsyncClient):
    """Create a fresh (non-deployed) tofu instance and return its id."""
    created = await client.post("/api/v1/tofu/instances", json=CREATE_INSTANCE_PAYLOAD)
    assert created.status_code == 202
    body = created.json()
    return body["id"]
@pytest.mark.anyio
async def test_state_no_locking(client: AsyncClient, instance_id: int):
    """Full state lifecycle (create, read, update, delete) without locks."""
    state_url = f"/api/v1/tofu/instances/{instance_id}/state"

    # A freshly created instance has no state.
    assert (await client.get(state_url)).status_code == 404

    # Create the initial state and read it back.
    assert (await client.post(state_url, json=INITIAL_STATE_PAYLOAD)).status_code == 200
    read_back = await client.get(state_url)
    assert read_back.status_code == 200
    assert read_back.json() == INITIAL_STATE_PAYLOAD

    # Overwrite the state and verify the new value is returned.
    assert (await client.post(state_url, json=UPDATE_STATE_PAYLOAD)).status_code == 200
    read_back = await client.get(state_url)
    assert read_back.status_code == 200
    assert read_back.json() == UPDATE_STATE_PAYLOAD

    # Purge the state; subsequent reads report it gone.
    assert (await client.delete(state_url)).status_code == 200
    assert (await client.get(state_url)).status_code == 404
@pytest.mark.anyio
async def test_state_double_locking(client: AsyncClient, instance_id: int):
    """A second LOCK is refused, and UNLOCK requires the holder's ID."""
    state_url = f"/api/v1/tofu/instances/{instance_id}/state"

    # First lock succeeds.
    first = await client.request("LOCK", state_url, json=STATE_LOCK_PAYLOAD_1)
    assert first.status_code == 200

    # A competing lock is rejected with 423 plus the current holder's info.
    second = await client.request("LOCK", state_url, json=STATE_LOCK_PAYLOAD_2)
    assert second.status_code == 423
    assert second.json() == STATE_LOCK_PAYLOAD_1

    # Unlocking with the wrong ID is also rejected.
    wrong = await client.request("UNLOCK", state_url + "?ID=" + STATE_LOCK_PAYLOAD_2["ID"])
    assert wrong.status_code == 423
    assert wrong.json() == STATE_LOCK_PAYLOAD_1

    # Unlocking with the holder's own ID releases the lock.
    right = await client.request("UNLOCK", state_url + "?ID=" + STATE_LOCK_PAYLOAD_1["ID"])
    assert right.status_code == 200
@pytest.mark.anyio
async def test_state_locked_update(client: AsyncClient, instance_id: int):
    """State writes and deletes honour the lock holder's ID."""
    state_url = f"/api/v1/tofu/instances/{instance_id}/state"
    as_holder = state_url + "?ID=" + STATE_LOCK_PAYLOAD_1["ID"]
    as_other = state_url + "?ID=" + STATE_LOCK_PAYLOAD_2["ID"]

    # Take the lock as payload 1.
    assert (await client.request("LOCK", state_url, json=STATE_LOCK_PAYLOAD_1)).status_code == 200

    # Writes with the holder's ID succeed.
    resp = await client.post(as_holder, json=INITIAL_STATE_PAYLOAD)
    assert resp.status_code == 200

    # Writes with any other ID are refused while the lock is held.
    resp = await client.post(as_other, json=UPDATE_STATE_PAYLOAD)
    assert resp.status_code == 423
    assert resp.json() == STATE_LOCK_PAYLOAD_1

    resp = await client.post(as_holder, json=UPDATE_STATE_PAYLOAD)
    assert resp.status_code == 200

    # Deletes follow the same rule.
    resp = await client.delete(as_other)
    assert resp.status_code == 423
    assert resp.json() == STATE_LOCK_PAYLOAD_1

    resp = await client.delete(as_holder)
    assert resp.status_code == 200
@pytest.mark.anyio
async def test_state_allow_force_unlock(client: AsyncClient, instance_id: int):
    """UNLOCK without an ID query parameter (force-unlock) releases the lock."""
    state_url = f"/api/v1/tofu/instances/{instance_id}/state"

    locked = await client.request("LOCK", state_url, json=STATE_LOCK_PAYLOAD_1)
    assert locked.status_code == 200

    # force-unlock doesn't include the ID when calling the unlock endpoint
    released = await client.request("UNLOCK", state_url, json=STATE_LOCK_PAYLOAD_1)
    assert released.status_code == 200