feat: initial commit

This commit is contained in:
Iain Learmonth 2025-12-14 17:47:51 +00:00
commit 075939142f
63 changed files with 9494 additions and 0 deletions

70
src/API.md Normal file
View file

@ -0,0 +1,70 @@
:::warning[Under construction]
This documentation is a work in progress. Please [get in touch](/contact) with us if you have any questions.
:::
Learn how to use the jasima.app API.
Everything that can be done within the portal can also be achieved using our API, documented in this section.
## Authentication
We use an API key for authentication.
This should be passed to the API in an `Authorization` header:
```http request
Authorization: Bearer <token>
```
## Data Format
The API uses JSON as the default format for both requests and responses, ensuring consistency and simplicity in data
handling:
* For GET requests, the API expects parameters to be passed via query strings.
* For other HTTP methods (e.g., POST, PUT), the request body (if required) should be formatted as JSON.
There is no need to explicitly specify content encoding in the headers, as the API assumes JSON by default for all
applicable requests and responses.
## Pagination
The API supports pagination to manage large datasets efficiently. Use the `page` and `size` query parameters to control the data returned in your requests.
Parameters:
* `page`: The page number to retrieve.
* `size`: The number of items to include per page.
Example request:
```http request
GET https://api.jasima.app/api/v1/widgets?page=1&size=10
```
Example response:
```json
{
"items": [
],
"total": 0,
"page": 1,
"size": 10,
"pages": 0
}
```
Bear in mind the following:
* The `page` field in the response indicates the current page being viewed.
* The `total` field shows the total number of items available.
* The `pages` field indicates the total number of pages, and can be used to determine whether there are additional pages
to fetch.
Use this structure to efficiently navigate through large datasets while ensuring optimal performance.
## Errors
:::info[TODO]
Document the common status codes, and the validation error response format.
:::
## Schemas

0
src/__init__.py Normal file
View file

56
src/config.py Normal file
View file

@ -0,0 +1,56 @@
from os.path import abspath, dirname, join
from typing import Any
from pydantic import PostgresDsn
from pydantic_settings import BaseSettings, SettingsConfigDict
from src.constants import Environment
# Load the API landing-page markdown shipped alongside this module; it is
# embedded into the OpenAPI description below.
API_README_PATH = abspath(join(dirname(__file__), "API.md"))

with open(API_README_PATH, "r", encoding="utf-8") as f:
    API_README_MD = f.read()
class CustomBaseSettings(BaseSettings):
    """Base for all settings classes: reads `.env`, ignores unknown variables."""

    model_config = SettingsConfigDict(
        env_file=".env", env_file_encoding="utf-8", extra="ignore"
    )
class Config(CustomBaseSettings):
    """Application-level configuration loaded from the environment / .env."""

    DATABASE_URL: PostgresDsn  # Used for alembic migrations
    DATABASE_ASYNC_URL: PostgresDsn  # async-driver DSN used by the running app
    DATABASE_POOL_SIZE: int = 16
    DATABASE_POOL_TTL: int = 60 * 20  # 20 minutes
    DATABASE_POOL_PRE_PING: bool = True

    ENVIRONMENT: Environment = Environment.PRODUCTION

    CORS_ORIGINS: list[str] = ["*"]
    CORS_ORIGINS_REGEX: str | None = None
    CORS_HEADERS: list[str] = ["*"]

    API_KEY: str  # no default: startup fails if unset

    APP_VERSION: str = "0.0.0"
settings = Config()

# Tag metadata for the generated OpenAPI docs.
tags_metadata = [
    {
        "name": "OpenTofu",
        "description": "Operations for managing OpenTofu deployments.",
    },
]

# Keyword arguments passed to the FastAPI() constructor in src/main.py.
app_configs: dict[str, Any] = {
    "title": "pali lili API",
    "version": settings.APP_VERSION,
    "description": API_README_MD,
    "openapi_tags": tags_metadata,
}

if not settings.ENVIRONMENT.is_debug:
    app_configs["openapi_url"] = None  # hide docs

32
src/constants.py Normal file
View file

@ -0,0 +1,32 @@
from enum import Enum
# SQLAlchemy naming convention so constraints and indexes get deterministic
# names (important for stable alembic autogenerate diffs).
DB_NAMING_CONVENTION = {
    "ix": "%(column_0_label)s_idx",
    "uq": "%(table_name)s_%(column_0_name)s_key",
    "ck": "%(table_name)s_%(constraint_name)s_check",
    "fk": "%(table_name)s_%(column_0_name)s_fkey",
    "pk": "%(table_name)s_pkey",
}
class Environment(str, Enum):
    """Deployment environment the application runs in.

    The helper properties previously mixed three comparison styles
    (`in`, `is`, `==`); they now consistently use membership / identity
    against the enum class and all declare a bool return type.
    """

    LOCAL = "LOCAL"
    TESTING = "TESTING"
    STAGING = "STAGING"
    PRODUCTION = "PRODUCTION"

    @property
    def is_debug(self) -> bool:
        # Every non-production environment is considered debug.
        return self in (Environment.LOCAL, Environment.STAGING, Environment.TESTING)

    @property
    def is_local(self) -> bool:
        return self is Environment.LOCAL

    @property
    def is_testing(self) -> bool:
        return self is Environment.TESTING

    @property
    def is_deployed(self) -> bool:
        # Environments that run on real infrastructure.
        return self in (Environment.STAGING, Environment.PRODUCTION)

57
src/database.py Normal file
View file

@ -0,0 +1,57 @@
import contextlib
from typing import AsyncIterator, AsyncGenerator, Annotated
from fastapi import Depends
from sqlalchemy import (
MetaData,
)
from sqlalchemy.ext.asyncio import (
AsyncConnection,
create_async_engine,
AsyncSession,
async_sessionmaker,
)
from src.config import settings
from src.constants import DB_NAMING_CONVENTION
# Async engine built from the async DSN; pool behaviour comes from Config.
DATABASE_URL = str(settings.DATABASE_ASYNC_URL)

engine = create_async_engine(
    DATABASE_URL,
    pool_size=settings.DATABASE_POOL_SIZE,
    pool_recycle=settings.DATABASE_POOL_TTL,
    pool_pre_ping=settings.DATABASE_POOL_PRE_PING,
)

# Shared metadata with deterministic constraint names (DB_NAMING_CONVENTION).
metadata = MetaData(naming_convention=DB_NAMING_CONVENTION)

sessionmaker = async_sessionmaker(autocommit=False, expire_on_commit=False, bind=engine)
@contextlib.asynccontextmanager
async def get_db_connection() -> AsyncIterator[AsyncConnection]:
    """Yield a transactional connection; roll back and re-raise on error."""
    async with engine.begin() as conn:
        try:
            yield conn
        except Exception:
            # Explicit rollback before propagating so the transaction never
            # commits partially-applied work.
            await conn.rollback()
            raise
@contextlib.asynccontextmanager
async def get_db_session() -> AsyncIterator[AsyncSession]:
    """Yield a session; roll back on error, always close afterwards."""
    db_session = sessionmaker()
    try:
        yield db_session
    except Exception:
        await db_session.rollback()
        raise
    finally:
        # Return the connection to the pool whether or not the body raised.
        await db_session.close()
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency that yields a request-scoped session."""
    async with get_db_session() as db:
        yield db


# Annotated dependency type used by route handlers.
DbSession = Annotated[AsyncSession, Depends(get_db)]

33
src/exceptions.py Normal file
View file

@ -0,0 +1,33 @@
from typing import Any
from fastapi import HTTPException, status
class DetailedHTTPException(HTTPException):
    """Base HTTP error carrying class-level status code and detail message.

    Subclasses override STATUS_CODE / DETAIL; extra keyword arguments
    (e.g. ``headers``) are forwarded to ``HTTPException``.
    """

    STATUS_CODE = status.HTTP_500_INTERNAL_SERVER_ERROR
    DETAIL = "Server error"

    # **kwargs values are arbitrary, so the correct annotation is Any
    # (dict[str, Any] would describe each value, not the mapping).
    def __init__(self, **kwargs: Any) -> None:
        super().__init__(status_code=self.STATUS_CODE, detail=self.DETAIL, **kwargs)
class PermissionDenied(DetailedHTTPException):
    """403 response for authenticated but unauthorized requests."""

    STATUS_CODE = status.HTTP_403_FORBIDDEN
    DETAIL = "Permission denied"
class NotFound(DetailedHTTPException):
    """404 response for a missing resource.

    Previously no DETAIL override was provided, so clients received the
    inherited "Server error" message alongside a 404 status — misleading.
    """

    STATUS_CODE = status.HTTP_404_NOT_FOUND
    DETAIL = "Not Found"
class BadRequest(DetailedHTTPException):
    """400 response for malformed or invalid requests."""

    STATUS_CODE = status.HTTP_400_BAD_REQUEST
    DETAIL = "Bad Request"
class NotAuthenticated(DetailedHTTPException):
    """401 response; advertises Bearer auth via the WWW-Authenticate header."""

    STATUS_CODE = status.HTTP_401_UNAUTHORIZED
    DETAIL = "User not authenticated"

    def __init__(self) -> None:
        super().__init__(headers={"WWW-Authenticate": "Bearer"})

44
src/main.py Normal file
View file

@ -0,0 +1,44 @@
from contextlib import asynccontextmanager
from typing import AsyncGenerator
from fastapi import FastAPI
from fastapi_pagination import add_pagination
from starlette.middleware.cors import CORSMiddleware
from src.config import app_configs, settings
from src.tofu.exceptions import TofuStateLocked
from src.tofu.router import router as tofu_router, tofu_state_locked_handler
from src.tofu.tasks import periodic_tofu_process_tasks
@asynccontextmanager
async def lifespan(_application: FastAPI) -> AsyncGenerator:
    """Application lifespan hook."""
    # Startup: kick off periodic OpenTofu task processing.
    # NOTE(review): presumably this schedules background work rather than
    # blocking until completion — confirm against periodic_tofu_process_tasks.
    await periodic_tofu_process_tasks()
    yield
    # Shutdown: nothing to clean up currently.
# Application instance; OpenAPI docs are disabled outside debug environments
# (see app_configs in src/config.py).
app = FastAPI(**app_configs, lifespan=lifespan)
add_pagination(app)

# CORS policy is environment-driven via settings.
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.CORS_ORIGINS,
    allow_origin_regex=settings.CORS_ORIGINS_REGEX,
    allow_credentials=True,
    allow_methods=("GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"),
    allow_headers=settings.CORS_HEADERS,
)

# Render TofuStateLocked as a 423 response (Terraform HTTP backend protocol).
app.add_exception_handler(TofuStateLocked, tofu_state_locked_handler)

app.include_router(
    tofu_router,
    prefix="/api/v1/tofu",
)
@app.get("/healthcheck", include_in_schema=False)
async def healthcheck() -> dict[str, str]:
    """Liveness probe for load balancers/monitoring; always reports ok."""
    return {"status": "ok"}

50
src/models.py Normal file
View file

@ -0,0 +1,50 @@
from datetime import datetime
from typing import Any
from sqlalchemy import JSON, DateTime, func, ForeignKey
from sqlalchemy.orm import (
DeclarativeBase,
Mapped,
mapped_column,
relationship,
declared_attr,
)
from src.database import metadata
class CustomBase(DeclarativeBase):
    """Declarative base: tz-aware datetimes, dict[str, Any] maps to JSON."""

    type_annotation_map = {
        datetime: DateTime(timezone=True),
        dict[str, Any]: JSON,
    }
    # Shared metadata carrying the naming convention (see src.database).
    metadata = metadata
class ActivatedMixin:
    # Soft on/off switch; rows default to active.
    active: Mapped[bool] = mapped_column(default=True)
class DeletedTimestampMixin:
    # Soft-delete marker; NULL means the row is live.
    deleted_at: Mapped[datetime | None] = mapped_column(nullable=True)
class DescriptionMixin:
    # Required free-text description column.
    description: Mapped[str]
class IdMixin:
    # Auto-incrementing integer surrogate primary key.
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
class TimestampMixin:
    # Populated via SQL now() on insert; updated_at refreshed on update.
    created_at: Mapped[datetime] = mapped_column(default=func.now())
    updated_at: Mapped[datetime] = mapped_column(default=func.now(), onupdate=func.now())
class TofuInstanceMixin:
    # Adds an FK column plus a relationship to the owning TofuInstance.
    tofu_instance_id: Mapped[int] = mapped_column(ForeignKey("tofu_instance.id"))

    @declared_attr
    def tofu_instance(cls):
        return relationship("TofuInstance")

91
src/schemas.py Normal file
View file

@ -0,0 +1,91 @@
from datetime import datetime, timezone
from typing import Annotated, Literal
from uuid import UUID
from email_validator.rfc_constants import DOMAIN_NAME_REGEX
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel, ConfigDict, PlainSerializer, RootModel, StringConstraints
def datetime_to_utc_str(dt: datetime) -> str:
    """Serialize *dt* as an ISO-8601 UTC string with a trailing 'Z'.

    Naive datetimes are assumed to already be UTC; aware ones are converted.
    Sub-second precision is dropped by the format string.
    """
    utc = dt.astimezone(timezone.utc) if dt.tzinfo else dt.replace(tzinfo=timezone.utc)
    return utc.strftime("%Y-%m-%dT%H:%M:%SZ")
# Datetime that serializes as an ISO-8601 UTC string ("...Z").
CustomDatetime = Annotated[datetime, PlainSerializer(datetime_to_utc_str, return_type=str)]
class CustomModelMixin:
    """Shared pydantic configuration and helpers for project schemas."""

    model_config = ConfigDict(
        populate_by_name=True,
    )

    def serializable_dict(self, **kwargs):
        """Return a dict which contains only serializable fields."""
        # NOTE(review): **kwargs is currently unused — confirm intended.
        default_dict = self.model_dump()
        return jsonable_encoder(default_dict, custom_encoder={UUID: str})
class CustomModel(BaseModel, CustomModelMixin):
    """Base class for object-shaped schemas."""

    pass
class CustomRootModel(RootModel, CustomModelMixin):
    """Base class for root-value (non-object) schemas."""

    pass
class OkResponse(CustomModel):
    """
    A generic payload to respond with to indicate success where no other information is required or available.

    Pairs well with HTTP status code 200.
    """

    ok: Literal[True]
class AcceptedResponse(CustomModel):
    """
    A generic payload to respond with where a request has been accepted but perhaps has not yet been processed.

    Pairs well with HTTP status code 202 (Accepted).
    """

    id: int
class CreatedResponse(CustomModel):
    """
    A generic payload to respond with where a request has resulted in a thing being created.

    Pairs well with HTTP status code 201 (Created).
    """

    id: int
class ActivatedMixin:
    # Mirrors the model-side ActivatedMixin column.
    active: bool
class DescriptionMixin:
    # Mirrors the model-side DescriptionMixin column.
    description: str
class DeletedTimestampMixin:
    # NOTE(review): non-optional here although the DB column is nullable —
    # presumably only used on schemas for already-deleted rows; confirm.
    deleted_at: CustomDatetime
class IdMixin:
    # Mirrors the model-side integer primary key.
    id: int
class TimestampMixin:
    # Serialized as ISO-8601 UTC strings via CustomDatetime.
    created_at: CustomDatetime
    updated_at: CustomDatetime
# Normalized (trimmed, lower-cased) domain name, validated against the
# email_validator package's domain-name regex.
DomainName = Annotated[
    str, StringConstraints(strip_whitespace=True, to_lower=True, pattern=DOMAIN_NAME_REGEX)
]

0
src/tofu/__init__.py Normal file
View file

165
src/tofu/client.py Normal file
View file

@ -0,0 +1,165 @@
import asyncio
import json
import os
import shutil
import tempfile
from datetime import datetime, timezone
from json import JSONDecodeError
from typing import Any, TYPE_CHECKING
import aiofiles
from sqlalchemy.ext.asyncio import AsyncSession
from src.tofu.config import settings
from src.tofu.exceptions import TofuExecutionError, TofuTypeError
from src.tofu.models import TofuInstanceTask, TofuInstanceTaskLog
from src.tofu.security import generate_password, generate_password_hash
def _convert_python_to_tf(value: Any) -> Any:
"""Convert Python types to Terraform-compatible types."""
if isinstance(value, bool):
return value
elif isinstance(value, (int, float)):
return value
elif isinstance(value, str):
return value
elif isinstance(value, list):
return [_convert_python_to_tf(item) for item in value]
elif isinstance(value, dict):
return {str(k): _convert_python_to_tf(v) for k, v in value.items()}
elif value is None:
return None
else:
raise TofuTypeError(f"Unsupported type for Terraform conversion: {type(value)}")
class TofuManager:
    """Drives OpenTofu CLI runs for a single instance task.

    Use as an async context manager: on exit the instance's one-time state
    password is cleared, and on clean exit any temporary working directory
    is removed (it is kept after an exception to aid debugging).
    """

    def __init__(
        self,
        db: AsyncSession,
        instance_task: TofuInstanceTask,
        working_dir: str | None = None,
    ):
        self.db = db
        self.instance_task = instance_task
        # When no working directory is supplied, a temp dir is created lazily
        # by working_dir() on first use.
        self._is_temp_dir = working_dir is None
        self._working_dir = working_dir
        self.tofu_path = settings.OPENTOFU_PATH

    async def __aenter__(self) -> "TofuManager":
        return self

    async def __aexit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        # Invalidate the per-run state password regardless of outcome.
        self.instance_task.instance.state_password = None
        await self.db.commit()
        # Only clean up temporary directories if there was no exception
        if self._is_temp_dir and self._working_dir and exc_type is None:
            shutil.rmtree(self._working_dir)

    async def _create_config(self) -> None:
        """Write main.tf.json with an HTTP state backend pointing back at this API.

        Also generates a fresh state password for this run; only its hash is
        persisted on the instance.
        """
        config = self.instance_task.instance.configuration
        self.password = generate_password()
        self.instance_task.instance.state_password = generate_password_hash(self.password)
        await self.db.commit()
        if "terraform" not in config:
            config["terraform"] = {}
        config["terraform"]["backend"] = {
            "http": {
                "address": f"http://localhost:8000/api/v1/tofu/instances/{self.instance_task.instance_id}/state",
                "lock_address": f"http://localhost:8000/api/v1/tofu/instances/{self.instance_task.instance_id}/state",
                "unlock_address": f"http://localhost:8000/api/v1/tofu/instances/{self.instance_task.instance_id}/state",
            }
        }
        async with aiofiles.open(await self.config_file(), "w") as f:
            await f.write(json.dumps(config, indent=2))

    async def _process_output_line(self, line: str) -> None:
        """Persist one line of tofu output as a TofuInstanceTaskLog row."""
        try:
            data = json.loads(line)
        except json.decoder.JSONDecodeError:
            # Non-JSON output is wrapped in a synthetic log record so nothing
            # is lost.
            data = {
                "@level": "info",
                "@timestamp": datetime.now(tz=timezone.utc).strftime(
                    "%Y-%m-%dT%H:%M:%S.%f%z"
                ),
                "@module": "manager",
                "@message": line,
                "type": "decode-error",
            }
        # NOTE(review): assumes tofu's "@timestamp" always matches this format
        # exactly — confirm; a mismatch would raise ValueError here.
        log = TofuInstanceTaskLog(
            instance_task_id=self.instance_task.id,
            timestamp=datetime.strptime(data["@timestamp"], "%Y-%m-%dT%H:%M:%S.%f%z"),
            log=data,
        )
        self.db.add(log)
        await self.db.commit()

    async def _run_command(
        self,
        command: str,
        *args: str,
        json_output: bool = False,
        log_output: bool = True,
        return_output: bool = False,
    ) -> str | None:
        """Execute a tofu subcommand, optionally logging and/or returning stdout.

        Raises TofuExecutionError when the subprocess exits non-zero.
        """
        cmd = [self.tofu_path, command] + list(args)
        if json_output:
            cmd.append("-json")
        # NOTE(review): env= replaces the inherited environment entirely (no
        # PATH/HOME); confirm tofu runs correctly with only these variables.
        process = await asyncio.create_subprocess_exec(
            *cmd,
            cwd=await self.working_dir(),
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.DEVNULL,
            env={
                "TF_HTTP_USERNAME": "tofu",
                "TF_HTTP_PASSWORD": self.password,
            },
        )
        if TYPE_CHECKING:
            assert process.stdout is not None
        result = []
        async for line in process.stdout:
            if return_output:
                result.append(line.decode("utf-8"))
            if log_output:
                await self._process_output_line(line.decode("utf-8"))
        await process.wait()
        if process.returncode != 0:
            raise TofuExecutionError(f"Tofu command failed: {process.returncode}")
        return "\n".join(result) if return_output else None

    async def config_file(self) -> str:
        """Path of the generated main.tf.json inside the working directory."""
        return os.path.join(await self.working_dir(), "main.tf.json")

    async def working_dir(self) -> str:
        """Return the working directory, creating a temp one on first use."""
        if self._working_dir:
            return self._working_dir
        self._working_dir = await asyncio.to_thread(tempfile.mkdtemp, prefix="tofu-")
        return self._working_dir

    async def init(self, upgrade: bool = True) -> None:
        """Run `tofu init` (with -upgrade by default)."""
        await self._create_config()
        args = []
        if upgrade:
            args.append("-upgrade")
        await self._run_command("init", *args, json_output=True)

    async def apply(self) -> None:
        """Run `tofu apply -auto-approve`."""
        await self._create_config()
        await self._run_command("apply", "-auto-approve", json_output=True)

    async def destroy(self) -> None:
        """Run `tofu destroy -auto-approve`."""
        await self._create_config()
        await self._run_command("destroy", "-auto-approve", json_output=True)

    async def output(self) -> None:
        """Run `tofu output -json` and store the parsed result on the instance."""
        await self._create_config()
        try:
            outputs = await self._run_command(
                "output", json_output=True, log_output=False, return_output=True
            )
            self.instance_task.instance.outputs = json.loads(outputs)
            await self.db.commit()
        except JSONDecodeError:
            raise TofuExecutionError("Could not parse JSON output")

13
src/tofu/config.py Normal file
View file

@ -0,0 +1,13 @@
from os.path import abspath, dirname, join
from src.config import CustomBaseSettings
class Config(CustomBaseSettings):
    """OpenTofu-specific settings: binary path and modules directory."""

    OPENTOFU_PATH: str = "/usr/bin/tofu"
    OPENTOFU_MODULES_PATH: str = abspath(
        join(dirname(__file__), "..", "..", "tofu")
    )  # no trailing slash from abspath
settings = Config()  # module-level settings instance for the tofu package

39
src/tofu/exceptions.py Normal file
View file

@ -0,0 +1,39 @@
from typing import Any
from starlette import status
from starlette.exceptions import HTTPException
class TofuExecutionError(RuntimeError):
    """An error occurred when executing an OpenTofu command.

    Raised when the tofu subprocess exits non-zero or its output cannot be
    parsed.
    """
class TofuTypeError(TypeError):
    """A value provided used a type that cannot be converted to a supported OpenTofu type.

    Raised during Python-to-Terraform value conversion.
    """
class TofuStateLocked(RuntimeError):
    """A request could not be completed as the state is locked, and the requestor does not hold the lock."""

    def __init__(self, lock: dict[str, Any]) -> None:
        # The lock document is surfaced to the client in a 423 response body.
        self.lock = lock
class TofuForbiddenException(HTTPException):
    """403 response for tofu state endpoints."""

    # NOTE(review): **kwargs are accepted but not forwarded to super() —
    # confirm this is intentional.
    def __init__(
        self,
        **kwargs: Any,
    ) -> None:
        super().__init__(status_code=status.HTTP_403_FORBIDDEN, detail="Forbidden")
class TofuUnauthorizedException(HTTPException):
    """401 response advertising HTTP Basic auth to the tofu state backend client."""

    # NOTE(review): **kwargs are accepted but not forwarded to super() —
    # confirm this is intentional.
    def __init__(
        self,
        **kwargs: Any,
    ) -> None:
        super().__init__(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Unauthorized",
            headers={"WWW-Authenticate": 'Basic realm="jasima"'},
        )

107
src/tofu/models.py Normal file
View file

@ -0,0 +1,107 @@
from datetime import datetime
from enum import Enum
from typing import Any
from sqlalchemy import ForeignKey, func, text
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import Mapped, mapped_column, relationship
from src.models import CustomBase, TimestampMixin, IdMixin, DeletedTimestampMixin
class TofuInstanceStatus(Enum):
    """Lifecycle states of a managed OpenTofu instance."""

    ACTIVE = "ACTIVE"
    DEPLOYING = "DEPLOYING"
    DESTROYED = "DESTROYED"
    DESTROYING = "DESTROYING"
    DRIFTED = "DRIFTED"
    FAILED = "FAILED"
    FAILED_DESTROY = "FAILED_DESTROY"
    PENDING = "PENDING"
    PENDING_DESTROY = "PENDING_DESTROY"
    PENDING_DRIFT_CHECK = "PENDING_DRIFT_CHECK"
class TofuInstance(CustomBase, IdMixin, TimestampMixin, DeletedTimestampMixin):
    """A managed OpenTofu deployment and its persisted state."""

    __tablename__ = "tofu_instance"

    status: Mapped[TofuInstanceStatus] = mapped_column(default=TofuInstanceStatus.PENDING)
    configuration: Mapped[dict[str, Any]]  # main.tf.json source document
    outputs: Mapped[dict[str, Any] | None]  # parsed `tofu output -json`
    plan: Mapped[dict[str, Any] | None]
    state: Mapped[dict[str, Any] | None]  # Terraform HTTP backend state document
    state_password: Mapped[bytes | None]  # hash of per-run state password (src.tofu.security)
    state_lock: Mapped[dict[str, Any] | None]  # current backend lock document, if any
    status_changed_at: Mapped[datetime] = mapped_column(default=func.now())
    drift_checked_at: Mapped[datetime | None]
    tasks = relationship("TofuInstanceTask", back_populates="instance")
    status_changes = relationship("TofuInstanceStatusChange", back_populates="instance")
class TofuInstanceStatusChange(CustomBase, IdMixin):
    """Audit record of a single instance status transition."""

    __tablename__ = "tofu_instance_status_change"

    instance_id: Mapped[int] = mapped_column(ForeignKey("tofu_instance.id"))
    instance_task_id: Mapped[int] = mapped_column(ForeignKey("tofu_instance_task.id"))
    timestamp: Mapped[datetime] = mapped_column(default=func.now())
    old_status: Mapped[TofuInstanceStatus]
    new_status: Mapped[TofuInstanceStatus]
    instance = relationship("TofuInstance", back_populates="status_changes")
class TofuInstanceTaskType(Enum):
    """Kinds of background work performed against an instance."""

    CHECK_DRIFT = "CHECK_DRIFT"
    DEPLOY = "DEPLOY"
    DESTROY = "DESTROY"
class TofuInstanceTaskStatus(Enum):
    """Execution states of a background task."""

    CANCELED = "CANCELED"
    COMPLETED = "COMPLETED"
    FAILED = "FAILED"
    PENDING = "PENDING"
    RUNNING = "RUNNING"
class TofuInstanceTask(CustomBase, IdMixin, TimestampMixin):
    """A queued/running/finished background task for an instance."""

    __tablename__ = "tofu_instance_task"

    instance_id: Mapped[int] = mapped_column(ForeignKey("tofu_instance.id"))
    task: Mapped[TofuInstanceTaskType]
    status: Mapped[TofuInstanceTaskStatus] = mapped_column(
        default=TofuInstanceTaskStatus.PENDING
    )
    start_time: Mapped[datetime | None]
    end_time: Mapped[datetime | None]
    instance = relationship("TofuInstance", back_populates="tasks")
class TofuInstanceTaskLog(CustomBase, IdMixin):
    """One line of tofu output captured for a task, stored as JSON."""

    __tablename__ = "tofu_instance_task_log"

    instance_task_id: Mapped[int] = mapped_column(ForeignKey("tofu_instance_task.id"))
    timestamp: Mapped[datetime] = mapped_column(default=func.now())
    log: Mapped[dict[str, Any]]
class TofuBruteForce(CustomBase, IdMixin, TimestampMixin):
    """A failed-auth record against a host; rows expire one hour after insert."""

    __tablename__ = "tofu_brute_force"

    host: Mapped[str]
    # Expiry computed server-side at insert time.
    expiry: Mapped[datetime] = mapped_column(default=func.now() + text("INTERVAL '1 hour'"))
def update_tofu_instance_status(
    db: AsyncSession, instance: TofuInstance, task_id: int, new_status: TofuInstanceStatus
) -> None:
    """Record a status transition for *instance* and apply it.

    Adds a TofuInstanceStatusChange audit row, updates the instance's status
    and status_changed_at, and soft-deletes the instance when it transitions
    to DESTROYED. The caller is responsible for committing.
    """
    db.add(
        TofuInstanceStatusChange(
            instance_id=instance.id,
            instance_task_id=task_id,
            old_status=instance.status,
            new_status=new_status,
        )
    )
    instance.status = new_status
    instance.status_changed_at = func.now()
    # A destroyed instance is also soft-deleted.
    if new_status == TofuInstanceStatus.DESTROYED:
        instance.deleted_at = func.now()

356
src/tofu/router.py Normal file
View file

@ -0,0 +1,356 @@
import asyncio
import json
from datetime import datetime, timezone, timedelta
from typing import Any, Annotated, AsyncIterator
from fastapi import APIRouter, HTTPException, Request, Query
from fastapi_pagination import Page
from fastapi_pagination.ext.sqlalchemy import apaginate
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from starlette import status
from starlette.responses import JSONResponse, Response, StreamingResponse
from src.database import DbSession
from src.schemas import AcceptedResponse, OkResponse
from src.tofu.exceptions import TofuStateLocked, TofuUnauthorizedException
from src.tofu.models import (
TofuInstance,
TofuInstanceTask,
TofuInstanceTaskType,
TofuInstanceStatus,
TofuInstanceTaskLog,
TofuInstanceTaskStatus,
update_tofu_instance_status,
)
from src.tofu.schemas import (
TofuInstanceDetail,
TofuInstanceCreate,
TofuInstanceSummary,
TofuInstanceUpdate,
TofuInstanceState,
TofuInstanceStateLock,
)
from src.tofu.security import (
Password,
validate_password,
generate_password_hash,
block_host_brute_force,
APIKey,
)
router = APIRouter()

# The Terraform HTTP backend passes the lock ID as the `ID` query parameter.
TofuLockID = Annotated[str | None, Query(alias="ID", description="State lock ID")]
async def tofu_state_locked_handler(request: Request, exc: Exception) -> Response:
    """Render TofuStateLocked as the 423 response the backend protocol expects."""
    if not isinstance(exc, TofuStateLocked):
        # This handler is registered only for TofuStateLocked; anything else
        # reaching here is a wiring bug.
        raise RuntimeError(exc)
    return JSONResponse(status_code=status.HTTP_423_LOCKED, content=exc.lock)
@router.get(
    "/instances",
    response_model=Page[TofuInstanceSummary],
    tags=["OpenTofu"],
)
async def list_tofu_instances(db: DbSession, _: APIKey) -> Page[TofuInstance]:
    """List non-deleted instances, paginated."""
    return await apaginate(
        db, select(TofuInstance).filter(TofuInstance.deleted_at == None)  # noqa: E711
    )
@router.post(
    "/instances",
    response_model=AcceptedResponse,
    status_code=status.HTTP_202_ACCEPTED,
    tags=["OpenTofu"],
)
async def create_tofu_instance(
    instance_data: TofuInstanceCreate, db: DbSession, _: APIKey
) -> JSONResponse:
    """Create an instance and queue its initial DEPLOY task."""
    # Annotation matches TofuInstance.state_password (bytes | None).
    hashed_password: bytes | None = (
        generate_password_hash(instance_data.password) if instance_data.password else None
    )
    instance = TofuInstance(
        configuration=instance_data.configuration, state_password=hashed_password
    )
    db.add(instance)
    await db.flush()  # assigns instance.id for the task FK below
    task = TofuInstanceTask(instance_id=instance.id, task=TofuInstanceTaskType.DEPLOY)
    db.add(task)
    await db.commit()
    await db.refresh(instance)
    return JSONResponse(status_code=status.HTTP_202_ACCEPTED, content={"id": instance.id})
async def _get_tofu_instance(
    db: AsyncSession,
    instance_id: int,
    password: str | None = None,
    host: str | None = None,
    *,
    detail: bool = False,
    password_required: bool = True,
) -> TofuInstance:
    """Fetch an instance, optionally verifying the state password.

    With detail=True the status_changes and tasks collections are eagerly
    loaded. When password_required is set, `host` must be provided (used for
    brute-force tracking) and a failed password check records the host and
    raises 401. Raises 404 when the instance does not exist.
    """
    if detail:
        query = (
            select(TofuInstance)
            .filter(TofuInstance.id == instance_id)
            .options(
                selectinload(TofuInstance.status_changes), selectinload(TofuInstance.tasks)
            )
        )
    else:
        query = select(TofuInstance).filter(TofuInstance.id == instance_id)
    row = (await db.execute(query)).first()
    if not row:
        raise HTTPException(status_code=404, detail="No instance found")
    if password_required:
        if not host:
            raise TofuUnauthorizedException
        # NOTE(review): assumes validate_password copes with password=None —
        # confirm against src.tofu.security.
        if not validate_password(password, row[0].state_password):
            await block_host_brute_force(db, host)
            raise TofuUnauthorizedException
    return row[0]
@router.get(
    "/instances/{instance_id}",
    response_model=TofuInstanceDetail,
    tags=["OpenTofu"],
)
async def get_tofu_instance(instance_id: int, db: DbSession, _: APIKey) -> TofuInstance:
    """Return full instance detail (API-key auth; no state password needed)."""
    return await _get_tofu_instance(db, instance_id, detail=True, password_required=False)
@router.put(
    "/instances/{instance_id}",
    response_model=AcceptedResponse,
    status_code=status.HTTP_202_ACCEPTED,
    response_description="Update accepted (deployment task pending)",
    responses={
        status.HTTP_200_OK: {"description": "Update Successful", "model": AcceptedResponse},
        status.HTTP_404_NOT_FOUND: {"description": "Instance not found"},
        status.HTTP_412_PRECONDITION_FAILED: {
            "description": "Update failed due to current status"
        },
    },
    tags=["OpenTofu"],
)
async def update_tofu_instance(
    instance_id: int, instance_data: TofuInstanceUpdate, db: DbSession, _: APIKey
) -> JSONResponse:
    """Update an instance's configuration and/or state password, queueing a deploy.

    Only instances in ACTIVE or DRIFTED status may be updated (412 otherwise);
    404 when the instance does not exist.
    """
    # Row lock so concurrent updates cannot interleave status transitions.
    query = select(TofuInstance).with_for_update().filter(TofuInstance.id == instance_id)
    row = (await db.execute(query)).first()
    if not row:
        raise HTTPException(status_code=404, detail="Instance not found")
    instance = row[0]
    if instance.status not in [TofuInstanceStatus.ACTIVE, TofuInstanceStatus.DRIFTED]:
        raise HTTPException(
            status_code=412, detail="Updates only allowed for active instances"
        )
    if instance_data.configuration:
        instance.configuration = instance_data.configuration
    task = TofuInstanceTask(instance_id=instance.id, task=TofuInstanceTaskType.DEPLOY)
    db.add(task)
    await db.flush()
    update_tofu_instance_status(db, instance, task.id, TofuInstanceStatus.PENDING)
    if instance_data.password:
        # BUG FIX: the hash must be stored on state_password (the mapped
        # column) — TofuInstance has no `password` attribute, so the previous
        # assignment was silently discarded and password updates never stuck.
        instance.state_password = generate_password_hash(instance_data.password)
    await db.commit()  # release FOR UPDATE lock
    return JSONResponse(status_code=status.HTTP_202_ACCEPTED, content={"id": instance.id})
@router.delete(
    "/instances/{instance_id}",
    status_code=202,
    response_model=AcceptedResponse,
    tags=["OpenTofu"],
)
async def destroy_tofu_instance(instance_id: int, db: DbSession, _: APIKey) -> Response:
    """Queue a DESTROY task; only allowed from ACTIVE, DRIFTED or FAILED."""
    # Row lock so concurrent requests cannot interleave status transitions.
    query = select(TofuInstance).with_for_update().filter(TofuInstance.id == instance_id)
    row = (await db.execute(query)).first()
    if not row:
        raise HTTPException(status_code=404, detail="Resource not found")
    instance = row[0]
    if instance.status not in [
        TofuInstanceStatus.ACTIVE,
        TofuInstanceStatus.DRIFTED,
        TofuInstanceStatus.FAILED,
    ]:
        raise HTTPException(
            status_code=412,
            detail="Instance cannot be destroyed currently as it is pending update",
        )
    task = TofuInstanceTask(instance_id=instance.id, task=TofuInstanceTaskType.DESTROY)
    db.add(task)
    await db.flush()  # assigns task.id for the status-change record
    update_tofu_instance_status(db, instance, task.id, TofuInstanceStatus.PENDING_DESTROY)
    await db.commit()  # release FOR UPDATE lock
    return JSONResponse(status_code=status.HTTP_202_ACCEPTED, content={"id": instance_id})
async def _get_tofu_instance_state(
    db: AsyncSession,
    instance_id: int,
    password: str | None = None,
    host: str | None = None,
    *,
    password_required: bool = True,
) -> dict[str, Any]:
    """Return the stored state document; 404 when none (or an empty one) exists."""
    found = await _get_tofu_instance(
        db, instance_id, password, host, password_required=password_required
    )
    # Falsy (missing or empty) state is treated the same as absent.
    if not found.state:
        raise HTTPException(status_code=404, detail="No state found")
    return found.state
@router.get(
    "/instances/{instance_id}/state",
    response_model=TofuInstanceState,
    tags=["OpenTofu"],
)
async def get_tofu_instance_state_route(
    instance_id: int, db: DbSession, password: Password, request: Request
) -> dict[str, Any]:
    """Return the state document (Terraform HTTP backend GET; password auth)."""
    return await _get_tofu_instance_state(db, instance_id, password, request.client.host)
@router.post(
    "/instances/{instance_id}/state",
    response_model=None,
    tags=["OpenTofu"],
)
async def update_tofu_instance_state(
    instance_id: int,
    new_state: TofuInstanceState,
    db: DbSession,
    password: Password,
    request: Request,
    lock_id: TofuLockID = None,
) -> Response:
    """Store a new state document (Terraform HTTP backend POST)."""
    instance = await _get_tofu_instance(db, instance_id, password, request.client.host)
    # Reject writes from clients that do not hold the current lock (423).
    if instance.state_lock and instance.state_lock["ID"] != lock_id:
        raise TofuStateLocked(instance.state_lock)
    instance.state = new_state.model_dump()
    await db.commit()
    return Response(status_code=200)
@router.api_route(
    "/instances/{instance_id}/state",
    methods=["LOCK"],
    response_model=OkResponse,
    tags=["OpenTofu"],
    include_in_schema=False,
)
async def lock_tofu_instance_state(
    instance_id: int,
    lock: TofuInstanceStateLock,
    db: DbSession,
    password: Password,
    request: Request,
) -> JSONResponse:
    """Acquire the state lock (Terraform HTTP backend LOCK); 423 if held."""
    instance = await _get_tofu_instance(db, instance_id, password, request.client.host)
    if instance.state_lock:
        # Already locked: report the current holder's lock document.
        return JSONResponse(status_code=423, content=instance.state_lock)
    instance.state_lock = lock.serializable_dict()
    await db.commit()
    return JSONResponse(
        status_code=200, content={"result": "ok"}
    )  # A 204 would make sense here but the spec needs 200
@router.api_route(
    "/instances/{instance_id}/state",
    response_model=OkResponse,
    methods=["UNLOCK"],
    tags=["OpenTofu"],
    include_in_schema=False,
)
async def unlock_tofu_instance_state(
    instance_id: int,
    db: DbSession,
    password: Password,
    request: Request,
    lock_id: TofuLockID = None,
) -> JSONResponse:
    """Release the state lock (Terraform HTTP backend UNLOCK).

    Returns 200 when the lock is released or was never held; raises
    TofuStateLocked (rendered as 423) when a different lock ID holds it.
    """
    instance = await _get_tofu_instance(db, instance_id, password, request.client.host)
    if not instance.state_lock:
        # BUG FIX: this response was previously constructed but not returned,
        # so execution fell through and `instance.state_lock["ID"]` below
        # could raise TypeError on an unlocked instance.
        return JSONResponse(
            status_code=200, content={"result": "ok"}
        )  # A 204 would make sense here but the spec needs 200
    if lock_id is None or instance.state_lock["ID"] == lock_id:
        # force-unlock seems to not give an ID to verify so accept no ID being present.
        # This may not be great for robustness but it's not a security issue as the protocol
        # requires returning the lock ID to clients anyway when the lock ID is not correct.
        instance.state_lock = None
        await db.commit()
        return JSONResponse(
            status_code=200, content={"result": "ok"}
        )  # A 204 would make sense here but the spec needs 200
    raise TofuStateLocked(instance.state_lock)
@router.delete(
    "/instances/{instance_id}/state",
    response_model=OkResponse,
    tags=["OpenTofu"],
)
async def purge_tofu_instance_state(
    instance_id: int,
    db: DbSession,
    password: Password,
    request: Request,
    lock_id: TofuLockID = None,
) -> JSONResponse:
    """Delete the stored state and lock; 423 if another client holds the lock."""
    instance = await _get_tofu_instance(db, instance_id, password, request.client.host)
    if instance.state_lock:
        if instance.state_lock["ID"] != lock_id:
            raise TofuStateLocked(instance.state_lock)
    instance.state = None
    instance.state_lock = None
    await db.commit()
    return JSONResponse(status_code=200, content={"result": "ok"})
@router.get("/logs/{task_id}", tags=["OpenTofu"])
async def get_logs(task_id: int, db: DbSession, _: APIKey) -> StreamingResponse:
    """Stream a task's log records as newline-delimited JSON.

    Polls for new log rows once a second, stopping when the task reaches a
    terminal status or after five minutes.
    """
    # TODO: This could definitely be optimised but I don't think it's going to see heavy usage, just for debugging
    async def stream_log() -> AsyncIterator[str]:
        start = datetime.now(tz=timezone.utc)
        seen_log_id = 0  # highest log row id already emitted
        task_query = select(TofuInstanceTask).where(TofuInstanceTask.id == task_id)
        row = (await db.execute(task_query)).first()
        if not row:
            # NOTE(review): raising inside the generator happens after the
            # response has started; confirm this surfaces as a clean 404.
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Not found")
        task = row[0]
        while True:
            # Hard cap on how long a client may hold the stream open.
            if datetime.now(tz=timezone.utc) - start > timedelta(minutes=5):
                return
            result = await db.execute(
                select(TofuInstanceTaskLog)
                .where(
                    TofuInstanceTaskLog.instance_task_id == task_id,
                    TofuInstanceTaskLog.id > seen_log_id,
                )
                .order_by(TofuInstanceTaskLog.timestamp)
            )
            for log in result.all():
                seen_log_id = log[0].id
                yield json.dumps(log[0].log) + "\n"
            await db.refresh(task)
            # Debug print("done") removed.
            # NOTE(review): PENDING in this terminal set means the stream ends
            # immediately for tasks that have not started — confirm intended.
            if task.status in [
                TofuInstanceTaskStatus.PENDING,
                TofuInstanceTaskStatus.FAILED,
                TofuInstanceTaskStatus.COMPLETED,
            ]:
                return
            await asyncio.sleep(1)

    return StreamingResponse(stream_log(), media_type="application/x-ndjson")

76
src/tofu/schemas.py Normal file
View file

@ -0,0 +1,76 @@
from datetime import datetime
from enum import Enum
from typing import Any
from uuid import UUID
from pydantic import ConfigDict
from src.schemas import CustomModel, TimestampMixin, CustomDatetime, IdMixin
from src.tofu.models import TofuInstanceStatus, TofuInstanceTaskType, TofuInstanceTaskStatus
class TofuOperationType(Enum):
    """Operation types appearing in state lock documents.

    Values mirror the upstream enum:
    https://github.com/opentofu/opentofu/blob/main/internal/backend/operation_type.go
    """

    INVALID = "OperationTypeInvalid"
    REFRESH = "OperationTypeRefresh"
    PLAN = "OperationTypePlan"
    APPLY = "OperationTypeApply"
class TofuInstanceState(CustomModel):
    """Opaque Terraform/OpenTofu state document; arbitrary fields accepted."""

    # TODO: Do better
    model_config = ConfigDict(extra="allow")
class TofuInstanceStateLock(CustomModel):
    """Lock document sent by the Terraform HTTP backend LOCK request.

    Field names use the backend's capitalisation; extra fields are kept.
    """

    model_config = ConfigDict(extra="allow")

    ID: UUID
    Operation: TofuOperationType
    Info: str
    Who: str
    Version: str
    Created: str
    Path: str
class TofuInstanceSummary(CustomModel, IdMixin, TimestampMixin):
    """Compact view of a tofu instance (id/timestamps via mixins)."""

    status: TofuInstanceStatus
    # When the status last changed.
    status_changed_at: CustomDatetime
    # Last drift check; None if the instance has never been checked.
    drift_checked_at: CustomDatetime | None
class TofuInstanceStatusChange(CustomModel):
    """One entry in an instance's status-transition history."""

    # The task that caused this transition.
    instance_task_id: int
    timestamp: datetime
    old_status: TofuInstanceStatus
    new_status: TofuInstanceStatus
class TofuInstanceTask(CustomModel, TimestampMixin):
    """A queued or executed task (deploy/destroy/drift check) for an instance."""

    id: int
    task: TofuInstanceTaskType
    status: TofuInstanceTaskStatus
    # None until the task has started / finished respectively.
    start_time: datetime | None
    end_time: datetime | None
class TofuInstanceDetail(CustomModel, IdMixin, TimestampMixin):
    """Full view of a tofu instance, including task and status history."""

    status: TofuInstanceStatus
    configuration: dict[str, Any]
    # Raw OpenTofu outputs; None before the first successful apply — TODO confirm
    outputs: dict[str, Any] | None
    # Most recent plan document, if any.
    plan: dict[str, Any] | None
    status_changed_at: CustomDatetime
    drift_checked_at: CustomDatetime | None
    # Present only while OpenTofu holds a state lock.
    state_lock: TofuInstanceStateLock | None
    tasks: list[TofuInstanceTask]
    status_changes: list[TofuInstanceStatusChange]
class TofuInstanceCreate(CustomModel):
    """Request body for creating a tofu instance."""

    configuration: dict[str, Any]
    # Optional password for the instance; presumably generated when omitted — TODO confirm
    password: str | None = None
class TofuInstanceUpdate(CustomModel):
    """Request body for updating a tofu instance; all fields optional."""

    configuration: dict[str, Any] | None = None
    password: str | None = None

93
src/tofu/security.py Normal file
View file

@ -0,0 +1,93 @@
import secrets
from datetime import datetime, timezone
from typing import Annotated

import bcrypt
from fastapi import Depends
from fastapi.security import (
    HTTPBasic,
    HTTPBasicCredentials,
    HTTPBearer,
    HTTPAuthorizationCredentials,
)
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from starlette.requests import Request

from src.config import settings
from src.database import DbSession
from src.tofu.exceptions import TofuForbiddenException, TofuUnauthorizedException
from src.tofu.models import TofuBruteForce
from src.utils import generate_random_alphanum
# Shared HTTP Basic security scheme for instance-password endpoints.
basic = HTTPBasic()
# Dependency alias: HTTP Basic credentials extracted from the request.
Credentials = Annotated[HTTPBasicCredentials, Depends(basic)]
async def block_host_brute_force(db: AsyncSession, host: str) -> None:
    """Record a brute-force block row for *host* and commit immediately."""
    block = TofuBruteForce(host=host)
    db.add(block)
    await db.commit()
async def is_blocked_brute_force(request: Request, db: DbSession) -> None:
    """FastAPI dependency rejecting requests from brute-force-blocked hosts.

    Expired block rows are deleted on the way through, then the check is
    re-run in case the host has more than one row in the table.

    Raises:
        TofuForbiddenException: while a non-expired block exists for the host.
    """
    query = select(TofuBruteForce).where(TofuBruteForce.host == request.client.host)
    row = (await db.execute(query)).first()
    block: TofuBruteForce | None = row[0] if row else None
    if block:
        if block.expiry < datetime.now(tz=timezone.utc):
            # BUG FIX: AsyncSession.delete() is a coroutine in SQLAlchemy 2.x
            # and was previously called without await, so expired blocks were
            # never actually removed.
            await db.delete(block)
            await db.commit()
            return await is_blocked_brute_force(
                request, db
            )  # Just in case there's another block in the table
        raise TofuForbiddenException
    return None
# Dependency alias: raises 403 for hosts with an active brute-force block.
BruteForceProtection = Annotated[None, Depends(is_blocked_brute_force)]
def generate_password() -> str:
    """Generate a fresh 40-character alphanumeric password."""
    return generate_random_alphanum(40)
def generate_password_hash(password_str: str) -> bytes:
    """Hash *password_str* with bcrypt using a freshly generated salt."""
    return bcrypt.hashpw(password_str.encode("utf-8"), bcrypt.gensalt())
def validate_password(password_str: str, password_hash: bytes) -> bool:
    """Check *password_str* against a stored bcrypt hash.

    Returns False when no hash is stored (e.g. passwordless instances).
    """
    if password_hash is None:
        return False
    return bcrypt.checkpw(password_str.encode("utf-8"), password_hash)
async def get_password(
    db: DbSession, credentials: Credentials, request: Request, _: BruteForceProtection
) -> str:
    """FastAPI dependency extracting the HTTP Basic password.

    An empty/missing password counts as a failed attempt: the client host is
    recorded for brute-force protection and a 401 is raised.
    """
    if not credentials or not credentials.password:
        # BUG FIX: block_host_brute_force is a coroutine and was previously
        # called without await from a sync function, so it never executed and
        # failed attempts were not recorded.  The dependency is now async so
        # the call can be awaited (FastAPI accepts both forms).
        await block_host_brute_force(db, request.client.host)
        raise TofuUnauthorizedException
    return credentials.password
# Dependency alias: the validated HTTP Basic password from the request.
Password = Annotated[str, Depends(get_password)]
# Shared bearer-token security scheme for API-key endpoints.
bearer = HTTPBearer()
async def api_key(
    db: DbSession,
    request: Request,
    _: BruteForceProtection,
    credentials: HTTPAuthorizationCredentials = Depends(bearer),
) -> None:
    """FastAPI dependency validating the bearer token against settings.API_KEY.

    On mismatch the client host is recorded for brute-force protection and a
    401 is raised.
    """
    presented = credentials.credentials
    # SECURITY FIX: the previous version printed the presented API key to
    # stdout, leaking credentials into logs.  The comparison is also made
    # constant-time so the key cannot be recovered via timing differences.
    if not secrets.compare_digest(
        presented.encode("utf-8"), settings.API_KEY.encode("utf-8")
    ):
        await block_host_brute_force(db, request.client.host)
        raise TofuUnauthorizedException
# Dependency alias: raises 401 unless a valid bearer API key is presented.
APIKey = Annotated[None, Depends(api_key)]

80
src/tofu/service.py Normal file
View file

@ -0,0 +1,80 @@
from typing import Any
from sqlalchemy.ext.asyncio import AsyncSession
from starlette.background import BackgroundTask
from starlette.exceptions import HTTPException
from src.tofu.models import (
TofuInstance,
TofuInstanceTask,
TofuInstanceTaskType,
TofuInstanceStatus,
update_tofu_instance_status,
)
from src.tofu.tasks import process_tasks
async def create_tofu_instance(
    configuration: dict[str, Any],
    db: AsyncSession,
    *,
    commit: bool = False,
    create_task: bool = True
) -> tuple[int, BackgroundTask | None]:
    """Persist a new TofuInstance and optionally queue its DEPLOY task.

    Returns the new instance id and, when a task was queued, a BackgroundTask
    that kicks the task processor (otherwise None).
    """
    instance = TofuInstance(configuration=configuration)
    db.add(instance)
    # Flush so the database assigns instance.id before it is referenced.
    await db.flush()
    background: BackgroundTask | None = None
    if create_task:
        deploy = TofuInstanceTask(
            instance_id=instance.id, task=TofuInstanceTaskType.DEPLOY
        )
        db.add(deploy)
        background = BackgroundTask(process_tasks)
    if commit:
        await db.commit()
    return instance.id, background
async def create_empty_tofu_instance(db: AsyncSession) -> int:
    """Create an instance with an empty configuration and no deploy task."""
    instance_id, _ = await create_tofu_instance({}, db, create_task=False)
    return instance_id
async def update_tofu_instance(
    instance: TofuInstance,
    configuration: dict[str, Any],
    db: AsyncSession,
    *,
    commit: bool = False,
    allow_pending: bool = False
) -> BackgroundTask:
    """Replace the instance configuration and queue a DEPLOY task.

    Only ACTIVE/DRIFTED instances may be updated (plus PENDING when
    allow_pending is set).

    Raises:
        HTTPException: 412 when the instance status forbids updates.
    """
    allowed = [TofuInstanceStatus.ACTIVE, TofuInstanceStatus.DRIFTED]
    if allow_pending:
        allowed.append(TofuInstanceStatus.PENDING)
    if instance.status not in allowed:
        raise HTTPException(
            status_code=412, detail="Updates only allowed for active instances"
        )
    instance.configuration = configuration
    deploy = TofuInstanceTask(instance_id=instance.id, task=TofuInstanceTaskType.DEPLOY)
    db.add(deploy)
    # Flush so deploy.id exists for the status-change record.
    await db.flush()
    update_tofu_instance_status(db, instance, deploy.id, TofuInstanceStatus.PENDING)
    if commit:
        await db.commit()
    return BackgroundTask(process_tasks)
async def destroy_tofu_instance(
    instance: TofuInstance, db: AsyncSession, *, commit: bool = False
) -> BackgroundTask:
    """Queue a DESTROY task for the instance and mark it PENDING_DESTROY.

    Raises:
        HTTPException: 412 unless the instance is ACTIVE or DRIFTED.
    """
    destroyable = [TofuInstanceStatus.ACTIVE, TofuInstanceStatus.DRIFTED]
    if instance.status not in destroyable:
        raise HTTPException(
            status_code=412,
            detail="Instance cannot be destroyed currently as it is pending update",
        )
    destroy = TofuInstanceTask(
        instance_id=instance.id, task=TofuInstanceTaskType.DESTROY
    )
    db.add(destroy)
    # Flush so destroy.id exists for the status-change record.
    await db.flush()
    update_tofu_instance_status(
        db, instance, destroy.id, TofuInstanceStatus.PENDING_DESTROY
    )
    if commit:
        await db.commit()
    return BackgroundTask(process_tasks)

86
src/tofu/tasks.py Normal file
View file

@ -0,0 +1,86 @@
from datetime import datetime, timezone
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from src.database import get_db_session
from src.tofu.client import TofuManager
from src.tofu.exceptions import TofuExecutionError
from src.tofu.models import (
TofuInstanceStatus,
TofuInstanceTask,
TofuInstanceTaskType,
TofuInstanceTaskStatus,
update_tofu_instance_status,
)
from src.utils import repeat_every
async def deploy_tofu_instance(db: AsyncSession, task: TofuInstanceTask) -> None:
    """Run init/apply/output for the task's instance and mark it ACTIVE.

    On TofuExecutionError the instance is marked FAILED and the exception is
    re-raised so the caller can record the task failure.
    """
    try:
        async with TofuManager(db, task) as tofu:
            await tofu.init()
            await tofu.apply()
            await tofu.output()
        update_tofu_instance_status(db, task.instance, task.id, TofuInstanceStatus.ACTIVE)
        finished = datetime.now(tz=timezone.utc)
        task.end_time = finished
        task.instance.drift_checked_at = finished
    except TofuExecutionError:
        update_tofu_instance_status(db, task.instance, task.id, TofuInstanceStatus.FAILED)
        raise
async def destroy_tofu_instance(db: AsyncSession, task: TofuInstanceTask) -> None:
    """Run init/destroy for the task's instance and mark it DESTROYED.

    On TofuExecutionError the instance is marked FAILED_DESTROY and the
    exception is re-raised so the caller can record the task failure.
    """
    try:
        async with TofuManager(db, task) as tofu:
            await tofu.init()
            await tofu.destroy()
        update_tofu_instance_status(
            db, task.instance, task.id, TofuInstanceStatus.DESTROYED
        )
        # Destroyed instances expose no outputs.
        task.instance.outputs = {}
        finished = datetime.now(tz=timezone.utc)
        task.end_time = finished
        task.instance.drift_checked_at = finished
    except TofuExecutionError:
        update_tofu_instance_status(
            db, task.instance, task.id, TofuInstanceStatus.FAILED_DESTROY
        )
        raise
async def process_tasks() -> None:
async with get_db_session() as db:
while True:
query = (
select(TofuInstanceTask)
.with_for_update(skip_locked=True)
.options(selectinload(TofuInstanceTask.instance))
.filter(TofuInstanceTask.status == TofuInstanceTaskStatus.PENDING)
.limit(1)
)
row = (await db.execute(query)).first()
if not row:
break
task = row[0]
task.status = TofuInstanceTaskStatus.RUNNING
task.start_time = datetime.now(tz=timezone.utc)
await db.commit() # release FOR UPDATE lock
try:
match task.task:
case TofuInstanceTaskType.DEPLOY:
await deploy_tofu_instance(db, task)
case TofuInstanceTaskType.DESTROY:
await destroy_tofu_instance(db, task)
case TofuInstanceTaskType.CHECK_DRIFT:
print("NOT IMPLEMMENTED") # TODO: Implement
task.status = TofuInstanceTaskStatus.COMPLETED
task.end_time = datetime.now(tz=timezone.utc)
except TofuExecutionError:
task.status = TofuInstanceTaskStatus.FAILED
task.end_time = datetime.now(tz=timezone.utc)
finally:
await db.commit()
@repeat_every(seconds=5, wait_first=0)
async def periodic_tofu_process_tasks() -> None:
    # Scheduled entry point: polls for pending tofu tasks every 5 seconds.
    await process_tasks()

199
src/utils.py Normal file
View file

@ -0,0 +1,199 @@
import asyncio
import logging
import random
import string
from datetime import datetime, timedelta, timezone
from functools import wraps
from traceback import format_exception
from typing import Callable, Coroutine, Any
from starlette.concurrency import run_in_threadpool
logger = logging.getLogger(__name__)
# Alphabets for random token generation.
ALPHA_NUM = string.ascii_letters + string.digits
LOWER_NUM = string.ascii_lowercase + string.digits

# Common real-world subdomain labels; generate_random_subdomain picks from
# this list to produce plausible-looking subdomains.
TOP_SUBDOMAINS = [
    "admin",
    "api",
    "app",
    "apps",
    "autodiscover",
    "baidu",
    "bbs",
    "beta",
    "blog",
    "cdn",
    "citrix",
    "cloud",
    "demo",
    "dev",
    "email",
    "en",
    "exchange",
    "forum",
    "ftp",
    "gateway",
    "gov",
    "gw",
    "home",
    "host",
    "images",
    "img",
    "info",
    "intranet",
    "login",
    "m",
    "mail",
    "mail1",
    "mail2",
    "mail3",
    "media",
    "mobile",
    "news",
    "office",
    "owa",
    "portal",
    "remote",
    "secure",
    "server",
    "server1",
    "shop",
    "ssl",
    "stage",
    "staging",
    "start",
    "static",
    "store",
    "support",
    "test",
    "web",
    "webmail",
    "wiki",
    "www1",
    "www2",
]
# Callable aliases used by repeat_every: sync or async callables taking no
# arguments (or a single Exception) and returning None.
NoArgsNoReturnFuncT = Callable[[], None]
NoArgsNoReturnAsyncFuncT = Callable[[], Coroutine[Any, Any, None]]
ExcArgNoReturnFuncT = Callable[[Exception], None]
ExcArgNoReturnAsyncFuncT = Callable[[Exception], Coroutine[Any, Any, None]]
NoArgsNoReturnAnyFuncT = NoArgsNoReturnFuncT | NoArgsNoReturnAsyncFuncT
ExcArgNoReturnAnyFuncT = ExcArgNoReturnFuncT | ExcArgNoReturnAsyncFuncT
# A decorator turning either flavour of no-arg callable into an async one.
NoArgsNoReturnDecorator = Callable[[NoArgsNoReturnAnyFuncT], NoArgsNoReturnAsyncFuncT]
async def _handle_repeat_func(func: NoArgsNoReturnAnyFuncT) -> None:
    """Invoke *func*: await coroutine functions directly, run sync callables
    in Starlette's threadpool so they cannot block the event loop."""
    if not asyncio.iscoroutinefunction(func):
        await run_in_threadpool(func)
        return
    await func()
async def _handle_repeat_exc(
    exc: Exception, on_exception: ExcArgNoReturnAnyFuncT | None
) -> None:
    """Invoke the optional exception callback with *exc*, async or sync."""
    if not on_exception:
        return
    if asyncio.iscoroutinefunction(on_exception):
        await on_exception(exc)
        return
    await run_in_threadpool(on_exception, exc)
def repeat_every(
    *,
    seconds: float,
    wait_first: float | None = None,
    max_repetitions: int | None = None,
    on_complete: NoArgsNoReturnAnyFuncT | None = None,
    on_exception: ExcArgNoReturnAnyFuncT | None = None,
) -> NoArgsNoReturnDecorator:
    """
    This function returns a decorator that modifies a function so it is periodically re-executed after its first call.
    The function it decorates should accept no arguments and return nothing. If necessary, this can be accomplished
    by using `functools.partial` or otherwise wrapping the target function prior to decoration.
    Parameters
    ----------
    seconds: float
        The number of seconds to wait between repeated calls
    wait_first: float (default None)
        If not None, the function will wait for the given duration before the first call
    max_repetitions: Optional[int] (default None)
        The maximum number of times to call the repeated function. If `None`, the function is repeated forever.
    on_complete: Optional[Callable[[], None]] (default None)
        A function to call after the final repetition of the decorated function.
    on_exception: Optional[Callable[[Exception], None]] (default None)
        A function to call when an exception is raised by the decorated function.
    """

    def decorator(func: NoArgsNoReturnAnyFuncT) -> NoArgsNoReturnAsyncFuncT:
        """
        Converts the decorated function into a repeated, periodically-called version of itself.
        """
        # BUG FIX: asyncio only keeps a weak reference to running tasks, so the
        # previously discarded ensure_future() result meant the repeat loop
        # could be garbage-collected mid-flight. Keep a strong reference until
        # the task finishes.
        background_tasks: set[asyncio.Task] = set()

        @wraps(func)
        async def wrapped() -> None:
            async def loop() -> None:
                if wait_first is not None:
                    await asyncio.sleep(wait_first)
                repetitions = 0
                while max_repetitions is None or repetitions < max_repetitions:
                    try:
                        await _handle_repeat_func(func)
                    except Exception as exc:
                        formatted_exception = "".join(
                            format_exception(type(exc), exc, exc.__traceback__)
                        )
                        logger.error(formatted_exception)
                        await _handle_repeat_exc(exc, on_exception)
                    repetitions += 1
                    await asyncio.sleep(seconds)
                if on_complete:
                    await _handle_repeat_func(on_complete)

            task = asyncio.ensure_future(loop())
            background_tasks.add(task)
            task.add_done_callback(background_tasks.discard)

        return wrapped

    return decorator
def perishable_cache(expires: int, minimum: int):
    """Cache the decorated coroutine's result for *expires* seconds.

    Calling with retry=True shortens the validity window to *minimum*
    seconds, forcing an earlier refresh. The cache ignores call arguments:
    a single result is shared across all argument combinations.
    """

    def decorator(func):
        cached_at: datetime | None = None
        cached_value = None

        @wraps(func)
        async def wrapper(*args, retry: bool = False, **kwargs):
            nonlocal cached_at, cached_value
            window = timedelta(seconds=minimum if retry else expires)
            if cached_at is not None:
                if datetime.now(tz=timezone.utc) - cached_at < window:
                    return cached_value
            cached_value = await func(*args, retry=retry, **kwargs)
            cached_at = datetime.now(tz=timezone.utc)
            return cached_value

        return wrapper

    return decorator
def generate_random_alphanum(length: int = 20, prefix: str = "") -> str:
    """Return a random alphanumeric string of *length*, starting with *prefix*."""
    body = random.choices(ALPHA_NUM, k=length - len(prefix))
    return prefix + "".join(body)
def generate_random_lowernum(length: int = 20, prefix: str = "") -> str:
    """Return a random lowercase-alphanumeric string of *length*, starting with *prefix*."""
    body = random.choices(LOWER_NUM, k=length - len(prefix))
    return prefix + "".join(body)
def generate_random_subdomain() -> str:
    """Pick one of the common subdomain labels at random."""
    return random.choice(TOP_SUBDOMAINS)