2022-11-13 20:09:48 +00:00
|
|
|
import base64
|
2022-04-22 12:52:41 +01:00
|
|
|
import datetime
|
2022-11-13 20:09:48 +00:00
|
|
|
import json
|
2022-04-22 12:52:41 +01:00
|
|
|
import logging
|
|
|
|
import sys
|
2022-11-13 20:09:48 +00:00
|
|
|
from collections import defaultdict
|
2024-11-18 09:56:48 +00:00
|
|
|
from typing import Any, Callable, Dict, List, Type
|
2022-11-13 20:09:48 +00:00
|
|
|
|
|
|
|
from sqlalchemy import inspect
|
2022-04-22 12:52:41 +01:00
|
|
|
|
|
|
|
from app import app
|
2024-12-06 18:02:59 +00:00
|
|
|
from app.cli import BaseCliHandler, _SubparserType
|
2022-04-22 12:52:41 +01:00
|
|
|
from app.extensions import db
|
2024-12-06 18:02:59 +00:00
|
|
|
from app.models.activity import Activity, Webhook
|
|
|
|
from app.models.alarms import Alarm, AlarmState
|
|
|
|
from app.models.automation import Automation, AutomationLogs, AutomationState
|
|
|
|
from app.models.base import Group, MirrorList, Pool, PoolGroup
|
2022-05-06 12:28:11 +01:00
|
|
|
from app.models.bridges import Bridge, BridgeConf
|
2022-11-13 20:09:48 +00:00
|
|
|
from app.models.mirrors import Origin, Proxy, SmartProxy
|
2024-12-06 18:02:59 +00:00
|
|
|
from app.models.onions import Eotk, Onion
|
2022-11-13 20:09:48 +00:00
|
|
|
from app.models.tfstate import TerraformState
|
2022-04-22 12:52:41 +01:00
|
|
|
|
2024-11-18 09:56:48 +00:00
|
|
|
# Alias for SQLAlchemy declarative model classes, used to type the
# export/import helpers below.
Model = Type[db.Model]  # type: ignore[name-defined]

# order matters due to foreign key constraints
# (referenced tables appear before the tables that reference them, so rows
# can be inserted in this order on import without violating constraints)
models: List[Model] = [
    Group,
    Activity,
    Pool,
    PoolGroup,
    SmartProxy,
    Origin,
    Proxy,
    Onion,
    Alarm,
    Automation,
    AutomationLogs,
    BridgeConf,
    Bridge,
    Eotk,
    MirrorList,
    TerraformState,
    Webhook
]
|
2022-11-13 20:09:48 +00:00
|
|
|
|
|
|
|
|
|
|
|
class ExportEncoder(json.JSONEncoder):
    """JSON encoder covering the extra value types stored in the database.

    Enum states are serialised by member name, binary columns as base64
    text, and date/time values in ISO 8601 format.  Anything else is
    delegated to the standard :class:`json.JSONEncoder` behaviour.
    """

    def default(self, o: Any) -> Any:
        # Both state enums serialise the same way: by member name.
        if isinstance(o, (AlarmState, AutomationState)):
            return o.name
        if isinstance(o, bytes):
            return base64.encodebytes(o).decode('utf-8')
        if isinstance(o, (datetime.datetime, datetime.date, datetime.time)):
            return o.isoformat()
        return super().default(o)
|
2022-11-13 20:09:48 +00:00
|
|
|
|
|
|
|
|
2024-11-18 09:56:48 +00:00
|
|
|
def model_to_dict(model: Model) -> Dict[str, Any]:
    """Serialise one model instance to a flat dictionary.

    Each key combines the value's Python type name with the column name
    (e.g. ``str_description``) so the importer can select the matching
    decoder for that value.

    Raises:
        RuntimeError: if SQLAlchemy cannot inspect the model's class.
    """
    mapper = inspect(type(model))
    if not mapper:
        raise RuntimeError(f"Could not inspect model {model}")
    result: Dict[str, Any] = {}
    for col in mapper.columns:
        value = getattr(model, col.name)
        result[f"{type(value).__name__}_{col.name}"] = value
    return result
|
|
|
|
|
|
|
|
|
|
|
|
def db_export() -> None:
    """Dump every table listed in ``models`` to stdout as one JSON document.

    The document maps each model's class name to a list of serialised rows.
    """
    serialiser = ExportEncoder()
    dump: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
    for model in models:
        dump[model.__name__].extend(
            model_to_dict(row) for row in model.query.all()
        )
    print(serialiser.encode(dump))
|
|
|
|
|
|
|
|
|
2024-11-18 09:56:48 +00:00
|
|
|
# Maps the exported type-name prefix (see model_to_dict) back to a callable
# that restores the original Python value.  Types without an entry pass
# through unchanged (see db_import_model's fallback).
decoder: Dict[str, Callable[[Any], Any]] = {
    # Enum members were exported by name; Enum[name] is the documented
    # lookup-by-name form (previously written via __getattribute__).
    "AlarmState": lambda x: AlarmState[x],
    "AutomationState": lambda x: AutomationState[x],
    # Binary columns were exported as base64 text.
    "bytes": lambda x: base64.decodebytes(x.encode('utf-8')),
    "datetime": datetime.datetime.fromisoformat,
    "int": int,
    "str": lambda x: x,
}
|
|
|
|
|
|
|
|
|
2024-11-18 09:56:48 +00:00
|
|
|
def db_import_model(model: Model, data: List[Dict[str, Any]]) -> None:
    """Create and stage one instance of *model* per entry in *data*.

    Keys have the form ``<TypeName>_<column>``; the type-name prefix picks
    the decoder used to restore the stored value.  Instances are added to
    the session but not committed.
    """
    for entry in data:
        instance = model()
        for key, raw in entry.items():
            # maxsplit=1 so column names containing underscores survive
            type_name, column = key.split("_", 1)
            restore = decoder.get(type_name, lambda x: x)
            setattr(instance, column, restore(raw))
        db.session.add(instance)
|
|
|
|
|
|
|
|
|
|
|
|
def db_import() -> None:
    """Read a JSON export from stdin and load it into the database."""
    payload = json.load(sys.stdin)
    # import order matters due to foreign key constraints
    for model in models:
        db_import_model(model, payload.get(model.__name__, []))
    db.session.commit()
|
2022-04-22 12:52:41 +01:00
|
|
|
|
|
|
|
|
2022-06-17 13:21:35 +01:00
|
|
|
class DbCliHandler(BaseCliHandler):
    """CLI handler exposing database export/import as the ``db`` subcommand."""

    @classmethod
    def add_subparser_to(cls, subparsers: _SubparserType) -> None:
        """Register the ``db`` subcommand and its flags on *subparsers*."""
        parser = subparsers.add_parser("db", help="database operations")
        parser.add_argument("--export", help="export data to JSON format", action="store_true")
        parser.add_argument("--import", help="import data from JSON format", action="store_true")
        parser.set_defaults(cls=cls)

    def run(self) -> None:
        """Dispatch to the requested action inside an application context."""
        with app.app_context():
            if self.args.export:
                db_export()
                return
            # "import" is a Python keyword, so it cannot be accessed as
            # self.args.import — go through the namespace dict instead.
            if vars(self.args)["import"]:
                db_import()
                return
            logging.error("No action requested")
|