2024-11-09 11:08:48 +00:00
|
|
|
from typing import List, Optional, TypedDict
|
2022-03-10 14:26:22 +00:00
|
|
|
|
2022-09-26 14:51:11 +01:00
|
|
|
from app.models.base import Pool
|
2022-05-17 09:44:18 +01:00
|
|
|
from app.models.mirrors import Origin, Proxy
|
2022-04-27 13:30:49 +01:00
|
|
|
|
|
|
|
|
2024-11-09 11:08:48 +00:00
|
|
|
class BC2Alternative(TypedDict):
    """One alternative access route for a site, in the Bypass Censorship v2
    feed schema."""

    proto: str       # transport protocol, e.g. "tor" or "https" (see producers below)
    type: str        # alternative kind, e.g. "eotk" (onion) or "mirror" (proxy)
    created_at: str  # creation timestamp, serialised to a string
    updated_at: str  # last-update timestamp, serialised to a string
    url: str         # URL at which this alternative is reachable
|
|
|
|
|
|
|
|
|
2024-11-09 11:08:48 +00:00
|
|
|
class BC2Site(TypedDict):
    """A site together with the alternative routes currently available for it."""

    main_domain: str  # canonical domain with any "www." removed
    available_alternatives: List[BC2Alternative]
|
|
|
|
|
|
|
|
|
2024-11-09 11:08:48 +00:00
|
|
|
class BypassCensorship2(TypedDict):
    """Top-level Bypass Censorship v2 feed document."""

    version: str  # feed schema version; this module emits "2.0"
    sites: List[BC2Site]
|
|
|
|
|
2022-03-10 14:26:22 +00:00
|
|
|
|
2024-11-09 11:08:48 +00:00
|
|
|
def onion_alternative(origin: Origin) -> List[BC2Alternative]:
    """Return *origin*'s onion service as a list of BC2 alternatives.

    Returns a single-entry list when the origin has an onion address and an
    empty list otherwise, so the result can be concatenated directly with
    other alternative lists.

    :param origin: origin whose onion service (if any) should be published
    :return: zero or one "eotk" alternatives
    """
    url: Optional[str] = origin.onion()
    if url is None:
        return []
    return [{
        "proto": "tor",
        "type": "eotk",
        # Use isoformat() so timestamps match proxy_alternative(); the
        # previous str() rendered datetimes with a space separator rather
        # than ISO 8601's "T", making the feed inconsistent.
        "created_at": origin.added.isoformat(),
        "updated_at": origin.updated.isoformat(),
        "url": url
    }]
|
2022-04-27 14:50:41 +01:00
|
|
|
|
|
|
|
|
2024-11-09 11:08:48 +00:00
|
|
|
def proxy_alternative(proxy: Proxy) -> Optional[BC2Alternative]:
    """Convert *proxy* into a BC2 "mirror" alternative.

    :param proxy: proxy to publish
    :return: the alternative entry, or ``None`` when the proxy has no URL
    """
    if proxy.url is None:
        return None
    alternative: BC2Alternative = {
        "proto": "https",
        "type": "mirror",
        "created_at": proxy.added.isoformat(),
        "updated_at": proxy.updated.isoformat(),
        "url": proxy.url,
    }
    return alternative
|
|
|
|
|
|
|
|
|
2022-05-17 09:44:18 +01:00
|
|
|
def main_domain(origin: Origin) -> str:
    """Return the domain to publish for *origin*, stripping any "www.".

    A description of the form ``proxy:<domain>`` overrides the origin's own
    domain name.

    NOTE(review): ``replace`` removes every occurrence of "www.", not just a
    leading one — presumed intentional; confirm if domains may contain it
    elsewhere.
    """
    prefix = "proxy:"
    desc: str = origin.description
    if desc.startswith(prefix):
        return desc[len(prefix):].replace("www.", "")
    return origin.domain_name.replace("www.", "")
|
|
|
|
|
|
|
|
|
2022-09-26 14:53:01 +01:00
|
|
|
def active_proxies(origin: Origin, pool: Pool) -> List[Proxy]:
    """Return *origin*'s usable proxies belonging to *pool*.

    A proxy is usable when it has a URL and is neither deprecated nor
    destroyed.
    """
    usable: List[Proxy] = []
    for candidate in origin.proxies:
        if candidate.url is None:
            continue
        if candidate.deprecated or candidate.destroyed:
            continue
        if candidate.pool_id == pool.id:
            usable.append(candidate)
    return usable
|
|
|
|
|
2022-05-17 09:44:18 +01:00
|
|
|
|
2024-11-09 11:08:48 +00:00
|
|
|
def mirror_sites(pool: Pool) -> BypassCensorship2:
    """Build the Bypass Censorship v2 feed for every live origin.

    Queries all non-destroyed origins (ordered by domain name) and, for each,
    collects its onion alternative plus the alternatives for its active
    proxies in *pool*.
    """
    origins = Origin.query.filter(Origin.destroyed.is_(None)).order_by(Origin.domain_name).all()

    site_entries: List[BC2Site] = []
    for origin in origins:
        # Start from the (possibly empty) onion alternative list, then add
        # one entry per active proxy, skipping proxies without a URL.
        alternatives: List[BC2Alternative] = onion_alternative(origin)
        for proxy in active_proxies(origin, pool):
            alternative = proxy_alternative(proxy)
            if alternative is not None:
                alternatives.append(alternative)

        site_entries.append({
            "main_domain": main_domain(origin),
            "available_alternatives": alternatives
        })

    return {
        "version": "2.0",
        "sites": site_entries
    }
|