"""Module-level setup: provider URLs, proxy discovery from environment
variables, streamlink sessions, and playlist/template/static asset loading."""

import dataclasses
import json
import logging
import os
import pprint
import re
import sys
from typing import Literal, cast

import aiohttp
import pydantic
import streamlink
from tornado import template

# Log to stdout with file/line prefixes so container runtimes capture it.
logging.basicConfig(
    format="[%(filename)s:%(lineno)d] %(message)s",
    stream=sys.stdout,
    level=logging.INFO,
)
logger = logging.getLogger(__name__)

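# Provider keys map to their public base URLs; each key also serves as the
# prefix for the per-provider proxy/region variables scanned further down.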
providers: dict[str, str] = {
    "nrk": "https://tv.nrk.no",
    "nrk_web": "https://nrk.no",
    "svt": "https://svtplay.se",
    "youtube": "https://www.youtube.com/watch?v=",
    "twitch": "https://twitch.tv",
    "twitter": "https://twitter.com",
}

# Optional self-hosted providers, added only when configured.
nextcloud_server = os.environ.get("NEXTCLOUD_SERVER")
if nextcloud_server is not None:
    providers["nextcloud"] = nextcloud_server

seafile_server = os.environ.get("SEAFILE_SERVER")
if seafile_server is not None:
    providers["seafile"] = seafile_server

playlist = None
icecast_server = os.environ.get("ICECAST_SERVER")
stream_server = os.environ.get("STREAM_SERVER")
proxy_server = os.environ.get("PROXY_SERVER")
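

# LinkWithType is the internal input to ProxyElem.proxy_url; ProxyRequest and
# ProxyResponse are the wire format spoken with the service at PROXY_SERVER,
# whose response urls are assumed to line up positionally with the request:
#   POST PROXY_SERVER {"action": "create-urls", "urls": [{"upstream": ..., "ctype": ..., "region": ...}]}
#   response          {"action": "create-urls", "urls": ["https://...", ...]}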
@dataclasses.dataclass
class LinkWithType:
    upstream: str
    ctype: None | str = None


class ProxyCreateLink(pydantic.BaseModel):
    upstream: None | pydantic.HttpUrl
    ctype: None | pydantic.StrictStr
    region: None | pydantic.StrictStr


class ProxyRequest(pydantic.BaseModel):
    action: Literal["create-urls", "read-config"]
    urls: None | list[ProxyCreateLink]


class ProxyResponse(pydantic.BaseModel):
    action: Literal["create-urls", "read-config"]
    urls: None | list[pydantic.HttpUrl]


class ProxyElem:
    """One upstream SOCKS5 proxy (host:port, or None for direct) and its region."""

    def __init__(self, proxy: None | str, region: None | str):
        self.proxy = proxy
        self.region = region

    def local(self) -> aiohttp.ClientSession:
        # Fresh short-lived session per call to the proxy service.
        timeout = aiohttp.ClientTimeout(total=5)
        return aiohttp.ClientSession(timeout=timeout)

    def __repr__(self):
        return f"<region=<{self.region}> proxy=<{self.proxy}>>"

    async def proxy_url(self, urls: list[LinkWithType]) -> list[None | str]:
        # Fallback result: the upstream URLs themselves, None for bad entries.
        clean_urls: list[None | str] = [
            url.upstream if isinstance(url.upstream, str) else None for url in urls
        ]
        if not isinstance(proxy_server, str):
            # No proxy service configured: hand back the upstream URLs as-is.
            return clean_urls

        link_requests: list[ProxyCreateLink] = []
        try:
            for url in urls:
                link_requests.append(
                    ProxyCreateLink.parse_obj(
                        {
                            "upstream": url.upstream,
                            "ctype": url.ctype,
                            "region": self.region,
                        }
                    )
                )
        except pydantic.ValidationError as e:
            logger.info(e)
            return clean_urls

        request_data: ProxyRequest
        response_data: ProxyResponse
        try:
            request_data = ProxyRequest.parse_obj(
                {"urls": link_requests, "action": "create-urls"}
            )
        except pydantic.ValidationError as e:
            logger.info(e)
            return clean_urls

        try:
            async with self.local() as session:
                resp = await session.post(proxy_server, json=request_data.dict())
                response_data = cast(
                    ProxyResponse, ProxyResponse.parse_obj(await resp.json())
                )
        except (aiohttp.ClientError, pydantic.ValidationError) as e:
            logger.info(e)
        else:
            # Responses line up positionally with the request; keep None where
            # the upstream was unusable.
            ret_data: list[None | str] = []
            if response_data.urls is not None:
                for src, dst in zip(clean_urls, response_data.urls):
                    ret_data.append(dst if isinstance(src, str) else None)
            return ret_data
        return clean_urls
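

# Usage sketch (illustrative; the upstream URL is made up):
#   links = [LinkWithType("https://tv.nrk.no/x/playlist.m3u8", "application/x-mpegURL")]
#   proxied = await proxies["nrk"][0].proxy_url(links)
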
proxies: dict[str, list[ProxyElem]] = {}
new_providers = {}

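# Discover per-provider proxies and regions from numbered, optionally
# country-scoped environment variables: "<KEY>_PROXY[_<cc>]<n>" and
# "<KEY>_REGION[_<cc>]<n>" (case-insensitive). Illustrative values (assumed):
#   NRK_PROXY_NO1=10.0.0.1:1080   NRK_REGION_NO1=no-east
#   SVT_PROXY1=10.0.0.2:1080      SVT_REGION1=se-central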
for key in providers:
    # Sort the matches so numbered PROXY/REGION variables pair up
    # positionally in the zip() below.
    region_expr = re.compile(f"^{key}_region(_[a-z][a-z])?[0-9]+$", re.IGNORECASE)
    region_matches = sorted(filter(region_expr.match, os.environ.keys()))
    proxy_expr = re.compile(f"^{key}_proxy(_[a-z][a-z])?[0-9]+$", re.IGNORECASE)
    proxy_matches = sorted(filter(proxy_expr.match, os.environ.keys()))

    proxy_current = []
    proxy_current_keys = {key}
    proxy_countries = []
    proxy_empty = True

    region_current = []
    region_current_keys = {key}
    region_countries = []
    region_empty = True

    for match in proxy_matches:
        proxy_match = proxy_expr.match(match.lower())
        if proxy_match is None:
            continue
        proxy_country_groups = proxy_match.groups()
        proxy_country = None
        pos = len(proxy_country_groups) - 1
        if pos >= 0:
            country_temp = proxy_country_groups[pos]
            if isinstance(country_temp, str):
                # Country-scoped variable, e.g. NRK_PROXY_NO1 -> country "no".
                proxy_country = country_temp.strip("_")
                proxy_current_keys.add(f"{key}_{proxy_country}")
        proxy_link = os.environ.get(match)
        if proxy_link is not None:
            proxy_current.append(proxy_link)
            proxy_countries.append(proxy_country)
            if proxy_country is None:
                proxy_empty = False

    for match in region_matches:
        region_match = region_expr.match(match.lower())
        if region_match is None:
            continue
        region_country_groups = region_match.groups()
        region_country = None
        pos = len(region_country_groups) - 1
        if pos >= 0:
            country_temp = region_country_groups[pos]
            if isinstance(country_temp, str):
                region_country = country_temp.strip("_")
                region_current_keys.add(f"{key}_{region_country}")
        region_name = os.environ.get(match)
        if region_name is not None:
            region_current.append(region_name)
            region_countries.append(region_country)
            if region_country is None:
                region_empty = False

    # Register result keys (base and country-scoped) for this provider.
    for elem in proxy_current_keys:
        proxies[elem] = []
        new_providers[elem] = providers[key]

    # Pair each proxy with its region by position.
    for proxy_link, region_name, country in zip(
        proxy_current, region_current, proxy_countries
    ):
        new_key = key
        if country is not None:
            new_key = f"{key}_{country}"
        proxies[new_key].append(ProxyElem(proxy_link, region_name))

    # Keys without any configured proxy fall back to a direct connection.
    for elem in proxy_current_keys:
        if len(proxies[elem]) == 0:
            proxies[elem].append(ProxyElem(None, None))

pprint.pp(proxies)
providers = new_providers
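

# One streamlink session per distinct proxy, tunnelling HTTP(S) over SOCKS5,
# plus a default session for direct playback.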
streamlink_sessions: dict[str, streamlink.session.Streamlink] = {}
streamlink_default_session = streamlink.Streamlink()

proxy_keys: list[str] = []
for proxy_provider in proxies.values():
    for proxy_elem in proxy_provider:
        if isinstance(proxy_elem, ProxyElem) and isinstance(proxy_elem.proxy, str):
            proxy_keys.append(proxy_elem.proxy)
            session = streamlink.Streamlink()
            session.set_option("http-proxy", "socks5://" + proxy_elem.proxy)
            session.set_option("https-proxy", "socks5://" + proxy_elem.proxy)
            streamlink_sessions[proxy_elem.proxy] = session
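

# Resulting playlist shape, for illustration (keys/names come from sources.json):
#   #EXTM3U
#   #EXTINF:0 radio="true", Example Radio
#   <ICECAST_SERVER><key>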
if icecast_server is not None and stream_server is not None:
    try:
        with open("/app/sources.json", "r", encoding="utf-8") as f:
            data = json.load(f)
        playlist = "#EXTM3U\n"
        for key in data:
            current = data[key]
            name = current["name"]
            radio = current["radio"]
            if radio:
                playlist += f'#EXTINF:0 radio="true", {name}\n'
                playlist += icecast_server + key + "\n"
            else:
                playlist += f'#EXTINF:0 radio="false", {name}\n'
                playlist += stream_server + key + "\n"
    except (OSError, ValueError, KeyError) as e:
        # OSError: unreadable file; ValueError/KeyError: malformed sources.json.
        logger.info(e)

# Templates and static assets, loaded best-effort; the first missing file
# aborts the remaining loads and is logged.
template_html = None
template_script = None
videojs_version = None
chromecast_version = None
font_awesome_version = None
custom_style = None
favicon = None
try:
    with open("/app/index.html", "r", encoding="utf-8") as f:
        template_html = template.Template(f.read().strip())
    with open("/app/script.js", "r", encoding="utf-8") as f:
        template_script = template.Template(f.read().strip())
    with open("/app/version/video.js.txt", "r", encoding="utf-8") as f:
        videojs_version = f.read().strip()
    with open("/app/version/chromecast.txt", "r", encoding="utf-8") as f:
        chromecast_version = f.read().strip()
    with open("/app/version/font-awesome.txt", "r", encoding="utf-8") as f:
        font_awesome_version = f.read().strip()
    with open("/app/favicon.png", "rb") as fb:
        favicon = fb.read()
    with open("/app/style.css", "r", encoding="utf-8") as f:
        custom_style = f.read()
except OSError as e:
    logger.info(e)