experimental region support
parent 6d88564ebf
commit dddc8a5a49
@@ -5,6 +5,7 @@ COPY ["backend/start.sh", "/app/start.sh"]
 COPY ["backend/sources.py", "/app/setup/sources.py"]
 COPY ["backend/style.js", "/app/setup/style.js"]
 COPY ["backend/stream.py", "/app/stream.py"]
+COPY ["backend/config.py", "/app/config.py"]
 COPY ["backend/seafile.js", "/app/seafile.js"]
 COPY ["backend/stream_providers.py", "/app/stream_providers.py"]
 COPY ["frontend/index.html", "/app/index.html"]
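The added COPY line ships the new backend/config.py into /app alongside stream.py, so the shared settings become importable as a plain top-level module. A minimal sketch of that usage, assuming the server reads the provider table and proxy map as module attributes (only the "import config" line is confirmed by the hunks further down; the attribute accesses are illustrative):

# Hypothetical consumer inside /app, e.g. stream.py; the attribute names exist
# in backend/config.py below, but this exact usage is an assumption.
import config

print(sorted(config.providers))       # provider keys, e.g. 'nrk', 'nrk_no', 'svt', ...
print(config.proxies.get("nrk", []))  # list of ProxyElem(proxy, region) entries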
backend/config.py (Normal file, 204 lines)
@@ -0,0 +1,204 @@
+import json
+import sys
+import os
+import re
+import base64
+import logging
+import distutils.util
+import asyncio
+import tornado.web
+import tornado.routing
+import config
+import stream_providers
+import aiohttp
+
+providers = {}
+providers["nrk"] = "https://tv.nrk.no"
+providers["nrk_web"] = "https://nrk.no"
+providers["svt"] = "https://svtplay.se"
+providers["youtube"] = "https://www.youtube.com/watch?v="
+providers["twitch"] = "https://twitch.tv"
+providers["twitter"] = "https://twitter.com"
+
+nextcloud_server = os.environ.get("NEXTCLOUD_SERVER")
+if nextcloud_server is not None:
+    providers["nextcloud"] = nextcloud_server
+
+seafile_server = os.environ.get("SEAFILE_SERVER")
+if seafile_server is not None:
+    providers["seafile"] = seafile_server
+
+playlist = None
+icecast_server = os.environ.get("ICECAST_SERVER")
+stream_server = os.environ.get("STREAM_SERVER")
+proxy_server = os.environ.get("PROXY_SERVER")
+
+class ProxyElem():
+    def __init__(self, proxy, region):
+        self.proxy = proxy
+        self.region = region
+    def local(self):
+        timeout = aiohttp.ClientTimeout(total=5)
+        return aiohttp.ClientSession(timeout=timeout)
+    def __repr__(self):
+        return str(self.proxy)
+    async def proxy_url(self, urls):
+        clean_urls = []
+        for url in urls:
+            if isinstance(url, tuple):
+                clean_urls.append(url[0])
+            else:
+                clean_urls.append(url)
+        if not isinstance(proxy_server, str):
+            return clean_urls
+
+        jdata = None
+        data_list = []
+        for url in urls:
+            data = {}
+            if isinstance(url, str):
+                data["upstream"] = url
+            elif isinstance(url, tuple):
+                if isinstance(url[0], str):
+                    data["upstream"] = url[0]
+                if isinstance(url[1], str):
+                    data["ctype"] = url[1]
+
+            data["region"] = self.region
+            data_list.append(data)
+        try:
+            async with self.local() as session:
+                resp = await session.post(proxy_server, json=data_list)
+                text = await resp.text()
+                jdata = json.loads(text)
+        except Exception as e:
+            logger.info(e)
+        if isinstance(jdata, list):
+            ret_data = []
+            for src, dst in zip(clean_urls, jdata):
+                if isinstance(src, str):
+                    ret_data.append(dst)
+                else:
+                    ret_data.append(None)
+            return ret_data
+        else:
+            return clean_urls
+
+proxies = {}
+new_providers = {}
+for key in providers:
+    region_expr = re.compile(f'^{key}_region(_[a-z][a-z])?[0-9]+$', re.IGNORECASE)
+    region_matches = list(filter(region_expr.match, os.environ.keys()))
+    proxy_expr = re.compile(f'^{key}_proxy(_[a-z][a-z])?[0-9]+$', re.IGNORECASE)
+    proxy_matches = list(filter(proxy_expr.match, os.environ.keys()))
+    proxy_current = []
+    proxy_current_keys = set()
+    proxy_current_keys.add(key)
+    proxy_countries = []
+    proxy_empty = True
+
+    region_current = []
+    region_current_keys = set()
+    region_current_keys.add(key)
+    region_countries = []
+    region_empty = True
+
+    for match in proxy_matches:
+        proxy_country_groups = proxy_expr.match(match.lower()).groups()
+        proxy_country = None
+        pos = len(proxy_country_groups) - 1
+        if pos >= 0:
+            country_temp = proxy_country_groups[pos]
+            if isinstance(country_temp, str):
+                proxy_country = country_temp.strip("_")
+                proxy_current_keys.add(f'{key}_{proxy_country}')
+        proxy = os.environ.get(match)
+        if proxy is not None:
+            proxy_current.append(proxy)
+            proxy_countries.append(proxy_country)
+            if proxy_country is None:
+                proxy_empty = False
+
+    for match in region_matches:
+        region_country_groups = region_expr.match(match.lower()).groups()
+        region_country = None
+        pos = len(region_country_groups) - 1
+        if pos >= 0:
+            country_temp = region_country_groups[pos]
+            if isinstance(country_temp, str):
+                region_country = country_temp.strip("_")
+                region_current_keys.add(f'{key}_{region_country}')
+        region = os.environ.get(match)
+        if region is not None:
+            region_current.append(region)
+            region_countries.append(region_country)
+            if region_country is None:
+                region_empty = False
+
+    for elem in proxy_current_keys:
+        proxies[elem] = []
+        new_providers[elem] = providers[key]
+        print(proxies)
+
+    for proxy, region, country in zip(proxy_current, region_current, proxy_countries):
+        new_key = key
+        if country is not None:
+            new_key = f'{key}_{country}'
+        proxies[new_key].append(ProxyElem(proxy, region))
+
+    for elem in proxy_current_keys:
+        if len(proxies[elem]) == 0:
+            proxies[elem].append(ProxyElem(None, None))
+
+providers = new_providers
+
+proxy_keys = []
+for proxy_provider in proxies.values():
+    for proxy in proxy_provider:
+        if isinstance(proxy, ProxyElem) and isinstance(proxy.proxy, str):
+            proxy_keys.append(proxy.proxy)
+stream_providers.setup(proxy_keys)
+
+if icecast_server is not None and stream_server is not None:
+    try:
+        with open("/app/sources.json", "r") as f:
+            data = json.loads(f.read())
+        playlist = "#EXTM3U\n"
+        for key in data:
+            current = data[key]
+            name = current["name"]
+            radio = current["radio"]
+            if radio:
+                playlist += f'#EXTINF:0 radio="true", {name}\n'
+                playlist += icecast_server + key + "\n"
+            else:
+                playlist += f'#EXTINF:0 radio="false", {name}\n'
+                playlist += stream_server + key + "\n"
+    except Exception as e:
+        logger.info(e)
+
+template_html = None
+template_script = None
+videojs_version = None
+font_awesome_version = None
+custom_style = None
+favicon = None
+try:
+    with open("/app/index.html", "r") as f:
+        template_html = tornado.template.Template(f.read().strip())
+    with open("/app/script.js", "r") as f:
+        template_script = tornado.template.Template(f.read().strip())
+    with open("/app/version/video.js.txt", "r") as f:
+        videojs_version = f.read().strip()
+    with open("/app/version/chromecast.txt", "r") as f:
+        chromecast_version = f.read().strip()
+    with open("/app/version/font-awesome.txt", "r") as f:
+        font_awesome_version = f.read().strip()
+    with open("/app/favicon.png", "rb") as f:
+        favicon = f.read()
+    with open("/app/style.css", "r") as f:
+        custom_style = f.read()
+except Exception as e:
+    logger.info(e)
+
+
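For orientation: region support in the new module pairs two families of environment variables per provider. <provider>_proxy[_cc]N supplies the proxy itself, and <provider>_region[_cc]N supplies the region string that proxy_url() now forwards to PROXY_SERVER as "region" (replacing the old "proxy"/"proxied" fields removed in the hunks below). A rough standalone sketch of that pairing; the names NRK_PROXY_NO1 / NRK_REGION_NO1, their values, and the single-provider setup are made-up examples:

# Sketch only: reuses the matching logic from backend/config.py on hypothetical values.
import os
import re

os.environ["NRK_PROXY_NO1"] = "socks5://10.0.0.2:1080"  # assumed example value
os.environ["NRK_REGION_NO1"] = "norway"                  # assumed; forwarded verbatim as "region"

key = "nrk"
proxy_expr = re.compile(f'^{key}_proxy(_[a-z][a-z])?[0-9]+$', re.IGNORECASE)
region_expr = re.compile(f'^{key}_region(_[a-z][a-z])?[0-9]+$', re.IGNORECASE)

proxy_values = [os.environ[k] for k in os.environ if proxy_expr.match(k)]
region_values = [os.environ[k] for k in os.environ if region_expr.match(k)]

# config.py zips these into ProxyElem(proxy, region) objects; proxy_url() then
# POSTs entries shaped like {"upstream": url, "region": region} to PROXY_SERVER.
print(list(zip(proxy_values, region_values)))  # [('socks5://10.0.0.2:1080', 'norway')]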
@@ -8,130 +8,12 @@ import logging
 import asyncio
 import tornado.web
 import tornado.routing
+import config
 import stream_providers
 import aiohttp
 
 logging.basicConfig(format='[%(filename)s:%(lineno)d] %(message)s', stream=sys.stdout, level=logging.INFO)
 logger = logging.getLogger(__name__)
-
-providers = {}
-providers["nrk"] = "https://tv.nrk.no"
-providers["nrk_web"] = "https://nrk.no"
-providers["svt"] = "https://svtplay.se"
-providers["youtube"] = "https://www.youtube.com/watch?v="
-providers["twitch"] = "https://twitch.tv"
-providers["twitter"] = "https://twitter.com"
-
-nextcloud_server = os.environ.get("NEXTCLOUD_SERVER")
-if nextcloud_server is not None:
-    providers["nextcloud"] = nextcloud_server
-
-seafile_server = os.environ.get("SEAFILE_SERVER")
-if seafile_server is not None:
-    providers["seafile"] = seafile_server
-
-playlist = None
-icecast_server = os.environ.get("ICECAST_SERVER")
-stream_server = os.environ.get("STREAM_SERVER")
-proxy_server = os.environ.get("PROXY_SERVER")
-
-class ProxyElem():
-    def __init__(self, proxy):
-        self.proxy = proxy
-    def local(self):
-        timeout = aiohttp.ClientTimeout(total=5)
-        return aiohttp.ClientSession(timeout=timeout)
-    def __repr__(self):
-        return str(self.proxy)
-    async def proxy_url(self, urls):
-        clean_urls = []
-        for url in urls:
-            if isinstance(url, tuple):
-                clean_urls.append(url[0])
-            else:
-                clean_urls.append(url)
-        if not isinstance(proxy_server, str):
-            return clean_urls
-
-        jdata = None
-        data_list = []
-        for url in urls:
-            data = {}
-            if isinstance(url, str):
-                data["upstream"] = url
-            elif isinstance(url, tuple):
-                if isinstance(url[0], str):
-                    data["upstream"] = url[0]
-                if isinstance(url[1], str):
-                    data["ctype"] = url[1]
-
-            data["proxy"] = self.proxy
-            data["proxied"] = isinstance(self.proxy, str)
-            data_list.append(data)
-        try:
-            async with self.local() as session:
-                resp = await session.post(proxy_server, json=data_list)
-                text = await resp.text()
-                jdata = json.loads(text)
-        except Exception as e:
-            logger.info(e)
-        if isinstance(jdata, list):
-            ret_data = []
-            for src, dst in zip(clean_urls, jdata):
-                if isinstance(src, str):
-                    ret_data.append(dst)
-                else:
-                    ret_data.append(None)
-            return ret_data
-        else:
-            return clean_urls
-
-proxies = {}
-new_providers = {}
-for key in providers:
-    expr = re.compile(f'^{key}_proxy(_[a-z][a-z])?[0-9]+$', re.IGNORECASE)
-    matches = list(filter(expr.match, os.environ.keys()))
-    current = []
-    current_keys = set()
-    current_keys.add(key)
-    countries = []
-    empty = True
-    for match in matches:
-        country_groups = expr.match(match.lower()).groups()
-        country = None
-        pos = len(country_groups) - 1
-        if pos >= 0:
-            country_temp = country_groups[pos]
-            if isinstance(country_temp, str):
-                country = country_temp.strip("_")
-                current_keys.add(f'{key}_{country}')
-        proxy = os.environ.get(match)
-        if proxy is not None:
-            current.append(proxy)
-            countries.append(country)
-            if country is None:
-                empty = False
-    for elem in current_keys:
-        proxies[elem] = []
-        new_providers[elem] = providers[key]
-        print(proxies)
-    for proxy, country in zip(current, countries):
-        new_key = key
-        if country is not None:
-            new_key = f'{key}_{country}'
-        proxies[new_key].append(ProxyElem(proxy))
-    for elem in current_keys:
-        if len(proxies[elem]) == 0:
-            proxies[elem].append(ProxyElem(None))
-providers = new_providers
-
-proxy_keys = []
-for proxy_provider in proxies.values():
-    for proxy in proxy_provider:
-        if isinstance(proxy, ProxyElem) and isinstance(proxy.proxy, str):
-            proxy_keys.append(proxy.proxy)
-stream_providers.setup(proxy_keys)
-
 class UpstreamHandler():
     def __init__(self):
         self.provider = None
@@ -196,48 +78,6 @@ class UpstreamHandler():
         if not future.done():
             future.cancel()
-
-if icecast_server is not None and stream_server is not None:
-    try:
-        with open("/app/sources.json", "r") as f:
-            data = json.loads(f.read())
-        playlist = "#EXTM3U\n"
-        for key in data:
-            current = data[key]
-            name = current["name"]
-            radio = current["radio"]
-            if radio:
-                playlist += f'#EXTINF:0 radio="true", {name}\n'
-                playlist += icecast_server + key + "\n"
-            else:
-                playlist += f'#EXTINF:0 radio="false", {name}\n'
-                playlist += stream_server + key + "\n"
-    except Exception as e:
-        logger.info(e)
-
-template_html = None
-template_script = None
-videojs_version = None
-font_awesome_version = None
-custom_style = None
-favicon = None
-try:
-    with open("/app/index.html", "r") as f:
-        template_html = tornado.template.Template(f.read().strip())
-    with open("/app/script.js", "r") as f:
-        template_script = tornado.template.Template(f.read().strip())
-    with open("/app/version/video.js.txt", "r") as f:
-        videojs_version = f.read().strip()
-    with open("/app/version/chromecast.txt", "r") as f:
-        chromecast_version = f.read().strip()
-    with open("/app/version/font-awesome.txt", "r") as f:
-        font_awesome_version = f.read().strip()
-    with open("/app/favicon.png", "rb") as f:
-        favicon = f.read()
-    with open("/app/style.css", "r") as f:
-        custom_style = f.read()
-except Exception as e:
-    logger.info(e)
 
 class MainHandler(tornado.web.RequestHandler):
     async def handle_any(self, redir):
         handler = UpstreamHandler()
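The blocks removed above (playlist assembly and template/version loading) were moved essentially verbatim into backend/config.py; the behaviour is unchanged. For reference, a standalone sketch of the M3U text that playlist loop produces, with made-up server URLs and a made-up sources.json payload (only the #EXTINF format strings come from the code above):

# Re-creates the playlist loop from config.py with fake inputs.
import json

icecast_server = "https://icecast.example.org/"  # assumed example value
stream_server = "https://stream.example.org/"    # assumed example value
data = json.loads('{"radio1": {"name": "Radio One", "radio": true},'
                  ' "cam1": {"name": "Camera One", "radio": false}}')

playlist = "#EXTM3U\n"
for key in data:
    current = data[key]
    name = current["name"]
    if current["radio"]:
        playlist += f'#EXTINF:0 radio="true", {name}\n' + icecast_server + key + "\n"
    else:
        playlist += f'#EXTINF:0 radio="false", {name}\n' + stream_server + key + "\n"

print(playlist)
# #EXTM3U
# #EXTINF:0 radio="true", Radio One
# https://icecast.example.org/radio1
# #EXTINF:0 radio="false", Camera One
# https://stream.example.org/cam1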