add some type info

commit b89f2bccab
parent d7bab22280
.env (new file, 10 lines)
@@ -0,0 +1,10 @@
+export NEXTCLOUD_SERVER=https://nx12210.your-storageshare.de
+export SEAFILE_SERVER=https://seafile.purser.it
+export PROXY_SERVER=http://127.0.0.1:8080
+export YOUTUBE_PROXY_FR0=fr4-wg.socks5.mullvad.net:1080
+export YOUTUBE_PROXY_FR1=fr5-wg.socks5.mullvad.net:1080
+export YOUTUBE_REGION_FR0=MULLVAD_FR0
+export YOUTUBE_REGION_FR1=MULLVAD_FR1
+export ICECAST_SERVER=https://icecast.purser.it:7000
+export STREAM_SERVER=https://stream.purser.it
+
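These values are read through os.environ in the configuration code below, so the file is presumably exported (sourced) into the service's environment before launch rather than parsed directly. A minimal sketch of the read pattern, using names from this file:

    import os

    # Single-valued settings: a missing variable simply yields None.
    icecast_server = os.environ.get("ICECAST_SERVER")

    # Proxy/region variables come in pairs that share a suffix:
    # YOUTUBE_PROXY_FR0 pairs with YOUTUBE_REGION_FR0, and so on.
    proxy_fr0 = os.environ.get("YOUTUBE_PROXY_FR0")
    region_fr0 = os.environ.get("YOUTUBE_REGION_FR0")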
.pylintrc (new file, 5 lines)
@@ -0,0 +1,5 @@
+[MASTER]
+disable = C0103, C0114, C0115, C0116
+
+[FORMAT]
+max-line-length=120
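For reference, the disabled pylint messages are C0103 (invalid-name), C0114 (missing-module-docstring), C0115 (missing-class-docstring) and C0116 (missing-function-docstring), and max-line-length raises pylint's default 100-character limit to 120, which accommodates the long set_option and f-string lines elsewhere in this commit.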
@@ -1,19 +1,22 @@
 import json
-import sys
-import pprint
-import os
-import re
-import base64
 import logging
-import distutils.util
-import asyncio
-import tornado.web
-import tornado.routing
-import config
-import stream_providers
-import aiohttp
+import os
+import pprint
+import re
+import sys
 
-providers = {}
+import aiohttp
+import streamlink
+from tornado import template
+
+logging.basicConfig(
+    format="[%(filename)s:%(lineno)d] %(message)s",
+    stream=sys.stdout,
+    level=logging.INFO,
+)
+logger = logging.getLogger(__name__)
+
+providers: dict[str, str] = {}
 providers["nrk"] = "https://tv.nrk.no"
 providers["nrk_web"] = "https://nrk.no"
 providers["svt"] = "https://svtplay.se"
@@ -34,15 +37,19 @@ icecast_server = os.environ.get("ICECAST_SERVER")
 stream_server = os.environ.get("STREAM_SERVER")
 proxy_server = os.environ.get("PROXY_SERVER")
 
-class ProxyElem():
+
+class ProxyElem:
     def __init__(self, proxy, region):
         self.proxy = proxy
         self.region = region
 
     def local(self):
         timeout = aiohttp.ClientTimeout(total=5)
         return aiohttp.ClientSession(timeout=timeout)
 
     def __repr__(self):
         return f"<region=<{str(self.region)}> proxy=<{str(self.proxy)}>>"
 
     async def proxy_url(self, urls):
         clean_urls = []
         for url in urls:
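ProxyElem.local() ignores self.proxy and just hands back a plain aiohttp.ClientSession with a 5-second total timeout. A hypothetical caller (illustrative only, not part of the commit) would use it as an async context manager:

    import asyncio

    import config

    async def fetch_status(url: str) -> int:
        # ProxyElem(None, None) is the "no proxy" element the config code also uses as a fallback.
        async with config.ProxyElem(None, None).local() as session:
            async with session.get(url) as resp:
                return resp.status

    # asyncio.run(fetch_status("https://example.org"))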
@@ -85,12 +92,14 @@ class ProxyElem():
         else:
             return clean_urls
 
-proxies = {}
+
+proxies: dict[str, list[ProxyElem]] = {}
+
 new_providers = {}
 for key in providers:
-    region_expr = re.compile(f'^{key}_region(_[a-z][a-z])?[0-9]+$', re.IGNORECASE)
+    region_expr = re.compile(f"^{key}_region(_[a-z][a-z])?[0-9]+$", re.IGNORECASE)
     region_matches = list(filter(region_expr.match, os.environ.keys()))
-    proxy_expr = re.compile(f'^{key}_proxy(_[a-z][a-z])?[0-9]+$', re.IGNORECASE)
+    proxy_expr = re.compile(f"^{key}_proxy(_[a-z][a-z])?[0-9]+$", re.IGNORECASE)
     proxy_matches = list(filter(proxy_expr.match, os.environ.keys()))
     proxy_current = []
     proxy_current_keys = set()
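Only the quoting changes here; the patterns themselves are untouched. They are matched against os.environ.keys() to discover numbered, optionally country-suffixed variables like the ones in the new .env. A quick sketch of what the proxy pattern accepts (with "youtube" standing in for the interpolated provider key):

    import re

    proxy_expr = re.compile("^youtube_proxy(_[a-z][a-z])?[0-9]+$", re.IGNORECASE)

    m = proxy_expr.match("YOUTUBE_PROXY_FR0")
    print(m is not None)   # True
    print(m.group(1))      # "_FR" -- the country suffix, stripped of "_" later in the loop
    print(proxy_expr.match("YOUTUBE_PROXY_BACKUP"))  # None: no trailing index digit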
@@ -112,10 +121,10 @@ for key in providers:
         country_temp = proxy_country_groups[pos]
         if isinstance(country_temp, str):
             proxy_country = country_temp.strip("_")
-            proxy_current_keys.add(f'{key}_{proxy_country}')
-        proxy = os.environ.get(match)
-        if proxy is not None:
-            proxy_current.append(proxy)
+            proxy_current_keys.add(f"{key}_{proxy_country}")
+        proxy_link = os.environ.get(match)
+        if proxy_link is not None:
+            proxy_current.append(proxy_link)
             proxy_countries.append(proxy_country)
             if proxy_country is None:
                 proxy_empty = False
@@ -128,10 +137,10 @@ for key in providers:
         country_temp = region_country_groups[pos]
         if isinstance(country_temp, str):
             region_country = country_temp.strip("_")
-            region_current_keys.add(f'{key}_{region_country}')
-        region = os.environ.get(match)
-        if region is not None:
-            region_current.append(region)
+            region_current_keys.add(f"{key}_{region_country}")
+        region_name = os.environ.get(match)
+        if region_name is not None:
+            region_current.append(region_name)
             region_countries.append(region_country)
             if region_country is None:
                 region_empty = False
@@ -140,30 +149,34 @@ for key in providers:
         proxies[elem] = []
         new_providers[elem] = providers[key]
 
-    for proxy, region, country in zip(proxy_current, region_current, proxy_countries):
+    for proxy_link, region_name, country in zip(proxy_current, region_current, proxy_countries):
         new_key = key
         if country is not None:
-            new_key = f'{key}_{country}'
-        proxies[new_key].append(ProxyElem(proxy, region))
+            new_key = f"{key}_{country}"
+        proxies[new_key].append(ProxyElem(proxy_link, region_name))
 
     for elem in proxy_current_keys:
         if len(proxies[elem]) == 0:
             proxies[elem].append(ProxyElem(None, None))
 
 pprint.pp(proxies)
 
 providers = new_providers
 
-proxy_keys = []
+streamlink_sessions: dict[str, streamlink.session.Streamlink] = {}
+streamlink_default_session = streamlink.Streamlink()
+
+proxy_keys: list[str] = []
 for proxy_provider in proxies.values():
-    for proxy in proxy_provider:
-        if isinstance(proxy, ProxyElem) and isinstance(proxy.proxy, str):
-            proxy_keys.append(proxy.proxy)
-stream_providers.setup(proxy_keys)
+    for proxy_elem in proxy_provider:
+        if isinstance(proxy_elem, ProxyElem) and isinstance(proxy_elem.proxy, str):
+            proxy_keys.append(proxy_elem.proxy)
+            streamlink_sessions[proxy_elem.proxy] = streamlink.Streamlink()
+            streamlink_sessions[proxy_elem.proxy].set_option("http-proxy", "socks5://" + proxy_elem.proxy)
+            streamlink_sessions[proxy_elem.proxy].set_option("https-proxy", "socks5://" + proxy_elem.proxy)
 
 if icecast_server is not None and stream_server is not None:
     try:
-        with open("/app/sources.json", "r") as f:
+        with open("/app/sources.json", "r", encoding="utf-8") as f:
             data = json.loads(f.read())
             playlist = "#EXTM3U\n"
             for key in data:
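The per-proxy Streamlink sessions are now built once here at import time, keyed by the socks5 address, instead of being handed to stream_providers through a setup() call (removed further down). A rough sketch of how a consumer picks a session, mirroring the StreamlinkRunner hunk at the end of this diff (pick_session is a hypothetical helper, not code from the commit):

    import config

    def pick_session(proxy_elem):
        # Fall back to the shared default session when the element carries no proxy.
        if proxy_elem is None or proxy_elem.proxy is None:
            return config.streamlink_default_session
        return config.streamlink_sessions.get(proxy_elem.proxy, config.streamlink_default_session)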
@@ -176,7 +189,7 @@ if icecast_server is not None and stream_server is not None:
             else:
                 playlist += f'#EXTINF:0 radio="false", {name}\n'
             playlist += stream_server + key + "\n"
-    except Exception as e:
+    except OSError as e:
         logger.info(e)
 
 template_html = None
@@ -186,21 +199,19 @@ font_awesome_version = None
 custom_style = None
 favicon = None
 try:
-    with open("/app/index.html", "r") as f:
-        template_html = tornado.template.Template(f.read().strip())
-    with open("/app/script.js", "r") as f:
-        template_script = tornado.template.Template(f.read().strip())
-    with open("/app/version/video.js.txt", "r") as f:
+    with open("/app/index.html", "r", encoding="utf-8") as f:
+        template_html = template.Template(f.read().strip())
+    with open("/app/script.js", "r", encoding="utf-8") as f:
+        template_script = template.Template(f.read().strip())
+    with open("/app/version/video.js.txt", "r", encoding="utf-8") as f:
         videojs_version = f.read().strip()
-    with open("/app/version/chromecast.txt", "r") as f:
+    with open("/app/version/chromecast.txt", "r", encoding="utf-8") as f:
         chromecast_version = f.read().strip()
-    with open("/app/version/font-awesome.txt", "r") as f:
+    with open("/app/version/font-awesome.txt", "r", encoding="utf-8") as f:
         font_awesome_version = f.read().strip()
-    with open("/app/favicon.png", "rb") as f:
-        favicon = f.read()
-    with open("/app/style.css", "r") as f:
+    with open("/app/favicon.png", "rb") as fb:
+        favicon = fb.read()
+    with open("/app/style.css", "r", encoding="utf-8") as f:
         custom_style = f.read()
-except Exception as e:
+except OSError as e:
     logger.info(e)
-
-
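Two small behavioural notes on the file-loading changes: the explicit encoding="utf-8" makes the reads independent of the platform's default encoding (and satisfies pylint's unspecified-encoding check), and narrowing except Exception to except OSError means only I/O failures such as a missing file are logged and swallowed; any other error now propagates.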
@@ -11,17 +11,18 @@ import tornado.routing
 import config
 import stream_providers
 import aiohttp
+from typing import Optional
 
 logging.basicConfig(format='[%(filename)s:%(lineno)d] %(message)s', stream=sys.stdout, level=logging.INFO)
 logger = logging.getLogger(__name__)
 class UpstreamHandler():
     def __init__(self):
-        self.provider = None
-        self.raw = False
-        self.valid = False
-        self.proxy = config.ProxyElem(None, None)
-        self.direct = False
-        self.upstream = None
+        self.provider: Optional[str] = None
+        self.raw: bool = False
+        self.valid: bool = False
+        self.proxy: config.ProxyElem = config.ProxyElem(None, None)
+        self.direct: bool = False
+        self.upstream: Optional[str] = None
     async def test_socks(self, proxy):
         if not hasattr(proxy, "proxy") or not isinstance(proxy.proxy, str):
             return (True, config.ProxyElem(None, None))
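The Optional[str] annotations lean on the new typing import; on Python 3.10+ the same thing could be spelled str | None, but the built-in-generic annotations added in config (dict[str, ...], list[str]) already pin the floor at 3.9, so Optional keeps this file working there without bumping the requirement to 3.10.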
@@ -78,6 +79,7 @@ class UpstreamHandler():
         if not future.done():
             future.cancel()
 
+
 class MainHandler(tornado.web.RequestHandler):
     async def handle_any(self, redir):
         handler = UpstreamHandler()
@@ -1,10 +1,6 @@
 import youtube_dl
 
-try:
-    import streamlink
-except Exception as e:
-    print(e)
-
+import streamlink
 import requests
 import asyncio
 import html.parser
@@ -15,21 +11,6 @@ import json
 import re
 import config
 
-try:
-    streamlink_sessions = {}
-    streamlink_default_session = streamlink.Streamlink()
-except Exception as e:
-    print(e)
-
-def setup(proxies):
-    try:
-        for proxy in proxies:
-            streamlink_sessions[proxy] = streamlink.Streamlink()
-            streamlink_sessions[proxy].set_option("http-proxy", "socks5://" + proxy)
-            streamlink_sessions[proxy].set_option("https-proxy", "socks5://" + proxy)
-    except Exception as e:
-        print(e)
-
 class DummyLogger():
     def debug(self, msg):
         pass
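With the sessions owned by config (see the streamlink_sessions hunk above), both the import-time try/except table and the setup() hook disappear from stream_providers, and config in turn drops its import stream_providers / stream_providers.setup(proxy_keys) call, leaving a single one-way dependency from stream_providers to config.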
@@ -189,9 +170,9 @@ class StreamlinkRunner(StreamProvider):
         try:
             session = None
             if self.proxy is None or self.proxy.proxy is None:
-                session = streamlink_default_session
+                session = config.streamlink_default_session
             else:
-                session = streamlink_sessions.get(self.proxy.proxy)
+                session = config.streamlink_sessions.get(self.proxy.proxy)
             media = session.resolve_url(self.upstream)
             streams = None
             if isinstance(media, tuple):