2021-04-30 08:49:10 +00:00
|
|
|
#!/usr/bin/env python3

import asyncio
import base64
import html.parser
import json
import logging
import os
import re
import sys
import time
import urllib.parse

import aiohttp
import aiohttp_socks
import tornado.ioloop
import tornado.routing
import tornado.template
import tornado.web

import stream_providers
|
2021-05-13 07:16:47 +00:00
|
|
|
|
2021-05-14 16:57:26 +00:00
|
|
|
logging.basicConfig(format='[%(filename)s:%(lineno)d] %(message)s', stream=sys.stdout, level=logging.INFO)
logger = logging.getLogger(__name__)

# Supported providers and the upstream base URL each one maps to.
providers = {
    "nrk": "https://tv.nrk.no",
    "svt": "https://svtplay.se",
    "youtube": "https://www.youtube.com/watch?v=",
    "twitch": "https://twitch.tv",
}

# Nextcloud is only offered when a server is configured in the environment.
nextcloud_server = os.environ.get("NEXTCLOUD_SERVER")
if nextcloud_server is not None:
    providers["nextcloud"] = nextcloud_server

# Filled in at import time from /app/sources.json when the icecast/stream
# servers are configured; stays None otherwise.
playlist = None

icecast_server = os.environ.get("ICECAST_SERVER")
stream_server = os.environ.get("STREAM_SERVER")
proxy_server = os.environ.get("PROXY_SERVER")
|
2021-04-30 08:49:10 +00:00
|
|
|
|
2021-05-11 12:00:43 +00:00
|
|
|
class ProxyElem():
    """Wraps an optional SOCKS5 proxy address and builds aiohttp sessions for it."""

    def __init__(self, proxy):
        # proxy: "host:port" of a SOCKS5 proxy, or None for a direct connection.
        self.proxy = proxy

    def local(self):
        """Short-timeout session for talking to the local proxy-registration server."""
        return aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=1))

    def session(self):
        """Session for upstream requests, routed through the SOCKS5 proxy when set."""
        connector = None
        if self.proxy is not None:
            connector = aiohttp_socks.ProxyConnector.from_url("socks5://" + self.proxy)
        return aiohttp.ClientSession(connector=connector, timeout=aiohttp.ClientTimeout(total=2))

    def __repr__(self):
        return str(self.proxy)

    async def content_type(self, url):
        """HEAD the URL (up to five attempts) and return its Content-Type header.

        Falls back to "binary/octet-type" when no attempt yields a string header.
        """
        async with self.session() as session:
            for _attempt in range(5):
                try:
                    resp = await session.head(url)
                    found = resp.headers.get("Content-Type", None)
                except Exception as exc:
                    logger.info(exc)
                    continue
                if isinstance(found, str):
                    return found
        return "binary/octet-type"

    async def proxy_url(self, current, path):
        """Register an upstream URL with the proxy server; return the proxied URL.

        path, when given, is resolved relative to current.  When no
        PROXY_SERVER is configured, or registration fails, the upstream URL
        is returned unchanged.
        """
        target = current if path is None else urllib.parse.urljoin(current, path)
        payload = {"upstream": target, "proxied": self.proxy is not None}
        if self.proxy is not None:
            payload["proxy"] = self.proxy
        if proxy_server is None:
            return target
        resolved = None
        try:
            async with self.local() as session:
                resp = await session.post(proxy_server, json=[payload])
                resolved = json.loads(await resp.text())
        except Exception as exc:
            logger.info(exc)
        if isinstance(resolved, list) and len(resolved) == 1:
            return resolved[0]
        return target
|
2021-05-14 13:10:12 +00:00
|
|
|
|
2021-05-14 17:57:08 +00:00
|
|
|
class AsyncSessionData():
    """Value object pairing a response (or pending request) with its ProxyElem."""

    def __init__(self, resp, current):
        self.resp, self.current = resp, current
|
2021-05-14 17:57:08 +00:00
|
|
|
class AsyncSession():
    """Awaitable wrapper that remembers which ProxyElem issued a request."""

    def __init__(self, resp, current):
        self.sdata = AsyncSessionData(resp, current)

    async def task(self):
        """Await the pending request; return it re-bundled with its ProxyElem."""
        completed = await self.sdata.resp
        return AsyncSessionData(completed, self.sdata.current)
|
2021-05-14 09:31:30 +00:00
|
|
|
|
2021-05-11 12:00:43 +00:00
|
|
|
# Per-provider proxy pool: scan PROVIDER_PROXY0..PROVIDER_PROXY8 in the
# environment.  A provider with no configured proxies gets a single direct
# (proxy-less) entry so the proxy-racing logic still has one candidate.
proxies = {}
for key in providers:
    configured = []
    for slot in range(9):
        candidate = os.environ.get(f'{key}_proxy{slot}'.upper())
        if candidate is not None:
            configured.append(candidate)
    if configured:
        proxies[key] = [ProxyElem(addr) for addr in configured]
    else:
        proxies[key] = [ProxyElem(None)]
|
2021-04-30 11:55:10 +00:00
|
|
|
|
2021-05-24 18:59:35 +00:00
|
|
|
class MetaParser(html.parser.HTMLParser):
    """Collects og:title / og:description / og:image from a page's <meta> tags."""

    def __init__(self):
        self.meta_data = {}
        self.accepted_attrs = ["og:title", "og:description", "og:image"]
        super().__init__()

    def handle_starttag(self, tag, attrs):
        if tag != "meta":
            return
        name = None
        # Iterate the attributes twice so a content= attribute that appears
        # before its property= attribute is still picked up on the second pass.
        for attr in (attrs + attrs):
            if len(attr) != 2:
                continue
            if isinstance(name, str):
                if attr[0] == "content":
                    self.meta_data[name] = attr[1]
                    return
            elif attr[0] == "property" and attr[1] in self.accepted_attrs:
                name = attr[1]
|
|
|
|
|
2021-05-14 09:31:30 +00:00
|
|
|
class UpstreamHandler():
    """Resolves a request (provider + path) to a reachable upstream URL.

    setup() races one HEAD request per configured proxy for the provider and
    keeps the first proxy that answers, together with the (possibly
    redirected) upstream URL.  meta() then collects OpenGraph/noembed
    metadata for that upstream through the chosen proxy.
    """

    def __init__(self):
        # "provider" query argument; must be a key of the global `providers`.
        self.provider = None
        self.render_url = None
        self.stream_url = None
        # ProxyElem that successfully reached the upstream (set by setup()).
        self.proxy = None
        # Resolved upstream URL and its percent-encoded form.
        self.upstream = None
        self.upstream_safe = None
        # Mode flags: render the HTML player page vs. resolve/serve the
        # stream.  For a valid provider setup() sets exactly one of them.
        self.render = False
        self.stream = False

    async def setup(self, handler):
        """Parse the request and race every proxy of the provider for the upstream.

        handler: the tornado RequestHandler of the current request.
        """
        self.provider = handler.get_query_argument("provider", None)
        render_str = handler.get_query_argument("render", "false")
        if self.provider in providers:
            if render_str.lower() == "true":
                self.render = True
            else:
                self.stream = True

            path = handler.request.path
            if self.provider == "nextcloud":
                # Normalize share links: strip a trailing "/download" (with or
                # without a trailing slash); it is re-appended when streaming.
                path = path.removesuffix("/").removesuffix("download").removesuffix("/")
            elif self.provider == "youtube":
                # The base URL already ends in "watch?v=", so drop the slash.
                path = path.removeprefix("/")

            src = providers[self.provider] + path
            proxy_list = proxies.get(self.provider)
            if isinstance(proxy_list, list):
                # Start one HEAD request per proxy, all in parallel.
                futures = []
                sessions = []
                for proxy_elem in proxy_list:
                    session = proxy_elem.session()
                    sessions.append(session)
                    future = AsyncSession(session.head(src), proxy_elem)
                    task = asyncio.create_task(future.task())
                    futures.append(task)
                # Keep the first proxy whose HEAD request succeeds.
                for future in asyncio.as_completed(futures):
                    try:
                        result = await future
                        resp = result.resp
                    except Exception as e:
                        logger.info(e)
                    else:
                        new_url = str(resp.url)
                        if new_url.lower().startswith("https://consent.youtube.com"):
                            # YouTube consent interstitial: keep the original
                            # URL rather than the redirect target.
                            self.upstream = src
                            self.upstream_safe = urllib.parse.quote(src)
                        else:
                            self.upstream = new_url
                            self.upstream_safe = urllib.parse.quote(new_url)
                        self.proxy = result.current
                        break
                # Cancel the losers and release every session.
                for future in futures:
                    if not future.done():
                        future.cancel()
                for session in sessions:
                    await session.close()

    async def meta(self):
        """Return OpenGraph-style metadata for the resolved upstream.

        Combines noembed.com data with <meta> tags scraped from the upstream
        page; og:* tags scraped from the page take precedence.  For YouTube,
        tries to upgrade the thumbnail to maxresdefault/sddefault.  Returns
        {} on any failure.
        """
        data = {}
        try:
            embed_url = f'https://noembed.com/embed?url={self.upstream_safe}'
            async with self.proxy.session() as session:
                # Fire both requests before awaiting either, so they overlap.
                resp_embed_future = session.get(embed_url)
                resp_upstream_future = session.get(self.upstream)
                resp_embed = await resp_embed_future
                resp_upstream = await resp_upstream_future
                text_embed_future = resp_embed.text()
                text_upstream_future = resp_upstream.text()
                text_embed = await text_embed_future
                text_upstream = await text_upstream_future
                parser = MetaParser()
                parser.feed(text_upstream)
                data_raw = json.loads(text_embed)
                if isinstance(data_raw, dict):
                    # Map noembed fields onto og:* keys, dropping non-strings.
                    candidates = {
                        "og:title": data_raw.get("title"),
                        "og:description": data_raw.get("author_name"),
                        "og:image": data_raw.get("thumbnail_url"),
                    }
                    data_filtered = {key: value for key, value in candidates.items()
                                     if isinstance(value, str)}
                    # Scraped og:* tags win over noembed values.
                    data_filtered.update(parser.meta_data)
                    data = data_filtered
                image = data.get("og:image")
                if isinstance(image, str):
                    if self.provider == "youtube":
                        # Probe for higher-resolution thumbnail variants.
                        full_image = re.sub(r'\/[a-zA-Z0-9]+\.([a-zA-Z0-9]+)$', r'/maxresdefault.\1', image)
                        standard_image = re.sub(r'\/[a-zA-Z0-9]+\.([a-zA-Z0-9]+)$', r'/sddefault.\1', image)
                        image_status_standard_future = None
                        image_status_full_future = None
                        if full_image != image:
                            image_status_full_future = session.head(full_image)
                        # FIX: this guard previously re-tested
                        # `full_image != image`, so the sddefault probe was
                        # gated on the wrong variant.
                        if standard_image != image:
                            image_status_standard_future = session.head(standard_image)
                        image_status_full = None
                        image_status_standard = None
                        if image_status_standard_future is not None:
                            image_status_standard = await image_status_standard_future
                        if image_status_full_future is not None:
                            image_status_full = await image_status_full_future
                        # Prefer maxresdefault, then sddefault, else keep as-is.
                        if hasattr(image_status_full, "status") and (image_status_full.status < 400):
                            data["og:image"] = full_image
                        elif hasattr(image_status_standard, "status") and (image_status_standard.status < 400):
                            data["og:image"] = standard_image
        except Exception as e:
            logger.info(e)
        return data
|
2021-05-06 13:54:18 +00:00
|
|
|
|
2021-05-06 13:23:43 +00:00
|
|
|
# Build the M3U playlist served at /sources.m3u8 once at import time.
# Requires both ICECAST_SERVER and STREAM_SERVER to be configured; on any
# failure the playlist simply stays None (FileHandler then has nothing to serve).
if icecast_server is not None and stream_server is not None:
    try:
        # sources.json: mapping of stream key -> {"name": ..., "radio": bool}.
        with open("/app/sources.json", "r") as f:
            data = json.loads(f.read())
            playlist = "#EXTM3U\n"
            for key in data:
                current = data[key]
                name = current["name"]
                radio = current["radio"]
                # Radio entries point at the icecast server, TV entries at the
                # stream server; the key doubles as the mount/path suffix.
                if radio:
                    playlist += f'#EXTINF:0 radio="true", {name}\n'
                    playlist += icecast_server + key + "\n"
                else:
                    playlist += f'#EXTINF:0 radio="false", {name}\n'
                    playlist += stream_server + key + "\n"
    except Exception as e:
        # Best-effort: a missing or malformed sources.json disables the playlist.
        logger.info(e)
|
2021-05-06 13:23:43 +00:00
|
|
|
|
2021-05-12 07:50:51 +00:00
|
|
|
# UI assets, loaded once at startup.  Each stays None (and the matching
# feature degrades gracefully) when its file cannot be read.
template_html = None
script_file = None
videojs_version = None
# FIX: chromecast_version was never pre-initialized, so a read failure before
# /app/version/chromecast.txt left the name undefined and handle_render later
# raised NameError instead of rendering with a missing version.
chromecast_version = None
font_awesome_version = None
custom_style = None
favicon = None
try:
    with open("/app/index.html", "r") as f:
        # NOTE(review): tornado.template is only in scope because tornado.web
        # imports it transitively — consider importing it explicitly.
        template_html = tornado.template.Template(f.read().strip())
    # The player script and stylesheet are embedded as base64 data: URLs so
    # the rendered page is fully self-contained.
    with open("/app/script.js", "r") as f:
        script_raw = bytes(f.read().strip(), "utf-8")
        b64 = str(base64.b64encode(script_raw), "ascii")
        script_file = f'data:text/javascript;charset=utf-8;base64,{b64}'
    with open("/app/version/video.js.txt", "r") as f:
        videojs_version = f.read().strip()
    with open("/app/version/chromecast.txt", "r") as f:
        chromecast_version = f.read().strip()
    with open("/app/version/font-awesome.txt", "r") as f:
        font_awesome_version = f.read().strip()
    with open("/app/favicon.png", "rb") as f:
        favicon = f.read()
    with open("/app/style.css", "r") as f:
        custom_style_raw = bytes(f.read().strip(), "utf-8")
        b64 = str(base64.b64encode(custom_style_raw), "ascii")
        custom_style = f'data:text/css;charset=utf-8;base64,{b64}'
except Exception as e:
    # Best-effort: any asset that failed to load simply stays None.
    logger.info(e)
|
2021-05-10 15:40:38 +00:00
|
|
|
|
2021-05-26 17:40:31 +00:00
|
|
|
async def rewrite(upstream, current, proxy):
    """Fetch an HLS (M3U8) playlist and rewrite its URIs to go through the proxy server.

    upstream: proxied URL to fetch the playlist text from.
    current:  original upstream URL, used as the base for resolving relative URIs.
    proxy:    ProxyElem whose session/proxy settings apply to this provider.

    Returns the rewritten playlist text, or None when the playlist could not
    be fetched or no PROXY_SERVER is configured (callers then redirect instead).
    """
    ndata = None
    text = None
    try:
        async with proxy.session() as session:
            resp = await session.get(upstream)
            text = await resp.text()
    except Exception as e:
        logger.info(e)
    if text is not None:
        lines = text.splitlines()
        links = []
        # First pass: collect every URI that must be registered with the
        # proxy server — encryption-key URIs and plain segment lines.
        for line in lines:
            if line.startswith("#EXT-X-KEY:METHOD="):
                matches = re.findall(r'(?<=URI=").+(?=")', line)
                if len(matches) == 1:
                    ldata = {}
                    ldata["upstream"] = urllib.parse.urljoin(current, matches[0])
                    ldata["proxy"] = proxy.proxy
                    ldata["proxied"] = isinstance(proxy.proxy, str)
                    links.append(ldata)
            elif line.startswith("#"):
                # Other playlist directives pass through untouched.
                pass
            else:
                ldata = {}
                ldata["upstream"] = urllib.parse.urljoin(current, line)
                ldata["proxy"] = proxy.proxy
                ldata["proxied"] = isinstance(proxy.proxy, str)
                links.append(ldata)
        if isinstance(proxy_server, str):
            ndata = ""
            # Register all collected URIs in one batch; the proxy server is
            # expected to answer with a same-length, same-order list of URLs.
            try:
                async with proxy.local() as session:
                    resp = await session.post(proxy_server, json=links)
                    link_text = await resp.text()
            except Exception as e:
                logger.info(e)
            else:
                if isinstance(link_text, str):
                    links = json.loads(link_text)
            # NOTE(review): if the POST above fails, `links` still holds the
            # request dicts, and the `links.pop(0)` string concatenations
            # below raise TypeError — confirm whether returning None would be
            # the intended behavior in that case.
            # Second pass: emit the playlist, consuming the proxied URLs in
            # the exact order they were collected.
            for line in lines:
                if line.startswith("#EXT-X-KEY:METHOD="):
                    matches = re.findall(r'(?<=URI=").+(?=")', line)
                    if len(matches) == 1:
                        new_url = links.pop(0)
                        ndata += re.sub(r'URI=".+"', f'URI="{new_url}"', line)
                    # NOTE(review): a key line whose URI does not match the
                    # pattern is silently dropped here (only the trailing
                    # newline below is kept) — confirm intended.
                elif line.startswith("#"):
                    ndata += line
                else:
                    ndata += links.pop(0)
                ndata += "\n"
    return ndata
|
2021-05-15 08:18:10 +00:00
|
|
|
|
2021-04-30 08:49:10 +00:00
|
|
|
class MainHandler(tornado.web.RequestHandler):
    """Catch-all handler: renders the player page or resolves/serves a stream."""

    async def handle_any(self, redir):
        """Dispatch a request.

        redir: True for GET (redirect to the media URL); False for HEAD
        (probe only, expose metadata via headers).
        """
        handler = UpstreamHandler()
        await handler.setup(self)
        if handler.render:
            await self.handle_render(handler)
        elif handler.stream:
            await self.handle_stream(handler, redir)
        else:
            logger.info(f'provider missing {self.request.uri}')
            self.set_status(404)
            self.write("Stream not found. (provider missing)")

    async def handle_render(self, handler):
        """Serve the HTML player page for the resolved upstream."""
        if script_file is not None and template_html is not None:
            meta = await handler.meta()
            meta_list = list(meta.items())
            title = meta.get("og:title")
            # FIX: `data` was never initialized here; the subscript
            # assignments below either raised NameError or silently mutated
            # the module-level `data` dict left over from the playlist loader.
            data = {}
            data["script"] = script_file
            data["videojs_version"] = videojs_version
            data["chromecast_version"] = chromecast_version
            data["font_awesome_version"] = font_awesome_version
            data["custom_style"] = custom_style
            rendered_html = template_html.generate(data=data, meta=meta_list, title=title)
            self.write(rendered_html)
        else:
            self.set_status(404)
            self.write("HTML template missing.")

    async def handle_stream(self, handler, redir):
        """Resolve the playable media URL; redirect to it (GET) or rewrite HLS inline."""
        if handler.provider == "nextcloud":
            # Nextcloud share links expose the raw file under .../download.
            upstream = handler.upstream + "/download"
        else:
            if not redir:
                # HEAD probe: expose the poster image via a custom header.
                meta = await handler.meta()
                image = meta.get("og:image")
                if isinstance(image, str):
                    image = await handler.proxy.proxy_url(image, None)
                    if isinstance(image, str):
                        self.set_header("Custom-Poster", image)
            upstream = await stream_providers.get_any(handler.upstream, handler.proxy, logger)
        if upstream is None:
            logger.info(f'invalid upstream ({handler.provider})')
            self.set_status(404)
            self.write("Stream not found. (invalid upstream)")
        else:
            upstream_proxy = await handler.proxy.proxy_url(upstream, None)
            ctype = await handler.proxy.content_type(upstream_proxy)
            data = None
            if "mpegurl" in ctype.lower():
                # HLS playlist: rewrite segment/key URIs through the proxy.
                data = await rewrite(upstream_proxy, upstream, handler.proxy)
            if isinstance(data, str):
                self.set_header("Content-Type", "application/vnd.apple.mpegurl")
                self.write(data)
            else:
                self.set_header("Content-Type", ctype)
                if redir:
                    self.redirect(upstream_proxy, status=303)

    async def get(self):
        await self.handle_any(True)

    async def head(self):
        await self.handle_any(False)
|
2021-05-01 18:01:24 +00:00
|
|
|
|
|
|
|
class FileHandler(tornado.web.RequestHandler):
    """Serves the pre-built M3U source playlist at /sources.m3u8."""

    def get(self):
        # FIX: `playlist` is None when ICECAST_SERVER/STREAM_SERVER are not
        # configured or /app/sources.json failed to load; writing None
        # previously raised and produced an HTTP 500.
        if playlist is None:
            self.set_status(404)
            self.write("Playlist not available.")
            return
        self.set_header("Content-Type", "text/plain; charset=utf-8")
        self.write(playlist)
|
2021-05-13 07:16:47 +00:00
|
|
|
class IconHandler(tornado.web.RequestHandler):
    """Serves the favicon bytes read from /app/favicon.png at startup."""

    def get(self):
        # FIX: `favicon` is None when /app/favicon.png could not be read at
        # startup; writing None previously raised and produced an HTTP 500.
        if favicon is None:
            self.set_status(404)
            return
        self.set_header("Content-Type", "image/png")
        self.write(favicon)
|
2021-04-30 08:49:10 +00:00
|
|
|
try:
    # Route table: explicit paths first, MainHandler catches everything else.
    routes = [
        (tornado.routing.PathMatches("/sources.m3u8"), FileHandler),
        (tornado.routing.PathMatches("/favicon.ico"), IconHandler),
        (tornado.routing.AnyMatches(), MainHandler),
    ]
    app_web = tornado.web.Application(routes)
    app_web.listen(8080)
    tornado.ioloop.IOLoop.current().start()
except KeyboardInterrupt:
    # Emit a newline so the ^C does not mangle the shell prompt.
    print()