add seafile support
This commit is contained in:
parent
1caea2792e
commit
02a9b5e470
@ -25,6 +25,10 @@ nextcloud_server = os.environ.get("NEXTCLOUD_SERVER")
|
||||
# Optional stream providers are enabled by exporting their server URL.
seafile_server = os.environ.get("SEAFILE_SERVER")
for provider_name, provider_server in (
    ("nextcloud", nextcloud_server),
    ("seafile", seafile_server),
):
    if provider_server is not None:
        providers[provider_name] = provider_server
|
||||
|
||||
playlist = None
|
||||
icecast_server = os.environ.get("ICECAST_SERVER")
|
||||
stream_server = os.environ.get("STREAM_SERVER")
|
||||
@ -218,6 +222,8 @@ class MainHandler(tornado.web.RequestHandler):
|
||||
provider_data = None
|
||||
if handler.provider.startswith("nextcloud"):
|
||||
provider_data = await stream_providers.get_nextcloud(handler.upstream, handler.proxy, logger)
|
||||
elif handler.provider.startswith("seafile"):
|
||||
provider_data = await stream_providers.get_seafile(handler.upstream, handler.proxy, logger)
|
||||
else:
|
||||
provider_data = await stream_providers.get_any(handler.upstream, handler.proxy, logger)
|
||||
proxied = await handler.proxy.proxy_url([provider_data.upstream(), provider_data.thumbnail()])
|
||||
|
@ -6,6 +6,7 @@ import html.parser
|
||||
import urllib.parse
|
||||
import expiringdict
|
||||
import json
|
||||
import re
|
||||
|
||||
streamlink_sessions = {}
|
||||
streamlink_default_session = streamlink.Streamlink()
|
||||
@ -222,6 +223,40 @@ class YoutubeRunner(StreamProvider):
|
||||
self.logger.info("%s <%s>", e, self.upstream)
|
||||
return StreamData(**best_stream)
|
||||
|
||||
class SeafileRunner(StreamProvider):
    """Extract a title and raw download URL from a Seafile shared-link page.

    Seafile embeds file metadata in an inline ``pageOptions`` JavaScript
    object; this runner scrapes the page, massages the JS into parseable
    JSON, and pulls out the ``filePath``/``rawPath`` fields.
    """

    # Sentinel that marks the end of each inline <script> block so the
    # pageOptions regex can stop at the script boundary.
    _SCRIPT_END = "\u1354"

    # Patterns are compiled once at class-creation time (the original
    # recompiled them on every call) and use raw strings — the old
    # non-raw literals ('\{', "\/\/", '\(', "\|") rely on invalid escape
    # sequences that newer Pythons warn about.
    #
    # The JS object assigned to pageOptions, up to the script boundary.
    _PAGE_OPTS_RE = re.compile(
        r"pageOptions.{1,4}(\{[^" + _SCRIPT_END + r"]+\})[^{}]+\}", re.DOTALL
    )
    # JS line comments; the leading [^:] avoids eating the "//" in URLs.
    _COMMENT_RE = re.compile(r"[^:]//.+")
    # Bare JS identifier keys: add the double quotes JSON requires.
    _QUOTE_KEY_RE = re.compile(r"^[^:a-zA-Z]+([a-zA-Z]+):", re.MULTILINE)
    # Function-call values, which JSON cannot represent.
    _FUNC_RE = re.compile(r'\([^"]+"', re.DOTALL)
    # Lines using "||" fallbacks, which JSON cannot represent.
    _OPTIONAL_RE = re.compile(r".+\|\|.+")

    def stream(self):
        """Fetch the share page and return a populated StreamData.

        Best effort, never raises: any fetch/parse failure is logged and
        yields a StreamData built from the init_stream() defaults.
        """
        stream_data = self.init_stream()
        try:
            # Timeout so a stalled Seafile server cannot hang the worker
            # forever (the original request had no timeout at all).
            resp = requests.get(self.upstream, timeout=30)
            # Mark script boundaries with the sentinel and normalise
            # single quotes to the double quotes JSON expects.
            text = resp.text.replace("</script>", self._SCRIPT_END).replace("'", '"')
            text = self._QUOTE_KEY_RE.sub(r'"\1":', text)
            text = self._OPTIONAL_RE.sub("", text)
            text = self._COMMENT_RE.sub("", text)
            # Replace each function-call value with a dummy numeric entry
            # so json.loads() has a chance of accepting the object.
            for call in self._FUNC_RE.findall(text):
                text = text.replace(call, '0,"')
            candidates = []
            for raw in self._PAGE_OPTS_RE.findall(text):
                try:
                    candidates.append(json.loads(raw))
                except Exception as e:
                    self.logger.info("%s <%s>", e, self.upstream)
            # Merge the parsed objects, keeping only non-empty sized values
            # so later (more specific) matches override earlier ones.
            json_data = {}
            for elem in candidates:
                for key, value in elem.items():
                    if hasattr(value, "__len__") and len(value) > 0:
                        json_data[key] = value
            stream_data["title"] = json_data.get("filePath")
            stream_data["upstream"] = json_data.get("rawPath")
        except Exception as e:
            # Broad catch matches the file's other runners: scraping is
            # best effort and must not take down the request handler.
            self.logger.info("%s <%s>", e, self.upstream)
        return StreamData(**stream_data)
|
||||
|
||||
class MetaProvider(StreamProvider):
|
||||
def parse_web(self):
|
||||
stream_data = self.init_stream()
|
||||
@ -283,6 +318,9 @@ async def get_meta(upstream, proxy, logger):
|
||||
async def get_nextcloud(upstream, proxy, logger):
    """Resolve *upstream* via a NextcloudRunner, cached under slot 3."""
    cache_key = (3, upstream)
    runner = NextcloudRunner(upstream, proxy, logger)
    return await get_from_runner(cache_key, runner, logger)
|
||||
|
||||
async def get_seafile(upstream, proxy, logger):
    """Resolve *upstream* via a SeafileRunner, cached under slot 3."""
    cache_key = (3, upstream)
    runner = SeafileRunner(upstream, proxy, logger)
    return await get_from_runner(cache_key, runner, logger)
|
||||
|
||||
async def get_any(upstream, proxy, logger):
|
||||
cache_key = (4, upstream)
|
||||
cached = upstream_cache.get(cache_key)
|
||||
|
Loading…
Reference in New Issue
Block a user