cast/src/Channel/SVT/__init__.py

#!/usr/bin/env python3
import hashlib
import io
import json
import os
import pickle
import threading
from datetime import datetime

import feedparser
import requests
import wx
from bs4 import BeautifulSoup

from Channel import Channel
from Items import Item
from Utils import make_bitmap_from_url, resolve_svt_channel

default_rss_url = 'http://www.svtplay.se/rss.xml'


class SVT(Channel):
    m_cache: dict = dict()
    m_cachefile = '/tmp/svt_cache'

    def __init__(self, svt_id: str) -> None:
        chan_dict = resolve_svt_channel(svt_id)
        logo = chan_dict['thumbnail']
        name = chan_dict['name']
        super().__init__('SVT', default_rss_url, logo, name)
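        # Restore cached entry metadata from a previous run, if any.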
        if os.path.exists(self.m_cachefile):
            with open(self.m_cachefile, 'rb') as cachehandle:
                self.m_cache = pickle.load(cachehandle)
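        # Parse the feed on a background thread so construction does not block.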
        self.m_thr = threading.Thread(target=self.parse_feed,
                                      args=[svt_id],
                                      kwargs={})
        self.m_thr.start()

    def wait(self) -> bool:
        # Reports whether the background feed parse is still running (non-blocking).
        return self.m_thr.is_alive()

    def parse_feed(self, *args, **kwargs) -> None:
        svt_id = args[0]
        feed = feedparser.parse(self.get_feed())
        entries = feed['entries']
        self.m_items: list[Item] = list()
        if svt_id == 'feed':
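            # On-demand programmes: walk the RSS feed entries and resolve each one.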
            for entry in entries:
                key = hashlib.sha256(entry['link'].encode('utf-8')).hexdigest()
                if key in self.m_cache:
                    thumbnail_link = self.m_cache[key]['thumbnail_link']
                    resolved_link = self.m_cache[key]['resolved_link']
                    description = self.m_cache[key]['description']
                    published_parsed = self.m_cache[key]['published_parsed']
                    title = self.m_cache[key]['title']
                else:
                    # Defaults, in case the entry lacks an image link or play button.
                    thumbnail_link = ''
                    resolved_link = ''
                    for link in entry['links']:
                        if str(link['type']).startswith('image/'):
                            thumbnail_link = str(link['href'])
                            break
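                    # Scrape the programme page for its play button to find the video id.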
                    page = requests.get(str(entry['link']))
                    soup = BeautifulSoup(page.text, 'html.parser')
                    for element in soup.find_all('a'):
                        href = element.get('href')
                        datart = element.get('data-rt')
                        if datart == 'top-area-play-button':
                            video_id = href.split('=')[1].split('&')[0]
                            resolved_link = resolve_link(video_id)
                    description = str(entry['description'])
                    published_parsed = entry['published_parsed']
                    title = str(entry['title'])
                    self.m_cache[key] = {'thumbnail_link': thumbnail_link,
                                         'resolved_link': resolved_link,
                                         'description': description,
                                         'published_parsed': published_parsed,
                                         'title': title}
                thumbnail = make_bitmap_from_url(thumbnail_link)
                if resolved_link:
                    item = Item(description, resolved_link,
                                self.m_provider_name, published_parsed,
                                thumbnail, title)
                    self.m_items.append(item)
            # Write the (possibly updated) cache back to disk once all entries are processed.
            with open(self.m_cachefile, 'wb') as cachehandle:
                pickle.dump(self.m_cache, cachehandle)
        else:
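            # Live channel: resolve the stream for the requested channel id directly.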
            chan_dict = resolve_svt_channel(svt_id)
            resolved_link = resolve_link(svt_id)
            title = chan_dict['name']
            published_parsed = datetime.now()
            description = "Live channel stream"
            thumbnail = chan_dict['thumbnail']
            if resolved_link:
                item = Item(description, resolved_link, self.m_provider_name,
                            published_parsed, thumbnail, title)
                self.m_items.append(item)


def resolve_link(svt_id: str) -> str:
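    # Ask the SVT video API for the DASH/HbbTV stream URL of the given video id.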
    api = json.loads(
        requests.get('https://api.svt.se/video/{}'.format(svt_id)).text)
    resolved_link = ''
    try:
        for reference in api['videoReferences']:
            if reference['format'] == "dashhbbtv":
                resolved_link = reference['url']
    except KeyError:
        pass
    return resolved_link
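

# Illustrative usage sketch, not part of the original module: build the channel,
# wait for the background parse to finish, then read the collected items.
# Assumes 'feed' selects the RSS listing (as in parse_feed above) and that
# Utils.make_bitmap_from_url may need a wx.App to exist before bitmaps are made.
if __name__ == '__main__':
    import time

    app = wx.App(False)          # precaution for bitmap creation via wx
    chan = SVT('feed')
    while chan.wait():           # wait() is True while the parser thread runs
        time.sleep(0.1)
    for item in chan.m_items:
        print(item)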