#!/usr/bin/env python3
import hashlib
import io
import json
import os
import pickle
import threading

import ChannelProvider
import feedparser
import requests
import wx
from bs4 import BeautifulSoup
from Items import Item


class SVT(ChannelProvider.ChannelProvider):

    m_cachefile = '/tmp/svt_cache'

    def __init__(self) -> None:
        rss_url = 'http://www.svtplay.se/rss.xml'
        logo_url = 'https://upload.wikimedia.org/wikipedia/commons/'
        logo_url += 'thumb/4/4b/Logotyp_SVT_Play.png/480px-Logotyp_SVT_Play.png'
        super().__init__('SVT', rss_url, logo_url)

        # Keep the cache as an instance attribute so separate instances do
        # not share and mutate a single class-level dict.
        self.m_cache: dict = dict()
        if os.path.exists(self.m_cachefile):
            with open(self.m_cachefile, 'rb') as cachehandle:
                self.m_cache = pickle.load(cachehandle)

        # Parse the feed on a background thread so startup is not blocked.
        self.m_thr = threading.Thread(target=self.parse_feed)
        self.m_thr.start()

    def wait(self) -> bool:
        # True while the background parse is still running.
        return self.m_thr.is_alive()

    def parse_feed(self) -> None:
        feed = feedparser.parse(self.get_feed())
        entries = feed['entries']
        self.m_items: list[Item] = list()
        for entry in entries:
            # The hashed entry link is the cache key.
            key = hashlib.sha256(entry['link'].encode('utf-8')).hexdigest()
            if key in self.m_cache:
                cached = self.m_cache[key]
                content = cached['content']
                resolved_link = cached['resolved_link']
                description = cached['description']
                published_parsed = cached['published_parsed']
                title = cached['title']
            else:
                # Thumbnail: first enclosure with an image MIME type.
                thumbnail_link = ''
                for link in entry['links']:
                    if str(link['type']).startswith('image/'):
                        thumbnail_link = str(link['href'])
                        break

                # Scrape the programme page for the play button, whose href
                # carries the video id expected by the SVT video API.
                svt_id = ''
                page = requests.get(str(entry['link']))
                soup = BeautifulSoup(page.text, 'html.parser')
                for element in soup.find_all('a'):
                    href = element.get('href')
                    if element.get('data-rt') == 'top-area-play-button':
                        svt_id = href.split('=')[1].split('&')[0]
                        break
                if not svt_id or not thumbnail_link:
                    # No playable video id or no artwork found; skip entry
                    # instead of requesting the API with an empty id.
                    continue

                # Ask the video API for the DASH HbbTV stream URL.
                api = json.loads(
                    requests.get(
                        'https://api.svt.se/video/{}'.format(svt_id)).text)
                resolved_link = ''
                for reference in api['videoReferences']:
                    if reference['format'] == 'dashhbbtv':
                        resolved_link = reference['url']

                description = str(entry['description'])
                published_parsed = entry['published_parsed']
                title = str(entry['title'])

                # Download the thumbnail and cache everything for next run.
                content = requests.get(thumbnail_link).content
                self.m_cache[key] = {
                    'thumbnail_link': thumbnail_link,
                    'content': content,
                    'resolved_link': resolved_link,
                    'description': description,
                    'published_parsed': published_parsed,
                    'title': title,
                }

            # Build the bitmap after the if/else so cached entries get a
            # thumbnail too.
            content_bytes = io.BytesIO(content)
            image = wx.Image(content_bytes, type=wx.BITMAP_TYPE_ANY, index=-1)
            thumbnail = wx.Bitmap(image)
            item = Item(description, resolved_link, self.m_provider_name,
                        published_parsed, thumbnail, title)
            self.m_items.append(item)

        # Persist the cache for the next start-up.
        with open(self.m_cachefile, 'wb') as cachehandle:
            pickle.dump(self.m_cache, cachehandle)
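

# Minimal usage sketch, not part of the original module. Assumptions: the
# local ChannelProvider and Items modules are importable, and a wx.App
# exists before any wx.Image/wx.Bitmap is constructed (wx requires this).
if __name__ == '__main__':
    app = wx.App()            # must exist before bitmaps are created
    provider = SVT()          # starts the background feed parse
    provider.m_thr.join()     # or poll provider.wait() from a UI timer
    for item in provider.m_items:
        print(item)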