parent 9d166d8b42
commit cc6c244769
@@ -1,49 +1,97 @@
#!/usr/bin/env python3

import hashlib
import io
import json
import os
import pickle

import ChannelProvider
import feedparser
import requests
import wx
from bs4 import BeautifulSoup
from Items import Item
import json


class SVT(ChannelProvider.ChannelProvider):
    # Pickled cache of already-resolved feed entries, keyed by the SHA-256
    # of each entry link.
    m_cache: dict = dict()
    m_cachefile = '/tmp/svt_cache'

    def __init__(self) -> None:
        super().__init__('SVT', 'http://www.svtplay.se/rss.xml')

        # Reuse previously resolved entries from the pickled on-disk cache.
        if os.path.exists(self.m_cachefile):
            print("Found cache file: {}".format(self.m_cachefile))
            with open(self.m_cachefile, 'rb') as cachehandle:
                self.m_cache = pickle.load(cachehandle)
        self.m_items: list[Item] = self.parse_feed()

    def refresh_items(self):
        self.m_items: list[Item] = self.parse_feed()

    def parse_feed(self) -> list[Item]:
        feed = feedparser.parse(self.get_feed())
        entries = feed['entries']
        items: list[Item] = list()

        for entry in entries:
            thumbnail = ''
            svt_id = ''
            for link in entry['links']:
                if str(link['type']).startswith('image/'):
                    thumbnail = str(link['href'])
            page = requests.get(str(entry['link']))
            soup = BeautifulSoup(page.text, 'html.parser')

            for element in soup.find_all('a'):
                href = element.get('href')
                datart = element.get('data-rt')

                if datart == 'top-area-play-button':
                    svt_id = href.split('=')[1].split('&')[0]

            api = json.loads(requests.get('https://api.svt.se/video/{}'.format(svt_id)).text)
            #print(api['videoReferences'])
            resolved_link = ''
            for reference in api['videoReferences']:
                if reference['format'] == "dashhbbtv":
                    resolved_link = reference['url']
            print(resolved_link)

            item = Item(str(entry['description']), resolved_link,
                        self.m_provider_name, entry['published_parsed'],
                        thumbnail, str(entry['title']))
            key = hashlib.sha256(entry['link'].encode('utf-8')).hexdigest()
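            # Fields cached per key (matching what is stored below):
            # 'thumbnail_link', 'resolved_link', 'description',
            # 'published_parsed' and 'title'.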
            if key in self.m_cache.keys():
                print("Cache hit: {}".format(key))
                thumbnail_link = self.m_cache[key]['thumbnail_link']
                resolved_link = self.m_cache[key]['resolved_link']
                description = self.m_cache[key]['description']
                published_parsed = self.m_cache[key]['published_parsed']
                title = self.m_cache[key]['title']
            else:
                svt_id = ''
                thumbnail_link = ''  # default if no image enclosure is found

                # The thumbnail is the first image/* enclosure in the entry.
                for link in entry['links']:
                    if str(link['type']).startswith('image/'):
                        thumbnail_link = str(link['href'])
                        break
                # Scrape the episode page for the play button to find the id.
                page = requests.get(str(entry['link']))
                soup = BeautifulSoup(page.text, 'html.parser')

                for element in soup.find_all('a'):
                    href = element.get('href')
                    datart = element.get('data-rt')

                    if datart == 'top-area-play-button':
                        svt_id = href.split('=')[1].split('&')[0]

                # Resolve the playable stream URL via the SVT video API.
                api = json.loads(
                    requests.get(
                        'https://api.svt.se/video/{}'.format(svt_id)).text)
                resolved_link = ''

                for reference in api['videoReferences']:
                    if reference['format'] == "dashhbbtv":
                        resolved_link = reference['url']
                print(resolved_link)
                description = str(entry['description'])
                published_parsed = entry['published_parsed']
                title = str(entry['title'])
                self.m_cache[key] = {'thumbnail_link': thumbnail_link}
                self.m_cache[key]['resolved_link'] = resolved_link
                self.m_cache[key]['description'] = description
                self.m_cache[key]['published_parsed'] = published_parsed
                self.m_cache[key]['title'] = title

            # Download the thumbnail and convert it to a wx.Bitmap for the UI.
            res = requests.get(thumbnail_link)
            content_bytes = io.BytesIO(res.content)
            image = wx.Image(content_bytes,
                             type=wx.BITMAP_TYPE_ANY,
                             index=-1)
            thumbnail = wx.Bitmap(image)
            item = Item(description, resolved_link, self.m_provider_name,
                        published_parsed, thumbnail, title)
            items.append(item)

        # write to cache file
        with open(self.m_cachefile, 'wb') as cachehandle:
            pickle.dump(self.m_cache, cachehandle)
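        # The whole cache dict (old and new entries) is re-pickled after each
        # parse, so resolved links and metadata survive across runs.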
        return items
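A minimal usage sketch (not part of the commit): it assumes this file is importable as a module named SVT, that the ChannelProvider base class and Items.Item behave as used above, and that a wx.App exists before thumbnails are turned into wx.Bitmap objects.

import wx

import SVT  # hypothetical module name for the file above

app = wx.App()        # wx.Image / wx.Bitmap need a wx app to exist first
provider = SVT.SVT()  # loads /tmp/svt_cache if present, then parses the feed
print(len(provider.m_items), "items parsed")
provider.refresh_items()  # re-parses the feed; cached entries skip the scraping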
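The pickled cache can also be inspected directly for debugging; a sketch assuming the same /tmp/svt_cache path and the per-key fields written by parse_feed:

import pickle

with open('/tmp/svt_cache', 'rb') as handle:
    cache = pickle.load(handle)
for key, fields in cache.items():
    print(key[:12], fields['title'], fields['resolved_link'])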