@@ -5,6 +5,7 @@ import io
 import json
 import os
 import pickle
+import threading
 
 import ChannelProvider
 import feedparser
@@ -24,15 +25,18 @@ class SVT(ChannelProvider.ChannelProvider):
         if os.path.exists(self.m_cachefile):
             with open(self.m_cachefile, 'rb') as cachehandle:
                 self.m_cache = pickle.load(cachehandle)
-        self.m_items: list[Item] = self.parse_feed()
+        self.m_thr = threading.Thread(target=self.parse_feed,
+                                      args=(),
+                                      kwargs={})
+        self.m_thr.start()
 
-    def refresh_items(self):
-        self.m_items: list[Item] = self.parse_feed()
+    def wait(self) -> bool:
+        return self.m_thr.is_alive()
 
-    def parse_feed(self) -> list[Item]:
+    def parse_feed(self) -> None:
         feed = feedparser.parse(self.get_feed())
         entries = feed['entries']
-        items: list[Item] = list()
+        self.m_items: list[Item] = list()
 
         for entry in entries:
             key = hashlib.sha256(entry['link'].encode('utf-8')).hexdigest()
@@ -50,6 +54,7 @@ class SVT(ChannelProvider.ChannelProvider):
             for link in entry['links']:
                 if str(link['type']).startswith('image/'):
                     thumbnail_link = str(link['href'])
+
                     break
             page = requests.get(str(entry['link']))
             soup = BeautifulSoup(page.text, 'html.parser')
@@ -83,16 +88,12 @@ class SVT(ChannelProvider.ChannelProvider):
             self.m_cache[key]['published_parsed'] = published_parsed
             self.m_cache[key]['title'] = title
 
-            image = wx.Image(content_bytes,
-                             type=wx.BITMAP_TYPE_ANY,
-                             index=-1)
+            image = wx.Image(content_bytes, type=wx.BITMAP_TYPE_ANY, index=-1)
             thumbnail = wx.Bitmap(image)
             item = Item(description, resolved_link, self.m_provider_name,
                         published_parsed, thumbnail, title)
-            items.append(item)
+            self.m_items.append(item)
 
         # write to cache file
         with open(self.m_cachefile, 'wb') as cachehandle:
             pickle.dump(self.m_cache, cachehandle)
-
-        return items
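
For reference, a minimal sketch of how a caller might consume the now-asynchronous provider. The construction and polling loop are assumptions (the real SVT constructor arguments and the caller's event loop are not shown in this patch); note that wait() as patched does not block, it only reports whether the worker thread is still running:

    import time

    provider = SVT()          # __init__ now starts parse_feed() on a background thread
    while provider.wait():    # True while the worker is still running
        time.sleep(0.1)       # poll without blocking the caller's event loop
    items = provider.m_items  # fully populated once the thread has finished

A caller that prefers to block until parsing completes could call provider.m_thr.join() instead of polling; reading m_items before wait() returns False may observe a partially filled list, since parse_feed() appends to it as entries are processed.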