parent de0bdcdffe
commit 969786e299
@@ -0,0 +1,96 @@
import hashlib
import io
import os
import pickle
import threading
from typing import Union

import feedparser
import requests
import wx

from ChannelProvider import ChannelProvider
from Items import Item
from youtube_dl import YoutubeDL as yt


class YouTube(ChannelProvider):
    # in-memory cache of resolved videos, persisted to m_cachefile on disk
    m_cache: dict = dict()
    m_cachefile = '/tmp/yt_cache'

    def __init__(self, channel_id) -> None:
        super().__init__(
            channel_id,
            'https://www.youtube.com/feeds/videos.xml?channel_id={}'.format(
                channel_id))
        self.m_items: Union[list[Item], None] = None

        # restore previously resolved entries from the on-disk cache
        if os.path.exists(self.m_cachefile):
            with open(self.m_cachefile, 'rb') as cachehandle:
                self.m_cache = pickle.load(cachehandle)
        # parse the feed on a background thread so construction does not block
        self.m_thr = threading.Thread(target=self.parse_feed,
                                      args=(),
                                      kwargs={})
        self.m_thr.start()

    def wait(self) -> bool:
        # True while the background parse is still running
        return self.m_thr.is_alive()

    def parse_feed(self) -> None:
        feed = feedparser.parse(self.get_feed())

        ydl_opts = {
            'format': 'worst',
            'container': 'webm_dash',
        }
        entries = feed['entries']
        self.m_items: list[Item] = list()

        for entry in entries:
            key = hashlib.sha256(entry['link'].encode('utf-8')).hexdigest()

            if key in self.m_cache:
                # cache hit: reuse the previously resolved metadata
                thumbnail_link = self.m_cache[key]['thumbnail_link']
                content = self.m_cache[key]['content']
                resolved_link = self.m_cache[key]['resolved_link']
                description = self.m_cache[key]['description']
                published_parsed = self.m_cache[key]['published_parsed']
                title = self.m_cache[key]['title']
            else:
                title = str(entry['title'])
                thumbnail_link = str(entry['media_thumbnail'][0]['url'])
                description = str(entry['description'])
                link = ''
                # resolve a playable stream URL for the entry with youtube_dl
                with yt(ydl_opts) as ydl:
                    video = ydl.extract_info(entry['link'], download=False)

                    # pick the last format below 480p that still carries audio
                    for form in video['formats']:
                        if (form.get('height') and form['height'] < 480
                                and form['acodec'] != 'none'):
                            link = form['url']

                resolved_link = link

                published_parsed = entry['published_parsed']
                res = requests.get(thumbnail_link)
                content = res.content
            # without a playable stream URL there is nothing to show
            if not resolved_link:
                continue
            content_bytes = io.BytesIO(content)
            self.m_cache[key] = {'thumbnail_link': thumbnail_link}
            self.m_cache[key]['content'] = content
            self.m_cache[key]['resolved_link'] = resolved_link
            self.m_cache[key]['description'] = description
            self.m_cache[key]['published_parsed'] = published_parsed
            self.m_cache[key]['title'] = title
            # decode the downloaded thumbnail into a wx bitmap
            image = wx.Image(content_bytes, type=wx.BITMAP_TYPE_ANY, index=-1)
            thumbnail = wx.Bitmap(image)
            item = Item(description, resolved_link, self.m_provider_name,
                        published_parsed, thumbnail, title)
            self.m_items.append(item)
        # write to cache file
        with open(self.m_cachefile, 'wb') as cachehandle:
            pickle.dump(self.m_cache, cachehandle)
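A minimal usage sketch (not part of the commit), assuming ChannelProvider.get_feed() fetches the feed URL handed to super().__init__() and that a live wx.App exists for the thumbnail-to-bitmap conversion; the module name, placeholder channel id, and polling loop below are illustrative assumptions only:

import time

import wx

from YouTube import YouTube  # assumes this file is saved as YouTube.py

app = wx.App()  # wx.Bitmap conversion generally needs an initialised wx.App

provider = YouTube('UCxxxxxxxxxxxxxxxxxxxxxx')  # placeholder channel id
while provider.wait():  # wait() reports whether the parser thread is still running
    time.sleep(0.5)

for item in provider.m_items or []:
    print(item)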