Move to sqlite and remove pickle-ing

Branch: main
Micke Nordin, 2 years ago
parent 712e0eeaa2 · commit efc6f8c8c0
Signed by: micke (GPG Key ID: 014B273D614BE877)

@@ -1,10 +1,6 @@
#!/usr/bin/env python3
import hashlib
import io
import json
import os
import pickle
import threading
from datetime import datetime
@@ -15,27 +11,24 @@ from bs4 import BeautifulSoup
from Channel import Channel
from Items import Item
from Utils import make_bitmap_from_url, resolve_svt_channel
from Utils import (add_video, hash_string, make_bitmap_from_url,
resolve_svt_channel, video_exists)
default_rss_url = 'http://www.svtplay.se/rss.xml'
class SVT(Channel):
m_cache: dict = dict()
m_cachefile = '/tmp/svt_cache'
def __init__(self, svt_id: str) -> None:
chan_dict = resolve_svt_channel(svt_id)
logo = chan_dict['thumbnail']
name = chan_dict['name']
super().__init__('SVT', default_rss_url,logo,name)
super().__init__(svt_id, 'SVT', default_rss_url, logo, name)
if os.path.exists(self.m_cachefile):
with open(self.m_cachefile, 'rb') as cachehandle:
self.m_cache = pickle.load(cachehandle)
self.m_thr = threading.Thread(target=self.parse_feed,
args=[svt_id],
kwargs={})
def refresh(self) -> None:
self.m_thr.start()
def wait(self) -> bool:
@@ -46,64 +39,56 @@ class SVT(Channel):
feed = feedparser.parse(self.get_feed())
entries = feed['entries']
self.m_items: list[Item] = list()
resolved_link = str()
description = str()
title = str()
thumbnail_link = str()
thumbnail: wx.Bitmap = wx.Bitmap()
published_parsed: datetime = datetime.now()
video_id = str()
if svt_id == 'feed':
for entry in entries:
key = hashlib.sha256(entry['link'].encode('utf-8')).hexdigest()
if key in self.m_cache.keys():
thumbnail_link = self.m_cache[key]['thumbnail_link']
resolved_link = self.m_cache[key]['resolved_link']
description = self.m_cache[key]['description']
published_parsed = self.m_cache[key]['published_parsed']
title = self.m_cache[key]['title']
else:
for link in entry['links']:
if str(link['type']).startswith('image/'):
thumbnail_link = str(link['href'])
break
page = requests.get(str(entry['link']))
soup = BeautifulSoup(page.text, 'html.parser')
for element in soup.find_all('a'):
href = element.get('href')
datart = element.get('data-rt')
if datart == 'top-area-play-button':
svt_id = href.split('=')[1].split('&')[0]
resolved_link = resolve_link(svt_id)
description = str(entry['description'])
published_parsed = entry['published_parsed']
title = str(entry['title'])
self.m_cache[key] = {'thumbnail_link': thumbnail_link}
self.m_cache[key]['resolved_link'] = resolved_link
self.m_cache[key]['description'] = description
self.m_cache[key]['published_parsed'] = published_parsed
self.m_cache[key]['title'] = title
thumbnail = make_bitmap_from_url(thumbnail_link,wx.Size(self.m_screen_width,150))
if resolved_link:
item = Item(description, resolved_link,
self.m_provider_name, published_parsed,
thumbnail, title)
self.m_items.append(item)
# write to cache file
with open(self.m_cachefile, 'wb') as cachehandle:
pickle.dump(self.m_cache, cachehandle)
video_id = hash_string(entry['id'])
if video_exists(video_id, svt_id):
pass
for link in entry['links']:
if str(link['type']).startswith('image/'):
thumbnail_link = str(link['href'])
break
page = requests.get(str(entry['link']))
soup = BeautifulSoup(page.text, 'html.parser')
for element in soup.find_all('a'):
href = element.get('href')
datart = element.get('data-rt')
if datart == 'top-area-play-button':
svt_id = href.split('=')[1].split('&')[0]
resolved_link = resolve_link(svt_id)
description = str(entry['description'])
published_parsed = entry['published_parsed']
title = str(entry['title'])
if resolved_link and thumbnail_link:
thumbnail = make_bitmap_from_url(
thumbnail_link, wx.Size(self.m_screen_width, 150))
else:
chan_dict = resolve_svt_channel(svt_id)
resolved_link = resolve_link(svt_id)
video_id = hash_string(resolved_link)
title = chan_dict['name']
published_parsed = datetime.now()
description = "Live channel stream"
thumbnail = chan_dict['thumbnail']
if resolved_link:
item = Item(description, resolved_link, self.m_provider_name,
published_parsed, thumbnail, title)
self.m_items.append(item)
if resolved_link:
item = Item(description, resolved_link, self.m_provider_name,
published_parsed, thumbnail, title)
self.m_items.append(item)
add_video(video_id, svt_id, self.m_provider_name, description,
resolved_link, published_parsed, thumbnail, title, 0)
def resolve_link(svt_id: str) -> str:

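With this change the SVT channel no longer keeps a pickled dict in /tmp/svt_cache; parsed entries go through the shared SQLite helpers instead. A minimal sketch of that per-entry flow, assuming the intent is to skip entries that are already stored (entry is a hypothetical feedparser dict; the helper signatures are taken from the Utils hunk further down):

    # Per-entry storage flow replacing the pickle cache (sketch, not the exact commit code).
    # video_exists/add_video are the new SQLite helpers defined in Utils.
    from Utils import add_video, hash_string, video_exists

    def store_entry(entry, channel_id, provider_name, resolved_link, thumbnail, title):
        video_id = hash_string(entry['id'])        # stable key derived from the feed entry id
        if video_exists(video_id, channel_id):     # row already in the videos table
            return                                 # nothing to resolve or insert
        add_video(video_id, channel_id, provider_name, str(entry['description']),
                  resolved_link, entry['published_parsed'], thumbnail, title, 0)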
@@ -1,46 +1,31 @@
import hashlib
import os
import pickle
import threading
import time
from typing import Union
import feedparser
import wx
from Channel import Channel
from Items import Item
from Utils import get_default_logo, make_bitmap_from_url
from youtube_dl import YoutubeDL as yt
from youtube_dl.utils import DownloadError, ExtractorError
from Channel import Channel
from Items import Item
from Utils import (add_video, get_default_logo, hash_string,
make_bitmap_from_url, video_exists)
class YouTube(Channel):
m_cache: dict = dict()
class YouTube(Channel):
def __init__(self, channel_id: str, name: str) -> None:
self.m_channel_id = channel_id
self.m_name = name
rss_url = 'https://www.youtube.com/feeds/videos.xml?channel_id={}'.format(
channel_id)
self.m_cachefile = '/tmp/yt_cache_{}'.format(channel_id)
self.m_logo = get_default_logo('YouTube')
super().__init__(channel_id, rss_url, self.m_logo,
self.m_name)
self.m_items: Union[list[Item], None] = None
if os.path.exists(self.m_cachefile):
with open(self.m_cachefile, 'rb') as cachehandle:
try:
self.m_cache = pickle.load(cachehandle)
except EOFError or pickle.UnpicklingError:
pass
super().__init__(channel_id, 'YouTube', rss_url, self.m_logo, name)
self.m_thr = threading.Thread(target=self.parse_feed,
args=(),
kwargs={})
self.m_thr.start()
self.pickle()
def refresh(self) -> None:
self.m_thr.start()
def wait(self) -> bool:
return self.m_thr.is_alive()
@@ -57,31 +42,24 @@ class YouTube(Channel):
self.m_items: list[Item] = list()
for entry in entries:
key = hashlib.sha256(entry['link'].encode('utf-8')).hexdigest()
if key in self.m_cache.keys():
thumbnail_link = self.m_cache[key]['thumbnail_link']
resolved_link = self.m_cache[key]['resolved_link']
description = self.m_cache[key]['description']
published_parsed = self.m_cache[key]['published_parsed']
title = self.m_cache[key]['title']
else:
title = str(entry['title'])
thumbnail_link = str(entry['media_thumbnail'][0]['url'])
description = str(entry['description'])
link = ''
with yt(ydl_opts) as ydl:
try:
video = ydl.extract_info(entry['link'], download=False)
for form in video['formats']:
if form['height']:
if form['height'] < 480 and form[
'acodec'] != 'none':
link = form['url']
except ExtractorError or DownloadError:
pass
video_id = hash_string(entry['id'])
if video_exists(video_id, self.m_channel_id):
pass
title = str(entry['title'])
thumbnail_link = str(entry['media_thumbnail'][0]['url'])
description = str(entry['description'])
link = ''
with yt(ydl_opts) as ydl:
try:
video = ydl.extract_info(entry['link'], download=False)
for form in video['formats']: # type: ignore
if form['height']:
if form['height'] < 480 and form[
'acodec'] != 'none':
link = form['url']
except ExtractorError or DownloadError:
pass
resolved_link = link
@@ -89,21 +67,11 @@ class YouTube(Channel):
if not resolved_link:
continue
self.m_cache[key] = {'thumbnail_link': thumbnail_link}
self.m_cache[key]['resolved_link'] = resolved_link
self.m_cache[key]['description'] = description
self.m_cache[key]['published_parsed'] = published_parsed
self.m_cache[key]['title'] = title
thumbnail = make_bitmap_from_url(thumbnail_link,
wx.Size(self.m_screen_width, 150))
item = Item(description, resolved_link, self.m_provider_name,
published_parsed, thumbnail, title)
self.m_items.append(item)
def pickle(self) -> None:
while self.wait():
time.sleep(1)
# write to cache file
with open(self.m_cachefile, 'wb') as cachehandle:
pickle.dump(self.m_cache, cachehandle)
add_video(video_id, self.m_channel_id, self.m_provider_name,
description, resolved_link, published_parsed, thumbnail,
title, 0)

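The format selection in YouTube.parse_feed keeps the last format below 480p that still carries audio, and falls back to an empty link when extraction fails. The same logic pulled out into a standalone helper for clarity, assuming ydl_opts is a plain youtube_dl options dict:

    # Stream-URL resolution as used in parse_feed above (sketch).
    from youtube_dl import YoutubeDL
    from youtube_dl.utils import DownloadError, ExtractorError

    def resolve_stream(url: str, ydl_opts: dict) -> str:
        link = ''
        with YoutubeDL(ydl_opts) as ydl:
            try:
                video = ydl.extract_info(url, download=False)
                for form in video.get('formats', []):
                    # keep the last sub-480p format that also has an audio codec
                    if (form.get('height') and form['height'] < 480
                            and form.get('acodec') != 'none'):
                        link = form['url']
            except (ExtractorError, DownloadError):
                pass                               # leave link empty so the caller can skip
        return link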
@@ -5,10 +5,12 @@ from typing import Union
import wx
from Items import Item
from Utils import get_videos
class Channel:
def __init__(self,
channel_id: str,
provider_name: str,
feed: str,
logo: wx.Bitmap,
@@ -17,7 +19,7 @@ class Channel:
self.m_provider_name = provider_name
self.m_name = name
self.m_feed = feed
self.m_items: Union[list[Item], None] = None
self.m_items: list[Item] = get_videos(channel_id)
self.m_screen_width = 720/2
def get_logo_as_bitmap(self) -> wx.Bitmap:

@@ -6,10 +6,11 @@ from os import path
import wx
from Channel import Channel
from Utils import get_default_logo
from Utils import get_default_logo, get_latest
MYPATH = path.dirname(path.abspath(__file__))
class ChannelProvider:
def __init__(self, providerid: str, channels=list()):
self.m_id = providerid
@@ -38,12 +39,13 @@ class ChannelProvider:
return self.m_id
def make_latest(self) -> None:
items = list()
for chan in self.m_channels:
while chan.wait():
time.sleep(1)
items.append(chan.get_latest_item())
channel = Channel(self.get_name, None, self.m_logo, "Latest videos")
items = get_latest(self.m_id)
channel_id = self.m_id + "_latest"
channel = Channel(channel_id, self.get_name(), '', self.m_logo,
"Latest videos")
channel.set_items(items)
self.append_channel(channel)
self.prepend_channel(channel)
def prepend_channel(self, channel: Channel) -> int:
self.m_channels.insert(0, channel)
return len(self.m_channels)

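With the videos table in place, make_latest no longer joins every channel thread; the "Latest videos" pseudo-channel is built from a single query. Roughly, with provider, logo and the provider id 'YouTube' as placeholders for an existing ChannelProvider, wx.Bitmap and id:

    # Sketch of the new make_latest flow: one DB read, no per-channel thread joins.
    from Channel import Channel
    from Utils import get_latest

    items = get_latest('YouTube')                     # 50 newest Items for this provider
    latest = Channel('YouTube_latest', 'YouTube', '', logo, 'Latest videos')
    latest.set_items(items)                           # replace whatever get_videos loaded
    provider.prepend_channel(latest)                  # listed before the real channels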
@@ -1,16 +1,19 @@
#!/usr/bin/env python3
import hashlib
import io
import json
import sqlite3
import time
from datetime import datetime
from os import environ, makedirs, path
from typing import Callable, Union
from urllib.parse import urlparse
from Items import Item
import requests
import wx
from Items import Item
SIZE = wx.Size(100, 68)
MYPATH = path.dirname(path.abspath(__file__))
SCREEN_WIDTH = int(720 / 2)
@@ -45,22 +48,31 @@ def add_video(video_id: str,
provider_id: str,
description: str,
link: str,
published: datetime,
published: Union[datetime, time.struct_time],
bitmap: wx.Bitmap,
title: str,
watchtime: str,
watchtime: int,
basepath: str = BASEPATH,
filename: str = DB_FILE_NAME) -> None:
thumbnail = bitmap.GetData()
try:
timestamp = published.timestamp() #type: ignore
except AttributeError:
timestamp = time.mktime(published) #type: ignore
if not video_id:
video_id = hash_string(link)
thumbpath = path.join(basepath, 'thumbnails')
thumbnail = path.join(thumbpath, video_id)
fullpath = path.join(basepath, filename)
if not path.isdir(basepath):
makedirs(basepath)
if not path.isdir(thumbpath):
makedirs(thumbpath)
bitmap.SaveFile(thumbnail, wx.BITMAP_TYPE_PNG)
con = sqlite3.connect(fullpath)
cur = con.cursor()
create_query: str = '''CREATE TABLE IF NOT EXISTS {}
(video_id TEXT PRIMARY KEY, channel_id TEXT, provider_id TEXT,
title TEXT, link text, description TEXT, thumbnail BLOB, published DATETIME)'''.format(
VIDEO_TABLE)
title TEXT, link text, description TEXT, thumbnail TEXT,
published DATETIME, watchtime NUMBER)'''.format(VIDEO_TABLE)
cur.execute(create_query)
con.commit()
@@ -68,8 +80,10 @@ def add_video(video_id: str,
VALUES(?,?,?,?,?,?,?,?,?) ON CONFLICT(video_id) DO NOTHING'''.format(
VIDEO_TABLE)
cur.execute(upsert_query, video_id, channel_id, provider_id, title, link,
description, thumbnail, published, watchtime)
cur.execute(upsert_query, [
video_id, channel_id, provider_id, title, link, description, thumbnail,
int(timestamp), watchtime
])
con.commit()
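add_video normalizes published to a Unix timestamp (it can arrive as a datetime for live channels or as feedparser's time.struct_time for RSS entries) and relies on SQLite's upsert clause so that re-adding a known video is harmless. A self-contained illustration of that ON CONFLICT idiom with a trimmed-down schema and an in-memory database:

    # Minimal demo of the INSERT ... ON CONFLICT(video_id) DO NOTHING pattern.
    import sqlite3

    con = sqlite3.connect(':memory:')
    cur = con.cursor()
    cur.execute('''CREATE TABLE IF NOT EXISTS videos
                   (video_id TEXT PRIMARY KEY, title TEXT,
                    published DATETIME, watchtime NUMBER)''')
    row = ['abc123', 'Some video', 1700000000, 0]
    upsert = 'INSERT INTO videos VALUES(?,?,?,?) ON CONFLICT(video_id) DO NOTHING'
    cur.execute(upsert, row)
    cur.execute(upsert, row)        # second insert with the same video_id is a no-op
    con.commit()
    print(cur.execute('SELECT COUNT(*) FROM videos').fetchone()[0])   # -> 1

Note that ON CONFLICT ... DO NOTHING needs SQLite 3.24 or newer; on older versions INSERT OR IGNORE is the equivalent.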
@@ -83,6 +97,39 @@ def get_default_logo(providerid: str = 'default') -> wx.Bitmap:
return wx.Bitmap('{}/assets/Default.png'.format(MYPATH))
def get_latest(provider_id: str,
basepath: str = BASEPATH,
filename: str = DB_FILE_NAME) -> list[Item]:
videos = list()
fullpath = path.join(basepath, filename)
try:
con = sqlite3.connect(fullpath)
cur = con.cursor()
select_query = '''SELECT * FROM {} WHERE provider_id = ? ORDER BY published DESC LIMIT 50'''.format(
VIDEO_TABLE)
cur.execute(select_query, [provider_id])
for result in cur.fetchall():
description = result[5]
link = result[4]
provider_id = result[2]
published = datetime.fromtimestamp(int(result[7]))
thumbnail = wx.Bitmap(result[6])
title = result[3]
watchtime = result[8]
videos.append(
Item(description,
link,
provider_id,
published,
thumbnail,
title,
watchtime=watchtime)) # Make an item from db
except sqlite3.OperationalError:
pass
return videos
def get_subscriptions(basepath: str = BASEPATH,
filename: str = DB_FILE_NAME) -> list[tuple[str, str]]:
subscriptions = list()
@@ -95,20 +142,45 @@ def get_subscriptions(basepath: str = BASEPATH,
subscriptions.append(result)
return subscriptions
def get_videos(channel_id: str,
basepath: str = BASEPATH,
filename: str = DB_FILE_NAME) -> list[Item]:
basepath: str = BASEPATH,
filename: str = DB_FILE_NAME) -> list[Item]:
videos = list()
fullpath = path.join(basepath, filename)
con = sqlite3.connect(fullpath)
cur = con.cursor()
select_query = '''SELECT * FROM {} WHERE channel_id = ?'''.format(VIDEO_TABLE)
cur.execute(select_query,channel_id)
for result in cur.fetchall():
try:
con = sqlite3.connect(fullpath)
cur = con.cursor()
select_query = '''SELECT * FROM {} WHERE channel_id = ? ORDER BY published DESC'''.format(
VIDEO_TABLE)
cur.execute(select_query, [channel_id])
for result in cur.fetchall():
description = result[5]
link = result[4]
provider_id = result[2]
published = datetime.fromtimestamp(int(result[7]))
thumbnail = wx.Bitmap(result[6])
title = result[3]
watchtime = result[8]
videos.append(
Item(description,
link,
provider_id,
published,
thumbnail,
title,
watchtime=watchtime)) # Make an item from db
except sqlite3.OperationalError:
pass
# videos.append(Item()) # Make an item from db
return videos
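get_latest and get_videos read columns back by numeric index (result[2] is provider_id, result[4] the link, result[7] the published timestamp, result[8] the watchtime). Not what this commit does, but the same rows can also be read by name with sqlite3.Row, which keeps that index mapping out of the code; table and file names below are placeholders, since VIDEO_TABLE's value is not shown in this diff:

    # Optional alternative to positional indexing: name-based row access.
    import sqlite3

    con = sqlite3.connect('videos.db')                  # placeholder path
    con.row_factory = sqlite3.Row
    cur = con.cursor()
    cur.execute('SELECT * FROM videos ORDER BY published DESC LIMIT 50')
    for row in cur.fetchall():
        print(row['title'], row['link'], row['published'])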
def hash_string(string: str) -> str:
hash_object = hashlib.sha256(string.encode('utf-8'))
return hash_object.hexdigest()
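hash_string returns a deterministic 64-character hex digest, so the same feed entry always maps to the same primary key across runs (unlike Python's built-in hash(), which is salted per process). For example, with a hypothetical URL:

    a = hash_string('https://example.com/video?id=123')
    b = hash_string('https://example.com/video?id=123')
    assert a == b and len(a) == 64                      # sha256 hexdigest is stable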
def import_from_newpipe(filename) -> None:
if path.isfile(filename):
@@ -215,3 +287,20 @@ def resolve_svt_channel(svt_id: str) -> dict:
}
return channels[svt_id]
def video_exists(video_id: str,
channel_id: str,
basepath: str = BASEPATH,
filename: str = DB_FILE_NAME) -> bool:
fullpath = path.join(basepath, filename)
try:
con = sqlite3.connect(fullpath)
cur = con.cursor()
select_query = '''SELECT * FROM {} WHERE channel_id = ? AND video_id = ?'''.format(
VIDEO_TABLE)
cur.execute(select_query, [channel_id, video_id])
return bool(len(cur.fetchall()))
except sqlite3.OperationalError:
return False

@@ -69,6 +69,7 @@ class Cast(wx.Frame):
def get_providers(self) -> list[ChannelProvider]:
providers = list()
channels = list()
svt = ChannelProvider(
"SVT",
channels=[
@@ -82,21 +83,14 @@ class Cast(wx.Frame):
)
providers.append(svt)
youtube = ChannelProvider(
"YouTube",
channels=[
YouTube.YouTube("UCs6A_0Jm21SIvpdKyg9Gmxw", "Pine 64"),
],
)
subfile = 'yt_subs.json'
if os.path.isfile(subfile):
import_from_newpipe(subfile)
subscriptions = get_subscriptions()
subscriptions = get_subscriptions()
if subscriptions:
for channel in subscriptions:
print(channel)
youtube.append_channel(YouTube.YouTube(channel[0], channel[1]))
channels.append(YouTube.YouTube(channel[0], channel[1]))
else:
channels.append(YouTube.YouTube("UCs6A_0Jm21SIvpdKyg9Gmxw", "Pine 64"))
youtube = ChannelProvider("YouTube", channels=channels)
providers.append(youtube)
return providers
@@ -104,7 +98,7 @@ class Cast(wx.Frame):
def show_provider_list(self, _) -> None:
self.m_sizer.Clear(delete_windows=True)
self.m_sizer = wx.BoxSizer(wx.VERTICAL)
self.m_sizer.AddSpacer(SPACER_HEIGHT * 4)
# self.m_sizer.AddSpacer(SPACER_HEIGHT * 4)
closebtn = wx.Button(self.m_panel,
-1,
label="Close",
@@ -133,7 +127,7 @@ class Cast(wx.Frame):
self.m_selected_provider = self.m_providers[provider_index]
self.m_sizer.Clear(delete_windows=True)
self.m_sizer = wx.BoxSizer(wx.VERTICAL)
self.m_sizer.AddSpacer(SPACER_HEIGHT * 4)
#self.m_sizer.AddSpacer(SPACER_HEIGHT * 4)
bck_callback = lambda event: self.show_provider_list(event)
self.add_back_button(bck_callback)
channel_index = 0
@@ -156,7 +150,7 @@ class Cast(wx.Frame):
def show_video_list(self, _, index=0) -> None:
self.m_sizer.Clear(delete_windows=True)
self.m_sizer = wx.BoxSizer(wx.VERTICAL)
self.m_sizer.AddSpacer(SPACER_HEIGHT * 4)
# self.m_sizer.AddSpacer(SPACER_HEIGHT * 4)
channel = self.m_selected_provider.get_channel_by_index(index)
if channel.wait():
@@ -209,7 +203,7 @@ class Cast(wx.Frame):
"""
self.m_sizer.Clear(delete_windows=True)
self.m_sizer = wx.BoxSizer(wx.VERTICAL)
self.m_sizer.AddSpacer(SPACER_HEIGHT * 4)
# self.m_sizer.AddSpacer(SPACER_HEIGHT * 4)
inner_sizer = wx.GridBagSizer()
self.m_control = wx.media.MediaCtrl(
self.m_panel,
@@ -268,7 +262,7 @@ class Cast(wx.Frame):
def select_chromecast(self, _, uri, provider_index):
self.m_sizer.Clear(delete_windows=True)
self.m_sizer = wx.BoxSizer(wx.VERTICAL)
self.m_sizer.AddSpacer(SPACER_HEIGHT * 4)
# self.m_sizer.AddSpacer(SPACER_HEIGHT * 4)
cancel_btn = wx.Button(self.m_panel,
-1,
