Better caching

Branch: main
Author: Micke Nordin, 2 years ago
Parent: 60ab02b4ba
Commit: 5a161c7e82

@@ -22,7 +22,8 @@ BASEPATH = path.join(str(environ.get("HOME")), '.config/cast')
 DB_FILE_NAME = 'cast.db'
 SUB_TABLE = 'subscriptions'
 VIDEO_TABLE = 'videos'
-USE_CACHED_CATEGORIES = False
+CAT_CACHE = None
+CHAN_CACHE = None
 
 
 def add_video(video_id: str,
@@ -77,27 +78,42 @@ def get_svt_thumb_from_id_changed(id: str,
         size, id, changed)
 
 
-def get_all_svt_categories(basepath=BASEPATH) -> list:
-    global USE_CACHED_CATEGORIES
-    categoryfile = path.join(basepath,"categories.json")
-    if USE_CACHED_CATEGORIES:
-        if path.isfile(categoryfile):
-            with open(categoryfile, 'r') as jfile:
-                return(json.loads(jfile.read()))
-
-    categories: list = list()
-    url = "https://www.svtplay.se/kategori"
-    data = get_svt_data(url)
-    for entry in data:
-        if 'genres' in entry.keys():
-            categories = entry['genres']
-            break
-    with open(categoryfile, 'w') as jfile:
-        jfile.write(json.dumps(categories))
-    USE_CACHED_CATEGORIES = True
-    return categories
+def get_all_svt_categories() -> list:
+    global CAT_CACHE
+    if CAT_CACHE:
+        categories = CAT_CACHE
+    else:
+        categories: list = list()
+        url = "https://www.svtplay.se/kategori"
+        data = get_svt_data(url)
+        for entry in data:
+            if 'genres' in entry.keys():
+                categories = entry['genres']
+                break
+        CAT_CACHE = categories
+    return categories
+
+
+def get_all_svt_channels() -> dict:
+    url = "https://www.svtplay.se/kanaler"
+    result: dict = dict()
+    data = get_svt_data(url)
+    for entry in data:
+        if "channels" in entry:
+            for channel in entry["channels"]:
+                if type(entry["channels"][channel]) == type(list()):
+                    for item in entry["channels"][channel]:
+                        if item["__typename"] == "Channel" and "running" in item:
+                            result[item["id"]] = {
+                                "thumbnail":
+                                make_bitmap_from_url(
+                                    get_svt_thumb_from_id_changed(
+                                        item["running"]["image"]['id'],
+                                        item["running"]["image"]['changed'])),
+                                "name":
+                                item["name"]
+                            }
+    return result
 
 
 def get_svt_category(category: str) -> list:
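
The rewritten get_all_svt_categories() swaps the old on-disk categories.json cache for a module-level variable that is filled on the first call and reused for the rest of the process. Below is a minimal, self-contained sketch of that memoization pattern; fetch_categories() and get_all_categories() are illustrative stand-ins, not functions from this repository.

from typing import Optional

CAT_CACHE: Optional[list] = None   # module-level cache, as in the commit


def fetch_categories() -> list:
    # Stand-in for the real scrape (get_svt_data on svtplay.se/kategori).
    return [{"id": "drama", "name": "Drama"}]


def get_all_categories() -> list:
    global CAT_CACHE
    if CAT_CACHE:                    # reuse the result after the first call
        return CAT_CACHE
    CAT_CACHE = fetch_categories()   # populate once per process
    return CAT_CACHE

Unlike the old USE_CACHED_CATEGORIES/JSON-file approach, this cache lives only in memory, so a restart always refetches the category list.
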
@@ -118,8 +134,8 @@ def get_svt_data(url: str) -> list:
     result: list = list()
     res = requests.get(url)
     soup = BeautifulSoup(res.text, features="lxml")
-    data = json.loads(
-        soup.find(id="__NEXT_DATA__").string)["props"]["urqlState"]
+    data = json.loads(soup.find(
+        id="__NEXT_DATA__").string)["props"]["urqlState"]  # type: ignore
     for key in data.keys():
         result.append(json.loads(data[key]["data"]))
     return result
@@ -232,9 +248,9 @@ def get_svt_thumbnail(link: str) -> str:
     page = requests.get(link)
     soup = BeautifulSoup(page.text, 'html.parser')
     meta = soup.find(property="og:image")
-    image_link = meta["content"]
-    return image_link
+    image_link = meta["content"]  # type: ignore
+    return image_link  # type: ignore
 
 
 def get_videos(channel_id: str,
@@ -308,7 +324,9 @@ def make_sized_button(parent_pnl: wx.Panel, bitmap_or_str: Union[wx.Bitmap,
     return btn_sizer
 
 
-def make_bitmap_from_url(logo_url: str, size: wx.Size = SIZE, video_id: str = "") -> wx.Bitmap:
+def make_bitmap_from_url(logo_url: str,
+                         size: wx.Size = SIZE,
+                         video_id: str = "") -> wx.Bitmap:
     if not video_id:
         video_id = hash_string(logo_url)
     thumbpath = path.join(BASEPATH, 'thumbnails')
@@ -344,55 +362,21 @@ def make_bitmap_from_file(path, size: wx.Size = SIZE) -> wx.Bitmap:
 def resolve_svt_channel(svt_id: str, path: str = '/usr/share/cast') -> dict:
-    channels = {
-        "ch-barnkanalen": {
-            "name":
-            "Barnkanalen",
-            "thumbnail":
-            make_bitmap_from_file('{}/assets/Barnkanalen.png'.format(path))
-        },
-        "ch-svt1": {
-            "name": "SVT 1",
-            "thumbnail":
-            make_bitmap_from_file('{}/assets/SVT1.png'.format(path))
-        },
-        "ch-svt2": {
-            "name": "SVT 2",
-            "thumbnail":
-            make_bitmap_from_file('{}/assets/SVT2.png'.format(path))
-        },
-        "ch-svt24": {
-            "name": "SVT 24",
-            "thumbnail":
-            make_bitmap_from_file('{}/assets/SVT24.png'.format(path))
-        },
-        "ch-kunskapskanalen": {
-            "name":
-            "Kunskapskanalen",
-            "thumbnail":
-            make_bitmap_from_file('{}/assets/Kunskapskanalen.png'.format(path))
-        },
-        "feed": {
-            "name": "Senaste program",
-            "thumbnail":
-            make_bitmap_from_file('{}/assets/SVT.png'.format(path))
-        },
-        "allprograms": {
-            "name": "Alla program",
-            "thumbnail":
-            make_bitmap_from_file('{}/assets/SVT.png'.format(path))
-        },
-    }
-    for category in get_all_svt_categories():
-        channels[category['id']] = {
-            "name":
-            category["name"],
-            "thumbnail":
-            make_bitmap_from_url(
-                get_svt_thumb_from_id_changed(category['image']['id'],
-                                              category['image']['changed']))
-        }
+    global CHAN_CACHE
+    if CHAN_CACHE:
+        channels = CHAN_CACHE
+    else:
+        channels = get_all_svt_channels()
+    channels["feed"] = {
+        "name": "Senaste program",
+        "thumbnail":
+        make_bitmap_from_file('{}/assets/SVT.png'.format(path))
+    }
+    channels["allprograms"] = {
+        "name": "Alla program",
+        "thumbnail":
+        make_bitmap_from_file('{}/assets/SVT.png'.format(path))
+    }
     return channels[svt_id]
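
Taken together, resolve_svt_channel() no longer hard-codes the channel list with bundled PNGs; channel names and thumbnails now come from the get_all_svt_channels() scrape, and only the "feed" and "allprograms" entries are still built from local assets. The sketch below shows that lookup flow without the wx dependency; note that the hunk above only reads CHAN_CACHE, so the cache assignment here is an assumption about intent rather than something the diff shows, and the helper names are illustrative.

from typing import Optional

CHAN_CACHE: Optional[dict] = None


def fetch_channels() -> dict:
    # Stand-in for get_all_svt_channels(), which scrapes svtplay.se/kanaler.
    return {"ch-svt1": {"name": "SVT 1", "thumbnail": None}}


def resolve_channel(svt_id: str) -> dict:
    global CHAN_CACHE
    if CHAN_CACHE is None:
        CHAN_CACHE = fetch_channels()   # assumed: scrape once, then reuse
    channels = dict(CHAN_CACHE)
    # Locally built entries are layered on top of the scraped list, as in the diff.
    channels["feed"] = {"name": "Senaste program", "thumbnail": None}
    channels["allprograms"] = {"name": "Alla program", "thumbnail": None}
    return channels[svt_id]
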
