Commit f512fe79 authored by Cy8aer

Merge branch 'thigg/podqast-discover-fixes' into ff

parents 7dc3eadc ae443ee6
Pipeline #315470809 passed
Showing 662 additions and 573 deletions
...@@ -118,7 +118,7 @@ class Constants(metaclass=singleton.Singleton):
self.__init(self.progname) self.__init(self.progname)
db: DatabaseProxy = DatabaseProxy() db: SqliteQueueDatabase = DatabaseProxy()
class BaseModel(Model): class BaseModel(Model):
......
import datetime
import hashlib import hashlib
import logging import logging
import os import os
...@@ -7,16 +8,18 @@ import pyotherside
from peewee import Model from peewee import Model
from playhouse.migrate import migrate from playhouse.migrate import migrate
from podcast import POST_ID_TYPE from podcast import POST_ID_TYPE, podcastlist
from podcast.archive import ArchiveEntry, archivename from podcast.archive import ArchiveEntry, archivename
from podcast.constants import Constants, db from podcast.constants import Constants, db
from podcast.external import externalname, ExternalEntry, ExternalFactory from podcast.external import externalname, ExternalEntry, ExternalFactory
from podcast.factory import Factory from podcast.factory import Factory
from podcast.inbox import InboxEntry, inboxname, Inbox from podcast.inbox import InboxEntry, inboxname, Inbox
from podcast.persistent_log import LogMessage
from podcast.podcast import PodcastFactory, Podcast from podcast.podcast import PodcastFactory, Podcast
from podcast.podcastlist import listname from podcast.podcastlist import listname
from podcast.podpost import Podpost, PodpostFactory, PodpostChapter from podcast.podpost import Podpost, PodpostFactory, PodpostChapter
from podcast.queue import QueueFactory from podcast.queue import QueueFactory
from podcast.util import chunks
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
...@@ -50,51 +53,64 @@ def needs_migration():
def run_migrations(strict=True): def run_migrations(strict=True):
current_version = get_versionnumber() current_version = get_versionnumber()
start_version = current_version start_version = current_version
logger.info("Base migration version is %s", current_version)
pyotherside.send("migrationSections", start_version, 2)
if (current_version < 1): if (current_version < 1):
logger.info("migration1: migrating pickles to sqlite") logger.info("migration1: migrating pickles to sqlite")
setup_db() setup_db()
with db.atomic(): logger.info("Migrating from version %d to version 1", current_version)
logger.info("Migrating from version %d to version 1", current_version) podcasts = get_podcastlist_from_pickles()
podcasts = get_podcastlist_from_pickles() pyotherside.send("migrationStart", 1, len(podcasts))
pyotherside.send("migrationStart", len(podcasts)) i: int = 0
i: int = 0 post_hash_to_id: Dict[str:POST_ID_TYPE] = {}
post_hash_to_id: Dict[str:POST_ID_TYPE] = {} for podcast_url in podcasts:
for podcast_url in podcasts: podcast_dict = Factory().get_store().get(podcast_url).__dict__
podcast_dict = Factory().get_store().get(podcast_url).__dict__ if podcast_dict != None:
if podcast_dict != None: podcast, entry_hashs = migrate_podcast_v0_v1(podcast_dict, strict)
podcast, entry_hashs = migrate_podcast_v0_v1(podcast_dict, strict) for entry_id, podpost in entry_hashs.items():
for entry_id, podpost in entry_hashs.items(): if not podpost.id:
if not podpost.id: raise ValueError("Podpost has no id")
raise ValueError("Podpost has no id") post_hash_to_id[entry_id] = podpost.id
post_hash_to_id[entry_id] = podpost.id store_post = Factory().get_store().get(entry_id)
store_post = Factory().get_store().get(entry_id) if store_post:
if store_post: migrate_podpost_v0_v1(store_post.__dict__, podpost)
migrate_podpost_v0_v1(store_post.__dict__, podpost) else:
else: logger.debug("Could not find a already persisted post object for %s, reading it from feed",
logger.debug("Could not find a already persisted post object for %s, reading it from feed", entry_id)
entry_id) i += 1
i += 1 pyotherside.send("migrationProgress", 1, i)
pyotherside.send("migrationProgress", i) migrate_archive_v0_v1(post_hash_to_id)
migrate_archive_v0_v1(post_hash_to_id) migrate_queue_v0_v1(post_hash_to_id)
migrate_queue_v0_v1(post_hash_to_id) migrate_inbox_v0_v1(post_hash_to_id)
migrate_inbox_v0_v1(post_hash_to_id) migrate_external_v0_v1(post_hash_to_id)
migrate_external_v0_v1(post_hash_to_id) set_versionnumber(1)
set_versionnumber(1)
if current_version < 2 and start_version == 1: if current_version < 2 and start_version == 1:
pyotherside.send("migrationStart", 2, 4 + get_archive_v0_count())
from playhouse.migrate import SqliteMigrator from playhouse.migrate import SqliteMigrator
migrator = SqliteMigrator(db) migrator = SqliteMigrator(db)
logger.info("migration2: Starting migration of the datamodel...") logger.info("migration2: Starting migration of the datamodel...")
db.create_tables([LogMessage])
db.pragma("foreign_keys", "off") db.pragma("foreign_keys", "off")
recreate_table(Podpost, "podpost") recreate_table(Podpost, "podpost")
recreate_table(PodpostChapter, "podpostchapter"), pyotherside.send("migrationProgress", 2, 1)
recreate_table(ArchiveEntry, "archiveentry"), recreate_table(PodpostChapter, "podpostchapter")
recreate_table(InboxEntry, "inboxentry"), pyotherside.send("migrationProgress", 2, 2)
recreate_table(ArchiveEntry, "archiveentry")
pyotherside.send("migrationProgress", 2, 3)
recreate_table(InboxEntry, "inboxentry")
pyotherside.send("migrationProgress", 2, 4)
migrate( migrate(
migrator.add_index("podcast", Podcast.url.column_name) migrator.add_index("podcast", Podcast.url.column_name)
) )
db.pragma("foreign_keys", "on") db.pragma("foreign_keys", "on")
set_versionnumber(2) try:
migrate_archive_v0_v1({}, progress_callback=lambda i: pyotherside.send("migrationProgress", 2, 4 + i))
except BaseException as e:
if strict:
raise e
set_versionnumber(2)
db.execute_sql('VACUUM "main"')
pyotherside.send("migrationDone") pyotherside.send("migrationDone")
...@@ -111,6 +127,13 @@ def recreate_table(table: Type[Model], tablename: str):
db.execute_sql("DROP TABLE %s_x;" % tablename) db.execute_sql("DROP TABLE %s_x;" % tablename)
def get_archive_v0_count():
archive = get_archive_from_v0_store()
if archive is None:
return 0
return len(archive.__dict__['podposts'])
def get_podcastlist_from_pickles(): def get_podcastlist_from_pickles():
plist = Factory().get_store().get(listname) plist = Factory().get_store().get(listname)
if not plist: if not plist:
...@@ -140,45 +163,102 @@ def get_versionfile_path():
def setup_db(): def setup_db():
from .podpost import Podpost, PodpostChapter from .podpost import Podpost, PodpostChapter
db.create_tables([Podpost, PodpostChapter, Podcast, ArchiveEntry, InboxEntry, ExternalEntry]) db.create_tables([Podpost, PodpostChapter, Podcast, ArchiveEntry, InboxEntry, ExternalEntry, LogMessage])
def migrate_archive_v0_v1(post_hash_to_id): def migrate_archive_v0_v1(post_hash_to_id, progress_callback=None):
""" """
migrates the id list to ArchiveEntry migrates the id list to ArchiveEntry
""" """
from podcast.archive import Archive from podcast.archive import Archive
archive: Archive = Factory().get_store().get(archivename)
if not archive: def iterate_archive(archive_insert_ids, old_archive, post_hash_to_id, progress_callback):
logger.warning("Could not unpickle archive") podcasts = {podcast.url: podcast for podcast in podcastlist.PodcastList().get_podcasts_objects()}
hrefs = set(t[0] for t in Podpost.select(Podpost.href).tuples())
for count, hash in enumerate(old_archive):
if progress_callback:
progress_callback(count)
if not hash in post_hash_to_id:
known_entry = Factory().get_store().get(hash)
if not known_entry:
logger.warning("Found archiveentry which was not found in a feed without podpost in store")
continue
known_post = known_entry.__dict__
podcast = podcasts[known_post['podurl']] if known_post['podurl'] in podcasts.keys() else None
if known_post['podurl'] and not podcast:
logger.warning("Found archiveentry with unknown podcast %s", known_post['podurl'])
continue
if not "href" in known_post.keys():
if not "link" in known_post.keys():
logger.warning("Cannot migrate podpost from store, missing attributes: '%s'", known_post)
continue
known_post["href"] = known_post["link"]
href = known_post['href']
if href not in hrefs:
yield create_post_from_store_dict(known_post, podcast)
hrefs.add(href)
else:
continue
else:
archive_insert_ids.append(post_hash_to_id[hash])
archive = get_archive_from_v0_store()
if archive is None:
return return
podposts = archive.__dict__['podposts'] podposts = archive.__dict__['podposts']
if len(podposts) > 0: if len(podposts) > 0:
old_archive: List[str] = podposts old_archive: List[str] = podposts
archive._archive_entries = [] archive_insert_ids = []
for hash in old_archive: for offset, _, chunk in chunks(iterate_archive(archive_insert_ids, old_archive, post_hash_to_id,
if not hash in post_hash_to_id: progress_callback), 50):
logger.error("Had unknown post %s in archive", hash) Podpost.bulk_create(chunk)
continue logging.info("created chunk %s", offset)
postid = post_hash_to_id[hash] archive_insert_ids.extend(
Archive().insert(postid) t[0] for t in Podpost.select(Podpost.id).where(Podpost.href.in_([p.href for p in chunk])).tuples())
logger.info("Inserting %s new entries into the archive", len(archive_insert_ids))
Archive().bulk_insert(archive_insert_ids)
else: else:
logger.warning("Could not get old archive for migration") logger.warning("Could not get old archive for migration")
def migrate_podpost_v0_v1(post_dict: dict, podpost: Podpost): def get_archive_from_v0_store():
def transfer_attribute(which: str, post_dict: dict, podpost: Podpost): from podcast.archive import Archive
if which in post_dict: archive: Archive = Factory().get_store().get(archivename)
setattr(podpost, which, post_dict[which]) if not archive:
logger.warning("Could not unpickle archive")
return archive
def create_post_from_store_dict(known_post, podcast):
post = Podpost()
post.podcast = podcast
post.guid = known_post["id"]
for attr in ["isaudio", "position", "logo_url",
"logo_path", "insert_date", "published",
"title", "link", "href", "plainpart",
"htmlpart", "author", "length", "type", "duration"]:
transfer_attribute(attr, known_post, post)
if post.insert_date is None:
post.insert_date = datetime.datetime.now().timestamp()
migrate_podpost_v0_v1(known_post, post, do_persist=False)
return post
def migrate_podpost_v0_v1(post_dict: dict, podpost: Podpost, do_persist=True):
if not podpost: if not podpost:
raise ValueError("podpost mus not be none") raise ValueError("podpost must not be none")
if hasattr(podpost, "entry") and not podpost.isaudio: if hasattr(podpost, "entry") and not podpost.isaudio:
podpost.init(podpost.entry, podpost.logo_url, podpost.podurl) podpost.init(podpost.entry, podpost.logo_url, podpost.podurl)
for attr in ["favorite", "file_path", "percentage", "position", "state"]: for attr in ["favorite", "file_path", "percentage", "position", "state"]:
transfer_attribute(attr, post_dict, podpost) transfer_attribute(attr, post_dict, podpost)
PodpostFactory().persist(podpost) if do_persist:
PodpostFactory().persist(podpost)
def transfer_attribute(which: str, post_dict: dict, podpost: Podpost):
if which in post_dict:
setattr(podpost, which, post_dict[which])
def migrate_podcast_v0_v1(podcast_dict: dict, strict=True) -> Tuple[Podcast, Dict[str, Podpost]]: def migrate_podcast_v0_v1(podcast_dict: dict, strict=True) -> Tuple[Podcast, Dict[str, Podpost]]:
......
...@@ -6,7 +6,6 @@ Factory is a Singleton
""" """
import sys import sys
import os
from podcast.constants import Constants from podcast.constants import Constants
...@@ -50,17 +49,3 @@ class Factory(BaseFactory):
return self.store return self.store
def nomedia(self, doset):
"""
create .nomedia file in root path of podqast
"""
audiopath = os.path.join(Constants().audiofilepath, ".nomedia")
iconpath = os.path.join(Constants().iconpath, ".nomedia")
open(iconpath, "a").close()
if doset == False:
open(audiopath, "a").close()
else:
os.remove(audiopath)
...@@ -30,22 +30,23 @@ def fetch_feed(published, url) -> FeedParserDict:
agent = util.user_agent2 agent = util.user_agent2
else: else:
agent = util.user_agent agent = util.user_agent
feed = feedparser.parse(url, agent=agent, modified=time.gmtime(published)) feed: FeedParserDict = feedparser.parse(url, agent=agent, modified=time.gmtime(published))
if feed.status == 304: if feed.status == 304:
raise NotModified() raise NotModified()
if feed.bozo != 0: if feed.bozo != 0:
exc = feed.bozo_exception exc : Exception = feed.bozo_exception
if type(exc) != feedparser.CharacterEncodingOverride: if type(exc) != feedparser.CharacterEncodingOverride:
logger.exception( logger.exception(
"Podcast init: error in parsing feed %s", str(type(exc)), exc_info = exc "Podcast init: error in parsing feed %s", str(type(exc)), exc_info=exc
) )
raise FeedFetchingError("error in feed") raise FeedFetchingError(exc.message if hasattr(exc, 'message') else "message_missing")
logger.info( logger.info(
"podcast init size of entries: %d", len(feed.entries) "podcast init size of entries: %d", len(feed.entries)
) )
return feed return feed
def iterate_feed_entries(feed)->Iterator:
def iterate_feed_entries(feed) -> Iterator:
while True: while True:
for entry in feed.entries: for entry in feed.entries:
yield entry yield entry
...@@ -59,8 +60,9 @@ def iterate_feed_entries(feed)->Iterator:
break break
class FeedFetchingError(Exception): class FeedFetchingError(BaseException):
pass def __init__(self, msg):
self.message = msg
class NotModified(Exception): class NotModified(Exception):
......
...@@ -114,7 +114,7 @@ class InboxFactory(metaclass=Singleton):
otherwise it returns the single element otherwise it returns the single element
""" """
def get_inbox(self): def get_inbox(self) -> Inbox:
""" """
Get the Inbox Get the Inbox
""" """
......
import datetime
import json
from enum import Enum
from typing import Iterator
from peewee import AutoField, CharField, TextField, DateTimeField
from podcast.constants import BaseModel
LOG_LIMIT = 1000
class LogType(Enum):
AutoPostLimit = "AutoPostLimit",
Exception = "Exception",
NetworkError = "NetworkError",
FeedParseError = "FeedParseError"
FeedEpisodeParseError = "FeedEpisodeParseError"
AddPodcastError = "AddPodcastError"
SuccessfulRefresh = "SuccessfulRefresh"
Refresh304 = "Refresh304"
class LogMessage(BaseModel):
id = AutoField()
messagetype = CharField(null=False)
params = TextField()
insert_date = DateTimeField(default=datetime.datetime.now)
def to_dict(self):
return {
"messagetype": str(self.messagetype),
"params": str(self.params),
"insert_date": self.insert_date
}
def convert_params(params):
converted = {}
for k, v in params.items():
if isinstance(v, Exception) and hasattr(v, "message"):
converted[k] = v.message
else:
converted[k] = str(v)
return converted
def persist_log(type: LogType, **params):
msg = LogMessage()
msg.messagetype = type.value
msg.params = json.dumps(convert_params(params))
msg.save()
over_limit = LogMessage.select().count() - LOG_LIMIT
if over_limit > 0:
for msg_id in LogMessage.select(LogMessage.id).order_by(LogMessage.insert_date.asc()).limit(
over_limit).tuples():
LogMessage.delete_by_id(msg_id[0])
def get_log_messages() -> Iterator[LogMessage]:
return LogMessage.select().order_by(LogMessage.insert_date.desc()).iterator()
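Note for reviewers: the new persistent_log module above backs the in-app log with a peewee table instead of ad-hoc pyotherside notifications. A minimal usage sketch, assuming the LogMessage table has already been created by setup_db() (the URL is a made-up example):

    from podcast.persistent_log import persist_log, get_log_messages, LogType

    # store a structured event; non-string params are stringified by convert_params()
    persist_log(LogType.NetworkError, what="episode download", url="https://example.org/ep1.mp3")

    # iterate stored messages, newest first
    for msg in get_log_messages():
        print(msg.to_dict())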
...@@ -7,10 +7,12 @@ from typing import List, Dict, Optional, Iterator
from peewee import CharField, TextField, BooleanField, IntegerField, ModelSelect, DoesNotExist, FloatField, \ from peewee import CharField, TextField, BooleanField, IntegerField, ModelSelect, DoesNotExist, FloatField, \
IntegrityError IntegrityError
from podcast.persistent_log import persist_log, LogType
sys.path.append("/usr/share/podqast/python") sys.path.append("/usr/share/podqast/python")
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
if TYPE_CHECKING: if TYPE_CHECKING:
from podpost import Podpost from podpost import Podpost
...@@ -129,7 +131,8 @@ class Podcast(BaseModel):
returns the episodes in order from newest to oldest returns the episodes in order from newest to oldest
""" """
logger.debug("fetching episodes for %s", self.title) logger.debug("fetching episodes for %s full=%s limit=%d", self.title, str(not break_on_first_existing_episode),
limit)
image = self.logo() image = self.logo()
...@@ -145,19 +148,14 @@ class Podcast(BaseModel):
href_known = "href" in entry and entry.href in known_hrefs href_known = "href" in entry and entry.href in known_hrefs
has_guid = "guid" in entry has_guid = "guid" in entry
guid_known = has_guid and entry["guid"] in known_guids guid_known = has_guid and entry["guid"] in known_guids
logger.debug("entry knowncheck: %s %s %s", has_guid, guid_known, href_known) #logger.debug("entry knowncheck: %s %s %s", has_guid, guid_known, href_known)
if has_guid: return guid_known or href_known
if not guid_known and href_known:
logger.warning("podcast %s entry href already known, but new guid: %s", self.url, str(entry))
return False
return guid_known
return href_known
if self.autolimit: if self.autolimit:
logger.debug("Overwriting limit (%d) with autolimit (%d)", limit, self.autolimit) logger.debug("Overwriting limit (%d) with autolimit (%d)", limit, self.autolimit)
limit = self.autolimit limit = self.autolimit
all_episode_count = len(self.entry_ids_old_to_new) episode_count = 0
new_posts = [] new_posts = []
for entry in feedutils.iterate_feed_entries(feed): for entry in feedutils.iterate_feed_entries(feed):
if entry_already_known(entry): if entry_already_known(entry):
...@@ -170,13 +168,14 @@ class Podcast(BaseModel):
post = Podpost.from_feed_entry(self, entry, self.url, image) post = Podpost.from_feed_entry(self, entry, self.url, image)
except: except:
logger.exception("could not process entry of podcast %s", self.url) logger.exception("could not process entry of podcast %s", self.url)
persist_log(LogType.FeedEpisodeParseError, title=self.title, entry=entry)
continue continue
all_episode_count += 1 episode_count += 1
new_posts.append(post) new_posts.append(post)
# todo: limit stuff probably broken # todo: limit stuff probably broken
if limit != 0 and all_episode_count >= limit: if limit != 0 and episode_count >= limit:
if not supress_limit_notification: if not supress_limit_notification:
pyotherside.send("apperror", "Auto post limit reached for %s!" % self.title) persist_log(LogType.AutoPostLimit, id=self.id, title=self.title, limit=limit)
break break
if not do_not_store: if not do_not_store:
from podcast.podpost import PodpostFactory from podcast.podpost import PodpostFactory
...@@ -333,10 +332,18 @@ class Podcast(BaseModel):
logger.debug("Moveto is %d", move) logger.debug("Moveto is %d", move)
posts = [post for post in self.__process_refreshed_feedentries(feed, limit, full_refresh)] posts = [post for post in self.__process_refreshed_feedentries(feed, limit, full_refresh)]
persist_log(LogType.SuccessfulRefresh, title=self.title, num_new_episodes=len(posts))
for post in posts: for post in posts:
yield (move, post) yield (move, post)
except NotModified: except NotModified:
logger.info("Got 304 response, skipping") logger.info("Got 304 response, skipping")
persist_log(LogType.Refresh304, title=self.title)
except FeedFetchingError as ffe:
logger.exception("Could not fetch feed")
persist_log(LogType.FeedParseError, title=self.title, errormsg=ffe)
except Exception as e:
persist_log(LogType.Exception, msg="during refresh", podcasttitle=self.title, exception=e)
pyotherside.send("refreshPost", None) pyotherside.send("refreshPost", None)
def __process_refreshed_feedentries(self, feed, limit, full_refresh): def __process_refreshed_feedentries(self, feed, limit, full_refresh):
...@@ -344,7 +351,8 @@ class Podcast(BaseModel):
saves new entries for this feed saves new entries for this feed
yields all new entries yields all new entries
""" """
new_posts = self.__process_episodes(feed, limit, break_on_first_existing_episode=not full_refresh) new_posts = self.__process_episodes(feed, 0 if full_refresh else limit,
break_on_first_existing_episode=not full_refresh)
logger.info("Fount %d new entries.", len(new_posts)) logger.info("Fount %d new entries.", len(new_posts))
return new_posts return new_posts
...@@ -408,7 +416,6 @@ class PodcastFactory(BaseFactory):
podcast = self.podcastcache.get(url) podcast = self.podcastcache.get(url)
if not podcast: if not podcast:
logger.debug("PodcastFactory: not in cache %s", url)
try: try:
podcast = Podcast.get(Podcast.url == url) podcast = Podcast.get(Podcast.url == url)
self.podcastcache.store(url, podcast) self.podcastcache.store(url, podcast)
......
...@@ -5,6 +5,8 @@ List of subscribed podcasts
import sys import sys
from typing import Tuple, Iterator from typing import Tuple, Iterator
from podcast.persistent_log import persist_log, LogType
sys.path.append("../") sys.path.append("../")
from podcast.singleton import Singleton from podcast.singleton import Singleton
...@@ -125,9 +127,10 @@ class PodcastList:
try: try:
self.add(pod) self.add(pod)
podcounter += 1 podcounter += 1
except: except Exception as e:
logger.exception("Could not import %s",pod) logger.exception("Could not import %s",pod)
pyotherside.send("apperror", "failed to import " + pod) pyotherside.send("apperror", "failed to import " + pod)
persist_log(LogType.AddPodcastError, podcast=pod, exception=e)
return podcounter return podcounter
......
...@@ -5,7 +5,9 @@ import datetime
import sys import sys
from functools import reduce from functools import reduce
from math import floor from math import floor
from urllib.error import URLError
from podcast.persistent_log import persist_log, LogType
sys.path.append("../") sys.path.append("../")
from podcast.podcast import Podcast from podcast.podcast import Podcast
...@@ -41,13 +43,13 @@ class Podpost(BaseModel):
Most fields are persisted in the database Most fields are persisted in the database
""" """
guid: str = CharField(index=True) guid: str = CharField(index=True)
id: AutoField = AutoField(primary_key=True) #POST_ID_TYPE id: AutoField = AutoField(primary_key=True) # POST_ID_TYPE
author: str = CharField(default="") author: str = CharField(default="")
duration: int = IntegerField(null=True) duration: int = IntegerField(null=True)
favorite: bool = BooleanField(default=False) favorite: bool = BooleanField(default=False)
file_path: str = TextField(null=True) file_path: str = TextField(null=True)
# podcast file url # podcast file url
href: str = TextField(default="", index=True) href: TextField = TextField(default="", index=True)
htmlpart: str = TextField(null=True) htmlpart: str = TextField(null=True)
# when we entered this post into our db # when we entered this post into our db
insert_date = FloatField(default=lambda: datetime.datetime.now().timestamp()) insert_date = FloatField(default=lambda: datetime.datetime.now().timestamp())
...@@ -99,49 +101,56 @@ class Podpost(BaseModel):
post.logo_path = logo_url post.logo_path = logo_url
else: else:
post.logo_path = None post.logo_path = None
try: if 'published_parsed' in entry.keys():
post.published = timegm(entry.published_parsed) post.published = timegm(entry['published_parsed'])
except: else:
try: if "published" in entry.keys():
logger.debug("no published_parsed") logger.debug("no published_parsed")
post.published = mktime_tz(parsedate_tz(entry.published)) if type(entry["published"]) == float:
except: post.published = entry["published"]
else:
try:
post.published = mktime_tz(parsedate_tz(entry["published"]))
except:
logger.warning("Could not parse published % of entry %s", entry["published"], post.title)
post.published = 0
else:
post.published = 0 post.published = 0
post.title = entry.title post.title = entry["title"]
if "link" in entry: if "link" in entry:
post.link = entry.link post.link = entry["link"]
else: else:
pass pass
if "content" in entry.keys(): if "content" in entry.keys():
for cont in entry.content: for cont in entry["content"]:
if cont.type == "text/html": if cont.type == "text/html":
post.htmlpart = cont.value post.htmlpart = cont.value
if cont.type == "text/plain": if cont.type == "text/plain":
post.plainpart = cont.value post.plainpart = cont.value
if not post.plainpart: if not post.plainpart:
try: if "summary" in entry.keys():
post.plainpart = entry.summary post.plainpart = entry["summary"]
except: else:
post.plainpart = "" post.plainpart = ""
if not post.htmlpart: if not post.htmlpart:
try: if "summary_detail" in entry.keys():
post.htmlpart = entry.summary_detail.value post.htmlpart = entry["summary_detail"].value
except: else:
post.htmlpart = "" post.htmlpart = ""
h = html2text.HTML2Text() h = html2text.HTML2Text()
h.ignore_links = True h.ignore_links = True
h.ignore_images = True h.ignore_images = True
post.plainpart = h.handle(post.plainpart) post.plainpart = h.handle(post.plainpart)
if "author" in entry.keys(): if "author" in entry.keys():
post.author = entry.author post.author = entry["author"]
if "id" in entry: if "id" in entry:
if entry.id != "": if entry["id"] != "":
post.guid = entry.id post.guid = entry["id"]
else: else:
post.id = hashlib.sha256(entry.summary.encode()).hexdigest() post.id = hashlib.sha256(entry["summary"].encode()).hexdigest()
if len(entry.enclosures) == 0: if len(entry["enclosures"]) == 0:
logger.warning("post %s has no enclosures", post.title) logger.warning("post %s has no enclosures", post.title)
for e in entry.enclosures: for e in entry["enclosures"]:
if e.type[:5] == "audio": if e.type[:5] == "audio":
if "length" in e.keys(): if "length" in e.keys():
post.length = e.length post.length = e.length
...@@ -159,7 +168,7 @@ class Podpost(BaseModel):
post.type = "audio/mp3" post.type = "audio/mp3"
post.href = e.href post.href = e.href
if "itunes_duration" in entry: if "itunes_duration" in entry:
post.duration = util.tx_to_s(entry.itunes_duration) post.duration = util.tx_to_s(entry["itunes_duration"])
else: else:
post.duration = 0 post.duration = 0
post.chapters = list(PodpostChapter.from_feed_entry(post, entry)) post.chapters = list(PodpostChapter.from_feed_entry(post, entry))
...@@ -210,7 +219,6 @@ class Podpost(BaseModel):
section = util.format_full_date(self.published) section = util.format_full_date(self.published)
date = self.published date = self.published
fdate = util.s_to_year(date)
asection = util.format_full_date(self.insert_date) asection = util.format_full_date(self.insert_date)
loaded = False loaded = False
...@@ -234,7 +242,6 @@ class Podpost(BaseModel):
"detail": self.htmlpart, "detail": self.htmlpart,
"position": self.position, "position": self.position,
"date": date, "date": date,
"fdate": fdate,
"section": section, "section": section,
"asection": asection, "asection": asection,
"length": self.length, "length": self.length,
...@@ -245,7 +252,8 @@ class Podpost(BaseModel):
"isaudio": self.isaudio, "isaudio": self.isaudio,
"loaded": loaded, "loaded": loaded,
"haschapters": haschapters, "haschapters": haschapters,
"listened": self.position > 0 and (self.duration*1000-self.position < Constants().markListenedBeforeEndThreshold*1000) "listened": self.position > 0 and (
self.duration * 1000 - self.position < Constants().markListenedBeforeEndThreshold * 1000)
} }
def get_image_descriptor(self): def get_image_descriptor(self):
...@@ -290,18 +298,19 @@ class Podpost(BaseModel):
self.percentage = perc self.percentage = perc
yield perc yield perc
self.percentage = 100 self.percentage = 100
except:
os.rename(file_path_part, file_path)
except URLError:
logger.exception("Download failed") logger.exception("Download failed")
self.delete_file() self.delete_file()
file_path = None file_path = None
self.percentage = 0 self.percentage = 0
persist_log(LogType.NetworkError, what="episode download", title=self.title, url=self.href)
try: except BaseException as e:
os.rename(file_path_part, file_path)
except:
logger.exception("renaming the downloaded file failed") logger.exception("renaming the downloaded file failed")
file_path = None file_path = None
self.percentage = 0 self.percentage = 0
persist_log(LogType.Exception, what="episode download", title=self.title, exception=e)
self.file_path = file_path self.file_path = file_path
PodpostFactory().persist(self) PodpostFactory().persist(self)
...@@ -515,7 +524,6 @@ class Podpost(BaseModel):
PodpostFactory().persist(self) PodpostFactory().persist(self)
class PodpostChapter(BaseModel): class PodpostChapter(BaseModel):
podpost: Podpost = ForeignKeyField(Podpost, backref="chapters", lazy_load=False, on_delete='CASCADE') podpost: Podpost = ForeignKeyField(Podpost, backref="chapters", lazy_load=False, on_delete='CASCADE')
start_millis: datetime.time = IntegerField() start_millis: datetime.time = IntegerField()
...@@ -560,17 +568,20 @@ class PodpostFactory(BaseFactory):
except DoesNotExist: except DoesNotExist:
return None return None
def exists(self, index):
return Podpost.select(Podpost.id == index).exists()
def delete_podpost(self, index): def delete_podpost(self, index):
Podpost.delete_by_id(index) Podpost.delete_by_id(index)
PodpostChapter.delete().where(PodpostChapter.podpost == index).execute() PodpostChapter.delete().where(PodpostChapter.podpost == index).execute()
def persist(self, post: Podpost, store_chapters = False): def persist(self, post: Podpost, store_chapters=False):
if type(post) != Podpost: if type(post) != Podpost:
raise ValueError("Can only persist podposts, not %s", type(post)) raise ValueError("Can only persist podposts, not %s", type(post))
post.save() post.save()
if store_chapters: if store_chapters:
PodpostChapter.bulk_create(post.chapters, PodpostChapter.bulk_create(post.chapters,
batch_size=PodpostChapter.get_bulk_size()) batch_size=PodpostChapter.get_bulk_size())
def create(self, post: Podpost): def create(self, post: Podpost):
post.create() post.create()
......
...@@ -206,9 +206,8 @@ class Queue:
post = PodpostFactory().get_podpost(podpost) post = PodpostFactory().get_podpost(podpost)
for perc in post.download_audio(): for perc in post.download_audio():
yield perc yield perc
if podpost == self.podposts[0] and post.file_path:
if podpost == self.podposts[0]: pyotherside.send("firstDownloaded", post.file_path)
pyotherside.send("firstDownloaded")
def download_all(self): def download_all(self):
""" """
......
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import asyncio
from typing import Iterator from typing import Iterator
import pyotherside import pyotherside
import threading
import sys
sys.path.append("/usr/share/harbour-podqast/python")
from podcast.podpost import Podpost from podcast.podpost import Podpost
from podcast.util import chunks from podcast.util import chunks
from podcast.archive import ArchiveFactory from podcast.archive import ArchiveFactory
...@@ -14,7 +11,7 @@ import logging
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def get_archive_posts(podurl=None): async def get_archive_posts(podurl=None):
""" """
Return a list of all archive posts Return a list of all archive posts
""" """
...@@ -25,7 +22,7 @@ def get_archive_posts(podurl=None):
pyotherside.send("historyData", offset, [post.get_data() for post in chunk]) pyotherside.send("historyData", offset, [post.get_data() for post in chunk])
def get_archive_pod_data(): async def get_archive_pod_data():
""" """
""" """
...@@ -47,28 +44,12 @@ def get_archive_pod_data():
class ArchiveHandler: class ArchiveHandler:
def __init__(self):
self.bgthread = threading.Thread()
self.bgthread.start()
self.bgthread2 = threading.Thread()
self.bgthread2.start()
def getarchiveposts(self, podurl=None): def getarchiveposts(self, podurl=None):
if self.bgthread2.is_alive(): asyncio.run(get_archive_posts(podurl))
return
if podurl:
self.bgthread2 = threading.Thread(
target=get_archive_posts, args=[podurl]
)
else:
self.bgthread2 = threading.Thread(target=get_archive_posts)
self.bgthread2.start()
def getarchivepoddata(self): def getarchivepoddata(self):
if self.bgthread.is_alive(): asyncio.run(get_archive_pod_data())
return
self.bgthread = threading.Thread(target=get_archive_pod_data)
self.bgthread.start()
archivehandler = ArchiveHandler() archivehandler = ArchiveHandler()
...@@ -9,6 +9,7 @@ Python {
signal archivePodList(var data) signal archivePodList(var data)
Component.onCompleted: { Component.onCompleted: {
addImportPath(Qt.resolvedUrl("./python"))
setHandler("historyData", historyData) setHandler("historyData", historyData)
setHandler("archivePodList", archivePodList) setHandler("archivePodList", archivePodList)
...@@ -18,6 +19,10 @@ Python {
}) })
} }
onError: {
console.log('python error: ' + traceback)
}
function getArchiveEntries(podurl) { function getArchiveEntries(podurl) {
if (podurl === "home") { if (podurl === "home") {
call("ArchiveHandler.archivehandler.getarchiveposts", call("ArchiveHandler.archivehandler.getarchiveposts",
......
import asyncio
import threading
from concurrent.futures import Executor
from functools import wraps
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
def async_threaded(pool: Executor = None):
def wrapper(fun):
@wraps(fun)
async def inner(*args, **kwargs):
coroutine = fun(*args, **kwargs)
if pool is None:
thread = threading.Thread(target=asyncio.run, args=(coroutine,))
thread.start()
else:
pool.submit(asyncio.run, (coroutine,))
return inner
return wrapper
class AsyncWrapper(object):
def __init__(self, subject):
self.__subject = subject
def __getattr__(self, item):
fun = getattr(self.__subject, item)
def wrapped(*args, **kwargs):
asyncio.run(fun(*args, **kwargs))
return wrapped
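Note for reviewers: the AsyncWrapper module above is what lets the QML side keep making plain pyotherside calls while the Python handlers become coroutines. A hedged sketch of the intended pattern (Handler and its methods are hypothetical; the real consumer is the FeedParser instance further down):

    from AsyncWrapper import AsyncWrapper, async_threaded

    class Handler:
        async def quick(self):
            print("runs to completion inside asyncio.run()")

        @async_threaded()                 # body is handed off to a background thread
        async def slow(self):
            print("long-running refresh")

    instance = AsyncWrapper(Handler())
    instance.quick()   # synchronous call site, e.g. pyotherside call("Module.instance.quick", ...)
    instance.slow()    # returns almost immediately; the coroutine runs on its own thread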
...@@ -8,6 +8,14 @@ IconContextMenu {
property bool archiveEnabled property bool archiveEnabled
property bool favoriteEnabled property bool favoriteEnabled
property var upPressedHandler: function (model) {
queuehandler.queueInsertNext(model.id)
}
property var downPressedHandler: function (model) {
queuehandler.queueInsertBottom(model.id)
}
property var archivePressedHandler
spacing: (archiveEnabled spacing: (archiveEnabled
&& favoriteEnabled) ? Theme.paddingMedium : Theme.paddingLarge && favoriteEnabled) ? Theme.paddingMedium : Theme.paddingLarge
...@@ -28,7 +36,7 @@ IconContextMenu {
text: 'QueueTop' text: 'QueueTop'
icon.source: 'image://theme/icon-m-up' icon.source: 'image://theme/icon-m-up'
onClicked: { onClicked: {
queuehandler.queueInsertNext(model.id) upPressedHandler(model)
closeMenu() closeMenu()
} }
} }
...@@ -36,7 +44,7 @@ IconContextMenu {
text: 'QueueBottom' text: 'QueueBottom'
icon.source: 'image://theme/icon-m-down' icon.source: 'image://theme/icon-m-down'
onClicked: { onClicked: {
queuehandler.queueInsertBottom(model.id) downPressedHandler(model)
closeMenu() closeMenu()
} }
} }
...@@ -46,7 +54,7 @@ IconContextMenu {
text: 'Archive' text: 'Archive'
icon.source: 'image://theme/icon-m-backup' icon.source: 'image://theme/icon-m-backup'
onClicked: { onClicked: {
inboxhandler.moveArchive(id) archivePressedHandler(model)
closeMenu() closeMenu()
} }
} }
......
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import logging import logging
import threading
from urllib.error import URLError
import pyotherside import pyotherside
import threading
import sys
import os import os
import time import time
import tarfile import tarfile
sys.path.append("/usr/share/harbour-podqast/python") from AsyncWrapper import AsyncWrapper, async_threaded
from podcast.constants import Constants from podcast.constants import Constants
from podcast.podcast import PodcastFactory, Podcast from podcast.podcast import PodcastFactory, Podcast
from podcast.podcastlist import PodcastListFactory from podcast.podcastlist import PodcastListFactory
from podcast.archive import ArchiveFactory from podcast.archive import ArchiveFactory
from podcast.factory import Factory
from podcast.util import create_opml, movePost, chunks from podcast.util import create_opml, movePost, chunks
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
cachevar = {}
def get_feedinfo(url, preview=False, num_preview_episodes=3):
logger.info("getting feedinfo %s for %s",
"in previewmode with %d episodes" % num_preview_episodes if preview else "", url)
if preview:
podcast, episodes = Podcast.create_from_url(url, preview=True, num_preview_items=num_preview_episodes)
else:
podcast, episodes = PodcastFactory().get_podcast(url)
feedinfo = podcast.feedinfo()
feedinfo["latest_entries"] = [episode.get_data() for episode in episodes[:num_preview_episodes]]
if hasattr(podcast, "alt_feeds"):
feedinfo["altfeeds"] = podcast.alt_feeds
else:
logger.warning("No altfeeds attribute found in podcast")
feedinfo["altfeeds"] = []
pyotherside.send("feedinfo", feedinfo)
def init_from_qml(object): def init_from_qml(object):
Constants().init_from_qml(object) Constants().init_from_qml(object)
# safeguard to prevent parallel execution
def get_entries(url): lock_refresh = threading.Lock()
"""
Get all podposts
"""
podcast = PodcastFactory().get_podcast(url)
link = podcast.link
for offset, max, chunk in chunks(podcast.get_entries(), 32):
pyotherside.send("episodeListData", link, podcast.title, chunk, offset, max)
def subscribe_podcast(url):
"""
Subscribe a podcast
"""
PodcastFactory().remove_podcast(url)
Podcast.create_from_url(url)
podcast_list = PodcastListFactory().get_podcast_list()
podcast_list.add(url)
pyotherside.send("subscribed", url)
def get_podcasts():
"""
Get Podcast List
"""
podcasts = []
pclist = PodcastListFactory().get_podcast_list()
for pc in pclist.get_podcasts_objects():
podcasts.append(pc.feedinfo())
pyotherside.send("podcastslist", podcasts)
def delete_podcast(url):
"""
Delete a podcast from list
"""
PodcastListFactory().get_podcast_list().delete_podcast(url)
get_podcasts()
def move_archive(id):
"""
move post to Archive
"""
ArchiveFactory().get_archive().insert(id)
pyotherside.send("movedToArchive")
def refresh_podcast(url, moveto, download, limit=0, full_refresh=False):
"""
Refresh one podcast
"""
if not download:
return
podcast = PodcastFactory().get_podcast(url)
posts = []
for move, post in podcast.refresh(moveto, limit, full_refresh=full_refresh):
page = movePost(move, post.id)
posts.append(post.id)
pyotherside.send(
"updatesNotification", podcast.title, post.title, page
)
pyotherside.send("refreshFinished")
def refresh_podcasts(moveto, download, limit=0, full_refresh=False):
"""
Refresh all podcasts
"""
if not download:
return
podcast_list = PodcastListFactory().get_podcast_list()
for postid, podcasttitle, posttitle, move in podcast_list.refresh(moveto, limit, full_refresh=full_refresh):
page = movePost(move, postid)
pyotherside.send(
"updatesNotification", podcasttitle, posttitle, page
)
pyotherside.send("refreshFinished")
def get_podcast_params(url):
"""
Get Parameter set of podcast
"""
podcast = PodcastFactory().get_podcast(url)
pyotherside.send("podcastParams", podcast.get_params())
def set_podcast_params(url, params):
"""
Put parameter set to podcast
"""
podcast = PodcastFactory().get_podcast(url)
podcast.set_params(params)
def render_html(data):
"""
Render a temporary page
"""
storepath = Constants().iconpath
htmlfile = os.path.join(storepath, "page.html")
with open(htmlfile, "wb") as h:
h.write("<html><body>".encode())
h.write(data.encode())
h.write("</body></html".encode())
pyotherside.send("htmlfile", htmlfile)
def nomedia(doset):
"""
Set nomedia in config root
"""
Factory().nomedia(doset)
def import_opml(opmlfile):
"""
Import podcasts from opmlfile
"""
pclist = PodcastListFactory().get_podcast_list()
imported = pclist.import_opml(opmlfile)
if imported > 0:
pyotherside.send("opmlimported", imported)
def import_gpodder():
"""
Import podcasts from opmlfile
"""
pclist = PodcastListFactory().get_podcast_list()
imported = pclist.import_gpodder()
if imported > 0:
pyotherside.send("opmlimported", imported)
def do_backup():
"""
Create a backup tarball of store and icons
"""
rootpath = Constants().data_home
homedir = os.path.expanduser("~")
os.chdir(homedir)
filename = "podqast-%s.tar.gz" % time.strftime("%Y%m%d%H%M")
tarfilename = os.path.join(homedir, filename)
try:
tar = tarfile.open(tarfilename, "w:gz")
tar.add(os.path.join(rootpath, "icons"), arcname="./icons")
tar.add(os.path.join(rootpath, "store"), arcname="./store")
tar.close()
except:
pyotherside.send("error", "Backup failed")
return
pyotherside.send("backupDone", tarfilename)
def write_opml():
"""
Create opml file
"""
podcasts = []
pclist = PodcastListFactory().get_podcast_list()
for pc in pclist.get_podcasts_objects():
podcasts.append(
{"name": pc.title, "xml_url": pc.url, "html_url": pc.link}
)
homedir = os.path.expanduser("~")
os.chdir(homedir)
filename = "podqast-%s.opml" % time.strftime("%Y%m%d%H%M")
opmlfilename = os.path.join(homedir, filename)
create_opml(opmlfilename, podcasts)
class FeedParser: class FeedParser:
def __init__(self): def __init__(self):
self.bgthread = threading.Thread()
self.bgthread.start()
self.bgthread1 = threading.Thread()
self.bgthread1.start()
pyotherside.atexit(self.doexit) pyotherside.atexit(self.doexit)
async def get_feedinfo(self, url, preview=False, num_preview_episodes=3):
import urllib3.util
logger.info("getting feedinfo %s for %s",
"in previewmode with %d episodes" % num_preview_episodes if preview else "", url)
podcast = None
episodes = None
if not urllib3.util.parse_url(url).scheme:
logger.warning("Adding https to schemaless url %s",url)
url = 'https://' + url
try:
if preview:
podcast, episodes = Podcast.create_from_url(url, preview=True, num_preview_items=num_preview_episodes)
else:
podcast, episodes = PodcastFactory().get_podcast(url)
except URLError:
pyotherside.send("feedFetchError",url)
return
feedinfo = podcast.feedinfo()
feedinfo["latest_entries"] = [episode.get_data() for episode in episodes[:num_preview_episodes]]
if hasattr(podcast, "alt_feeds"):
feedinfo["altfeeds"] = podcast.alt_feeds
else:
logger.warning("No altfeeds attribute found in podcast")
feedinfo["altfeeds"] = []
pyotherside.send("feedinfo", feedinfo)
def doexit(self): def doexit(self):
from podcast.constants import db from podcast.constants import db
db.close() db.close()
def getfeedinfo(self, theurl): async def get_podcast_preview(self, theurl, num_preview_episodes):
if self.bgthread.is_alive(): await self.get_feedinfo(theurl, True, num_preview_episodes=num_preview_episodes)
return
self.bgthread = threading.Thread(target=get_feedinfo, args=[theurl, False]) async def get_entries(self, url):
self.bgthread.start() """
Get all podposts
def getpodcastpreview(self, theurl, num_preview_episodes): """
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=get_feedinfo, args=[theurl, True, num_preview_episodes])
self.bgthread.start()
def getentries(self, theurl):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=get_entries, args=[theurl])
self.bgthread.start()
def getpodcasts(self):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=get_podcasts)
self.bgthread.start()
def subscribepodcast(self, theurl):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(
target=subscribe_podcast, args=[theurl]
)
self.bgthread.start()
def deletepodcast(self, theurl): podcast = PodcastFactory().get_podcast(url)
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=delete_podcast, args=[theurl])
self.bgthread.start()
def refreshpodcast(self, theurl, moveto, download): link = podcast.link
if self.bgthread.is_alive():
for offset, max, chunk in chunks(podcast.get_entries(), 32):
pyotherside.send("episodeListData", link, podcast.title, chunk, offset, max)
async def subscribe_podcast(self,url):
"""
Subscribe a podcast
"""
PodcastFactory().remove_podcast(url)
Podcast.create_from_url(url)
podcast_list = PodcastListFactory().get_podcast_list()
podcast_list.add(url)
pyotherside.send("subscribed", url)
async def get_podcasts(self):
"""
Get Podcast List
"""
podcasts = []
pclist = PodcastListFactory().get_podcast_list()
for pc in pclist.get_podcasts_objects():
podcasts.append(pc.feedinfo())
pyotherside.send("podcastslist", podcasts)
async def delete_podcast(self,url):
"""
Delete a podcast from list
"""
PodcastListFactory().get_podcast_list().delete_podcast(url)
await self.get_podcasts()
async def move_archive(self,id):
"""
move post to Archive
"""
ArchiveFactory().get_archive().insert(id)
pyotherside.send("movedToArchive")
@async_threaded()
async def refresh_podcast(self, url, moveto, download, limit=0, full_refresh=False):
"""
Refresh one podcast
"""
with lock_refresh:
if not download:
return
podcast = PodcastFactory().get_podcast(url)
for move, post in podcast.refresh(moveto, limit, full_refresh=full_refresh):
page = movePost(move, post.id)
pyotherside.send(
"updatesNotification", podcast.title, post.title, page
)
pyotherside.send("refreshFinished")
@async_threaded()
async def refresh_podcasts(self,moveto, download, limit=0, full_refresh=False):
"""
Refresh all podcasts
"""
with lock_refresh:
if not download:
return
podcast_list = PodcastListFactory().get_podcast_list()
for postid, podcasttitle, posttitle, move in podcast_list.refresh(moveto, limit, full_refresh=full_refresh):
page = movePost(move, postid)
pyotherside.send(
"updatesNotification", podcasttitle, posttitle, page
)
pyotherside.send("refreshFinished")
async def get_podcast_params(self,url):
"""
Get Parameter set of podcast
"""
podcast = PodcastFactory().get_podcast(url)
pyotherside.send("podcastParams", podcast.get_params())
async def set_podcast_params(self,url, params):
"""
Put parameter set to podcast
"""
podcast = PodcastFactory().get_podcast(url)
podcast.set_params(params)
async def render_html(self,data):
"""
Render a temporary page
"""
storepath = Constants().iconpath
htmlfile = os.path.join(storepath, "page.html")
with open(htmlfile, "wb") as h:
h.write("<html><body>".encode())
h.write(data.encode())
h.write("</body></html".encode())
pyotherside.send("htmlfile", htmlfile)
async def nomedia(self,doset):
"""
Set nomedia in config root
"""
audiopath = os.path.join(Constants().audiofilepath, ".nomedia")
iconpath = os.path.join(Constants().iconpath, ".nomedia")
open(iconpath, "a").close()
if doset == False:
open(audiopath, "a").close()
else:
os.remove(audiopath)
async def import_opml(self,opmlfile):
"""
Import podcasts from opmlfile
"""
pclist = PodcastListFactory().get_podcast_list()
imported = pclist.import_opml(opmlfile)
if imported > 0:
pyotherside.send("opmlimported", imported)
async def import_gpodder(self):
"""
Import podcasts from opmlfile
"""
pclist = PodcastListFactory().get_podcast_list()
imported = pclist.import_gpodder()
if imported > 0:
pyotherside.send("opmlimported", imported)
async def do_backup(self):
"""
Create a backup tarball of store and icons
"""
rootpath = Constants().data_home
homedir = os.path.expanduser("~")
os.chdir(homedir)
filename = "podqast-%s.tar.gz" % time.strftime("%Y%m%d%H%M")
tarfilename = os.path.join(homedir, filename)
try:
tar = tarfile.open(tarfilename, "w:gz")
tar.add(os.path.join(rootpath, "icons"), arcname="./icons")
tar.add(os.path.join(rootpath, "store"), arcname="./store")
tar.close()
except:
pyotherside.send("error", "Backup failed")
return return
self.bgthread1 = threading.Thread(
target=refresh_podcast, args=[theurl, moveto, download]
)
self.bgthread1.start()
def refreshpodcasts(self, moveto, download, limit=0, full_refresh=False): pyotherside.send("backupDone", tarfilename)
if self.bgthread.is_alive():
return
self.bgthread1 = threading.Thread(
target=refresh_podcasts, args=[moveto, download, limit, full_refresh]
)
self.bgthread1.start()
def getpodcastparams(self, theurl): async def write_opml(self):
if self.bgthread.is_alive(): """
return Create opml file
self.bgthread = threading.Thread( """
target=get_podcast_params, args=[theurl]
)
self.bgthread.start()
def setpodcastparams(self, theurl, params): podcasts = []
if self.bgthread.is_alive(): pclist = PodcastListFactory().get_podcast_list()
return for pc in pclist.get_podcasts_objects():
self.bgthread = threading.Thread( podcasts.append(
target=set_podcast_params, args=[theurl, params] {"name": pc.title, "xml_url": pc.url, "html_url": pc.link}
) )
self.bgthread.start()
def renderhtml(self, data): homedir = os.path.expanduser("~")
if self.bgthread.is_alive(): os.chdir(homedir)
return filename = "podqast-%s.opml" % time.strftime("%Y%m%d%H%M")
self.bgthread = threading.Thread(target=render_html, args=[data]) opmlfilename = os.path.join(homedir, filename)
self.bgthread.start()
def nomedia(self, doset): create_opml(opmlfilename, podcasts)
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=doset, args=[doset])
self.bgthread.start()
def importopml(self, opmlfile):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=import_opml, args=[opmlfile])
self.bgthread.start()
def importgpodder(self):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=import_gpodder)
self.bgthread.start()
def dobackup(self):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=do_backup)
self.bgthread.start()
def writeopml(self):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=write_opml)
self.bgthread.start()
def movearchive(self, id):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=move_archive, args=[id])
self.bgthread.start()
feedparse = FeedParser() instance = AsyncWrapper(FeedParser())
...@@ -10,15 +10,17 @@ Python {
signal podcastParams(var pcdata) signal podcastParams(var pcdata)
signal subscribed(string pcurl) signal subscribed(string pcurl)
signal updatesNotification(string pctitle, string pstitle, string page) signal updatesNotification(string pctitle, string pstitle, string page)
signal refreshFinished() signal refreshFinished
signal htmlfile(string htmlfile) signal htmlfile(string htmlfile)
signal refreshProgress(real progress) signal refreshProgress(real progress)
signal refreshPost(string posttitle) signal refreshPost(string posttitle)
signal opmlImported(int opmlcount) signal opmlImported(int opmlcount)
signal appError(string errmessage) signal appError(string errmessage)
signal refreshlimit(string podcasttitle)
signal backupDone(string tarpath) signal backupDone(string tarpath)
signal opmlSaveDone(string opmlpath) signal opmlSaveDone(string opmlpath)
signal episodeListData(string podlink, string podtitle,var episodes, string offset, string totalcount) signal episodeListData(string podlink, string podtitle, var episodes, string offset, string totalcount)
signal feedFetchError(string pod_url)
Component.onCompleted: { Component.onCompleted: {
setHandler("feedinfo", feedInfo) setHandler("feedinfo", feedInfo)
...@@ -32,64 +34,67 @@ Python {
setHandler("refreshPost", refreshPost) setHandler("refreshPost", refreshPost)
setHandler("opmlimported", opmlImported) setHandler("opmlimported", opmlImported)
setHandler("apperror", appError) setHandler("apperror", appError)
setHandler("refreshlimit", refreshlimit)
setHandler("backupDone", backupDone) setHandler("backupDone", backupDone)
setHandler("opmlSaveDone", opmlSaveDone) setHandler("opmlSaveDone", opmlSaveDone)
setHandler("episodeListData", episodeListData) setHandler("episodeListData", episodeListData)
setHandler("feedFetchError", feedFetchError)
addImportPath(Qt.resolvedUrl('.')); addImportPath(Qt.resolvedUrl("./python"))
+        addImportPath(Qt.resolvedUrl('.'))
        importModule('FeedParser', function () {
-            console.log('FeedParser is now imported');
-            call("FeedParser.init_from_qml", [podqast], function() {});
-        });
+            console.log('FeedParser is now imported')
+            call("FeedParser.init_from_qml", [podqast], function () {})
+        })
    }
    function getPodcast(url) {
-        console.log("url: " + url)
-        call("FeedParser.feedparse.getfeedinfo", [url], function() {});
+        call("FeedParser.instance.get_feedinfo", [url], function () {})
    }
    function getPodcastPreview(url, num_preview_episodes) {
        console.log("fetching preview for " + url)
-        call("FeedParser.feedparse.getpodcastpreview", [url, num_preview_episodes], function() {});
+        call("FeedParser.instance.get_podcast_preview",
+             [url, num_preview_episodes], function () {})
    }
    function getPodcasts() {
-        call("FeedParser.feedparse.getpodcasts", function() {});
-        // call("FeedParser.get_podcasts", function() {});
+        call("FeedParser.instance.get_podcasts", function () {})
    }
    function getEntries(url) {
        console.log("url: " + url)
-        call("FeedParser.feedparse.getentries", [url], function() {});
-        // call("FeedParser.get_entries", [url], function() {});
+        call("FeedParser.instance.get_entries", [url], function () {})
    }
    function subscribePodcast(url) {
        console.log("Subscribe url: " + url)
-        call("FeedParser.feedparse.subscribepodcast", [url], function() {})
-        // call("FeedParser.subscribe_podcast", [url], function() {})
-    }
-    function subscribePodcastFg(url) {
-        console.log("Subscribe url: " + url)
-        // Needs foreground! call("FeedParser.feedparse.subscribepodcast", [url], function() {})
-        call("FeedParser.subscribe_podcast", [url], function() {})
+        call("FeedParser.instance.subscribe_podcast", [url], function () {})
    }
    function deletePodcast(url) {
        console.log("Delete url: " + url)
-        call("FeedParser.feedparse.deletepodcast", [url], function() {})
-        // call("FeedParser.delete_podcast", [url], function() {})
+        call("FeedParser.instance.delete_podcast", [url], function () {})
    }
    function refreshPodcast(url) {
        console.log("Refresh url: " + url)
        _showUpdatesNotification = true
-        call("FeedParser.refresh_podcast", [url, moveToConf.value,
-            doDownloadConf.value && (wifiConnected || doMobileDownConf.value), autoLimitConf.value], function() {})
+        call("FeedParser.instance.refresh_podcast",
+             [url, moveToConf.value, doDownloadConf.value
+              && (wifiConnected
+                  || doMobileDownConf.value), autoLimitConf.value],
+             function () {})
    }
    function refreshPodcasts(full) {
-        if (full === undefined) full = false
+        if (migrationhandler.running)
+            return
+        if (full === undefined)
+            full = false
        console.log("Refreshing all podcasts.")
        _showUpdatesNotification = true
-        call("FeedParser.feedparse.refreshpodcasts", [moveToConf.value,
-            doDownloadConf.value && (wifiConnected || doMobileDownConf.value), autoLimitConf.value, Boolean(full)], function() {})
+        call("FeedParser.instance.refresh_podcasts",
+             [moveToConf.value, doDownloadConf.value
+              && (wifiConnected
+                  || doMobileDownConf.value), autoLimitConf.value, Boolean(
+                 full)], function () {})
        var d = new Date()
        var seconds = Math.round(d.getTime() / 1000)
        lastRefreshed.value = seconds
@@ -97,54 +102,54 @@ Python {
    function getPodcastParams(url) {
        console.log("Get Podcast params")
-        call("FeedParser.feedparse.getpodcastparams", [url], function() {})
-        // call("FeedParser.get_podcast_params", [url], function() {})
+        call("FeedParser.instance.get_podcast_params", [url], function () {})
    }
    function setPodcastParams(url, params) {
        console.log("Set Podcast Params")
-        call("FeedParser.feedparse.setpodcastparams", [url, params], function() {})
-        // call("FeedParser.set_podcast_params", [url, params], function() {})
+        call("FeedParser.instance.set_podcast_params", [url, params],
+             function () {})
    }
    function renderHtml(data) {
        console.log("Set Podcast Params")
-        call("FeedParser.feedparse.renderhtml", [data], function() {})
-        // call("FeedParser.render_html", [data], function() {})
+        call("FeedParser.instance.render_html", [data], function () {})
    }
    function nomedia(doset) {
        console.log("Set Podcast Params")
-        // call("FeedParser.feedparse.nomedia", [doset], function() {})
-        call("FeedParser.nomedia", [doset], function() {})
+        call("FeedParser.instance.nomedia", [doset], function () {})
    }
    function importOpml(opmlfile) {
        console.log("Import Podcasts from OPML")
-        // call("FeedParser.feedparse.importopml", [opmlfile], function() {})
-        call("FeedParser.import_opml", [opmlfile], function() {})
+        call("FeedParser.instance.import_opml", [opmlfile], function () {})
    }
    function importGpodder() {
        console.log("Import Podcasts from Gpodder database")
-        // call("FeedParser.feedparse.importgpodder", function() {})
-        call("FeedParser.import_gpodder", function() {})
+        call("FeedParser.instance.import_gpodder", function () {})
    }
    function doBackup() {
        console.log("Backup")
-        // call("FeedParser.feedparse.dobackup", function() {})
-        call("FeedParser.do_backup", function() {})
+        call("FeedParser.instance.do_backup", function () {})
    }
    function doWriteOpml() {
        console.log("Writing opml file")
-        // call("FeedParser.feedparse.writeopml", function() {})
-        call("FeedParser.write_opml", function() {})
+        call("FeedParser.instance.write_opml", function () {})
    }
    function moveArchive(id) {
-        // call("FeedParser.feedparse.movearchive", [id], function() {})
-        call("FeedParser.move_archive", [id], function() {})
+        call("FeedParser.instance.move_archive", [id], function () {})
    }
    onError: {
-        console.log('python error: ' + traceback);
+        console.log('python error: ' + traceback)
    }
+    onRefreshlimit: {
+        appNotification.previewSummary = qsTr("Auto-Post-Limit reached")
+        appNotification.previewBody = qsTr("for %1").arg(podcasttitle)
+        appNotification.body = qsTr("Auto-Post-Limit reached for %1").arg(
+            podcasttitle)
+        appNotification.replacesId = "limitNotification"
+        appNotification.publish()
+    }
    onAppError: {
...
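The QML wrappers above rely on pyotherside's dotted-name resolution: call("FeedParser.instance.get_feedinfo", [url], ...) imports the FeedParser module, looks up its module-level instance attribute and invokes the get_feedinfo method on it, while call("FeedParser.init_from_qml", [podqast], ...) targets a plain module-level function. The Python module itself is not part of this hunk; the following is only a minimal sketch of the shape those call strings assume — the class name, the "feedinfo" event and the method bodies are illustrative, not taken from the repository.

# FeedParser.py -- hypothetical sketch of the targets the call() strings above resolve to
import pyotherside

class _FeedParser:
    def get_feedinfo(self, url):
        # fetch and parse the feed for `url`, then push the result back to QML;
        # the event name "feedinfo" is illustrative and would need a matching
        # setHandler("feedinfo", ...) on the QML side
        info = {"url": url}  # placeholder for the parsed feed metadata
        pyotherside.send("feedinfo", info)

# module-level singleton referenced as "FeedParser.instance.<method>" from QML
instance = _FeedParser()

def init_from_qml(podqast):
    # module-level entry point invoked once from Component.onCompleted
    instance.podqast = podqast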
# -*- coding: utf-8 -*-
+import json
+import logging
import pyotherside
import threading
-import sys
-sys.path.append("/usr/share/harbour-podqast/python")
-from mygpoclient import public
tagslist = []
toplist = []
-client = public.PublicClient()
+http = None
def get_toptags():
    """
@@ -19,30 +16,38 @@ def get_toptags():
    """
    if tagslist == []:
-        toptags = client.get_toptags()
+        toptags = url_get_json("https://www.gpodder.net/api/2/tags/12.json")
        for tag in toptags:
-            if tag.usage > 70:
-                tagslist.append({"tagname": tag.tag, "position": "TagsList"})
+            if tag["usage"] > 70:
+                tagslist.append({"tagname": tag["tag"], "position": "TagsList"})
    pyotherside.send("toptags", tagslist)
+def url_get_json(url, params=None):
+    import urllib3
+    global http
+    if not http:
+        http = urllib3.PoolManager()
+    return json.loads(http.request("GET", url, fields=params).data.decode('utf-8'))
def get_toplist():
    """
    Get the actual top podcasts (used for Discover grid)
    """
    if toplist == []:
-        tops = client.get_toplist(count=12)
+        tops = url_get_json("https://gpodder.net/toplist/20.json")
        for top in tops:
-            description = top.description
+            description = top["description"]
            if len(description) > 160:
                description = description[:160] + "..."
            toplist.append(
                {
-                    "title": top.title,
-                    "url": top.url,
-                    "logo_url": top.logo_url,
+                    "title": top["title"],
+                    "url": top["url"],
+                    "logo_url": top["logo_url"],
                    "description": description,
                }
            )
@@ -54,22 +59,22 @@ def get_tags_by_name(tagname):
    Get podcast list by tag name
    """
    pclist = []
-    podcasts = client.get_podcasts_of_a_tag(tagname, count=20)
+    podcasts = url_get_json(f"https://gpodder.net/api/2/tag/{tagname}/20.json")
    for podcast in podcasts:
-        title = podcast.title
+        title = podcast["title"]
        if len(title) > 50:
            title = title[:50] + "..."
-        description = podcast.description
+        description = podcast["description"]
        if len(description) > 160:
            description = description[:160] + "..."
        pclist.append(
            {
-                "url": podcast.url,
+                "url": podcast["url"],
                "title": title,
-                "titlefull": podcast.title,
+                "titlefull": podcast["title"],
                "description": description,
-                "website": podcast.website,
-                "logo_url": podcast.logo_url,
+                "website": podcast["website"],
+                "logo_url": podcast["logo_url"],
            }
        )
    pyotherside.send("podcastlist", pclist)
@@ -80,28 +85,28 @@ def search_pods(query):
    Search for podcast list
    query: the search query
    """
+    logging.debug("searching gpodder for '%s'",query)
    pclist = []
-    podcasts = client.search_podcasts(query)
+    podcasts = url_get_json("https://gpodder.net/search.json", params={"q": query})
    for podcast in podcasts:
-        title = podcast.title
+        title = podcast["title"]
        if len(title) > 50:
            title = title[:50] + "..."
-        description = podcast.description
+        description = podcast["description"]
        if len(description) > 160:
            description = description[:160] + "..."
-        if not podcast.logo_url:
+        if not podcast["logo_url"]:
            logo_url = ""
        else:
-            logo_url = podcast.logo_url
+            logo_url = podcast["logo_url"]
        pclist.append(
            {
-                "url": podcast.url,
+                "url": podcast["url"],
                "title": title,
-                "titlefull": podcast.title,
+                "titlefull": podcast["title"],
                "description": description,
-                "website": podcast.website,
+                "website": podcast["website"],
                "logo_url": logo_url,
            }
        )
...
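The rewrite above drops the mygpoclient dependency: every gpodder.net lookup now goes through one shared urllib3 PoolManager and the public JSON endpoints (tags, toplist, tag, search). A self-contained sketch of the same access pattern, usable outside the app for a quick check — the helper mirrors url_get_json from the diff, while the surrounding script and the example query are illustrative additions only.

# Standalone sketch of the gpodder.net JSON access used above (assumes network access)
import json
import urllib3

http = urllib3.PoolManager()

def url_get_json(url, params=None):
    # GET the resource and decode the JSON body, as the committed helper does
    response = http.request("GET", url, fields=params)
    return json.loads(response.data.decode("utf-8"))

# the endpoints the Discover page relies on
toplist = url_get_json("https://gpodder.net/toplist/20.json")
results = url_get_json("https://gpodder.net/search.json", params={"q": "linux"})
print(toplist[0]["title"], len(results))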
@@ -14,24 +14,25 @@ Python {
        setHandler("toptags", topTags)
        setHandler("podcastlist", podcastList)
-        addImportPath(Qt.resolvedUrl('.'));
+        addImportPath(Qt.resolvedUrl('.'))
        importModule('GpodderNet', function () {
            console.log('GpodderNet is now imported')
        })
    }
    function getTopList() {
-        call("GpodderNet.gpoddernet.gettoplist", function() {});
-        // call("GpodderNet.get_toplist", function() {});
+        call("GpodderNet.gpoddernet.gettoplist", function () {})
    }
    function doSearch(query) {
-        call("GpodderNet.gpoddernet.searchpods", [query], function() {});
-        // call("GpodderNet.search_pods", [query], function() {});
+        call("GpodderNet.gpoddernet.searchpods", [query], function () {})
    }
    function getTags() {
-        call("GpodderNet.gpoddernet.gettags", function() {});
-        // call("GpodderNet.get_toptags", function() {});
+        call("GpodderNet.gpoddernet.gettags", function () {})
    }
    function getPodcasts(tagname) {
-        call("GpodderNet.gpoddernet.getpcbytagname", [tagname], function() {});
+        call("GpodderNet.gpoddernet.getpcbytagname", [tagname], function () {})
    }
+    onError: {
+        console.log('python error: ' + traceback)
+    }
}
@@ -7,5 +7,8 @@ PostListItem {
        id: contextMenu
        favoriteEnabled: false
        archiveEnabled: true
+        archivePressedHandler: function (model) {
+            inboxhandler.moveArchive(model.id)
+        }
    }
}
+import asyncio
+from typing import Iterator
+import pyotherside
+from podcast.persistent_log import LogMessage, get_log_messages
+from podcast.util import chunks
+async def get_logs_asnyc():
+    log: Iterator[LogMessage] = get_log_messages()
+    for offset, max, chunk in chunks(log, 32):
+        pyotherside.send("logData",offset, [log.to_dict() for log in chunk])
+def get_logs():
+    asyncio.run(get_logs_asnyc())
\ No newline at end of file
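The new module streams the persistent log to the QML side in slices of 32 entries via chunks() from podcast.util, which is not part of this diff. A plausible sketch of the contract the unpacking "for offset, max, chunk in chunks(log, 32)" appears to assume — the (offset, total, chunk) triple and the eager materialisation are assumptions, not the committed implementation:

# podcast/util.py -- hypothetical sketch of the chunks() helper used above
from typing import Iterable, Iterator, List, Tuple, TypeVar

T = TypeVar("T")

def chunks(items: Iterable[T], size: int) -> Iterator[Tuple[int, int, List[T]]]:
    # materialise the iterable so the total length is known up front
    materialized = list(items)
    total = len(materialized)
    for offset in range(0, total, size):
        yield offset, total, materialized[offset:offset + size]

With this shape, each "logData" event carries the slice offset plus up to 32 serialised log entries, so a long log can be appended to the QML view incrementally instead of being sent in one large payload.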