Commit 1a5f0244 authored by Thilo Kogge

added asyncio for some handlers

parent 7d6385f0
3 merge requests: !73 Discover fixes, !72 migration for lost episodes, !71 Async logs
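The change behind "added asyncio for some handlers" follows one pattern throughout: worker functions that used to be launched on a background threading.Thread become coroutines, and the handler methods that QML calls now drive them with asyncio.run(). A minimal before/after sketch of that conversion, using a hypothetical send_posts worker in place of the real ones such as get_archive_posts:

import asyncio
import threading

# Before: each handler call spawned a background thread (or bailed out if one was still alive).
def send_posts_threaded():
    print("collecting posts on a thread")

class OldHandler:
    def __init__(self):
        self.bgthread = threading.Thread()

    def getposts(self):
        if self.bgthread.is_alive():
            return
        self.bgthread = threading.Thread(target=send_posts_threaded)
        self.bgthread.start()

# After: the worker is a coroutine and the handler runs it to completion.
async def send_posts():
    print("collecting posts via asyncio")

class NewHandler:
    def getposts(self):
        asyncio.run(send_posts())

OldHandler().getposts()
NewHandler().getposts()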
# -*- coding: utf-8 -*-
import asyncio
from typing import Iterator
import pyotherside
import threading
import sys
sys.path.append("/usr/share/harbour-podqast/python")
from podcast.podpost import Podpost
from podcast.util import chunks
from podcast.archive import ArchiveFactory
@@ -14,7 +11,7 @@ import logging
logger = logging.getLogger(__name__)
def get_archive_posts(podurl=None):
async def get_archive_posts(podurl=None):
"""
Return a list of all archive posts
"""
@@ -25,7 +22,7 @@ def get_archive_posts(podurl=None):
pyotherside.send("historyData", offset, [post.get_data() for post in chunk])
def get_archive_pod_data():
async def get_archive_pod_data():
"""
"""
@@ -47,28 +44,12 @@ def get_archive_pod_data():
class ArchiveHandler:
def __init__(self):
self.bgthread = threading.Thread()
self.bgthread.start()
self.bgthread2 = threading.Thread()
self.bgthread2.start()
def getarchiveposts(self, podurl=None):
if self.bgthread2.is_alive():
return
if podurl:
self.bgthread2 = threading.Thread(
target=get_archive_posts, args=[podurl]
)
else:
self.bgthread2 = threading.Thread(target=get_archive_posts)
self.bgthread2.start()
asyncio.run(get_archive_posts(podurl))
def getarchivepoddata(self):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=get_archive_pod_data)
self.bgthread.start()
asyncio.run(get_archive_pod_data())
archivehandler = ArchiveHandler()
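A note on the sketch below (an illustration only; the feed URL and return value are made up): asyncio.run() creates a fresh event loop, runs the coroutine to completion, and only then returns, so from the caller's point of view the handler methods above remain synchronous. Responsiveness on the QML side still comes from pyotherside, whose call() invokes these methods asynchronously and reports back through its callback.

import asyncio

async def get_archive_posts(podurl=None):
    # stand-in for the real coroutine, which would collect posts and
    # pyotherside.send() them back to the QML handler
    return f"posts for {podurl or 'all feeds'}"

def getarchiveposts(podurl=None):
    # blocks until the coroutine has finished, then returns its result
    return asyncio.run(get_archive_posts(podurl))

print(getarchiveposts("https://example.org/feed.xml"))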
@@ -9,6 +9,7 @@ Python {
signal archivePodList(var data)
Component.onCompleted: {
addImportPath(Qt.resolvedUrl("/usr/share/harbour-podqast/python"))
setHandler("historyData", historyData)
setHandler("archivePodList", archivePodList)
@@ -18,6 +19,10 @@ Python {
})
}
onError: {
console.log('python error: ' + traceback)
}
function getArchiveEntries(podurl) {
if (podurl === "home") {
call("ArchiveHandler.archivehandler.getarchiveposts",
......
@@ -19,8 +19,6 @@ from podcast.util import create_opml, movePost, chunks
logger = logging.getLogger(__name__)
cachevar = {}
def get_feedinfo(url, preview=False, num_preview_episodes=3):
logger.info("getting feedinfo %s for %s",
......
@@ -10,15 +10,16 @@ Python {
signal podcastParams(var pcdata)
signal subscribed(string pcurl)
signal updatesNotification(string pctitle, string pstitle, string page)
signal refreshFinished()
signal refreshFinished
signal htmlfile(string htmlfile)
signal refreshProgress(real progress)
signal refreshPost(string posttitle)
signal opmlImported(int opmlcount)
signal appError(string errmessage)
signal refreshlimit(string podcasttitle)
signal backupDone(string tarpath)
signal opmlSaveDone(string opmlpath)
signal episodeListData(string podlink, string podtitle,var episodes, string offset, string totalcount)
signal episodeListData(string podlink, string podtitle, var episodes, string offset, string totalcount)
Component.onCompleted: {
setHandler("feedinfo", feedInfo)
@@ -32,64 +33,73 @@ Python {
setHandler("refreshPost", refreshPost)
setHandler("opmlimported", opmlImported)
setHandler("apperror", appError)
setHandler("refreshlimit", refreshlimit)
setHandler("backupDone", backupDone)
setHandler("opmlSaveDone", opmlSaveDone)
setHandler("episodeListData", episodeListData)
addImportPath(Qt.resolvedUrl('.'));
addImportPath(Qt.resolvedUrl('.'))
importModule('FeedParser', function () {
console.log('FeedParser is now imported');
call("FeedParser.init_from_qml", [podqast], function() {});
});
console.log('FeedParser is now imported')
call("FeedParser.init_from_qml", [podqast], function () {})
})
}
function getPodcast(url) {
console.log("url: " + url)
call("FeedParser.feedparse.getfeedinfo", [url], function() {});
call("FeedParser.feedparse.getfeedinfo", [url], function () {})
}
function getPodcastPreview(url, num_preview_episodes) {
console.log("fetching preview for " + url)
call("FeedParser.feedparse.getpodcastpreview", [url, num_preview_episodes], function() {});
call("FeedParser.feedparse.getpodcastpreview",
[url, num_preview_episodes], function () {})
}
function getPodcasts() {
call("FeedParser.feedparse.getpodcasts", function() {});
call("FeedParser.feedparse.getpodcasts", function () {})
// call("FeedParser.get_podcasts", function() {});
}
function getEntries(url) {
console.log("url: " + url)
call("FeedParser.feedparse.getentries", [url], function() {});
call("FeedParser.feedparse.getentries", [url], function () {})
// call("FeedParser.get_entries", [url], function() {});
}
function subscribePodcast(url) {
console.log("Subscribe url: " + url)
call("FeedParser.feedparse.subscribepodcast", [url], function() {})
call("FeedParser.feedparse.subscribepodcast", [url], function () {})
// call("FeedParser.subscribe_podcast", [url], function() {})
}
function subscribePodcastFg(url) {
console.log("Subscribe url: " + url)
// Needs foreground! call("FeedParser.feedparse.subscribepodcast", [url], function() {})
call("FeedParser.subscribe_podcast", [url], function() {})
call("FeedParser.subscribe_podcast", [url], function () {})
}
function deletePodcast(url) {
console.log("Delete url: " + url)
call("FeedParser.feedparse.deletepodcast", [url], function() {})
call("FeedParser.feedparse.deletepodcast", [url], function () {})
// call("FeedParser.delete_podcast", [url], function() {})
}
function refreshPodcast(url) {
console.log("Refresh url: " + url)
_showUpdatesNotification = true
call("FeedParser.refresh_podcast", [url, moveToConf.value,
doDownloadConf.value && (wifiConnected || doMobileDownConf.value), autoLimitConf.value], function() {})
call("FeedParser.refresh_podcast",
[url, moveToConf.value, doDownloadConf.value
&& (wifiConnected
|| doMobileDownConf.value), autoLimitConf.value],
function () {})
}
function refreshPodcasts(full) {
if (full === undefined) full = false
if (full === undefined)
full = false
console.log("Refreshing all podcasts.")
_showUpdatesNotification = true
call("FeedParser.feedparse.refreshpodcasts", [moveToConf.value,
doDownloadConf.value && (wifiConnected || doMobileDownConf.value), autoLimitConf.value, Boolean(full)], function() {})
call("FeedParser.feedparse.refreshpodcasts",
[moveToConf.value, doDownloadConf.value
&& (wifiConnected
|| doMobileDownConf.value), autoLimitConf.value, Boolean(
full)], function () {})
var d = new Date()
var seconds = Math.round(d.getTime() / 1000)
lastRefreshed.value = seconds
@@ -97,54 +107,63 @@ Python {
function getPodcastParams(url) {
console.log("Get Podcast params")
call("FeedParser.feedparse.getpodcastparams", [url], function() {})
call("FeedParser.feedparse.getpodcastparams", [url], function () {})
// call("FeedParser.get_podcast_params", [url], function() {})
}
function setPodcastParams(url, params) {
console.log("Set Podcast Params")
call("FeedParser.feedparse.setpodcastparams", [url, params], function() {})
call("FeedParser.feedparse.setpodcastparams", [url, params],
function () {})
// call("FeedParser.set_podcast_params", [url, params], function() {})
}
function renderHtml(data) {
console.log("Set Podcast Params")
call("FeedParser.feedparse.renderhtml", [data], function() {})
call("FeedParser.feedparse.renderhtml", [data], function () {})
// call("FeedParser.render_html", [data], function() {})
}
function nomedia(doset) {
console.log("Set Podcast Params")
// call("FeedParser.feedparse.nomedia", [doset], function() {})
call("FeedParser.nomedia", [doset], function() {})
call("FeedParser.nomedia", [doset], function () {})
}
function importOpml(opmlfile) {
console.log("Import Podcasts from OPML")
// call("FeedParser.feedparse.importopml", [opmlfile], function() {})
call("FeedParser.import_opml", [opmlfile], function() {})
call("FeedParser.import_opml", [opmlfile], function () {})
}
function importGpodder() {
console.log("Import Podcasts from Gpodder database")
// call("FeedParser.feedparse.importgpodder", function() {})
call("FeedParser.import_gpodder", function() {})
call("FeedParser.import_gpodder", function () {})
}
function doBackup() {
console.log("Backup")
// call("FeedParser.feedparse.dobackup", function() {})
call("FeedParser.do_backup", function() {})
call("FeedParser.do_backup", function () {})
}
function doWriteOpml() {
console.log("Writing opml file")
// call("FeedParser.feedparse.writeopml", function() {})
call("FeedParser.write_opml", function() {})
call("FeedParser.write_opml", function () {})
}
function moveArchive(id) {
// call("FeedParser.feedparse.movearchive", [id], function() {})
call("FeedParser.move_archive", [id], function() {})
call("FeedParser.move_archive", [id], function () {})
}
onError: {
console.log('python error: ' + traceback);
console.log('python error: ' + traceback)
}
onRefreshlimit: {
appNotification.previewSummary = qsTr("Auto-Post-Limit reached")
appNotification.previewBody = qsTr("for %1").arg(podcasttitle)
appNotification.body = qsTr("Auto-Post-Limit reached for %1").arg(
podcasttitle)
appNotification.replacesId = "limitNotification"
appNotification.publish()
}
onAppError: {
......
# -*- coding: utf-8 -*-
import asyncio
import pyotherside
import threading
import sys
sys.path.append("/usr/share/harbour-podqast/python")
from podcast.podpost import PodpostFactory
from podcast.queue import QueueFactory
from podcast.archive import ArchiveFactory
@@ -14,7 +10,7 @@ import logging
logger = logging.getLogger(__name__)
def get_queue_posts(moved=""):
async def get_queue_posts(moved=""):
"""
Return a list of all queue posts
"""
@@ -35,7 +31,7 @@ def get_queue_posts(moved=""):
pyotherside.send("createList", entries, moved)
def get_first_entry():
async def get_first_entry():
"""
Return the data of the first entry
"""
@@ -47,7 +43,7 @@ def get_first_entry():
)
def queue_insert_top(id, doplay=False):
async def queue_insert_top(id, doplay=False):
"""
Insert Podpost element to top of queue
"""
@@ -55,10 +51,10 @@ def queue_insert_top(id, doplay=False):
queue = QueueFactory().get_queue()
if queue.insert_top(id) == 1:
logger.info("stopped in insert_top")
get_queue_posts()
await get_queue_posts()
def queue_insert_next(id, doplay=False):
async def queue_insert_next(id, doplay=False):
"""
Insert Podpost at next Entry in Queue
"""
@@ -68,7 +64,7 @@ def queue_insert_next(id, doplay=False):
queue.insert_next(id)
def queue_insert_bottom(id, doplay=False):
async def queue_insert_bottom(id, doplay=False):
"""
Insert Podpost at last position in queue
"""
@@ -77,25 +73,25 @@ def queue_insert_bottom(id, doplay=False):
queue.insert_bottom(id)
def queue_move_up(id):
async def queue_move_up(id):
"""
move element up
"""
QueueFactory().get_queue().move_up(id)
get_queue_posts(moved=id)
await get_queue_posts(moved=id)
def queue_move_down(id):
async def queue_move_down(id):
"""
move element down
"""
QueueFactory().get_queue().move_down(id)
get_queue_posts(moved=id)
await get_queue_posts(moved=id)
def queue_play():
async def queue_play():
"""
Get the play information from queue
"""
@@ -110,7 +106,7 @@ def queue_play():
pyotherside.send("playing", data["url"], data["position"])
def queue_pause(position):
async def queue_pause(position):
"""
Set Pause
"""
@@ -122,7 +118,7 @@ def queue_pause(position):
pyotherside.send("pausing")
def queue_stop(position):
async def queue_stop(position):
"""
Stop and save position
"""
@@ -133,7 +129,7 @@ def queue_stop(position):
pyotherside.send("stopping")
def queue_seek(position):
async def queue_seek(position):
"""
Set queue position
"""
@@ -144,7 +140,7 @@ def queue_seek(position):
pyotherside.send("positioning")
def queue_to_archive(id):
async def queue_to_archive(id):
"""
Move an item to archive
"""
@@ -156,20 +152,20 @@ def queue_to_archive(id):
queue.remove(id)
archive.insert(id)
get_queue_posts()
await get_queue_posts()
def queue_top_to_archive():
async def queue_top_to_archive():
"""
Remove top Element from queue
"""
queue = QueueFactory().get_queue()
queue_to_archive(queue.get_top_id())
queue_play()
await queue_to_archive(queue.get_top_id())
await queue_play()
def queue_do_download(id):
async def queue_do_download(id):
"""
Do the Download of an archive
"""
@@ -179,7 +175,7 @@ def queue_do_download(id):
pyotherside.send("downloading", id, perc)
def queue_download_all():
async def queue_download_all():
"""
Download the whole queue if not done
"""
@@ -188,23 +184,22 @@ def queue_download_all():
queue.download_all()
def update_position(position):
async def update_position(position):
queue = QueueFactory().get_queue()
if queue.count() > 0:
queue.update_position(position)
def set_duration(duration):
async def set_duration(duration):
"""
Set podposts duration if not set
"""
QueueFactory().get_queue().set_duration(duration)
get_queue_posts()
await get_queue_posts()
def get_episode_chapters(id):
async def get_episode_chapters(id):
"""
Sends the list of chapters for an episode
id: id of the episode
@@ -214,8 +209,7 @@ def get_episode_chapters(id):
chapters = entry.get_chapters()
pyotherside.send("episodeChapters", chapters)
def send_first_episode_chapters(id):
async def send_first_episode_chapters(id):
"""
Sends episode data and chapter list
(necessary when chapters of the currently
@@ -227,7 +221,7 @@ def send_first_episode_chapters(id):
pyotherside.send("setFirst", edata, chapters)
def toggle_chapter(episodeid, chapterid):
async def toggle_chapter(episodeid, chapterid):
"""
Toggles the selected state of a chapterid
episodeid: id of the episode
@@ -239,128 +233,73 @@ def toggle_chapter(episodeid, chapterid):
class QueueHandler:
def __init__(self):
self.bgthread = threading.Thread()
self.bgthread.start()
self.bgthread1 = threading.Thread()
self.bgthread1.start()
pyotherside.atexit(self.doexit)
def doexit(self):
"""
On exit: we need to stop ourself
"""
asyncio.run(queue_stop(-1))
queue_stop(-1)
def getqueueposts(self):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=get_queue_posts)
self.bgthread.start()
def get_queue_posts(self):
asyncio.run(get_queue_posts())
def getfirstentry(self):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=get_first_entry)
self.bgthread.start()
asyncio.run(get_first_entry())
def queuedownload(self, id):
"""
download audio post
"""
asyncio.run(queue_do_download(id))
dlthread = threading.Thread(target=queue_do_download, args=[id])
dlthread.start()
def queuedownloadall(self):
asyncio.run(queue_download_all())
def queueinserttop(self, id):
if self.bgthread1.is_alive():
return
self.bgthread1 = threading.Thread(target=queue_insert_top, args=[id])
self.bgthread1.start()
asyncio.run(queue_insert_top(id))
def queueinsertnext(self, id):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=queue_insert_next, args=[id])
self.bgthread.start()
asyncio.run(queue_insert_next(id))
def queueinsertbottom(self, id):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=queue_insert_bottom, args=[id])
self.bgthread.start()
asyncio.run(queue_insert_bottom(id))
def queuemoveup(self, id):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=queue_move_up, args=[id])
self.bgthread.start()
asyncio.run(queue_move_up(id))
def queuemovedown(self, id):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=queue_move_down, args=[id])
self.bgthread.start()
asyncio.run(queue_move_down(id))
def queueplay(self):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=queue_play)
self.bgthread.start()
asyncio.run(queue_play())
def queuepause(self, position):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=queue_pause, args=[position])
self.bgthread.start()
asyncio.run(queue_pause(position))
def queuestop(self, position):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=queue_stop, args=[position])
self.bgthread.start()
asyncio.run(queue_stop(position))
def queueseek(self, position):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=queue_seek, args=[position])
self.bgthread.start()
asyncio.run(queue_seek(position))
def queueupdateposition(self, position):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=update_position, args=[position])
self.bgthread.start()
asyncio.run(update_position(position))
def queuetoarchive(self, id):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=queue_to_archive, args=[id])
self.bgthread.start()
asyncio.run(queue_to_archive(id))
def queuetoptoarchive(self):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=queue_top_to_archive)
self.bgthread.start()
asyncio.run(queue_top_to_archive())
def setduration(self, duration):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=set_duration, args=[duration])
self.bgthread.start()
asyncio.run(set_duration(duration))
def getepisodechapters(self, episodeid):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=get_episode_chapters, args=[episodeid])
self.bgthread.start()
asyncio.run(get_episode_chapters(episodeid))
def togglechapter(self, episodeid, chapterid):
if self.bgthread.is_alive():
return
self.bgthread = threading.Thread(target=toggle_chapter, args=[episodeid, chapterid])
self.bgthread.start()
asyncio.run(toggle_chapter(episodeid, chapterid))
def send_first_episode_chapters(self,id):
asyncio.run(send_first_episode_chapters(id))
queuehandler = QueueHandler()
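One more detail of the conversion, shown here as a self-contained sketch (the post id and print statements are placeholders): coroutines that delegate to other coroutines, such as queue_top_to_archive, now await them, so the individual steps stay strictly sequential inside the single asyncio.run() call made by the handler.

import asyncio

async def queue_to_archive(post_id):
    print(f"archiving {post_id}")

async def queue_play():
    print("playing the next entry")

async def queue_top_to_archive():
    # chained awaits keep archiving and playback ordered, mirroring the
    # awaits added to queue_top_to_archive in this commit
    await queue_to_archive("top-post")
    await queue_play()

# the handler wraps the whole chain in one event-loop run
asyncio.run(queue_top_to_archive())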
@@ -32,6 +32,7 @@ Python {
setHandler("firstDownloaded", firstDownloaded)
setHandler("episodeChapters", episodeChapters)
addImportPath(Qt.resolvedUrl("/usr/share/harbour-podqast/python"))
addImportPath(Qt.resolvedUrl('.'))
importModule('QueueHandler', function () {
console.log('QueueHandler is now imported')
@@ -54,7 +55,6 @@ Python {
function queueMoveDown(id) {
call("QueueHandler.queuehandler.queuemovedown", [id], function () {})
// call("QueueHandler.queue_move_down", [id], function() {})
}
function queueToArchive(id) {
@@ -65,7 +65,7 @@ Python {
}
function getQueueEntries() {
call("QueueHandler.get_queue_posts", function () {})
call("QueueHandler.queuehandler.get_queue_posts", function () {})
}
function getFirstEntry() {
call("QueueHandler.queuehandler.getfirstentry", function () {})
@@ -79,7 +79,7 @@ Python {
function () {})
}
function downloadAudioAll() {
call("QueueHandler.queue_download_all", function () {})
call("QueueHandler.queuehandler.queuedownloadall", function () {})
}
function getEpisodeChapters(episodeid) {
@@ -88,8 +88,8 @@ Python {
}
function sendFirstEpisodeChapters(episodeid) {
call("QueueHandler.send_first_episode_chapters", [episodeid],
function () {})
call("QueueHandler.queuehandler.send_first_episode_chapters",
[episodeid], function () {})
}
function toggleChapter(episodeid, chapterid) {
......