Merge pull request #756 from Gelmir/no_read_on_failed_dl

Do not mark articles read if download has failed

commit 85d4946486
6 changed files with 65 additions and 35 deletions
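The change works by turning the "mark as read" step into a reaction to a download-success signal: QBtSession emits newDownloadedTorrentFromRss(url) only after a torrent file has actually been fetched, and the article's new handleTorrentDownloadSuccess slot marks it read. The sketch below is a minimal, self-contained illustration of that signal/slot pattern; the class and member names (Session, Article, downloadSucceeded) are placeholders, not qBittorrent's API.

// Minimal sketch of the success-signal pattern (placeholder names, not qBittorrent code).
// Single-file build: save as main.cpp and let moc/AUTOMOC process it.
#include <QCoreApplication>
#include <QObject>
#include <QString>
#include <QDebug>

class Session : public QObject {
  Q_OBJECT
public:
  // Emits the success signal only when the fetch worked; failures stay silent.
  void finishDownload(const QString& url, bool ok) {
    if (ok)
      emit downloadSucceeded(url);
  }
signals:
  void downloadSucceeded(const QString& url);
};

class Article : public QObject {
  Q_OBJECT
public:
  explicit Article(const QString& torrentUrl) : m_torrentUrl(torrentUrl), m_read(false) {}
  bool isRead() const { return m_read; }
public slots:
  // Marks the article read only when the finished URL is ours.
  void handleDownloadSuccess(const QString& url) {
    if (url == m_torrentUrl) {
      m_read = true;
      emit wasRead();
    }
  }
signals:
  void wasRead();
private:
  QString m_torrentUrl;
  bool m_read;
};

int main(int argc, char** argv) {
  QCoreApplication app(argc, argv);
  Session session;
  Article article("http://example.com/a.torrent");
  // Qt::UniqueConnection mirrors the connect() added in RssFeed::downloadArticleTorrentIfMatching().
  QObject::connect(&session, SIGNAL(downloadSucceeded(QString)),
                   &article, SLOT(handleDownloadSuccess(QString)), Qt::UniqueConnection);

  session.finishDownload("http://example.com/a.torrent", false); // failed: article stays unread
  qDebug() << "after failure, read =" << article.isRead();
  session.finishDownload("http://example.com/a.torrent", true);  // succeeded: slot marks it read
  qDebug() << "after success, read =" << article.isRead();
  return 0;
}

#include "main.moc"

A failed download emits nothing, so the article stays unread and can be retried later, which is the behaviour this pull request is after.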
@@ -2725,6 +2725,7 @@ void QBtSession::processDownloadedFile(QString url, QString file_path) {
     // Pause torrent if necessary
     if (h.is_valid() && pref.addTorrentsInPause() && Preferences().useAdditionDialog())
       h.pause();
+    emit newDownloadedTorrentFromRss(url);
   }
 }
@@ -212,6 +212,7 @@ signals:
   void trackerError(const QString &hash, QString time, QString msg);
   void trackerAuthenticationRequired(const QTorrentHandle& h);
   void newDownloadedTorrent(QString path, QString url);
+  void newDownloadedTorrentFromRss(QString url);
   void newMagnetLink(const QString& link);
   void updateFileSize(const QString &hash);
   void downloadFromUrlFailure(QString url, QString reason);
@@ -120,3 +120,10 @@ const QString& RssArticle::title() const
 {
   return m_title;
 }
+
+void RssArticle::handleTorrentDownloadSuccess(const QString &url) {
+  if (url == m_torrentUrl || url == m_link) {
+    markAsRead();
+    emit articleWasRead();
+  }
+}
@@ -42,7 +42,8 @@ class RssArticle;
 typedef QSharedPointer<RssArticle> RssArticlePtr;
 
 // Item of a rss stream, single information
-class RssArticle {
+class RssArticle : public QObject {
+  Q_OBJECT
 
 public:
   RssArticle(RssFeed* parent, const QString& guid);
@@ -62,6 +63,12 @@ public:
   // Serialization
   QVariantHash toHash() const;
 
+signals:
+  void articleWasRead();
+
+public slots:
+  void handleTorrentDownloadSuccess(const QString& url);
+
   friend RssArticlePtr hashToRssArticle(RssFeed* parent, const QVariantHash& hash);
 
 private:
@@ -114,32 +114,46 @@ void RssFeed::loadItemsFromDisk()
   }
 }
 
-void RssFeed::addArticle(const RssArticlePtr& article)
-{
-  Q_ASSERT(!m_articles.contains(article->guid()));
-  // Update unreadCount
-  if (!article->isRead())
-    ++m_unreadCount;
-  // Insert in hash table
-  m_articles[article->guid()] = article;
-  // Insertion sort
-  RssArticleList::Iterator lowerBound = qLowerBound(m_articlesByDate.begin(), m_articlesByDate.end(), article, rssArticleDateRecentThan);
-  m_articlesByDate.insert(lowerBound, article);
-  const int lbIndex = m_articlesByDate.indexOf(article);
-  // Restrict size
-  const int max_articles = RssSettings().getRSSMaxArticlesPerFeed();
-  if (m_articlesByDate.size() > max_articles) {
-    RssArticlePtr oldestArticle = m_articlesByDate.takeLast();
-    m_articles.remove(oldestArticle->guid());
-    // Update unreadCount
-    if (!oldestArticle->isRead())
-      --m_unreadCount;
-  }
+void RssFeed::addArticle(const RssArticlePtr& article) {
+  int lbIndex = -1;
+  int max_articles = RssSettings().getRSSMaxArticlesPerFeed();
 
-  // Check if article was inserted at the end of the list and will break max_articles limit
-  if (RssSettings().isRssDownloadingEnabled()) {
-    if (lbIndex < max_articles && !article->isRead())
-      downloadArticleTorrentIfMatching(m_manager->downloadRules(), article);
+  if (!m_articles.contains(article->guid())) {
+    markAsDirty();
+
+    // Update unreadCount
+    if (!article->isRead())
+      ++m_unreadCount;
+    // Insert in hash table
+    m_articles[article->guid()] = article;
+    // Insertion sort
+    RssArticleList::Iterator lowerBound = qLowerBound(m_articlesByDate.begin(), m_articlesByDate.end(), article, rssArticleDateRecentThan);
+    m_articlesByDate.insert(lowerBound, article);
+    lbIndex = m_articlesByDate.indexOf(article);
+    if (m_articlesByDate.size() > max_articles) {
+      RssArticlePtr oldestArticle = m_articlesByDate.takeLast();
+      m_articles.remove(oldestArticle->guid());
+      // Update unreadCount
+      if (!oldestArticle->isRead())
+        --m_unreadCount;
+    }
+
+    // Check if article was inserted at the end of the list and will break max_articles limit
+    if (RssSettings().isRssDownloadingEnabled()) {
+      if (lbIndex < max_articles && !article->isRead())
+        downloadArticleTorrentIfMatching(m_manager->downloadRules(), article);
+    }
+  }
+  else {
+    // m_articles.contains(article->guid())
+    // Try to download skipped articles
+    if (RssSettings().isRssDownloadingEnabled()) {
+      RssArticlePtr skipped = m_articles.value(article->guid(), RssArticlePtr());
+      if (skipped) {
+        if (!skipped->isRead())
+          downloadArticleTorrentIfMatching(m_manager->downloadRules(), skipped);
+      }
+    }
   }
 }
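The rewritten addArticle() keeps the existing bookkeeping (sorted insert by date, cap at getRSSMaxArticlesPerFeed()) but now applies it only to GUIDs not seen before; known GUIDs fall through to the new else branch, which retries the download for articles that are still unread. The snippet below is a rough, standalone illustration of the sorted-insert-plus-cap idiom using plain QDateTime values; names such as recentThan and maxArticles are placeholders for rssArticleDateRecentThan and the settings value.

// Rough sketch of the sorted-insert + size-cap idiom from RssFeed::addArticle()
// (plain QDateTime values stand in for RssArticlePtr; names are placeholders).
#include <QtAlgorithms>
#include <QDateTime>
#include <QList>
#include <QDebug>

// "More recent first", mirroring what rssArticleDateRecentThan does for articles.
static bool recentThan(const QDateTime& a, const QDateTime& b) {
  return a > b;
}

int main() {
  QList<QDateTime> byDate;   // kept sorted, newest first
  const int maxArticles = 3; // stands in for getRSSMaxArticlesPerFeed()

  const QDateTime now = QDateTime::currentDateTime();
  QList<QDateTime> incoming;
  incoming << now.addDays(-2) << now << now.addDays(-5) << now.addDays(-1) << now.addDays(-3);

  foreach (const QDateTime& d, incoming) {
    // Insertion sort: find the first entry older than d and insert before it.
    QList<QDateTime>::Iterator lowerBound = qLowerBound(byDate.begin(), byDate.end(), d, recentThan);
    byDate.insert(lowerBound, d);
    // Size cap: once over the limit, drop the oldest (last) entry.
    if (byDate.size() > maxArticles)
      byDate.takeLast();
  }

  qDebug() << byDate; // the three newest dates, newest first
  return 0;
}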
@@ -338,15 +352,16 @@ void RssFeed::downloadArticleTorrentIfMatching(RssDownloadRuleList* rules, const RssArticlePtr& article)
   if (!matching_rule)
     return;
 
-  // Torrent was downloaded, consider article as read
-  article->markAsRead();
   // Download the torrent
   const QString& torrent_url = article->torrentUrl();
   QBtSession::instance()->addConsoleMessage(tr("Automatically downloading %1 torrent from %2 RSS feed...").arg(article->title()).arg(displayName()));
   if (torrent_url.startsWith("magnet:", Qt::CaseInsensitive))
     QBtSession::instance()->addMagnetSkipAddDlg(torrent_url, matching_rule->savePath(), matching_rule->label());
-  else
+  else {
+    connect(QBtSession::instance(), SIGNAL(newDownloadedTorrentFromRss(QString)), article.data(), SLOT(handleTorrentDownloadSuccess(const QString&)), Qt::UniqueConnection);
+    connect(article.data(), SIGNAL(articleWasRead()), SLOT(handleArticleStateChanged()), Qt::UniqueConnection);
     QBtSession::instance()->downloadUrlAndSkipDialog(torrent_url, matching_rule->savePath(), matching_rule->label(), feedCookies());
+  }
 }
 
 void RssFeed::recheckRssItemsForDownload()
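Both connect() calls above pass Qt::UniqueConnection, so running downloadArticleTorrentIfMatching() again for the same article (for example when skipped articles are retried) does not stack duplicate connections. A throwaway example of that flag's behaviour, assuming a single-file build with moc; nothing here is project code:

// Throwaway illustration of Qt::UniqueConnection (placeholder names, not project code).
// Single-file build: save as main.cpp and let moc/AUTOMOC process it.
#include <QCoreApplication>
#include <QObject>
#include <QDebug>

class Counter : public QObject {
  Q_OBJECT
public:
  int calls = 0;
  void trigger() { emit fire(); }
public slots:
  void bump() { ++calls; }
signals:
  void fire();
};

int main(int argc, char** argv) {
  QCoreApplication app(argc, argv);
  Counter c;

  // Plain connections stack: connecting twice runs the slot twice per emit.
  QObject::connect(&c, SIGNAL(fire()), &c, SLOT(bump()));
  QObject::connect(&c, SIGNAL(fire()), &c, SLOT(bump()));
  c.trigger();
  qDebug() << "without Qt::UniqueConnection:" << c.calls; // 2

  c.calls = 0;
  QObject::disconnect(&c, 0, &c, 0);

  // With Qt::UniqueConnection an identical second connection is refused.
  QObject::connect(&c, SIGNAL(fire()), &c, SLOT(bump()), Qt::UniqueConnection);
  QObject::connect(&c, SIGNAL(fire()), &c, SLOT(bump()), Qt::UniqueConnection);
  c.trigger();
  qDebug() << "with Qt::UniqueConnection:" << c.calls;    // 1

  return 0;
}

#include "main.moc"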
@@ -364,12 +379,6 @@ void RssFeed::handleNewArticle(const QString& feedUrl, const QVariantHash& articleData)
   if (feedUrl != m_url)
     return;
 
-  const QString guid = articleData["id"].toString();
-  if (m_articles.contains(guid))
-    return;
-
-  markAsDirty();
-
   RssArticlePtr article = hashToRssArticle(this, articleData);
   Q_ASSERT(article);
   addArticle(article);
@@ -400,6 +409,10 @@ void RssFeed::handleFeedParsingFinished(const QString& feedUrl, const QString& error)
   saveItemsToDisk();
 }
 
+void RssFeed::handleArticleStateChanged() {
+  m_manager->forwardFeedInfosChanged(m_url, displayName(), m_unreadCount);
+}
+
 void RssFeed::decrementUnreadCount()
 {
   --m_unreadCount;
@@ -86,6 +86,7 @@ private slots:
   void handleFeedTitle(const QString& feedUrl, const QString& title);
   void handleNewArticle(const QString& feedUrl, const QVariantHash& article);
   void handleFeedParsingFinished(const QString& feedUrl, const QString& error);
+  void handleArticleStateChanged();
 
 private:
   QString iconUrl() const;