This repository has been archived by the owner on Nov 18, 2022. It is now read-only.

#541: fixed crash when adding many urls
hugbug committed Aug 25, 2018
1 parent 5a0eae7, commit 4db9ef2
Showing 2 changed files with 11 additions and 3 deletions.

daemon/queue/DupeCoordinator.cpp (5 additions, 0 deletions)
@@ -270,6 +270,7 @@ void DupeCoordinator::NzbFound(DownloadQueue* downloadQueue, NzbInfo* nzbInfo)
     {
         NzbInfo* queuedNzbInfo = (*it++).get();
         if (queuedNzbInfo != nzbInfo &&
+            queuedNzbInfo->GetDeleteStatus() == NzbInfo::dsNone &&
             (queuedNzbInfo->GetKind() == NzbInfo::nkNzb ||
             (queuedNzbInfo->GetKind() == NzbInfo::nkUrl && nzbInfo->GetKind() == NzbInfo::nkUrl)) &&
             queuedNzbInfo->GetDupeMode() != dmForce &&
@@ -294,9 +295,13 @@ void DupeCoordinator::NzbFound(DownloadQueue* downloadQueue, NzbInfo* nzbInfo)
                 // the existing queue item is moved to history as dupe-backup
                 info("Moving collection %s with lower duplicate score to history", queuedNzbInfo->GetName());
                 queuedNzbInfo->SetDeleteStatus(NzbInfo::dsDupe);
+                int oldSize = downloadQueue->GetQueue()->size();
                 downloadQueue->EditEntry(queuedNzbInfo->GetId(),
                     DownloadQueue::eaGroupDelete, nullptr);
+                int newSize = downloadQueue->GetQueue()->size();
+                index += oldSize == newSize ? 1 : 0;
                 it = downloadQueue->GetQueue()->begin() + index;
+                index--;
             }
         }
     }
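
The second hunk compensates the loop index for the fact that DownloadQueue::EditEntry may leave the entry in the queue (its removal can be deferred, which appears to be the case while the entry still has an active URL download), and it rebuilds the iterator from the index because erasing from the queue invalidates it. The following minimal sketch shows the same iteration pattern in isolation; it is not nzbget code, and MaybeRemove is a hypothetical stand-in for EditEntry whose effect is only observable through the container size.

#include <iostream>
#include <vector>

// Removes the element at position pos unless it is "busy". The caller can only
// tell whether anything happened by comparing the container size afterwards,
// which mirrors the queue behaviour the hunk above has to cope with.
static void MaybeRemove(std::vector<int>& items, int pos)
{
    bool busy = items[pos] % 2 != 0;  // pretend odd values are still in use
    if (!busy)
    {
        items.erase(items.begin() + pos);
    }
}

int main()
{
    std::vector<int> items = {10, 11, 12, 13, 14};

    int index = 0;
    for (auto it = items.begin(); it != items.end(); index++)
    {
        int value = *it++;
        if (value >= 12)
        {
            int oldSize = static_cast<int>(items.size());
            MaybeRemove(items, index);
            int newSize = static_cast<int>(items.size());

            // If nothing was removed, the next element still sits at index + 1;
            // if the element was removed, its successor has shifted down into the
            // current slot. Either way erase() may have invalidated "it", so it
            // is rebuilt from the index.
            index += oldSize == newSize ? 1 : 0;
            it = items.begin() + index;
            index--;  // compensates for the loop's index++
        }
    }

    for (int value : items)
    {
        std::cout << value << ' ';
    }
    std::cout << '\n';  // prints: 10 11 13
}
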

daemon/queue/UrlCoordinator.cpp (6 additions, 3 deletions)
@@ -277,8 +277,6 @@ void UrlCoordinator::UrlCompleted(UrlDownloader* urlDownloader)
         // remove downloader from downloader list
         m_activeDownloads.erase(std::find(m_activeDownloads.begin(), m_activeDownloads.end(), urlDownloader));
 
-        nzbInfo->SetActiveDownloads(0);
-
         retry = urlDownloader->GetStatus() == WebDownloader::adRetry && !nzbInfo->GetDeleting();
 
         if (nzbInfo->GetDeleting())
@@ -309,6 +307,7 @@ void UrlCoordinator::UrlCompleted(UrlDownloader* urlDownloader)
 
     if (retry)
     {
+        nzbInfo->SetActiveDownloads(0);
         return;
     }
 
@@ -324,7 +323,7 @@ void UrlCoordinator::UrlCompleted(UrlDownloader* urlDownloader)
 
     if (addStatus == Scanner::asSuccess)
     {
-        // if scanner has successfully added nzb-file to queue, our pNZBInfo is
+        // if scanner has successfully added nzb-file to queue, our nzbInfo is
         // already removed from queue and destroyed
         return;
     }
@@ -339,6 +338,8 @@ void UrlCoordinator::UrlCompleted(UrlDownloader* urlDownloader)
     {
         GuardedDownloadQueue downloadQueue = DownloadQueue::Guard();
 
+        nzbInfo->SetActiveDownloads(0);
+
         DownloadQueue::Aspect aspect = {DownloadQueue::eaUrlFailed, downloadQueue, nzbInfo, nullptr};
         downloadQueue->Notify(&aspect);
     }
@@ -360,6 +361,8 @@ bool UrlCoordinator::DeleteQueueEntry(DownloadQueue* downloadQueue, NzbInfo* nzb
                 return true;
             }
         }
+
+        return false;
     }
 
     info("Deleting URL %s", nzbInfo->GetName());
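
The UrlCoordinator hunks work together: nzbInfo->SetActiveDownloads(0) is no longer called up front, but only in the retry branch and in the guarded block that reports the failure, and DeleteQueueEntry gets an explicit return false, presumably so that control no longer falls through to the immediate-delete path while the entry still reports an active download. The effect is that the active-download counter stays non-zero until the fate of the entry is decided, which is what keeps a concurrent delete request from destroying the NzbInfo that UrlCompleted is still using. The sketch below illustrates that ordering with generic names; Queue, Item, TryDelete and OnDownloadCompleted are illustrative stand-ins, not nzbget's API.

#include <algorithm>
#include <memory>
#include <mutex>
#include <vector>

struct Item
{
    int id = 0;
    int activeDownloads = 0;
    bool deleteRequested = false;
};

struct Queue
{
    std::mutex mutex;
    std::vector<std::unique_ptr<Item>> items;
};

// Called from another thread, e.g. a user deleting the entry while its URL is fetched.
bool TryDelete(Queue& queue, int id)
{
    std::lock_guard<std::mutex> lock(queue.mutex);
    auto it = std::find_if(queue.items.begin(), queue.items.end(),
        [id](const std::unique_ptr<Item>& item) { return item->id == id; });
    if (it == queue.items.end())
    {
        return false;
    }
    if ((*it)->activeDownloads > 0)
    {
        // Still busy: only mark the item and let the completion handler finish
        // the job later. Destroying the Item here is what the counter guards against.
        (*it)->deleteRequested = true;
        return true;
    }
    queue.items.erase(it);  // nobody is using the Item, safe to destroy it
    return true;
}

void OnDownloadCompleted(Queue& queue, Item* item, bool failed)
{
    {
        std::lock_guard<std::mutex> lock(queue.mutex);
        // Pre-fix pattern: clearing item->activeDownloads here would open a window
        // in which TryDelete(), running between the two locked sections, erases the
        // Item that the rest of this function still dereferences.
        // ... remove the finished downloader from an active-downloads list here ...
    }

    // ... unlocked work: hand the downloaded file to a scanner, write logs, ...

    {
        std::lock_guard<std::mutex> lock(queue.mutex);
        item->activeDownloads = 0;  // cleared only once the outcome is decided
        if (failed || item->deleteRequested)
        {
            // finish the deferred delete or move the item to history here
        }
    }
}

int main()
{
    Queue queue;
    auto owned = std::make_unique<Item>();
    owned->id = 1;
    owned->activeDownloads = 1;
    Item* item = owned.get();
    queue.items.push_back(std::move(owned));

    TryDelete(queue, 1);                     // deferred, because the item is busy
    OnDownloadCompleted(queue, item, true);  // clears the counter, honours the request
}
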
