Mirror of https://github.com/Sonarr/Sonarr.git

Blacklisting improvements

New: New releases that fail will be retried a second time after waiting 1hr (configurable)
Fixed: Blacklisting releases with the same date and vastly different ages
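
Three new advanced settings drive this: a blacklist grace period (hours), a retry interval (minutes) and a retry limit. Below is a condensed, hedged sketch of the decision the FailedDownloadService makes for each failed download; the free-standing method and parameter names are simplified for illustration, the real logic lives in FailedDownloadForRecentRelease further down (FailedDownload is the small tracking class added by this commit; defaults come from ConfigService: 2 h, 60 min, 1 retry).

// Sketch only: should a failed download be retried instead of blacklisted?
bool ShouldRetryInsteadOfBlacklist(double ageHours, FailedDownload tracked,
                                   int gracePeriodHours, int retryIntervalMinutes, int retryLimit)
{
    // Releases older than the grace period go straight down the blacklist path.
    if (ageHours > gracePeriodHours) return false;

    // Once the retry limit is exhausted the release is blacklisted as before.
    if (tracked.RetryCount >= retryLimit) return false;

    // Otherwise retry at most once per retry interval and skip blacklisting for now.
    if (tracked.LastRetry.AddMinutes(retryIntervalMinutes) < DateTime.UtcNow)
    {
        tracked.LastRetry = DateTime.UtcNow;
        tracked.RetryCount++;
        // the real service calls GetDownloadClient().RetryDownload(id) here
    }

    return true;
}

With the defaults this means a grab that fails while the release is under two hours old is retried once roughly an hour later, and only blacklisted if it fails again.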
Commit e21574a203 (parent 492ffb5714) by Mark McDowall, 2014-04-01 13:07:41 -07:00
44 changed files with 567 additions and 81 deletions

View File

@ -16,6 +16,15 @@ public DownloadClientConfigModule(IConfigService configService, RootFolderValida
.SetValidator(rootFolderValidator)
.SetValidator(pathExistsValidator)
.When(c => !String.IsNullOrWhiteSpace(c.DownloadedEpisodesFolder));
SharedValidator.RuleFor(c => c.BlacklistGracePeriod)
.InclusiveBetween(1, 24);
SharedValidator.RuleFor(c => c.BlacklistRetryInterval)
.InclusiveBetween(5, 120);
SharedValidator.RuleFor(c => c.BlacklistRetryLimit)
.InclusiveBetween(0, 10);
}
}
}

View File

@ -12,5 +12,8 @@ public class DownloadClientConfigResource : RestResource
public Boolean AutoRedownloadFailed { get; set; }
public Boolean RemoveFailedDownloads { get; set; }
public Boolean EnableFailedDownloadHandling { get; set; }
public Int32 BlacklistGracePeriod { get; set; }
public Int32 BlacklistRetryInterval { get; set; }
public Int32 BlacklistRetryLimit { get; set; }
}
}

View File

@ -3,7 +3,6 @@
using NzbDrone.Api.REST;
using NzbDrone.Core.Parser;
using NzbDrone.Core.Qualities;
using NzbDrone.Core.Tv;
namespace NzbDrone.Api.Indexers
{
@ -11,6 +10,7 @@ public class ReleaseResource : RestResource
{
public QualityModel Quality { get; set; }
public Int32 Age { get; set; }
public Double AgeHours { get; set; }
public Int64 Size { get; set; }
public String Indexer { get; set; }
public String ReleaseGroup { get; set; }

View File

@ -7,7 +7,7 @@
namespace NzbDrone.Common.Test.CacheTests
{
[TestFixture]
public class CachedManagerFixture : TestBase<ICacheManger>
public class CachedManagerFixture : TestBase<ICacheManager>
{
[Test]
public void should_return_proper_type_of_cache()

View File

@ -4,7 +4,7 @@
namespace NzbDrone.Common.Cache
{
public interface ICacheManger
public interface ICacheManager
{
ICached<T> GetCache<T>(Type host, string name);
ICached<T> GetCache<T>(Type host);
@ -12,11 +12,11 @@ public interface ICacheManger
ICollection<ICached> Caches { get; }
}
public class CacheManger : ICacheManger
public class CacheManager : ICacheManager
{
private readonly ICached<ICached> _cache;
public CacheManger()
public CacheManager()
{
_cache = new Cached<ICached>();

View File

@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace NzbDrone.Common
{
@ -12,5 +11,17 @@ public static class DictionaryExtensions
TValue value;
return dictionary.TryGetValue(key, out value) ? value : defaultValue;
}
public static Dictionary<T1, T2> Merge<T1, T2>(this Dictionary<T1, T2> first, Dictionary<T1, T2> second)
{
if (first == null) throw new ArgumentNullException("first");
if (second == null) throw new ArgumentNullException("second");
var merged = new Dictionary<T1, T2>();
first.ToList().ForEach(kv => merged[kv.Key] = kv.Value);
second.ToList().ForEach(kv => merged[kv.Key] = kv.Value);
return merged;
}
}
}
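
The new Merge extension copies both dictionaries into a fresh one, with values from the second dictionary winning on duplicate keys; it is used further down to fold a history item's data into the DownloadFailedEvent. A quick usage sketch with hypothetical values:

var defaults = new Dictionary<string, string> { { "downloadClient", "SabnzbdClient" } };
var overrides = new Dictionary<string, string> { { "downloadClient", "NzbgetClient" }, { "ageHours", "3" } };

var merged = defaults.Merge(overrides);
// merged["downloadClient"] == "NzbgetClient" (second dictionary wins); merged["ageHours"] == "3"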

View File

@ -48,7 +48,7 @@ public void should_check_for_blacklisted_title_case_insensative()
{
Subject.Insert(_blacklist);
Subject.Blacklisted(_blacklist.SeriesId, _blacklist.SourceTitle.ToUpperInvariant()).Should().BeTrue();
Subject.Blacklisted(_blacklist.SeriesId, _blacklist.SourceTitle.ToUpperInvariant()).Should().HaveCount(1);
}
}
}

View File

@ -1,11 +1,11 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using Moq;
using NUnit.Framework;
using NzbDrone.Core.Blacklisting;
using NzbDrone.Core.Download;
using NzbDrone.Core.Qualities;
using NzbDrone.Core.Test.Framework;
using NzbDrone.Core.Tv;
namespace NzbDrone.Core.Test.Blacklisting
{
@ -26,6 +26,8 @@ public void Setup()
DownloadClient = "SabnzbdClient",
DownloadClientId = "Sabnzbd_nzo_2dfh73k"
};
_event.Data.Add("publishedDate", DateTime.UtcNow.ToString("s") + "Z");
}
[Test]

View File

@ -76,6 +76,16 @@ private void GivenFailedDownloadClientHistory()
.Returns(_failed);
}
private void GivenGracePeriod(int hours)
{
Mocker.GetMock<IConfigService>().SetupGet(s => s.BlacklistGracePeriod).Returns(hours);
}
private void GivenRetryLimit(int count)
{
Mocker.GetMock<IConfigService>().SetupGet(s => s.BlacklistRetryLimit).Returns(count);
}
private void VerifyNoFailedDownloads()
{
Mocker.GetMock<IEventAggregator>()
@ -270,5 +280,91 @@ public void should_not_process_if_failed_due_to_lack_of_disk_space()
VerifyNoFailedDownloads();
}
[Test]
public void should_process_if_ageHours_is_not_set()
{
GivenFailedDownloadClientHistory();
var historyGrabbed = Builder<History.History>.CreateListOfSize(1)
.Build()
.ToList();
historyGrabbed.First().Data.Add("downloadClient", "SabnzbdClient");
historyGrabbed.First().Data.Add("downloadClientId", _failed.First().Id);
GivenGrabbedHistory(historyGrabbed);
GivenNoFailedHistory();
Subject.Execute(new CheckForFailedDownloadCommand());
VerifyFailedDownloads();
}
[Test]
public void should_process_if_age_is_greater_than_grace_period()
{
GivenFailedDownloadClientHistory();
var historyGrabbed = Builder<History.History>.CreateListOfSize(1)
.Build()
.ToList();
historyGrabbed.First().Data.Add("downloadClient", "SabnzbdClient");
historyGrabbed.First().Data.Add("downloadClientId", _failed.First().Id);
historyGrabbed.First().Data.Add("ageHours", "48");
GivenGrabbedHistory(historyGrabbed);
GivenNoFailedHistory();
Subject.Execute(new CheckForFailedDownloadCommand());
VerifyFailedDownloads();
}
[Test]
public void should_process_if_retry_count_is_greater_than_grace_period()
{
GivenFailedDownloadClientHistory();
var historyGrabbed = Builder<History.History>.CreateListOfSize(1)
.Build()
.ToList();
historyGrabbed.First().Data.Add("downloadClient", "SabnzbdClient");
historyGrabbed.First().Data.Add("downloadClientId", _failed.First().Id);
historyGrabbed.First().Data.Add("ageHours", "48");
GivenGrabbedHistory(historyGrabbed);
GivenNoFailedHistory();
GivenGracePeriod(6);
Subject.Execute(new CheckForFailedDownloadCommand());
VerifyFailedDownloads();
}
[Test]
public void should_not_process_if_age_is_less_than_grace_period()
{
GivenFailedDownloadClientHistory();
var historyGrabbed = Builder<History.History>.CreateListOfSize(1)
.Build()
.ToList();
historyGrabbed.First().Data.Add("downloadClient", "SabnzbdClient");
historyGrabbed.First().Data.Add("downloadClientId", _failed.First().Id);
historyGrabbed.First().Data.Add("ageHours", "1");
GivenGrabbedHistory(historyGrabbed);
GivenNoFailedHistory();
GivenGracePeriod(6);
GivenRetryLimit(1);
Subject.Execute(new CheckForFailedDownloadCommand());
VerifyNoFailedDownloads();
}
}
}

View File

@ -2,16 +2,16 @@
using System.Collections.Generic;
using NzbDrone.Core.Datastore;
using NzbDrone.Core.Qualities;
using NzbDrone.Core.Tv;
namespace NzbDrone.Core.Blacklisting
{
public class Blacklist : ModelBase
{
public int SeriesId { get; set; }
public List<int> EpisodeIds { get; set; }
public string SourceTitle { get; set; }
public Int32 SeriesId { get; set; }
public List<Int32> EpisodeIds { get; set; }
public String SourceTitle { get; set; }
public QualityModel Quality { get; set; }
public DateTime Date { get; set; }
public DateTime? PublishedDate { get; set; }
}
}

View File

@ -1,6 +1,4 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Collections.Generic;
using NzbDrone.Core.Datastore;
using NzbDrone.Core.Messaging.Events;
@ -8,7 +6,7 @@ namespace NzbDrone.Core.Blacklisting
{
public interface IBlacklistRepository : IBasicRepository<Blacklist>
{
bool Blacklisted(int seriesId, string sourceTitle);
List<Blacklist> Blacklisted(int seriesId, string sourceTitle);
List<Blacklist> BlacklistedBySeries(int seriesId);
}
@ -19,11 +17,10 @@ public BlacklistRepository(IDatabase database, IEventAggregator eventAggregator)
{
}
public bool Blacklisted(int seriesId, string sourceTitle)
public List<Blacklist> Blacklisted(int seriesId, string sourceTitle)
{
return Query.Where(e => e.SeriesId == seriesId)
.AndWhere(e => e.SourceTitle.Contains(sourceTitle))
.Any();
.AndWhere(e => e.SourceTitle.Contains(sourceTitle));
}
public List<Blacklist> BlacklistedBySeries(int seriesId)

View File

@ -1,4 +1,8 @@
using System;
using System.Linq;
using NLog;
using NzbDrone.Common;
using NzbDrone.Core.Configuration;
using NzbDrone.Core.Datastore;
using NzbDrone.Core.Download;
using NzbDrone.Core.Messaging.Commands;
@ -9,25 +13,31 @@ namespace NzbDrone.Core.Blacklisting
{
public interface IBlacklistService
{
bool Blacklisted(int seriesId,string sourceTitle);
bool Blacklisted(int seriesId,string sourceTitle, DateTime publishedDate);
PagingSpec<Blacklist> Paged(PagingSpec<Blacklist> pagingSpec);
void Delete(int id);
}
public class BlacklistService : IBlacklistService, IExecute<ClearBlacklistCommand>, IHandle<DownloadFailedEvent>, IHandle<SeriesDeletedEvent>
public class BlacklistService : IBlacklistService,
IExecute<ClearBlacklistCommand>,
IHandle<DownloadFailedEvent>,
IHandle<SeriesDeletedEvent>
{
private readonly IBlacklistRepository _blacklistRepository;
private readonly IRedownloadFailedDownloads _redownloadFailedDownloadService;
public BlacklistService(IBlacklistRepository blacklistRepository, IRedownloadFailedDownloads redownloadFailedDownloadService)
public BlacklistService(IBlacklistRepository blacklistRepository,
IRedownloadFailedDownloads redownloadFailedDownloadService)
{
_blacklistRepository = blacklistRepository;
_redownloadFailedDownloadService = redownloadFailedDownloadService;
}
public bool Blacklisted(int seriesId, string sourceTitle)
public bool Blacklisted(int seriesId, string sourceTitle, DateTime publishedDate)
{
return _blacklistRepository.Blacklisted(seriesId,sourceTitle);
var blacklisted = _blacklistRepository.Blacklisted(seriesId, sourceTitle);
return blacklisted.Any(item => HasSamePublishedDate(item, publishedDate));
}
public PagingSpec<Blacklist> Paged(PagingSpec<Blacklist> pagingSpec)
@ -40,6 +50,14 @@ public void Delete(int id)
_blacklistRepository.Delete(id);
}
private bool HasSamePublishedDate(Blacklist item, DateTime publishedDate)
{
if (!item.PublishedDate.HasValue) return true;
return item.PublishedDate.Value.AddDays(-2) <= publishedDate &&
item.PublishedDate.Value.AddDays(2) >= publishedDate;
}
public void Execute(ClearBlacklistCommand message)
{
_blacklistRepository.Purge();
@ -53,7 +71,8 @@ public void Handle(DownloadFailedEvent message)
EpisodeIds = message.EpisodeIds,
SourceTitle = message.SourceTitle,
Quality = message.Quality,
Date = DateTime.UtcNow
Date = DateTime.UtcNow,
PublishedDate = DateTime.Parse(message.Data.GetValueOrDefault("publishedDate", null))
};
_blacklistRepository.Insert(blacklist);
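
HasSamePublishedDate treats two releases as the same when their published dates fall within two days of each other; older blacklist rows without a stored date always match and stay blacklisted. For example (hypothetical dates), a blacklist entry published 2014-03-30 matches a release published 2014-03-31, but not one published 2014-04-05, which may be grabbed again.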

View File

@ -46,9 +46,9 @@ public class ConfigFileProvider : IConfigFileProvider
private readonly string _configFile;
public ConfigFileProvider(IAppFolderInfo appFolderInfo, ICacheManger cacheManger, IEventAggregator eventAggregator)
public ConfigFileProvider(IAppFolderInfo appFolderInfo, ICacheManager cacheManager, IEventAggregator eventAggregator)
{
_cache = cacheManger.GetCache<string>(GetType());
_cache = cacheManager.GetCache<string>(GetType());
_eventAggregator = eventAggregator;
_configFile = appFolderInfo.GetConfigPath();
}

View File

@ -130,6 +130,27 @@ public Boolean RemoveFailedDownloads
set { SetValue("RemoveFailedDownloads", value); }
}
public Int32 BlacklistGracePeriod
{
get { return GetValueInt("BlacklistGracePeriod", 2); }
set { SetValue("BlacklistGracePeriod", value); }
}
public Int32 BlacklistRetryInterval
{
get { return GetValueInt("BlacklistRetryInterval", 60); }
set { SetValue("BlacklistRetryInterval", value); }
}
public Int32 BlacklistRetryLimit
{
get { return GetValueInt("BlacklistRetryLimit", 1); }
set { SetValue("BlacklistRetryLimit", value); }
}
public Boolean EnableFailedDownloadHandling
{
get { return GetValueBoolean("EnableFailedDownloadHandling", true); }

View File

@ -19,6 +19,10 @@ public interface IConfigService
Boolean AutoRedownloadFailed { get; set; }
Boolean RemoveFailedDownloads { get; set; }
Boolean EnableFailedDownloadHandling { get; set; }
Int32 BlacklistGracePeriod { get; set; }
Int32 BlacklistRetryInterval { get; set; }
Int32 BlacklistRetryLimit { get; set; }
//Media Management
Boolean AutoUnmonitorPreviouslyDownloadedEpisodes { get; set; }

View File

@ -25,13 +25,13 @@ public class SceneMappingService : ISceneMappingService,
private readonly ICached<SceneMapping> _getSceneNameCache;
private readonly ICached<SceneMapping> _gettvdbIdCache;
public SceneMappingService(ISceneMappingRepository repository, ISceneMappingProxy sceneMappingProxy, ICacheManger cacheManger, Logger logger)
public SceneMappingService(ISceneMappingRepository repository, ISceneMappingProxy sceneMappingProxy, ICacheManager cacheManager, Logger logger)
{
_repository = repository;
_sceneMappingProxy = sceneMappingProxy;
_getSceneNameCache = cacheManger.GetCache<SceneMapping>(GetType(), "scene_name");
_gettvdbIdCache = cacheManger.GetCache<SceneMapping>(GetType(), "tvdb_id");
_getSceneNameCache = cacheManager.GetCache<SceneMapping>(GetType(), "scene_name");
_gettvdbIdCache = cacheManager.GetCache<SceneMapping>(GetType(), "tvdb_id");
_logger = logger;
}

View File

@ -18,14 +18,14 @@ public class XemService : IHandle<SeriesUpdatedEvent>, IHandle<SeriesRefreshStar
public XemService(IEpisodeService episodeService,
IXemProxy xemProxy,
ISeriesService seriesService, ICacheManger cacheManger, Logger logger)
ISeriesService seriesService, ICacheManager cacheManager, Logger logger)
{
_episodeService = episodeService;
_xemProxy = xemProxy;
_seriesService = seriesService;
_logger = logger;
_logger = logger;
_cache = cacheManger.GetCache<bool>(GetType());
_cache = cacheManager.GetCache<bool>(GetType());
}
private void PerformUpdate(Series series)

View File

@ -0,0 +1,14 @@
using FluentMigrator;
using NzbDrone.Core.Datastore.Migration.Framework;
namespace NzbDrone.Core.Datastore.Migration
{
[Migration(47)]
public class add_temporary_blacklist_columns : NzbDroneMigrationBase
{
protected override void MainDbUpgrade()
{
Alter.Table("Blacklist").AddColumn("PublishedDate").AsDateTime().Nullable();
}
}
}

View File

@ -27,7 +27,7 @@ public string RejectionReason
}
}
public virtual bool IsSatisfiedBy(RemoteEpisode subject, SearchCriteriaBase searchCriteria)
public bool IsSatisfiedBy(RemoteEpisode subject, SearchCriteriaBase searchCriteria)
{
if (!_configService.EnableFailedDownloadHandling)
{
@ -35,7 +35,7 @@ public virtual bool IsSatisfiedBy(RemoteEpisode subject, SearchCriteriaBase sear
return true;
}
if (_blacklistService.Blacklisted(subject.Series.Id, subject.Release.Title))
if (_blacklistService.Blacklisted(subject.Series.Id, subject.Release.Title, subject.Release.PublishDate))
{
_logger.Debug("{0} is blacklisted, rejecting.", subject.Release.Title);
return false;

View File

@ -0,0 +1,63 @@
using System;
using System.Linq;
using NLog;
using NzbDrone.Common;
using NzbDrone.Core.Configuration;
using NzbDrone.Core.History;
using NzbDrone.Core.IndexerSearch.Definitions;
using NzbDrone.Core.Parser.Model;
namespace NzbDrone.Core.DecisionEngine.Specifications
{
public class RetrySpecification : IDecisionEngineSpecification
{
private readonly IHistoryService _historyService;
private readonly IConfigService _configService;
private readonly Logger _logger;
public RetrySpecification(IHistoryService historyService, IConfigService configService, Logger logger)
{
_historyService = historyService;
_configService = configService;
_logger = logger;
}
public string RejectionReason
{
get
{
return "Release has been retried too many times";
}
}
public bool IsSatisfiedBy(RemoteEpisode subject, SearchCriteriaBase searchCriteria)
{
if (!_configService.EnableFailedDownloadHandling)
{
_logger.Debug("Failed Download Handling is not enabled");
return true;
}
var history = _historyService.FindBySourceTitle(subject.Release.Title);
if (history.Count(h => h.EventType == HistoryEventType.Grabbed &&
HasSamePublishedDate(h, subject.Release.PublishDate)) >
_configService.BlacklistRetryLimit)
{
_logger.Debug("Release has been attempted more times than allowed, rejecting");
return false;
}
return true;
}
private bool HasSamePublishedDate(History.History item, DateTime publishedDate)
{
DateTime itemsPublishedDate;
if (!DateTime.TryParse(item.Data.GetValueOrDefault("PublishedDate", null), out itemsPublishedDate)) return true;
return itemsPublishedDate.AddDays(-2) <= publishedDate && itemsPublishedDate.AddDays(2) >= publishedDate;
}
}
}
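
In plain terms, the new RetrySpecification rejects a release once the number of grabbed-history entries whose stored PublishedDate lies within two days of the release's publish date exceeds BlacklistRetryLimit. With the default limit of 1, a release that has already been grabbed twice (the original grab plus one automatic retry) is skipped on later RSS or search passes rather than being grabbed a third time.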

View File

@ -58,6 +58,11 @@ public override void RemoveFromHistory(string id)
{
}
public override void RetryDownload(string id)
{
throw new NotImplementedException();
}
public override void Test()
{
PerformTest(Settings.Folder);

View File

@ -138,6 +138,11 @@ public override void RemoveFromHistory(string id)
_proxy.RemoveFromHistory(id, Settings);
}
public override void RetryDownload(string id)
{
_proxy.RetryDownload(id, Settings);
}
public override void Test()
{
_proxy.GetVersion(Settings);

View File

@ -17,6 +17,7 @@ public interface INzbgetProxy
List<NzbgetHistoryItem> GetHistory(NzbgetSettings settings);
VersionResponse GetVersion(NzbgetSettings settings);
void RemoveFromHistory(string id, NzbgetSettings settings);
void RetryDownload(string id, NzbgetSettings settings);
}
public class NzbgetProxy : INzbgetProxy
@ -98,6 +99,23 @@ public void RemoveFromHistory(string id, NzbgetSettings settings)
}
}
public void RetryDownload(string id, NzbgetSettings settings)
{
var history = GetHistory(settings);
var item = history.SingleOrDefault(h => h.Parameters.SingleOrDefault(p => p.Name == "drone") != null);
if (item == null)
{
_logger.Warn("Unable to return item to queue, Unknown ID: {0}", id);
return;
}
if (!EditQueue("HistoryReturn", 0, "", item.Id, settings))
{
_logger.Warn("Failed to return item to queue from history, {0} [{1}]", item.Name, item.Id);
}
}
private bool EditQueue(string command, int offset, string editText, int id, NzbgetSettings settings)
{
var parameters = new object[] { command, offset, editText, id };
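
HistoryReturn is nzbget's edit command for moving an item from history back to the download queue. Judging by the parameter array built in EditQueue, the resulting JSON-RPC call looks roughly like {"method": "editqueue", "params": ["HistoryReturn", 0, "", <historyItemId>]}; the exact method name and request plumbing live in code not shown in this hunk, so treat that shape as an assumption.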

View File

@ -80,6 +80,11 @@ public override void RemoveFromHistory(string id)
{
}
public override void RetryDownload(string id)
{
throw new NotImplementedException();
}
public override void Test()
{
PerformTest(Settings.Folder);

View File

@ -1,12 +1,9 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json.Linq;
using NLog;
using NzbDrone.Common;
using NzbDrone.Common.Cache;
using NzbDrone.Common.Serializer;
using NzbDrone.Core.Download.Clients.Sabnzbd.Responses;
using NzbDrone.Core.Messaging.Commands;
using NzbDrone.Core.Parser;
using NzbDrone.Core.Parser.Model;
@ -18,20 +15,20 @@ public class Sabnzbd : DownloadClientBase<SabnzbdSettings>, IExecute<TestSabnzbd
{
private readonly IHttpProvider _httpProvider;
private readonly IParsingService _parsingService;
private readonly ISabnzbdProxy _sabnzbdProxy;
private readonly ISabnzbdProxy _proxy;
private readonly ICached<IEnumerable<QueueItem>> _queueCache;
private readonly Logger _logger;
public Sabnzbd(IHttpProvider httpProvider,
ICacheManger cacheManger,
ICacheManager cacheManager,
IParsingService parsingService,
ISabnzbdProxy sabnzbdProxy,
ISabnzbdProxy proxy,
Logger logger)
{
_httpProvider = httpProvider;
_parsingService = parsingService;
_sabnzbdProxy = sabnzbdProxy;
_queueCache = cacheManger.GetCache<IEnumerable<QueueItem>>(GetType(), "queue");
_proxy = proxy;
_queueCache = cacheManager.GetCache<IEnumerable<QueueItem>>(GetType(), "queue");
_logger = logger;
}
@ -45,7 +42,7 @@ public override string DownloadNzb(RemoteEpisode remoteEpisode)
using (var nzb = _httpProvider.DownloadStream(url))
{
_logger.Info("Adding report [{0}] to the queue.", title);
var response = _sabnzbdProxy.DownloadNzb(nzb, title, category, priority, Settings);
var response = _proxy.DownloadNzb(nzb, title, category, priority, Settings);
if (response != null && response.Ids.Any())
{
@ -64,7 +61,7 @@ public override IEnumerable<QueueItem> GetQueue()
try
{
sabQueue = _sabnzbdProxy.GetQueue(0, 0, Settings);
sabQueue = _proxy.GetQueue(0, 0, Settings);
}
catch (DownloadClientException ex)
{
@ -105,7 +102,7 @@ public override IEnumerable<HistoryItem> GetHistory(int start = 0, int limit = 1
try
{
sabHistory = _sabnzbdProxy.GetHistory(start, limit, Settings);
sabHistory = _proxy.GetHistory(start, limit, Settings);
}
catch (DownloadClientException ex)
{
@ -135,17 +132,22 @@ public override IEnumerable<HistoryItem> GetHistory(int start = 0, int limit = 1
public override void RemoveFromQueue(string id)
{
_sabnzbdProxy.RemoveFrom("queue", id, Settings);
_proxy.RemoveFrom("queue", id, Settings);
}
public override void RemoveFromHistory(string id)
{
_sabnzbdProxy.RemoveFrom("history", id, Settings);
_proxy.RemoveFrom("history", id, Settings);
}
public override void RetryDownload(string id)
{
_proxy.RetryDownload(id, Settings);
}
public override void Test()
{
_sabnzbdProxy.GetCategories(Settings);
_proxy.GetCategories(Settings);
}
public void Execute(TestSabnzbdCommand message)
@ -153,7 +155,7 @@ public void Execute(TestSabnzbdCommand message)
var settings = new SabnzbdSettings();
settings.InjectFrom(message);
_sabnzbdProxy.GetCategories(settings);
_proxy.GetCategories(settings);
}
}
}

View File

@ -20,6 +20,7 @@ public interface ISabnzbdProxy
SabnzbdCategoryResponse GetCategories(SabnzbdSettings settings);
SabnzbdQueue GetQueue(int start, int limit, SabnzbdSettings settings);
SabnzbdHistory GetHistory(int start, int limit, SabnzbdSettings settings);
void RetryDownload(string id, SabnzbdSettings settings);
}
public class SabnzbdProxy : ISabnzbdProxy
@ -111,6 +112,14 @@ public SabnzbdHistory GetHistory(int start, int limit, SabnzbdSettings settings)
return Json.Deserialize<SabnzbdHistory>(JObject.Parse(response).SelectToken("history").ToString());
}
public void RetryDownload(string id, SabnzbdSettings settings)
{
var request = new RestRequest();
var action = String.Format("mode=retry&value={0}", id);
ProcessRequest(request, action, settings);
}
private IRestClient BuildClient(string action, SabnzbdSettings settings)
{
var protocol = settings.UseSsl ? "https" : "http";
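
mode=retry is SABnzbd's API call for re-queuing a failed history item, so the request ends up looking something like http(s)://host:port/.../api?mode=retry&value=<nzo_id>&apikey=<key>; only the query fragment appears in this diff, the base URL and extra parameters come from BuildClient/ProcessRequest, which are not shown, so the full URL shape is an assumption.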

View File

@ -43,6 +43,7 @@ public override string ToString()
public abstract IEnumerable<HistoryItem> GetHistory(int start = 0, int limit = 10);
public abstract void RemoveFromQueue(string id);
public abstract void RemoveFromHistory(string id);
public abstract void RetryDownload(string id);
public abstract void Test();
}
}

View File

@ -2,12 +2,16 @@
using System.Collections.Generic;
using NzbDrone.Common.Messaging;
using NzbDrone.Core.Qualities;
using NzbDrone.Core.Tv;
namespace NzbDrone.Core.Download
{
public class DownloadFailedEvent : IEvent
{
public DownloadFailedEvent()
{
Data = new Dictionary<string, string>();
}
public Int32 SeriesId { get; set; }
public List<Int32> EpisodeIds { get; set; }
public QualityModel Quality { get; set; }
@ -15,5 +19,6 @@ public class DownloadFailedEvent : IEvent
public String DownloadClient { get; set; }
public String DownloadClientId { get; set; }
public String Message { get; set; }
public Dictionary<string, string> Data { get; set; }
}
}

View File

@ -0,0 +1,11 @@
using System;
namespace NzbDrone.Core.Download
{
public class FailedDownload
{
public HistoryItem DownloadClientHistoryItem { get; set; }
public DateTime LastRetry { get; set; }
public Int32 RetryCount { get; set; }
}
}

View File

@ -3,6 +3,7 @@
using System.Linq;
using NLog;
using NzbDrone.Common;
using NzbDrone.Common.Cache;
using NzbDrone.Core.Configuration;
using NzbDrone.Core.History;
using NzbDrone.Core.Messaging.Commands;
@ -23,6 +24,8 @@ public class FailedDownloadService : IFailedDownloadService, IExecute<CheckForFa
private readonly IConfigService _configService;
private readonly Logger _logger;
private readonly ICached<FailedDownload> _failedDownloads;
private static string DOWNLOAD_CLIENT = "downloadClient";
private static string DOWNLOAD_CLIENT_ID = "downloadClientId";
@ -30,6 +33,7 @@ public FailedDownloadService(IProvideDownloadClient downloadClientProvider,
IHistoryService historyService,
IEventAggregator eventAggregator,
IConfigService configService,
ICacheManager cacheManager,
Logger logger)
{
_downloadClientProvider = downloadClientProvider;
@ -37,6 +41,8 @@ public FailedDownloadService(IProvideDownloadClient downloadClientProvider,
_eventAggregator = eventAggregator;
_configService = configService;
_logger = logger;
_failedDownloads = cacheManager.GetCache<FailedDownload>(GetType(), "queue");
}
public void MarkAsFailed(int historyId)
@ -127,6 +133,12 @@ private void CheckHistory(List<History.History> grabbedHistory, List<History.His
continue;
}
if (FailedDownloadForRecentRelease(failedItem, historyItems))
{
_logger.Debug("Recent release Failed, do not blacklist");
continue;
}
if (failedHistory.Any(h => failedLocal.Id.Equals(h.Data.GetValueOrDefault(DOWNLOAD_CLIENT_ID))))
{
_logger.Debug("Already added to history as failed");
@ -152,19 +164,21 @@ private void CheckHistory(List<History.History> grabbedHistory, List<History.His
private void PublishDownloadFailedEvent(List<History.History> historyItems, string message)
{
var historyItem = historyItems.First();
string downloadClient;
string downloadClientId;
_eventAggregator.PublishEvent(new DownloadFailedEvent
{
SeriesId = historyItem.SeriesId,
EpisodeIds = historyItems.Select(h => h.EpisodeId).ToList(),
Quality = historyItem.Quality,
SourceTitle = historyItem.SourceTitle,
DownloadClient = historyItem.Data.GetValueOrDefault(DOWNLOAD_CLIENT),
DownloadClientId = historyItem.Data.GetValueOrDefault(DOWNLOAD_CLIENT_ID),
Message = message
});
var downloadFailedEvent = new DownloadFailedEvent
{
SeriesId = historyItem.SeriesId,
EpisodeIds = historyItems.Select(h => h.EpisodeId).ToList(),
Quality = historyItem.Quality,
SourceTitle = historyItem.SourceTitle,
DownloadClient = historyItem.Data.GetValueOrDefault(DOWNLOAD_CLIENT),
DownloadClientId = historyItem.Data.GetValueOrDefault(DOWNLOAD_CLIENT_ID),
Message = message
};
downloadFailedEvent.Data = downloadFailedEvent.Data.Merge(historyItem.Data);
_eventAggregator.PublishEvent(downloadFailedEvent);
}
private IDownloadClient GetDownloadClient()
@ -179,6 +193,56 @@ private IDownloadClient GetDownloadClient()
return downloadClient;
}
private bool FailedDownloadForRecentRelease(HistoryItem failedDownloadHistoryItem, List<History.History> matchingHistoryItems)
{
double ageHours;
if (!Double.TryParse(matchingHistoryItems.First().Data.GetValueOrDefault("ageHours"), out ageHours))
{
_logger.Debug("Unable to determine age of failed download");
return false;
}
if (ageHours > _configService.BlacklistGracePeriod)
{
_logger.Debug("Failed download is older than the grace period");
return false;
}
var tracked = _failedDownloads.Get(failedDownloadHistoryItem.Id, () => new FailedDownload
{
DownloadClientHistoryItem = failedDownloadHistoryItem,
LastRetry = DateTime.UtcNow
}
);
if (tracked.RetryCount >= _configService.BlacklistRetryLimit)
{
_logger.Debug("Retry limit reached");
return false;
}
if (tracked.LastRetry.AddMinutes(_configService.BlacklistRetryInterval) < DateTime.UtcNow)
{
_logger.Debug("Retrying failed release");
tracked.LastRetry = DateTime.UtcNow;
tracked.RetryCount++;
try
{
GetDownloadClient().RetryDownload(failedDownloadHistoryItem.Id);
}
catch (NotImplementedException ex)
{
_logger.Debug("Retrying failed downloads is not supported by your download client");
return false;
}
}
return true;
}
public void Execute(CheckForFailedDownloadCommand message)
{
if (!_configService.EnableFailedDownloadHandling)

View File

@ -11,6 +11,7 @@ public interface IDownloadClient : IProvider
IEnumerable<HistoryItem> GetHistory(int start = 0, int limit = 0);
void RemoveFromQueue(string id);
void RemoveFromHistory(string id);
void RetryDownload(string id);
void Test();
}
}

View File

@ -40,10 +40,7 @@ public void Redownload(int seriesId, List<int> episodeIds)
{
_logger.Debug("Failed download only contains one episode, searching again");
_commandExecutor.PublishCommandAsync(new EpisodeSearchCommand
{
EpisodeIds = episodeIds.ToList()
});
_commandExecutor.PublishCommandAsync(new EpisodeSearchCommand(episodeIds));
return;
}
@ -66,10 +63,7 @@ public void Redownload(int seriesId, List<int> episodeIds)
_logger.Debug("Failed download contains multiple episodes, probably a double episode, searching again");
_commandExecutor.PublishCommandAsync(new EpisodeSearchCommand
{
EpisodeIds = episodeIds.ToList()
});
_commandExecutor.PublishCommandAsync(new EpisodeSearchCommand(episodeIds));
}
}
}

View File

@ -3,7 +3,6 @@
using System.Linq;
using Marr.Data.QGen;
using NzbDrone.Core.Datastore;
using NzbDrone.Core.Datastore.Extentions;
using NzbDrone.Core.Messaging.Events;
using NzbDrone.Core.Qualities;
using NzbDrone.Core.Tv;
@ -18,6 +17,7 @@ public interface IHistoryRepository : IBasicRepository<History>
List<History> Failed();
List<History> Grabbed();
History MostRecentForEpisode(int episodeId);
List<History> FindBySourceTitle(string sourceTitle);
}
public class HistoryRepository : BasicRepository<History>, IHistoryRepository
@ -69,6 +69,16 @@ public History MostRecentForEpisode(int episodeId)
.FirstOrDefault();
}
public List<History> FindBySourceTitle(string sourceTitle)
{
return Query.Where(h => h.SourceTitle.Contains(sourceTitle));
}
public List<History> AllForEpisode(int episodeId)
{
return Query.Where(h => h.EpisodeId == episodeId);
}
protected override SortBuilder<History> GetPagedQuery(QueryBuilder<History> query, PagingSpec<History> pagingSpec)
{
var baseQuery = query.Join<History, Series>(JoinType.Inner, h => h.Series, (h, s) => h.SeriesId == s.Id)

View File

@ -23,6 +23,7 @@ public interface IHistoryService
List<History> Grabbed();
History MostRecentForEpisode(int episodeId);
History Get(int id);
List<History> FindBySourceTitle(string sourceTitle);
}
public class HistoryService : IHistoryService, IHandle<EpisodeGrabbedEvent>, IHandle<EpisodeImportedEvent>, IHandle<DownloadFailedEvent>
@ -71,6 +72,11 @@ public History Get(int id)
return _historyRepository.Get(id);
}
public List<History> FindBySourceTitle(string sourceTitle)
{
return _historyRepository.FindBySourceTitle(sourceTitle);
}
public void Purge()
{
_historyRepository.Purge();
@ -107,6 +113,8 @@ public void Handle(EpisodeGrabbedEvent message)
history.Data.Add("NzbInfoUrl", message.Episode.Release.InfoUrl);
history.Data.Add("ReleaseGroup", message.Episode.ParsedEpisodeInfo.ReleaseGroup);
history.Data.Add("Age", message.Episode.Release.Age.ToString());
history.Data.Add("AgeHours", message.Episode.Release.AgeHours.ToString());
history.Data.Add("PublishedDate", message.Episode.Release.PublishDate.ToString("s") + "Z");
history.Data.Add("DownloadClient", message.DownloadClient);
if (!String.IsNullOrWhiteSpace(message.DownloadClientId))

View File

@ -14,5 +14,14 @@ public override bool SendUpdatesToClient
return true;
}
}
public EpisodeSearchCommand()
{
}
public EpisodeSearchCommand(List<int> episodeIds)
{
EpisodeIds = episodeIds;
}
}
}

View File

@ -14,5 +14,14 @@ public override bool SendUpdatesToClient
return true;
}
}
public MissingEpisodeSearchCommand()
{
}
public MissingEpisodeSearchCommand(List<int> episodeIds)
{
EpisodeIds = episodeIds;
}
}
}

View File

@ -21,9 +21,9 @@ public class CommandTrackingService : ITrackCommands, IExecute<TrackedCommandCle
{
private readonly ICached<Command> _cache;
public CommandTrackingService(ICacheManger cacheManger)
public CommandTrackingService(ICacheManager cacheManager)
{
_cache = cacheManger.GetCache<Command>(GetType());
_cache = cacheManager.GetCache<Command>(GetType());
}
public Command GetById(int id)

View File

@ -192,6 +192,7 @@
<SubType>Code</SubType>
</Compile>
<Compile Include="Datastore\Migration\046_fix_nzb_su_url.cs" />
<Compile Include="Datastore\Migration\047_add_published_date_blacklist_column.cs" />
<Compile Include="Datastore\Migration\Framework\MigrationContext.cs" />
<Compile Include="Datastore\Migration\Framework\MigrationController.cs" />
<Compile Include="Datastore\Migration\Framework\MigrationExtension.cs" />
@ -209,6 +210,7 @@
<Compile Include="Datastore\ModelNotFoundException.cs" />
<Compile Include="Datastore\PagingSpec.cs" />
<Compile Include="Datastore\TableMapping.cs" />
<Compile Include="DecisionEngine\Specifications\RetrySpecification.cs" />
<Compile Include="DecisionEngine\Specifications\BlacklistSpecification.cs" />
<Compile Include="DecisionEngine\Specifications\DownloadDecision.cs" />
<Compile Include="DecisionEngine\IRejectWithReason.cs" />
@ -258,6 +260,7 @@
<Compile Include="Download\Clients\Sabnzbd\JsonConverters\SabnzbdQueueTimeConverter.cs" />
<Compile Include="Download\Clients\Sabnzbd\SabnzbdProxy.cs" />
<Compile Include="Download\CheckForFailedDownloadCommand.cs" />
<Compile Include="Download\FailedDownload.cs" />
<Compile Include="Download\HistoryItem.cs" />
<Compile Include="Download\DownloadFailedEvent.cs" />
<Compile Include="Download\DownloadApprovedReports.cs" />

View File

@ -48,12 +48,12 @@ public class FileNameBuilder : IBuildFileNames
public FileNameBuilder(INamingConfigService namingConfigService,
IQualityDefinitionService qualityDefinitionService,
ICacheManger cacheManger,
ICacheManager cacheManager,
Logger logger)
{
_namingConfigService = namingConfigService;
_qualityDefinitionService = qualityDefinitionService;
_patternCache = cacheManger.GetCache<EpisodeFormat>(GetType());
_patternCache = cacheManager.GetCache<EpisodeFormat>(GetType());
_logger = logger;
}

View File

@ -13,7 +13,7 @@ public class ReleaseInfo
public DateTime PublishDate { get; set; }
public int Age
public Int32 Age
{
get
{
@ -28,6 +28,21 @@ private set
}
}
public Double AgeHours
{
get
{
return DateTime.UtcNow.Subtract(PublishDate).TotalHours;
}
//This prevents manually downloading a release from blowing up in mono
//TODO: Is there a better way?
private set
{
}
}
public int TvRageId { get; set; }
public override string ToString()

View File

@ -89,7 +89,7 @@ public void TestBaseSetup()
GetType().IsPublic.Should().BeTrue("All Test fixtures should be public to work in mono.");
Mocker.SetConstant<ICacheManger>(new CacheManger());
Mocker.SetConstant<ICacheManager>(new CacheManager());
Mocker.SetConstant(LogManager.GetLogger("TestLogger"));

View File

@ -6,8 +6,9 @@ define(
'Cells/FileSizeCell',
'Cells/QualityCell',
'Cells/ApprovalStatusCell',
'Release/DownloadReportCell'
], function (Marionette, Backgrid, FileSizeCell, QualityCell, ApprovalStatusCell, DownloadReportCell) {
'Release/DownloadReportCell',
'Release/AgeCell'
], function (Marionette, Backgrid, FileSizeCell, QualityCell, ApprovalStatusCell, DownloadReportCell, AgeCell) {
return Marionette.Layout.extend({
template: 'Episode/Search/ManualLayoutTemplate',
@ -22,7 +23,7 @@ define(
name : 'age',
label : 'Age',
sortable: true,
cell : Backgrid.IntegerCell
cell : AgeCell
},
{
name : 'title',

src/UI/Release/AgeCell.js (new file, 36 lines)
View File

@ -0,0 +1,36 @@
'use strict';
define(
[
'backgrid',
'Shared/FormatHelpers'
], function (Backgrid, FormatHelpers) {
return Backgrid.Cell.extend({
className: 'age-cell',
render: function () {
var age = this.model.get('age');
var ageHours = this.model.get('ageHours');
if (age === 0) {
this.$el.html('{0} {1}'.format(ageHours.toFixed(1), this.plural(Math.round(ageHours), 'hour')));
}
else {
this.$el.html('{0} {1}'.format(age, this.plural(age, 'day')));
}
this.delegateEvents();
return this;
},
plural: function (input, unit) {
if (input === 1) {
return unit;
}
return unit + 's';
}
});
});
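
The new AgeCell shows sub-day ages in hours: a release with age 0 (days) and ageHours 5.04 renders as "5.0 hours", while a 3-day-old release renders as "3 days". The '{0} {1}'.format(...) call assumes the string-format helper the rest of the UI already provides.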

View File

@ -61,5 +61,41 @@
</span>
</div>
</div>
<div class="control-group advanced-setting">
<label class="control-label">Grace Period</label>
<div class="controls">
<input type="number" min="1" max="24" name="blacklistGracePeriod"/>
<span class="help-inline">
<i class="icon-nd-form-info" title="Age in hours that a release will remain in the download client and retried"/>
</span>
</div>
</div>
<div class="control-group advanced-setting">
<label class="control-label">Retry Interval</label>
<div class="controls">
<input type="number" min="5" max="120" name="blacklistRetryInterval"/>
<span class="help-inline">
<i class="icon-nd-form-info" title="Time in minutes before a failed download for a recent release will be retried"/>
</span>
</div>
</div>
<div class="control-group advanced-setting">
<label class="control-label">Retry Count</label>
<div class="controls">
<input type="number" min="0" max="10" name="blacklistRetryLimit"/>
<span class="help-inline">
<i class="icon-nd-form-info" title="Number of times to retry a release before it is blacklisted"/>
</span>
</div>
</div>
</div>
</fieldset>
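
These three advanced inputs correspond to the new blacklistGracePeriod, blacklistRetryInterval and blacklistRetryLimit fields on DownloadClientConfigResource, and their min/max attributes mirror the server-side FluentValidation rules added above (1-24 hours, 5-120 minutes, 0-10 retries).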