mirror of https://github.com/Sonarr/Sonarr.git synced 2025-11-06 09:19:38 +02:00

Merge + Download Settings UI Fixes.

Mark McDowall
2011-04-25 11:21:53 -07:00
parent e4d208883a
commit deb7f9d811
22 changed files with 3963 additions and 296 deletions

View File

@@ -1,80 +1,54 @@
using System;
using System.IO;
using System.Net;
using System.Xml;
using NLog;
namespace NzbDrone.Core.Providers.Core
{
public class HttpProvider
{
private static readonly Logger Logger = LogManager.GetCurrentClassLogger();
public virtual string DownloadString(string request)
public virtual string DownloadString(string address)
{
try
{
return new WebClient().DownloadString(request);
return new WebClient().DownloadString(address);
}
catch (Exception ex)
{
Logger.Warn("Failed to get response from: {0}", request);
Logger.TraceException(ex.Message, ex);
}
return String.Empty;
}
public virtual string DownloadString(string request, string username, string password)
{
try
{
var webClient = new WebClient();
webClient.Credentials = new NetworkCredential(username, password);
return webClient.DownloadString(request);
}
catch (Exception ex)
{
Logger.Warn("Failed to get response from: {0}", request);
Logger.TraceException(ex.Message, ex);
}
return String.Empty;
}
public virtual void DownloadFile(string request, string filename)
{
try
{
var webClient = new WebClient();
webClient.DownloadFile(request, filename);
}
catch (Exception ex)
{
Logger.Warn("Failed to get response from: {0}", request);
Logger.Warn("Failed to get response from: {0}", address);
Logger.TraceException(ex.Message, ex);
throw;
}
}
public virtual void DownloadFile(string request, string filename, string username, string password)
public virtual string DownloadString(string address, string username, string password)
{
try
{
var webClient = new WebClient();
webClient.Credentials = new NetworkCredential(username, password);
webClient.DownloadFile(request, filename);
return webClient.DownloadString(address);
}
catch (Exception ex)
{
Logger.Warn("Failed to get response from: {0}", request);
Logger.Warn("Failed to get response from: {0}", address);
Logger.TraceException(ex.Message, ex);
throw;
}
}
public virtual XmlReader DownloadXml(string url)
public virtual Stream DownloadStream(string url)
{
return XmlReader.Create(url);
var request = WebRequest.Create(url);
var response = request.GetResponse();
return response.GetResponseStream();
}
}
}
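
Taken as a whole, the hunk above renames the request parameter to address and replaces the XmlReader-based DownloadXml with a raw DownloadStream. A minimal caller sketch, assuming the usual System.IO/System.Net usings; the URL and credentials are placeholders, not part of this commit:

// Placeholder URL and credentials; HttpProvider is the class edited above.
var http = new HttpProvider();

var anonymous = http.DownloadString("http://indexer.example/rss");
var authenticated = http.DownloadString("http://indexer.example/rss", "user", "pass");

// DownloadStream returns the raw response stream; wrapping and disposal are
// left to the caller.
using (var stream = http.DownloadStream("http://indexer.example/rss"))
using (var reader = new StreamReader(stream))
{
    var body = reader.ReadToEnd();
}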

View File

@@ -12,7 +12,7 @@ namespace NzbDrone.Core.Providers
public class EpisodeProvider
{
private static readonly Logger Logger = LogManager.GetCurrentClassLogger();
private readonly QualityProvider _quality;
private readonly QualityProvider _qualityProvider;
private readonly SeasonProvider _seasons;
private readonly SeriesProvider _series;
private readonly IRepository _sonicRepo;
@@ -20,13 +20,13 @@ namespace NzbDrone.Core.Providers
public EpisodeProvider(IRepository sonicRepo, SeriesProvider seriesProvider,
SeasonProvider seasonProvider, TvDbProvider tvDbProvider,
QualityProvider quality)
QualityProvider qualityProvider)
{
_sonicRepo = sonicRepo;
_series = seriesProvider;
_tvDb = tvDbProvider;
_seasons = seasonProvider;
_quality = quality;
_qualityProvider = qualityProvider;
}
public EpisodeProvider()
@@ -80,7 +80,7 @@ namespace NzbDrone.Core.Providers
if (episodeInfo == null)
{
Logger.Debug("Episode S{0:00}E{1:00} doesn't exist in db. adding it now.", parsedReport.SeasonNumber, episode);
//Todo: How do we want to handle this really? Episode could be released before information is on TheTvDB
//(Parks and Rec did this a lot in the first season, from experience)
//Keivan: Should automatically add the episode to db with minimal information. then update the description/title when available.
@@ -103,24 +103,39 @@ namespace NzbDrone.Core.Providers
if (file != null)
{
//If not null we need to see if this episode has the quality as the download (or if it is better)
if (file.Quality == parsedReport.Quality && file.Proper) continue;
Logger.Debug("File is {0} Proper:{1}", file.Quality, file.Proper);
//There will never be a time when the episode quality is less than what we have and we want it... ever.... I think.
if (file.Quality > parsedReport.Quality) continue;
if (file.Quality > parsedReport.Quality)
{
Logger.Trace("file has better quality. skipping");
continue;
}
//Now we need to handle upgrades and actually pay attention to the Cutoff Value
//If not null we need to see if this episode has the quality as the download (or if it is better)
if (file.Quality == parsedReport.Quality && file.Proper == parsedReport.Proper)
{
Logger.Trace("Same quality/proper. existing proper. skipping");
continue;
}
//Now we need to handle upgrades and actually pay attention to the Cut-off Value
if (file.Quality < parsedReport.Quality)
{
var quality = _quality.Find(episodeInfo.Series.QualityProfileId);
if (episodeInfo.Series.QualityProfile.Cutoff <= file.Quality)
{
Logger.Trace("Quality is past cut-off skipping.");
continue;
}
if (quality.Cutoff <= file.Quality && file.Proper) continue;
}
}
Logger.Debug("Episode {0} is needed", parsedReport);
return true; //If we get to this point and the file has not yet been rejected then accept it
}
Logger.Debug("Episode {0} is not needed", parsedReport);
return false;
}
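
Read together, the new checks amount to a simple rule: never downgrade, skip reports identical to the file on disk, and stop upgrading once the profile cut-off is reached. An illustrative restatement, assuming QualityTypes compares by rank as the code above does; the helper name and parameters are hypothetical:

// Hypothetical helper mirroring the IsNeeded quality checks above.
static bool WantsUpgrade(QualityTypes existing, bool existingProper,
                         QualityTypes report, bool reportProper,
                         QualityTypes cutoff)
{
    if (existing > report)
        return false; // never take a lower quality
    if (existing == report && existingProper == reportProper)
        return false; // identical to the file already on disk
    if (existing < report && cutoff <= existing)
        return false; // the profile cut-off is already satisfied
    return true;
}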

View File

@@ -59,16 +59,19 @@ namespace NzbDrone.Core.Providers.Indexer
/// <summary>
/// Fetches RSS feed and process each news item.
/// </summary>
public void Fetch()
public List<Exception> Fetch()
{
_logger.Debug("Fetching feeds from " + Settings.Name);
var exceptions = new List<Exception>();
foreach (var url in Urls)
{
try
{
_logger.Trace("Downloading RSS " + url);
var feed = SyndicationFeed.Load(_httpProvider.DownloadXml(url)).Items;
var reader = new SyndicationFeedXmlReader(_httpProvider.DownloadStream(url));
var feed = SyndicationFeed.Load(reader).Items;
foreach (var item in feed)
{
@@ -78,6 +81,7 @@ namespace NzbDrone.Core.Providers.Indexer
}
catch (Exception itemEx)
{
exceptions.Add(itemEx);
_logger.ErrorException("An error occurred while processing feed item", itemEx);
}
@@ -85,11 +89,13 @@ namespace NzbDrone.Core.Providers.Indexer
}
catch (Exception feedEx)
{
exceptions.Add(feedEx);
_logger.ErrorException("An error occurred while processing feed", feedEx);
}
}
_logger.Info("Finished processing feeds from " + Settings.Name);
return exceptions;
}
internal void ProcessItem(SyndicationItem feedItem)
@@ -131,17 +137,17 @@ namespace NzbDrone.Core.Providers.Indexer
return;
}
var sabTitle = _sabProvider.GetSabTitle(parseResult);
//var sabTitle = _sabProvider.GetSabTitle(parseResult);
if (_sabProvider.IsInQueue(sabTitle))
{
return;
}
//if (_sabProvider.IsInQueue(sabTitle))
//{
// return;
//}
if (!_sabProvider.AddByUrl(NzbDownloadUrl(feedItem), sabTitle))
{
return;
}
//if (!_sabProvider.AddByUrl(NzbDownloadUrl(feedItem), sabTitle))
//{
// return;
//}
foreach (var episode in episodes)
{

View File

@@ -0,0 +1,67 @@
//http://stackoverflow.com/questions/210375/problems-reading-rss-with-c-and-net-3-5
//https://connect.microsoft.com/VisualStudio/feedback/details/325421/syndicationfeed-load-fails-to-parse-datetime-against-a-real-world-feeds-ie7-can-read
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using System.ServiceModel.Syndication;
using System.Xml;
namespace NzbDrone.Core.Providers.Indexer
{
public class SyndicationFeedXmlReader : XmlTextReader
{
readonly string[] Rss20DateTimeHints = { "pubDate" };
readonly string[] Atom10DateTimeHints = { "updated", "published", "lastBuildDate" };
private bool isRss2DateTime = false;
private bool isAtomDateTime = false;
public SyndicationFeedXmlReader(Stream stream) : base(stream) { }
public override bool IsStartElement(string localname, string ns)
{
isRss2DateTime = false;
isAtomDateTime = false;
if (Rss20DateTimeHints.Contains(localname)) isRss2DateTime = true;
if (Atom10DateTimeHints.Contains(localname)) isAtomDateTime = true;
return base.IsStartElement(localname, ns);
}
public override string ReadString()
{
string dateVal = base.ReadString();
try
{
if (isRss2DateTime)
{
MethodInfo objMethod = typeof(Rss20FeedFormatter).GetMethod("DateFromString", BindingFlags.NonPublic | BindingFlags.Static);
Debug.Assert(objMethod != null);
objMethod.Invoke(null, new object[] { dateVal, this });
}
if (isAtomDateTime)
{
MethodInfo objMethod = typeof(Atom10FeedFormatter).GetMethod("DateFromString", BindingFlags.NonPublic | BindingFlags.Instance);
Debug.Assert(objMethod != null);
objMethod.Invoke(new Atom10FeedFormatter(), new object[] { dateVal, this });
}
}
catch (TargetInvocationException)
{
DateTimeFormatInfo dtfi = CultureInfo.CurrentCulture.DateTimeFormat;
return DateTimeOffset.UtcNow.ToString(dtfi.RFC1123Pattern);
}
return dateVal;
}
}
}
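
The custom reader exists because SyndicationFeed.Load rejects the loosely formatted dates many real-world feeds emit (see the two links at the top of the file); when the framework's own date parsing fails, it falls back to the current UTC time. A usage sketch mirroring the new call site in IndexerBase.Fetch; the feed URL is a placeholder:

// Placeholder feed URL; HttpProvider and SyndicationFeedXmlReader come from the hunks above.
var http = new HttpProvider();
using (var reader = new SyndicationFeedXmlReader(http.DownloadStream("http://indexer.example/rss")))
{
    var feed = SyndicationFeed.Load(reader);
    foreach (var item in feed.Items)
    {
        Console.WriteLine(item.Title.Text);
    }
}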

View File

@@ -16,6 +16,10 @@ namespace NzbDrone.Core.Providers
private readonly ConfigProvider _config;
private readonly HttpProvider _http;
public SabProvider()
{
}
public SabProvider(ConfigProvider config, HttpProvider http)
{
_config = config;
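
The new parameterless constructor is presumably there so the provider can be constructed or proxied without its dependencies, for example by a mocking framework in the test suite; that reading is an assumption, as are the Moq usage and the virtualness of IsInQueue below:

// Assumption: IsInQueue is virtual and the tests use Moq, whose proxy needs a
// constructor it can call.
var sab = new Mock<SabProvider>();
sab.Setup(s => s.IsInQueue(It.IsAny<string>())).Returns(false);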

View File

@@ -76,10 +76,6 @@ namespace NzbDrone.Core.Providers
public virtual bool IsIgnored(int seriesId, int seasonNumber)
{
var season = _sonicRepo.Single<Season>(s => s.SeriesId == seriesId && s.SeasonNumber == seasonNumber);
if (season == null)
return true;
return !season.Monitored;
}

View File

@@ -58,9 +58,8 @@ namespace NzbDrone.Core.Providers
public virtual bool QualityWanted(int seriesId, QualityTypes quality)
{
var series = _sonioRepo.Single<Series>(seriesId);
var profile = _quality.Find(series.QualityProfileId);
return profile.Allowed.Contains(quality);
Logger.Trace("Series {0} is using quality profile {1}", seriesId, series.QualityProfile.Name);
return series.QualityProfile.Allowed.Contains(quality);
}
public virtual TvdbSeries MapPathToSeries(string path)
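
QualityWanted now reads the allowed qualities straight off the series' QualityProfile navigation property instead of a separate lookup through _quality. A hypothetical caller; the series id, the QualityTypes value, and the logger are placeholders/assumptions:

// Placeholder values; seriesProvider is an instance of the class edited above.
bool wanted = seriesProvider.QualityWanted(12, QualityTypes.HDTV);
logger.Debug("Quality allowed by the series' profile: {0}", wanted);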