1
0
mirror of https://github.com/Sonarr/Sonarr.git synced 2024-12-14 11:23:42 +02:00
Sonarr/NzbDrone.Core/Indexers/Providers/IndexerBase.cs

272 lines
10 KiB
C#
Raw Normal View History

using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.ServiceModel.Syndication;
using System.Text.RegularExpressions;
2011-04-04 06:50:12 +03:00
using NLog;
2012-02-11 03:48:20 +03:00
using NzbDrone.Common;
2011-04-04 06:50:12 +03:00
using NzbDrone.Core.Model;
using NzbDrone.Core.Providers.Core;
2011-04-04 06:50:12 +03:00
2013-02-23 07:35:54 +03:00
namespace NzbDrone.Core.Indexers.Providers
2011-04-04 06:50:12 +03:00
{
public abstract class IndexerBase
{
    protected readonly Logger _logger;
    protected readonly HttpProvider _httpProvider;
    protected readonly ConfigProvider _configProvider;

    protected static readonly Regex TitleSearchRegex = new Regex(@"[\W]", RegexOptions.IgnoreCase | RegexOptions.Compiled);
    protected static readonly Regex RemoveThe = new Regex(@"^the\s", RegexOptions.IgnoreCase | RegexOptions.Compiled);

    // Collapses runs of '+' produced by TitleSearchRegex substitution.
    // Cached/compiled instead of the previous inline Regex.Replace with
    // @"\+{1,100}", whose upper bound silently stopped collapsing runs
    // longer than 100 characters and recompiled the pattern on every call.
    private static readonly Regex RepeatingPlusRegex = new Regex(@"\++", RegexOptions.Compiled);

    protected IndexerBase(HttpProvider httpProvider, ConfigProvider configProvider)
    {
        _httpProvider = httpProvider;
        _configProvider = configProvider;
        _logger = LogManager.GetLogger(GetType().ToString());
    }

    // Parameterless constructor kept for mocking/proxy generation.
    // NOTE(review): it leaves _logger/_httpProvider/_configProvider null,
    // so instances built this way must not invoke the Fetch* methods.
    public IndexerBase()
    {
    }

    /// <summary>
    /// Gets the name for the feed
    /// </summary>
    public abstract string Name { get; }

    /// <summary>
    /// Gets the source URL for the feed
    /// </summary>
    protected abstract string[] Urls { get; }

    /// <summary>
    /// Gets whether the indexer has all the settings it needs to be used.
    /// </summary>
    public abstract bool IsConfigured { get; }

    /// <summary>
    /// Should the indexer be enabled by default?
    /// </summary>
    public virtual bool EnabledByDefault
    {
        get { return false; }
    }

    /// <summary>
    /// Gets the credential used when downloading feeds (null for anonymous access).
    /// </summary>
    protected virtual NetworkCredential Credentials
    {
        get { return null; }
    }

    /// <summary>
    /// Builds the search URLs for a specific episode (e.g. S01E05).
    /// </summary>
    protected abstract IList<String> GetEpisodeSearchUrls(string seriesTitle, int seasonNumber, int episodeNumber);

    /// <summary>
    /// Builds the search URLs for a daily (air-date based) episode.
    /// </summary>
    protected abstract IList<String> GetDailyEpisodeSearchUrls(string seriesTitle, DateTime date);

    /// <summary>
    /// Builds the search URLs for a full season.
    /// </summary>
    protected abstract IList<String> GetSeasonSearchUrls(string seriesTitle, int seasonNumber);

    /// <summary>
    /// Builds the search URLs for a partial season (episode number prefix wildcard).
    /// </summary>
    protected abstract IList<String> GetPartialSeasonSearchUrls(string seriesTitle, int seasonNumber, int episodeWildcard);

    /// <summary>
    /// This method can be overwritten to provide indexer specific info parsing
    /// </summary>
    /// <param name="item">RSS item that needs to be parsed</param>
    /// <param name="currentResult">Result of the built in parse function.</param>
    /// <returns>The (possibly modified) parse result.</returns>
    protected virtual EpisodeParseResult CustomParser(SyndicationItem item, EpisodeParseResult currentResult)
    {
        return currentResult;
    }

    /// <summary>
    /// This method can be overwritten to pre-parse the title
    /// </summary>
    /// <param name="item">RSS item that needs to be parsed</param>
    /// <returns>The title text to feed into the parser.</returns>
    protected virtual string TitlePreParser(SyndicationItem item)
    {
        return item.Title.Text;
    }

    /// <summary>
    /// Generates direct link to download an NZB
    /// </summary>
    /// <param name="item">RSS Feed item to generate the link for</param>
    /// <returns>Download link URL</returns>
    protected abstract string NzbDownloadUrl(SyndicationItem item);

    /// <summary>
    /// Generates link to the NZB info at the indexer
    /// </summary>
    /// <param name="item">RSS Feed item to generate the link for</param>
    /// <returns>Nzb Info URL</returns>
    protected abstract string NzbInfoUrl(SyndicationItem item);

    /// <summary>
    /// Fetches RSS feed and processes each news item.
    /// </summary>
    /// <returns>All episodes parsed from the indexer's feed URLs.</returns>
    public virtual IList<EpisodeParseResult> FetchRss()
    {
        _logger.Debug("Fetching feeds from " + Name);

        // Fetch directly; the previous version allocated a throwaway
        // List<EpisodeParseResult> that was immediately overwritten.
        var result = Fetch(Urls);

        _logger.Debug("Finished processing feeds from " + Name);
        return result;
    }

    /// <summary>
    /// Searches the indexer for a full season of the given series.
    /// </summary>
    public virtual IList<EpisodeParseResult> FetchSeason(string seriesTitle, int seasonNumber)
    {
        _logger.Debug("Searching {0} for {1} Season {2}", Name, seriesTitle, seasonNumber);

        var searchUrls = GetSeasonSearchUrls(GetQueryTitle(seriesTitle), seasonNumber);
        var result = Fetch(searchUrls);

        _logger.Info("Finished searching {0} for {1} Season {2}, Found {3}", Name, seriesTitle, seasonNumber, result.Count);
        return result;
    }

    /// <summary>
    /// Searches the indexer for a partial season (episodes whose number starts with the prefix).
    /// </summary>
    public virtual IList<EpisodeParseResult> FetchPartialSeason(string seriesTitle, int seasonNumber, int episodePrefix)
    {
        _logger.Debug("Searching {0} for {1} Season {2}, Prefix: {3}", Name, seriesTitle, seasonNumber, episodePrefix);

        var searchUrls = GetPartialSeasonSearchUrls(GetQueryTitle(seriesTitle), seasonNumber, episodePrefix);
        var result = Fetch(searchUrls);

        _logger.Info("Finished searching {0} for {1} Season {2}, Found {3}", Name, seriesTitle, seasonNumber, result.Count);
        return result;
    }

    /// <summary>
    /// Searches the indexer for a single numbered episode.
    /// </summary>
    public virtual IList<EpisodeParseResult> FetchEpisode(string seriesTitle, int seasonNumber, int episodeNumber)
    {
        _logger.Debug("Searching {0} for {1}-S{2:00}E{3:00}", Name, seriesTitle, seasonNumber, episodeNumber);

        var searchUrls = GetEpisodeSearchUrls(GetQueryTitle(seriesTitle), seasonNumber, episodeNumber);
        var result = Fetch(searchUrls);

        _logger.Info("Finished searching {0} for {1} S{2:00}E{3:00}, Found {4}", Name, seriesTitle, seasonNumber, episodeNumber, result.Count);
        return result;
    }

    /// <summary>
    /// Searches the indexer for a daily episode by air date.
    /// </summary>
    public virtual IList<EpisodeParseResult> FetchDailyEpisode(string seriesTitle, DateTime airDate)
    {
        _logger.Debug("Searching {0} for {1}-{2}", Name, seriesTitle, airDate.ToShortDateString());

        var searchUrls = GetDailyEpisodeSearchUrls(GetQueryTitle(seriesTitle), airDate);
        var result = Fetch(searchUrls);

        _logger.Info("Finished searching {0} for {1}-{2}, Found {3}", Name, seriesTitle, airDate.ToShortDateString(), result.Count);
        return result;
    }

    /// <summary>
    /// Downloads and parses each of the given feed URLs, skipping items and
    /// feeds that fail to parse (failures are logged, never rethrown).
    /// </summary>
    /// <param name="urls">Feed URLs to download.</param>
    /// <returns>All successfully parsed episodes; empty when not configured.</returns>
    protected virtual List<EpisodeParseResult> Fetch(IEnumerable<string> urls)
    {
        var result = new List<EpisodeParseResult>();

        if (!IsConfigured)
        {
            _logger.Warn("Indexer '{0}' isn't configured correctly. please reconfigure the indexer in settings page.", Name);
            return result;
        }

        foreach (var url in urls)
        {
            try
            {
                _logger.Trace("Downloading RSS " + url);

                var reader = new SyndicationFeedXmlReader(_httpProvider.DownloadStream(url, Credentials));
                var feed = SyndicationFeed.Load(reader).Items;

                foreach (var item in feed)
                {
                    try
                    {
                        var parsedEpisode = ParseFeed(item);
                        if (parsedEpisode != null)
                        {
                            parsedEpisode.NzbUrl = NzbDownloadUrl(item);
                            parsedEpisode.NzbInfoUrl = NzbInfoUrl(item);
                            parsedEpisode.Indexer = String.IsNullOrWhiteSpace(parsedEpisode.Indexer) ? Name : parsedEpisode.Indexer;
                            result.Add(parsedEpisode);
                        }
                    }
                    catch (Exception itemEx)
                    {
                        // Indexer assignment (instead of Data.Add) so a key that
                        // is already present is overwritten rather than throwing
                        // an ArgumentException from inside the error handler.
                        itemEx.Data["FeedUrl"] = url;
                        itemEx.Data["Item"] = item.Title;
                        _logger.ErrorException("An error occurred while processing feed item", itemEx);
                    }
                }
            }
            catch (WebException webException)
            {
                // NOTE(review): string sniffing for "503" is fragile; inspecting
                // the HttpWebResponse status code would be more reliable, but the
                // message check is kept to preserve existing behavior.
                if (webException.Message.Contains("503"))
                {
                    _logger.Warn("{0} server is currently unavailable.{1} {2}", Name, url, webException.Message);
                }
                else
                {
                    webException.Data["FeedUrl"] = url;
                    _logger.ErrorException("An error occurred while processing feed. " + url, webException);
                }
            }
            catch (Exception feedEx)
            {
                feedEx.Data["FeedUrl"] = url;
                _logger.ErrorException("An error occurred while processing feed. " + url, feedEx);
            }
        }

        return result;
    }

    /// <summary>
    /// Parses the RSS feed item
    /// </summary>
    /// <param name="item">RSS feed item to parse</param>
    /// <returns>Detailed episode info, or whatever <see cref="CustomParser"/> returns (may be null).</returns>
    public EpisodeParseResult ParseFeed(SyndicationItem item)
    {
        var title = TitlePreParser(item);
        var episodeParseResult = Parser.ParseTitle(title);

        if (episodeParseResult != null)
        {
            episodeParseResult.Age = DateTime.Now.Date.Subtract(item.PublishDate.Date).Days;
            episodeParseResult.OriginalString = title;
            episodeParseResult.SceneSource = true;
        }

        _logger.Trace("Parsed: {0} from: {1}", episodeParseResult, item.Title.Text);

        // Give the concrete indexer a chance to refine (or reject) the result.
        return CustomParser(item, episodeParseResult);
    }

    /// <summary>
    /// This method can be overwritten to provide indexer specific title cleaning
    /// </summary>
    /// <param name="title">Title that needs to be cleaned</param>
    /// <returns>A query-safe title: leading "the " removed, non-word characters replaced with '+', runs of '+' collapsed.</returns>
    public virtual string GetQueryTitle(string title)
    {
        title = RemoveThe.Replace(title, string.Empty);

        var cleanTitle = TitleSearchRegex.Replace(title, "+").Trim('+', ' ');

        // Collapse any repeating +s into a single one.
        cleanTitle = RepeatingPlusRegex.Replace(cleanTitle, "+");

        return cleanTitle;
    }
}
2011-04-10 05:44:01 +03:00
}