// Mirror of https://github.com/Sonarr/Sonarr.git
// NzbDrone.Core/Providers/Indexer/IndexerBase.cs
using System;
using System.Linq;
using System.Collections.Generic;
using System.Net;
using System.ServiceModel.Syndication;
using System.Text.RegularExpressions;
using System.Web;
using Ninject;
using NLog;
using NzbDrone.Core.Model;
using NzbDrone.Core.Model.Search;
using NzbDrone.Core.Providers.Core;

namespace NzbDrone.Core.Providers.Indexer
{
    /// <summary>
    /// Base class for NZB indexers. Downloads RSS feeds (either the indexer's
    /// standard feeds or per-search feeds built by the derived class), parses each
    /// item into an <see cref="EpisodeParseResult"/> and, for searches, filters the
    /// results down to items whose parsed title matches the requested series.
    /// </summary>
    public abstract class IndexerBase
    {
        protected readonly Logger _logger;

        private readonly HttpProvider _httpProvider;
        protected readonly ConfigProvider _configProvider;

        //Matches every non-word character; used to turn a series title into a '+'-separated search query.
        private static readonly Regex TitleSearchRegex = new Regex(@"[\W]", RegexOptions.IgnoreCase | RegexOptions.Compiled);

        //Collapses runs of '+' left behind by TitleSearchRegex into a single '+'.
        //(Replaces the previous per-call Regex.Replace with @"\+{1,100}", which was
        //recompiled on every call and left "++" behind for runs longer than 100.)
        private static readonly Regex RepeatingPlusRegex = new Regex(@"\+{2,}", RegexOptions.Compiled);

        [Inject]
        protected IndexerBase(HttpProvider httpProvider, ConfigProvider configProvider)
        {
            _httpProvider = httpProvider;
            _configProvider = configProvider;
            //Logger is named after the concrete indexer type so log lines identify the feed source.
            _logger = LogManager.GetLogger(GetType().ToString());
        }

        //NOTE(review): parameterless constructor leaves the readonly fields null;
        //presumably required for mocking/proxy generation — confirm before removing.
        public IndexerBase()
        {
        }

        /// <summary>
        /// Gets the name for the feed
        /// </summary>
        public abstract string Name { get; }

        /// <summary>
        /// Gets the source URL for the feed
        /// </summary>
        protected abstract string[] Urls { get; }

        /// <summary>
        /// Gets the credential used when downloading feeds. Null (the default) means anonymous access.
        /// </summary>
        protected virtual NetworkCredential Credentials
        {
            get { return null; }
        }

        /// <summary>
        /// Gets the rss urls for a specific episode/season search
        /// </summary>
        /// <param name="searchModel">SearchModel containing episode information</param>
        /// <returns>List of search URLs to fetch</returns>
        protected abstract IList<String> GetSearchUrls(SearchModel searchModel);

        /// <summary>
        /// This method can be overwritten to provide indexer specific info parsing
        /// </summary>
        /// <param name="item">RSS item that needs to be parsed</param>
        /// <param name="currentResult">Result of the built in parse function.</param>
        /// <returns>The (possibly adjusted) parse result; the base implementation returns it unchanged.</returns>
        protected virtual EpisodeParseResult CustomParser(SyndicationItem item, EpisodeParseResult currentResult)
        {
            return currentResult;
        }

        /// <summary>
        /// Generates direct link to download an NZB
        /// </summary>
        /// <param name = "item">RSS Feed item to generate the link for</param>
        /// <returns>Download link URL</returns>
        protected abstract string NzbDownloadUrl(SyndicationItem item);

        /// <summary>
        /// Fetches RSS feed and process each news item.
        /// </summary>
        /// <returns>All episodes parsed from every configured feed URL.</returns>
        public virtual IList<EpisodeParseResult> FetchRss()
        {
            _logger.Debug("Fetching feeds from " + Name);

            var result = new List<EpisodeParseResult>();

            foreach (var url in Urls)
            {
                result.AddRange(Fetch(url));
            }

            _logger.Info("Finished processing feeds from " + Name);
            return result;
        }

        /// <summary>
        /// Searches the indexer for every episode of a full season.
        /// </summary>
        /// <param name="seriesTitle">Title of the series to search for</param>
        /// <param name="seasonNumber">Season number to search for</param>
        /// <returns>Results whose parsed title matches <paramref name="seriesTitle"/></returns>
        public virtual IList<EpisodeParseResult> FetchSeason(string seriesTitle, int seasonNumber)
        {
            _logger.Debug("Searching {0} for {1}-Season {2}", Name, seriesTitle, seasonNumber);

            var searchModel = new SearchModel
                                  {
                                      SeriesTitle = GetQueryTitle(seriesTitle),
                                      SeasonNumber = seasonNumber,
                                      SearchType = SearchType.SeasonSearch
                                  };

            var result = FetchSearchResults(searchModel, seriesTitle);

            _logger.Info("Finished searching {0} for {1}-S{2}, Found {3}", Name, seriesTitle, seasonNumber, result.Count);
            return result;
        }

        /// <summary>
        /// Searches the indexer for a partial season (episodes sharing a number prefix, e.g. S01E2x).
        /// </summary>
        /// <param name="seriesTitle">Title of the series to search for</param>
        /// <param name="seasonNumber">Season number to search for</param>
        /// <param name="episodePrefix">Leading digit(s) of the episode numbers to match</param>
        /// <returns>Results whose parsed title matches <paramref name="seriesTitle"/></returns>
        public virtual IList<EpisodeParseResult> FetchPartialSeason(string seriesTitle, int seasonNumber, int episodePrefix)
        {
            _logger.Debug("Searching {0} for {1}-Season {2}, Prefix: {3}", Name, seriesTitle, seasonNumber, episodePrefix);

            var searchModel = new SearchModel
                                  {
                                      SeriesTitle = GetQueryTitle(seriesTitle),
                                      SeasonNumber = seasonNumber,
                                      EpisodePrefix = episodePrefix,
                                      SearchType = SearchType.PartialSeasonSearch
                                  };

            var result = FetchSearchResults(searchModel, seriesTitle);

            _logger.Info("Finished searching {0} for {1}-S{2}, Found {3}", Name, seriesTitle, seasonNumber, result.Count);
            return result;
        }

        /// <summary>
        /// Searches the indexer for a single episode.
        /// </summary>
        /// <param name="seriesTitle">Title of the series to search for</param>
        /// <param name="seasonNumber">Season number to search for</param>
        /// <param name="episodeNumber">Episode number to search for</param>
        /// <returns>Results whose parsed title matches <paramref name="seriesTitle"/></returns>
        public virtual IList<EpisodeParseResult> FetchEpisode(string seriesTitle, int seasonNumber, int episodeNumber)
        {
            _logger.Debug("Searching {0} for {1}-S{2:00}E{3:00}", Name, seriesTitle, seasonNumber, episodeNumber);

            var searchModel = new SearchModel
                                  {
                                      SeriesTitle = GetQueryTitle(seriesTitle),
                                      SeasonNumber = seasonNumber,
                                      EpisodeNumber = episodeNumber,
                                      SearchType = SearchType.EpisodeSearch
                                  };

            var result = FetchSearchResults(searchModel, seriesTitle);

            _logger.Info("Finished searching {0} for {1}-S{2}E{3:00}, Found {4}", Name, seriesTitle, seasonNumber, episodeNumber, result.Count);
            return result;
        }

        //Shared tail of the three search methods: fetch every search URL for the model,
        //then keep only results whose normalized title matches the series being searched for.
        private List<EpisodeParseResult> FetchSearchResults(SearchModel searchModel, string seriesTitle)
        {
            var result = new List<EpisodeParseResult>();

            foreach (var url in GetSearchUrls(searchModel))
            {
                result.AddRange(Fetch(url));
            }

            return result.Where(e => e.CleanTitle == Parser.NormalizeTitle(seriesTitle)).ToList();
        }

        //Downloads a single feed URL and parses every item. Failures are logged and
        //swallowed deliberately: a broken item skips that item, a broken feed returns
        //whatever was collected so far, so the remaining feeds can still be processed.
        private IEnumerable<EpisodeParseResult> Fetch(string url)
        {
            var result = new List<EpisodeParseResult>();

            try
            {
                _logger.Trace("Downloading RSS " + url);

                //NOTE(review): reader/stream are never disposed here — confirm whether
                //SyndicationFeedXmlReader takes ownership and consider a using block.
                var reader = new SyndicationFeedXmlReader(_httpProvider.DownloadStream(url, Credentials));
                var feed = SyndicationFeed.Load(reader).Items;

                foreach (var item in feed)
                {
                    try
                    {
                        var parsedEpisode = ParseFeed(item);
                        if (parsedEpisode != null)
                        {
                            parsedEpisode.NzbUrl = NzbDownloadUrl(item);
                            parsedEpisode.Indexer = Name;
                            parsedEpisode.NzbTitle = item.Title.Text;
                            result.Add(parsedEpisode);
                        }
                    }
                    catch (Exception itemEx)
                    {
                        _logger.ErrorException("An error occurred while processing feed item", itemEx);
                    }
                }
            }
            catch (Exception feedEx)
            {
                _logger.ErrorException("An error occurred while processing feed", feedEx);
            }

            return result;
        }

        /// <summary>
        /// Parses the RSS feed item
        /// </summary>
        /// <param name = "item">RSS feed item to parse</param>
        /// <returns>Detailed episode info, after the indexer-specific <see cref="CustomParser"/> has run; may be null if the title could not be parsed</returns>
        public EpisodeParseResult ParseFeed(SyndicationItem item)
        {
            var episodeParseResult = Parser.ParseTitle(item.Title.Text);
            return CustomParser(item, episodeParseResult);
        }

        /// <summary>
        /// Converts a series title into a '+'-separated search query:
        /// every run of non-word characters becomes a single '+', and
        /// leading/trailing '+' or spaces are trimmed.
        /// </summary>
        /// <param name="title">Series title to convert</param>
        /// <returns>Query-safe title, e.g. "The Office (US)" becomes "The+Office+US"</returns>
        public static string GetQueryTitle(string title)
        {
            var cleanTitle = TitleSearchRegex.Replace(title, "+").Trim('+', ' ');

            //remove any repeating +s
            return RepeatingPlusRegex.Replace(cleanTitle, "+");
        }
    }
}