2023-07-16 17:47:00 +02:00
|
|
|
|
using System.Globalization;
|
|
|
|
|
using System.IO.Compression;
|
2023-06-20 15:46:54 +02:00
|
|
|
|
using System.Net;
|
2023-06-01 18:28:58 +02:00
|
|
|
|
using System.Runtime.InteropServices;
|
2023-06-03 22:25:24 +02:00
|
|
|
|
using System.Text.RegularExpressions;
|
2023-05-20 22:10:24 +02:00
|
|
|
|
using Logging;
|
2023-06-05 00:35:57 +02:00
|
|
|
|
using Tranga.TrangaTasks;
|
2023-06-01 18:28:58 +02:00
|
|
|
|
using static System.IO.UnixFileMode;
|
2023-05-18 12:26:15 +02:00
|
|
|
|
|
|
|
|
|
namespace Tranga;
|
|
|
|
|
|
2023-05-19 19:52:24 +02:00
|
|
|
|
/// <summary>
|
|
|
|
|
/// Base-Class for all Connectors
|
|
|
|
|
/// Provides some methods to be used by all Connectors, as well as a DownloadClient
|
|
|
|
|
/// </summary>
|
2023-05-18 12:26:15 +02:00
|
|
|
|
public abstract class Connector
{
    protected TrangaSettings settings { get; }
    protected DownloadClient downloadClient { get; init; } = null!;
    protected readonly Logger? logger;

    protected Connector(TrangaSettings settings, Logger? logger = null)
    {
        this.settings = settings;
        this.logger = logger;
        //Make sure the cover-cache folder exists before any cover is written to it
        if (!Directory.Exists(settings.coverImageCache))
            Directory.CreateDirectory(settings.coverImageCache);
    }

    public abstract string name { get; } //Name of the Connector (e.g. Website)

    /// <summary>
    /// Returns all Publications matching the query and merges them into the provided collection.
    /// </summary>
    /// <param name="publicationCollection">Collection that found Publications are added to</param>
    /// <param name="publicationTitle">Search-Query</param>
    /// <returns>Publications matching the query</returns>
    public Publication[] GetPublications(ref HashSet<Publication> publicationCollection, string publicationTitle = "")
    {
        Publication[] ret = GetPublicationsInternal(publicationTitle);
        foreach (Publication p in ret)
            publicationCollection.Add(p);
        return ret;
    }

    /// <summary>
    /// Returns all Publications with the given string.
    /// If the string is empty or null, returns all Publication of the Connector
    /// </summary>
    /// <param name="publicationTitle">Search-Query</param>
    /// <returns>Publications matching the query</returns>
    protected abstract Publication[] GetPublicationsInternal(string publicationTitle = "");

    /// <summary>
    /// Returns all Chapters of the publication in the provided language.
    /// If the language is empty or null, returns all Chapters in all Languages.
    /// </summary>
    /// <param name="publication">Publication to get Chapters for</param>
    /// <param name="language">Language of the Chapters</param>
    /// <returns>Array of Chapters matching Publication and Language</returns>
    public abstract Chapter[] GetChapters(Publication publication, string language = "");

    /// <summary>
    /// Updates the available Chapters of a Publication
    /// </summary>
    /// <param name="publication">Publication to check</param>
    /// <param name="language">Language to receive chapters for</param>
    /// <param name="collection">Collection the Publication is registered in</param>
    /// <returns>List of Chapters that were previously not in collection</returns>
    public List<Chapter> GetNewChaptersList(Publication publication, string language, ref HashSet<Publication> collection)
    {
        Chapter[] newChapters = this.GetChapters(publication, language);
        collection.Add(publication);
        //chapterNumber is stored as a string; parse it with '.' as decimal separator regardless of OS culture
        NumberFormatInfo decimalPoint = new (){ NumberDecimalSeparator = "." };
        logger?.WriteLine(this.GetType().ToString(), "Checking for duplicates");
        List<Chapter> newChaptersList = newChapters.Where(nChapter =>
            float.Parse(nChapter.chapterNumber, decimalPoint) > publication.ignoreChaptersBelow &&
            !nChapter.CheckChapterIsDownloaded(settings.downloadLocation)).ToList();
        logger?.WriteLine(this.GetType().ToString(), $"{newChaptersList.Count} new chapters.");
        return newChaptersList;
    }

    /// <summary>
    /// Selects Chapters of a Publication based on a user search-term.
    /// Supported forms: "v[ol[ume]] N", "c[h[apter]] N", both combined, a bare index
    /// ("5"), a bare index-range ("2-7", slices the available chapters by position),
    /// or "a"/"all" for every chapter.
    /// </summary>
    /// <param name="publication">Publication to select Chapters from</param>
    /// <param name="searchTerm">User-provided selection string</param>
    /// <param name="language">Language of the Chapters; defaults to "en"</param>
    /// <returns>Chapters matching the selection; empty if nothing matched</returns>
    public Chapter[] SelectChapters(Publication publication, string searchTerm, string? language = null)
    {
        Chapter[] availableChapters = this.GetChapters(publication, language??"en");
        Regex volumeRegex = new ("((v(ol)*(olume)*)+ *([0-9]+(-[0-9]+)?){1})", RegexOptions.IgnoreCase);
        Regex chapterRegex = new ("((c(h)*(hapter)*)+ *([0-9]+(-[0-9]+)?){1})", RegexOptions.IgnoreCase);
        Regex singleResultRegex = new("([0-9]+)", RegexOptions.IgnoreCase);
        Regex rangeResultRegex = new("([0-9]+(-[0-9]+))", RegexOptions.IgnoreCase);
        //Anchored: previously "a(ll)?" matched ANY term containing the letter 'a'
        //(e.g. "banana"), selecting every chapter by accident.
        Regex allRegex = new("^a(ll)?$", RegexOptions.IgnoreCase);
        if (volumeRegex.IsMatch(searchTerm) && chapterRegex.IsMatch(searchTerm))
        {
            string volume = singleResultRegex.Match(volumeRegex.Match(searchTerm).Value).Value;
            string chapter = singleResultRegex.Match(chapterRegex.Match(searchTerm).Value).Value;
            return availableChapters.Where(aCh => aCh.volumeNumber is not null &&
                                                  aCh.volumeNumber.Equals(volume, StringComparison.InvariantCultureIgnoreCase) &&
                                                  aCh.chapterNumber.Equals(chapter, StringComparison.InvariantCultureIgnoreCase))
                .ToArray();
        }
        else if (volumeRegex.IsMatch(searchTerm))
        {
            string volume = volumeRegex.Match(searchTerm).Value;
            if (rangeResultRegex.IsMatch(volume))
            {
                string range = rangeResultRegex.Match(volume).Value;
                int start = Convert.ToInt32(range.Split('-')[0]);
                int end = Convert.ToInt32(range.Split('-')[1]);
                return availableChapters.Where(aCh => aCh.volumeNumber is not null &&
                                                      Convert.ToInt32(aCh.volumeNumber) >= start &&
                                                      Convert.ToInt32(aCh.volumeNumber) <= end).ToArray();
            }
            else if (singleResultRegex.IsMatch(volume))
            {
                string volumeNumber = singleResultRegex.Match(volume).Value;
                return availableChapters.Where(aCh =>
                    aCh.volumeNumber is not null &&
                    aCh.volumeNumber.Equals(volumeNumber, StringComparison.InvariantCultureIgnoreCase)).ToArray();
            }
        }
        else if (chapterRegex.IsMatch(searchTerm))
        {
            //Fixed: previously extracted with volumeRegex, so "chapter 12" style
            //queries read the wrong (empty) match text.
            string chapter = chapterRegex.Match(searchTerm).Value;
            if (rangeResultRegex.IsMatch(chapter))
            {
                string range = rangeResultRegex.Match(chapter).Value;
                int start = Convert.ToInt32(range.Split('-')[0]);
                int end = Convert.ToInt32(range.Split('-')[1]);
                //NOTE(review): Convert.ToInt32 throws on decimal chapterNumbers ("10.5") — TODO confirm inputs
                return availableChapters.Where(aCh => Convert.ToInt32(aCh.chapterNumber) >= start &&
                                                      Convert.ToInt32(aCh.chapterNumber) <= end).ToArray();
            }
            else if (singleResultRegex.IsMatch(chapter))
            {
                string chapterNumber = singleResultRegex.Match(chapter).Value;
                return availableChapters.Where(aCh =>
                    aCh.chapterNumber.Equals(chapterNumber, StringComparison.InvariantCultureIgnoreCase)).ToArray();
            }
        }
        else
        {
            if (rangeResultRegex.IsMatch(searchTerm))
            {
                //Bare range slices the available chapters by array-index, not by chapter-number
                int start = Convert.ToInt32(searchTerm.Split('-')[0]);
                int end = Convert.ToInt32(searchTerm.Split('-')[1]);
                return availableChapters[start..(end + 1)];
            }
            else if(singleResultRegex.IsMatch(searchTerm))
                return new [] { availableChapters[Convert.ToInt32(searchTerm)] };
            else if (allRegex.IsMatch(searchTerm))
                return availableChapters;
        }
        return Array.Empty<Chapter>();
    }

    /// <summary>
    /// Retrieves the Chapter (+Images) from the website.
    /// Should later call DownloadChapterImages to retrieve the individual Images of the Chapter and create .cbz archive.
    /// </summary>
    /// <param name="publication">Publication that contains Chapter</param>
    /// <param name="chapter">Chapter with Images to retrieve</param>
    /// <param name="parentTask">Will be used for progress-tracking</param>
    /// <param name="cancellationToken">Cancels the download when requested</param>
    public abstract HttpStatusCode DownloadChapter(Publication publication, Chapter chapter, DownloadChapterTask parentTask, CancellationToken? cancellationToken = null);

    /// <summary>
    /// Copies the already downloaded cover from cache to downloadLocation
    /// </summary>
    /// <param name="publication">Publication to retrieve Cover for</param>
    /// <param name="settings">TrangaSettings</param>
    public void CopyCoverFromCacheToDownloadLocation(Publication publication, TrangaSettings settings)
    {
        logger?.WriteLine(this.GetType().ToString(), $"Cloning cover {publication.sortName} -> {publication.internalId}");
        //Check if Publication already has a Folder and cover
        string publicationFolder = publication.CreatePublicationFolder(settings.downloadLocation);
        DirectoryInfo dirInfo = new (publicationFolder);
        if (dirInfo.EnumerateFiles().Any(info => info.Name.Contains("cover", StringComparison.InvariantCultureIgnoreCase)))
        {
            logger?.WriteLine(this.GetType().ToString(), $"Cover exists {publication.sortName}");
            return;
        }

        string fileInCache = Path.Join(settings.coverImageCache, publication.coverFileNameInCache);
        string newFilePath = Path.Join(publicationFolder, $"cover.{Path.GetFileName(fileInCache).Split('.')[^1]}" );
        logger?.WriteLine(this.GetType().ToString(), $"Cloning cover {fileInCache} -> {newFilePath}");
        File.Copy(fileInCache, newFilePath, true);
        //Tranga may run in a container; make the file readable/writable for host users too
        if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
            File.SetUnixFileMode(newFilePath, GroupRead | GroupWrite | OtherRead | OtherWrite | UserRead | UserWrite);
    }

    /// <summary>
    /// Downloads Image from URL and saves it to the given path(incl. fileName)
    /// </summary>
    /// <param name="imageUrl">URL of the Image</param>
    /// <param name="fullPath">Full file-path (incl. fileName) to save the Image to</param>
    /// <param name="requestType">RequestType for Rate-Limit</param>
    /// <param name="referrer">referrer used in html request header</param>
    /// <returns>StatusCode of the web-request</returns>
    private HttpStatusCode DownloadImage(string imageUrl, string fullPath, byte requestType, string? referrer = null)
    {
        DownloadClient.RequestResult requestResult = downloadClient.MakeRequest(imageUrl, requestType, referrer);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.result == Stream.Null)
            return requestResult.statusCode;
        //NOTE(review): relies on result.Length — assumes the response stream is seekable; verify for streamed responses
        byte[] buffer = new byte[requestResult.result.Length];
        requestResult.result.ReadExactly(buffer, 0, buffer.Length);
        File.WriteAllBytes(fullPath, buffer);
        return requestResult.statusCode;
    }

    /// <summary>
    /// Downloads all Images from URLs, Compresses to zip(cbz) and saves.
    /// </summary>
    /// <param name="imageUrls">List of URLs to download Images from</param>
    /// <param name="saveArchiveFilePath">Full path to save archive to (without file ending .cbz)</param>
    /// <param name="requestType">RequestType for RateLimits</param>
    /// <param name="parentTask">Used for progress tracking</param>
    /// <param name="comicInfoPath">Path of the generate Chapter ComicInfo.xml, if it was generated</param>
    /// <param name="referrer">Used in http request header</param>
    /// <param name="cancellationToken">Aborts between images; returns RequestTimeout when cancelled</param>
    protected HttpStatusCode DownloadChapterImages(string[] imageUrls, string saveArchiveFilePath, byte requestType, DownloadChapterTask parentTask, string? comicInfoPath = null, string? referrer = null, CancellationToken? cancellationToken = null)
    {
        if (cancellationToken?.IsCancellationRequested ?? false)
            return HttpStatusCode.RequestTimeout;
        logger?.WriteLine("Connector", $"Downloading Images for {saveArchiveFilePath}");
        //Check if Publication Directory already exists
        string directoryPath = Path.GetDirectoryName(saveArchiveFilePath)!;
        if (!Directory.Exists(directoryPath))
            Directory.CreateDirectory(directoryPath);

        if (File.Exists(saveArchiveFilePath)) //Don't download twice.
            return HttpStatusCode.OK;

        //Create a temporary folder to store images
        string tempFolder = Directory.CreateTempSubdirectory().FullName;

        int chapter = 0;
        //Download all Images to temporary Folder
        foreach (string imageUrl in imageUrls)
        {
            string[] split = imageUrl.Split('.');
            string extension = split[^1];
            logger?.WriteLine("Connector", $"Downloading Image {chapter + 1:000}/{imageUrls.Length:000} {parentTask.publication.sortName} {parentTask.publication.internalId} Vol.{parentTask.chapter.volumeNumber} Ch.{parentTask.chapter.chapterNumber} {parentTask.progress:P2}");
            HttpStatusCode status = DownloadImage(imageUrl, Path.Join(tempFolder, $"{chapter++}.{extension}"), requestType, referrer);
            //Abort the whole chapter on the first failed image; the partial temp folder is abandoned
            if ((int)status < 200 || (int)status >= 300)
                return status;
            parentTask.IncrementProgress(1.0 / imageUrls.Length);
            if (cancellationToken?.IsCancellationRequested ?? false)
                return HttpStatusCode.RequestTimeout;
        }

        if(comicInfoPath is not null)
            File.Copy(comicInfoPath, Path.Join(tempFolder, "ComicInfo.xml"));

        logger?.WriteLine("Connector", $"Creating archive {saveArchiveFilePath}");
        //ZIP-it and ship-it
        ZipFile.CreateFromDirectory(tempFolder, saveArchiveFilePath);
        if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
            File.SetUnixFileMode(saveArchiveFilePath, GroupRead | GroupWrite | OtherRead | OtherWrite | UserRead | UserWrite);
        Directory.Delete(tempFolder, true); //Cleanup
        return HttpStatusCode.OK;
    }

    /// <summary>
    /// Downloads a cover-image to the cover-cache (unless already cached).
    /// </summary>
    /// <param name="url">URL of the cover-image</param>
    /// <param name="requestType">RequestType for RateLimits</param>
    /// <returns>Filename of the image inside the cover-cache</returns>
    protected string SaveCoverImageToCache(string url, byte requestType)
    {
        string[] split = url.Split('/');
        string filename = split[^1];
        string saveImagePath = Path.Join(settings.coverImageCache, filename);

        if (File.Exists(saveImagePath))
            return filename;

        DownloadClient.RequestResult coverResult = downloadClient.MakeRequest(url, requestType);
        using MemoryStream ms = new();
        coverResult.result.CopyTo(ms);
        File.WriteAllBytes(saveImagePath, ms.ToArray());
        logger?.WriteLine(this.GetType().ToString(), $"Saving image to {saveImagePath}");
        return filename;
    }

    /// <summary>
    /// Rate-limited HTTP client shared by all Connectors.
    /// </summary>
    protected class DownloadClient
    {
        //Single shared HttpClient (never create one per request)
        private static readonly HttpClient Client = new()
        {
            Timeout = TimeSpan.FromSeconds(60)
        };

        private readonly Dictionary<byte, DateTime> _lastExecutedRateLimit;
        private readonly Dictionary<byte, TimeSpan> _rateLimit;
        // ReSharper disable once InconsistentNaming
        private readonly Logger? logger;

        /// <summary>
        /// Creates a httpClient
        /// </summary>
        /// <param name="rateLimitRequestsPerMinute">Rate limits for requests. byte is RequestType, int maximum requests per minute for RequestType</param>
        /// <param name="logger"></param>
        public DownloadClient(Dictionary<byte, int> rateLimitRequestsPerMinute, Logger? logger)
        {
            this.logger = logger;
            _lastExecutedRateLimit = new();
            _rateLimit = new();
            //Convert "n requests per minute" into a minimum delay between requests
            foreach(KeyValuePair<byte, int> limit in rateLimitRequestsPerMinute)
                _rateLimit.Add(limit.Key, TimeSpan.FromMinutes(1).Divide(limit.Value));
        }

        /// <summary>
        /// Request Webpage
        /// </summary>
        /// <param name="url"></param>
        /// <param name="requestType">For RateLimits: Same Endpoints use same type</param>
        /// <param name="referrer">Used in http request header</param>
        /// <returns>RequestResult with StatusCode and Stream of received data</returns>
        public RequestResult MakeRequest(string url, byte requestType, string? referrer = null)
        {
            if (_rateLimit.TryGetValue(requestType, out TimeSpan value))
                //First request of this type: backdate the timestamp so it runs immediately
                _lastExecutedRateLimit.TryAdd(requestType, DateTime.Now.Subtract(value));
            else
            {
                logger?.WriteLine(this.GetType().ToString(), "RequestType not configured for rate-limit.");
                return new RequestResult(HttpStatusCode.NotAcceptable, Stream.Null);
            }

            TimeSpan rateLimitTimeout = _rateLimit[requestType]
                .Subtract(DateTime.Now.Subtract(_lastExecutedRateLimit[requestType]));

            if(rateLimitTimeout > TimeSpan.Zero)
                Thread.Sleep(rateLimitTimeout);

            //Retry indefinitely on transport errors, backing off 2x the rate-limit interval
            HttpResponseMessage? response = null;
            while (response is null)
            {
                try
                {
                    HttpRequestMessage requestMessage = new(HttpMethod.Get, url);
                    if(referrer is not null)
                        requestMessage.Headers.Referrer = new Uri(referrer);
                    _lastExecutedRateLimit[requestType] = DateTime.Now;
                    response = Client.Send(requestMessage);
                }
                catch (HttpRequestException e)
                {
                    logger?.WriteLine(this.GetType().ToString(), e.Message);
                    logger?.WriteLine(this.GetType().ToString(), $"Waiting {_rateLimit[requestType] * 2}... Retrying.");
                    Thread.Sleep(_rateLimit[requestType] * 2);
                }
            }
            if (!response.IsSuccessStatusCode)
            {
                logger?.WriteLine(this.GetType().ToString(), $"Request-Error {response.StatusCode}: {response.ReasonPhrase}");
                return new RequestResult(response.StatusCode, Stream.Null);
            }

            // Request has been redirected to another page. For example, it redirects directly to the results when there is only 1 result
            if(response.RequestMessage is not null && response.RequestMessage.RequestUri is not null)
            {
                return new RequestResult(response.StatusCode, response.Content.ReadAsStream(), true, response.RequestMessage.RequestUri.AbsoluteUri);
            }

            return new RequestResult(response.StatusCode, response.Content.ReadAsStream());
        }

        /// <summary>
        /// Result of a MakeRequest call: status, body stream and redirect info.
        /// </summary>
        public struct RequestResult
        {
            public HttpStatusCode statusCode { get; }
            public Stream result { get; }
            public bool HasBeenRedirected { get; }
            public string? RedirectedToUrl { get; }

            public RequestResult(HttpStatusCode statusCode, Stream result)
            {
                this.statusCode = statusCode;
                this.result = result;
            }

            public RequestResult(HttpStatusCode statusCode, Stream result, bool hasBeenRedirected, string redirectedTo)
                : this(statusCode, result)
            {
                this.HasBeenRedirected = hasBeenRedirected;
                RedirectedToUrl = redirectedTo;
            }
        }
    }
}
|