using System.IO.Compression;
using System.Net;
using System.Xml.Linq;
using Logging;

namespace Tranga;

/// <summary>
/// Base-Class for all Connectors
/// Provides some methods to be used by all Connectors, as well as a DownloadClient
/// </summary>
public abstract class Connector
{
    internal string downloadLocation { get; } //Location of local files
    protected DownloadClient downloadClient { get; }
    protected Logger? logger;

    protected Connector(string downloadLocation, uint downloadDelay, Logger? logger)
    {
        this.downloadLocation = downloadLocation;
        this.downloadClient = new DownloadClient(downloadDelay);
        this.logger = logger;
    }

    public abstract string name { get; } //Name of the Connector (e.g. Website)

    /// <summary>
    /// Returns all Publications matching the given string.
    /// If the string is empty or null, returns all Publications of the Connector.
    /// </summary>
    /// <param name="publicationTitle">Search-Query</param>
    /// <returns>Publications matching the query</returns>
    public abstract Publication[] GetPublications(string publicationTitle = "");

    /// <summary>
    /// Returns all Chapters of the publication in the provided language.
    /// If the language is empty or null, returns all Chapters in all Languages.
    /// </summary>
    /// <param name="publication">Publication to get Chapters for</param>
    /// <param name="language">Language of the Chapters</param>
    /// <returns>Array of Chapters matching Publication and Language</returns>
    public abstract Chapter[] GetChapters(Publication publication, string language = "");

    /// <summary>
    /// Retrieves the Chapter (+Images) from the website.
    /// Should later call DownloadChapterImages to retrieve the individual Images of the Chapter.
    /// </summary>
    /// <param name="publication">Publication that contains the Chapter</param>
    /// <param name="chapter">Chapter with Images to retrieve</param>
    public abstract void DownloadChapter(Publication publication, Chapter chapter);

    /// <summary>
    /// Retrieves the Cover from the Website
    /// </summary>
    /// <param name="publication">Publication to retrieve Cover for</param>
    public abstract void DownloadCover(Publication publication);

    /// <summary>
    /// Saves the series-info to series.json in the Publication Folder
    /// </summary>
    /// <param name="publication">Publication to save series.json for</param>
    public void SaveSeriesInfo(Publication publication)
    {
        logger?.WriteLine(this.GetType().ToString(), $"Saving series.json for {publication.sortName}");
        //Check if Publication already has a Folder and a series.json
        string publicationFolder = Path.Join(downloadLocation, publication.folderName);
        if (!Directory.Exists(publicationFolder))
            Directory.CreateDirectory(publicationFolder);

        string seriesInfoPath = Path.Join(publicationFolder, "series.json");
        if (!File.Exists(seriesInfoPath))
            File.WriteAllText(seriesInfoPath, publication.GetSeriesInfoJson());
    }

    /// <summary>
    /// Creates a string containing XML of publication and chapter.
    /// See ComicInfo.xml
    /// </summary>
    /// <returns>XML-string</returns>
    protected static string CreateComicInfo(Publication publication, Chapter chapter, Logger? logger)
    {
        logger?.WriteLine("Connector", $"Creating ComicInfo.Xml for {publication.sortName} Chapter {chapter.volumeNumber} {chapter.chapterNumber}");
        XElement comicInfo = new XElement("ComicInfo",
            new XElement("Tags", string.Join(',', publication.tags)),
            new XElement("LanguageISO", publication.originalLanguage),
            new XElement("Title", chapter.name),
            new XElement("Writer", publication.author),
            new XElement("Volume", chapter.volumeNumber),
            new XElement("Number", chapter.chapterNumber) //TODO check if this is correct at some point
        );
        return comicInfo.ToString();
    }
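
    // Illustrative sketch of the ComicInfo XML produced above (element values are
    // placeholders, not taken from a real Publication/Chapter):
    // <ComicInfo>
    //   <Tags>tag1,tag2</Tags>
    //   <LanguageISO>ja</LanguageISO>
    //   <Title>Chapter Title</Title>
    //   <Writer>Author Name</Writer>
    //   <Volume>1</Volume>
    //   <Number>12</Number>
    // </ComicInfo>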

    /// <summary>
    /// Checks if a chapter-archive is already present
    /// </summary>
    /// <returns>true if chapter is present</returns>
    public bool ChapterIsDownloaded(Publication publication, Chapter chapter)
    {
        return File.Exists(CreateFullFilepath(publication, chapter));
    }

    /// <summary>
    /// Creates full file path of chapter-archive
    /// </summary>
    /// <returns>Filepath</returns>
    protected string CreateFullFilepath(Publication publication, Chapter chapter)
    {
        return Path.Join(downloadLocation, publication.folderName, $"{chapter.fileName}.cbz");
    }
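
    // Example of the resulting path (folder and file names depend on the publication
    // and chapter, shown here as placeholders):
    //   <downloadLocation>/<publication.folderName>/<chapter.fileName>.cbz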

    /// <summary>
    /// Downloads Image from URL and saves it to the given path (incl. fileName)
    /// </summary>
    /// <param name="imageUrl"></param>
    /// <param name="fullPath"></param>
    /// <param name="downloadClient">DownloadClient of the connector</param>
    protected static void DownloadImage(string imageUrl, string fullPath, DownloadClient downloadClient)
    {
        DownloadClient.RequestResult requestResult = downloadClient.MakeRequest(imageUrl);
        byte[] buffer = new byte[requestResult.result.Length];
        requestResult.result.ReadExactly(buffer, 0, buffer.Length);
        File.WriteAllBytes(fullPath, buffer);
    }

    /// <summary>
    /// Downloads all Images from URLs, Compresses to zip (cbz) and saves.
    /// </summary>
    /// <param name="imageUrls">List of URLs to download Images from</param>
    /// <param name="saveArchiveFilePath">Full path to save archive to (including the file ending .cbz)</param>
    /// <param name="downloadClient">DownloadClient of the connector</param>
    /// <param name="comicInfoPath">Path of the generated Chapter ComicInfo.xml, if it was generated</param>
    protected static void DownloadChapterImages(string[] imageUrls, string saveArchiveFilePath, DownloadClient downloadClient, Logger? logger, string? comicInfoPath = null)
    {
        logger?.WriteLine("Connector", "Downloading Images");
        //Check if Publication Directory already exists
        string directoryPath = Path.GetDirectoryName(saveArchiveFilePath)!;
        if (!Directory.Exists(directoryPath))
            Directory.CreateDirectory(directoryPath);

        if (File.Exists(saveArchiveFilePath)) //Don't download twice.
            return;

        //Create a temporary folder to store images
        string tempFolder = Directory.CreateTempSubdirectory().FullName;

        int chapter = 0;
        //Download all Images to temporary Folder
        foreach (string imageUrl in imageUrls)
        {
            string[] split = imageUrl.Split('.');
            string extension = split[^1];
            DownloadImage(imageUrl, Path.Join(tempFolder, $"{chapter++}.{extension}"), downloadClient);
        }

        if (comicInfoPath is not null)
            File.Copy(comicInfoPath, Path.Join(tempFolder, "ComicInfo.xml"));

        logger?.WriteLine("Connector", "Creating archive");
        //ZIP-it and ship-it
        ZipFile.CreateFromDirectory(tempFolder, saveArchiveFilePath);
        Directory.Delete(tempFolder, true); //Cleanup
    }
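
    // For reference: the resulting .cbz is a plain zip containing the images named in
    // download order (0.<ext>, 1.<ext>, ...) plus ComicInfo.xml when comicInfoPath was supplied.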

    protected class DownloadClient
    {
        private readonly TimeSpan _requestSpeed;
        private DateTime _lastRequest;
        private static readonly HttpClient Client = new();

        /// <summary>
        /// Creates a rate-limited wrapper around the shared HttpClient
        /// </summary>
        /// <param name="delay">minimum delay between requests in milliseconds (to avoid spam)</param>
        public DownloadClient(uint delay)
        {
            _requestSpeed = TimeSpan.FromMilliseconds(delay);
            _lastRequest = DateTime.Now.Subtract(_requestSpeed);
        }

        /// <summary>
        /// Request Webpage
        /// </summary>
        /// <param name="url"></param>
        /// <returns>RequestResult with StatusCode and Stream of received data</returns>
        public RequestResult MakeRequest(string url)
        {
            //Wait until the minimum delay since the last request has passed
            while ((DateTime.Now - _lastRequest) < _requestSpeed)
                Thread.Sleep(10);
            _lastRequest = DateTime.Now;

            HttpRequestMessage requestMessage = new(HttpMethod.Get, url);
            HttpResponseMessage response = Client.Send(requestMessage);
            Stream resultStream = response.IsSuccessStatusCode ? response.Content.ReadAsStream() : Stream.Null;
            return new RequestResult(response.StatusCode, resultStream);
        }

        public struct RequestResult
        {
            public HttpStatusCode statusCode { get; }
            public Stream result { get; }

            public RequestResult(HttpStatusCode statusCode, Stream result)
            {
                this.statusCode = statusCode;
                this.result = result;
            }
        }
    }
}
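
// Hypothetical skeleton (illustration only, not part of the original file): shows how a
// concrete Connector is expected to plug into the members above. "ExampleConnector" and
// "examplesite" are placeholder names.
public class ExampleConnector : Connector
{
    public ExampleConnector(string downloadLocation, Logger? logger)
        : base(downloadLocation, downloadDelay: 1500, logger) //at least 1500 ms between requests
    {
    }

    public override string name => "examplesite";

    public override Publication[] GetPublications(string publicationTitle = "")
    {
        //Query the site's search endpoint via downloadClient.MakeRequest(...) and map the
        //response onto Publication objects.
        return Array.Empty<Publication>();
    }

    public override Chapter[] GetChapters(Publication publication, string language = "")
    {
        //Fetch the chapter list for the publication, filtered by language.
        return Array.Empty<Chapter>();
    }

    public override void DownloadChapter(Publication publication, Chapter chapter)
    {
        //Resolve the chapter's image URLs, then hand them to the base helper, e.g.:
        //DownloadChapterImages(imageUrls, CreateFullFilepath(publication, chapter), downloadClient, logger);
    }

    public override void DownloadCover(Publication publication)
    {
        //Fetch the cover image URL and save it via DownloadImage(...).
    }
}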