2023-05-18 18:20:37 +02:00
|
|
|
|
using System.Globalization;
|
2023-05-19 18:11:14 +02:00
|
|
|
|
using System.Net;
|
2023-05-18 18:20:37 +02:00
|
|
|
|
using System.Text.Json;
|
2023-05-18 15:48:54 +02:00
|
|
|
|
using System.Text.Json.Nodes;
|
2023-05-20 22:10:24 +02:00
|
|
|
|
using Logging;
|
2023-05-18 15:48:54 +02:00
|
|
|
|
|
|
|
|
|
namespace Tranga.Connectors;
|
|
|
|
|
public class MangaDex : Connector
{
    // Connector display-name; assigned once in the constructor, never changed.
    public override string name { get; }

    // One rate-limit bucket per MangaDex API family. The byte value doubles as
    // the key of the DownloadClient rate-limit dictionary built below.
    private enum RequestType : byte
    {
        Manga,
        Feed,
        AtHomeServer,
        CoverUrl,
        Author,
    }

    public MangaDex(string downloadLocation, Logger? logger) : base(downloadLocation, logger)
    {
        name = "MangaDex";
        // Per-RequestType rate-limit values. Units are interpreted by
        // DownloadClient (presumably requests per minute — confirm there).
        this.downloadClient = new DownloadClient(new Dictionary<byte, int>()
        {
            {(byte)RequestType.Manga, 250},
            {(byte)RequestType.Feed, 250},
            {(byte)RequestType.AtHomeServer, 40},
            {(byte)RequestType.CoverUrl, 250},
            {(byte)RequestType.Author, 250}
        }, logger);
    }

    // Searches MangaDex for publications matching publicationTitle (empty string
    // returns everything), paging through /manga until all pages are consumed.
    public override Publication[] GetPublications(string publicationTitle = "")
    {
        logger?.WriteLine(this.GetType().ToString(), $"Getting Publications (title={publicationTitle})");
        const int limit = 100; //How many values we want returned at once
        int offset = 0; //"Page"
        int total = int.MaxValue; //How many total results there are; updated on first request
        HashSet<Publication> publications = new();
        while (offset < total) //As long as we haven't requested all "Pages"
        {
            //Request next Page. The title is escaped so spaces/&/# in a search
            //string cannot corrupt the query (FIX: was interpolated verbatim).
            DownloadClient.RequestResult requestResult =
                downloadClient.MakeRequest(
                    $"https://api.mangadex.org/manga?limit={limit}&title={Uri.EscapeDataString(publicationTitle)}&offset={offset}", (byte)RequestType.Manga);
            if (requestResult.statusCode != HttpStatusCode.OK)
                break;
            JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);

            offset += limit;
            if (result is null)
                break;

            total = result["total"]!.GetValue<int>(); //Update the total number of Publications

            JsonArray mangaInResult = result["data"]!.AsArray(); //Manga-data-Array
            //Loop over each Manga and extract information from the JSON
            foreach (JsonNode? mangaNode in mangaInResult)
            {
                JsonObject manga = (JsonObject)mangaNode!;
                JsonObject attributes = manga["attributes"]!.AsObject();

                string publicationId = manga["id"]!.GetValue<string>();

                //Prefer the English title; otherwise fall back to the first available language.
                string title = attributes["title"]!.AsObject().ContainsKey("en") && attributes["title"]!["en"] is not null
                    ? attributes["title"]!["en"]!.GetValue<string>()
                    : attributes["title"]![((IDictionary<string, JsonNode?>)attributes["title"]!.AsObject()).Keys.First()]!.GetValue<string>();

                //Description is only taken when an English one exists.
                string? description = attributes["description"]!.AsObject().ContainsKey("en") && attributes["description"]!["en"] is not null
                    ? attributes["description"]!["en"]!.GetValue<string?>()
                    : null;

                //altTitles is an array of single-entry {language: title} objects.
                JsonArray altTitlesObject = attributes["altTitles"]!.AsArray();
                Dictionary<string, string> altTitlesDict = new();
                foreach (JsonNode? altTitleNode in altTitlesObject)
                {
                    JsonObject altTitleObject = (JsonObject)altTitleNode!;
                    string key = ((IDictionary<string, JsonNode?>)altTitleObject).Keys.ToArray()[0];
                    altTitlesDict.TryAdd(key, altTitleObject[key]!.GetValue<string>()); //TryAdd: duplicate languages are ignored
                }

                //Collect English tag names only.
                JsonArray tagsObject = attributes["tags"]!.AsArray();
                HashSet<string> tags = new();
                foreach (JsonNode? tagNode in tagsObject)
                {
                    JsonObject tagObject = (JsonObject)tagNode!;
                    if(tagObject["attributes"]!["name"]!.AsObject().ContainsKey("en"))
                        tags.Add(tagObject["attributes"]!["name"]!["en"]!.GetValue<string>());
                }

                //cover_art / author ids come from the relationships array when present.
                string? posterId = null;
                string? authorId = null;
                if (manga.ContainsKey("relationships") && manga["relationships"] is not null)
                {
                    JsonArray relationships = manga["relationships"]!.AsArray();
                    //FIX: FirstOrDefault returns null when the relationship is missing;
                    //the previous null-forgiving dereference (!["id"]!) threw a
                    //NullReferenceException for manga without cover_art or author.
                    posterId = relationships
                        .FirstOrDefault(relationship => relationship!["type"]!.GetValue<string>() == "cover_art")
                        ?["id"]?.GetValue<string>();
                    authorId = relationships
                        .FirstOrDefault(relationship => relationship!["type"]!.GetValue<string>() == "author")
                        ?["id"]?.GetValue<string>();
                }

                string? coverUrl = GetCoverUrl(publicationId, posterId);
                string? coverBase64 = null;
                if (coverUrl is not null)
                {
                    //Download the cover image and embed it as base64.
                    //NOTE(review): fetched under the AtHomeServer rate-limit bucket even
                    //though the image is served from uploads.mangadex.org — confirm intended.
                    DownloadClient.RequestResult coverResult = downloadClient.MakeRequest(coverUrl, (byte)RequestType.AtHomeServer);
                    using MemoryStream ms = new();
                    coverResult.result.CopyTo(ms);
                    byte[] imageBytes = ms.ToArray();
                    coverBase64 = Convert.ToBase64String(imageBytes);
                }

                string? author = GetAuthor(authorId);

                //External links (key -> url pairs) when the manga has any.
                Dictionary<string, string> linksDict = new();
                if (attributes.ContainsKey("links") && attributes["links"] is not null)
                {
                    JsonObject linksObject = attributes["links"]!.AsObject();
                    foreach (string key in ((IDictionary<string, JsonNode?>)linksObject).Keys)
                    {
                        linksDict.Add(key, linksObject[key]!.GetValue<string>());
                    }
                }

                int? year = attributes.ContainsKey("year") && attributes["year"] is not null
                    ? attributes["year"]!.GetValue<int?>()
                    : null;

                string? originalLanguage = attributes.ContainsKey("originalLanguage") && attributes["originalLanguage"] is not null
                    ? attributes["originalLanguage"]!.GetValue<string?>()
                    : null;

                string status = attributes["status"]!.GetValue<string>();

                Publication pub = new (
                    title,
                    author,
                    description,
                    altTitlesDict,
                    tags.ToArray(),
                    coverUrl,
                    coverBase64,
                    linksDict,
                    year,
                    originalLanguage,
                    status,
                    publicationId
                );
                publications.Add(pub); //Add Publication (Manga) to result
            }
        }

        return publications.ToArray();
    }

    // Returns all chapters of publication in the given translated language,
    // ordered ascending by chapter number.
    public override Chapter[] GetChapters(Publication publication, string language = "")
    {
        logger?.WriteLine(this.GetType().ToString(), $"Getting Chapters {publication.sortName} (language={language})");
        const int limit = 100; //How many values we want returned at once
        int offset = 0; //"Page"
        int total = int.MaxValue; //How many total results there are; updated on first request
        List<Chapter> chapters = new();
        //As long as we haven't requested all "Pages"
        while (offset < total)
        {
            //Request next "Page"
            DownloadClient.RequestResult requestResult =
                downloadClient.MakeRequest(
                    $"https://api.mangadex.org/manga/{publication.publicationId}/feed?limit={limit}&offset={offset}&translatedLanguage%5B%5D={language}", (byte)RequestType.Feed);
            if (requestResult.statusCode != HttpStatusCode.OK)
                break;
            JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);

            offset += limit;
            if (result is null)
                break;

            total = result["total"]!.GetValue<int>();
            JsonArray chaptersInResult = result["data"]!.AsArray();
            //Loop through all chapters in the result and extract information from the JSON
            foreach (JsonNode? jsonNode in chaptersInResult)
            {
                JsonObject chapter = (JsonObject)jsonNode!;
                JsonObject attributes = chapter["attributes"]!.AsObject();
                string chapterId = chapter["id"]!.GetValue<string>();

                string? title = attributes.ContainsKey("title") && attributes["title"] is not null
                    ? attributes["title"]!.GetValue<string>()
                    : null;

                string? volume = attributes.ContainsKey("volume") && attributes["volume"] is not null
                    ? attributes["volume"]!.GetValue<string>()
                    : null;

                string? chapterNum = attributes.ContainsKey("chapter") && attributes["chapter"] is not null
                    ? attributes["chapter"]!.GetValue<string>()
                    : null;

                chapters.Add(new Chapter(title, volume, chapterNum, chapterId));
            }
        }

        //Return chapters ordered by chapter number. "." is forced as the decimal
        //separator so parsing is independent of the host machine's culture.
        NumberFormatInfo chapterNumberFormatInfo = new()
        {
            NumberDecimalSeparator = "."
        };
        return chapters.OrderBy(chapter => Convert.ToSingle(chapter.chapterNumber, chapterNumberFormatInfo)).ToArray();
    }

    // Downloads all images of a single chapter plus a ComicInfo.xml metadata file.
    public override void DownloadChapter(Publication publication, Chapter chapter)
    {
        logger?.WriteLine(this.GetType().ToString(), $"Download Chapter {publication.sortName} {chapter.volumeNumber}-{chapter.chapterNumber}");
        //Request URLs for the chapter's images.
        //FIX: the URL previously ended in a stray apostrophe ("...forcePort443=false'").
        DownloadClient.RequestResult requestResult =
            downloadClient.MakeRequest($"https://api.mangadex.org/at-home/server/{chapter.url}?forcePort443=false", (byte)RequestType.AtHomeServer);
        if (requestResult.statusCode != HttpStatusCode.OK)
            return;
        JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
        if (result is null)
            return;

        string baseUrl = result["baseUrl"]!.GetValue<string>();
        string hash = result["chapter"]!["hash"]!.GetValue<string>();
        JsonArray imageFileNames = result["chapter"]!["data"]!.AsArray();
        //Loop through all image names and construct full URLs (imageUrl)
        HashSet<string> imageUrls = new();
        foreach (JsonNode? image in imageFileNames)
            imageUrls.Add($"{baseUrl}/data/{hash}/{image!.GetValue<string>()}");

        //Write ComicInfo metadata to a temp file so it can be bundled with the images.
        string comicInfoPath = Path.GetTempFileName();
        File.WriteAllText(comicInfoPath, CreateComicInfo(publication, chapter, logger));

        //Download Chapter-Images
        DownloadChapterImages(imageUrls.ToArray(), CreateFullFilepath(publication, chapter), downloadClient, (byte)RequestType.AtHomeServer, logger, comicInfoPath);
    }

    // Resolves the full cover-image URL for a publication, or null when posterId
    // is missing or any request/parse step fails.
    private string? GetCoverUrl(string publicationId, string? posterId)
    {
        if (posterId is null)
        {
            logger?.WriteLine(this.GetType().ToString(), $"No posterId");
            return null;
        }

        //Request information about where to download the cover from
        DownloadClient.RequestResult requestResult =
            downloadClient.MakeRequest($"https://api.mangadex.org/cover/{posterId}", (byte)RequestType.CoverUrl);
        if (requestResult.statusCode != HttpStatusCode.OK)
            return null;
        JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
        if (result is null)
            return null;

        string fileName = result["data"]!["attributes"]!["fileName"]!.GetValue<string>();

        string coverUrl = $"https://uploads.mangadex.org/covers/{publicationId}/{fileName}";
        return coverUrl;
    }

    // Resolves the author's display name from an author-id, or null when the id
    // is missing or the request/parse fails.
    private string? GetAuthor(string? authorId)
    {
        if (authorId is null)
            return null;

        DownloadClient.RequestResult requestResult =
            downloadClient.MakeRequest($"https://api.mangadex.org/author/{authorId}", (byte)RequestType.Author);
        if (requestResult.statusCode != HttpStatusCode.OK)
            return null;
        JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
        if (result is null)
            return null;

        string author = result["data"]!["attributes"]!["name"]!.GetValue<string>();
        return author;
    }

    // Downloads the cover image into the publication's folder as cover.<ext>,
    // skipping the download when a cover file already exists.
    public override void DownloadCover(Publication publication)
    {
        logger?.WriteLine(this.GetType().ToString(), $"Download cover {publication.sortName}");
        //Check if the Publication already has a folder and a cover
        string publicationFolder = Path.Join(downloadLocation, publication.folderName);
        if(!Directory.Exists(publicationFolder))
            Directory.CreateDirectory(publicationFolder);
        DirectoryInfo dirInfo = new (publicationFolder);
        if (dirInfo.EnumerateFiles().Any(info => info.Name.Contains("cover.")))
        {
            logger?.WriteLine(this.GetType().ToString(), $"Cover exists {publication.sortName}");
            return;
        }

        //FIX: the guard was inverted — it returned early when posterUrl DID contain
        //"http", which rejected every valid absolute URL produced by GetCoverUrl and
        //made this method a permanent no-op. Bail out only when the URL is null or
        //is NOT an http(s) URL.
        if (publication.posterUrl is null || !publication.posterUrl.Contains("http"))
        {
            logger?.WriteLine(this.GetType().ToString(), $"No Poster-URL in publication");
            return;
        }

        //Get the file extension (jpg, png) from the last dot-separated URL segment
        string[] split = publication.posterUrl.Split('.');
        string extension = split[^1];

        string outFolderPath = Path.Join(downloadLocation, publication.folderName);
        Directory.CreateDirectory(outFolderPath);

        //Download cover-Image
        DownloadImage(publication.posterUrl, Path.Join(downloadLocation, publication.folderName, $"cover.{extension}"), this.downloadClient, (byte)RequestType.AtHomeServer);
    }
}
|