2023-05-18 18:20:37 +02:00
|
|
|
|
using System.Globalization;
|
2023-05-19 18:11:14 +02:00
|
|
|
|
using System.Net;
|
2023-05-18 18:20:37 +02:00
|
|
|
|
using System.Text.Json;
|
2023-05-18 15:48:54 +02:00
|
|
|
|
using System.Text.Json.Nodes;
|
2023-05-20 22:10:24 +02:00
|
|
|
|
using Logging;
|
2023-06-05 00:35:57 +02:00
|
|
|
|
using Tranga.TrangaTasks;
|
2023-05-18 15:48:54 +02:00
|
|
|
|
|
|
|
|
|
namespace Tranga.Connectors;
|
|
|
|
|
public class MangaDex : Connector
|
|
|
|
|
{
|
|
|
|
|
public override string name { get; }
|
|
|
|
|
|
2023-05-22 18:15:24 +02:00
|
|
|
|
/// <summary>
/// Identifies which MangaDex API endpoint a request targets. The byte value is
/// used as the key into the DownloadClient's per-endpoint rate-limit table
/// (see the constructor), so each endpoint gets its own request budget.
/// </summary>
private enum RequestType : byte
{
    Manga,        // /manga search + details
    Feed,         // /manga/{id}/feed chapter listings
    AtHomeServer, // /at-home/server image-server lookups and image downloads
    CoverUrl,     // /cover/{id} cover-art metadata
    Author,       // /author/{id} author names
}
|
2023-05-22 18:15:24 +02:00
|
|
|
|
|
2023-05-25 14:23:33 +02:00
|
|
|
|
/// <summary>
/// Creates the MangaDex connector and wires up its DownloadClient with a
/// separate request budget per API endpoint.
/// </summary>
/// <param name="downloadLocation">Root directory chapter archives are written to.</param>
/// <param name="imageCachePath">Directory used to cache cover images.</param>
/// <param name="logger">Optional logger; null disables logging.</param>
public MangaDex(string downloadLocation, string imageCachePath, Logger? logger) : base(downloadLocation, imageCachePath, logger)
{
    name = "MangaDex";

    // Per-endpoint rate limits, keyed by RequestType. The at-home image
    // servers get a much tighter budget than the regular REST endpoints
    // (presumably these values are interpreted by DownloadClient as a
    // requests-per-interval cap — confirm against DownloadClient).
    Dictionary<byte, int> rateLimits = new()
    {
        { (byte)RequestType.Manga, 250 },
        { (byte)RequestType.Feed, 250 },
        { (byte)RequestType.AtHomeServer, 40 },
        { (byte)RequestType.CoverUrl, 250 },
        { (byte)RequestType.Author, 250 }
    };
    this.downloadClient = new DownloadClient(rateLimits, logger);
}
|
|
|
|
|
|
2023-05-18 16:41:14 +02:00
|
|
|
|
/// <summary>
/// Searches MangaDex for Publications matching a title, walking the paginated
/// /manga endpoint until every result page has been fetched (or a request fails).
/// </summary>
/// <param name="publicationTitle">Title (or partial title) to search for; empty string lists everything.</param>
/// <returns>All Publications found; empty array if the first request already fails.</returns>
public override Publication[] GetPublications(string publicationTitle = "")
{
    logger?.WriteLine(this.GetType().ToString(), $"Getting Publications (title={publicationTitle})");
    const int limit = 100; //How many values we want returned at once
    int offset = 0; //"Page"
    int total = int.MaxValue; //How many total results are there, is updated on first request
    HashSet<Publication> publications = new();
    int loadedPublicationData = 0;
    while (offset < total) //As long as we haven't requested all "Pages"
    {
        //Request next Page
        DownloadClient.RequestResult requestResult =
            downloadClient.MakeRequest(
                $"https://api.mangadex.org/manga?limit={limit}&title={publicationTitle}&offset={offset}", (byte)RequestType.Manga);
        if (requestResult.statusCode != HttpStatusCode.OK)
            break;
        JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);

        offset += limit;
        if (result is null)
            break;

        total = result["total"]!.GetValue<int>(); //Update the total number of Publications

        JsonArray mangaInResult = result["data"]!.AsArray(); //Manga-data-Array
        //Loop each Manga and extract information from JSON
        foreach (JsonNode? mangaNode in mangaInResult)
        {
            logger?.WriteLine(this.GetType().ToString(), $"Getting publication data. {++loadedPublicationData}/{total}");
            JsonObject manga = (JsonObject)mangaNode!;
            JsonObject attributes = manga["attributes"]!.AsObject();

            string publicationId = manga["id"]!.GetValue<string>();

            //Prefer the English title; otherwise fall back to the first language available.
            string title = attributes["title"]!.AsObject().ContainsKey("en") && attributes["title"]!["en"] is not null
                ? attributes["title"]!["en"]!.GetValue<string>()
                : attributes["title"]![((IDictionary<string, JsonNode?>)attributes["title"]!.AsObject()).Keys.First()]!.GetValue<string>();

            string? description = attributes["description"]!.AsObject().ContainsKey("en") && attributes["description"]!["en"] is not null
                ? attributes["description"]!["en"]!.GetValue<string?>()
                : null;

            //Collect alternative titles, keyed by language-code.
            JsonArray altTitlesObject = attributes["altTitles"]!.AsArray();
            Dictionary<string, string> altTitlesDict = new();
            foreach (JsonNode? altTitleNode in altTitlesObject)
            {
                JsonObject altTitleObject = (JsonObject)altTitleNode!;
                string key = ((IDictionary<string, JsonNode?>)altTitleObject).Keys.ToArray()[0];
                altTitlesDict.TryAdd(key, altTitleObject[key]!.GetValue<string>()); //TryAdd: the same language-code can occur more than once
            }

            //Collect English tag names (tags without an "en" entry are skipped).
            JsonArray tagsObject = attributes["tags"]!.AsArray();
            HashSet<string> tags = new();
            foreach (JsonNode? tagNode in tagsObject)
            {
                JsonObject tagObject = (JsonObject)tagNode!;
                if (tagObject["attributes"]!["name"]!.AsObject().ContainsKey("en"))
                    tags.Add(tagObject["attributes"]!["name"]!["en"]!.GetValue<string>());
            }

            //Pull cover-art id and author ids out of the relationships array.
            string? posterId = null;
            HashSet<string> authorIds = new();
            if (manga.ContainsKey("relationships") && manga["relationships"] is not null)
            {
                JsonArray relationships = manga["relationships"]!.AsArray();
                //BUGFIX: FirstOrDefault returns null when a manga has no cover_art
                //relationship; the previous null-forgiving dereference (!["id"]) threw
                //a NullReferenceException in that case. Use null-conditional access instead.
                JsonNode? coverArtRelationship = relationships.FirstOrDefault(relationship =>
                    relationship!["type"]!.GetValue<string>() == "cover_art");
                posterId = coverArtRelationship?["id"]?.GetValue<string>();
                foreach (JsonNode? node in relationships.Where(relationship =>
                             relationship!["type"]!.GetValue<string>() == "author"))
                    authorIds.Add(node!["id"]!.GetValue<string>());
            }
            string? coverUrl = GetCoverUrl(publicationId, posterId);
            string? coverCacheName = null;
            if (coverUrl is not null)
                coverCacheName = SaveCoverImageToCache(coverUrl, (byte)RequestType.AtHomeServer);

            List<string> authors = GetAuthors(authorIds);

            //External links (anilist, myanimelist, raw, ...), keyed by provider.
            Dictionary<string, string> linksDict = new();
            if (attributes.ContainsKey("links") && attributes["links"] is not null)
            {
                JsonObject linksObject = attributes["links"]!.AsObject();
                foreach (string key in ((IDictionary<string, JsonNode?>)linksObject).Keys)
                {
                    linksDict.Add(key, linksObject[key]!.GetValue<string>());
                }
            }

            int? year = attributes.ContainsKey("year") && attributes["year"] is not null
                ? attributes["year"]!.GetValue<int?>()
                : null;

            string? originalLanguage = attributes.ContainsKey("originalLanguage") && attributes["originalLanguage"] is not null
                ? attributes["originalLanguage"]!.GetValue<string?>()
                : null;

            string status = attributes["status"]!.GetValue<string>();

            Publication pub = new (
                title,
                authors,
                description,
                altTitlesDict,
                tags.ToArray(),
                coverUrl,
                coverCacheName,
                linksDict,
                year,
                originalLanguage,
                status,
                publicationId
            );
            publications.Add(pub); //Add Publication (Manga) to result
        }
    }
    logger?.WriteLine(this.GetType().ToString(), $"Done getting publications (title={publicationTitle})");
    return publications.ToArray();
}
|
|
|
|
|
|
2023-05-18 18:19:04 +02:00
|
|
|
|
/// <summary>
/// Retrieves every Chapter of a Publication from the paginated MangaDex feed
/// endpoint, optionally filtered by translation language.
/// </summary>
/// <param name="publication">Publication whose chapter feed is queried.</param>
/// <param name="language">translatedLanguage filter value; empty for no filter.</param>
/// <returns>Chapters ordered by numeric chapter number.</returns>
public override Chapter[] GetChapters(Publication publication, string language = "")
{
    logger?.WriteLine(this.GetType().ToString(), $"Getting Chapters for {publication.sortName} {publication.internalId} (language={language})");
    const int limit = 100; //Page size of the feed endpoint
    int offset = 0;
    int total = int.MaxValue; //Corrected by the first response
    List<Chapter> chapters = new();
    while (offset < total)
    {
        //Fetch the next page of the chapter feed
        DownloadClient.RequestResult requestResult = downloadClient.MakeRequest(
            $"https://api.mangadex.org/manga/{publication.publicationId}/feed?limit={limit}&offset={offset}&translatedLanguage%5B%5D={language}", (byte)RequestType.Feed);
        if (requestResult.statusCode != HttpStatusCode.OK)
            break;
        JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
        offset += limit;
        if (result is null)
            break;

        total = result["total"]!.GetValue<int>();

        //Extract the relevant fields from each chapter entry
        foreach (JsonNode? chapterNode in result["data"]!.AsArray())
        {
            JsonObject chapterObject = (JsonObject)chapterNode!;
            JsonObject attributes = chapterObject["attributes"]!.AsObject();
            string chapterId = chapterObject["id"]!.GetValue<string>();

            //title, volume and chapter may each be absent or explicitly null in the feed
            string? title = attributes.ContainsKey("title") && attributes["title"] is not null
                ? attributes["title"]!.GetValue<string>() : null;
            string? volume = attributes.ContainsKey("volume") && attributes["volume"] is not null
                ? attributes["volume"]!.GetValue<string>() : null;
            string? chapterNum = attributes.ContainsKey("chapter") && attributes["chapter"] is not null
                ? attributes["chapter"]!.GetValue<string>() : null;

            chapters.Add(new Chapter(title, volume, chapterNum, chapterId));
        }
    }

    //Sort by numeric chapter number, parsing with '.' as decimal separator
    //so the result does not depend on the host locale.
    NumberFormatInfo chapterNumberFormatInfo = new()
    {
        NumberDecimalSeparator = "."
    };
    logger?.WriteLine(this.GetType().ToString(), $"Done getting Chapters for {publication.internalId}");
    return chapters.OrderBy(chapter => Convert.ToSingle(chapter.chapterNumber, chapterNumberFormatInfo)).ToArray();
}
|
|
|
|
|
|
2023-06-10 14:27:09 +02:00
|
|
|
|
/// <summary>
/// Downloads all images of a single Chapter: resolves the at-home image server,
/// builds the image URLs, writes a ComicInfo.xml to a temp file and hands
/// everything to DownloadChapterImages for archiving.
/// </summary>
/// <param name="publication">Publication the chapter belongs to.</param>
/// <param name="chapter">Chapter to download; chapter.url holds the MangaDex chapter-id.</param>
/// <param name="parentTask">Task whose progress is updated while downloading.</param>
/// <param name="cancellationToken">Optional token; checked before any work starts.</param>
public override void DownloadChapter(Publication publication, Chapter chapter, DownloadChapterTask parentTask, CancellationToken? cancellationToken = null)
{
    if (cancellationToken?.IsCancellationRequested??false)
        return;
    logger?.WriteLine(this.GetType().ToString(), $"Downloading Chapter-Info {publication.sortName} {publication.internalId} {chapter.volumeNumber}-{chapter.chapterNumber}");
    //Request URLs for Chapter-Images
    //BUGFIX: the URL previously ended in a stray apostrophe ("...forcePort443=false'"),
    //sending a malformed query value to the at-home endpoint.
    DownloadClient.RequestResult requestResult =
        downloadClient.MakeRequest($"https://api.mangadex.org/at-home/server/{chapter.url}?forcePort443=false", (byte)RequestType.AtHomeServer);
    if (requestResult.statusCode != HttpStatusCode.OK)
        return;
    JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
    if (result is null)
        return;

    string baseUrl = result["baseUrl"]!.GetValue<string>();
    string hash = result["chapter"]!["hash"]!.GetValue<string>();
    JsonArray imageFileNames = result["chapter"]!["data"]!.AsArray();
    //Loop through all imageNames and construct urls (imageUrl)
    HashSet<string> imageUrls = new();
    foreach (JsonNode? image in imageFileNames)
        imageUrls.Add($"{baseUrl}/data/{hash}/{image!.GetValue<string>()}");

    //Write ComicInfo.xml to a temp file so it can be packed into the archive
    string comicInfoPath = Path.GetTempFileName();
    File.WriteAllText(comicInfoPath, GetComicInfoXmlString(publication, chapter, logger));

    //Download Chapter-Images
    DownloadChapterImages(imageUrls.ToArray(), GetArchiveFilePath(publication, chapter), (byte)RequestType.AtHomeServer, parentTask, comicInfoPath, cancellationToken:cancellationToken);
}
|
2023-05-18 19:56:06 +02:00
|
|
|
|
|
2023-05-22 16:45:55 +02:00
|
|
|
|
/// <summary>
/// Resolves the full cover-image URL for a publication by looking up the
/// cover's file name via the /cover endpoint.
/// </summary>
/// <param name="publicationId">MangaDex manga-id the cover belongs to.</param>
/// <param name="posterId">Cover-art relationship id; null when the manga has none.</param>
/// <returns>The uploads.mangadex.org cover URL, or null when posterId is missing or any request step fails.</returns>
private string? GetCoverUrl(string publicationId, string? posterId)
{
    logger?.WriteLine(this.GetType().ToString(), $"Getting CoverUrl for {publicationId}");
    if (posterId is null)
    {
        logger?.WriteLine(this.GetType().ToString(), $"No posterId, aborting");
        return null;
    }

    //Ask the API which image file belongs to this cover-id
    DownloadClient.RequestResult coverResult = downloadClient.MakeRequest($"https://api.mangadex.org/cover/{posterId}", (byte)RequestType.CoverUrl);
    if (coverResult.statusCode != HttpStatusCode.OK)
        return null;
    JsonObject? coverData = JsonSerializer.Deserialize<JsonObject>(coverResult.result);
    if (coverData is null)
        return null;

    string coverFileName = coverData["data"]!["attributes"]!["fileName"]!.GetValue<string>();
    string coverUrl = $"https://uploads.mangadex.org/covers/{publicationId}/{coverFileName}";
    logger?.WriteLine(this.GetType().ToString(), $"Got Cover-Url for {publicationId} -> {coverUrl}");
    return coverUrl;
}
|
|
|
|
|
|
2023-06-10 14:05:23 +02:00
|
|
|
|
/// <summary>
/// Fetches the display names for a set of MangaDex author-ids via the /author
/// endpoint. Best-effort: if any request fails or returns unparseable JSON,
/// the names collected so far are returned instead of throwing.
/// </summary>
/// <param name="authorIds">Author relationship ids to resolve.</param>
/// <returns>Author names in iteration order; possibly shorter than the input on failure.</returns>
private List<string> GetAuthors(IEnumerable<string> authorIds)
{
    List<string> authorNames = new();
    foreach (string authorId in authorIds)
    {
        DownloadClient.RequestResult authorResult = downloadClient.MakeRequest($"https://api.mangadex.org/author/{authorId}", (byte)RequestType.Author);
        if (authorResult.statusCode != HttpStatusCode.OK)
            return authorNames; //abort on failure, keep what we have
        JsonObject? authorData = JsonSerializer.Deserialize<JsonObject>(authorResult.result);
        if (authorData is null)
            return authorNames;

        string authorName = authorData["data"]!["attributes"]!["name"]!.GetValue<string>();
        authorNames.Add(authorName);
        logger?.WriteLine(this.GetType().ToString(), $"Got author {authorId} -> {authorName}");
    }
    return authorNames;
}
|
2023-05-18 15:48:54 +02:00
|
|
|
|
}
|