7 Commits

Author SHA1 Message Date
7477f4d04d Do not replace spaces with %20
2025-05-08 05:28:31 +02:00
30a8162777 Shorten RequestUrl for MangaDex MangaInfo
Lower MaxRequestLimits
2025-05-08 04:39:22 +02:00
57baad3d2c Make LastRequest (LastExecutedRateLimit) static 2025-05-08 03:57:45 +02:00
3c5f51e495 fix time formatting 2025-05-08 03:48:57 +02:00
397d3c93df More logging 2025-05-08 03:36:32 +02:00
1b49b171f4 Remove Manganato (for now) 2025-05-08 03:22:31 +02:00
ec5d048df5 Add more Logging 2025-05-08 03:03:44 +02:00
11 changed files with 167 additions and 72 deletions

View File

@ -2,6 +2,7 @@
using System.Text;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using log4net;
using PuppeteerSharp;
namespace API.MangaDownloadClients;
@ -13,7 +14,7 @@ internal class ChromiumDownloadClient : DownloadClient
private readonly Thread _closeStalePagesThread;
private readonly List<KeyValuePair<IPage, DateTime>> _openPages = new ();
private static async Task<IBrowser> StartBrowser()
private static async Task<IBrowser> StartBrowser(ILog log)
{
return await Puppeteer.LaunchAsync(new LaunchOptions
{
@ -24,14 +25,14 @@ internal class ChromiumDownloadClient : DownloadClient
"--disable-setuid-sandbox",
"--no-sandbox"},
Timeout = 30000
});
}, new LoggerFactory([new Provider(log)]));
}
public ChromiumDownloadClient()
{
_httpDownloadClient = new();
if(_browser is null)
_browser = StartBrowser().Result;
_browser = StartBrowser(Log).Result;
_closeStalePagesThread = new Thread(CheckStalePages);
_closeStalePagesThread.Start();
}
@ -41,8 +42,10 @@ internal class ChromiumDownloadClient : DownloadClient
while (true)
{
Thread.Sleep(TimeSpan.FromHours(1));
Log.Debug("Removing stale pages");
foreach ((IPage? key, DateTime value) in _openPages.Where(kv => kv.Value.Subtract(DateTime.Now) > TimeSpan.FromHours(1)))
{
Log.Debug($"Closing {key.Url}");
key.CloseAsync().Wait();
}
}
@ -51,6 +54,7 @@ internal class ChromiumDownloadClient : DownloadClient
private readonly Regex _imageUrlRex = new(@"https?:\/\/.*\.(?:p?jpe?g|gif|a?png|bmp|avif|webp)(\?.*)?");
internal override RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null)
{
Log.Debug($"Requesting {url}");
return _imageUrlRex.IsMatch(url)
? _httpDownloadClient.MakeRequestInternal(url, referrer)
: MakeRequestBrowser(url, referrer, clickButton);
@ -68,11 +72,11 @@ internal class ChromiumDownloadClient : DownloadClient
try
{
response = page.GoToAsync(url, WaitUntilNavigation.Networkidle0).Result;
//Log($"Page loaded. {url}");
Log.Debug($"Page loaded. {url}");
}
catch (Exception e)
{
//Log($"Could not load Page {url}\n{e.Message}");
Log.Info($"Could not load Page {url}\n{e.Message}");
page.CloseAsync();
_openPages.Remove(_openPages.Find(i => i.Key == page));
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
@ -107,4 +111,41 @@ internal class ChromiumDownloadClient : DownloadClient
_openPages.Remove(_openPages.Find(i => i.Key == page));
return new RequestResult(response.Status, document, stream, false, "");
}
private class Provider(ILog log) : ILoggerProvider
{
public void Dispose()
{
}
public ILogger CreateLogger(string categoryName)
{
return new ChromiumLogger(log);
}
}
private class ChromiumLogger(ILog log) : ILogger
{
public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)
{
string message = formatter.Invoke(state, exception);
switch(logLevel)
{
case LogLevel.Critical: log.Fatal(message); break;
case LogLevel.Error: log.Error(message); break;
case LogLevel.Warning: log.Warn(message); break;
case LogLevel.Information: log.Info(message); break;
case LogLevel.Debug: log.Debug(message); break;
default: log.Info(message); break;
}
}
public bool IsEnabled(LogLevel logLevel) => true;
public IDisposable? BeginScope<TState>(TState state) where TState : notnull
{
return null;
}
}
}

View File

@ -5,17 +5,17 @@ namespace API.MangaDownloadClients;
internal abstract class DownloadClient
{
private readonly Dictionary<RequestType, DateTime> _lastExecutedRateLimit;
private static readonly Dictionary<RequestType, DateTime> LastExecutedRateLimit = new();
protected ILog Log { get; init; }
protected DownloadClient()
{
this.Log = LogManager.GetLogger(GetType());
this._lastExecutedRateLimit = new();
}
public RequestResult MakeRequest(string url, RequestType requestType, string? referrer = null, string? clickButton = null)
{
Log.Debug($"Requesting {url}");
if (!TrangaSettings.requestLimits.ContainsKey(requestType))
{
return new RequestResult(HttpStatusCode.NotAcceptable, null, Stream.Null);
@ -26,17 +26,19 @@ internal abstract class DownloadClient
: TrangaSettings.requestLimits[requestType];
TimeSpan timeBetweenRequests = TimeSpan.FromMinutes(1).Divide(rateLimit);
_lastExecutedRateLimit.TryAdd(requestType, DateTime.UtcNow.Subtract(timeBetweenRequests));
TimeSpan rateLimitTimeout = timeBetweenRequests.Subtract(DateTime.UtcNow.Subtract(_lastExecutedRateLimit[requestType]));
DateTime now = DateTime.Now;
LastExecutedRateLimit.TryAdd(requestType, now.Subtract(timeBetweenRequests));
TimeSpan rateLimitTimeout = timeBetweenRequests.Subtract(now.Subtract(LastExecutedRateLimit[requestType]));
Log.Debug($"Request limit {rateLimit}/Minute timeBetweenRequests: {timeBetweenRequests:ss'.'fffff} Timeout: {rateLimitTimeout:ss'.'fffff}");
if (rateLimitTimeout > TimeSpan.Zero)
{
Thread.Sleep(rateLimitTimeout);
}
RequestResult result = MakeRequestInternal(url, referrer, clickButton);
_lastExecutedRateLimit[requestType] = DateTime.UtcNow;
LastExecutedRateLimit[requestType] = DateTime.UtcNow;
return result;
}
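
The hunk above derives the delay between requests by dividing one minute by the per-type request limit and sleeping for whatever is left of that window since the last request of the same type. Below is a minimal standalone sketch of the same calculation (not part of the diff; the values of rateLimit and lastRequest are illustrative assumptions):

// Sketch only: mirrors the timeout calculation in DownloadClient.MakeRequest above.
int rateLimit = 60;                                                       // requests per minute
TimeSpan timeBetweenRequests = TimeSpan.FromMinutes(1).Divide(rateLimit); // 1 second
DateTime lastRequest = DateTime.UtcNow.AddMilliseconds(-400);             // pretend the last request was 0.4 s ago
TimeSpan rateLimitTimeout = timeBetweenRequests.Subtract(DateTime.UtcNow.Subtract(lastRequest));
if (rateLimitTimeout > TimeSpan.Zero)
    Thread.Sleep(rateLimitTimeout);                                       // sleeps roughly the remaining 0.6 s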

View File

@ -1,5 +1,4 @@
using System.Net;
using API.Schema;
using HtmlAgilityPack;
namespace API.MangaDownloadClients;
@ -18,16 +17,15 @@ internal class HttpDownloadClient : DownloadClient
internal override RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null)
{
//TODO
//if (clickButton is not null)
//Log("Can not click button on static site.");
if (clickButton is not null)
Log.Warn("Can not click button on static site.");
HttpResponseMessage? response = null;
while (response is null)
{
HttpRequestMessage requestMessage = new(HttpMethod.Get, url);
if (referrer is not null)
requestMessage.Headers.Referrer = new Uri(referrer);
//Log($"Requesting {requestType} {url}");
Log.Debug($"Requesting {url}");
try
{
response = Client.Send(requestMessage);

View File

@ -117,7 +117,7 @@ using (var scope = app.Services.CreateScope())
new Mangaworld(),
new ManhuaPlus(),
new Weebcentral(),
new Manganato(),
//new Manganato(),
new Global(scope.ServiceProvider.GetService<PgsqlContext>()!)
];
MangaConnector[] newConnectors = connectors.Where(c => !context.MangaConnectors.Contains(c)).ToArray();

View File

@ -73,12 +73,14 @@ public class Kavita : LibraryConnector
Stream data = NetClient.MakeRequest($"{BaseUrl}/api/Library/libraries", "Bearer", Auth);
if (data == Stream.Null)
{
return Array.Empty<KavitaLibrary>();
Log.Info("No libraries found");
return [];
}
JsonArray? result = JsonSerializer.Deserialize<JsonArray>(data);
if (result is null)
{
return Array.Empty<KavitaLibrary>();
Log.Info("No libraries found");
return [];
}
List<KavitaLibrary> ret = new();

View File

@ -38,12 +38,14 @@ public class Komga : LibraryConnector
Stream data = NetClient.MakeRequest($"{BaseUrl}/api/v1/libraries", "Basic", Auth);
if (data == Stream.Null)
{
return Array.Empty<KomgaLibrary>();
Log.Info("No libraries found");
return [];
}
JsonArray? result = JsonSerializer.Deserialize<JsonArray>(data);
if (result is null)
{
return Array.Empty<KomgaLibrary>();
Log.Info("No libraries found");
return [];
}
HashSet<KomgaLibrary> ret = new();

View File

@ -1,5 +1,8 @@
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using log4net;
using Microsoft.EntityFrameworkCore;
using Newtonsoft.Json;
namespace API.Schema.LibraryConnectors;
@ -20,6 +23,10 @@ public abstract class LibraryConnector(string libraryConnectorId, LibraryType li
[Required]
public string Auth { get; init; } = auth;
[JsonIgnore]
[NotMapped]
protected ILog Log { get; init; } = LogManager.GetLogger($"{libraryType.ToString()} {baseUrl}");
protected abstract void UpdateLibraryInternal();
internal abstract bool Test();
}

View File

@ -1,48 +1,52 @@
using System.Net;
using System.Net.Http.Headers;
using log4net;
namespace API.Schema.LibraryConnectors;
public class NetClient
{
private static ILog Log = LogManager.GetLogger(typeof(NetClient));
public static Stream MakeRequest(string url, string authScheme, string auth)
{
Log.Debug($"Requesting {url}");
HttpClient client = new();
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(authScheme, auth);
HttpRequestMessage requestMessage = new()
{
Method = HttpMethod.Get,
RequestUri = new Uri(url)
};
try
{
HttpResponseMessage response = client.Send(requestMessage);
if (response.StatusCode is HttpStatusCode.Unauthorized &&
response.RequestMessage!.RequestUri!.AbsoluteUri != url)
return MakeRequest(response.RequestMessage!.RequestUri!.AbsoluteUri, authScheme, auth);
else if (response.IsSuccessStatusCode)
return response.Content.ReadAsStream();
else
return Stream.Null;
}
catch (Exception e)
{
switch (e)
{
case HttpRequestException:
Log.Debug(e);
break;
default:
throw;
}
Log.Info("Failed to make request");
return Stream.Null;
}
}
public static bool MakePost(string url, string authScheme, string auth)
{
HttpClient client = new()
{

View File

@ -22,26 +22,29 @@ public class MangaDex : MangaConnector
int offset = 0; //"Page"
int total = int.MaxValue; //How many total results are there, is updated on first request
HashSet<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> retManga = new();
int loadedPublicationData = 0;
List<JsonNode> results = new();
//Request all search-results
while (offset < total) //As long as we haven't requested all "Pages"
{
//Request next Page
RequestResult requestResult = downloadClient.MakeRequest(
$"https://api.mangadex.org/manga?limit={limit}&title={publicationTitle}&offset={offset}" +
$"&contentRating%5B%5D=safe&contentRating%5B%5D=suggestive&contentRating%5B%5D=erotica" +
$"&contentRating%5B%5D=pornographic" +
$"&includes%5B%5D=manga&includes%5B%5D=cover_art&includes%5B%5D=author" +
$"&includes%5B%5D=artist&includes%5B%5D=tag", RequestType.MangaInfo);
string requestUrl =
$"https://api.mangadex.org/manga?limit={limit}&title={publicationTitle}&offset={offset}" +
$"&includes[]=manga&includes[]=cover_art&includes[]=author&includes[]=artist&includes[]=tag";
RequestResult requestResult = downloadClient.MakeRequest(requestUrl, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
Log.Info($"{requestResult.statusCode}: {requestUrl}");
break;
}
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
offset += limit;
if (result is null)
{
Log.Info($"result was null: {requestUrl}");
break;
}
if(result.ContainsKey("total"))
total = result["total"]!.GetValue<int>(); //Update the total number of Publications
@ -61,13 +64,18 @@ public class MangaDex : MangaConnector
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
RequestResult requestResult =
downloadClient.MakeRequest($"https://api.mangadex.org/manga/{publicationId}?includes%5B%5D=manga&includes%5B%5D=cover_art&includes%5B%5D=author&includes%5B%5D=artist&includes%5B%5D=tag", RequestType.MangaInfo);
string url = $"https://api.mangadex.org/manga/{publicationId}" +
$"?includes%5B%5D=manga&includes%5B%5D=cover_art&includes%5B%5D=author&includes%5B%5D=artist&includes%5B%5D=tag";
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
Log.Info($"{requestResult.statusCode}: {url}");
return null;
}
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
if(result is not null)
return MangaFromJsonObject(result["data"]!.AsObject());
Log.Info($"result was null: {url}");
return null;
}
@ -81,15 +89,24 @@ public class MangaDex : MangaConnector
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? MangaFromJsonObject(JsonObject manga)
{
if (!manga.TryGetPropertyValue("id", out JsonNode? idNode))
{
Log.Info("id was null");
return null;
}
string publicationId = idNode!.GetValue<string>();
if (!manga.TryGetPropertyValue("attributes", out JsonNode? attributesNode))
{
Log.Info("attributes was null");
return null;
}
JsonObject attributes = attributesNode!.AsObject();
if (!attributes.TryGetPropertyValue("title", out JsonNode? titleNode))
{
Log.Info("title was null");
return null;
}
string sortName = titleNode!.AsObject().ContainsKey("en") switch
{
true => titleNode.AsObject()["en"]!.GetValue<string>(),
@ -108,7 +125,10 @@ public class MangaDex : MangaConnector
List<MangaAltTitle> altTitles = altTitlesDict.Select(t => new MangaAltTitle(t.Key, t.Value)).ToList();
if (!attributes.TryGetPropertyValue("description", out JsonNode? descriptionNode))
{
Log.Info("description was null");
return null;
}
string description = descriptionNode!.AsObject().ContainsKey("en") switch
{
true => descriptionNode.AsObject()["en"]!.GetValue<string>(),
@ -154,12 +174,18 @@ public class MangaDex : MangaConnector
List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
if (!manga.TryGetPropertyValue("relationships", out JsonNode? relationshipsNode))
{
Log.Info("relationships was null");
return null;
}
JsonNode? coverNode = relationshipsNode!.AsArray()
.FirstOrDefault(rel => rel!["type"]!.GetValue<string>().Equals("cover_art"));
if (coverNode is null)
{
Log.Info("coverNode was null");
return null;
}
string fileName = coverNode["attributes"]!["fileName"]!.GetValue<string>();
string coverUrl = $"https://uploads.mangadex.org/covers/{publicationId}/{fileName}";
@ -195,16 +221,22 @@ public class MangaDex : MangaConnector
while (offset < total)
{
//Request next "Page"
RequestResult requestResult =
downloadClient.MakeRequest(
$"https://api.mangadex.org/manga/{manga.IdOnConnectorSite}/feed?limit={limit}&offset={offset}&translatedLanguage%5B%5D={language}&contentRating%5B%5D=safe&contentRating%5B%5D=suggestive&contentRating%5B%5D=erotica&contentRating%5B%5D=pornographic", RequestType.MangaDexFeed);
string requestUrl = $"https://api.mangadex.org/manga/{manga.IdOnConnectorSite}/feed?limit={limit}&offset={offset}&translatedLanguage%5B%5D={language}" +
$"&contentRating%5B%5D=safe&contentRating%5B%5D=suggestive&contentRating%5B%5D=erotica&contentRating%5B%5D=pornographic";
RequestResult requestResult = downloadClient.MakeRequest(requestUrl, RequestType.MangaDexFeed);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
Log.Info($"{requestResult.statusCode}: {requestUrl}");
break;
}
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
offset += limit;
if (result is null)
{
Log.Info($"result was null: {requestUrl}");
break;
}
total = result["total"]!.GetValue<int>();
JsonArray chaptersInResult = result["data"]!.AsArray();
@ -235,6 +267,7 @@ public class MangaDex : MangaConnector
if (attributes.ContainsKey("pages") && attributes["pages"] is not null &&
attributes["pages"]!.GetValue<int>() < 1)
{
Log.Info($"No pages: {chapterId}");
continue;
}
@ -246,6 +279,7 @@ public class MangaDex : MangaConnector
}
catch (Exception e)
{
Log.Debug(e);
}
}
}
@ -258,16 +292,23 @@ public class MangaDex : MangaConnector
{//Request URLs for Chapter-Images
Match m = Regex.Match(chapter.Url, @"https?:\/\/mangadex.org\/chapter\/([0-9\-a-z]+)");
if (!m.Success)
{
Log.Error($"Could not parse Chapter ID: {chapter.Url}");
return [];
}
string url = $"https://api.mangadex.org/at-home/server/{m.Groups[1].Value}?forcePort443=false";
RequestResult requestResult =
downloadClient.MakeRequest($"https://api.mangadex.org/at-home/server/{m.Groups[1].Value}?forcePort443=false", RequestType.MangaDexImage);
downloadClient.MakeRequest(url, RequestType.MangaDexImage);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
Log.Info($"{requestResult.statusCode}: {url}");
return [];
}
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
if (result is null)
{
Log.Info($"Result was null: {url}");
return [];
}
string baseUrl = result["baseUrl"]!.GetValue<string>();

View File

@ -135,10 +135,8 @@ public static class Tranga
List<Job> runJobs = context.Jobs.Where(j => j.state <= JobState.Running && j.Enabled == true).ToList()
.Where(j => j.NextExecution < DateTime.UtcNow).ToList();
Log.Debug($"Due jobs: {runJobs.Count}");
Log.Debug($"Running jobs: {RunningJobs.Count}");
IEnumerable<Job> orderedJobs = OrderJobs(runJobs, context).ToList();
Log.Debug($"Ordered jobs: {orderedJobs.Count()}");
Log.Debug($"Jobs Due: {runJobs.Count} Running: {RunningJobs.Count} Ordered: {orderedJobs.Count()}");
foreach (Job job in orderedJobs)
{
// If the job is already running, skip it

View File

@ -39,11 +39,11 @@ public static class TrangaSettings
[JsonIgnore]
internal static readonly Dictionary<RequestType, int> DefaultRequestLimits = new ()
{
{RequestType.MangaInfo, 250},
{RequestType.MangaDexFeed, 250},
{RequestType.MangaInfo, 60},
{RequestType.MangaDexFeed, 60},
{RequestType.MangaDexImage, 40},
{RequestType.MangaImage, 60},
{RequestType.MangaCover, 250},
{RequestType.MangaCover, 60},
{RequestType.Default, 60}
};
public static Dictionary<RequestType, int> requestLimits { get; private set; } = DefaultRequestLimits;
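
Combined with the timeBetweenRequests formula in DownloadClient.MakeRequest (one minute divided by the limit), the lowered defaults work out to roughly one request per second for MangaInfo, MangaDexFeed, MangaCover, MangaImage and Default, and one request every 1.5 seconds for MangaDexImage (40 per minute).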