Compare commits

...

4 Commits

c94c55300c  Chromium Close Pages that errored. (https://github.com/C9Glax/tranga/issues/361)  2025-03-27 20:30:51 +01:00
            Some checks failed: Docker Image CI / build (push) has been cancelled
721f932fac  Fix MangaConnector enablestate endpoint  2025-03-27 19:49:44 +01:00
f691529591  "Global"-Connector (https://github.com/C9Glax/tranga-website/issues/50)  2025-03-27 19:35:49 +01:00
d75262a8f3  DBUpdate Exception on Jobs update  2025-03-27 19:03:06 +01:00
6 changed files with 94 additions and 33 deletions

View File

@@ -50,7 +50,7 @@ public class MangaConnectorController(PgsqlContext context) : Controller
/// <summary>
/// Enables or disables a Connector
/// </summary>
/// <param name="id">ID of the connector</param>
/// <param name="MangaConnectorName">ID of the connector</param>
/// <param name="enabled">Set true to enable</param>
/// <response code="200"></response>
/// <response code="404">Connector with ID not found.</response>
@@ -59,11 +59,11 @@ public class MangaConnectorController(PgsqlContext context) : Controller
[ProducesResponseType(Status200OK)]
[ProducesResponseType(Status404NotFound)]
[ProducesResponseType<string>(Status500InternalServerError, "text/plain")]
public IActionResult SetEnabled(string id, bool enabled)
public IActionResult SetEnabled(string MangaConnectorName, bool enabled)
{
try
{
MangaConnector? connector = context.MangaConnectors.Find(id);
MangaConnector? connector = context.MangaConnectors.Find(MangaConnectorName);
if (connector is null)
return NotFound();
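
For orientation, a hedged sketch of how a client might call the renamed endpoint. The HTTP verb, route template, and port are not visible in this hunk, so the URL below is purely an assumption; only the parameter names MangaConnectorName and enabled and the 200/404 responses come from the controller code above.

// Hypothetical client call: the PATCH verb, route shape, and port are assumed,
// not taken from the diff; only the parameter names and status codes are.
using System;
using System.Net.Http;

using HttpClient client = new() { BaseAddress = new Uri("http://localhost:6531/") };
HttpResponseMessage response = await client.PatchAsync(
    "v2/MangaConnector/Global/SetEnabled/true", // assumed route: connector name plus enabled flag
    null);
Console.WriteLine((int)response.StatusCode); // expect 200, or 404 if the connector name is unknown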

View File

@@ -10,6 +10,8 @@ internal class ChromiumDownloadClient : DownloadClient
{
private static IBrowser? _browser;
private readonly HttpDownloadClient _httpDownloadClient;
private readonly Thread _closeStalePagesThread;
private readonly List<KeyValuePair<IPage, DateTime>> _openPages = new ();
private static async Task<IBrowser> StartBrowser()
{
@@ -22,31 +24,7 @@ internal class ChromiumDownloadClient : DownloadClient
"--disable-setuid-sandbox",
"--no-sandbox"},
Timeout = 30000
}, new LoggerFactory([new LogProvider()])); //TODO
}
private class LogProvider : ILoggerProvider
{
//TODO
public void Dispose() { }
public ILogger CreateLogger(string categoryName) => new Logger();
}
private class Logger : ILogger
{
public Logger() : base() { }
public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)
{
if (logLevel <= LogLevel.Information)
return;
//TODO
}
public bool IsEnabled(LogLevel logLevel) => true;
public IDisposable? BeginScope<TState>(TState state) where TState : notnull => null;
});
}
public ChromiumDownloadClient()
@@ -54,6 +32,20 @@ internal class ChromiumDownloadClient : DownloadClient
_httpDownloadClient = new();
if(_browser is null)
_browser = StartBrowser().Result;
_closeStalePagesThread = new Thread(CheckStalePages);
_closeStalePagesThread.Start();
}
private void CheckStalePages()
{
while (true)
{
Thread.Sleep(TimeSpan.FromHours(1));
//Close and untrack pages that have been open for more than an hour
foreach ((IPage? key, DateTime value) in _openPages.Where(kv => DateTime.Now.Subtract(kv.Value) > TimeSpan.FromHours(1)).ToArray())
{
key.CloseAsync().Wait();
_openPages.Remove(_openPages.Find(i => i.Key == key));
}
}
}
private readonly Regex _imageUrlRex = new(@"https?:\/\/.*\.(?:p?jpe?g|gif|a?png|bmp|avif|webp)(\?.*)?");
@@ -69,8 +61,9 @@ internal class ChromiumDownloadClient : DownloadClient
if (_browser is null)
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
IPage page = _browser.NewPageAsync().Result;
_openPages.Add(new(page, DateTime.Now));
page.SetExtraHttpHeadersAsync(new() { { "Referer", referrer } });
page.DefaultTimeout = 10000;
page.DefaultTimeout = 30000;
IResponse response;
try
{
@@ -81,6 +74,7 @@ internal class ChromiumDownloadClient : DownloadClient
{
//Log($"Could not load Page {url}\n{e.Message}");
page.CloseAsync();
_openPages.Remove(_openPages.Find(i => i.Key == page));
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
}
@@ -104,11 +98,13 @@ internal class ChromiumDownloadClient : DownloadClient
}
else
{
page.CloseAsync();
page.CloseAsync().Wait();
_openPages.Remove(_openPages.Find(i => i.Key == page));
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
}
page.CloseAsync();
page.CloseAsync().Wait();
_openPages.Remove(_openPages.Find(i => i.Key == page));
return new RequestResult(response.Status, document, stream, false, "");
}
}
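
The stale-page sweep above boils down to one comparison: a page counts as stale once more than an hour has passed since it was opened, at which point it is closed and dropped from the tracking list. A minimal standalone sketch of that check, with illustrative (name, openedAt) pairs standing in for the tracked IPage entries:

// Illustrative only: the page names and timestamps are invented for the sketch.
using System;
using System.Collections.Generic;
using System.Linq;

List<KeyValuePair<string, DateTime>> openPages = new()
{
    new KeyValuePair<string, DateTime>("chapter-list", DateTime.Now.AddMinutes(-90)), // stale
    new KeyValuePair<string, DateTime>("cover-image", DateTime.Now.AddMinutes(-10)),  // still fresh
};

foreach ((string name, DateTime openedAt) in
         openPages.Where(kv => DateTime.Now.Subtract(kv.Value) > TimeSpan.FromHours(1)))
    Console.WriteLine($"closing stale page: {name} (open since {openedAt:T})");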

View File

@@ -117,7 +117,8 @@ using (var scope = app.Services.CreateScope())
new Mangaworld(),
new ManhuaPlus(),
new Weebcentral(),
new Manganato()
new Manganato(),
new Global(scope.ServiceProvider.GetService<PgsqlContext>()!)
];
MangaConnector[] newConnectors = connectors.Where(c => !context.MangaConnectors.Contains(c)).ToArray();
context.MangaConnectors.AddRange(newConnectors);
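
The seeding step is meant to insert only the connectors that are not already stored, so after this change a restart should add just the new Global row. A simplified sketch of that filter, with plain names standing in for the MangaConnector entities:

// Simplified sketch of the "add only missing connectors" filter; strings stand
// in for the entity instances compared in the diff above.
using System;
using System.Linq;

string[] stored = { "Mangaworld", "ManhuaPlus", "Weebcentral", "Manganato" }; // already in the database
string[] configured = { "Mangaworld", "ManhuaPlus", "Weebcentral", "Manganato", "Global" };

string[] toAdd = configured.Where(name => !stored.Contains(name)).ToArray();
Console.WriteLine(string.Join(", ", toAdd)); // Global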

View File

@@ -0,0 +1,55 @@
namespace API.Schema.MangaConnectors;
public class Global : MangaConnector
{
private PgsqlContext context { get; init; }
public Global(PgsqlContext context) : base("Global", ["all"], [""], "")
{
this.context = context;
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
//Get all enabled Connectors
MangaConnector[] enabledConnectors = context.MangaConnectors.Where(c => c.Enabled && c.Name != "Global").ToArray();
//Create a Task for each MangaConnector to search simultaneously
Task<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[]>[] tasks =
enabledConnectors.Select(c =>
new Task<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[]>(() => c.GetManga(publicationTitle))).ToArray();
foreach (var task in tasks)
task.Start();
//Wait for all tasks to finish
do
{
Thread.Sleep(50);
} while (tasks.Any(t => t.Status < TaskStatus.RanToCompletion));
//Concatenate all results into one
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ret =
tasks.Select(t => t.IsCompletedSuccessfully ? t.Result : []).ToArray().SelectMany(i => i).ToArray();
return ret;
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
MangaConnector? mc = context.MangaConnectors.ToArray().FirstOrDefault(c => c.ValidateUrl(url));
return mc?.GetMangaFromUrl(url);
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return null;
}
public override Chapter[] GetChapters(Manga manga, string language = "en")
{
return manga.MangaConnector?.GetChapters(manga) ?? [];
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
return chapter.ParentManga?.MangaConnector?.GetChapterImageUrls(chapter) ?? [];
}
}
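
Global fans one search out to every enabled connector in parallel, tolerates individual failures, and concatenates whatever came back. A simplified standalone sketch of that pattern, using plain string results instead of the manga tuples and Task.Run/Task.WaitAll instead of the manual polling loop:

// Simplified fan-out/aggregate sketch: the delegates stand in for each
// connector's GetManga; one of them fails to show that failures are skipped.
using System;
using System.Linq;
using System.Threading.Tasks;

Func<string, string[]>[] searchers =
{
    title => new[] { $"site-a: {title}" },
    title => throw new InvalidOperationException("site-b is unreachable"),
    title => new[] { $"site-c: {title}" },
};

Task<string[]>[] tasks = searchers
    .Select(search => Task.Run(() => search("one piece")))
    .ToArray();

// Wait for every task to reach a final state; a faulted task must not abort the rest.
try { Task.WaitAll(tasks); } catch (AggregateException) { /* handled below */ }

string[] merged = tasks
    .Select(t => t.IsCompletedSuccessfully ? t.Result : Array.Empty<string>())
    .SelectMany(r => r)
    .ToArray();

Console.WriteLine(string.Join(", ", merged)); // site-a: one piece, site-c: one piece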

View File

@@ -25,6 +25,7 @@ public class PgsqlContext(DbContextOptions<PgsqlContext> options) : DbContext(options)
{
modelBuilder.Entity<MangaConnector>()
.HasDiscriminator(c => c.Name)
.HasValue<Global>("Global")
.HasValue<AsuraToon>("AsuraToon")
.HasValue<Bato>("Bato")
.HasValue<MangaHere>("MangaHere")
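
With a table-per-hierarchy discriminator on Name, every connector subtype shares one table and EF Core materializes each row as the subclass registered for its Name value; adding the Global mapping is what lets the new connector round-trip through PgsqlContext. A minimal self-contained sketch of the same mapping idea with simplified types (not the project's actual model; it assumes the Microsoft.EntityFrameworkCore.InMemory package for the demo provider):

// Minimal table-per-hierarchy sketch mirroring the mapping above.
using System;
using System.Linq;
using Microsoft.EntityFrameworkCore;

using var db = new DemoContext();
db.Connectors.Add(new GlobalConnector { Name = "Global" });
db.Connectors.Add(new BatoConnector { Name = "Bato" });
db.SaveChanges();

// Each row comes back as the subclass matching its Name discriminator.
foreach (Connector c in db.Connectors.ToList())
    Console.WriteLine($"{c.Name} -> {c.GetType().Name}");

public abstract class Connector { public string Name { get; set; } = ""; }
public class GlobalConnector : Connector { }
public class BatoConnector : Connector { }

public class DemoContext : DbContext
{
    public DbSet<Connector> Connectors => Set<Connector>();

    protected override void OnConfiguring(DbContextOptionsBuilder options)
        => options.UseInMemoryDatabase("demo"); // demo provider, assumed for the sketch

    protected override void OnModelCreating(ModelBuilder modelBuilder)
    {
        // All connector subtypes share one table; Name decides which class a row materializes as.
        modelBuilder.Entity<Connector>()
            .HasDiscriminator(c => c.Name)
            .HasValue<GlobalConnector>("Global")
            .HasValue<BatoConnector>("Bato");
    }
}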

View File

@ -4,6 +4,7 @@ using API.Schema.MangaConnectors;
using API.Schema.NotificationConnectors;
using log4net;
using log4net.Config;
using Microsoft.EntityFrameworkCore;
namespace API;
@@ -128,7 +129,14 @@ public static class Tranga
context.Jobs.Update(thread.job);
}
try
{
context.SaveChanges();
}
catch (DbUpdateException)
{
//Swallow the exception so the worker loop keeps running; the next cycle retries the save
}
Thread.Sleep(TrangaSettings.startNewJobTimeoutMs);
}
}
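
The new try/catch keeps the scheduler loop alive when a flush fails: the exception is swallowed, the loop sleeps, and the next pass saves again. A generic sketch of that keep-the-loop-alive pattern with placeholder work (nothing below is tranga's actual API):

// Generic keep-the-loop-alive sketch; PersistState and the interval are placeholders.
using System;
using System.Threading;

for (int cycle = 0; cycle < 3; cycle++) // bounded here so the sketch terminates
{
    try
    {
        PersistState(); // stands in for context.SaveChanges()
    }
    catch (InvalidOperationException e) // the diff narrows this to DbUpdateException
    {
        Console.Error.WriteLine($"Persisting failed, retrying next cycle: {e.Message}");
    }
    Thread.Sleep(100); // stands in for TrangaSettings.startNewJobTimeoutMs
}

static void PersistState()
{
    // Placeholder work: randomly fail to show that the loop survives.
    if (Random.Shared.NextDouble() < 0.5)
        throw new InvalidOperationException("simulated concurrent update");
}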