48 Commits

Author SHA1 Message Date
e063cf1fd9 Debug: MatchJobsRunningAndWaiting
UpdateCoverJobs not starting.
2025-06-28 23:15:51 +02:00
8170e1d762 JobCycle Info-Debug list jobs started/running
2025-06-28 20:35:10 +02:00
254383b006 Include Description in ComicInfo.xml 2025-06-28 20:28:28 +02:00
df431e533a Add POST Jobs/Cleanup Endpoint:
Removes failed and completed Jobs (that are not recurring)
2025-06-28 20:18:28 +02:00
9a4cc0cbaf Only log Error on image-processing if we don't know what Exception was thrown 2025-06-28 20:13:09 +02:00
861cf7e166 Fix Image-Processing:
A format not supported by ImageSharp threw an exception, causing the Job to fail.
2025-06-28 20:00:01 +02:00
7e34b3b91e Update readme to contain information on how to test locally 2025-06-28 19:48:47 +02:00
29d36484f9 include logging driver in docker-compose
Remove parameters from start-CMD in Dockerfile
2025-06-28 19:39:19 +02:00
2c6e8e4d16 Default startNewJobTimeoutMs set to 20s
2025-06-18 02:11:03 +02:00
fab2886684 ComickIo Stop double work for retrieving chapters:
We can build the canonical url from the hids
2025-06-18 01:55:19 +02:00
d9ccf71b21 DownloadSingleChapterJob add check if chapter is already downloaded before re-downloading 2025-06-18 01:18:06 +02:00
f36f34f212 We don't need to actually load the MangaConnector to know if two names match.
2025-06-18 00:23:33 +02:00
ff10432c79 Fix FilterJobsWithoutDownloading: Don't check whether a job has a connector; that takes forever 2025-06-18 00:11:05 +02:00
776e1e4890 ...use what we coded... 2025-06-17 20:18:10 +02:00
db0643fa19 More Debug 2025-06-17 20:09:49 +02:00
3eeb563ca1 Add Debug Statement to find slow operations in Job-Cycle 2025-06-17 19:55:54 +02:00
7a88b1f7ee Increase default request Limits 2025-06-17 19:55:31 +02:00
b5411e9c6c Better Debugging for HttpDownloadClient 2025-06-17 18:52:27 +02:00
07b260dea6 GC Cleanup 2025-06-17 18:52:14 +02:00
71ad32de31 Fix FlareSolverr IsJson-Check 2025-06-17 18:51:29 +02:00
ecd2c2722f Fix FlareSolverr, Flaresolverrsharp is broken 2025-06-17 18:28:18 +02:00
ff1e467ada Add caching header to Covers
2025-06-17 16:23:58 +02:00
24f68b4a8e SearchController GetFromUrl StatusCode 404 instead of 400 if URL does not yield a Manga
2025-06-17 00:25:09 +02:00
e51e90aabc FlareSolverr by FlareSolverrSharp
#372
2025-06-17 00:25:08 +02:00
dc2c27f4bd Merge pull request #402 from catumin/docker-compose
Wait for Postgres healthcheck before attempting to continue
2025-06-16 09:52:11 +02:00
406d8eef51 Wait for Postgres healthcheck before attempting to continue
Signed-off-by: Cat Aulucya <cat@aulucya.gay>
2025-06-15 21:17:24 -07:00
1fba599c79 Fix UserAgent formatting
2025-06-16 01:31:58 +02:00
a668a16035 Use TrangaSettings.userAgent 2025-06-16 01:14:05 +02:00
f89b8e1977 Fix UserAgent RequestHeader:
UserAgent should not be added after it already existed
2025-06-16 01:11:38 +02:00
11290062c0 Fix setting of version policy 2025-06-16 00:58:54 +02:00
f46910fac6 Formatting 2025-06-16 00:52:10 +02:00
f974c5ddd1 header formatting (debug) HttpDownloadClient.cs 2025-06-16 00:49:27 +02:00
a01963a125 HttpVersionPolicy.RequestVersionOrHigher 2025-06-16 00:47:26 +02:00
8a877ee465 Extend debug for requests 2025-06-16 00:34:03 +02:00
c370e656f1 HttpDownloadClient add a Debug statement if the request fails with status code and content 2025-06-16 00:10:59 +02:00
58ed976737 HttpDownloadClient Check if original uri is equal to final uri 2025-06-16 00:10:28 +02:00
1b6af73a0c MangaDex nullvalue checks and allow null-fields in response
2025-06-15 23:55:23 +02:00
70fe23857b Update UserAgent-String to Version 2.0 2025-06-15 23:26:30 +02:00
0027af2d36 Fix: First startup coverImageCache does not exist (on stale check) 2025-06-15 23:07:34 +02:00
1a8f70f501 Cleanup code for HttpDownloadClient and error-log 2025-06-15 23:00:01 +02:00
aa67c11050 Start-Job endpoint: Add option to start Jobs that our job is dependent on
2025-05-19 19:57:51 +02:00
7b38d0aa2b Add Debug-output for when the next job is due if no job was started 2025-05-19 19:57:27 +02:00
64e31fad54 Job-Cycle match JobTypes and MangaConnectors on running and waiting Jobs 2025-05-19 17:36:32 +02:00
49a70e2341 startNewJobTimeoutMs set to 5000 2025-05-19 17:36:07 +02:00
9659f2a68a MangaDex.cs year may be null
2025-05-18 22:44:32 +02:00
d474868116 Fix missing Permissions for covers 2025-05-18 22:14:51 +02:00
b1312c4164 Remove UpdateSingleChapterDownloadedJob.cs 2025-05-18 20:39:24 +02:00
33856f9927 Fix infinite jobs (because we did not create a new Scope on every cycle) 2025-05-18 20:31:46 +02:00
26 changed files with 1430 additions and 291 deletions

View File

@@ -5,6 +5,7 @@ using API.Schema.Jobs;
 using Asp.Versioning;
 using log4net;
 using Microsoft.AspNetCore.Mvc;
+using Microsoft.AspNetCore.Mvc.ModelBinding;
 using static Microsoft.AspNetCore.Http.StatusCodes;
 // ReSharper disable InconsistentNaming

@@ -326,6 +327,7 @@ public class JobController(PgsqlContext context, ILog Log) : Controller
     /// Starts the Job with the requested ID
     /// </summary>
     /// <param name="JobId">Job-ID</param>
+    /// <param name="startDependencies">Start Jobs necessary for execution</param>
     /// <response code="202">Job started</response>
     /// <response code="404">Job with ID not found</response>
     /// <response code="409">Job was already running</response>

@@ -335,16 +337,22 @@ public class JobController(PgsqlContext context, ILog Log) : Controller
     [ProducesResponseType(Status404NotFound)]
     [ProducesResponseType(Status409Conflict)]
     [ProducesResponseType<string>(Status500InternalServerError, "text/plain")]
-    public IActionResult StartJob(string JobId)
+    public IActionResult StartJob(string JobId, [FromBody(EmptyBodyBehavior = EmptyBodyBehavior.Allow)]bool startDependencies = false)
     {
         Job? ret = context.Jobs.Find(JobId);
         if (ret is null)
             return NotFound();
+        List<Job> dependencies = startDependencies ? ret.GetDependenciesAndSelf() : [ret];
         try
         {
-            if (ret.state >= JobState.Running && ret.state < JobState.Completed)
+            if(dependencies.Any(d => d.state >= JobState.Running && d.state < JobState.Completed))
                 return new ConflictResult();
-            ret.LastExecution = DateTime.UnixEpoch;
+            dependencies.ForEach(d =>
+            {
+                d.LastExecution = DateTime.UnixEpoch;
+                d.state = JobState.CompletedWaiting;
+            });
             context.SaveChanges();
             return Accepted();
         }

@@ -366,4 +374,25 @@ public class JobController(PgsqlContext context, ILog Log) : Controller
     {
         return StatusCode(Status501NotImplemented);
     }
+    /// <summary>
+    /// Removes failed and completed Jobs (that are not recurring)
+    /// </summary>
+    /// <response code="202">Job started</response>
+    /// <response code="500">Error during Database Operation</response>
+    [HttpPost("Cleanup")]
+    public IActionResult CleanupJobs()
+    {
+        try
+        {
+            context.Jobs.RemoveRange(context.Jobs.Where(j => j.state == JobState.Failed || j.state == JobState.Completed));
+            context.SaveChanges();
+            return Ok();
+        }
+        catch (Exception e)
+        {
+            Log.Error(e);
+            return StatusCode(500, e.Message);
+        }
+    }
 }
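The two endpoints above are easiest to see from the caller's side. A minimal sketch, assuming the service listens on http://localhost:6531 with a `v2/Jobs` route prefix and using a placeholder Job-ID; none of these values are taken from this diff.

```csharp
// Hedged sketch: calling the changed StartJob endpoint and the new Cleanup endpoint.
// Base address, route prefix and Job-ID are illustrative assumptions only.
using System;
using System.Net.Http;
using System.Net.Http.Json;

using HttpClient api = new() { BaseAddress = new Uri("http://localhost:6531/") };

// Start a job; the JSON body carries the new startDependencies flag,
// so jobs this job depends on are queued as well.
string jobId = "DownloadAvailableChaptersJob-XXXX"; // placeholder
HttpResponseMessage start = await api.PostAsJsonAsync($"v2/Jobs/{jobId}/Start", true);
Console.WriteLine((int)start.StatusCode); // 202 accepted, 404 unknown ID, 409 already running

// Remove failed and completed (non-recurring) jobs.
HttpResponseMessage cleanup = await api.PostAsync("v2/Jobs/Cleanup", null);
Console.WriteLine((int)cleanup.StatusCode);
```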

View File

@@ -4,6 +4,7 @@ using API.Schema.Jobs;
 using Asp.Versioning;
 using log4net;
 using Microsoft.AspNetCore.Mvc;
+using Microsoft.Net.Http.Headers;
 using SixLabors.ImageSharp;
 using SixLabors.ImageSharp.Formats.Jpeg;
 using SixLabors.ImageSharp.Processing;

@@ -139,7 +140,9 @@ public class MangaController(PgsqlContext context, ILog Log) : Controller
         using MemoryStream ms = new();
         image.Save(ms, new JpegEncoder(){Quality = 100});
-        return File(ms.GetBuffer(), "image/jpeg");
+        DateTime lastModified = new FileInfo(m.CoverFileNameInCache).LastWriteTime;
+        HttpContext.Response.Headers.CacheControl = "public";
+        return File(ms.GetBuffer(), "image/jpeg", new DateTimeOffset(lastModified), EntityTagHeaderValue.Parse($"\"{lastModified.Ticks}\""));
     }

     /// <summary>
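The cover response now carries Cache-Control, Last-Modified and an ETag derived from the cached file's write time, so a client can revalidate instead of re-downloading the re-encoded JPEG. A minimal sketch of such a conditional request; the base address and cover route are assumptions, not taken from this diff.

```csharp
// Hedged sketch: revalidating a cover against the new caching headers.
// Base address and the "v2/Manga/{id}/Cover" route are illustrative assumptions.
using System;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;

using HttpClient api = new() { BaseAddress = new Uri("http://localhost:6531/") };

HttpResponseMessage first = await api.GetAsync("v2/Manga/SomeMangaId/Cover");
EntityTagHeaderValue? etag = first.Headers.ETag; // "<LastWriteTime.Ticks>" per the change above
Console.WriteLine($"ETag: {etag}, Last-Modified: {first.Content.Headers.LastModified}");

// Ask again with If-None-Match; an unchanged cover can be answered without a new body.
HttpRequestMessage revalidate = new(HttpMethod.Get, "v2/Manga/SomeMangaId/Cover");
if (etag is not null)
    revalidate.Headers.IfNoneMatch.Add(etag);
HttpResponseMessage second = await api.SendAsync(revalidate);
Console.WriteLine(second.StatusCode == HttpStatusCode.NotModified ? "cache still valid" : "cover re-sent");
```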

View File

@@ -78,12 +78,11 @@ public class SearchController(PgsqlContext context, ILog Log) : Controller
     /// <param name="url">Manga-Page URL</param>
     /// <response code="200"></response>
     /// <response code="300">Multiple connectors found for URL</response>
-    /// <response code="400">No Manga at URL</response>
-    /// <response code="404">No connector found for URL</response>
+    /// <response code="404">Manga not found</response>
     /// <response code="500">Error during Database Operation</response>
     [HttpPost("Url")]
     [ProducesResponseType<Manga>(Status200OK, "application/json")]
-    [ProducesResponseType(Status400BadRequest)]
+    [ProducesResponseType(Status404NotFound)]
     [ProducesResponseType<string>(Status500InternalServerError, "text/plain")]
     public IActionResult GetMangaFromUrl([FromBody]string url)
     {

@@ -91,7 +90,7 @@ public class SearchController(PgsqlContext context, ILog Log) : Controller
             return StatusCode(Status500InternalServerError, "Could not find Global Connector.");
         if(connector.GetMangaFromUrl(url) is not { } manga)
-            return BadRequest();
+            return NotFound();
         try
         {
             if(AddMangaToContext(manga) is { } add)

View File

@@ -1,10 +1,12 @@
+using System.Net.Http.Headers;
 using API.MangaDownloadClients;
 using API.Schema;
 using API.Schema.Contexts;
 using API.Schema.Jobs;
 using Asp.Versioning;
 using log4net;
 using Microsoft.AspNetCore.Mvc;
+using Newtonsoft.Json;
 using Newtonsoft.Json.Linq;
 using static Microsoft.AspNetCore.Http.StatusCodes;

@@ -291,4 +293,45 @@ public class SettingsController(PgsqlContext context, ILog Log) : Controller
             return StatusCode(500, e);
         }
     }
+    /// <summary>
+    /// Sets the FlareSolverr-URL
+    /// </summary>
+    /// <param name="flareSolverrUrl">URL of FlareSolverr-Instance</param>
+    /// <response code="200"></response>
+    [HttpPost("FlareSolverr/Url")]
+    [ProducesResponseType(Status200OK)]
+    public IActionResult SetFlareSolverrUrl([FromBody]string flareSolverrUrl)
+    {
+        TrangaSettings.UpdateFlareSolverrUrl(flareSolverrUrl);
+        return Ok();
+    }
+    /// <summary>
+    /// Resets the FlareSolverr-URL (HttpClient does not use FlareSolverr anymore)
+    /// </summary>
+    /// <response code="200"></response>
+    [HttpDelete("FlareSolverr/Url")]
+    [ProducesResponseType(Status200OK)]
+    public IActionResult ClearFlareSolverrUrl()
+    {
+        TrangaSettings.UpdateFlareSolverrUrl(string.Empty);
+        return Ok();
+    }
+    /// <summary>
+    /// Test FlareSolverr
+    /// </summary>
+    /// <response code="200">FlareSolverr is working!</response>
+    /// <response code="500">FlareSolverr is not working</response>
+    [HttpPost("FlareSolverr/Test")]
+    [ProducesResponseType(Status200OK)]
+    [ProducesResponseType(Status500InternalServerError)]
+    public IActionResult TestFlareSolverrReachable()
+    {
+        const string knownProtectedUrl = "https://prowlarr.servarr.com/v1/ping";
+        FlareSolverrDownloadClient client = new();
+        RequestResult result = client.MakeRequestInternal(knownProtectedUrl);
+        return (int)result.statusCode >= 200 && (int)result.statusCode < 300 ? Ok() : StatusCode(500, result.statusCode);
+    }
 }
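A minimal client-side sketch of the three new FlareSolverr settings endpoints; the base address, route prefix and FlareSolverr URL are illustrative assumptions, not taken from this diff.

```csharp
// Hedged sketch: configuring and testing FlareSolverr via the new settings endpoints.
// Base address, "v2/Settings" prefix and the FlareSolverr URL are assumptions.
using System;
using System.Net.Http;
using System.Net.Http.Json;

using HttpClient api = new() { BaseAddress = new Uri("http://localhost:6531/") };

// Point the backend at a FlareSolverr instance (body is the plain URL string).
await api.PostAsJsonAsync("v2/Settings/FlareSolverr/Url", "http://flaresolverr:8191");

// Let the backend fetch a known protected page through FlareSolverr; 200 means it works.
HttpResponseMessage test = await api.PostAsync("v2/Settings/FlareSolverr/Test", null);
Console.WriteLine($"FlareSolverr test: {(int)test.StatusCode}");

// Clearing the URL disables FlareSolverr again.
await api.DeleteAsync("v2/Settings/FlareSolverr/Url");
```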

View File

@@ -3,7 +3,7 @@ using log4net;
 namespace API.MangaDownloadClients;

-internal abstract class DownloadClient
+public abstract class DownloadClient
 {
     private static readonly Dictionary<RequestType, DateTime> LastExecutedRateLimit = new();
     protected ILog Log { get; init; }

View File

@@ -0,0 +1,180 @@
using System.Diagnostics.CodeAnalysis;
using System.Net;
using System.Text;
using System.Text.Json;
using HtmlAgilityPack;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace API.MangaDownloadClients;
public class FlareSolverrDownloadClient : DownloadClient
{
internal override RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null)
{
if (clickButton is not null)
Log.Warn("Client can not click button");
if(referrer is not null)
Log.Warn("Client can not set referrer");
if (TrangaSettings.flareSolverrUrl == string.Empty)
{
Log.Error("FlareSolverr URL is empty");
return new(HttpStatusCode.InternalServerError, null, Stream.Null);
}
Uri flareSolverrUri = new (TrangaSettings.flareSolverrUrl);
if (flareSolverrUri.Segments.Last() != "v1")
flareSolverrUri = new UriBuilder(flareSolverrUri)
{
Path = "v1"
}.Uri;
HttpClient client = new()
{
Timeout = TimeSpan.FromSeconds(10),
DefaultVersionPolicy = HttpVersionPolicy.RequestVersionOrHigher,
DefaultRequestHeaders = { { "User-Agent", TrangaSettings.userAgent } }
};
JObject requestObj = new()
{
["cmd"] = "request.get",
["url"] = url
};
HttpRequestMessage requestMessage = new(HttpMethod.Post, flareSolverrUri)
{
Content = new StringContent(JsonConvert.SerializeObject(requestObj)),
};
requestMessage.Content.Headers.ContentType = new ("application/json");
Log.Debug($"Requesting {url}");
HttpResponseMessage? response;
try
{
response = client.Send(requestMessage);
}
catch (HttpRequestException e)
{
Log.Error(e);
return new (HttpStatusCode.Unused, null, Stream.Null);
}
if (!response.IsSuccessStatusCode)
{
Log.Debug($"Request returned status code {(int)response.StatusCode} {response.StatusCode}:\n" +
$"=====\n" +
$"Request:\n" +
$"{requestMessage.Method} {requestMessage.RequestUri}\n" +
$"{requestMessage.Version} {requestMessage.VersionPolicy}\n" +
$"Headers:\n\t{string.Join("\n\t", requestMessage.Headers.Select(h => $"{h.Key}: <{string.Join(">, <", h.Value)}"))}>\n" +
$"{requestMessage.Content?.ReadAsStringAsync().Result}" +
$"=====\n" +
$"Response:\n" +
$"{response.Version}\n" +
$"Headers:\n\t{string.Join("\n\t", response.Headers.Select(h => $"{h.Key}: <{string.Join(">, <", h.Value)}"))}>\n" +
$"{response.Content.ReadAsStringAsync().Result}");
return new (response.StatusCode, null, Stream.Null);
}
string responseString = response.Content.ReadAsStringAsync().Result;
JObject responseObj = JObject.Parse(responseString);
if (!IsInCorrectFormat(responseObj, out string? reason))
{
Log.Error($"Wrong format: {reason}");
return new(HttpStatusCode.Unused, null, Stream.Null);
}
string statusResponse = responseObj["status"]!.Value<string>()!;
if (statusResponse != "ok")
{
Log.Debug($"Status is not ok: {statusResponse}");
return new(HttpStatusCode.Unused, null, Stream.Null);
}
JObject solution = (responseObj["solution"] as JObject)!;
if (!Enum.TryParse(solution["status"]!.Value<int>().ToString(), out HttpStatusCode statusCode))
{
Log.Error($"Wrong format: Cant parse status code: {solution["status"]!.Value<int>()}");
return new(HttpStatusCode.Unused, null, Stream.Null);
}
if (statusCode < HttpStatusCode.OK || statusCode >= HttpStatusCode.MultipleChoices)
{
Log.Debug($"Status is: {statusCode}");
return new(statusCode, null, Stream.Null);
}
if (solution["response"]!.Value<string>() is not { } htmlString)
{
Log.Error("Wrong format: Cant find response in solution");
return new(HttpStatusCode.Unused, null, Stream.Null);
}
if (IsJson(htmlString, out HtmlDocument document, out string? json))
{
MemoryStream ms = new();
ms.Write(Encoding.UTF8.GetBytes(json));
ms.Position = 0;
return new(statusCode, document, ms);
}
else
{
MemoryStream ms = new();
ms.Write(Encoding.UTF8.GetBytes(htmlString));
ms.Position = 0;
return new(statusCode, document, ms);
}
}
private bool IsInCorrectFormat(JObject responseObj, [NotNullWhen(false)]out string? reason)
{
reason = null;
if (!responseObj.ContainsKey("status"))
{
reason = "Cant find status on response";
return false;
}
if (responseObj["solution"] is not JObject solution)
{
reason = "Cant find solution";
return false;
}
if (!solution.ContainsKey("status"))
{
reason = "Wrong format: Cant find status in solution";
return false;
}
if (!solution.ContainsKey("response"))
{
reason = "Wrong format: Cant find response in solution";
return false;
}
return true;
}
private bool IsJson(string htmlString, out HtmlDocument document, [NotNullWhen(true)]out string? jsonString)
{
jsonString = null;
document = new();
document.LoadHtml(htmlString);
HtmlNode pre = document.DocumentNode.SelectSingleNode("//pre");
try
{
using JsonDocument _ = JsonDocument.Parse(pre.InnerText);
jsonString = pre.InnerText;
return true;
}
catch (JsonReaderException)
{
return false;
}
}
}
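For reference, the exchange this client performs against FlareSolverr's `/v1` endpoint, sketched with the same Newtonsoft types; only the field names mirror what the code above sends and parses, the concrete values are illustrative.

```csharp
// Hedged sketch of the FlareSolverr request/response shape the client relies on.
using System;
using Newtonsoft.Json.Linq;

// POSTed to <flareSolverrUrl>/v1 as application/json:
JObject request = new()
{
    ["cmd"] = "request.get",
    ["url"] = "https://example.com/protected-page"
};

// Expected answer: top-level "status" plus a "solution" object carrying the
// upstream HTTP status and the rendered body (HTML, or JSON wrapped in <pre>).
JObject response = new()
{
    ["status"] = "ok",
    ["solution"] = new JObject
    {
        ["status"] = 200,
        ["response"] = "<html><body><pre>{\"example\":true}</pre></body></html>"
    }
};

// The same unwrapping MakeRequestInternal performs before returning a RequestResult.
int upstreamStatus = response["solution"]!["status"]!.Value<int>();
string body = response["solution"]!["response"]!.Value<string>()!;
Console.WriteLine($"{request["cmd"]} -> {upstreamStatus}, {body.Length} chars");
```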

View File

@@ -5,46 +5,54 @@ namespace API.MangaDownloadClients;

 internal class HttpDownloadClient : DownloadClient
 {
-    private static readonly HttpClient Client = new()
-    {
-        Timeout = TimeSpan.FromSeconds(10)
-    };
-
-    public HttpDownloadClient()
-    {
-        Client.DefaultRequestHeaders.TryAddWithoutValidation("User-Agent", TrangaSettings.userAgent);
-    }
-
     internal override RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null)
     {
         if (clickButton is not null)
-            Log.Warn("Can not click button on static site.");
-        HttpResponseMessage? response = null;
-        while (response is null)
-        {
-            HttpRequestMessage requestMessage = new(HttpMethod.Get, url);
-            if (referrer is not null)
-                requestMessage.Headers.Referrer = new Uri(referrer);
-            Log.Debug($"Requesting {url}");
-            try
-            {
-                response = Client.Send(requestMessage);
-            }
-            catch (Exception e)
-            {
-                switch (e)
-                {
-                    case TaskCanceledException:
-                        return new RequestResult(HttpStatusCode.RequestTimeout, null, Stream.Null);
-                    case HttpRequestException:
-                        return new RequestResult(HttpStatusCode.BadRequest, null, Stream.Null);
-                }
-            }
-        }
+            Log.Warn("Client can not click button");
+        HttpClient client = new();
+        client.Timeout = TimeSpan.FromSeconds(10);
+        client.DefaultVersionPolicy = HttpVersionPolicy.RequestVersionOrHigher;
+        client.DefaultRequestHeaders.Add("User-Agent", TrangaSettings.userAgent);
+        HttpResponseMessage? response;
+        Uri uri = new(url);
+        HttpRequestMessage requestMessage = new(HttpMethod.Get, uri);
+        if (referrer is not null)
+            requestMessage.Headers.Referrer = new (referrer);
+        Log.Debug($"Requesting {url}");
+        try
+        {
+            response = client.Send(requestMessage);
+        }
+        catch (HttpRequestException e)
+        {
+            Log.Error(e);
+            return new (HttpStatusCode.Unused, null, Stream.Null);
+        }
         if (!response.IsSuccessStatusCode)
         {
-            return new RequestResult(response.StatusCode, null, Stream.Null);
+            Log.Debug($"Request returned status code {(int)response.StatusCode} {response.StatusCode}");
+            if (response.Headers.Server.Any(s =>
+                    (s.Product?.Name ?? "").Contains("cloudflare", StringComparison.InvariantCultureIgnoreCase)))
+            {
+                Log.Debug("Retrying with FlareSolverr!");
+                return new FlareSolverrDownloadClient().MakeRequestInternal(url, referrer, clickButton);
+            }
+            else
+            {
+                Log.Debug($"Request returned status code {(int)response.StatusCode} {response.StatusCode}:\n" +
+                          $"=====\n" +
+                          $"Request:\n" +
+                          $"{requestMessage.Method} {requestMessage.RequestUri}\n" +
+                          $"{requestMessage.Version} {requestMessage.VersionPolicy}\n" +
+                          $"Headers:\n\t{string.Join("\n\t", requestMessage.Headers.Select(h => $"{h.Key}: <{string.Join(">, <", h.Value)}"))}>\n" +
+                          $"{requestMessage.Content?.ReadAsStringAsync().Result}" +
+                          $"=====\n" +
+                          $"Response:\n" +
+                          $"{response.Version}\n" +
+                          $"Headers:\n\t{string.Join("\n\t", response.Headers.Select(h => $"{h.Key}: <{string.Join(">, <", h.Value)}"))}>\n" +
+                          $"{response.Content.ReadAsStringAsync().Result}");
+            }
         }

         Stream stream;

@@ -55,7 +63,7 @@ internal class HttpDownloadClient : DownloadClient
         catch (Exception e)
         {
             Log.Error(e);
-            return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
+            return new (HttpStatusCode.Unused, null, Stream.Null);
         }

         HtmlDocument? document = null;

@@ -69,12 +77,11 @@ internal class HttpDownloadClient : DownloadClient
         }

         // Request has been redirected to another page. For example, it redirects directly to the results when there is only 1 result
-        if (response.RequestMessage is not null && response.RequestMessage.RequestUri is not null)
+        if (response.RequestMessage is not null && response.RequestMessage.RequestUri is not null && response.RequestMessage.RequestUri != uri)
         {
-            return new RequestResult(response.StatusCode, document, stream, true,
-                response.RequestMessage.RequestUri.AbsoluteUri);
+            return new (response.StatusCode, document, stream, true, response.RequestMessage.RequestUri.AbsoluteUri);
         }
-        return new RequestResult(response.StatusCode, document, stream);
+        return new (response.StatusCode, document, stream);
     }
 }

View File

@@ -0,0 +1,724 @@
// <auto-generated />
using System;
using API.Schema.Contexts;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace API.Migrations.pgsql
{
[DbContext(typeof(PgsqlContext))]
[Migration("20250518183729_Remove-UpdateSingleChapterDownloaded-Job")]
partial class RemoveUpdateSingleChapterDownloadedJob
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.5")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("API.Schema.Author", b =>
{
b.Property<string>("AuthorId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("AuthorName")
.IsRequired()
.HasMaxLength(128)
.HasColumnType("character varying(128)");
b.HasKey("AuthorId");
b.ToTable("Authors");
});
modelBuilder.Entity("API.Schema.Chapter", b =>
{
b.Property<string>("ChapterId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("ChapterNumber")
.IsRequired()
.HasMaxLength(10)
.HasColumnType("character varying(10)");
b.Property<bool>("Downloaded")
.HasColumnType("boolean");
b.Property<string>("FileName")
.IsRequired()
.HasMaxLength(256)
.HasColumnType("character varying(256)");
b.Property<string>("IdOnConnectorSite")
.HasMaxLength(256)
.HasColumnType("character varying(256)");
b.Property<string>("ParentMangaId")
.IsRequired()
.HasColumnType("character varying(64)");
b.Property<string>("Title")
.HasMaxLength(256)
.HasColumnType("character varying(256)");
b.Property<string>("Url")
.IsRequired()
.HasMaxLength(2048)
.HasColumnType("character varying(2048)");
b.Property<int?>("VolumeNumber")
.HasColumnType("integer");
b.HasKey("ChapterId");
b.HasIndex("ParentMangaId");
b.ToTable("Chapters");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.Property<string>("JobId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<bool>("Enabled")
.HasColumnType("boolean");
b.Property<byte>("JobType")
.HasColumnType("smallint");
b.Property<DateTime>("LastExecution")
.HasColumnType("timestamp with time zone");
b.Property<string>("ParentJobId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<decimal>("RecurrenceMs")
.HasColumnType("numeric(20,0)");
b.Property<byte>("state")
.HasColumnType("smallint");
b.HasKey("JobId");
b.HasIndex("ParentJobId");
b.ToTable("Jobs");
b.HasDiscriminator<byte>("JobType");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("API.Schema.LocalLibrary", b =>
{
b.Property<string>("LocalLibraryId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("BasePath")
.IsRequired()
.HasMaxLength(256)
.HasColumnType("character varying(256)");
b.Property<string>("LibraryName")
.IsRequired()
.HasMaxLength(512)
.HasColumnType("character varying(512)");
b.HasKey("LocalLibraryId");
b.ToTable("LocalLibraries");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.Property<string>("MangaId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("CoverFileNameInCache")
.HasMaxLength(512)
.HasColumnType("character varying(512)");
b.Property<string>("CoverUrl")
.IsRequired()
.HasMaxLength(512)
.HasColumnType("character varying(512)");
b.Property<string>("Description")
.IsRequired()
.HasColumnType("text");
b.Property<string>("DirectoryName")
.IsRequired()
.HasMaxLength(1024)
.HasColumnType("character varying(1024)");
b.Property<string>("IdOnConnectorSite")
.IsRequired()
.HasMaxLength(256)
.HasColumnType("character varying(256)");
b.Property<float>("IgnoreChaptersBefore")
.HasColumnType("real");
b.Property<string>("LibraryId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("MangaConnectorName")
.IsRequired()
.HasMaxLength(32)
.HasColumnType("character varying(32)");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(512)
.HasColumnType("character varying(512)");
b.Property<string>("OriginalLanguage")
.HasMaxLength(8)
.HasColumnType("character varying(8)");
b.Property<byte>("ReleaseStatus")
.HasColumnType("smallint");
b.Property<string>("WebsiteUrl")
.IsRequired()
.HasMaxLength(512)
.HasColumnType("character varying(512)");
b.Property<long?>("Year")
.HasColumnType("bigint");
b.HasKey("MangaId");
b.HasIndex("LibraryId");
b.HasIndex("MangaConnectorName");
b.ToTable("Mangas");
});
modelBuilder.Entity("API.Schema.MangaConnectors.MangaConnector", b =>
{
b.Property<string>("Name")
.HasMaxLength(32)
.HasColumnType("character varying(32)");
b.PrimitiveCollection<string[]>("BaseUris")
.IsRequired()
.HasMaxLength(256)
.HasColumnType("text[]");
b.Property<bool>("Enabled")
.HasColumnType("boolean");
b.Property<string>("IconUrl")
.IsRequired()
.HasMaxLength(2048)
.HasColumnType("character varying(2048)");
b.PrimitiveCollection<string[]>("SupportedLanguages")
.IsRequired()
.HasMaxLength(8)
.HasColumnType("text[]");
b.HasKey("Name");
b.ToTable("MangaConnectors");
b.HasDiscriminator<string>("Name").HasValue("MangaConnector");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("API.Schema.MangaTag", b =>
{
b.Property<string>("Tag")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasKey("Tag");
b.ToTable("Tags");
});
modelBuilder.Entity("AuthorToManga", b =>
{
b.Property<string>("AuthorIds")
.HasColumnType("character varying(64)");
b.Property<string>("MangaIds")
.HasColumnType("character varying(64)");
b.HasKey("AuthorIds", "MangaIds");
b.HasIndex("MangaIds");
b.ToTable("AuthorToManga");
});
modelBuilder.Entity("JobJob", b =>
{
b.Property<string>("DependsOnJobsJobId")
.HasColumnType("character varying(64)");
b.Property<string>("JobId")
.HasColumnType("character varying(64)");
b.HasKey("DependsOnJobsJobId", "JobId");
b.HasIndex("JobId");
b.ToTable("JobJob");
});
modelBuilder.Entity("MangaTagToManga", b =>
{
b.Property<string>("MangaTagIds")
.HasColumnType("character varying(64)");
b.Property<string>("MangaIds")
.HasColumnType("character varying(64)");
b.HasKey("MangaTagIds", "MangaIds");
b.HasIndex("MangaIds");
b.ToTable("MangaTagToManga");
});
modelBuilder.Entity("API.Schema.Jobs.DownloadAvailableChaptersJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.ToTable("Jobs", t =>
{
t.Property("MangaId")
.HasColumnName("DownloadAvailableChaptersJob_MangaId");
});
b.HasDiscriminator().HasValue((byte)1);
});
modelBuilder.Entity("API.Schema.Jobs.DownloadMangaCoverJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.HasDiscriminator().HasValue((byte)4);
});
modelBuilder.Entity("API.Schema.Jobs.DownloadSingleChapterJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("ChapterId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("ChapterId");
b.HasDiscriminator().HasValue((byte)0);
});
modelBuilder.Entity("API.Schema.Jobs.MoveFileOrFolderJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("FromLocation")
.IsRequired()
.HasMaxLength(256)
.HasColumnType("character varying(256)");
b.Property<string>("ToLocation")
.IsRequired()
.HasMaxLength(256)
.HasColumnType("character varying(256)");
b.HasDiscriminator().HasValue((byte)3);
});
modelBuilder.Entity("API.Schema.Jobs.MoveMangaLibraryJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("ToLibraryId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.HasIndex("ToLibraryId");
b.ToTable("Jobs", t =>
{
t.Property("MangaId")
.HasColumnName("MoveMangaLibraryJob_MangaId");
});
b.HasDiscriminator().HasValue((byte)7);
});
modelBuilder.Entity("API.Schema.Jobs.RetrieveChaptersJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("Language")
.IsRequired()
.HasMaxLength(8)
.HasColumnType("character varying(8)");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.ToTable("Jobs", t =>
{
t.Property("MangaId")
.HasColumnName("RetrieveChaptersJob_MangaId");
});
b.HasDiscriminator().HasValue((byte)5);
});
modelBuilder.Entity("API.Schema.Jobs.UpdateChaptersDownloadedJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.ToTable("Jobs", t =>
{
t.Property("MangaId")
.HasColumnName("UpdateChaptersDownloadedJob_MangaId");
});
b.HasDiscriminator().HasValue((byte)6);
});
modelBuilder.Entity("API.Schema.Jobs.UpdateCoverJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.ToTable("Jobs", t =>
{
t.Property("MangaId")
.HasColumnName("UpdateCoverJob_MangaId");
});
b.HasDiscriminator().HasValue((byte)9);
});
modelBuilder.Entity("API.Schema.MangaConnectors.ComickIo", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("ComickIo");
});
modelBuilder.Entity("API.Schema.MangaConnectors.Global", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("Global");
});
modelBuilder.Entity("API.Schema.MangaConnectors.MangaDex", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("MangaDex");
});
modelBuilder.Entity("API.Schema.Chapter", b =>
{
b.HasOne("API.Schema.Manga", "ParentManga")
.WithMany("Chapters")
.HasForeignKey("ParentMangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("ParentManga");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.HasOne("API.Schema.Jobs.Job", "ParentJob")
.WithMany()
.HasForeignKey("ParentJobId")
.OnDelete(DeleteBehavior.Cascade);
b.Navigation("ParentJob");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.HasOne("API.Schema.LocalLibrary", "Library")
.WithMany()
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.SetNull);
b.HasOne("API.Schema.MangaConnectors.MangaConnector", "MangaConnector")
.WithMany()
.HasForeignKey("MangaConnectorName")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.OwnsMany("API.Schema.Link", "Links", b1 =>
{
b1.Property<string>("LinkId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b1.Property<string>("LinkProvider")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b1.Property<string>("LinkUrl")
.IsRequired()
.HasMaxLength(2048)
.HasColumnType("character varying(2048)");
b1.Property<string>("MangaId")
.IsRequired()
.HasColumnType("character varying(64)");
b1.HasKey("LinkId");
b1.HasIndex("MangaId");
b1.ToTable("Link");
b1.WithOwner()
.HasForeignKey("MangaId");
});
b.OwnsMany("API.Schema.MangaAltTitle", "AltTitles", b1 =>
{
b1.Property<string>("AltTitleId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b1.Property<string>("Language")
.IsRequired()
.HasMaxLength(8)
.HasColumnType("character varying(8)");
b1.Property<string>("MangaId")
.IsRequired()
.HasColumnType("character varying(64)");
b1.Property<string>("Title")
.IsRequired()
.HasMaxLength(256)
.HasColumnType("character varying(256)");
b1.HasKey("AltTitleId");
b1.HasIndex("MangaId");
b1.ToTable("MangaAltTitle");
b1.WithOwner()
.HasForeignKey("MangaId");
});
b.Navigation("AltTitles");
b.Navigation("Library");
b.Navigation("Links");
b.Navigation("MangaConnector");
});
modelBuilder.Entity("AuthorToManga", b =>
{
b.HasOne("API.Schema.Author", null)
.WithMany()
.HasForeignKey("AuthorIds")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.Manga", null)
.WithMany()
.HasForeignKey("MangaIds")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("JobJob", b =>
{
b.HasOne("API.Schema.Jobs.Job", null)
.WithMany()
.HasForeignKey("DependsOnJobsJobId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.Jobs.Job", null)
.WithMany()
.HasForeignKey("JobId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("MangaTagToManga", b =>
{
b.HasOne("API.Schema.Manga", null)
.WithMany()
.HasForeignKey("MangaIds")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.MangaTag", null)
.WithMany()
.HasForeignKey("MangaTagIds")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("API.Schema.Jobs.DownloadAvailableChaptersJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Jobs.DownloadMangaCoverJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Jobs.DownloadSingleChapterJob", b =>
{
b.HasOne("API.Schema.Chapter", "Chapter")
.WithMany()
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Schema.Jobs.MoveMangaLibraryJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.LocalLibrary", "ToLibrary")
.WithMany()
.HasForeignKey("ToLibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
b.Navigation("ToLibrary");
});
modelBuilder.Entity("API.Schema.Jobs.RetrieveChaptersJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Jobs.UpdateChaptersDownloadedJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Jobs.UpdateCoverJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.Navigation("Chapters");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,50 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Migrations.pgsql
{
/// <inheritdoc />
public partial class RemoveUpdateSingleChapterDownloadedJob : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropForeignKey(
name: "FK_Jobs_Chapters_UpdateSingleChapterDownloadedJob_ChapterId",
table: "Jobs");
migrationBuilder.DropIndex(
name: "IX_Jobs_UpdateSingleChapterDownloadedJob_ChapterId",
table: "Jobs");
migrationBuilder.DropColumn(
name: "UpdateSingleChapterDownloadedJob_ChapterId",
table: "Jobs");
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "UpdateSingleChapterDownloadedJob_ChapterId",
table: "Jobs",
type: "character varying(64)",
maxLength: 64,
nullable: true);
migrationBuilder.CreateIndex(
name: "IX_Jobs_UpdateSingleChapterDownloadedJob_ChapterId",
table: "Jobs",
column: "UpdateSingleChapterDownloadedJob_ChapterId");
migrationBuilder.AddForeignKey(
name: "FK_Jobs_Chapters_UpdateSingleChapterDownloadedJob_ChapterId",
table: "Jobs",
column: "UpdateSingleChapterDownloadedJob_ChapterId",
principalTable: "Chapters",
principalColumn: "ChapterId",
onDelete: ReferentialAction.Cascade);
}
}
}

View File

@@ -457,26 +457,6 @@ namespace API.Migrations.pgsql
                 b.HasDiscriminator().HasValue((byte)9);
             });

-            modelBuilder.Entity("API.Schema.Jobs.UpdateSingleChapterDownloadedJob", b =>
-            {
-                b.HasBaseType("API.Schema.Jobs.Job");
-
-                b.Property<string>("ChapterId")
-                    .IsRequired()
-                    .HasMaxLength(64)
-                    .HasColumnType("character varying(64)");
-
-                b.HasIndex("ChapterId");
-
-                b.ToTable("Jobs", t =>
-                {
-                    t.Property("ChapterId")
-                        .HasColumnName("UpdateSingleChapterDownloadedJob_ChapterId");
-                });
-
-                b.HasDiscriminator().HasValue((byte)8);
-            });
-
             modelBuilder.Entity("API.Schema.MangaConnectors.ComickIo", b =>
             {
                 b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");

@@ -731,17 +711,6 @@ namespace API.Migrations.pgsql
                 b.Navigation("Manga");
             });

-            modelBuilder.Entity("API.Schema.Jobs.UpdateSingleChapterDownloadedJob", b =>
-            {
-                b.HasOne("API.Schema.Chapter", "Chapter")
-                    .WithMany()
-                    .HasForeignKey("ChapterId")
-                    .OnDelete(DeleteBehavior.Cascade)
-                    .IsRequired();
-
-                b.Navigation("Chapter");
-            });
-
             modelBuilder.Entity("API.Schema.Manga", b =>
             {
                 b.Navigation("Chapters");

View File

@@ -124,7 +124,7 @@ using (IServiceScope scope = app.Services.CreateScope())
     context.Jobs.AddRange(context.Jobs.Where(j => j.JobType == JobType.DownloadAvailableChaptersJob)
         .Include(downloadAvailableChaptersJob => ((DownloadAvailableChaptersJob)downloadAvailableChaptersJob).Manga)
         .ToList()
-        .Select(dacj => new UpdateChaptersDownloadedJob(((DownloadAvailableChaptersJob)dacj).Manga, 0)));
+        .Select(dacj => new UpdateChaptersDownloadedJob(((DownloadAvailableChaptersJob)dacj).Manga, 0, dacj)));
     context.Jobs.RemoveRange(context.Jobs.Where(j => j.state == JobState.Completed && j.RecurrenceMs < 1));
     foreach (Job job in context.Jobs.Where(j => j.state == JobState.Running))
     {

View File

@@ -187,6 +187,8 @@ public class Chapter : IComparable<Chapter>
         comicInfo.Add(new XElement("Writer", string.Join(',', ParentManga.Authors.Select(author => author.AuthorName))));
         if(ParentManga.OriginalLanguage is not null)
             comicInfo.Add(new XElement("LanguageISO", ParentManga.OriginalLanguage));
+        if(ParentManga.Description != string.Empty)
+            comicInfo.Add(new XElement("Summary", ParentManga.Description));
         return comicInfo.ToString();
     }
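The change above writes the manga description into ComicInfo.xml as a `Summary` element. A reduced sketch of the resulting fragment, built with the same System.Xml.Linq types; the values and the set of sibling elements are illustrative only.

```csharp
// Hedged sketch: the Summary element added by the change above, with illustrative values.
using System;
using System.Xml.Linq;

string description = "A short synopsis pulled from the connector."; // illustrative

XElement comicInfo = new("ComicInfo",
    new XElement("Writer", "Some Author"),
    new XElement("LanguageISO", "ja"));

// New behaviour: Summary is only added when a description exists.
if (description != string.Empty)
    comicInfo.Add(new XElement("Summary", description));

Console.WriteLine(comicInfo);
// <ComicInfo>
//   <Writer>Some Author</Writer>
//   <LanguageISO>ja</LanguageISO>
//   <Summary>A short synopsis pulled from the connector.</Summary>
// </ComicInfo>
```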

View File

@@ -39,8 +39,7 @@ public class PgsqlContext(DbContextOptions<PgsqlContext> options) : DbContext(op
         .HasValue<DownloadMangaCoverJob>(JobType.DownloadMangaCoverJob)
         .HasValue<RetrieveChaptersJob>(JobType.RetrieveChaptersJob)
         .HasValue<UpdateCoverJob>(JobType.UpdateCoverJob)
-        .HasValue<UpdateChaptersDownloadedJob>(JobType.UpdateChaptersDownloadedJob)
-        .HasValue<UpdateSingleChapterDownloadedJob>(JobType.UpdateSingleChapterDownloadedJob);
+        .HasValue<UpdateChaptersDownloadedJob>(JobType.UpdateChaptersDownloadedJob);

     //Job specification
     modelBuilder.Entity<DownloadAvailableChaptersJob>()

View File

@@ -45,6 +45,11 @@ public class DownloadSingleChapterJob : Job
     protected override IEnumerable<Job> RunInternal(PgsqlContext context)
     {
+        if (Chapter.Downloaded)
+        {
+            Log.Info("Chapter was already downloaded.");
+            return [];
+        }
         string[] imageUrls = Chapter.ParentManga.MangaConnector.GetChapterImageUrls(Chapter);
         if (imageUrls.Length < 1)
         {

@@ -129,21 +134,39 @@ public class DownloadSingleChapterJob : Job
     {
         if (!TrangaSettings.bwImages && TrangaSettings.compression == 100)
         {
-            Log.Debug($"No processing requested for image");
+            Log.Debug("No processing requested for image");
             return;
         }
         Log.Debug($"Processing image: {imagePath}");
+        try
+        {
             using Image image = Image.Load(imagePath);
-        File.Delete(imagePath);
-        if(TrangaSettings.bwImages)
+            if (TrangaSettings.bwImages)
                 image.Mutate(i => i.ApplyProcessor(new AdaptiveThresholdProcessor()));
+            File.Delete(imagePath);
             image.SaveAsJpeg(imagePath, new JpegEncoder()
             {
                 Quality = TrangaSettings.compression
             });
         }
+        catch (Exception e)
+        {
+            if (e is UnknownImageFormatException or NotSupportedException)
+            {
+                //If the Image-Format is not processable by ImageSharp, we can't modify it.
+                Log.Debug($"Unable to process {imagePath}: Not supported image format");
+            }
+            else if (e is InvalidImageContentException)
+            {
+                Log.Debug($"Unable to process {imagePath}: Invalid Content");
+            }
+            else
+            {
+                Log.Error(e);
+            }
+        }
+    }

     private void CopyCoverFromCacheToDownloadLocation(Manga manga)
     {

@@ -167,7 +190,7 @@ public class DownloadSingleChapterJob : Job
         string newFilePath = Path.Join(publicationFolder, $"cover.{Path.GetFileName(fileInCache).Split('.')[^1]}" );
         File.Copy(fileInCache, newFilePath, true);
         if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
-            File.SetUnixFileMode(newFilePath, GroupRead | GroupWrite | UserRead | UserWrite);
+            File.SetUnixFileMode(newFilePath, GroupRead | GroupWrite | UserRead | UserWrite | OtherRead | OtherWrite);
         Log.Debug($"Copied cover from {fileInCache} to {newFilePath}");
     }

View File

@@ -66,32 +66,31 @@ public abstract class Job
         this.Log = LogManager.GetLogger(this.GetType());
     }

-    public IEnumerable<Job> Run(PgsqlContext context)
+    public IEnumerable<Job> Run(PgsqlContext context, ref bool running)
     {
         Log.Info($"Running job {JobId}");
         DateTime jobStart = DateTime.UtcNow;
-        context.Attach(this);
         Job[]? ret = null;

         try
         {
             this.state = JobState.Running;
             context.SaveChanges();
+            running = true;
             ret = RunInternal(context).ToArray();
+            Log.Info($"Job {JobId} completed. Generated {ret.Length} new jobs.");
             this.state = this.RecurrenceMs > 0 ? JobState.CompletedWaiting : JobState.Completed;
             this.LastExecution = DateTime.UtcNow;
-            context.Jobs.AddRange(ret);
-            Log.Info($"Job {JobId} completed. Generated {ret.Length} new jobs.");
             context.SaveChanges();
         }
         catch (Exception e)
         {
             if (e is not DbUpdateException)
             {
+                Log.Error($"Failed to run job {JobId}", e);
                 this.state = JobState.Failed;
                 this.Enabled = false;
                 this.LastExecution = DateTime.UtcNow;
-                Log.Error($"Failed to run job {JobId}", e);
                 context.SaveChanges();
             }
             else

@@ -100,12 +99,36 @@ public abstract class Job
             }
         }

+        try
+        {
+            if (ret != null)
+            {
+                context.Jobs.AddRange(ret);
+                context.SaveChanges();
+            }
+        }
+        catch (DbUpdateException e)
+        {
+            Log.Error($"Failed to update Database {JobId}", e);
+        }
+
         Log.Info($"Finished Job {JobId}! (took {DateTime.UtcNow.Subtract(jobStart).TotalMilliseconds}ms)");
         return ret ?? [];
     }

     protected abstract IEnumerable<Job> RunInternal(PgsqlContext context);

+    public List<Job> GetDependenciesAndSelf()
+    {
+        List<Job> ret = new ();
+        foreach (Job job in DependsOnJobs)
+        {
+            ret.AddRange(job.GetDependenciesAndSelf());
+        }
+        ret.Add(this);
+        return ret;
+    }
+
     public override string ToString()
     {
         return $"{JobId}";
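GetDependenciesAndSelf flattens the dependency tree depth-first with the job itself last, which is what the StartJob endpoint uses when startDependencies is true. A self-contained sketch of the same traversal on a plain stand-in type; the job names and graph are illustrative, and the recursion assumes the graph is acyclic.

```csharp
// Hedged sketch of the GetDependenciesAndSelf traversal, using a plain node type
// instead of the EF-backed Job class: dependencies first (recursively), then the node itself.
using System;
using System.Collections.Generic;
using System.Linq;

Node cover = new("DownloadMangaCoverJob", new());
Node retrieve = new("RetrieveChaptersJob", new());
Node download = new("DownloadAvailableChaptersJob", new() { cover, retrieve }); // illustrative graph

Console.WriteLine(string.Join(" -> ", download.GetDependenciesAndSelf().Select(n => n.Id)));
// prints: DownloadMangaCoverJob -> RetrieveChaptersJob -> DownloadAvailableChaptersJob

record Node(string Id, List<Node> DependsOn)
{
    // Same shape as Job.GetDependenciesAndSelf(): dependencies first, then the node itself.
    public List<Node> GetDependenciesAndSelf()
    {
        List<Node> ret = new();
        foreach (Node dep in DependsOn)
            ret.AddRange(dep.GetDependenciesAndSelf()); // assumes the dependency graph is acyclic
        ret.Add(this);
        return ret;
    }
}
```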

View File

@@ -5,12 +5,10 @@ public enum JobType : byte
 {
     DownloadSingleChapterJob = 0,
     DownloadAvailableChaptersJob = 1,
-    UpdateMetaDataJob = 2,
     MoveFileOrFolderJob = 3,
     DownloadMangaCoverJob = 4,
     RetrieveChaptersJob = 5,
     UpdateChaptersDownloadedJob = 6,
     MoveMangaLibraryJob = 7,
-    UpdateSingleChapterDownloadedJob = 8,
     UpdateCoverJob = 9,
 }

View File

@@ -1,5 +1,6 @@
 using System.ComponentModel.DataAnnotations;
 using API.Schema.Contexts;
+using Microsoft.EntityFrameworkCore;
 using Microsoft.EntityFrameworkCore.Infrastructure;
 using Newtonsoft.Json;

@@ -36,6 +37,20 @@ public class UpdateChaptersDownloadedJob : Job
     protected override IEnumerable<Job> RunInternal(PgsqlContext context)
     {
-        return Manga.Chapters.Select(c => new UpdateSingleChapterDownloadedJob(c, this));
+        context.Entry(Manga).Reference<LocalLibrary>(m => m.Library).Load();
+        foreach (Chapter mangaChapter in Manga.Chapters)
+        {
+            mangaChapter.Downloaded = mangaChapter.CheckDownloaded();
+        }
+        try
+        {
+            context.SaveChanges();
+        }
+        catch (DbUpdateException e)
+        {
+            Log.Error(e);
+        }
+        return [];
     }
 }

View File

@@ -1,54 +0,0 @@
using System.ComponentModel.DataAnnotations;
using API.Schema.Contexts;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Newtonsoft.Json;
namespace API.Schema.Jobs;
public class UpdateSingleChapterDownloadedJob : Job
{
[StringLength(64)] [Required] public string ChapterId { get; init; }
private Chapter _chapter = null!;
[JsonIgnore]
public Chapter Chapter
{
get => LazyLoader.Load(this, ref _chapter);
init => _chapter = value;
}
public UpdateSingleChapterDownloadedJob(Chapter chapter, Job? parentJob = null, ICollection<Job>? dependsOnJobs = null)
: base(TokenGen.CreateToken(typeof(UpdateSingleChapterDownloadedJob)), JobType.UpdateSingleChapterDownloadedJob, 0, parentJob, dependsOnJobs)
{
this.ChapterId = chapter.ChapterId;
this.Chapter = chapter;
}
/// <summary>
/// EF ONLY!!!
/// </summary>
internal UpdateSingleChapterDownloadedJob(ILazyLoader lazyLoader, string jobId, ulong recurrenceMs, string chapterId, string? parentJobId)
: base(lazyLoader, jobId, JobType.UpdateSingleChapterDownloadedJob, recurrenceMs, parentJobId)
{
this.ChapterId = chapterId;
}
protected override IEnumerable<Job> RunInternal(PgsqlContext context)
{
context.Entry(Chapter).Reference<Manga>(c => c.ParentManga).Load();
context.Entry(Chapter.ParentManga).Reference<LocalLibrary>(m => m.Library).Load();
Chapter.Downloaded = Chapter.CheckDownloaded();
try
{
context.SaveChanges();
}
catch (DbUpdateException e)
{
Log.Error(e);
}
return [];
}
}

View File

@@ -78,7 +78,7 @@ public class ComickIo : MangaConnector
     public override Chapter[] GetChapters(Manga manga, string? language = null)
     {
         Log.Info($"Getting Chapters: {manga.IdOnConnectorSite}");
-        List<string> chapterHids = new();
+        List<Chapter> chapters = new();
         int page = 1;
         while(page < 50)
         {

@@ -95,16 +95,13 @@ public class ComickIo : MangaConnector
             JToken data = JToken.Parse(sr.ReadToEnd());

             JArray? chaptersArray = data["chapters"] as JArray;
-            if (chaptersArray?.Count < 1)
+            if (chaptersArray is null || chaptersArray.Count < 1)
                 break;

-            chapterHids.AddRange(chaptersArray?.Select(token => token.Value<string>("hid")!)!);
+            chapters.AddRange(ParseChapters(manga, chaptersArray));
             page++;
         }
-        Log.Debug($"Getting chapters for {manga.Name} yielded {chapterHids.Count} hids. Requesting chapters now...");
-
-        List<Chapter> chapters = chapterHids.Select(hid => ChapterFromHid(manga, hid)).ToList();

         return chapters.ToArray();
     }

@@ -219,29 +216,23 @@ public class ComickIo : MangaConnector
             year: year, originalLanguage: originalLanguage);
     }

-    private Chapter ChapterFromHid(Manga parentManga, string hid)
+    private List<Chapter> ParseChapters(Manga parentManga, JArray chaptersArray)
     {
-        string requestUrl = $"https://api.comick.fun/chapter/{hid}";
-        RequestResult result = downloadClient.MakeRequest(requestUrl, RequestType.Default);
-        if ((int)result.statusCode < 200 || (int)result.statusCode >= 300)
-        {
-            Log.Error("Request failed");
-            throw new Exception("Request failed");
-        }
-
-        using StreamReader sr = new (result.result);
-        JToken data = JToken.Parse(sr.ReadToEnd());
-
-        string? canonical = data.Value<string>("canonical");
-        string? chapterNum = data["chapter"]?.Value<string>("chap");
-        string? volumeNumStr = data["chapter"]?.Value<string>("vol");
-        int? volumeNum = volumeNumStr is null ? null : int.Parse(volumeNumStr);
-        string? title = data["chapter"]?.Value<string>("title");
-
-        if(chapterNum is null)
-            throw new Exception("chapterNum is null");
-
-        string url = $"https://comick.io{canonical}";
-        return new Chapter(parentManga, url, chapterNum, volumeNum, hid, title);
+        List<Chapter> chapters = new ();
+        foreach (JToken chapter in chaptersArray)
+        {
+            string? chapterNum = chapter.Value<string>("chap");
+            string? volumeNumStr = chapter.Value<string>("vol");
+            int? volumeNum = volumeNumStr is null ? null : int.Parse(volumeNumStr);
+            string? title = chapter.Value<string>("title");
+            string? hid = chapter.Value<string>("hid");
+            string url = $"https://comick.io/comic/{parentManga.IdOnConnectorSite}/{hid}";
+
+            if(chapterNum is null || hid is null)
+                continue;
+
+            chapters.Add(new (parentManga, url, chapterNum, volumeNum, hid, title));
+        }
+        return chapters;
     }
 }
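ParseChapters now reads everything it needs (chap, vol, title, hid) from the already-fetched chapter list and builds the chapter URL from the manga slug and hid, instead of issuing one extra request per chapter. A sketch of the per-item JSON fields it assumes; the field names mirror the code above, the concrete values are illustrative.

```csharp
// Hedged sketch: the per-item fields ParseChapters reads from the comick chapter list.
using System;
using Newtonsoft.Json.Linq;

JArray chaptersArray = JArray.Parse("""
[
  { "chap": "12", "vol": "2",  "title": "Illustrative title", "hid": "AbCdEf12" },
  { "chap": "13", "vol": null, "title": null,                 "hid": "GhIjKl34" }
]
""");

foreach (JToken chapter in chaptersArray)
{
    string? chap = chapter.Value<string>("chap");
    string? hid = chapter.Value<string>("hid");
    if (chap is null || hid is null)
        continue; // same guard as the connector: skip incomplete entries
    // Canonical chapter URL built locally instead of requesting /chapter/{hid} again.
    Console.WriteLine($"https://comick.io/comic/some-manga-slug/{hid} (chapter {chap})");
}
```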

View File

@@ -226,22 +226,25 @@ public class MangaDex : MangaConnector
     private Manga ParseMangaFromJToken(JToken jToken)
     {
         string? id = jToken.Value<string>("id");
+        if(id is null)
+            throw new Exception("jToken was not in expected format");
         JObject? attributes = jToken["attributes"] as JObject;
-        string? name = attributes?["title"]?.Value<string>("en") ?? attributes?["title"]?.First?.First?.Value<string>();
-        string? description = attributes?["description"]?.Value<string>("en")??attributes?["description"]?.First?.First?.Value<string>();
-        string? status = attributes?["status"]?.Value<string>();
-        uint? year = attributes?["year"]?.Value<uint>();
-        string? originalLanguage = attributes?["originalLanguage"]?.Value<string>();
-        JArray? altTitlesJArray = attributes?["altTitles"] as JArray;
-        JArray? tagsJArray = attributes?["tags"] as JArray;
+        if(attributes is null)
+            throw new Exception("jToken was not in expected format");
+        string? name = attributes["title"]?.Value<string>("en") ?? attributes["title"]?.First?.First?.Value<string>();
+        string description = attributes["description"]?.Value<string>("en")??attributes["description"]?.First?.First?.Value<string>()??"";
+        string? status = attributes["status"]?.Value<string>();
+        uint? year = attributes["year"]?.Value<uint?>();
+        string? originalLanguage = attributes["originalLanguage"]?.Value<string>();
+        JArray? altTitlesJArray = attributes.TryGetValue("altTitles", out JToken? altTitlesArray) ? altTitlesArray as JArray : null;
+        JArray? tagsJArray = attributes.TryGetValue("tags", out JToken? tagsArray) ? tagsArray as JArray : null;
         JArray? relationships = jToken["relationships"] as JArray;
-        string? coverFileName =
-            relationships?.FirstOrDefault(r => r["type"]?.Value<string>() == "cover_art")?["attributes"]?.Value<string>("fileName");
-        if (id is null || attributes is null || name is null || description is null || status is null ||
-            altTitlesJArray is null || tagsJArray is null || relationships is null || coverFileName is null)
-            throw new Exception("jToken was not in expected format");
+        if (name is null || status is null || relationships is null)
+            throw new Exception("jToken was not in expected format");
+        string? coverFileName = relationships.FirstOrDefault(r => r["type"]?.Value<string>() == "cover_art")?["attributes"]?.Value<string>("fileName");
+        if(coverFileName is null)
+            throw new Exception("jToken was not in expected format");

         List<Link> links = attributes["links"]?

@@ -276,7 +279,7 @@ public class MangaDex : MangaConnector
                 return new Link(key, url);
             }).ToList()!;

-        List<MangaAltTitle> altTitles = altTitlesJArray
+        List<MangaAltTitle> altTitles = (altTitlesJArray??[])
             .Select(t =>
             {
                 JObject? j = t as JObject;

@@ -286,7 +289,7 @@ public class MangaDex : MangaConnector
                 return new MangaAltTitle(p.Name, p.Value.ToString());
             }).Where(x => x is not null).ToList()!;

-        List<MangaTag> tags = tagsJArray
+        List<MangaTag> tags = (tagsJArray??[])
             .Where(t => t.Value<string>("type") == "tag")
             .Select(t => t["attributes"]?["name"]?.Value<string>("en")??t["attributes"]?["name"]?.First?.First?.Value<string>())
             .Select(str => str is not null ? new MangaTag(str) : null)


@@ -34,7 +34,9 @@ public static class Tranga
     internal static void RemoveStaleFiles(PgsqlContext context)
     {
-        Log.Info($"Removing stale files...");
+        Log.Info("Removing stale files...");
+        if (!Directory.Exists(TrangaSettings.coverImageCache))
+            return;
         string[] usedFiles = context.Mangas.Select(m => m.CoverFileNameInCache).Where(s => s != null).ToArray()!;
         string[] extraneousFiles = new DirectoryInfo(TrangaSettings.coverImageCache).GetFiles()
             .Where(f => usedFiles.Contains(f.FullName) == false)
@@ -117,78 +119,69 @@ public static class Tranga
             return;
         }
         IServiceProvider serviceProvider = (IServiceProvider)serviceProviderObj;
-        using IServiceScope scope = serviceProvider.CreateScope();
         while (true)
         {
             Log.Debug("Starting Job-Cycle...");
             DateTime cycleStart = DateTime.UtcNow;
+            using IServiceScope scope = serviceProvider.CreateScope();
             PgsqlContext cycleContext = scope.ServiceProvider.GetRequiredService<PgsqlContext>();
-            Log.Debug("Loading Jobs...");
-            DateTime loadStart = DateTime.UtcNow;
-            Log.Debug($"Jobs Loaded! (took {DateTime.UtcNow.Subtract(loadStart).TotalMilliseconds}ms)");
-            //Update finished Jobs to new states
-            IQueryable<Job> completedJobs = cycleContext.Jobs.Where(j => j.state == JobState.Completed);
-            foreach (Job completedJob in completedJobs)
-                if (completedJob.RecurrenceMs <= 0)
-                {
-                    cycleContext.Jobs.Remove(completedJob);
-                }
-            //Retrieve waiting and due Jobs
-            IQueryable<Job> runningJobs = cycleContext.Jobs.Where(j => j.state == JobState.Running);
+            //Get Running Jobs
+            List<Job> runningJobs = cycleContext.Jobs.GetRunningJobs();
             DateTime filterStart = DateTime.UtcNow;
             Log.Debug("Filtering Jobs...");
-            List<MangaConnector> busyConnectors = GetBusyConnectors(runningJobs);
-            IQueryable<Job> waitingJobs = cycleContext.Jobs.Where(j => j.state == JobState.CompletedWaiting || j.state == JobState.FirstExecution);
-            List<Job> dueJobs = FilterDueJobs(waitingJobs);
-            List<Job> jobsWithoutBusyConnectors = FilterJobWithBusyConnectors(dueJobs, busyConnectors);
-            List<Job> jobsWithoutMissingDependencies = FilterJobDependencies(jobsWithoutBusyConnectors);
-            List<Job> jobsWithoutDownloading =
-                jobsWithoutMissingDependencies
-                    .Where(j => j.JobType != JobType.DownloadSingleChapterJob)
-                    .DistinctBy(j => j.JobType)
-                    .ToList();
-            List<Job> firstChapterPerConnector =
-                jobsWithoutMissingDependencies
-                    .Where(j => j.JobType == JobType.DownloadSingleChapterJob)
-                    .AsEnumerable()
-                    .OrderBy(j =>
-                    {
-                        DownloadSingleChapterJob dscj = (DownloadSingleChapterJob)j;
-                        return dscj.Chapter;
-                    })
-                    .DistinctBy(j =>
-                    {
-                        DownloadSingleChapterJob dscj = (DownloadSingleChapterJob)j;
-                        return dscj.Chapter.ParentManga.MangaConnector;
-                    })
-                    .ToList();
-            List<Job> startJobs = jobsWithoutDownloading.Concat(firstChapterPerConnector).ToList();
+            List<Job> waitingJobs = cycleContext.Jobs.GetWaitingJobs();
+            List<Job> dueJobs = waitingJobs.FilterDueJobs();
+            List<Job> jobsWithoutDependencies = dueJobs.FilterJobDependencies();
+            List<Job> jobsWithoutDownloading = jobsWithoutDependencies.FilterJobsWithoutDownloading();
+            //Match running and waiting jobs per Connector
+            Dictionary<string, Dictionary<JobType, List<Job>>> runningJobsPerConnector =
+                runningJobs.GetJobsPerJobTypeAndConnector();
+            Dictionary<string, Dictionary<JobType, List<Job>>> waitingJobsPerConnector =
+                jobsWithoutDependencies.GetJobsPerJobTypeAndConnector();
+            List<Job> jobsNotHeldBackByConnector =
+                MatchJobsRunningAndWaiting(runningJobsPerConnector, waitingJobsPerConnector);
+            List<Job> startJobs = jobsWithoutDownloading.Concat(jobsNotHeldBackByConnector).ToList();
             Log.Debug($"Jobs Filtered! (took {DateTime.UtcNow.Subtract(filterStart).TotalMilliseconds}ms)");
             //Start Jobs that are allowed to run (preconditions match)
             foreach (Job job in startJobs)
             {
+                bool running = false;
                 Thread t = new(() =>
                 {
                     using IServiceScope jobScope = serviceProvider.CreateScope();
                     PgsqlContext jobContext = jobScope.ServiceProvider.GetRequiredService<PgsqlContext>();
-                    jobContext.Jobs.Find(job.JobId)?.Run(jobContext); //FIND the job IN THE NEW CONTEXT!!!!!!! SO WE DON'T GET TRACKING PROBLEMS AND AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+                    if (jobContext.Jobs.Find(job.JobId) is not { } inContext)
+                        return;
+                    inContext.Run(jobContext, ref running); //FIND the job IN THE NEW CONTEXT!!!!!!! SO WE DON'T GET TRACKING PROBLEMS AND AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
                 });
                 RunningJobs.Add(t, job);
                 t.Start();
+                while(!running)
+                    Thread.Sleep(10);
             }
-            Log.Debug($"Jobs Completed: {completedJobs.Count()} Running: {runningJobs.Count()}\n" +
-                      $"Waiting: {waitingJobs.Count()}\n" +
-                      $"\tof which Due: {dueJobs.Count()}\n" +
-                      $"\t\tof which can be started: {jobsWithoutMissingDependencies.Count()}\n" +
-                      $"\t\t\tof which started: {startJobs.Count()}");
+            Log.Debug($"Running: {runningJobs.Count}\n" +
+                      $"{string.Join("\n", runningJobs.Select(s => "\t- " + s))}\n" +
+                      $"Waiting: {waitingJobs.Count} Due: {dueJobs.Count}\n" +
+                      $"{string.Join("\n", dueJobs.Select(s => "\t- " + s))}\n" +
+                      $"of which {jobsWithoutDependencies.Count} without missing dependencies, of which\n" +
+                      $"\t{jobsWithoutDownloading.Count} without downloading\n" +
+                      $"\t{jobsNotHeldBackByConnector.Count} not held back by Connector\n" +
+                      $"{startJobs.Count} were started:\n" +
+                      $"{string.Join("\n", startJobs.Select(s => "\t- " + s))}");
+            if (Log.IsDebugEnabled && dueJobs.Count < 1)
+                if(waitingJobs.MinBy(j => j.NextExecution) is { } nextJob)
+                    Log.Debug($"Next job in {nextJob.NextExecution.Subtract(DateTime.UtcNow)} (at {nextJob.NextExecution}): {nextJob.JobId}");
             (Thread, Job)[] removeFromThreadsList = RunningJobs.Where(t => !t.Key.IsAlive)
                 .Select(t => (t.Key, t.Value)).ToArray();
@@ -206,51 +199,140 @@ public static class Tranga
             {
                 Log.Error("Failed saving Job changes.", e);
             }
-            Log.Debug($"Job-Cycle over! (took {DateTime.UtcNow.Subtract(cycleStart).TotalMilliseconds}ms)");
+            Log.Debug($"Job-Cycle over! (took {DateTime.UtcNow.Subtract(cycleStart).TotalMilliseconds}ms");
             Thread.Sleep(TrangaSettings.startNewJobTimeoutMs);
         }
     }
-    private static List<MangaConnector> GetBusyConnectors(IQueryable<Job> runningJobs)
+    private static List<Job> GetRunningJobs(this IQueryable<Job> jobs)
     {
-        HashSet<MangaConnector> busyConnectors = new();
-        foreach (Job runningJob in runningJobs)
-        {
-            if(GetJobConnector(runningJob) is { } mangaConnector)
-                busyConnectors.Add(mangaConnector);
-        }
-        return busyConnectors.ToList();
+        DateTime start = DateTime.UtcNow;
+        List<Job> ret = jobs.Where(j => j.state == JobState.Running).ToList();
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Getting running Jobs took {end.Subtract(start).TotalMilliseconds}ms");
+        return ret;
     }
-    private static List<Job> FilterDueJobs(IQueryable<Job> jobs) =>
-        jobs.ToList()
-            .Where(j => j.NextExecution < DateTime.UtcNow)
-            .ToList();
-    private static List<Job> FilterJobDependencies(List<Job> jobs) =>
-        jobs
-            .Where(job => job.DependsOnJobs.All(j => j.IsCompleted))
-            .ToList();
-    private static List<Job> FilterJobWithBusyConnectors(List<Job> jobs, List<MangaConnector> busyConnectors) =>
-        jobs.Where(j =>
-        {
-            //Filter jobs with busy connectors
-            if (GetJobConnector(j) is { } mangaConnector)
-                return busyConnectors.Contains(mangaConnector) == false;
-            return true;
-        }).ToList();
-    private static MangaConnector? GetJobConnector(Job job)
+    private static List<Job> GetWaitingJobs(this IQueryable<Job> jobs)
+    {
+        DateTime start = DateTime.UtcNow;
+        List<Job> ret = jobs.Where(j => j.state == JobState.CompletedWaiting || j.state == JobState.FirstExecution).ToList();
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Getting waiting Jobs took {end.Subtract(start).TotalMilliseconds}ms");
+        return ret;
+    }
+    private static List<Job> FilterDueJobs(this List<Job> jobs)
+    {
+        DateTime start = DateTime.UtcNow;
+        List<Job> ret = jobs.Where(j => j.NextExecution < DateTime.UtcNow).ToList();
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Filtering Due Jobs took {end.Subtract(start).TotalMilliseconds}ms");
+        return ret;
+    }
+    private static List<Job> FilterJobDependencies(this List<Job> jobs)
+    {
+        DateTime start = DateTime.UtcNow;
+        List<Job> ret = jobs.Where(job => job.DependsOnJobs.All(j => j.IsCompleted)).ToList();
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Filtering Dependencies took {end.Subtract(start).TotalMilliseconds}ms");
+        return ret;
+    }
+    private static List<Job> FilterJobsWithoutDownloading(this List<Job> jobs)
+    {
+        JobType[] types = [JobType.MoveFileOrFolderJob, JobType.MoveMangaLibraryJob, JobType.UpdateChaptersDownloadedJob];
+        DateTime start = DateTime.UtcNow;
+        List<Job> ret = jobs.Where(j => types.Contains(j.JobType)).ToList();
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Filtering Jobs without Download took {end.Subtract(start).TotalMilliseconds}ms");
+        return ret;
+    }
+    private static Dictionary<string, Dictionary<JobType, List<Job>>> GetJobsPerJobTypeAndConnector(this List<Job> jobs)
+    {
+        DateTime start = DateTime.UtcNow;
+        Dictionary<string, Dictionary<JobType, List<Job>>> ret = new();
+        foreach (Job job in jobs)
+        {
+            if(GetJobConnectorName(job) is not { } connector)
+                continue;
+            if (!ret.ContainsKey(connector))
+                ret.Add(connector, new());
+            if (!ret[connector].ContainsKey(job.JobType))
+                ret[connector].Add(job.JobType, new());
+            ret[connector][job.JobType].Add(job);
+        }
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Fetching connector per Job for jobs took {end.Subtract(start).TotalMilliseconds}ms");
+        return ret;
+    }
+    private static List<Job> MatchJobsRunningAndWaiting(Dictionary<string, Dictionary<JobType, List<Job>>> running,
+        Dictionary<string, Dictionary<JobType, List<Job>>> waiting)
+    {
+        Log.Debug($"Matching {running.Count} running Jobs to {waiting.Count} waiting Jobs. Busy Connectors: {string.Join(", ", running.Select(r => r.Key))}");
+        DateTime start = DateTime.UtcNow;
+        List<Job> ret = new();
+        //Foreach MangaConnector
+        foreach ((string connector, Dictionary<JobType, List<Job>> jobTypeJobsWaiting) in waiting)
+        {
+            //Check if MangaConnector has a Job running
+            if (running.TryGetValue(connector, out Dictionary<JobType, List<Job>>? jobTypeJobsRunning))
+            {
+                //MangaConnector has running Jobs
+                //Match per JobType (MangaConnector can have 1 Job per Type running at the same time)
+                foreach ((JobType jobType, List<Job> jobsWaiting) in jobTypeJobsWaiting)
+                {
+                    if(jobTypeJobsRunning.ContainsKey(jobType))
+                        //Already a job of Type running on MangaConnector
+                        continue;
+                    if (jobType is not JobType.DownloadSingleChapterJob)
+                        //If it is not a DownloadSingleChapterJob, just add the first
+                        ret.Add(jobsWaiting.First());
+                    else
+                        //Add the Job with the lowest Chapternumber
+                        ret.Add(jobsWaiting.OrderBy(j => ((DownloadSingleChapterJob)j).Chapter).First());
+                }
+            }
+            else
+            {
+                //MangaConnector has no running Jobs
+                foreach ((JobType jobType, List<Job> jobsWaiting) in jobTypeJobsWaiting)
+                {
+                    if(ret.Any(j => j.JobType == jobType))
+                        //Already a job of type to be started
+                        continue;
+                    if (jobType is not JobType.DownloadSingleChapterJob)
+                        //If it is not a DownloadSingleChapterJob, just add the first
+                        ret.Add(jobsWaiting.First());
+                    else
+                        //Add the Job with the lowest Chapternumber
+                        ret.Add(jobsWaiting.OrderBy(j => ((DownloadSingleChapterJob)j).Chapter).First());
+                }
+            }
+        }
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Getting eligible jobs (not held back by Connector) took {end.Subtract(start).TotalMilliseconds}ms");
+        return ret;
+    }
+    private static string? GetJobConnectorName(Job job)
     {
         if (job is DownloadAvailableChaptersJob dacj)
-            return dacj.Manga.MangaConnector;
+            return dacj.Manga.MangaConnectorName;
         if (job is DownloadMangaCoverJob dmcj)
-            return dmcj.Manga.MangaConnector;
+            return dmcj.Manga.MangaConnectorName;
         if (job is DownloadSingleChapterJob dscj)
-            return dscj.Chapter.ParentManga.MangaConnector;
+            return dscj.Chapter.ParentManga.MangaConnectorName;
         if (job is RetrieveChaptersJob rcj)
-            return rcj.Manga.MangaConnector;
+            return rcj.Manga.MangaConnectorName;
         return null;
     }
 }


@@ -11,10 +11,11 @@ public static class TrangaSettings
     public static string downloadLocation { get; private set; } = (RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? "/Manga" : Path.Join(Directory.GetCurrentDirectory(), "Downloads"));
     public static string workingDirectory { get; private set; } = Path.Join(RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? "/usr/share" : Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "tranga-api");
     [JsonIgnore]
-    internal static readonly string DefaultUserAgent = $"Tranga ({Enum.GetName(Environment.OSVersion.Platform)}; {(Environment.Is64BitOperatingSystem ? "x64" : "")}) / 1.0";
+    internal static readonly string DefaultUserAgent = $"Tranga/2.0 ({Enum.GetName(Environment.OSVersion.Platform)}; {(Environment.Is64BitOperatingSystem ? "x64" : "")})";
     public static string userAgent { get; private set; } = DefaultUserAgent;
     public static int compression{ get; private set; } = 40;
     public static bool bwImages { get; private set; } = false;
+    public static string flareSolverrUrl { get; private set; } = string.Empty;
     /// <summary>
     /// Placeholders:
     /// %M Manga Name
@@ -35,14 +36,14 @@ public static class TrangaSettings
     [JsonIgnore]
     public static string coverImageCache => Path.Join(workingDirectory, "imageCache");
     public static bool aprilFoolsMode { get; private set; } = true;
-    public static int startNewJobTimeoutMs { get; private set; } = 1000;
+    public static int startNewJobTimeoutMs { get; private set; } = 20000;
     [JsonIgnore]
     internal static readonly Dictionary<RequestType, int> DefaultRequestLimits = new ()
     {
         {RequestType.MangaInfo, 60},
         {RequestType.MangaDexFeed, 60},
-        {RequestType.MangaDexImage, 40},
-        {RequestType.MangaImage, 60},
+        {RequestType.MangaDexImage, 60},
+        {RequestType.MangaImage, 240},
         {RequestType.MangaCover, 60},
         {RequestType.Default, 60}
     };
@@ -102,6 +103,12 @@ public static class TrangaSettings
         ExportSettings();
     }
+    public static void UpdateFlareSolverrUrl(string url)
+    {
+        flareSolverrUrl = url;
+        ExportSettings();
+    }
     public static void ResetRequestLimits()
     {
         requestLimits = DefaultRequestLimits;
@@ -148,6 +155,7 @@ public static class TrangaSettings
         jobj.Add("bwImages", JToken.FromObject(bwImages));
         jobj.Add("startNewJobTimeoutMs", JToken.FromObject(startNewJobTimeoutMs));
         jobj.Add("chapterNamingScheme", JToken.FromObject(chapterNamingScheme));
+        jobj.Add("flareSolverrUrl", JToken.FromObject(flareSolverrUrl));
         return jobj;
     }
@@ -174,5 +182,7 @@ public static class TrangaSettings
             startNewJobTimeoutMs = snjt.Value<int>()!;
         if (jobj.TryGetValue("chapterNamingScheme", out JToken? cns))
             chapterNamingScheme = cns.Value<string>()!;
+        if (jobj.TryGetValue("flareSolverrUrl", out JToken? fsu))
+            flareSolverrUrl = fsu.Value<string>()!;
     }
 }


@@ -39,4 +39,4 @@ WORKDIR /publish
 COPY --chown=1000:1000 --from=build-env /publish .
 USER 0
 ENTRYPOINT ["dotnet", "/publish/API.dll"]
-CMD ["-f", "-c", "-l", "/usr/share/tranga-api/logs"]
+CMD [""]


@@ -84,17 +84,16 @@ Endpoints are documented in Swagger. Just spin up an instance, and go to `http:/
 ## Built With
-- .NET
-- ASP.NET
-- Entity Framework
+- ASP.NET
+- Entity Framework Core
 - [PostgreSQL](https://www.postgresql.org/about/licence/)
-- [Swagger](https://github.com/domaindrivendev/Swashbuckle.AspNetCore/blob/master/LICENSE)
 - [Ngpsql](https://github.com/npgsql/npgsql/blob/main/LICENSE)
+- [Swagger](https://github.com/domaindrivendev/Swashbuckle.AspNetCore/blob/master/LICENSE)
 - [Newtonsoft.Json](https://github.com/JamesNK/Newtonsoft.Json/blob/master/LICENSE.md)
-- [Sixlabors.ImageSharp](https://docs-v2.sixlabors.com/articles/imagesharp/index.html#license)
 - [PuppeteerSharp](https://github.com/hardkoded/puppeteer-sharp/blob/master/LICENSE)
 - [Html Agility Pack (HAP)](https://github.com/zzzprojects/html-agility-pack/blob/master/LICENSE)
 - [Soenneker.Utils.String.NeedlemanWunsch](https://github.com/soenneker/soenneker.utils.string.needlemanwunsch/blob/main/LICENSE)
+- [Sixlabors.ImageSharp](https://docs-v2.sixlabors.com/articles/imagesharp/index.html#license)
 - 💙 Blåhaj 🦈
 <p align="right">(<a href="#readme-top">back to top</a>)</p>
@@ -126,13 +125,13 @@ access the folder. Permission conflicts with Komga and Kavita should thus be lim
 ### Bare-Metal
-While not supported/currently built, Tranga will also run Bare-Metal without issue.
+While not supported/currently built, Tranga should also run Bare-Metal without issue.
 Configuration-Files will be stored per OS:
 - Linux `/usr/share/tranga-api`
 - Windows `%appdata%/tranga-api`
-Downloads (default) are stored in - but this can be configured in `settings.json`:
+Downloads (default) are stored in - but this can be configured in `settings.json` (which will be generated on first after first launch):
 - Linux `/Manga`
 - Windows `%currentDirectory%/Downloads`
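
For the Bare-Metal note above, a minimal sketch of a bare-metal run; the publish path and the use of the `POSTGRES_HOST` variable are assumptions pieced together from the Dockerfile and docker-compose in this diff, not documented steps:

```bash
# Publish the API and start the DLL the same way the Dockerfile's ENTRYPOINT does.
# A reachable PostgreSQL instance is still required.
dotnet publish -c Release -o publish
POSTGRES_HOST=localhost dotnet publish/API.dll
```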
@@ -148,9 +147,10 @@ If you want to contribute, please feel free to fork and create a Pull-Request!
 General rules:
 - Strongly-type your variables. This improves readability.
 ```csharp
-var xyz = Object.GetSomething(); //Do not do this. What type is xyz?
+var xyz = Object.GetSomething(); //Do not do this. What type is xyz (without looking at Method returns etc.)?
 Manga[] zyx = Object.GetAnotherThing(); //I can now easily see that zyx is an Array.
 ```
+Tranga is using a code-first Entity-Framework Core approach. If you modify the db-table structure you need to create a migration.
 **A broad overview of where is what:**<br />
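
Since the contribution note added above mentions the code-first EF Core approach, a minimal sketch of creating a migration with the standard `dotnet-ef` tooling might look like this (the migration name is a placeholder, and the commands assume they are run from the project that contains the DbContext):

```bash
dotnet tool install --global dotnet-ef   # once, if the tool is not installed yet
dotnet ef migrations add MySchemaChange  # placeholder migration name
dotnet ef database update                # apply it against the configured database
```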
@@ -171,6 +171,10 @@ If you want to add a new Website-Connector: <br />
    in the constructor).
 4. In `Program.cs` add a new Object to the Array.
+### How to test locally
+In the Project root a `docker-compose.local.yaml` file will compile the code and create the container(s).
 <!-- LICENSE -->
 ## License
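
For the "How to test locally" note added above, a plausible invocation using standard Docker Compose flags (the file name comes from the README itself):

```bash
docker compose -f docker-compose.local.yaml up --build
```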


@@ -11,10 +11,16 @@ services:
     ports:
       - "6531:6531"
     depends_on:
-      - tranga-pg
+      tranga-pg:
+        condition: service_healthy
     environment:
       - POSTGRES_HOST=tranga-pg
     restart: unless-stopped
+    logging:
+      driver: json-file
+      options:
+        max-size: "10m"
+        max-file: "5"
   tranga-pg:
     image: postgres:latest
     container_name: tranga-pg
@@ -22,4 +28,15 @@ services:
       - "5432:5432"
     environment:
       - POSTGRES_PASSWORD=postgres
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready"]
+      interval: 30s
+      timeout: 60s
+      retries: 5
+      start_period: 80s
     restart: unless-stopped
+    logging:
+      driver: json-file
+      options:
+        max-size: "10m"
+        max-file: "5"


@@ -1,7 +1,7 @@
 version: '3'
 services:
   tranga-api:
-    image: glax/tranga-api:latest
+    image: glax/tranga-api:Server-V2
     container_name: tranga-api
     volumes:
       - ./Manga:/Manga
@@ -9,18 +9,29 @@ services:
     ports:
       - "6531:6531"
     depends_on:
-      - tranga-pg
+      tranga-pg:
+        condition: service_healthy
     environment:
       - POSTGRES_HOST=tranga-pg
     restart: unless-stopped
+    logging:
+      driver: json-file
+      options:
+        max-size: "10m"
+        max-file: "5"
   tranga-website:
-    image: glax/tranga-website:latest
+    image: glax/tranga-website:Server-V2
     container_name: tranga-website
     ports:
       - "9555:80"
     depends_on:
       - tranga-api
     restart: unless-stopped
+    logging:
+      driver: json-file
+      options:
+        max-size: "10m"
+        max-file: "5"
   tranga-pg:
     image: postgres:latest
     container_name: tranga-pg
@@ -28,4 +39,15 @@ services:
       - "5432:5432"
     environment:
       - POSTGRES_PASSWORD=postgres
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready"]
+      interval: 30s
+      timeout: 60s
+      retries: 5
+      start_period: 80s
     restart: unless-stopped
+    logging:
+      driver: json-file
+      options:
+        max-size: "10m"
+        max-file: "5"