21 Commits

Author SHA1 Message Date
e063cf1fd9 Debug: MatchJobsRunningAndWaiting
UpdateCoverJobs not starting.
2025-06-28 23:15:51 +02:00
8170e1d762 JobCycle Info-Debug list jobs started/running
2025-06-28 20:35:10 +02:00
254383b006 Include Description in ComicInfo.xml 2025-06-28 20:28:28 +02:00
df431e533a Add POST Jobs/Cleaup Endpoint:
Removes failed and completed Jobs (that are not recurring)
2025-06-28 20:18:28 +02:00
9a4cc0cbaf Only log Error on image-processing if we dont know what Exception was thrown 2025-06-28 20:13:09 +02:00
861cf7e166 Fix Image-Processing:
Format is not supported by Imagesharp, throwing exception causing Job to fail.
2025-06-28 20:00:01 +02:00
7e34b3b91e Update readme to contain information on how to test locally 2025-06-28 19:48:47 +02:00
29d36484f9 include logging driver in docker-compose
Remove parameters from start-CMD in Dockerfile
2025-06-28 19:39:19 +02:00
2c6e8e4d16 Default startNewJobTimeoutMs set to 20s
2025-06-18 02:11:03 +02:00
fab2886684 ComickIo Stop double work for retrieving chapters:
We can build the canonical url from the hids
2025-06-18 01:55:19 +02:00
d9ccf71b21 DownloadSingleChapterJob add check if chapter is already downloaded before re-downloading 2025-06-18 01:18:06 +02:00
f36f34f212 We dont need to actually load the MangaConnector to know if two names match.
2025-06-18 00:23:33 +02:00
ff10432c79 Fix FilterJobsWithoutDownloading: Dont check if a job has a connector, that takes forever 2025-06-18 00:11:05 +02:00
776e1e4890 ...use what we coded... 2025-06-17 20:18:10 +02:00
db0643fa19 More Debug 2025-06-17 20:09:49 +02:00
3eeb563ca1 Add Debug Statement to find slow operations in Job-Cycle 2025-06-17 19:55:54 +02:00
7a88b1f7ee Increase default request Limits 2025-06-17 19:55:31 +02:00
b5411e9c6c Better Debugging for HttpDownloadClient 2025-06-17 18:52:27 +02:00
07b260dea6 GC Cleanup 2025-06-17 18:52:14 +02:00
71ad32de31 Fix FlareSolverr IsJson-Check 2025-06-17 18:51:29 +02:00
ecd2c2722f Fix FlareSolverr, Flaresolverrsharp is broken 2025-06-17 18:28:18 +02:00
15 changed files with 414 additions and 137 deletions

@@ -11,7 +11,6 @@
     <ItemGroup>
         <PackageReference Include="Asp.Versioning.Mvc.ApiExplorer" Version="8.1.0" />
-        <PackageReference Include="FlareSolverrSharp" Version="3.0.7" />
         <PackageReference Include="HtmlAgilityPack" Version="1.12.0" />
         <PackageReference Include="log4net" Version="3.0.4" />
         <PackageReference Include="Microsoft.AspNetCore.Mvc.NewtonsoftJson" Version="9.0.3" />

@@ -374,4 +374,25 @@ public class JobController(PgsqlContext context, ILog Log) : Controller
     {
         return StatusCode(Status501NotImplemented);
     }
+
+    /// <summary>
+    /// Removes failed and completed Jobs (that are not recurring)
+    /// </summary>
+    /// <response code="202">Job started</response>
+    /// <response code="500">Error during Database Operation</response>
+    [HttpPost("Cleanup")]
+    public IActionResult CleanupJobs()
+    {
+        try
+        {
+            context.Jobs.RemoveRange(context.Jobs.Where(j => j.state == JobState.Failed || j.state == JobState.Completed));
+            context.SaveChanges();
+            return Ok();
+        }
+        catch (Exception e)
+        {
+            Log.Error(e);
+            return StatusCode(500, e.Message);
+        }
+    }
 }
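A quick way to exercise the new endpoint is an empty-bodied HTTP POST. A minimal sketch, assuming a hypothetical base address/port and a `v2` version prefix (neither is shown in this diff):

```csharp
using System;
using System.Net.Http;

// Hypothetical base address and API version segment — adjust to the actual deployment.
using HttpClient http = new() { BaseAddress = new Uri("http://localhost:6531/") };

// Removes every Job in the Failed or Completed state; recurring jobs are untouched,
// since they wait in CompletedWaiting rather than Completed (cf. GetWaitingJobs in Tranga.cs below).
HttpResponseMessage response = await http.PostAsync("v2/Jobs/Cleanup", content: null);
Console.WriteLine(response.StatusCode); // 200 on success, 500 with the exception message on a database error
```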

@@ -4,7 +4,6 @@ using API.Schema;
 using API.Schema.Contexts;
 using API.Schema.Jobs;
 using Asp.Versioning;
-using FlareSolverrSharp;
 using log4net;
 using Microsoft.AspNetCore.Mvc;
 using Newtonsoft.Json;
@@ -321,43 +320,18 @@ public class SettingsController(PgsqlContext context, ILog Log) : Controller
     }
 
     /// <summary>
-    ///
+    /// Test FlareSolverr
     /// </summary>
     /// <response code="200">FlareSolverr is working!</response>
-    /// <response code="400">FlareSolverr URL is malformed</response>
     /// <response code="500">FlareSolverr is not working</response>
-    /// <response code="503">FlareSolverr could not be reached</response>
     [HttpPost("FlareSolverr/Test")]
     [ProducesResponseType(Status200OK)]
-    [ProducesResponseType(Status400BadRequest)]
     [ProducesResponseType(Status500InternalServerError)]
-    [ProducesResponseType(Status503ServiceUnavailable)]
     public IActionResult TestFlareSolverrReachable()
     {
         const string knownProtectedUrl = "https://prowlarr.servarr.com/v1/ping";
-        HttpClient client = new();
-        if (!Uri.TryCreate(new(TrangaSettings.flareSolverrUrl), "v1", out Uri? uri))
-            return BadRequest();
-        HttpRequestMessage request = new(HttpMethod.Post, uri);
-        JObject data = new()
-        {
-            ["cmd"] = "request.get",
-            ["url"] = knownProtectedUrl
-        };
-        request.Content = new StringContent(JsonConvert.SerializeObject(data));
-        request.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json");
-        HttpResponseMessage response = client.Send(request);
-        if (!response.IsSuccessStatusCode)
-            return StatusCode(Status503ServiceUnavailable);
-        client = new(new ClearanceHandler(TrangaSettings.flareSolverrUrl));
-        try
-        {
-            client.GetStringAsync(knownProtectedUrl).Wait();
-            return Ok();
-        }
-        catch (Exception e)
-        {
-            return StatusCode(Status500InternalServerError);
-        }
+        FlareSolverrDownloadClient client = new();
+        RequestResult result = client.MakeRequestInternal(knownProtectedUrl);
+        return (int)result.statusCode >= 200 && (int)result.statusCode < 300 ? Ok() : StatusCode(500, result.statusCode);
     }
 }

@@ -3,7 +3,7 @@ using log4net;
 
 namespace API.MangaDownloadClients;
 
-internal abstract class DownloadClient
+public abstract class DownloadClient
 {
     private static readonly Dictionary<RequestType, DateTime> LastExecutedRateLimit = new();
     protected ILog Log { get; init; }

@@ -0,0 +1,180 @@
using System.Diagnostics.CodeAnalysis;
using System.Net;
using System.Text;
using System.Text.Json;
using HtmlAgilityPack;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace API.MangaDownloadClients;
public class FlareSolverrDownloadClient : DownloadClient
{
internal override RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null)
{
if (clickButton is not null)
Log.Warn("Client can not click button");
if(referrer is not null)
Log.Warn("Client can not set referrer");
if (TrangaSettings.flareSolverrUrl == string.Empty)
{
Log.Error("FlareSolverr URL is empty");
return new(HttpStatusCode.InternalServerError, null, Stream.Null);
}
Uri flareSolverrUri = new (TrangaSettings.flareSolverrUrl);
if (flareSolverrUri.Segments.Last() != "v1")
flareSolverrUri = new UriBuilder(flareSolverrUri)
{
Path = "v1"
}.Uri;
HttpClient client = new()
{
Timeout = TimeSpan.FromSeconds(10),
DefaultVersionPolicy = HttpVersionPolicy.RequestVersionOrHigher,
DefaultRequestHeaders = { { "User-Agent", TrangaSettings.userAgent } }
};
JObject requestObj = new()
{
["cmd"] = "request.get",
["url"] = url
};
HttpRequestMessage requestMessage = new(HttpMethod.Post, flareSolverrUri)
{
Content = new StringContent(JsonConvert.SerializeObject(requestObj)),
};
requestMessage.Content.Headers.ContentType = new ("application/json");
Log.Debug($"Requesting {url}");
HttpResponseMessage? response;
try
{
response = client.Send(requestMessage);
}
catch (HttpRequestException e)
{
Log.Error(e);
return new (HttpStatusCode.Unused, null, Stream.Null);
}
if (!response.IsSuccessStatusCode)
{
Log.Debug($"Request returned status code {(int)response.StatusCode} {response.StatusCode}:\n" +
$"=====\n" +
$"Request:\n" +
$"{requestMessage.Method} {requestMessage.RequestUri}\n" +
$"{requestMessage.Version} {requestMessage.VersionPolicy}\n" +
$"Headers:\n\t{string.Join("\n\t", requestMessage.Headers.Select(h => $"{h.Key}: <{string.Join(">, <", h.Value)}"))}>\n" +
$"{requestMessage.Content?.ReadAsStringAsync().Result}" +
$"=====\n" +
$"Response:\n" +
$"{response.Version}\n" +
$"Headers:\n\t{string.Join("\n\t", response.Headers.Select(h => $"{h.Key}: <{string.Join(">, <", h.Value)}"))}>\n" +
$"{response.Content.ReadAsStringAsync().Result}");
return new (response.StatusCode, null, Stream.Null);
}
string responseString = response.Content.ReadAsStringAsync().Result;
JObject responseObj = JObject.Parse(responseString);
if (!IsInCorrectFormat(responseObj, out string? reason))
{
Log.Error($"Wrong format: {reason}");
return new(HttpStatusCode.Unused, null, Stream.Null);
}
string statusResponse = responseObj["status"]!.Value<string>()!;
if (statusResponse != "ok")
{
Log.Debug($"Status is not ok: {statusResponse}");
return new(HttpStatusCode.Unused, null, Stream.Null);
}
JObject solution = (responseObj["solution"] as JObject)!;
if (!Enum.TryParse(solution["status"]!.Value<int>().ToString(), out HttpStatusCode statusCode))
{
Log.Error($"Wrong format: Cant parse status code: {solution["status"]!.Value<int>()}");
return new(HttpStatusCode.Unused, null, Stream.Null);
}
if (statusCode < HttpStatusCode.OK || statusCode >= HttpStatusCode.MultipleChoices)
{
Log.Debug($"Status is: {statusCode}");
return new(statusCode, null, Stream.Null);
}
if (solution["response"]!.Value<string>() is not { } htmlString)
{
Log.Error("Wrong format: Cant find response in solution");
return new(HttpStatusCode.Unused, null, Stream.Null);
}
if (IsJson(htmlString, out HtmlDocument document, out string? json))
{
MemoryStream ms = new();
ms.Write(Encoding.UTF8.GetBytes(json));
ms.Position = 0;
return new(statusCode, document, ms);
}
else
{
MemoryStream ms = new();
ms.Write(Encoding.UTF8.GetBytes(htmlString));
ms.Position = 0;
return new(statusCode, document, ms);
}
}
private bool IsInCorrectFormat(JObject responseObj, [NotNullWhen(false)]out string? reason)
{
reason = null;
if (!responseObj.ContainsKey("status"))
{
reason = "Cant find status on response";
return false;
}
if (responseObj["solution"] is not JObject solution)
{
reason = "Cant find solution";
return false;
}
if (!solution.ContainsKey("status"))
{
reason = "Wrong format: Cant find status in solution";
return false;
}
if (!solution.ContainsKey("response"))
{
reason = "Wrong format: Cant find response in solution";
return false;
}
return true;
}
private bool IsJson(string htmlString, out HtmlDocument document, [NotNullWhen(true)]out string? jsonString)
{
jsonString = null;
document = new();
document.LoadHtml(htmlString);
HtmlNode pre = document.DocumentNode.SelectSingleNode("//pre");
try
{
using JsonDocument _ = JsonDocument.Parse(pre.InnerText);
jsonString = pre.InnerText;
return true;
}
catch (JsonReaderException)
{
return false;
}
}
}
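For orientation, the parsing above (`IsInCorrectFormat` and the `status`/`solution` reads) assumes the standard FlareSolverr v1 response envelope. A hedged illustration of that shape and of the only fields the client actually consumes — example values, not captured output from a real FlareSolverr instance:

```csharp
using System;
using Newtonsoft.Json.Linq;

// Illustrative FlareSolverr v1 envelope; the real payload also carries headers, cookies, timestamps, etc.
const string example = """
{
  "status": "ok",
  "message": "Challenge solved!",
  "solution": {
    "url": "https://example.org/",
    "status": 200,
    "response": "<html>either the solved page, or a <pre> element wrapping JSON</html>"
  }
}
""";

JObject parsed = JObject.Parse(example);
string status = parsed["status"]!.Value<string>()!;    // compared against "ok"
JObject solution = (JObject)parsed["solution"]!;
int httpStatus = solution["status"]!.Value<int>();      // mapped onto HttpStatusCode
string html = solution["response"]!.Value<string>()!;   // handed to HtmlAgilityPack / the IsJson check
Console.WriteLine($"{status} {httpStatus} ({html.Length} chars)");
```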

@@ -1,5 +1,4 @@
 using System.Net;
-using FlareSolverrSharp;
 using HtmlAgilityPack;
 
 namespace API.MangaDownloadClients;
@@ -10,9 +9,7 @@ internal class HttpDownloadClient : DownloadClient
     {
         if (clickButton is not null)
             Log.Warn("Client can not click button");
-        HttpClient client = TrangaSettings.flareSolverrUrl == string.Empty
-            ? new ()
-            : new (new ClearanceHandler(TrangaSettings.flareSolverrUrl));
+        HttpClient client = new();
         client.Timeout = TimeSpan.FromSeconds(10);
         client.DefaultVersionPolicy = HttpVersionPolicy.RequestVersionOrHigher;
         client.DefaultRequestHeaders.Add("User-Agent", TrangaSettings.userAgent);
@@ -33,6 +30,15 @@ internal class HttpDownloadClient : DownloadClient
         }
 
         if (!response.IsSuccessStatusCode)
+        {
+            Log.Debug($"Request returned status code {(int)response.StatusCode} {response.StatusCode}");
+            if (response.Headers.Server.Any(s =>
+                    (s.Product?.Name ?? "").Contains("cloudflare", StringComparison.InvariantCultureIgnoreCase)))
+            {
+                Log.Debug("Retrying with FlareSolverr!");
+                return new FlareSolverrDownloadClient().MakeRequestInternal(url, referrer, clickButton);
+            }
+            else
         {
             Log.Debug($"Request returned status code {(int)response.StatusCode} {response.StatusCode}:\n" +
                       $"=====\n" +
@@ -46,7 +52,7 @@ internal class HttpDownloadClient : DownloadClient
                       $"{response.Version}\n" +
                       $"Headers:\n\t{string.Join("\n\t", response.Headers.Select(h => $"{h.Key}: <{string.Join(">, <", h.Value)}"))}>\n" +
                       $"{response.Content.ReadAsStringAsync().Result}");
-            return new (response.StatusCode, null, Stream.Null);
+            }
         }
 
         Stream stream;

@@ -187,6 +187,8 @@ public class Chapter : IComparable<Chapter>
         comicInfo.Add(new XElement("Writer", string.Join(',', ParentManga.Authors.Select(author => author.AuthorName))));
         if(ParentManga.OriginalLanguage is not null)
             comicInfo.Add(new XElement("LanguageISO", ParentManga.OriginalLanguage));
+        if(ParentManga.Description != string.Empty)
+            comicInfo.Add(new XElement("Summary", ParentManga.Description));
         return comicInfo.ToString();
     }

@@ -45,6 +45,11 @@ public class DownloadSingleChapterJob : Job
     protected override IEnumerable<Job> RunInternal(PgsqlContext context)
     {
+        if (Chapter.Downloaded)
+        {
+            Log.Info("Chapter was already downloaded.");
+            return [];
+        }
         string[] imageUrls = Chapter.ParentManga.MangaConnector.GetChapterImageUrls(Chapter);
         if (imageUrls.Length < 1)
         {
@@ -129,21 +134,39 @@ public class DownloadSingleChapterJob : Job
     {
         if (!TrangaSettings.bwImages && TrangaSettings.compression == 100)
         {
-            Log.Debug($"No processing requested for image");
+            Log.Debug("No processing requested for image");
             return;
         }
 
         Log.Debug($"Processing image: {imagePath}");
+        try
+        {
             using Image image = Image.Load(imagePath);
-        File.Delete(imagePath);
             if (TrangaSettings.bwImages)
                 image.Mutate(i => i.ApplyProcessor(new AdaptiveThresholdProcessor()));
+            File.Delete(imagePath);
             image.SaveAsJpeg(imagePath, new JpegEncoder()
             {
                 Quality = TrangaSettings.compression
             });
         }
+        catch (Exception e)
+        {
+            if (e is UnknownImageFormatException or NotSupportedException)
+            {
+                //If the Image-Format is not processable by ImageSharp, we can't modify it.
+                Log.Debug($"Unable to process {imagePath}: Not supported image format");
+            }else if (e is InvalidImageContentException)
+            {
+                Log.Debug($"Unable to process {imagePath}: Invalid Content");
+            }
+            else
+            {
+                Log.Error(e);
+            }
+        }
+    }
 
     private void CopyCoverFromCacheToDownloadLocation(Manga manga)
     {

@@ -78,7 +78,7 @@ public class ComickIo : MangaConnector
     public override Chapter[] GetChapters(Manga manga, string? language = null)
     {
         Log.Info($"Getting Chapters: {manga.IdOnConnectorSite}");
-        List<string> chapterHids = new();
+        List<Chapter> chapters = new();
         int page = 1;
         while(page < 50)
         {
@@ -95,16 +95,13 @@ public class ComickIo : MangaConnector
             JToken data = JToken.Parse(sr.ReadToEnd());
 
             JArray? chaptersArray = data["chapters"] as JArray;
-            if (chaptersArray?.Count < 1)
+            if (chaptersArray is null || chaptersArray.Count < 1)
                 break;
 
-            chapterHids.AddRange(chaptersArray?.Select(token => token.Value<string>("hid")!)!);
+            chapters.AddRange(ParseChapters(manga, chaptersArray));
             page++;
         }
-        Log.Debug($"Getting chapters for {manga.Name} yielded {chapterHids.Count} hids. Requesting chapters now...");
-
-        List<Chapter> chapters = chapterHids.Select(hid => ChapterFromHid(manga, hid)).ToList();
 
         return chapters.ToArray();
     }
@@ -219,29 +216,23 @@ public class ComickIo : MangaConnector
             year: year, originalLanguage: originalLanguage);
     }
 
-    private Chapter ChapterFromHid(Manga parentManga, string hid)
+    private List<Chapter> ParseChapters(Manga parentManga, JArray chaptersArray)
     {
-        string requestUrl = $"https://api.comick.fun/chapter/{hid}";
-        RequestResult result = downloadClient.MakeRequest(requestUrl, RequestType.Default);
-        if ((int)result.statusCode < 200 || (int)result.statusCode >= 300)
+        List<Chapter> chapters = new ();
+        foreach (JToken chapter in chaptersArray)
         {
-            Log.Error("Request failed");
-            throw new Exception("Request failed");
-        }
-        using StreamReader sr = new (result.result);
-        JToken data = JToken.Parse(sr.ReadToEnd());
-        string? canonical = data.Value<string>("canonical");
-        string? chapterNum = data["chapter"]?.Value<string>("chap");
-        string? volumeNumStr = data["chapter"]?.Value<string>("vol");
+            string? chapterNum = chapter.Value<string>("chap");
+            string? volumeNumStr = chapter.Value<string>("vol");
             int? volumeNum = volumeNumStr is null ? null : int.Parse(volumeNumStr);
-        string? title = data["chapter"]?.Value<string>("title");
-        if(chapterNum is null)
-            throw new Exception("chapterNum is null");
-        string url = $"https://comick.io{canonical}";
-        return new Chapter(parentManga, url, chapterNum, volumeNum, hid, title);
+            string? title = chapter.Value<string>("title");
+            string? hid = chapter.Value<string>("hid");
+            string url = $"https://comick.io/comic/{parentManga.IdOnConnectorSite}/{hid}";
+            if(chapterNum is null || hid is null)
+                continue;
+            chapters.Add(new (parentManga, url, chapterNum, volumeNum, hid, title));
+        }
+        return chapters;
     }
 }

@@ -137,12 +137,12 @@ public static class Tranga
         List<Job> dueJobs = waitingJobs.FilterDueJobs();
         List<Job> jobsWithoutDependencies = dueJobs.FilterJobDependencies();
-        List<Job> jobsWithoutDownloading = jobsWithoutDependencies.Where(j => GetJobConnector(j) is null).ToList();
+        List<Job> jobsWithoutDownloading = jobsWithoutDependencies.FilterJobsWithoutDownloading();
 
         //Match running and waiting jobs per Connector
-        Dictionary<MangaConnector, Dictionary<JobType, List<Job>>> runningJobsPerConnector =
+        Dictionary<string, Dictionary<JobType, List<Job>>> runningJobsPerConnector =
             runningJobs.GetJobsPerJobTypeAndConnector();
-        Dictionary<MangaConnector, Dictionary<JobType, List<Job>>> waitingJobsPerConnector =
+        Dictionary<string, Dictionary<JobType, List<Job>>> waitingJobsPerConnector =
             jobsWithoutDependencies.GetJobsPerJobTypeAndConnector();
         List<Job> jobsNotHeldBackByConnector =
             MatchJobsRunningAndWaiting(runningJobsPerConnector, waitingJobsPerConnector);
@@ -169,11 +169,15 @@ public static class Tranga
             while(!running)
                 Thread.Sleep(10);
         }
-        Log.Debug($"Running: {runningJobs.Count} Waiting: {waitingJobs.Count} Due: {dueJobs.Count} of which \n" +
-                  $"{jobsWithoutDependencies.Count} without missing dependencies, of which\n" +
+        Log.Debug($"Running: {runningJobs.Count}\n" +
+                  $"{string.Join("\n", runningJobs.Select(s => "\t- " + s))}\n" +
+                  $"Waiting: {waitingJobs.Count} Due: {dueJobs.Count}\n" +
+                  $"{string.Join("\n", dueJobs.Select(s => "\t- " + s))}\n" +
+                  $"of which {jobsWithoutDependencies.Count} without missing dependencies, of which\n" +
                   $"\t{jobsWithoutDownloading.Count} without downloading\n" +
                   $"\t{jobsNotHeldBackByConnector.Count} not held back by Connector\n" +
-                  $"{startJobs.Count} were started.");
+                  $"{startJobs.Count} were started:\n" +
+                  $"{string.Join("\n", startJobs.Select(s => "\t- " + s))}");
 
         if (Log.IsDebugEnabled && dueJobs.Count < 1)
             if(waitingJobs.MinBy(j => j.NextExecution) is { } nextJob)
@@ -195,32 +199,68 @@ public static class Tranga
         {
             Log.Error("Failed saving Job changes.", e);
         }
-        Log.Debug($"Job-Cycle over! (took {DateTime.UtcNow.Subtract(cycleStart).TotalMilliseconds}ms)");
+        Log.Debug($"Job-Cycle over! (took {DateTime.UtcNow.Subtract(cycleStart).TotalMilliseconds}ms");
         Thread.Sleep(TrangaSettings.startNewJobTimeoutMs);
         }
     }
 
-    private static List<Job> GetRunningJobs(this IQueryable<Job> jobs) =>
-        jobs.Where(j => j.state == JobState.Running).ToList();
-
-    private static List<Job> GetWaitingJobs(this IQueryable<Job> jobs) =>
-        jobs.Where(j => j.state == JobState.CompletedWaiting || j.state == JobState.FirstExecution)
-            .ToList();
-
-    private static List<Job> FilterDueJobs(this List<Job> jobs) =>
-        jobs.Where(j => j.NextExecution < DateTime.UtcNow)
-            .ToList();
-
-    private static List<Job> FilterJobDependencies(this List<Job> jobs) =>
-        jobs.Where(job => job.DependsOnJobs.All(j => j.IsCompleted))
-            .ToList();
-
-    private static Dictionary<MangaConnector, Dictionary<JobType, List<Job>>> GetJobsPerJobTypeAndConnector(this List<Job> jobs)
+    private static List<Job> GetRunningJobs(this IQueryable<Job> jobs)
     {
-        Dictionary<MangaConnector, Dictionary<JobType, List<Job>>> ret = new();
+        DateTime start = DateTime.UtcNow;
+        List<Job> ret = jobs.Where(j => j.state == JobState.Running).ToList();
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Getting running Jobs took {end.Subtract(start).TotalMilliseconds}ms");
+        return ret;
+    }
+
+    private static List<Job> GetWaitingJobs(this IQueryable<Job> jobs)
+    {
+        DateTime start = DateTime.UtcNow;
+        List<Job> ret = jobs.Where(j => j.state == JobState.CompletedWaiting || j.state == JobState.FirstExecution).ToList();
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Getting waiting Jobs took {end.Subtract(start).TotalMilliseconds}ms");
+        return ret;
+    }
+
+    private static List<Job> FilterDueJobs(this List<Job> jobs)
+    {
+        DateTime start = DateTime.UtcNow;
+        List<Job> ret = jobs.Where(j => j.NextExecution < DateTime.UtcNow).ToList();
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Filtering Due Jobs took {end.Subtract(start).TotalMilliseconds}ms");
+        return ret;
+    }
+
+    private static List<Job> FilterJobDependencies(this List<Job> jobs)
+    {
+        DateTime start = DateTime.UtcNow;
+        List<Job> ret = jobs.Where(job => job.DependsOnJobs.All(j => j.IsCompleted)).ToList();
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Filtering Dependencies took {end.Subtract(start).TotalMilliseconds}ms");
+        return ret;
+    }
+
+    private static List<Job> FilterJobsWithoutDownloading(this List<Job> jobs)
+    {
+        JobType[] types = [JobType.MoveFileOrFolderJob, JobType.MoveMangaLibraryJob, JobType.UpdateChaptersDownloadedJob];
+        DateTime start = DateTime.UtcNow;
+        List<Job> ret = jobs.Where(j => types.Contains(j.JobType)).ToList();
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Filtering Jobs without Download took {end.Subtract(start).TotalMilliseconds}ms");
+        return ret;
+    }
+
+    private static Dictionary<string, Dictionary<JobType, List<Job>>> GetJobsPerJobTypeAndConnector(this List<Job> jobs)
+    {
+        DateTime start = DateTime.UtcNow;
+        Dictionary<string, Dictionary<JobType, List<Job>>> ret = new();
         foreach (Job job in jobs)
         {
-            if(GetJobConnector(job) is not { } connector)
+            if(GetJobConnectorName(job) is not { } connector)
                 continue;
             if (!ret.ContainsKey(connector))
                 ret.Add(connector, new());
@@ -228,18 +268,25 @@ public static class Tranga
                 ret[connector].Add(job.JobType, new());
             ret[connector][job.JobType].Add(job);
         }
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Fetching connector per Job for jobs took {end.Subtract(start).TotalMilliseconds}ms");
         return ret;
     }
 
-    private static List<Job> MatchJobsRunningAndWaiting(Dictionary<MangaConnector, Dictionary<JobType, List<Job>>> running,
-        Dictionary<MangaConnector, Dictionary<JobType, List<Job>>> waiting)
+    private static List<Job> MatchJobsRunningAndWaiting(Dictionary<string, Dictionary<JobType, List<Job>>> running,
+        Dictionary<string, Dictionary<JobType, List<Job>>> waiting)
     {
+        Log.Debug($"Matching {running.Count} running Jobs to {waiting.Count} waiting Jobs. Busy Connectors: {string.Join(", ", running.Select(r => r.Key))}");
+        DateTime start = DateTime.UtcNow;
         List<Job> ret = new();
-        foreach ((MangaConnector connector, Dictionary<JobType, List<Job>> jobTypeJobsWaiting) in waiting)
+        //Foreach MangaConnector
+        foreach ((string connector, Dictionary<JobType, List<Job>> jobTypeJobsWaiting) in waiting)
         {
+            //Check if MangaConnector has a Job running
             if (running.TryGetValue(connector, out Dictionary<JobType, List<Job>>? jobTypeJobsRunning))
-            { //MangaConnector has running Jobs
-                //Match per JobType
+            {
+                //MangaConnector has running Jobs
+                //Match per JobType (MangaConnector can have 1 Job per Type running at the same time)
                 foreach ((JobType jobType, List<Job> jobsWaiting) in jobTypeJobsWaiting)
                 {
                     if(jobTypeJobsRunning.ContainsKey(jobType))
@@ -254,9 +301,13 @@ public static class Tranga
                 }
             }
             else
-            { //MangaConnector has no running Jobs
+            {
+                //MangaConnector has no running Jobs
                 foreach ((JobType jobType, List<Job> jobsWaiting) in jobTypeJobsWaiting)
                 {
+                    if(ret.Any(j => j.JobType == jobType))
+                        //Already a job of type to be started
+                        continue;
                     if (jobType is not JobType.DownloadSingleChapterJob)
                         //If it is not a DownloadSingleChapterJob, just add the first
                         ret.Add(jobsWaiting.First());
@@ -266,20 +317,22 @@ public static class Tranga
                 }
             }
         }
+        DateTime end = DateTime.UtcNow;
+        Log.Debug($"Getting eligible jobs (not held back by Connector) took {end.Subtract(start).TotalMilliseconds}ms");
         return ret;
     }
 
-    private static MangaConnector? GetJobConnector(Job job)
+    private static string? GetJobConnectorName(Job job)
    {
        if (job is DownloadAvailableChaptersJob dacj)
-            return dacj.Manga.MangaConnector;
+            return dacj.Manga.MangaConnectorName;
        if (job is DownloadMangaCoverJob dmcj)
-            return dmcj.Manga.MangaConnector;
+            return dmcj.Manga.MangaConnectorName;
        if (job is DownloadSingleChapterJob dscj)
-            return dscj.Chapter.ParentManga.MangaConnector;
+            return dscj.Chapter.ParentManga.MangaConnectorName;
        if (job is RetrieveChaptersJob rcj)
-            return rcj.Manga.MangaConnector;
+            return rcj.Manga.MangaConnectorName;
        return null;
    }
 }

@@ -36,14 +36,14 @@ public static class TrangaSettings
     [JsonIgnore]
     public static string coverImageCache => Path.Join(workingDirectory, "imageCache");
     public static bool aprilFoolsMode { get; private set; } = true;
-    public static int startNewJobTimeoutMs { get; private set; } = 5000;
+    public static int startNewJobTimeoutMs { get; private set; } = 20000;
     [JsonIgnore]
     internal static readonly Dictionary<RequestType, int> DefaultRequestLimits = new ()
     {
         {RequestType.MangaInfo, 60},
         {RequestType.MangaDexFeed, 60},
-        {RequestType.MangaDexImage, 40},
-        {RequestType.MangaImage, 60},
+        {RequestType.MangaDexImage, 60},
+        {RequestType.MangaImage, 240},
         {RequestType.MangaCover, 60},
         {RequestType.Default, 60}
     };

@@ -39,4 +39,4 @@ WORKDIR /publish
 COPY --chown=1000:1000 --from=build-env /publish .
 USER 0
 ENTRYPOINT ["dotnet", "/publish/API.dll"]
-CMD ["-f", "-c", "-l", "/usr/share/tranga-api/logs"]
+CMD [""]

@@ -84,18 +84,16 @@ Endpoints are documented in Swagger. Just spin up an instance, and go to `http:/
 ## Built With
-- .NET
 - ASP.NET
 - Entity Framework Core
 - [PostgreSQL](https://www.postgresql.org/about/licence/)
-- [Swagger](https://github.com/domaindrivendev/Swashbuckle.AspNetCore/blob/master/LICENSE)
 - [Ngpsql](https://github.com/npgsql/npgsql/blob/main/LICENSE)
+- [Swagger](https://github.com/domaindrivendev/Swashbuckle.AspNetCore/blob/master/LICENSE)
 - [Newtonsoft.Json](https://github.com/JamesNK/Newtonsoft.Json/blob/master/LICENSE.md)
-- [Sixlabors.ImageSharp](https://docs-v2.sixlabors.com/articles/imagesharp/index.html#license)
 - [PuppeteerSharp](https://github.com/hardkoded/puppeteer-sharp/blob/master/LICENSE)
-- [FlareSolverrSharp](https://github.com/FlareSolverr/FlareSolverrSharp)
 - [Html Agility Pack (HAP)](https://github.com/zzzprojects/html-agility-pack/blob/master/LICENSE)
 - [Soenneker.Utils.String.NeedlemanWunsch](https://github.com/soenneker/soenneker.utils.string.needlemanwunsch/blob/main/LICENSE)
+- [Sixlabors.ImageSharp](https://docs-v2.sixlabors.com/articles/imagesharp/index.html#license)
 - 💙 Blåhaj 🦈
 
 <p align="right">(<a href="#readme-top">back to top</a>)</p>
@@ -127,13 +125,13 @@ access the folder. Permission conflicts with Komga and Kavita should thus be lim
 ### Bare-Metal
 
-While not supported/currently built, Tranga will also run Bare-Metal without issue.
+While not supported/currently built, Tranga should also run Bare-Metal without issue.
 
 Configuration-Files will be stored per OS:
 - Linux `/usr/share/tranga-api`
 - Windows `%appdata%/tranga-api`
 
-Downloads (default) are stored in - but this can be configured in `settings.json`:
+Downloads (default) are stored in - but this can be configured in `settings.json` (which will be generated on first after first launch):
 - Linux `/Manga`
 - Windows `%currentDirectory%/Downloads`
@@ -149,9 +147,10 @@ If you want to contribute, please feel free to fork and create a Pull-Request!
 General rules:
 - Strongly-type your variables. This improves readability.
   ```csharp
-  var xyz = Object.GetSomething(); //Do not do this. What type is xyz?
+  var xyz = Object.GetSomething(); //Do not do this. What type is xyz (without looking at Method returns etc.)?
   Manga[] zyx = Object.GetAnotherThing(); //I can now easily see that zyx is an Array.
   ```
+Tranga is using a code-first Entity-Framework Core approach. If you modify the db-table structure you need to create a migration.
 
 **A broad overview of where is what:**<br />
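On the migration rule added above: with a code-first setup this usually comes down to something like `dotnet ef migrations add <MigrationName>` run against the API project (requires the `dotnet-ef` tool); the exact project and startup-project arguments are not specified in this diff.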
@@ -172,6 +171,10 @@ If you want to add a new Website-Connector: <br />
    in the constructor).
 4. In `Program.cs` add a new Object to the Array.
 
+### How to test locally
+In the Project root a `docker-compose.local.yaml` file will compile the code and create the container(s).
+
 <!-- LICENSE -->
 ## License
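The newly added "How to test locally" section implies the usual Compose workflow; typically something along the lines of `docker compose -f docker-compose.local.yaml up --build` from the repository root, though the exact invocation is not spelled out here.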

@@ -16,6 +16,11 @@ services:
     environment:
       - POSTGRES_HOST=tranga-pg
     restart: unless-stopped
+    logging:
+      driver: json-file
+      options:
+        max-size: "10m"
+        max-file: "5"
   tranga-pg:
     image: postgres:latest
     container_name: tranga-pg
@@ -30,3 +35,8 @@
       retries: 5
       start_period: 80s
     restart: unless-stopped
+    logging:
+      driver: json-file
+      options:
+        max-size: "10m"
+        max-file: "5"

@@ -1,7 +1,7 @@
 version: '3'
 services:
   tranga-api:
-    image: glax/tranga-api:latest
+    image: glax/tranga-api:Server-V2
     container_name: tranga-api
     volumes:
       - ./Manga:/Manga
@@ -14,14 +14,24 @@ services:
     environment:
       - POSTGRES_HOST=tranga-pg
     restart: unless-stopped
+    logging:
+      driver: json-file
+      options:
+        max-size: "10m"
+        max-file: "5"
   tranga-website:
-    image: glax/tranga-website:latest
+    image: glax/tranga-website:Server-V2
     container_name: tranga-website
     ports:
       - "9555:80"
     depends_on:
       - tranga-api
     restart: unless-stopped
+    logging:
+      driver: json-file
+      options:
+        max-size: "10m"
+        max-file: "5"
   tranga-pg:
     image: postgres:latest
     container_name: tranga-pg
@@ -36,3 +46,8 @@ services:
       retries: 5
       start_period: 80s
     restart: unless-stopped
+    logging:
+      driver: json-file
+      options:
+        max-size: "10m"
+        max-file: "5"