Merge pull request 'cuttingedge' (!57) from cuttingedge into master
Reviewed-on: #57
commit f22c332cab
@@ -62,7 +62,7 @@ public class MemoryLogger : LoggerBase
 ret.Add(_logMessages.GetValueAtIndex(_lastLogMessageIndex + retIndex).ToString());
 }
 }
-catch (NullReferenceException e)//Called when LogMessage has not finished writing
+catch (NullReferenceException)//Called when LogMessage has not finished writing
 {
 break;
 }
@@ -8,5 +8,6 @@
<s:Boolean x:Key="/Default/UserDictionary/Words/=mangakatana/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Manganato/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Mangasee/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Mangaworld/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Taskmanager/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Tranga/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>
@@ -27,7 +27,7 @@ public class JobBoss : GlobalBase
 {
 Log($"Added {job}");
 this.jobs.Add(job);
-ExportJob(job);
+UpdateJobFile(job);
 }
 }

@@ -37,6 +37,10 @@ public class JobBoss : GlobalBase
 AddJob(job);
 }

+/// <summary>
+/// Compares contents of the provided job and all current jobs
+/// Does not check if objects are the same
+/// </summary>
 public bool ContainsJobLike(Job job)
 {
 if (job is DownloadChapter dcJob)
@@ -57,13 +61,14 @@ public class JobBoss : GlobalBase
 this.jobs.Remove(job);
 if(job.subJobs is not null && job.subJobs.Any())
 RemoveJobs(job.subJobs);
-ExportJob(job);
+UpdateJobFile(job);
 }

 public void RemoveJobs(IEnumerable<Job?> jobsToRemove)
 {
-Log($"Removing {jobsToRemove.Count()} jobs.");
-foreach (Job? job in jobsToRemove)
+List<Job?> toRemove = jobsToRemove.ToList(); //Prevent multiple enumeration
+Log($"Removing {toRemove.Count()} jobs.");
+foreach (Job? job in toRemove)
 if(job is not null)
 RemoveJob(job);
 }
@@ -96,7 +101,7 @@ public class JobBoss : GlobalBase
 Chapter? chapter = null)
 {
 if (chapter is not null)
-return GetJobsLike(mangaConnector?.name, chapter.Value.parentManga.internalId, chapter?.chapterNumber);
+return GetJobsLike(mangaConnector?.name, chapter.Value.parentManga.internalId, chapter.Value.chapterNumber);
 else
 return GetJobsLike(mangaConnector?.name, publication?.internalId);
 }
@@ -122,47 +127,52 @@ public class JobBoss : GlobalBase

 private bool QueueContainsJob(Job job)
 {
-mangaConnectorJobQueue.TryAdd(job.mangaConnector, new Queue<Job>());
+if (mangaConnectorJobQueue.TryAdd(job.mangaConnector, new Queue<Job>()))//If we can add the queue, there is certainly no job in it
+return true;
 return mangaConnectorJobQueue[job.mangaConnector].Contains(job);
 }

 public void AddJobToQueue(Job job)
 {
 Log($"Adding Job to Queue. {job}");
-mangaConnectorJobQueue.TryAdd(job.mangaConnector, new Queue<Job>());
-Queue<Job> connectorJobQueue = mangaConnectorJobQueue[job.mangaConnector];
-if(!connectorJobQueue.Contains(job))
-connectorJobQueue.Enqueue(job);
+if(!QueueContainsJob(job))
+mangaConnectorJobQueue[job.mangaConnector].Enqueue(job);
 job.ExecutionEnqueue();
 }

-public void AddJobsToQueue(IEnumerable<Job> jobs)
+private void AddJobsToQueue(IEnumerable<Job> newJobs)
 {
-foreach(Job job in jobs)
+foreach(Job job in newJobs)
 AddJobToQueue(job);
 }

-public void LoadJobsList(HashSet<MangaConnector> connectors)
+private void LoadJobsList(HashSet<MangaConnector> connectors)
 {
-Directory.CreateDirectory(settings.jobsFolderPath);
+if (!Directory.Exists(settings.jobsFolderPath)) //No jobs to load
+{
+Directory.CreateDirectory(settings.jobsFolderPath);
+return;
+}
 Regex idRex = new (@"(.*)\.json");

-foreach (FileInfo file in new DirectoryInfo(settings.jobsFolderPath).EnumerateFiles())
-if (idRex.IsMatch(file.Name))
-{
-Job job = JsonConvert.DeserializeObject<Job>(File.ReadAllText(file.FullName),
-new JobJsonConverter(this, new MangaConnectorJsonConverter(this, connectors)))!;
-this.jobs.Add(job);
-}
-
-foreach (Job job in this.jobs)
-this.jobs.FirstOrDefault(jjob => jjob.id == job.parentJobId)?.AddSubJob(job);
+//Load json-job-files
+foreach (FileInfo file in new DirectoryInfo(settings.jobsFolderPath).EnumerateFiles().Where(fileInfo => idRex.IsMatch(fileInfo.Name)))
+{
+Job job = JsonConvert.DeserializeObject<Job>(File.ReadAllText(file.FullName),
+new JobJsonConverter(this, new MangaConnectorJsonConverter(this, connectors)))!;
+this.jobs.Add(job);
+}

-foreach (DownloadNewChapters ncJob in this.jobs.Where(job => job is DownloadNewChapters))
-cachedPublications.Add(ncJob.manga);
+//Connect jobs to parent-jobs and add Publications to cache
+foreach (Job job in this.jobs)
+{
+this.jobs.FirstOrDefault(jjob => jjob.id == job.parentJobId)?.AddSubJob(job);
+if(job is DownloadNewChapters dncJob)
+cachedPublications.Add(dncJob.manga);
+}
 }

-public void ExportJob(Job job)
+private void UpdateJobFile(Job job)
 {
 string jobFilePath = Path.Join(settings.jobsFolderPath, $"{job.id}.json");

@@ -190,11 +200,11 @@ public class JobBoss : GlobalBase
 }
 }

-public void ExportJobsList()
+private void UpdateAllJobFiles()
 {
 Log("Exporting Jobs");
 foreach (Job job in this.jobs)
-ExportJob(job);
+UpdateJobFile(job);

 //Remove files with jobs not in this.jobs-list
 Regex idRex = new (@"(.*)\.json");
@@ -228,9 +238,10 @@ public class JobBoss : GlobalBase
 Job queueHead = jobQueue.Peek();
 if (queueHead.progressToken.state is ProgressToken.State.Complete or ProgressToken.State.Cancelled)
 {
-queueHead.ResetProgress();
 if(!queueHead.recurring)
 RemoveJob(queueHead);
+else
+queueHead.ResetProgress();
 jobQueue.Dequeue();
 Log($"Next job in {jobs.MinBy(job => job.nextExecution)?.nextExecution.Subtract(DateTime.Now)} {jobs.MinBy(job => job.nextExecution)?.id}");
 }else if (queueHead.progressToken.state is ProgressToken.State.Standby)
@@ -238,6 +249,10 @@ public class JobBoss : GlobalBase
 Job[] subJobs = jobQueue.Peek().ExecuteReturnSubTasks().ToArray();
 AddJobs(subJobs);
 AddJobsToQueue(subJobs);
+}else if (queueHead.progressToken.state is ProgressToken.State.Running && DateTime.Now.Subtract(queueHead.progressToken.lastUpdate) > TimeSpan.FromMinutes(5))
+{
+Log($"{queueHead} inactive for more than 5 minutes. Cancelling.");
+queueHead.Cancel();
 }
 }
 }
@@ -6,7 +6,7 @@ public class ProgressToken
 public int increments { get; set; }
 public int incrementsCompleted { get; set; }
 public float progress => GetProgress();

+public DateTime lastUpdate { get; private set; }
 public DateTime executionStarted { get; private set; }
 public TimeSpan timeRemaining => GetTimeRemaining();
-
@@ -20,12 +20,13 @@ public class ProgressToken
 this.incrementsCompleted = 0;
 this.state = State.Waiting;
 this.executionStarted = DateTime.UnixEpoch;
+this.lastUpdate = DateTime.UnixEpoch;
 }

 private float GetProgress()
 {
 if(increments > 0 && incrementsCompleted > 0)
-return (float)incrementsCompleted / (float)increments;
+return incrementsCompleted / (float)increments;
 return 0;
 }

@@ -38,6 +39,7 @@ public class ProgressToken

 public void Increment()
 {
+this.lastUpdate = DateTime.Now;
 this.incrementsCompleted++;
 if (incrementsCompleted > increments)
 state = State.Complete;
@@ -45,27 +47,32 @@ public class ProgressToken

 public void Standby()
 {
+this.lastUpdate = DateTime.Now;
 state = State.Standby;
 }

 public void Start()
 {
+this.lastUpdate = DateTime.Now;
 state = State.Running;
 this.executionStarted = DateTime.Now;
 }

 public void Complete()
 {
+this.lastUpdate = DateTime.Now;
 state = State.Complete;
 }

 public void Cancel()
 {
+this.lastUpdate = DateTime.Now;
 state = State.Cancelled;
 }

 public void Waiting()
 {
+this.lastUpdate = DateTime.Now;
 state = State.Waiting;
 }
 }
@@ -5,7 +5,7 @@ namespace Tranga.LibraryConnectors;

 public class LibraryManagerJsonConverter : JsonConverter
 {
-private GlobalBase _clone;
+private readonly GlobalBase _clone;

 internal LibraryManagerJsonConverter(GlobalBase clone)
 {
@@ -48,7 +48,8 @@ internal class ChromiumDownloadClient : DownloadClient
 "--disable-gpu",
 "--disable-dev-shm-usage",
 "--disable-setuid-sandbox",
-"--no-sandbox"}
+"--no-sandbox"},
+Timeout = 10000
 });
 }

@@ -59,8 +60,10 @@ internal class ChromiumDownloadClient : DownloadClient

 protected override RequestResult MakeRequestInternal(string url, string? referrer = null)
 {
-IPage page = this.browser!.NewPageAsync().Result;
+IPage page = this.browser.NewPageAsync().Result;
+page.DefaultTimeout = 10000;
 IResponse response = page.GoToAsync(url, WaitUntilNavigation.DOMContentLoaded).Result;
+Log("Page loaded.");

 Stream stream = Stream.Null;
 HtmlDocument? document = null;
@@ -83,7 +86,7 @@ internal class ChromiumDownloadClient : DownloadClient
 page.CloseAsync();
 return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
 }
-
+
 page.CloseAsync();
 return new RequestResult(response.Status, document, stream, false, "");
 }
@@ -30,7 +30,10 @@ internal abstract class DownloadClient : GlobalBase
 .Subtract(DateTime.Now.Subtract(_lastExecutedRateLimit[requestType]));

 if (rateLimitTimeout > TimeSpan.Zero)
+{
+Log($"Waiting {rateLimitTimeout.TotalSeconds} seconds");
 Thread.Sleep(rateLimitTimeout);
+}

 RequestResult result = MakeRequestInternal(url, referrer);
 _lastExecutedRateLimit[requestType] = DateTime.Now;
@@ -58,9 +58,9 @@ public abstract class MangaConnector : GlobalBase
 Log($"Getting new Chapters for {manga}");
 Chapter[] newChapters = this.GetChapters(manga, language);
 Log($"Checking for duplicates {manga}");
-List<Chapter> newChaptersList = newChapters.Where(nChapter =>
-float.Parse(nChapter.chapterNumber, numberFormatDecimalPoint) > manga.ignoreChaptersBelow &&
-!nChapter.CheckChapterIsDownloaded(settings.downloadLocation)).ToList();
+List<Chapter> newChaptersList = newChapters.Where(nChapter => float.TryParse(nChapter.chapterNumber, numberFormatDecimalPoint, out float chapterNumber)
+&& chapterNumber > manga.ignoreChaptersBelow
+&& !nChapter.CheckChapterIsDownloaded(settings.downloadLocation)).ToList();
 Log($"{newChaptersList.Count} new chapters. {manga}");

 return newChaptersList.ToArray();
@@ -175,11 +175,15 @@ public abstract class MangaConnector : GlobalBase
 private HttpStatusCode DownloadImage(string imageUrl, string fullPath, byte requestType, string? referrer = null)
 {
 DownloadClient.RequestResult requestResult = downloadClient.MakeRequest(imageUrl, requestType, referrer);
-if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.result == Stream.Null)
+
+if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
 return requestResult.statusCode;
-byte[] buffer = new byte[requestResult.result.Length];
-requestResult.result.ReadExactly(buffer, 0, buffer.Length);
-File.WriteAllBytes(fullPath, buffer);
+if (requestResult.result == Stream.Null)
+return HttpStatusCode.NotFound;
+
+FileStream fs = new (fullPath, FileMode.Create);
+requestResult.result.CopyTo(fs);
+fs.Close();
 return requestResult.statusCode;
 }

@@ -213,6 +217,7 @@ public abstract class MangaConnector : GlobalBase
 string extension = split[^1];
 Log($"Downloading image {chapter + 1:000}/{imageUrls.Length:000}"); //TODO
 HttpStatusCode status = DownloadImage(imageUrl, Path.Join(tempFolder, $"{chapter++}.{extension}"), requestType, referrer);
+Log($"{saveArchiveFilePath} {chapter + 1:000}/{imageUrls.Length:000} {status}");
 if ((int)status < 200 || (int)status >= 300)
 {
 progressToken?.Complete();
@@ -220,7 +225,7 @@ public abstract class MangaConnector : GlobalBase
 }
 if (progressToken?.cancellationRequested ?? false)
 {
-progressToken?.Complete();
+progressToken.Complete();
 return HttpStatusCode.RequestTimeout;
 }
 progressToken?.Increment();
@@ -6,12 +6,12 @@ namespace Tranga.MangaConnectors;
 public class MangaConnectorJsonConverter : JsonConverter
 {
 private GlobalBase _clone;
-private HashSet<MangaConnector> connectors;
+private readonly HashSet<MangaConnector> _connectors;

 internal MangaConnectorJsonConverter(GlobalBase clone, HashSet<MangaConnector> connectors)
 {
 this._clone = clone;
-this.connectors = connectors;
+this._connectors = connectors;
 }

 public override bool CanConvert(Type objectType)
@@ -25,15 +25,15 @@ public class MangaConnectorJsonConverter : JsonConverter
 switch (jo.GetValue("name")!.Value<string>()!)
 {
 case "MangaDex":
-return this.connectors.First(c => c is MangaDex);
+return this._connectors.First(c => c is MangaDex);
 case "Manganato":
-return this.connectors.First(c => c is Manganato);
+return this._connectors.First(c => c is Manganato);
 case "MangaKatana":
-return this.connectors.First(c => c is MangaKatana);
+return this._connectors.First(c => c is MangaKatana);
 case "Mangasee":
-return this.connectors.First(c => c is Mangasee);
+return this._connectors.First(c => c is Mangasee);
 case "Mangaworld":
-return this.connectors.First(c => c is Mangaworld);
+return this._connectors.First(c => c is Mangaworld);
 }

 throw new Exception();
@@ -1,5 +1,4 @@
-using System.Globalization;
-using System.Net;
+using System.Net;
 using System.Text.Json.Nodes;
 using System.Text.RegularExpressions;
 using Tranga.Jobs;
@@ -51,16 +50,23 @@ public class MangaDex : MangaConnector
 if (result is null)
 break;

-total = result["total"]!.GetValue<int>(); //Update the total number of Publications
-
-JsonArray mangaInResult = result["data"]!.AsArray(); //Manga-data-Array
-//Loop each Manga and extract information from JSON
-foreach (JsonNode? mangaNode in mangaInResult)
+if(result.ContainsKey("total"))
+total = result["total"]!.GetValue<int>(); //Update the total number of Publications
+else continue;
+
+if (result.ContainsKey("data"))
 {
-Log($"Getting publication data. {++loadedPublicationData}/{total}");
-Manga manga = MangaFromJsonObject((JsonObject)mangaNode);
-retManga.Add(manga); //Add Publication (Manga) to result
-}
+JsonArray mangaInResult = result["data"]!.AsArray(); //Manga-data-Array
+//Loop each Manga and extract information from JSON
+foreach (JsonNode? mangaNode in mangaInResult)
+{
+if(mangaNode is null)
+continue;
+Log($"Getting publication data. {++loadedPublicationData}/{total}");
+if(MangaFromJsonObject((JsonObject) mangaNode) is { } manga)
+retManga.Add(manga); //Add Publication (Manga) to result
+}
+}//else continue;
 }
 Log($"Retrieved {retManga.Count} publications. Term=\"{publicationTitle}\"");
 return retManga.ToArray();
@@ -81,20 +87,30 @@ public class MangaDex : MangaConnector
 return null;
 }

-private Manga MangaFromJsonObject(JsonObject manga)
+private Manga? MangaFromJsonObject(JsonObject manga)
 {
+if (!manga.ContainsKey("attributes"))
+return null;
 JsonObject attributes = manga["attributes"]!.AsObject();
-
+
+if(!manga.ContainsKey("id"))
+return null;
 string publicationId = manga["id"]!.GetValue<string>();

+if(!attributes.ContainsKey("title"))
+return null;
 string title = attributes["title"]!.AsObject().ContainsKey("en") && attributes["title"]!["en"] is not null
 ? attributes["title"]!["en"]!.GetValue<string>()
 : attributes["title"]![((IDictionary<string, JsonNode?>)attributes["title"]!.AsObject()).Keys.First()]!.GetValue<string>();

+if(!attributes.ContainsKey("description"))
+return null;
 string? description = attributes["description"]!.AsObject().ContainsKey("en") && attributes["description"]!["en"] is not null
 ? attributes["description"]!["en"]!.GetValue<string?>()
 : null;

+if(!attributes.ContainsKey("altTitles"))
+return null;
 JsonArray altTitlesObject = attributes["altTitles"]!.AsArray();
 Dictionary<string, string> altTitlesDict = new();
 foreach (JsonNode? altTitleNode in altTitlesObject)
@@ -104,6 +120,8 @@ public class MangaDex : MangaConnector
 altTitlesDict.TryAdd(key, altTitleObject[key]!.GetValue<string>());
 }

+if(!attributes.ContainsKey("tags"))
+return null;
 JsonArray tagsObject = attributes["tags"]!.AsArray();
 HashSet<string> tags = new();
 foreach (JsonNode? tagNode in tagsObject)
@@ -149,6 +167,8 @@ public class MangaDex : MangaConnector
 ? attributes["originalLanguage"]!.GetValue<string?>()
 : null;

+if(!attributes.ContainsKey("status"))
+return null;
 string status = attributes["status"]!.GetValue<string>();

 Manga pub = new(
@@ -226,7 +246,7 @@ public class MangaDex : MangaConnector
 {
 if (progressToken?.cancellationRequested ?? false)
 {
-progressToken?.Cancel();
+progressToken.Cancel();
 return HttpStatusCode.RequestTimeout;
 }

@@ -1,5 +1,4 @@
-using System.Globalization;
-using System.Net;
+using System.Net;
 using System.Text.RegularExpressions;
 using HtmlAgilityPack;
 using Tranga.Jobs;
@@ -187,7 +186,7 @@ public class MangaKatana : MangaConnector
 {
 if (progressToken?.cancellationRequested ?? false)
 {
-progressToken?.Cancel();
+progressToken.Cancel();
 return HttpStatusCode.RequestTimeout;
 }

@@ -1,5 +1,4 @@
-using System.Globalization;
-using System.Net;
+using System.Net;
 using System.Text.RegularExpressions;
 using HtmlAgilityPack;
 using Tranga.Jobs;
@@ -35,7 +34,8 @@ public class Manganato : MangaConnector

 private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
 {
-IEnumerable<HtmlNode> searchResults = document.DocumentNode.Descendants("div").Where(n => n.HasClass("search-story-item"));
+List<HtmlNode> searchResults = document.DocumentNode.Descendants("div").Where(n => n.HasClass("search-story-item")).ToList();
+Log($"{searchResults.Count} items.");
 List<string> urls = new();
 foreach (HtmlNode mangaResult in searchResults)
 {
@@ -141,7 +141,12 @@ public class Manganato : MangaConnector
 return Array.Empty<Chapter>();
 List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
 Log($"Got {chapters.Count} chapters. {manga}");
-return chapters.OrderBy(chapter => Convert.ToSingle(chapter.chapterNumber, numberFormatDecimalPoint)).ToArray();
+return chapters.OrderBy(chapter =>
+{
+if (float.TryParse(chapter.chapterNumber, numberFormatDecimalPoint, out float chapterNumber))
+return chapterNumber;
+else return 0;
+}).ToArray();
 }

 private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
@@ -159,7 +164,7 @@ public class Manganato : MangaConnector
 string fullString = chapterInfo.Descendants("a").First(d => d.HasClass("chapter-name")).InnerText;

 string? volumeNumber = volRex.IsMatch(fullString) ? volRex.Match(fullString).Groups[1].Value : null;
-string chapterNumber = chapterRex.Match(fullString).Groups[1].Value;
+string chapterNumber = chapterRex.IsMatch(fullString) ? chapterRex.Match(fullString).Groups[1].Value : fullString;
 string chapterName = nameRex.Match(fullString).Groups[3].Value;
 string url = chapterInfo.Descendants("a").First(d => d.HasClass("chapter-name"))
 .GetAttributeValue("href", "");
@@ -173,7 +178,7 @@ public class Manganato : MangaConnector
 {
 if (progressToken?.cancellationRequested ?? false)
 {
-progressToken?.Cancel();
+progressToken.Cancel();
 return HttpStatusCode.RequestTimeout;
 }

@@ -2,7 +2,6 @@
 using System.Text.RegularExpressions;
 using System.Xml.Linq;
 using HtmlAgilityPack;
-using Newtonsoft.Json;
 using Tranga.Jobs;

 namespace Tranga.MangaConnectors;
@@ -20,7 +19,8 @@ public class Mangasee : MangaConnector
 public override Manga[] GetManga(string publicationTitle = "")
 {
 Log($"Searching Publications. Term=\"{publicationTitle}\"");
-string requestUrl = $"https://mangasee123.com/_search.php";
+string sanitizedTitle = string.Join('+', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
+string requestUrl = $"https://mangasee123.com/search/?name={sanitizedTitle}";
 DownloadClient.RequestResult requestResult =
 downloadClient.MakeRequest(requestUrl, 1);
 if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
@@ -28,7 +28,7 @@ public class Mangasee : MangaConnector

 if (requestResult.htmlDocument is null)
 return Array.Empty<Manga>();
-Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument, publicationTitle);
+Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
 Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
 return publications;
 }
@@ -44,37 +44,25 @@ public class Mangasee : MangaConnector
 return null;
 }

-private Manga[] ParsePublicationsFromHtml(HtmlDocument document, string publicationTitle)
+private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
 {
-string jsonString = document.DocumentNode.SelectSingleNode("//body").InnerText;
-List<SearchResultItem> result = JsonConvert.DeserializeObject<List<SearchResultItem>>(jsonString)!;
-Dictionary<SearchResultItem, int> queryFiltered = new();
-foreach (SearchResultItem resultItem in result)
-{
-int matches = resultItem.GetMatches(publicationTitle);
-if (matches > 0)
-queryFiltered.TryAdd(resultItem, matches);
-}
-
-queryFiltered = queryFiltered.Where(item => item.Value >= publicationTitle.Split(' ').Length - 1)
-.ToDictionary(item => item.Key, item => item.Value);
-
-Log($"Retrieved {queryFiltered.Count} publications.");
+HtmlNode resultsNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']/div[last()]/div[1]/div");
+Log($"{resultsNode.SelectNodes("div").Count} items.");

 HashSet<Manga> ret = new();
-List<SearchResultItem> orderedFiltered =
-queryFiltered.OrderBy(item => item.Value).ToDictionary(item => item.Key, item => item.Value).Keys.ToList();
-
-foreach (SearchResultItem orderedItem in orderedFiltered)
+foreach (HtmlNode resultNode in resultsNode.SelectNodes("div"))
 {
-Manga? manga = GetMangaFromUrl($"https://mangasee123.com/manga/{orderedItem.i}");
+string url = resultNode.Descendants().First(d => d.HasClass("SeriesName")).GetAttributeValue("href", "");
+Manga? manga = GetMangaFromUrl($"https://mangasee123.com{url}");
 if (manga is not null)
 ret.Add((Manga)manga);
 }

 return ret.ToArray();
 }

+
+
 private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId)
 {
 string originalLanguage = "", status = "";
@@ -88,71 +76,42 @@ public class Mangasee : MangaConnector
 HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//h1");
 string sortName = titleNode.InnerText;

-HtmlNode[] authorsNodes = document.DocumentNode.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Author(s):']/..").Descendants("a").ToArray();
+HtmlNode[] authorsNodes = document.DocumentNode
+.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Author(s):']/..").Descendants("a")
+.ToArray();
 List<string> authors = new();
-foreach(HtmlNode authorNode in authorsNodes)
+foreach (HtmlNode authorNode in authorsNodes)
 authors.Add(authorNode.InnerText);

-HtmlNode[] genreNodes = document.DocumentNode.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Genre(s):']/..").Descendants("a").ToArray();
-
+HtmlNode[] genreNodes = document.DocumentNode
+.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Genre(s):']/..").Descendants("a")
+.ToArray();
 foreach (HtmlNode genreNode in genreNodes)
 tags.Add(genreNode.InnerText);

-HtmlNode yearNode = document.DocumentNode.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Released:']/..").Descendants("a").First();
-
+HtmlNode yearNode = document.DocumentNode
+.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Released:']/..").Descendants("a")
+.First();
 int year = Convert.ToInt32(yearNode.InnerText);

-HtmlNode[] statusNodes = document.DocumentNode.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Status:']/..").Descendants("a").ToArray();
-foreach(HtmlNode statusNode in statusNodes)
-
+HtmlNode[] statusNodes = document.DocumentNode
+.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Status:']/..").Descendants("a")
+.ToArray();
+foreach (HtmlNode statusNode in statusNodes)
 if (statusNode.InnerText.Contains("publish", StringComparison.CurrentCultureIgnoreCase))
 status = statusNode.InnerText.Split(' ')[0];

-HtmlNode descriptionNode = document.DocumentNode.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Description:']/..").Descendants("div").First();
-
+HtmlNode descriptionNode = document.DocumentNode
+.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Description:']/..")
+.Descendants("div").First();
 string description = descriptionNode.InnerText;

-Manga manga = new (sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
-
+Manga manga = new(sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
+coverFileNameInCache, links,
 year, originalLanguage, status, publicationId);
 cachedPublications.Add(manga);
 return manga;
 }

-// ReSharper disable once ClassNeverInstantiated.Local Will be instantiated during deserialization
-private class SearchResultItem
-{
-public string i { get; init; }
-public string s { get; init; }
-public string[] a { get; init; }
-
-[JsonConstructor]
-public SearchResultItem(string i, string s, string[] a)
-{
-this.i = i;
-this.s = s;
-this.a = a;
-}
-
-public int GetMatches(string title)
-{
-int ret = 0;
-Regex cleanRex = new("[A-z0-9]*");
-string[] badWords = { "a", "an", "no", "ni", "so", "as", "and", "the", "of", "that", "in", "is", "for" };
-
-string[] titleTerms = title.Split(new[] { ' ', '-' }).Where(str => !badWords.Contains(str)).ToArray();
-
-foreach (Match matchTerm in cleanRex.Matches(this.i))
-ret += titleTerms.Count(titleTerm =>
-titleTerm.Equals(matchTerm.Value, StringComparison.OrdinalIgnoreCase));
-
-foreach (Match matchTerm in cleanRex.Matches(this.s))
-ret += titleTerms.Count(titleTerm =>
-titleTerm.Equals(matchTerm.Value, StringComparison.OrdinalIgnoreCase));
-
-foreach(string alt in this.a)
-foreach (Match matchTerm in cleanRex.Matches(alt))
-ret += titleTerms.Count(titleTerm =>
-titleTerm.Equals(matchTerm.Value, StringComparison.OrdinalIgnoreCase));
-
-return ret;
-}
-}
-
 public override Chapter[] GetChapters(Manga manga, string language="en")
 {
@@ -163,11 +122,10 @@ public class Mangasee : MangaConnector
 foreach (XElement chapter in chapterItems)
 {
 string volumeNumber = "1";
-string chapterName = chapter.Descendants("title").First().Value;
-string chapterNumber = Regex.Matches(chapterName, "[0-9]+")[^1].ToString();
-
 string url = chapter.Descendants("link").First().Value;
-url = url.Replace(Regex.Matches(url,"(-page-[0-9])")[0].ToString(),"");
+string chapterNumber = Regex.Match(url, @"-chapter-([0-9\.]+)").Groups[1].ToString();
+
+url = url.Replace(Regex.Match(url,"(-page-[0-9])").Value,"");
 chapters.Add(new Chapter(manga, "", volumeNumber, chapterNumber, url));
 }

@@ -180,14 +138,14 @@ public class Mangasee : MangaConnector
 {
 if (progressToken?.cancellationRequested ?? false)
 {
-progressToken?.Cancel();
+progressToken.Cancel();
 return HttpStatusCode.RequestTimeout;
 }

 Manga chapterParentManga = chapter.parentManga;
 if (progressToken?.cancellationRequested ?? false)
 {
-progressToken?.Cancel();
+progressToken.Cancel();
 return HttpStatusCode.RequestTimeout;
 }

@@ -69,11 +69,8 @@ public class Mangaworld: MangaConnector

 private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId)
 {
-string status = "";
 Dictionary<string, string> altTitles = new();
 Dictionary<string, string>? links = null;
-HashSet<string> tags = new();
-string[] authors = Array.Empty<string>();
 string originalLanguage = "";

 HtmlNode infoNode = document.DocumentNode.Descendants("div").First(d => d.HasClass("info"));
@@ -90,13 +87,13 @@ public class Mangaworld: MangaConnector

 HtmlNode genresNode =
 metadata.SelectSingleNode("//span[text()='Generi: ']/..");
-tags = genresNode.SelectNodes("a").Select(node => node.InnerText).ToHashSet();
+HashSet<string> tags = genresNode.SelectNodes("a").Select(node => node.InnerText).ToHashSet();

 HtmlNode authorsNode =
 metadata.SelectSingleNode("//span[text()='Autore: ']/..");
-authors = new[] { authorsNode.SelectNodes("a").First().InnerText };
+string[] authors = new[] { authorsNode.SelectNodes("a").First().InnerText };

-status = metadata.SelectSingleNode("//span[text()='Stato: ']/..").SelectNodes("a").First().InnerText;
+string status = metadata.SelectSingleNode("//span[text()='Stato: ']/..").SelectNodes("a").First().InnerText;

 string posterUrl = document.DocumentNode.SelectSingleNode("//img[@class='rounded']").GetAttributeValue("src", "");

@@ -169,7 +166,7 @@ public class Mangaworld: MangaConnector
 {
 if (progressToken?.cancellationRequested ?? false)
 {
-progressToken?.Cancel();
+progressToken.Cancel();
 return HttpStatusCode.RequestTimeout;
 }

@@ -52,7 +52,7 @@ public class Server : GlobalBase
 });
 t.Start();
 }
-catch (HttpListenerException e)
+catch (HttpListenerException)
 {

 }
@@ -59,7 +59,7 @@ public partial class Tranga : GlobalBase
 private static void PrintHelp()
 {
 Console.WriteLine("Tranga-Help:");
-foreach (Argument argument in arguments.Values)
+foreach (Argument argument in Arguments.Values)
 {
 foreach(string name in argument.names)
 Console.Write("{0} ", name);
@@ -82,14 +82,14 @@ public partial class Tranga : GlobalBase
 {
 List<string> argsList = args.ToList();
 List<string> ret = new();
-foreach (string name in arguments[arg].names)
+foreach (string name in Arguments[arg].names)
 {
 int argIndex = argsList.IndexOf(name);
 if (argIndex != -1)
 {
-if (arguments[arg].parameterCount == 0)
+if (Arguments[arg].parameterCount == 0)
 return ret.ToArray();
-for (int parameterIndex = 1; parameterIndex <= arguments[arg].parameterCount; parameterIndex++)
+for (int parameterIndex = 1; parameterIndex <= Arguments[arg].parameterCount; parameterIndex++)
 {
 if(argIndex + parameterIndex >= argsList.Count || args[argIndex + parameterIndex].Contains('-'))//End of arguments, or no parameter provided, when one is required
 Console.WriteLine($"No parameter provided for argument {name}. -h for help.");
@@ -100,7 +100,7 @@ public partial class Tranga : GlobalBase
 return ret.Any() ? ret.ToArray() : null;
 }

-private static Dictionary<ArgEnum, Argument> arguments = new()
+private static readonly Dictionary<ArgEnum, Argument> Arguments = new()
 {
 { ArgEnum.DownloadLocation, new(new []{"-d", "--downloadLocation"}, 1, "Directory to which downloaded Manga are saved") },
 { ArgEnum.WorkingDirectory, new(new []{"-w", "--workingDirectory"}, 1, "Directory in which application-data is saved") },
@@ -46,7 +46,7 @@ public class TrangaSettings
 this.downloadLocation = downloadLocation!;
 this.workingDirectory = workingDirectory!;
 }
-UpdateDownloadLocation(this.downloadLocation!, false);
+UpdateDownloadLocation(this.downloadLocation, false);
 }

 public HashSet<LibraryConnector> LoadLibraryConnectors(GlobalBase clone)