Compare commits

26 Commits

SHA1 Message Date
55cc2a2e84 Merge pull request #277 from C9Glax/asuratoon
Asuratoon
2024-11-02 17:51:12 +01:00
b619109ea1 fix #141 chapternames 2024-11-02 17:48:18 +01:00
72943330c3 Merge branch 'refs/heads/cuttingedge' into asuratoon 2024-11-02 17:45:13 +01:00
38bc1e4d53 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-11-02 17:44:30 +01:00
47479f7a0d Fix chaptermarkers.
Don't create one if Chapter does not have an ID
2024-11-02 17:44:23 +01:00
b2381be860 #141 fix ParsePublicationsFromHtml, statusNode, titleNode, firstChapterNode
fix ParseChaptersFromHtml nodeCollection of ChapterURls
fix ParseImageUrlsFromHtml xPath
fix Chapterparsing names
2024-11-02 17:42:26 +01:00
657e1b338b resolves #141 Asuratoon connector 2024-11-02 17:19:17 +01:00
ee265a7519 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-11-02 16:24:55 +01:00
5b0624654b rename duplicates to append ".duplicate" 2024-11-02 16:24:44 +01:00
a75549c699 Only try loading .json files on startup (exclude .failed for example) 2024-11-02 16:24:25 +01:00
f46244cb9c Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-10-31 20:43:11 +01:00
9db3f1b0da Extend logging on startup 2024-10-31 20:42:56 +01:00
dc9cd4b1dd Append ".failed" to job-files that werent successfully added. 2024-10-31 20:41:46 +01:00
3566ad774d Moved logging to actually say if we added a job to the list 2024-10-31 20:41:21 +01:00
94b81969c7 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-10-30 22:40:31 +01:00
bd8cb86c52 Always set directory-permissions 2024-10-30 22:29:32 +01:00
34c5436b33 Always set directory-permissions 2024-10-30 22:29:16 +01:00
4690394437 Formatting 2024-10-30 22:27:55 +01:00
02cf8578c9 Explicitly set File/Directory permissions for jobs 2024-10-30 22:27:50 +01:00
067497ddd0 Delete duplicate files on startup. 2024-10-30 20:38:53 +01:00
4b88cdbd90 When updating Jobfiles, dont write a new file if we werent able to successfully delete the old one 2024-10-30 20:31:16 +01:00
420013f07b Delete chapterMarkers if the file doesn't exist anymore. 2024-10-30 18:23:14 +01:00
8cee11aa22 Fix #272 Manhuaplus missing year string 2024-10-29 19:15:19 +01:00
198bbdcf94 Set hidden Attribute to Markerfiles 2024-10-27 02:58:50 +02:00
c58adf64fa #271 Create Marker-files for Chapters.
If a Connector provides a unique ID for a chapter, Tranga will create a markerfile, containing the current name of the Chapter
This should prevent duplicates, or missing chapters.
2024-10-27 02:41:28 +02:00
957debea01 Mangahere change list-2 to list-1 in selector 2024-10-27 02:22:58 +02:00
15 changed files with 322 additions and 96 deletions

View File

@@ -1,5 +1,7 @@
using System.Text.RegularExpressions;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using System.Xml.Linq;
using static System.IO.UnixFileMode;
namespace Tranga;
@@ -17,23 +19,21 @@ public readonly struct Chapter : IComparable
public string url { get; }
// ReSharper disable once MemberCanBePrivate.Global
public string fileName { get; }
public string? id { get; }
private static readonly Regex LegalCharacters = new (@"([A-z]*[0-9]* *\.*-*,*\]*\[*'*\'*\)*\(*~*!*)*");
private static readonly Regex IllegalStrings = new(@"(Vol(ume)?|Ch(apter)?)\.?", RegexOptions.IgnoreCase);
private static readonly Regex Digits = new(@"[0-9\.]*");
public Chapter(Manga parentManga, string? name, string? volumeNumber, string chapterNumber, string url)
public Chapter(Manga parentManga, string? name, string? volumeNumber, string chapterNumber, string url, string? id = null)
{
this.parentManga = parentManga;
this.name = name;
this.volumeNumber = volumeNumber is not null ? string.Concat(Digits.Matches(volumeNumber).Select(x => x.Value)) : "0";
this.chapterNumber = string.Concat(Digits.Matches(chapterNumber).Select(x => x.Value));
this.url = url;
this.id = id;
string chapterVolNumStr;
if (volumeNumber is not null && volumeNumber.Length > 0)
chapterVolNumStr = $"Vol.{volumeNumber} Ch.{chapterNumber}";
else
chapterVolNumStr = $"Ch.{chapterNumber}";
string chapterVolNumStr = $"Vol.{this.volumeNumber} Ch.{chapterNumber}";
if (name is not null && name.Length > 0)
{
@@ -87,24 +87,49 @@ public readonly struct Chapter : IComparable
string mangaDirectory = Path.Join(TrangaSettings.downloadLocation, parentManga.folderName);
if (!Directory.Exists(mangaDirectory))
return false;
FileInfo? mangaArchive = null;
string markerPath = Path.Join(mangaDirectory, $".{id}");
if (this.id is not null && File.Exists(markerPath))
{
if(File.Exists(File.ReadAllText(markerPath)))
mangaArchive = new FileInfo(File.ReadAllText(markerPath));
else
File.Delete(markerPath);
}
if(mangaArchive is null)
{
FileInfo[] archives = new DirectoryInfo(mangaDirectory).GetFiles("*.cbz");
Regex volChRex = new(@"(?:Vol(?:ume)?\.([0-9]+)\D*)?Ch(?:apter)?\.([0-9]+(?:\.[0-9]+)*)");
Chapter t = this;
string correctPath = GetArchiveFilePath();
FileInfo? archive = archives.FirstOrDefault(archive =>
mangaArchive = archives.FirstOrDefault(archive =>
{
Match m = volChRex.Match(archive.Name);
/*Uncommenting this section will only allow *Version without Volume number* -> *Version with Volume number* but not the other way
if (m.Groups[1].Success)
return m.Groups[1].Value == t.volumeNumber && m.Groups[2].Value == t.chapterNumber;
else*/
else
return m.Groups[2].Value == t.chapterNumber;
});
if(archive is not null && archive.FullName != correctPath)
archive.MoveTo(correctPath, true);
return (archive is not null);
}
string correctPath = GetArchiveFilePath();
if(mangaArchive is not null && mangaArchive.FullName != correctPath)
mangaArchive.MoveTo(correctPath, true);
return (mangaArchive is not null);
}
public void CreateChapterMarker()
{
if (this.id is null)
return;
string path = Path.Join(TrangaSettings.downloadLocation, parentManga.folderName, $".{id}");
File.WriteAllText(path, GetArchiveFilePath());
File.SetAttributes(path, FileAttributes.Hidden);
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(path, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute | OtherRead | OtherExecute);
}
/// <summary>
/// Creates full file path of chapter-archive
/// </summary>
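
The marker files introduced here follow a simple convention: a hidden file named ".{id}" next to the chapter archives, whose only content is the path of the archive the chapter was last written to. A minimal standalone sketch of that convention (directory, file name and id below are placeholders, not values from this repository):

using System.IO;

string mangaDirectory = "/downloads/SomeManga";                       // hypothetical download folder
string chapterId = "abc123";                                          // unique id supplied by a connector
string markerPath = Path.Join(mangaDirectory, $".{chapterId}");

// Writing: store the archive path the chapter was saved to, and hide the marker file.
File.WriteAllText(markerPath, Path.Join(mangaDirectory, "SomeManga - Ch.12.cbz"));
File.SetAttributes(markerPath, FileAttributes.Hidden);

// Reading: a marker whose referenced archive no longer exists is stale and is removed.
if (!File.Exists(File.ReadAllText(markerPath)))
    File.Delete(markerPath);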

View File

@@ -1,6 +1,8 @@
using System.Text.RegularExpressions;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using Newtonsoft.Json;
using Tranga.MangaConnectors;
using static System.IO.UnixFileMode;
namespace Tranga.Jobs;
@@ -17,18 +19,21 @@ public class JobBoss : GlobalBase
Log($"Next job in {jobs.MinBy(job => job.nextExecution)?.nextExecution.Subtract(DateTime.Now)} {jobs.MinBy(job => job.nextExecution)?.id}");
}
public void AddJob(Job job)
public bool AddJob(Job job, string? jobFile = null)
{
if (ContainsJobLike(job))
{
Log($"Already Contains Job {job}");
return false;
}
else
{
if (!this.jobs.Add(job))
return false;
Log($"Added {job}");
this.jobs.Add(job);
UpdateJobFile(job);
UpdateJobFile(job, jobFile);
}
return true;
}
public void AddJobs(IEnumerable<Job> jobsToAdd)
@@ -139,16 +144,15 @@ public class JobBoss : GlobalBase
}
private void LoadJobsList(HashSet<MangaConnector> connectors)
{
if (!Directory.Exists(TrangaSettings.jobsFolderPath)) //No jobs to load
{
Directory.CreateDirectory(TrangaSettings.jobsFolderPath);
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(TrangaSettings.jobsFolderPath, UserRead | UserWrite | UserExecute | GroupRead | OtherRead);
if (!Directory.Exists(TrangaSettings.jobsFolderPath)) //No jobs to load
return;
}
Regex idRex = new (@"(.*)\.json");
//Load json-job-files
foreach (FileInfo file in new DirectoryInfo(TrangaSettings.jobsFolderPath).EnumerateFiles().Where(fileInfo => idRex.IsMatch(fileInfo.Name)))
foreach (FileInfo file in Directory.GetFiles(TrangaSettings.jobsFolderPath, "*.json").Select(f => new FileInfo(f)))
{
Log($"Adding {file.Name}");
Job? job = JsonConvert.DeserializeObject<Job>(File.ReadAllText(file.FullName),
@@ -162,8 +166,12 @@ public class JobBoss : GlobalBase
else
{
Log($"Adding Job {job}");
this.jobs.Add(job);
UpdateJobFile(job, file.Name);
if (!AddJob(job, file.FullName)) //If we detect a duplicate, delete the file.
{
string path = string.Concat(file.FullName, ".duplicate");
file.MoveTo(path);
Log($"Duplicate detected or otherwise not able to add job to list.\nMoved job {job} to {path}");
}
}
}
@@ -203,7 +211,8 @@ public class JobBoss : GlobalBase
}
catch (Exception e)
{
Log(e.ToString());
Log($"Error deleting {oldFilePath} job {job.id}\n{e}");
return; //Don't export a new file when we haven't actually deleted the old one
}
}
@@ -215,6 +224,8 @@ public class JobBoss : GlobalBase
while(IsFileInUse(newJobFilePath))
Thread.Sleep(10);
File.WriteAllText(newJobFilePath, jobStr);
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(newJobFilePath, UserRead | UserWrite | GroupRead | OtherRead);
}
}
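
Taken together, the JobBoss changes above boil down to: enumerate only *.json files on startup (so earlier .failed or .duplicate files are skipped), and rename any file whose job cannot be added instead of dropping it silently. A rough sketch of that flow, with a placeholder folder path and a stand-in for deserialising and calling AddJob:

using System.IO;

string jobsFolder = "/data/jobs";                                     // hypothetical jobs folder
foreach (string path in Directory.GetFiles(jobsFolder, "*.json"))     // .failed / .duplicate files never match
{
    if (!TryAddJob(path))                                             // stand-in for deserialising and calling AddJob(job, path)
        File.Move(path, path + ".duplicate");                         // duplicates are renamed, not deleted
}

static bool TryAddJob(string path) => true;                           // placeholder for the real deserialisation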

View File

@@ -0,0 +1,208 @@
using System.Net;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using Tranga.Jobs;
namespace Tranga.MangaConnectors;
public class AsuraToon : MangaConnector
{
public AsuraToon(GlobalBase clone) : base(clone, "AsuraToon", ["en"])
{
this.downloadClient = new HttpDownloadClient(clone);
}
public override Manga[] GetManga(string publicationTitle = "")
{
Log($"Searching Publications. Term=\"{publicationTitle}\"");
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
string requestUrl = $"https://asuracomic.net/series?name={sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Manga>();
if (requestResult.htmlDocument is null)
{
Log($"Failed to retrieve site");
return Array.Empty<Manga>();
}
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
return publications;
}
public override Manga? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://asuracomic.net/series/{publicationId}");
}
public override Manga? GetMangaFromUrl(string url)
{
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return null;
if (requestResult.htmlDocument is null)
{
Log($"Failed to retrieve site");
return null;
}
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
}
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
{
HtmlNodeCollection mangaList = document.DocumentNode.SelectNodes("//a[starts-with(@href,'series')]");
if (mangaList.Count < 1)
return Array.Empty<Manga>();
IEnumerable<string> urls = mangaList.Select(a => $"https://asuracomic.net/{a.GetAttributeValue("href", "")}");
List<Manga> ret = new();
foreach (string url in urls)
{
Manga? manga = GetMangaFromUrl(url);
if (manga is not null)
ret.Add((Manga)manga);
}
return ret.ToArray();
}
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string? originalLanguage = null;
Dictionary<string, string> altTitles = new(), links = new();
HtmlNodeCollection genreNodes = document.DocumentNode.SelectNodes("//h3[text()='Genres']/../div/button");
string[] tags = genreNodes.Select(b => b.InnerText).ToArray();
HtmlNode statusNode = document.DocumentNode.SelectSingleNode("//h3[text()='Status']/../h3[2]");
Manga.ReleaseStatusByte releaseStatus = statusNode.InnerText.ToLower() switch
{
"ongoing" => Manga.ReleaseStatusByte.Continuing,
"hiatus" => Manga.ReleaseStatusByte.OnHiatus,
"completed" => Manga.ReleaseStatusByte.Completed,
"dropped" => Manga.ReleaseStatusByte.Cancelled,
"season end" => Manga.ReleaseStatusByte.Continuing,
"coming soon" => Manga.ReleaseStatusByte.Unreleased,
_ => Manga.ReleaseStatusByte.Unreleased
};
HtmlNode coverNode =
document.DocumentNode.SelectSingleNode("//img[@alt='poster']");
string coverUrl = coverNode.GetAttributeValue("src", "");
string coverFileNameInCache = SaveCoverImageToCache(coverUrl, publicationId, RequestType.MangaCover);
HtmlNode titleNode =
document.DocumentNode.SelectSingleNode("//title");
string sortName = Regex.Match(titleNode.InnerText, @"(.*) - Asura Scans").Groups[1].Value;
HtmlNode descriptionNode =
document.DocumentNode.SelectSingleNode("//h3[starts-with(text(),'Synopsis')]/../span");
string description = descriptionNode.InnerText;
HtmlNodeCollection authorNodes = document.DocumentNode.SelectNodes("//h3[text()='Author']/../h3[not(text()='Author' or text()='_')]");
HtmlNodeCollection artistNodes = document.DocumentNode.SelectNodes("//h3[text()='Artist']/../h3[not(text()='Author' or text()='_')]");
List<string> authors = authorNodes.Select(a => a.InnerText).Concat(artistNodes.Select(a => a.InnerText)).ToList();
HtmlNode? firstChapterNode = document.DocumentNode.SelectSingleNode("//a[contains(@href, 'chapter/1')]/../following-sibling::h3");
int? year = int.Parse(firstChapterNode?.InnerText.Split(' ')[^1] ?? "2000");
Manga manga = new (sortName, authors, description, altTitles, tags, coverUrl, coverFileNameInCache, links,
year, originalLanguage, publicationId, releaseStatus, websiteUrl);
AddMangaToCache(manga);
return manga;
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
Log($"Getting chapters {manga}");
string requestUrl = $"https://asuracomic.net/series/{manga.publicationId}";
// Leaving this in for verification if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Chapter>();
//Return Chapters ordered by Chapter-Number
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestUrl);
Log($"Got {chapters.Count} chapters. {manga}");
return chapters.Order().ToArray();
}
private List<Chapter> ParseChaptersFromHtml(Manga manga, string mangaUrl)
{
RequestResult result = downloadClient.MakeRequest(mangaUrl, RequestType.Default);
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
{
Log("Failed to load site");
return new List<Chapter>();
}
List<Chapter> ret = new();
HtmlNodeCollection chapterURLNodes = result.htmlDocument.DocumentNode.SelectNodes("//a[contains(@href, '/chapter/')]");
Regex infoRex = new(@"Chapter ([0-9]+)(.*)?");
foreach (HtmlNode chapterInfo in chapterURLNodes)
{
string chapterUrl = chapterInfo.GetAttributeValue("href", "");
Match match = infoRex.Match(chapterInfo.InnerText);
string chapterNumber = match.Groups[1].Value;
string? chapterName = match.Groups[2].Success && match.Groups[2].Length > 1 ? match.Groups[2].Value : null;
string url = $"https://asuracomic.net/series/{chapterUrl}";
ret.Add(new Chapter(manga, chapterName, null, chapterNumber, url));
}
return ret;
}
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
{
if (progressToken?.cancellationRequested ?? false)
{
progressToken.Cancel();
return HttpStatusCode.RequestTimeout;
}
Manga chapterParentManga = chapter.parentManga;
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
string requestUrl = chapter.url;
// Leaving this in to check if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
progressToken?.Cancel();
return requestResult.statusCode;
}
string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken);
}
private string[] ParseImageUrlsFromHtml(string mangaUrl)
{
RequestResult requestResult =
downloadClient.MakeRequest(mangaUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
return Array.Empty<string>();
}
if (requestResult.htmlDocument is null)
{
Log($"Failed to retrieve site");
return Array.Empty<string>();
}
HtmlNodeCollection images =
requestResult.htmlDocument.DocumentNode.SelectNodes("//img[contains(@alt, 'chapter page')]");
return images.Select(i => i.GetAttributeValue("src", "")).ToArray();
}
}
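
A rough usage sketch of the new connector, assuming an existing GlobalBase instance (here called globalBase), as the other connectors in Tranga are constructed with; not runnable on its own:

AsuraToon asura = new AsuraToon(globalBase);              // globalBase: existing GlobalBase instance
Manga[] results = asura.GetManga("some title");           // hypothetical search term
if (results.Length > 0)
{
    Chapter[] chapters = asura.GetChapters(results[0]);   // returned ordered by chapter number
    foreach (Chapter chapter in chapters)
        asura.DownloadChapter(chapter);                   // downloads the pages and packs them into a .cbz
}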

View File

@@ -150,7 +150,7 @@ public class Bato : MangaConnector
HtmlNode chapterList =
result.htmlDocument.DocumentNode.SelectSingleNode("/html/body/div/main/div[3]/astro-island/div/div[2]/div/div/astro-slot");
Regex numberRex = new(@"\/title\/.+\/[0-9]+(-vol_([0-9]+))?-ch_([0-9\.]+)");
Regex numberRex = new(@"\/title\/.+\/([0-9])+(?:-vol_([0-9]+))?-ch_([0-9\.]+)");
foreach (HtmlNode chapterInfo in chapterList.SelectNodes("div"))
{
@@ -158,6 +158,7 @@ public class Bato : MangaConnector
string chapterUrl = infoNode.GetAttributeValue("href", "");
Match match = numberRex.Match(chapterUrl);
string id = match.Groups[1].Value;
string? volumeNumber = match.Groups[2].Success ? match.Groups[2].Value : null;
string chapterNumber = match.Groups[3].Value;
string chapterName = chapterNumber;
@@ -190,10 +191,7 @@ public class Bato : MangaConnector
string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
string comicInfoPath = Path.GetTempFileName();
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
return DownloadChapterImages(imageUrls, chapter.GetArchiveFilePath(), RequestType.MangaImage, comicInfoPath, "https://mangakatana.com/", progressToken:progressToken);
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken);
}
private string[] ParseImageUrlsFromHtml(string mangaUrl)

View File

@@ -214,8 +214,10 @@ public abstract class MangaConnector : GlobalBase
return requestResult.statusCode;
}
protected HttpStatusCode DownloadChapterImages(string[] imageUrls, string saveArchiveFilePath, RequestType requestType, string? comicInfoPath = null, string? referrer = null, ProgressToken? progressToken = null)
protected HttpStatusCode DownloadChapterImages(string[] imageUrls, Chapter chapter, RequestType requestType, string? referrer = null, ProgressToken? progressToken = null)
{
string saveArchiveFilePath = chapter.GetArchiveFilePath();
if (progressToken?.cancellationRequested ?? false)
return HttpStatusCode.RequestTimeout;
Log($"Downloading Images for {saveArchiveFilePath}");
@@ -239,7 +241,7 @@ public abstract class MangaConnector : GlobalBase
//Create a temporary folder to store images
string tempFolder = Directory.CreateTempSubdirectory("trangatemp").FullName;
int chapter = 0;
int chapterNum = 0;
//Download all Images to temporary Folder
if (imageUrls.Length == 0)
{
@@ -253,9 +255,9 @@ public abstract class MangaConnector : GlobalBase
foreach (string imageUrl in imageUrls)
{
string extension = imageUrl.Split('.')[^1].Split('?')[0];
Log($"Downloading image {chapter + 1:000}/{imageUrls.Length:000}"); //TODO
HttpStatusCode status = DownloadImage(imageUrl, Path.Join(tempFolder, $"{chapter++}.{extension}"), requestType, referrer);
Log($"{saveArchiveFilePath} {chapter + 1:000}/{imageUrls.Length:000} {status}");
Log($"Downloading image {chapterNum + 1:000}/{imageUrls.Length:000}"); //TODO
HttpStatusCode status = DownloadImage(imageUrl, Path.Join(tempFolder, $"{chapterNum++}.{extension}"), requestType, referrer);
Log($"{saveArchiveFilePath} {chapterNum + 1:000}/{imageUrls.Length:000} {status}");
if ((int)status < 200 || (int)status >= 300)
{
progressToken?.Complete();
@@ -269,16 +271,14 @@ public abstract class MangaConnector : GlobalBase
progressToken?.Increment();
}
if(comicInfoPath is not null){
File.Copy(comicInfoPath, Path.Join(tempFolder, "ComicInfo.xml"));
File.Delete(comicInfoPath); //Delete tmp-file
}
File.WriteAllText(Path.Join(tempFolder, "ComicInfo.xml"), chapter.GetComicInfoXmlString());
Log($"Creating archive {saveArchiveFilePath}");
//ZIP-it and ship-it
ZipFile.CreateFromDirectory(tempFolder, saveArchiveFilePath);
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(saveArchiveFilePath, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute);
chapter.CreateChapterMarker();
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(saveArchiveFilePath, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute | OtherRead | OtherExecute);
Directory.Delete(tempFolder, true); //Cleanup
progressToken?.Complete();
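
The connector diffs that follow all migrate to this overload; schematically, each call-site changes roughly like this (illustrative only, referrer arguments kept where a connector passes one):

// before: each connector wrote a temporary ComicInfo.xml and passed the finished archive path
return DownloadChapterImages(imageUrls, chapter.GetArchiveFilePath(), RequestType.MangaImage, comicInfoPath, progressToken: progressToken);
// after: the base class derives the path and metadata from the Chapter itself and writes the chapter marker
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken: progressToken);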

View File

@@ -38,6 +38,7 @@ public class MangaConnectorJsonConverter : JsonConverter
"Manga4Life" => this._connectors.First(c => c is MangaLife),
"ManhuaPlus" => this._connectors.First(c => c is ManhuaPlus),
"MangaHere" => this._connectors.First(c => c is MangaHere),
"AsuraToon" => this._connectors.First(c => c is AsuraToon),
_ => throw new UnreachableException($"Could not find Connector with name {connectorName}")
};
}

View File

@@ -247,7 +247,7 @@ public class MangaDex : MangaConnector
}
if(chapterNum is not "null" && !chapters.Any(chp => chp.volumeNumber.Equals(volume) && chp.chapterNumber.Equals(chapterNum)))
chapters.Add(new Chapter(manga, title, volume, chapterNum, chapterId));
chapters.Add(new Chapter(manga, title, volume, chapterNum, chapterId, chapterId));
}
}
@@ -289,10 +289,7 @@ public class MangaDex : MangaConnector
foreach (JsonNode? image in imageFileNames)
imageUrls.Add($"{baseUrl}/data/{hash}/{image!.GetValue<string>()}");
string comicInfoPath = Path.GetTempFileName();
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
//Download Chapter-Images
return DownloadChapterImages(imageUrls.ToArray(), chapter.GetArchiveFilePath(), RequestType.MangaImage, comicInfoPath, progressToken:progressToken);
return DownloadChapterImages(imageUrls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
}
}

View File

@@ -117,7 +117,7 @@ public class MangaHere : MangaConnector
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
return Array.Empty<Chapter>();
List<string> urls = requestResult.htmlDocument.DocumentNode.SelectNodes("//div[@id='list-2']/ul//li//a[contains(@href, '/manga/')]")
List<string> urls = requestResult.htmlDocument.DocumentNode.SelectNodes("//div[@id='list-1']/ul//li//a[contains(@href, '/manga/')]")
.Select(node => node.GetAttributeValue("href", "")).ToList();
Regex chapterRex = new(@".*\/manga\/[a-zA-Z0-9\-\._\~\!\$\&\'\(\)\*\+\,\;\=\:\@]+\/v([0-9(TBD)]+)\/c([0-9\.]+)\/.*");
@@ -181,12 +181,9 @@ public class MangaHere : MangaConnector
}
} while (downloaded++ <= images);
string comicInfoPath = Path.GetTempFileName();
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
if (progressToken is not null)
progressToken.increments = images;//we blip to normal length, in downloadchapterimages it is increasaed by the amount of urls again
return DownloadChapterImages(imageUrls.ToArray(), chapter.GetArchiveFilePath(), RequestType.MangaImage, comicInfoPath, progressToken:progressToken);
return DownloadChapterImages(imageUrls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
}
private string[] ParseImageUrlsFromHtml(HtmlDocument document)

View File

@@ -214,10 +214,7 @@ public class MangaKatana : MangaConnector
string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
string comicInfoPath = Path.GetTempFileName();
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
return DownloadChapterImages(imageUrls, chapter.GetArchiveFilePath(), RequestType.MangaImage, comicInfoPath, "https://mangakatana.com/", progressToken:progressToken);
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken);
}
private string[] ParseImageUrlsFromHtml(string mangaUrl)

View File

@@ -194,6 +194,6 @@ public class MangaLife : MangaConnector
string comicInfoPath = Path.GetTempFileName();
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
return DownloadChapterImages(urls.ToArray(), chapter.GetArchiveFilePath(), RequestType.MangaImage, comicInfoPath, progressToken:progressToken);
return DownloadChapterImages(urls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
}
}

View File

@@ -214,10 +214,7 @@ public class Manganato : MangaConnector
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
string comicInfoPath = Path.GetTempFileName();
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
return DownloadChapterImages(imageUrls, chapter.GetArchiveFilePath(), RequestType.MangaImage, comicInfoPath, "https://chapmanganato.com/", progressToken:progressToken);
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, "https://chapmanganato.com/", progressToken:progressToken);
}
private string[] ParseImageUrlsFromHtml(HtmlDocument document)

View File

@@ -222,9 +222,6 @@ public class Mangasee : MangaConnector
foreach(HtmlNode galleryImage in images)
urls.Add(galleryImage.GetAttributeValue("src", ""));
string comicInfoPath = Path.GetTempFileName();
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
return DownloadChapterImages(urls.ToArray(), chapter.GetArchiveFilePath(), RequestType.MangaImage, comicInfoPath, progressToken:progressToken);
return DownloadChapterImages(urls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
}
}

View File

@@ -149,19 +149,21 @@ public class Mangaworld: MangaConnector
document.DocumentNode.SelectSingleNode(
"//div[contains(concat(' ',normalize-space(@class),' '),'chapters-wrapper')]");
Regex volumeRex = new(@"[Vv]olume ([0-9]+).*");
Regex chapterRex = new(@"[Cc]apitolo ([0-9]+).*");
Regex idRex = new(@".*\/read\/([a-z0-9]+)(?:[?\/].*)?");
if (chaptersWrapper.Descendants("div").Any(descendant => descendant.HasClass("volume-element")))
{
foreach (HtmlNode volNode in document.DocumentNode.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),'volume-element')]"))
{
string volume = Regex.Match(volNode.SelectNodes("div").First(node => node.HasClass("volume")).SelectSingleNode("p").InnerText,
@"[Vv]olume ([0-9]+).*").Groups[1].Value;
string volume = volumeRex.Match(volNode.SelectNodes("div").First(node => node.HasClass("volume")).SelectSingleNode("p").InnerText).Groups[1].Value;
foreach (HtmlNode chNode in volNode.SelectNodes("div").First(node => node.HasClass("volume-chapters")).SelectNodes("div"))
{
string number = Regex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText,
@"[Cc]apitolo ([0-9]+).*").Groups[1].Value;
string number = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
string url = chNode.SelectSingleNode("a").GetAttributeValue("href", "");
ret.Add(new Chapter(manga, null, volume, number, url));
string id = idRex.Match(chNode.SelectSingleNode("a").GetAttributeValue("href", "")).Groups[1].Value;
ret.Add(new Chapter(manga, null, volume, number, url, id));
}
}
}
@@ -169,10 +171,10 @@ public class Mangaworld: MangaConnector
{
foreach (HtmlNode chNode in chaptersWrapper.SelectNodes("div").Where(node => node.HasClass("chapter")))
{
string number = Regex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText,
@"[Cc]apitolo ([0-9]+).*").Groups[1].Value;
string number = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
string url = chNode.SelectSingleNode("a").GetAttributeValue("href", "");
ret.Add(new Chapter(manga, null, null, number, url));
string id = idRex.Match(chNode.SelectSingleNode("a").GetAttributeValue("href", "")).Groups[1].Value;
ret.Add(new Chapter(manga, null, null, number, url, id));
}
}
@@ -207,10 +209,7 @@ public class Mangaworld: MangaConnector
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
string comicInfoPath = Path.GetTempFileName();
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
return DownloadChapterImages(imageUrls, chapter.GetArchiveFilePath(), RequestType.MangaImage, comicInfoPath, "https://www.mangaworld.bz/", progressToken:progressToken);
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage,"https://www.mangaworld.bz/", progressToken:progressToken);
}
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
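
The new idRex extracts the chapter id from the read-URL so Mangaworld chapters can carry a marker id. A quick check with a made-up URL:

using System;
using System.Text.RegularExpressions;

Regex idRex = new(@".*\/read\/([a-z0-9]+)(?:[?\/].*)?");
string href = "https://www.mangaworld.bz/manga/2906/some-title/read/6700a1b2c3d4e5f6a7b8c9d0?style=pages"; // hypothetical
Console.WriteLine(idRex.Match(href).Groups[1].Value);     // prints 6700a1b2c3d4e5f6a7b8c9d0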

View File

@@ -108,9 +108,10 @@ public class ManhuaPlus : MangaConnector
Log("No genres found");
}
string yearNodeStr = document.DocumentNode
.SelectSingleNode("//aside//i[contains(concat(' ',normalize-space(@class),' '),' fa-clock ')]/../span").InnerText.Replace("\n", "");
int year = int.Parse(yearNodeStr.Split(' ')[0].Split('/')[^1]);
Regex yearRex = new(@"(?:[0-9]{1,2}\/){2}([0-9]{2,4}) [0-9]{1,2}:[0-9]{1,2}");
HtmlNode yearNode = document.DocumentNode.SelectSingleNode("//aside//i[contains(concat(' ',normalize-space(@class),' '),' fa-clock ')]/../span");
Match match = yearRex.Match(yearNode.InnerText);
int year = match.Success && match.Groups[1].Success ? int.Parse(match.Groups[1].Value) : 1960;
status = document.DocumentNode.SelectSingleNode("//aside//i[contains(concat(' ',normalize-space(@class),' '),' fa-rss ')]/../span").InnerText.Replace("\n", "");
switch (status.ToLower())
@@ -190,9 +191,6 @@ public class ManhuaPlus : MangaConnector
HtmlNode[] images = document.DocumentNode.SelectNodes("//a[contains(concat(' ',normalize-space(@class),' '),' readImg ')]/img").ToArray();
List<string> urls = images.Select(node => node.GetAttributeValue("src", "")).ToList();
string comicInfoPath = Path.GetTempFileName();
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
return DownloadChapterImages(urls.ToArray(), chapter.GetArchiveFilePath(), RequestType.MangaImage, comicInfoPath, progressToken:progressToken);
return DownloadChapterImages(urls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
}
}
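
The year fix replaces raw string slicing with a regex, so a missing or malformed date falls back to a default instead of throwing. For example, assuming the sidebar text looks like a US-style timestamp:

using System;
using System.Text.RegularExpressions;

Regex yearRex = new(@"(?:[0-9]{1,2}\/){2}([0-9]{2,4}) [0-9]{1,2}:[0-9]{1,2}");
Match match = yearRex.Match("10/29/2024 19:15");          // hypothetical sidebar text
int year = match.Success && match.Groups[1].Success ? int.Parse(match.Groups[1].Value) : 1960;
Console.WriteLine(year);                                   // 2024; an unparsable string yields 1960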

View File

@@ -26,6 +26,7 @@ public partial class Tranga : GlobalBase
new MangaLife(this),
new ManhuaPlus(this),
new MangaHere(this),
new AsuraToon(this),
};
foreach(DirectoryInfo dir in new DirectoryInfo(Path.GetTempPath()).GetDirectories("trangatemp"))//Cleanup old temp folders
dir.Delete();