Mirror of https://github.com/C9Glax/tranga.git
Commit 538e6fa60b (parent 8c5bcd2665): MangaConnectors in API
@@ -3,6 +3,7 @@ using API.Schema.Jobs;
using API.Schema.MangaConnectors;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Soenneker.Utils.String.NeedlemanWunsch;
using static Microsoft.AspNetCore.Http.StatusCodes;

@@ -35,12 +36,26 @@ public class ConnectorController(PgsqlContext context) : Controller
[ProducesResponseType<Manga[]>(Status500InternalServerError)]
public IActionResult SearchMangaGlobal(string name)
{
List<Manga> allManga = new List<Manga>();
List<(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])> allManga = new();
foreach (MangaConnector contextMangaConnector in context.MangaConnectors)
{
allManga.AddRange(contextMangaConnector.GetManga(name));
foreach ((Manga? manga, Author[]? authors, MangaTag[]? tags, Link[]? links, MangaAltTitle[]? altTitles) in allManga)
{
try
{
context.Tags.AddRange(tags);
context.Authors.AddRange(authors);
context.Link.AddRange(links);
context.AltTitles.AddRange(altTitles);
context.Manga.AddRange(manga);
context.SaveChanges();
}
catch (DbUpdateException)
{
return StatusCode(500, new ProblemResponse("An error occurred while processing your request."));
}
}
return Ok(allManga.ToArray());
return Ok(allManga.Select(m => m.Item1).ToArray());
}

/// <summary>
@@ -52,12 +67,29 @@ public class ConnectorController(PgsqlContext context) : Controller
[HttpPost("{id}/SearchManga")]
[ProducesResponseType<Manga[]>(Status200OK)]
[ProducesResponseType<ProblemResponse>(Status404NotFound)]
[ProducesResponseType<ProblemResponse>(Status500InternalServerError)]
public IActionResult SearchManga(string id, [FromBody]string name)
{
MangaConnector? connector = context.MangaConnectors.Find(id);
if (connector is null)
return NotFound(new ProblemResponse("Connector not found."));
Manga[] manga = connector.GetManga(name);
return Ok(manga);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] mangas = connector.GetManga(name);
foreach ((Manga? manga, Author[]? authors, MangaTag[]? tags, Link[]? links, MangaAltTitle[]? altTitles) in mangas)
{
try
{
context.Tags.AddRange(tags);
context.Authors.AddRange(authors);
context.Link.AddRange(links);
context.AltTitles.AddRange(altTitles);
context.Manga.AddRange(manga);
context.SaveChanges();
}
catch (DbUpdateException)
{
return StatusCode(500, new ProblemResponse("An error occurred while processing your request."));
}
}
return Ok(mangas.Select(m => m.Item1).ToArray());
}
}
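Both controller actions above share the same persistence step: every related record in a returned tuple is added to the context before the Manga rows are returned. A condensed sketch of that step, written as a hypothetical helper (not part of the commit) and using the entity set names visible in the diff:

// Hypothetical helper illustrating the persistence step shared by SearchMangaGlobal and SearchManga.
// Entity set names (Tags, Authors, Link, AltTitles, Manga) are taken from the diff above.
private static void PersistSearchResult(PgsqlContext context,
    (Manga manga, Author[] authors, MangaTag[] tags, Link[] links, MangaAltTitle[] altTitles) result)
{
    context.Tags.AddRange(result.tags);
    context.Authors.AddRange(result.authors);
    context.Link.AddRange(result.links);
    context.AltTitles.AddRange(result.altTitles);
    context.Manga.Add(result.manga);
    context.SaveChanges(); // throws DbUpdateException on constraint violations, handled by the caller
}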
@@ -12,30 +12,30 @@ public class AsuraToon : MangaConnector
this.downloadClient = new ChromiumDownloadClient();
}

public override Manga[] GetManga(string publicationTitle = "")
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
string requestUrl = $"https://asuracomic.net/series?name={sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Manga>();
return [];

if (requestResult.htmlDocument is null)
{
return [];
}

Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}

public override Manga? GetMangaFromId(string publicationId)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://asuracomic.net/series/{publicationId}");
}

public override Manga? GetMangaFromUrl(string url)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromUrl(string url)
{
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)

@@ -47,7 +47,7 @@ public class AsuraToon : MangaConnector
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
}

private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] ParsePublicationsFromHtml(HtmlDocument document)
{
HtmlNodeCollection mangaList = document.DocumentNode.SelectNodes("//a[starts-with(@href,'series')]");
if (mangaList is null || mangaList.Count < 1)

@@ -55,24 +55,25 @@ public class AsuraToon : MangaConnector

IEnumerable<string> urls = mangaList.Select(a => $"https://asuracomic.net/{a.GetAttributeValue("href", "")}");

List<Manga> ret = new();
List<(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])> ret = new();
foreach (string url in urls)
{
Manga? manga = GetMangaFromUrl(url);
if (manga is not null)
ret.Add((Manga)manga);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}

return ret.ToArray();
}

private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[]) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string? originalLanguage = null;
Dictionary<string, string> altTitles = new(), links = new();

HtmlNodeCollection genreNodes = document.DocumentNode.SelectNodes("//h3[text()='Genres']/../div/button");
string[] tags = genreNodes.Select(b => b.InnerText).ToArray();
MangaTag[] mangaTags = tags.Select(t => new MangaTag(t)).ToArray();

HtmlNode statusNode = document.DocumentNode.SelectSingleNode("//h3[text()='Status']/../h3[2]");
MangaReleaseStatus releaseStatus = statusNode.InnerText.ToLower() switch

@@ -102,16 +103,21 @@ public class AsuraToon : MangaConnector
HtmlNodeCollection artistNodes = document.DocumentNode.SelectNodes("//h3[text()='Artist']/../h3[not(text()='Artist' or text()='_')]");
IEnumerable<string> authorNames = authorNodes is null ? [] : authorNodes.Select(a => a.InnerText);
IEnumerable<string> artistNames = artistNodes is null ? [] : artistNodes.Select(a => a.InnerText);
List<string> authors = authorNames.Concat(artistNames).ToList();
List<string> authorStrings = authorNames.Concat(artistNames).ToList();
Author[] authors = authorStrings.Select(author => new Author(author)).ToArray();

HtmlNode? firstChapterNode = document.DocumentNode.SelectSingleNode("//a[contains(@href, 'chapter/1')]/../following-sibling::h3");
uint year = uint.Parse(firstChapterNode?.InnerText.Split(' ')[^1] ?? "2000");

Manga manga = new Manga(publicationId, sortName, description, websiteUrl, coverUrl, null, year,
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1, null, null,
this.Name, authors, tags, links, altTitles); //TODO
this.Name,
authors.Select(a => a.AuthorId).ToArray(),
mangaTags.Select(t => t.Tag).ToArray(),
[],
[]);

return manga;
return (manga, authors, mangaTags, [], []);
}

public override Chapter[] GetChapters(Manga manga, string language="en")
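AsuraToon (like the other connectors below) unwraps the nullable tuple returned by GetMangaFromUrl with the "is { } x" property pattern instead of a null check plus cast. A standalone illustration of that pattern with made-up values:

// Minimal illustration of the "is { } x" pattern; the tuple contents here are invented.
(string Title, int Year)? maybe = ("Example", 2024);
if (maybe is { } value)              // matches when 'maybe' is non-null and binds the tuple to 'value'
    Console.WriteLine($"{value.Title} ({value.Year})");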
@@ -13,30 +13,30 @@ public class Bato : MangaConnector
this.downloadClient = new HttpDownloadClient();
}

public override Manga[] GetManga(string publicationTitle = "")
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
string requestUrl = $"https://bato.to/v3x-search?word={sanitizedTitle}&lang=en";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Manga>();
return [];

if (requestResult.htmlDocument is null)
{
return [];
}

Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}

public override Manga? GetMangaFromId(string publicationId)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://bato.to/title/{publicationId}");
}

public override Manga? GetMangaFromUrl(string url)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromUrl(string url)
{
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)

@@ -48,7 +48,7 @@ public class Bato : MangaConnector
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
}

private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] ParsePublicationsFromHtml(HtmlDocument document)
{
HtmlNode mangaList = document.DocumentNode.SelectSingleNode("//div[@data-hk='0-0-2']");
if (!mangaList.ChildNodes.Any(node => node.Name == "div"))

@@ -57,18 +57,18 @@ public class Bato : MangaConnector
List<string> urls = mangaList.ChildNodes
.Select(node => $"https://bato.to{node.Descendants("div").First().FirstChild.GetAttributeValue("href", "")}").ToList();

HashSet<Manga> ret = new();
HashSet<(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])> ret = new();
foreach (string url in urls)
{
Manga? manga = GetMangaFromUrl(url);
if (manga is not null)
ret.Add((Manga)manga);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}

return ret.ToArray();
}

private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[]) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
HtmlNode infoNode = document.DocumentNode.SelectSingleNode("/html/body/div/main/div[1]/div[2]");

@@ -78,24 +78,26 @@ public class Bato : MangaConnector

string[] altTitlesList = infoNode.ChildNodes[1].ChildNodes[2].InnerText.Split('/');
int i = 0;
Dictionary<string, string> altTitles = altTitlesList.ToDictionary(s => i++.ToString(), s => s);
MangaAltTitle[] altTitles = altTitlesList.Select(a => new MangaAltTitle(i++.ToString(), a)).ToArray();

string posterUrl = document.DocumentNode.SelectNodes("//img")
string coverUrl = document.DocumentNode.SelectNodes("//img")
.First(child => child.GetAttributeValue("data-hk", "") == "0-1-0").GetAttributeValue("src", "").Replace("&amp;", "&");

List<HtmlNode> genreNodes = document.DocumentNode.SelectSingleNode("//b[text()='Genres:']/..").SelectNodes("span").ToList();
string[] tags = genreNodes.Select(node => node.FirstChild.InnerText).ToArray();
MangaTag[] mangaTags = tags.Select(s => new MangaTag(s)).ToArray();

List<HtmlNode> authorsNodes = infoNode.ChildNodes[1].ChildNodes[3].Descendants("a").ToList();
List<string> authors = authorsNodes.Select(node => node.InnerText.Replace("amp;", "")).ToList();
List<string> authorNames = authorsNodes.Select(node => node.InnerText.Replace("amp;", "")).ToList();
Author[] authors = authorNames.Select(n => new Author(n)).ToArray();

HtmlNode? originalLanguageNode = document.DocumentNode.SelectSingleNode("//span[text()='Tr From']/..");
string originalLanguage = originalLanguageNode is not null ? originalLanguageNode.LastChild.InnerText : "";

if (!int.TryParse(
if (!uint.TryParse(
document.DocumentNode.SelectSingleNode("//span[text()='Original Publication:']/..").LastChild.InnerText.Split('-')[0],
out int year))
year = DateTime.Now.Year;
out uint year))
year = (uint)DateTime.Now.Year;

string status = document.DocumentNode.SelectSingleNode("//span[text()='Original Publication:']/..")
.ChildNodes[2].InnerText;

@@ -109,8 +111,15 @@ public class Bato : MangaConnector
case "pending": releaseStatus = MangaReleaseStatus.Unreleased; break;
}

Manga manga = //TODO
return manga;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1, null, null,
this.Name,
authors.Select(a => a.AuthorId).ToArray(),
mangaTags.Select(t => t.Tag).ToArray(),
[],
altTitles.Select(a => a.AltTitleId).ToArray());

return (manga, authors, mangaTags, [], altTitles);
}

public override Chapter[] GetChapters(Manga manga, string language="en")
@@ -18,11 +18,11 @@ public abstract class MangaConnector(string name, string[] supportedLanguages, s
public virtual Manga[] Mangas { get; internal set; } = [];

public abstract Manga[] GetManga(string publicationTitle = "");
public abstract (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] GetManga(string publicationTitle = "");

public abstract Manga? GetMangaFromUrl(string url);
public abstract (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromUrl(string url);

public abstract Manga? GetMangaFromId(string publicationId);
public abstract (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromId(string publicationId);

public abstract Chapter[] GetChapters(Manga manga, string language="en");
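Every connector in this commit implements the new contract the same way: scrape plain strings, wrap them in Author, MangaTag and MangaAltTitle records, store only their IDs on the Manga row, and return the full objects in the tuple so the API layer can persist them. A condensed, hypothetical helper that restates this shared pattern (constructor calls copied from the diffs; the helper itself is not part of the commit):

// Hypothetical helper, not part of the commit: builds the tuple every connector now returns.
static (Manga, Author[], MangaTag[], Link[], MangaAltTitle[]) BuildResult(
    string publicationId, string sortName, string description, string websiteUrl, string coverUrl,
    uint year, string? originalLanguage, MangaReleaseStatus releaseStatus, string connectorName,
    string[] authorNames, string[] tagNames, string[] alternativeTitles)
{
    Author[] authors = authorNames.Select(n => new Author(n)).ToArray();
    MangaTag[] mangaTags = tagNames.Select(t => new MangaTag(t)).ToArray();
    int i = 0;
    MangaAltTitle[] altTitles = alternativeTitles.Select(a => new MangaAltTitle(i++.ToString(), a)).ToArray();

    // The Manga row keeps only the IDs of its related records; the records themselves travel
    // in the tuple so the caller (e.g. ConnectorController) can add them to the DbContext.
    Manga manga = new(publicationId, sortName, description, websiteUrl, coverUrl, null, year,
        originalLanguage, releaseStatus, -1, null, null,
        connectorName,
        authors.Select(a => a.AuthorId).ToArray(),
        mangaTags.Select(t => t.Tag).ToArray(),
        [],
        altTitles.Select(a => a.AltTitleId).ToArray());

    return (manga, authors, mangaTags, [], altTitles);
}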
@@ -16,12 +16,12 @@ public class MangaDex : MangaConnector
this.downloadClient = new HttpDownloadClient();
}

public override Manga[] GetManga(string publicationTitle = "")
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] GetManga(string publicationTitle = "")
{
const int limit = 100; //How many values we want returned at once
int offset = 0; //"Page"
int total = int.MaxValue; //How many total results are there, is updated on first request
HashSet<Manga> retManga = new();
HashSet<(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])> retManga = new();
int loadedPublicationData = 0;
List<JsonNode> results = new();

@@ -59,7 +59,7 @@ public class MangaDex : MangaConnector
return retManga.ToArray();
}

public override Manga? GetMangaFromId(string publicationId)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromId(string publicationId)
{
RequestResult requestResult =
downloadClient.MakeRequest($"https://api.mangadex.org/manga/{publicationId}?includes%5B%5D=manga&includes%5B%5D=cover_art&includes%5B%5D=author&includes%5B%5D=artist&includes%5B%5D=tag", RequestType.MangaInfo);

@@ -71,14 +71,14 @@ public class MangaDex : MangaConnector
return null;
}

public override Manga? GetMangaFromUrl(string url)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromUrl(string url)
{
Regex idRex = new (@"https:\/\/mangadex.org\/title\/([A-z0-9-]*)\/.*");
string id = idRex.Match(url).Groups[1].Value;
return GetMangaFromId(id);
}

private Manga? MangaFromJsonObject(JsonObject manga)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? MangaFromJsonObject(JsonObject manga)
{
if (!manga.TryGetPropertyValue("id", out JsonNode? idNode))
return null;

@@ -90,7 +90,7 @@ public class MangaDex : MangaConnector

if (!attributes.TryGetPropertyValue("title", out JsonNode? titleNode))
return null;
string title = titleNode!.AsObject().ContainsKey("en") switch
string sortName = titleNode!.AsObject().ContainsKey("en") switch
{
true => titleNode.AsObject()["en"]!.GetValue<string>(),
false => titleNode.AsObject().First().Value!.GetValue<string>()

@@ -105,6 +105,7 @@ public class MangaDex : MangaConnector
altTitlesDict.TryAdd(altTitleNodeObject.First().Key, altTitleNodeObject.First().Value!.GetValue<string>());
}
}
MangaAltTitle[] altTitles = altTitlesDict.Select(t => new MangaAltTitle(t.Key, t.Value)).ToArray();

if (!attributes.TryGetPropertyValue("description", out JsonNode? descriptionNode))
return null;

@@ -118,6 +119,7 @@ public class MangaDex : MangaConnector
if (attributes.TryGetPropertyValue("links", out JsonNode? linksNode) && linksNode is not null)
foreach (KeyValuePair<string, JsonNode?> linkKv in linksNode!.AsObject())
linksDict.TryAdd(linkKv.Key, linkKv.Value.GetValue<string>());
Link[] links = linksDict.Select(x => new Link(x.Key, x.Value)).ToArray();

string? originalLanguage =
attributes.TryGetPropertyValue("originalLanguage", out JsonNode? originalLanguageNode) switch

@@ -139,17 +141,17 @@ public class MangaDex : MangaConnector
};
}

int? year = attributes.TryGetPropertyValue("year", out JsonNode? yearNode) switch
uint year = attributes.TryGetPropertyValue("year", out JsonNode? yearNode) switch
{
true => yearNode?.GetValue<int>(),
false => null
true => yearNode?.GetValue<uint>()??0,
false => 0
};

HashSet<string> tags = new(128);
if (attributes.TryGetPropertyValue("tags", out JsonNode? tagsNode))
foreach (JsonNode? tagNode in tagsNode!.AsArray())
tags.Add(tagNode!["attributes"]!["name"]!["en"]!.GetValue<string>());

MangaTag[] mangaTags = tags.Select(t => new MangaTag(t)).ToArray();

if (!manga.TryGetPropertyValue("relationships", out JsonNode? relationshipsNode))
return null;

@@ -161,18 +163,26 @@ public class MangaDex : MangaConnector
string fileName = coverNode["attributes"]!["fileName"]!.GetValue<string>();
string coverUrl = $"https://uploads.mangadex.org/covers/{publicationId}/{fileName}";

List<string> authors = new();
List<string> authorNames = new();
JsonNode?[] authorNodes = relationshipsNode.AsArray()
.Where(rel => rel!["type"]!.GetValue<string>().Equals("author") || rel!["type"]!.GetValue<string>().Equals("artist")).ToArray();
foreach (JsonNode? authorNode in authorNodes)
{
string authorName = authorNode!["attributes"]!["name"]!.GetValue<string>();
if(!authors.Contains(authorName))
authors.Add(authorName);
if(!authorNames.Contains(authorName))
authorNames.Add(authorName);
}
Author[] authors = authorNames.Select(a => new Author(a)).ToArray();

Manga pub = //TODO
return pub;
Manga pub = new (publicationId, sortName, description, $"https://mangadex.org/title/{publicationId}", coverUrl, null, year,
originalLanguage, releaseStatus, -1, null, null,
this.Name,
authors.Select(a => a.AuthorId).ToArray(),
mangaTags.Select(t => t.Tag).ToArray(),
links.Select(l => l.LinkId).ToArray(),
altTitles.Select(a => a.AltTitleId).ToArray());

return (pub, authors, mangaTags, links, altTitles);
}

public override Chapter[] GetChapters(Manga manga, string language="en")
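The year handling above reads an optional numeric property from the MangaDex JSON and falls back to 0 when it is missing or null. A standalone System.Text.Json.Nodes illustration of that access pattern (the sample JSON is made up):

using System.Text.Json.Nodes;

JsonObject attributes = JsonNode.Parse("""{"title":"x","year":2019}""")!.AsObject();

// Mirrors the switch used in MangaFromJsonObject: 0 when "year" is absent or null.
uint year = attributes.TryGetPropertyValue("year", out JsonNode? yearNode) switch
{
    true => yearNode?.GetValue<uint>() ?? 0,
    false => 0
};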
@@ -11,45 +11,45 @@ public class MangaHere : MangaConnector
this.downloadClient = new ChromiumDownloadClient();
}

public override Manga[] GetManga(string publicationTitle = "")
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join('+', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
string requestUrl = $"https://www.mangahere.cc/search?title={sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
return Array.Empty<Manga>();
return [];

Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}

private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] ParsePublicationsFromHtml(HtmlDocument document)
{
if (document.DocumentNode.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),' container ')]").Any(node => node.ChildNodes.Any(cNode => cNode.HasClass("search-keywords"))))
return Array.Empty<Manga>();
return [];

List<string> urls = document.DocumentNode
.SelectNodes("//a[contains(@href, '/manga/') and not(contains(@href, '.html'))]")
.Select(thumb => $"https://www.mangahere.cc{thumb.GetAttributeValue("href", "")}").Distinct().ToList();

HashSet<Manga> ret = new();
HashSet<(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])> ret = new();
foreach (string url in urls)
{
Manga? manga = GetMangaFromUrl(url);
if (manga is not null)
ret.Add((Manga)manga);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}

return ret.ToArray();
}

public override Manga? GetMangaFromId(string publicationId)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://www.mangahere.cc/manga/{publicationId}");
}

public override Manga? GetMangaFromUrl(string url)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromUrl(string url)
{
RequestResult requestResult =
downloadClient.MakeRequest(url, RequestType.MangaInfo);

@@ -61,27 +61,29 @@ public class MangaHere : MangaConnector
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, id, url);
}

private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[]) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string originalLanguage = "", status = "";
Dictionary<string, string> altTitles = new(), links = new();
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;

//We dont get posters, because same origin bs HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//img[contains(concat(' ',normalize-space(@class),' '),' detail-info-cover-img ')]");
string posterUrl = "http://static.mangahere.cc/v20230914/mangahere/images/nopicture.jpg";
string coverUrl = "http://static.mangahere.cc/v20230914/mangahere/images/nopicture.jpg";

HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//span[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-title-font ')]");
string sortName = titleNode.InnerText;

List<string> authors = document.DocumentNode
List<string> authorNames = document.DocumentNode
.SelectNodes("//p[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-say ')]/a")
.Select(node => node.InnerText)
.ToList();
Author[] authors = authorNames.Select(n => new Author(n)).ToArray();

HashSet<string> tags = document.DocumentNode
.SelectNodes("//p[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-tag-list ')]/a")
.Select(node => node.InnerText)
.ToHashSet();
MangaTag[] mangaTags = tags.Select(n => new MangaTag(n)).ToArray();

status = document.DocumentNode.SelectSingleNode("//span[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-title-tip ')]").InnerText;
switch (status.ToLower())

@@ -97,8 +99,15 @@ public class MangaHere : MangaConnector
.SelectSingleNode("//p[contains(concat(' ',normalize-space(@class),' '),' fullcontent ')]");
string description = descriptionNode.InnerText;

Manga manga =//TODO
return manga;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, 0,
originalLanguage, releaseStatus, -1, null, null,
this.Name,
authors.Select(a => a.AuthorId).ToArray(),
mangaTags.Select(t => t.Tag).ToArray(),
[],
[]);

return (manga, authors, mangaTags, [], []);
}

public override Chapter[] GetChapters(Manga manga, string language="en")
@@ -11,14 +11,14 @@ public class MangaKatana : MangaConnector
this.downloadClient = new HttpDownloadClient();
}

public override Manga[] GetManga(string publicationTitle = "")
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join("%20", Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
string requestUrl = $"https://mangakatana.com/?search={sanitizedTitle}&search_by=book_name";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Manga>();
return [];

// ReSharper disable once MergeIntoPattern
// If a single result is found, the user will be redirected to the results directly instead of a result page

@@ -29,16 +29,16 @@ public class MangaKatana : MangaConnector
return new [] { ParseSinglePublicationFromHtml(requestResult.result, requestResult.redirectedToUrl.Split('/')[^1], requestResult.redirectedToUrl) };
}

Manga[] publications = ParsePublicationsFromHtml(requestResult.result);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] publications = ParsePublicationsFromHtml(requestResult.result);
return publications;
}

public override Manga? GetMangaFromId(string publicationId)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://mangakatana.com/manga/{publicationId}");
}

public override Manga? GetMangaFromUrl(string url)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromUrl(string url)
{
RequestResult requestResult =
downloadClient.MakeRequest(url, RequestType.MangaInfo);

@@ -47,7 +47,7 @@ public class MangaKatana : MangaConnector
return ParseSinglePublicationFromHtml(requestResult.result, url.Split('/')[^1], url);
}

private Manga[] ParsePublicationsFromHtml(Stream html)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] ParsePublicationsFromHtml(Stream html)
{
StreamReader reader = new(html);
string htmlString = reader.ReadToEnd();

@@ -55,7 +55,7 @@ public class MangaKatana : MangaConnector
document.LoadHtml(htmlString);
IEnumerable<HtmlNode> searchResults = document.DocumentNode.SelectNodes("//*[@id='book_list']/div");
if (searchResults is null || !searchResults.Any())
return Array.Empty<Manga>();
return [];
List<string> urls = new();
foreach (HtmlNode mangaResult in searchResults)
{

@@ -63,27 +63,27 @@ public class MangaKatana : MangaConnector
.First(a => a.Name == "href").Value);
}

HashSet<Manga> ret = new();
HashSet<(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])> ret = new();
foreach (string url in urls)
{
Manga? manga = GetMangaFromUrl(url);
if (manga is not null)
ret.Add((Manga)manga);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}

return ret.ToArray();
}

private Manga ParseSinglePublicationFromHtml(Stream html, string publicationId, string websiteUrl)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[]) ParseSinglePublicationFromHtml(Stream html, string publicationId, string websiteUrl)
{
StreamReader reader = new(html);
string htmlString = reader.ReadToEnd();
HtmlDocument document = new();
document.LoadHtml(htmlString);
Dictionary<string, string> altTitles = new();
Dictionary<string, string> altTitlesDict = new();
Dictionary<string, string>? links = null;
HashSet<string> tags = new();
string[] authors = Array.Empty<string>();
string[] authorNames = [];
string originalLanguage = "";
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;

@@ -102,10 +102,10 @@ public class MangaKatana : MangaConnector
case "altnames":
string[] alts = value.Split(" ; ");
for (int i = 0; i < alts.Length; i++)
altTitles.Add(i.ToString(), alts[i]);
altTitlesDict.Add(i.ToString(), alts[i]);
break;
case "authorsartists":
authors = value.Split(',');
authorNames = value.Split(',');
break;
case "status":
switch (value.ToLower())

@@ -120,29 +120,38 @@ public class MangaKatana : MangaConnector
}
}

string posterUrl = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[1]/div").Descendants("img").First()
string coverUrl = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[1]/div").Descendants("img").First()
.GetAttributes().First(a => a.Name == "src").Value;

string description = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[3]/p").InnerText;
while (description.StartsWith('\n'))
description = description.Substring(1);

int year = DateTime.Now.Year;
uint year = (uint)DateTime.Now.Year;
string yearString = infoTable.Descendants("div").First(d => d.HasClass("updateAt"))
.InnerText.Split('-')[^1];

if(yearString.Contains("ago") == false)
{
year = Convert.ToInt32(yearString);
year = uint.Parse(yearString);
}
Author[] authors = authorNames.Select(n => new Author(n)).ToArray();
MangaTag[] mangaTags = tags.Select(n => new MangaTag(n)).ToArray();
MangaAltTitle[] altTitles = altTitlesDict.Select(x => new MangaAltTitle(x.Key, x.Value)).ToArray();

Manga manga = //TODO
return manga;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1, null, null,
this.Name,
authors.Select(a => a.AuthorId).ToArray(),
mangaTags.Select(t => t.Tag).ToArray(),
[],
altTitles.Select(a => a.AltTitleId).ToArray());

return (manga, authors, mangaTags, [], altTitles);
}

public override Chapter[] GetChapters(Manga manga, string language="en")
{
Log($"Getting chapters {manga}");
string requestUrl = $"https://mangakatana.com/manga/{manga.MangaId}";
// Leaving this in for verification if the page exists
RequestResult requestResult =
@@ -12,27 +12,27 @@ public class MangaLife : MangaConnector
this.downloadClient = new ChromiumDownloadClient();
}

public override Manga[] GetManga(string publicationTitle = "")
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = WebUtility.UrlEncode(publicationTitle);
string requestUrl = $"https://manga4life.com/search/?name={sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Manga>();
return [];

if (requestResult.htmlDocument is null)
return Array.Empty<Manga>();
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return [];
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}

public override Manga? GetMangaFromId(string publicationId)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://manga4life.com/manga/{publicationId}");
}

public override Manga? GetMangaFromUrl(string url)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromUrl(string url)
{
Regex publicationIdRex = new(@"https:\/\/(www\.)?manga4life.com\/manga\/(.*)(\/.*)*");
string publicationId = publicationIdRex.Match(url).Groups[2].Value;

@@ -43,7 +43,7 @@ public class MangaLife : MangaConnector
return null;
}

private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] ParsePublicationsFromHtml(HtmlDocument document)
{
HtmlNode resultsNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']/div[last()]/div[1]/div");
if (resultsNode.Descendants("div").Count() == 1 && resultsNode.Descendants("div").First().HasClass("NoResults"))

@@ -51,21 +51,21 @@ public class MangaLife : MangaConnector
return [];
}

HashSet<Manga> ret = new();
List<(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])> ret = new();

foreach (HtmlNode resultNode in resultsNode.SelectNodes("div"))
{
string url = resultNode.Descendants().First(d => d.HasClass("SeriesName")).GetAttributeValue("href", "");
Manga? manga = GetMangaFromUrl($"https://manga4life.com{url}");
if (manga is not null)
ret.Add((Manga)manga);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? manga = GetMangaFromUrl($"https://manga4life.com{url}");
if (manga is { } x)
ret.Add(x);
}

return ret.ToArray();
}

private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[]) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string originalLanguage = "", status = "";
Dictionary<string, string> altTitles = new(), links = new();

@@ -73,7 +73,7 @@ public class MangaLife : MangaConnector
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;

HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//img");
string posterUrl = posterNode.GetAttributeValue("src", "");
string coverUrl = posterNode.GetAttributeValue("src", "");

HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//h1");
string sortName = titleNode.InnerText;

@@ -81,20 +81,22 @@ public class MangaLife : MangaConnector
HtmlNode[] authorsNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Author(s):']/..").Descendants("a")
.ToArray();
List<string> authors = new();
List<string> authorNames = new();
foreach (HtmlNode authorNode in authorsNodes)
authors.Add(authorNode.InnerText);
authorNames.Add(authorNode.InnerText);
Author[] authors = authorNames.Select(a => new Author(a)).ToArray();

HtmlNode[] genreNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Genre(s):']/..").Descendants("a")
.ToArray();
foreach (HtmlNode genreNode in genreNodes)
tags.Add(genreNode.InnerText);
MangaTag[] mangaTags = tags.Select(t => new MangaTag(t)).ToArray();

HtmlNode yearNode = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Released:']/..").Descendants("a")
.First();
int year = Convert.ToInt32(yearNode.InnerText);
uint year = uint.Parse(yearNode.InnerText);

HtmlNode[] statusNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Status:']/..").Descendants("a")

@@ -116,8 +118,15 @@ public class MangaLife : MangaConnector
.Descendants("div").First();
string description = descriptionNode.InnerText;

Manga manga = //TODO
return manga;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1, null, null,
this.Name,
authors.Select(a => a.AuthorId).ToArray(),
mangaTags.Select(t => t.Tag).ToArray(),
[],
[]);

return (manga, authors, mangaTags, [], []);
}

public override Chapter[] GetChapters(Manga manga, string language="en")
@@ -13,7 +13,7 @@ public class Manganato : MangaConnector
this.downloadClient = new HttpDownloadClient();
}

public override Manga[] GetManga(string publicationTitle = "")
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join('_', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
string requestUrl = $"https://manganato.com/search/story/{sanitizedTitle}";

@@ -21,11 +21,11 @@ public class Manganato : MangaConnector
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 ||requestResult.htmlDocument is null)
return [];
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}

private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] ParsePublicationsFromHtml(HtmlDocument document)
{
List<HtmlNode> searchResults = document.DocumentNode.Descendants("div").Where(n => n.HasClass("search-story-item")).ToList();
List<string> urls = new();

@@ -35,23 +35,23 @@ public class Manganato : MangaConnector
.First(a => a.Name == "href").Value);
}

HashSet<Manga> ret = new();
List<(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])> ret = new();
foreach (string url in urls)
{
Manga? manga = GetMangaFromUrl(url);
if (manga is not null)
ret.Add(manga);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}

return ret.ToArray();
}

public override Manga? GetMangaFromId(string publicationId)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://chapmanganato.com/{publicationId}");
}

public override Manga? GetMangaFromUrl(string url)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromUrl(string url)
{
RequestResult requestResult =
downloadClient.MakeRequest(url, RequestType.MangaInfo);

@@ -63,12 +63,12 @@ public class Manganato : MangaConnector
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
}

private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[]) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
Dictionary<string, string> altTitles = new();
Dictionary<string, string> altTitlesDict = new();
Dictionary<string, string>? links = null;
HashSet<string> tags = new();
string[] authors = Array.Empty<string>();
string[] authorNames = [];
string originalLanguage = "";
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;

@@ -89,12 +89,12 @@ public class Manganato : MangaConnector
case "alternative":
string[] alts = value.Split(" ; ");
for(int i = 0; i < alts.Length; i++)
altTitles.Add(i.ToString(), alts[i]);
altTitlesDict.Add(i.ToString(), alts[i]);
break;
case "authors":
authors = value.Split('-');
for (int i = 0; i < authors.Length; i++)
authors[i] = authors[i].Replace("\r\n", "");
authorNames = value.Split('-');
for (int i = 0; i < authorNames.Length; i++)
authorNames[i] = authorNames[i].Replace("\r\n", "");
break;
case "status":
switch (value.ToLower())

@@ -111,8 +111,11 @@ public class Manganato : MangaConnector
break;
}
}
Author[] authors = authorNames.Select(n => new Author(n)).ToArray();
MangaTag[] mangaTags = tags.Select(n => new MangaTag(n)).ToArray();
MangaAltTitle[] mangaAltTitles = altTitlesDict.Select(a => new MangaAltTitle(a.Key, a.Value)).ToArray();

string posterUrl = document.DocumentNode.Descendants("span").First(s => s.HasClass("info-image")).Descendants("img").First()
string coverUrl = document.DocumentNode.Descendants("span").First(s => s.HasClass("info-image")).Descendants("img").First()
.GetAttributes().First(a => a.Name == "src").Value;

string description = document.DocumentNode.Descendants("div").First(d => d.HasClass("panel-story-info-description"))

@@ -128,11 +131,18 @@ public class Manganato : MangaConnector
CultureInfo.InvariantCulture).Millisecond);

int year = DateTime.ParseExact(oldestChapter?.GetAttributeValue("title", "Dec 31 2400, 23:59")??"Dec 31 2400, 23:59", pattern,
uint year = (uint)DateTime.ParseExact(oldestChapter?.GetAttributeValue("title", "Dec 31 2400, 23:59")??"Dec 31 2400, 23:59", pattern,
CultureInfo.InvariantCulture).Year;

Manga manga = //TODO
return manga;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1, null, null,
this.Name,
authors.Select(a => a.AuthorId).ToArray(),
mangaTags.Select(t => t.Tag).ToArray(),
[],
mangaAltTitles.Select(a => a.AltTitleId).ToArray());

return (manga, authors, mangaTags, [], mangaAltTitles);
}

public override Chapter[] GetChapters(Manga manga, string language="en")
@@ -23,7 +23,7 @@ public class Mangasee : MangaConnector
public string[] a { get; set; }
}

public override Manga[] GetManga(string publicationTitle = "")
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] GetManga(string publicationTitle = "")
{
string requestUrl = "https://mangasee123.com/_search.php";
RequestResult requestResult =

@@ -41,10 +41,10 @@ public class Mangasee : MangaConnector

string[] urls = filteredResults.Select(result => $"https://mangasee123.com/manga/{result.i}").ToArray();
List<Manga> searchResultManga = new();
List<(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])> searchResultManga = new();
foreach (string url in urls)
{
Manga? newManga = GetMangaFromUrl(url);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? newManga = GetMangaFromUrl(url);
if(newManga is { } manga)
searchResultManga.Add(manga);
}

@@ -79,12 +79,12 @@ public class Mangasee : MangaConnector
return ret.ToArray();
}

public override Manga? GetMangaFromId(string publicationId)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://mangasee123.com/manga/{publicationId}");
}

public override Manga? GetMangaFromUrl(string url)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromUrl(string url)
{
Regex publicationIdRex = new(@"https:\/\/mangasee123.com\/manga\/(.*)(\/.*)*");
string publicationId = publicationIdRex.Match(url).Groups[1].Value;

@@ -95,7 +95,7 @@ public class Mangasee : MangaConnector
return null;
}

private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[]) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string originalLanguage = "", status = "";
Dictionary<string, string> altTitles = new(), links = new();

@@ -103,7 +103,7 @@ public class Mangasee : MangaConnector
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;

HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//img");
string posterUrl = posterNode.GetAttributeValue("src", "");
string coverUrl = posterNode.GetAttributeValue("src", "");

HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//h1");
string sortName = titleNode.InnerText;

@@ -111,20 +111,22 @@ public class Mangasee : MangaConnector
HtmlNode[] authorsNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Author(s):']/..").Descendants("a")
.ToArray();
List<string> authors = new();
List<string> authorNames = new();
foreach (HtmlNode authorNode in authorsNodes)
authors.Add(authorNode.InnerText);
authorNames.Add(authorNode.InnerText);
Author[] authors = authorNames.Select(a => new Author(a)).ToArray();

HtmlNode[] genreNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Genre(s):']/..").Descendants("a")
.ToArray();
foreach (HtmlNode genreNode in genreNodes)
tags.Add(genreNode.InnerText);
MangaTag[] mangaTags = tags.Select(t => new MangaTag(t)).ToArray();

HtmlNode yearNode = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Released:']/..").Descendants("a")
.First();
int year = Convert.ToInt32(yearNode.InnerText);
uint year = uint.Parse(yearNode.InnerText);

HtmlNode[] statusNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Status:']/..").Descendants("a")

@@ -146,8 +148,15 @@ public class Mangasee : MangaConnector
.Descendants("div").First();
string description = descriptionNode.InnerText;

Manga manga = //TODO
return manga;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1, null, null,
this.Name,
authors.Select(a => a.AuthorId).ToArray(),
mangaTags.Select(t => t.Tag).ToArray(),
[],
[]);

return (manga, authors, mangaTags, [], []);
}

public override Chapter[] GetChapters(Manga manga, string language="en")

@@ -180,7 +189,7 @@ public class Mangasee : MangaConnector
}
catch (HttpRequestException e)
{
return Array.Empty<Chapter>();
return [];
}
}
@ -12,49 +12,49 @@ public class Mangaworld : MangaConnector
|
||||
this.downloadClient = new HttpDownloadClient();
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
|
||||
string requestUrl = $"https://www.mangaworld.ac/archive?keyword={sanitizedTitle}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Manga>();
|
||||
return [];
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
return Array.Empty<Manga>();
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
|
||||
return [];
|
||||
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
|
||||
return publications;
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
{
|
||||
if (!document.DocumentNode.SelectSingleNode("//div[@class='comics-grid']").ChildNodes
|
||||
.Any(node => node.HasClass("entry")))
|
||||
return Array.Empty<Manga>();
|
||||
return [];
|
||||
|
||||
List<string> urls = document.DocumentNode
|
||||
.SelectNodes(
|
||||
"//div[@class='comics-grid']//div[@class='entry']//a[contains(concat(' ',normalize-space(@class),' '),'thumb')]")
|
||||
.Select(thumb => thumb.GetAttributeValue("href", "")).ToList();
|
||||
|
||||
HashSet<Manga> ret = new();
|
||||
List<(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])> ret = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Manga? manga = GetMangaFromUrl(url);
|
||||
if (manga is not null)
|
||||
ret.Add((Manga)manga);
|
||||
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? manga = GetMangaFromUrl(url);
|
||||
if (manga is { } x)
|
||||
ret.Add(x);
|
||||
}
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://www.mangaworld.ac/manga/{publicationId}");
}

public override Manga? GetMangaFromUrl(string url)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromUrl(string url)
{
RequestResult requestResult =
downloadClient.MakeRequest(url, RequestType.MangaInfo);
@ -69,9 +69,9 @@ public class Mangaworld : MangaConnector
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, id, url);
}

private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[]) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
Dictionary<string, string> altTitles = new();
Dictionary<string, string> altTitlesDict = new();
Dictionary<string, string>? links = null;
string originalLanguage = "";
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
@ -83,18 +83,20 @@ public class Mangaworld : MangaConnector
HtmlNode metadata = infoNode.Descendants().First(d => d.HasClass("meta-data"));

HtmlNode altTitlesNode = metadata.SelectSingleNode("//span[text()='Titoli alternativi: ' or text()='Titolo alternativo: ']/..").ChildNodes[1];

string[] alts = altTitlesNode.InnerText.Split(", ");
for(int i = 0; i < alts.Length; i++)
altTitles.Add(i.ToString(), alts[i]);
altTitlesDict.Add(i.ToString(), alts[i]);
MangaAltTitle[] altTitles = altTitlesDict.Select(a => new MangaAltTitle(a.Key, a.Value)).ToArray();

HtmlNode genresNode =
metadata.SelectSingleNode("//span[text()='Generi: ' or text()='Genero: ']/..");
HashSet<string> tags = genresNode.SelectNodes("a").Select(node => node.InnerText).ToHashSet();
MangaTag[] mangaTags = tags.Select(t => new MangaTag(t)).ToArray();

HtmlNode authorsNode =
metadata.SelectSingleNode("//span[text()='Autore: ' or text()='Autori: ']/..");
string[] authors = authorsNode.SelectNodes("a").Select(node => node.InnerText).ToArray();
string[] authorNames = authorsNode.SelectNodes("a").Select(node => node.InnerText).ToArray();
Author[] authors = authorNames.Select(n => new Author(n)).ToArray();

string status = metadata.SelectSingleNode("//span[text()='Stato: ']/..").SelectNodes("a").First().InnerText;
// ReSharper disable 5 times StringLiteralTypo
@ -107,15 +109,22 @@ public class Mangaworld : MangaConnector
case "in corso": releaseStatus = MangaReleaseStatus.Continuing; break;
}

string posterUrl = document.DocumentNode.SelectSingleNode("//img[@class='rounded']").GetAttributeValue("src", "");
string coverUrl = document.DocumentNode.SelectSingleNode("//img[@class='rounded']").GetAttributeValue("src", "");

string description = document.DocumentNode.SelectSingleNode("//div[@id='noidungm']").InnerText;

string yearString = metadata.SelectSingleNode("//span[text()='Anno di uscita: ']/..").SelectNodes("a").First().InnerText;
int year = Convert.ToInt32(yearString);
uint year = uint.Parse(yearString);

Manga manga = //TODO
return manga;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1, null, null,
this.Name,
authors.Select(a => a.AuthorId).ToArray(),
mangaTags.Select(t => t.Tag).ToArray(),
[],
altTitles.Select(a => a.AltTitleId).ToArray());

return (manga, authors, mangaTags, [], altTitles);
}
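Note on the year handling just above: `uint.Parse(yearString)` throws a FormatException whenever the scraped "Anno di uscita" text is not a plain number. A minimal defensive sketch (not part of this commit; the sample string and the 0 fallback are assumptions mirroring the rest of the diff):

```csharp
using System;

// Sketch only: tolerant release-year parsing with an explicit fallback.
string yearString = "2021"; // stands in for the InnerText scraped above
uint year = uint.TryParse(yearString, out uint parsed) ? parsed : 0;
Console.WriteLine(year); // 2021
```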

public override Chapter[] GetChapters(Manga manga, string language="en")
@ -145,16 +154,18 @@ public class Mangaworld : MangaConnector
{
foreach (HtmlNode volNode in document.DocumentNode.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),'volume-element')]"))
{
string volume = volumeRex.Match(volNode.SelectNodes("div").First(node => node.HasClass("volume")).SelectSingleNode("p").InnerText).Groups[1].Value;
string volumeStr = volumeRex.Match(volNode.SelectNodes("div").First(node => node.HasClass("volume")).SelectSingleNode("p").InnerText).Groups[1].Value;
float volume = float.Parse(volumeStr);
foreach (HtmlNode chNode in volNode.SelectNodes("div").First(node => node.HasClass("volume-chapters")).SelectNodes("div"))
{

string number = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
string numberStr = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
float chapter = float.Parse(numberStr);
string url = chNode.SelectSingleNode("a").GetAttributeValue("href", "");
string id = idRex.Match(chNode.SelectSingleNode("a").GetAttributeValue("href", "")).Groups[1].Value;
try
{
ret.Add(new Chapter(manga, null, volume, number, url, id));
ret.Add(new Chapter(manga, url, chapter, volume, null));
}
catch (Exception e)
{
@ -166,12 +177,13 @@ public class Mangaworld : MangaConnector
{
foreach (HtmlNode chNode in chaptersWrapper.SelectNodes("div").Where(node => node.HasClass("chapter")))
{
string number = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
string numberStr = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
float chapter = float.Parse(numberStr);
string url = chNode.SelectSingleNode("a").GetAttributeValue("href", "");
string id = idRex.Match(chNode.SelectSingleNode("a").GetAttributeValue("href", "")).Groups[1].Value;
try
{
ret.Add(new Chapter(manga, null, null, number, url, id));
ret.Add(new Chapter(manga, url, chapter, null, null));
}
catch (Exception e)
{
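Both chapter loops above feed the regex captures into `float.Parse`, which uses the host's current culture; on a locale with a comma decimal separator, a chapter number like "10.5" would throw. A culture-invariant sketch (an assumption about intent, not the committed code):

```csharp
using System;
using System.Globalization;

// Sketch only: parse chapter/volume numbers independently of the host culture.
string numberStr = "10.5"; // stands in for the chapterRex capture used above
float chapter = float.Parse(numberStr, NumberStyles.Float, CultureInfo.InvariantCulture);
Console.WriteLine(chapter); // 10.5
```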

@ -12,7 +12,7 @@ public class ManhuaPlus : MangaConnector
this.downloadClient = new ChromiumDownloadClient();
}

public override Manga[] GetManga(string publicationTitle = "")
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
string requestUrl = $"https://manhuaplus.org/search?keyword={sanitizedTitle}";
@ -21,37 +21,37 @@ public class ManhuaPlus : MangaConnector
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
return [];

Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}
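A side note on the title sanitizer above: the character class `[A-z]` also matches the six ASCII characters between 'Z' and 'a' (including '_' and '^'), so "Some_Title" survives as a single token. The commit keeps the original pattern; a stricter variant would read (sketch only, sample input invented):

```csharp
using System;
using System.Linq;
using System.Text.RegularExpressions;

// Sketch only: the same sanitizer with an explicit [A-Za-z] class.
string publicationTitle = "Some_Title! 2";
string sanitizedTitle = string.Join(' ',
    Regex.Matches(publicationTitle, "[A-Za-z]*").Where(m => m.Value.Length > 0)).ToLower();
Console.WriteLine(sanitizedTitle); // "some title" ("[A-z]*" would yield "some_title")
```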

private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] ParsePublicationsFromHtml(HtmlDocument document)
{
if (document.DocumentNode.SelectSingleNode("//h1/../..").ChildNodes//I already want to not.
.Any(node => node.InnerText.Contains("No manga found")))
return Array.Empty<Manga>();
return [];

List<string> urls = document.DocumentNode
.SelectNodes("//h1/../..//a[contains(@href, 'https://manhuaplus.org/manga/') and contains(concat(' ',normalize-space(@class),' '),' clamp ') and not(contains(@href, '/chapter'))]")
.Select(mangaNode => mangaNode.GetAttributeValue("href", "")).ToList();

HashSet<Manga> ret = new();
List<(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])> ret = new();
foreach (string url in urls)
{
Manga? manga = GetMangaFromUrl(url);
if (manga is not null)
ret.Add((Manga)manga);
(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}

return ret.ToArray();
}
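The loop above switches from `if (manga is not null) ret.Add((Manga)manga);` to a `{ }` property pattern because `GetMangaFromUrl` now returns a nullable value tuple, which cannot simply be cast to `Manga`. A stand-alone illustration of the same idiom (placeholder tuple, not a type from this repository):

```csharp
using System;

// Sketch only: unwrapping a nullable value tuple with "is { } x".
(int Id, string Title)? maybe = (1, "example");
if (maybe is { } value)             // matches only when the tuple actually has a value
    Console.WriteLine(value.Title); // prints "example"
```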

public override Manga? GetMangaFromId(string publicationId)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://manhuaplus.org/manga/{publicationId}");
}

public override Manga? GetMangaFromUrl(string url)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromUrl(string url)
{
Regex publicationIdRex = new(@"https:\/\/manhuaplus.org\/manga\/(.*)(\/.*)*");
string publicationId = publicationIdRex.Match(url).Groups[1].Value;
@ -62,7 +62,7 @@ public class ManhuaPlus : MangaConnector
return null;
}

private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[]) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string originalLanguage = "", status = "";
Dictionary<string, string> altTitles = new(), links = new();
@ -71,23 +71,24 @@ public class ManhuaPlus : MangaConnector

HtmlNode posterNode = document.DocumentNode.SelectSingleNode("/html/body/main/div/div/div[2]/div[1]/figure/a/img");//BRUH
Regex posterRex = new(@".*(\/uploads/covers/[a-zA-Z0-9\-\._\~\!\$\&\'\(\)\*\+\,\;\=\:\@]+).*");
string posterUrl = $"https://manhuaplus.org/{posterRex.Match(posterNode.GetAttributeValue("src", "")).Groups[1].Value}";
string coverUrl = $"https://manhuaplus.org/{posterRex.Match(posterNode.GetAttributeValue("src", "")).Groups[1].Value}";

HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//h1");
string sortName = titleNode.InnerText.Replace("\n", "");

List<string> authors = new();
List<string> authorNames = new();
try
{
HtmlNode[] authorsNodes = document.DocumentNode
.SelectNodes("//a[contains(@href, 'https://manhuaplus.org/authors/')]")
.ToArray();
foreach (HtmlNode authorNode in authorsNodes)
authors.Add(authorNode.InnerText);
authorNames.Add(authorNode.InnerText);
}
catch (ArgumentNullException e)
{
}
Author[] authors = authorNames.Select(a => new Author(a)).ToArray();

try
{
@ -99,11 +100,12 @@ public class ManhuaPlus : MangaConnector
catch (ArgumentNullException e)
{
}
MangaTag[] mangaTags = tags.Select(t => new MangaTag(t)).ToArray();

Regex yearRex = new(@"(?:[0-9]{1,2}\/){2}([0-9]{2,4}) [0-9]{1,2}:[0-9]{1,2}");
HtmlNode yearNode = document.DocumentNode.SelectSingleNode("//aside//i[contains(concat(' ',normalize-space(@class),' '),' fa-clock ')]/../span");
Match match = yearRex.Match(yearNode.InnerText);
int year = match.Success && match.Groups[1].Success ? int.Parse(match.Groups[1].Value) : 1960;
uint year = match.Success && match.Groups[1].Success ? uint.Parse(match.Groups[1].Value) : 0;

status = document.DocumentNode.SelectSingleNode("//aside//i[contains(concat(' ',normalize-space(@class),' '),' fa-rss ')]/../span").InnerText.Replace("\n", "");
switch (status.ToLower())
@ -119,8 +121,15 @@ public class ManhuaPlus : MangaConnector
.SelectSingleNode("//div[@id='syn-target']");
string description = descriptionNode.InnerText;

Manga manga = //TODO
return manga;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1, null, null,
this.Name,
authors.Select(a => a.AuthorId).ToArray(),
mangaTags.Select(t => t.Tag).ToArray(),
[],
[]);

return (manga, authors, mangaTags, [], []);
}
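The release year above is now read from the "updated" timestamp via `yearRex`, with the fallback changed from 1960 to 0 when nothing matches. A quick illustration of that regex (the sample timestamp is invented, not real site output):

```csharp
using System;
using System.Text.RegularExpressions;

// Sketch only: the year regex from above applied to a sample "dd/MM/yyyy HH:mm" string.
Regex yearRex = new(@"(?:[0-9]{1,2}\/){2}([0-9]{2,4}) [0-9]{1,2}:[0-9]{1,2}");
Match match = yearRex.Match("24/12/2023 18:05");
uint year = match.Success && match.Groups[1].Success ? uint.Parse(match.Groups[1].Value) : 0;
Console.WriteLine(year); // 2023
```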

public override Chapter[] GetChapters(Manga manga, string language="en")

@ -17,7 +17,7 @@ public class Weebcentral : MangaConnector
downloadClient = new ChromiumDownloadClient();
}

public override Manga[] GetManga(string publicationTitle = "")
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] GetManga(string publicationTitle = "")
{
const int limit = 32; //How many values we want returned at once
var offset = 0; //"Page"
@ -36,7 +36,7 @@ public class Weebcentral : MangaConnector
return publications;
}

private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])[] ParsePublicationsFromHtml(HtmlDocument document)
{
if (document.DocumentNode.SelectNodes("//article") == null)
return [];
@ -44,18 +44,18 @@ public class Weebcentral : MangaConnector
var urls = document.DocumentNode.SelectNodes("/html/body/article/a[@class='link link-hover']")
.Select(elem => elem.GetAttributeValue("href", "")).ToList();

HashSet<Manga> ret = new();
List<(Manga, Author[], MangaTag[], Link[], MangaAltTitle[])> ret = new();
foreach (var url in urls)
{
var manga = GetMangaFromUrl(url);
if (manga is not null)
ret.Add((Manga)manga);
if (manga is { } x)
ret.Add(x);
}

return ret.ToArray();
}

public override Manga? GetMangaFromUrl(string url)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromUrl(string url)
{
Regex publicationIdRex = new(@"https:\/\/weebcentral\.com\/series\/(\w*)\/(.*)");
var publicationId = publicationIdRex.Match(url).Groups[1].Value;
@ -67,22 +67,24 @@ public class Weebcentral : MangaConnector
return null;
}
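`publicationIdRex` above pulls the series id out of a canonical Weebcentral URL; for illustration (the URL below is invented):

```csharp
using System;
using System.Text.RegularExpressions;

// Sketch only: extracting the series id with the regex defined above.
Regex publicationIdRex = new(@"https:\/\/weebcentral\.com\/series\/(\w*)\/(.*)");
Match m = publicationIdRex.Match("https://weebcentral.com/series/01ABCDEF/some-title");
Console.WriteLine(m.Groups[1].Value); // "01ABCDEF"
```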

private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
private (Manga, Author[], MangaTag[], Link[], MangaAltTitle[]) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
var posterNode =
document.DocumentNode.SelectSingleNode("//section[@class='flex items-center justify-center']/picture/img");
var posterUrl = posterNode?.GetAttributeValue("src", "") ?? "";
var coverUrl = posterNode?.GetAttributeValue("src", "") ?? "";

var titleNode = document.DocumentNode.SelectSingleNode("//section/h1");
var sortName = titleNode?.InnerText ?? "Undefined";

HtmlNode[] authorsNodes =
document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Author(s): ']/span")?.ToArray() ?? [];
var authors = authorsNodes.Select(n => n.InnerText).ToList();
var authorNames = authorsNodes.Select(n => n.InnerText).ToList();
Author[] authors = authorNames.Select(n => new Author(n)).ToArray();

HtmlNode[] genreNodes =
document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Tags(s): ']/span")?.ToArray() ?? [];
HashSet<string> tags = genreNodes.Select(n => n.InnerText).ToHashSet();
MangaTag[] mangaTags = tags.Select(t => new MangaTag(t)).ToArray();

var statusNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Status: ']/a");
var status = statusNode?.InnerText ?? "";
@ -96,24 +98,32 @@ public class Weebcentral : MangaConnector
}

var yearNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Released: ']/span");
var year = Convert.ToInt32(yearNode?.InnerText ?? "0");
var year = uint.Parse(yearNode?.InnerText ?? "0");

var descriptionNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Description']/p");
var description = descriptionNode?.InnerText ?? "Undefined";

HtmlNode[] altTitleNodes = document.DocumentNode
.SelectNodes("//ul/li[strong/text() = 'Associated Name(s)']/ul/li")?.ToArray() ?? [];
Dictionary<string, string> altTitles = new(), links = new();
Dictionary<string, string> altTitlesDict = new(), links = new();
for (var i = 0; i < altTitleNodes.Length; i++)
altTitles.Add(i.ToString(), altTitleNodes[i].InnerText);
altTitlesDict.Add(i.ToString(), altTitleNodes[i].InnerText);
MangaAltTitle[] altTitles = altTitlesDict.Select(a => new MangaAltTitle(a.Key, a.Value)).ToArray();

var originalLanguage = "";

Manga manga = //TODO
return manga;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1, null, null,
this.Name,
authors.Select(a => a.AuthorId).ToArray(),
mangaTags.Select(t => t.Tag).ToArray(),
[],
altTitles.Select(a => a.AltTitleId).ToArray());

return (manga, authors, mangaTags, [], altTitles);
}
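The parser above leans on the `?.ToArray() ?? []` idiom because HtmlAgilityPack's `SelectNodes` returns null, not an empty collection, when an XPath query finds nothing. A self-contained sketch of that behaviour (the sample HTML is made up):

```csharp
using System;
using HtmlAgilityPack;

// Sketch only: SelectNodes yields null on no match, hence the "?.ToArray() ?? []" above.
HtmlDocument doc = new();
doc.LoadHtml("<html><body><p>no list here</p></body></html>");
HtmlNode[] items = doc.DocumentNode.SelectNodes("//ul/li")?.ToArray() ?? [];
Console.WriteLine(items.Length); // 0 instead of a NullReferenceException
```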

public override Manga? GetMangaFromId(string publicationId)
public override (Manga, Author[], MangaTag[], Link[], MangaAltTitle[])? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://weebcentral.com/series/{publicationId}");
}
@ -1,217 +0,0 @@
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class AsuraToon : MangaConnector
|
||||
{
|
||||
|
||||
public AsuraToon(GlobalBase clone) : base(clone, "AsuraToon", ["en"], ["asuracomic.net"])
|
||||
{
|
||||
this.downloadClient = new ChromiumDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
|
||||
string requestUrl = $"https://asuracomic.net/series?name={sanitizedTitle}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
Log($"Failed to retrieve site");
|
||||
return Array.Empty<Manga>();
|
||||
}
|
||||
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
|
||||
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
|
||||
return publications;
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
return GetMangaFromUrl($"https://asuracomic.net/series/{publicationId}");
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return null;
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
Log($"Failed to retrieve site");
|
||||
return null;
|
||||
}
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
{
|
||||
HtmlNodeCollection mangaList = document.DocumentNode.SelectNodes("//a[starts-with(@href,'series')]");
|
||||
if (mangaList is null || mangaList.Count < 1)
|
||||
return [];
|
||||
|
||||
IEnumerable<string> urls = mangaList.Select(a => $"https://asuracomic.net/{a.GetAttributeValue("href", "")}");
|
||||
|
||||
List<Manga> ret = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Manga? manga = GetMangaFromUrl(url);
|
||||
if (manga is not null)
|
||||
ret.Add((Manga)manga);
|
||||
}
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
string? originalLanguage = null;
|
||||
Dictionary<string, string> altTitles = new(), links = new();
|
||||
|
||||
HtmlNodeCollection genreNodes = document.DocumentNode.SelectNodes("//h3[text()='Genres']/../div/button");
|
||||
string[] tags = genreNodes.Select(b => b.InnerText).ToArray();
|
||||
|
||||
HtmlNode statusNode = document.DocumentNode.SelectSingleNode("//h3[text()='Status']/../h3[2]");
|
||||
Manga.ReleaseStatusByte releaseStatus = statusNode.InnerText.ToLower() switch
|
||||
{
|
||||
"ongoing" => Manga.ReleaseStatusByte.Continuing,
|
||||
"hiatus" => Manga.ReleaseStatusByte.OnHiatus,
|
||||
"completed" => Manga.ReleaseStatusByte.Completed,
|
||||
"dropped" => Manga.ReleaseStatusByte.Cancelled,
|
||||
"season end" => Manga.ReleaseStatusByte.Continuing,
|
||||
"coming soon" => Manga.ReleaseStatusByte.Unreleased,
|
||||
_ => Manga.ReleaseStatusByte.Unreleased
|
||||
};
|
||||
|
||||
HtmlNode coverNode =
|
||||
document.DocumentNode.SelectSingleNode("//img[@alt='poster']");
|
||||
string coverUrl = coverNode.GetAttributeValue("src", "");
|
||||
string coverFileNameInCache = SaveCoverImageToCache(coverUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
HtmlNode titleNode =
|
||||
document.DocumentNode.SelectSingleNode("//title");
|
||||
string sortName = Regex.Match(titleNode.InnerText, @"(.*) - Asura Scans").Groups[1].Value;
|
||||
|
||||
HtmlNode descriptionNode =
|
||||
document.DocumentNode.SelectSingleNode("//h3[starts-with(text(),'Synopsis')]/../span");
|
||||
string description = descriptionNode?.InnerText??"";
|
||||
|
||||
HtmlNodeCollection authorNodes = document.DocumentNode.SelectNodes("//h3[text()='Author']/../h3[not(text()='Author' or text()='_')]");
|
||||
HtmlNodeCollection artistNodes = document.DocumentNode.SelectNodes("//h3[text()='Artist']/../h3[not(text()='Artist' or text()='_')]");
|
||||
IEnumerable<string> authorNames = authorNodes is null ? [] : authorNodes.Select(a => a.InnerText);
|
||||
IEnumerable<string> artistNames = artistNodes is null ? [] : artistNodes.Select(a => a.InnerText);
|
||||
List<string> authors = authorNames.Concat(artistNames).ToList();
|
||||
|
||||
HtmlNode? firstChapterNode = document.DocumentNode.SelectSingleNode("//a[contains(@href, 'chapter/1')]/../following-sibling::h3");
|
||||
int? year = int.Parse(firstChapterNode?.InnerText.Split(' ')[^1] ?? "2000");
|
||||
|
||||
Manga manga = new (this, sortName, authors, description, altTitles, tags, coverUrl, coverFileNameInCache, links,
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
string requestUrl = $"https://asuracomic.net/series/{manga.publicationId}";
|
||||
// Leaving this in for verification if the page exists
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Chapter>();
|
||||
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestUrl);
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
private List<Chapter> ParseChaptersFromHtml(Manga manga, string mangaUrl)
|
||||
{
|
||||
RequestResult result = downloadClient.MakeRequest(mangaUrl, RequestType.Default);
|
||||
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
|
||||
{
|
||||
Log("Failed to load site");
|
||||
return new List<Chapter>();
|
||||
}
|
||||
|
||||
List<Chapter> ret = new();
|
||||
|
||||
HtmlNodeCollection chapterURLNodes = result.htmlDocument.DocumentNode.SelectNodes("//a[contains(@href, '/chapter/')]");
|
||||
Regex infoRex = new(@"Chapter ([0-9]+)(.*)?");
|
||||
|
||||
foreach (HtmlNode chapterInfo in chapterURLNodes)
|
||||
{
|
||||
string chapterUrl = chapterInfo.GetAttributeValue("href", "");
|
||||
|
||||
Match match = infoRex.Match(chapterInfo.InnerText);
|
||||
string chapterNumber = match.Groups[1].Value;
|
||||
string? chapterName = match.Groups[2].Success && match.Groups[2].Length > 1 ? match.Groups[2].Value : null;
|
||||
string url = $"https://asuracomic.net/series/{chapterUrl}";
|
||||
try
|
||||
{
|
||||
ret.Add(new Chapter(manga, chapterName, null, chapterNumber, url));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
string requestUrl = chapter.url;
|
||||
// Leaving this in to check if the page exists
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
|
||||
string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
|
||||
|
||||
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken);
|
||||
}
|
||||
|
||||
private string[] ParseImageUrlsFromHtml(string mangaUrl)
|
||||
{
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(mangaUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
return Array.Empty<string>();
|
||||
}
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
Log($"Failed to retrieve site");
|
||||
return Array.Empty<string>();
|
||||
}
|
||||
|
||||
HtmlNodeCollection images =
|
||||
requestResult.htmlDocument.DocumentNode.SelectNodes("//img[contains(@alt, 'chapter page')]");
|
||||
|
||||
return images.Select(i => i.GetAttributeValue("src", "")).ToArray();
|
||||
}
|
||||
}
|
@ -1,230 +0,0 @@
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class Bato : MangaConnector
|
||||
{
|
||||
|
||||
public Bato(GlobalBase clone) : base(clone, "Bato", ["en"], ["bato.to"])
|
||||
{
|
||||
this.downloadClient = new HttpDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
|
||||
string requestUrl = $"https://bato.to/v3x-search?word={sanitizedTitle}&lang=en";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
Log($"Failed to retrieve site");
|
||||
return Array.Empty<Manga>();
|
||||
}
|
||||
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
|
||||
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
|
||||
return publications;
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
return GetMangaFromUrl($"https://bato.to/title/{publicationId}");
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return null;
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
Log($"Failed to retrieve site");
|
||||
return null;
|
||||
}
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
{
|
||||
HtmlNode mangaList = document.DocumentNode.SelectSingleNode("//div[@data-hk='0-0-2']");
|
||||
if (!mangaList.ChildNodes.Any(node => node.Name == "div"))
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
List<string> urls = mangaList.ChildNodes
|
||||
.Select(node => $"https://bato.to{node.Descendants("div").First().FirstChild.GetAttributeValue("href", "")}").ToList();
|
||||
|
||||
HashSet<Manga> ret = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Manga? manga = GetMangaFromUrl(url);
|
||||
if (manga is not null)
|
||||
ret.Add((Manga)manga);
|
||||
}
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
HtmlNode infoNode = document.DocumentNode.SelectSingleNode("/html/body/div/main/div[1]/div[2]");
|
||||
|
||||
string sortName = infoNode.Descendants("h3").First().InnerText;
|
||||
string description = document.DocumentNode
|
||||
.SelectSingleNode("//div[contains(concat(' ',normalize-space(@class),' '),'prose')]").InnerText;
|
||||
|
||||
string[] altTitlesList = infoNode.ChildNodes[1].ChildNodes[2].InnerText.Split('/');
|
||||
int i = 0;
|
||||
Dictionary<string, string> altTitles = altTitlesList.ToDictionary(s => i++.ToString(), s => s);
|
||||
|
||||
string posterUrl = document.DocumentNode.SelectNodes("//img")
|
||||
.First(child => child.GetAttributeValue("data-hk", "") == "0-1-0").GetAttributeValue("src", "").Replace("&", "&");
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
List<HtmlNode> genreNodes = document.DocumentNode.SelectSingleNode("//b[text()='Genres:']/..").SelectNodes("span").ToList();
|
||||
string[] tags = genreNodes.Select(node => node.FirstChild.InnerText).ToArray();
|
||||
|
||||
List<HtmlNode> authorsNodes = infoNode.ChildNodes[1].ChildNodes[3].Descendants("a").ToList();
|
||||
List<string> authors = authorsNodes.Select(node => node.InnerText.Replace("amp;", "")).ToList();
|
||||
|
||||
HtmlNode? originalLanguageNode = document.DocumentNode.SelectSingleNode("//span[text()='Tr From']/..");
|
||||
string originalLanguage = originalLanguageNode is not null ? originalLanguageNode.LastChild.InnerText : "";
|
||||
|
||||
if (!int.TryParse(
|
||||
document.DocumentNode.SelectSingleNode("//span[text()='Original Publication:']/..").LastChild.InnerText.Split('-')[0],
|
||||
out int year))
|
||||
year = DateTime.Now.Year;
|
||||
|
||||
string status = document.DocumentNode.SelectSingleNode("//span[text()='Original Publication:']/..")
|
||||
.ChildNodes[2].InnerText;
|
||||
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
|
||||
switch (status.ToLower())
|
||||
{
|
||||
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
|
||||
case "completed": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
|
||||
case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
|
||||
case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
|
||||
case "pending": releaseStatus = Manga.ReleaseStatusByte.Unreleased; break;
|
||||
}
|
||||
|
||||
Manga manga = new (this, sortName, authors, description, altTitles, tags, posterUrl, coverFileNameInCache, new Dictionary<string, string>(),
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
string requestUrl = $"https://bato.to/title/{manga.publicationId}";
|
||||
// Leaving this in for verification if the page exists
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Chapter>();
|
||||
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestUrl);
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
private List<Chapter> ParseChaptersFromHtml(Manga manga, string mangaUrl)
|
||||
{
|
||||
RequestResult result = downloadClient.MakeRequest(mangaUrl, RequestType.Default);
|
||||
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
|
||||
{
|
||||
Log("Failed to load site");
|
||||
return new List<Chapter>();
|
||||
}
|
||||
|
||||
List<Chapter> ret = new();
|
||||
|
||||
HtmlNode chapterList =
|
||||
result.htmlDocument.DocumentNode.SelectSingleNode("/html/body/div/main/div[3]/astro-island/div/div[2]/div/div/astro-slot");
|
||||
|
||||
Regex numberRex = new(@"\/title\/.+\/([0-9])+(?:-vol_([0-9]+))?-ch_([0-9\.]+)");
|
||||
|
||||
foreach (HtmlNode chapterInfo in chapterList.SelectNodes("div"))
|
||||
{
|
||||
HtmlNode infoNode = chapterInfo.FirstChild.FirstChild;
|
||||
string chapterUrl = infoNode.GetAttributeValue("href", "");
|
||||
|
||||
Match match = numberRex.Match(chapterUrl);
|
||||
string id = match.Groups[1].Value;
|
||||
string? volumeNumber = match.Groups[2].Success ? match.Groups[2].Value : null;
|
||||
string chapterNumber = match.Groups[3].Value;
|
||||
string chapterName = chapterNumber;
|
||||
string url = $"https://bato.to{chapterUrl}?load=2";
|
||||
try
|
||||
{
|
||||
ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
string requestUrl = chapter.url;
|
||||
// Leaving this in to check if the page exists
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
|
||||
string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
|
||||
|
||||
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken);
|
||||
}
|
||||
|
||||
private string[] ParseImageUrlsFromHtml(string mangaUrl)
|
||||
{
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(mangaUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
return Array.Empty<string>();
|
||||
}
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
Log($"Failed to retrieve site");
|
||||
return Array.Empty<string>();
|
||||
}
|
||||
|
||||
HtmlDocument document = requestResult.htmlDocument;
|
||||
|
||||
HtmlNode images = document.DocumentNode.SelectNodes("//astro-island").First(node =>
|
||||
node.GetAttributeValue("component-url", "").Contains("/_astro/ImageList."));
|
||||
|
||||
string weirdString = images.OuterHtml;
|
||||
string weirdString2 = Regex.Match(weirdString, @"props=\""(.*)}\""").Groups[1].Value;
|
||||
string[] urls = Regex.Matches(weirdString2, @"(https:\/\/[A-z\-0-9\.\?\&\;\=\/]+)\\")
|
||||
.Select(match => match.Groups[1].Value.Replace("&", "&")).ToArray();
|
||||
|
||||
return urls;
|
||||
}
|
||||
}
|
@ -1,115 +0,0 @@
|
||||
using System.Net;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using PuppeteerSharp;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
internal class ChromiumDownloadClient : DownloadClient
|
||||
{
|
||||
private static IBrowser? _browser;
|
||||
private const int StartTimeoutMs = 10000;
|
||||
private readonly HttpDownloadClient _httpDownloadClient;
|
||||
|
||||
private static async Task<IBrowser> StartBrowser(Logging.Logger? logger = null)
|
||||
{
|
||||
logger?.WriteLine("Starting ChromiumDownloadClient Puppeteer");
|
||||
return await Puppeteer.LaunchAsync(new LaunchOptions
|
||||
{
|
||||
Headless = true,
|
||||
Args = new [] {
|
||||
"--disable-gpu",
|
||||
"--disable-dev-shm-usage",
|
||||
"--disable-setuid-sandbox",
|
||||
"--no-sandbox"},
|
||||
Timeout = StartTimeoutMs
|
||||
}, new LoggerFactory([new LogProvider(logger)]));
|
||||
}
|
||||
|
||||
private class LogProvider : GlobalBase, ILoggerProvider
|
||||
{
|
||||
public LogProvider(Logging.Logger? logger) : base(logger) { }
|
||||
|
||||
public void Dispose() { }
|
||||
|
||||
public ILogger CreateLogger(string categoryName) => new Logger(logger);
|
||||
}
|
||||
|
||||
private class Logger : GlobalBase, ILogger
|
||||
{
|
||||
public Logger(Logging.Logger? logger) : base(logger) { }
|
||||
|
||||
public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)
|
||||
{
|
||||
if (logLevel <= LogLevel.Information)
|
||||
return;
|
||||
logger?.WriteLine("Puppeteer", formatter.Invoke(state, exception));
|
||||
}
|
||||
|
||||
public bool IsEnabled(LogLevel logLevel) => true;
|
||||
|
||||
public IDisposable? BeginScope<TState>(TState state) where TState : notnull => null;
|
||||
}
|
||||
|
||||
public ChromiumDownloadClient(GlobalBase clone) : base(clone)
|
||||
{
|
||||
_httpDownloadClient = new(this);
|
||||
if(_browser is null)
|
||||
_browser = StartBrowser(this.logger).Result;
|
||||
}
|
||||
|
||||
private readonly Regex _imageUrlRex = new(@"https?:\/\/.*\.(?:p?jpe?g|gif|a?png|bmp|avif|webp)(\?.*)?");
|
||||
internal override RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null)
|
||||
{
|
||||
return _imageUrlRex.IsMatch(url)
|
||||
? _httpDownloadClient.MakeRequestInternal(url, referrer)
|
||||
: MakeRequestBrowser(url, referrer, clickButton);
|
||||
}
|
||||
|
||||
private RequestResult MakeRequestBrowser(string url, string? referrer = null, string? clickButton = null)
|
||||
{
|
||||
IPage page = _browser.NewPageAsync().Result;
|
||||
page.DefaultTimeout = 10000;
|
||||
IResponse response;
|
||||
try
|
||||
{
|
||||
response = page.GoToAsync(url, WaitUntilNavigation.Networkidle0).Result;
|
||||
Log("Page loaded.");
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Could not load Page:\n{e.Message}");
|
||||
page.CloseAsync();
|
||||
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
|
||||
}
|
||||
|
||||
Stream stream = Stream.Null;
|
||||
HtmlDocument? document = null;
|
||||
|
||||
if (response.Headers.TryGetValue("Content-Type", out string? content))
|
||||
{
|
||||
if (content.Contains("text/html"))
|
||||
{
|
||||
if (clickButton is not null && page.QuerySelectorAsync(clickButton).Result is not null)
|
||||
page.ClickAsync(clickButton).Wait();
|
||||
string htmlString = page.GetContentAsync().Result;
|
||||
stream = new MemoryStream(Encoding.Default.GetBytes(htmlString));
|
||||
document = new ();
|
||||
document.LoadHtml(htmlString);
|
||||
}else if (content.Contains("image"))
|
||||
{
|
||||
stream = new MemoryStream(response.BufferAsync().Result);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
page.CloseAsync();
|
||||
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
|
||||
}
|
||||
|
||||
page.CloseAsync();
|
||||
return new RequestResult(response.Status, document, stream, false, "");
|
||||
}
|
||||
}
|
@ -1,44 +0,0 @@
|
||||
using System.Net;
|
||||
using HtmlAgilityPack;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
internal abstract class DownloadClient : GlobalBase
|
||||
{
|
||||
private readonly Dictionary<RequestType, DateTime> _lastExecutedRateLimit;
|
||||
|
||||
protected DownloadClient(GlobalBase clone) : base(clone)
|
||||
{
|
||||
this._lastExecutedRateLimit = new();
|
||||
}
|
||||
|
||||
public RequestResult MakeRequest(string url, RequestType requestType, string? referrer = null, string? clickButton = null)
|
||||
{
|
||||
if (!TrangaSettings.requestLimits.ContainsKey(requestType))
|
||||
{
|
||||
Log("RequestType not configured for rate-limit.");
|
||||
return new RequestResult(HttpStatusCode.NotAcceptable, null, Stream.Null);
|
||||
}
|
||||
|
||||
int rateLimit = TrangaSettings.userAgent == TrangaSettings.DefaultUserAgent
|
||||
? TrangaSettings.DefaultRequestLimits[requestType]
|
||||
: TrangaSettings.requestLimits[requestType];
|
||||
|
||||
TimeSpan timeBetweenRequests = TimeSpan.FromMinutes(1).Divide(rateLimit);
|
||||
_lastExecutedRateLimit.TryAdd(requestType, DateTime.Now.Subtract(timeBetweenRequests));
|
||||
|
||||
TimeSpan rateLimitTimeout = timeBetweenRequests.Subtract(DateTime.Now.Subtract(_lastExecutedRateLimit[requestType]));
|
||||
|
||||
if (rateLimitTimeout > TimeSpan.Zero)
|
||||
{
|
||||
Log($"Waiting {rateLimitTimeout.TotalSeconds} seconds");
|
||||
Thread.Sleep(rateLimitTimeout);
|
||||
}
|
||||
|
||||
RequestResult result = MakeRequestInternal(url, referrer, clickButton);
|
||||
_lastExecutedRateLimit[requestType] = DateTime.Now;
|
||||
return result;
|
||||
}
|
||||
|
||||
internal abstract RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null);
|
||||
}
|
@ -1,75 +0,0 @@
|
||||
using System.Net;
|
||||
using System.Net.Http.Headers;
|
||||
using HtmlAgilityPack;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
internal class HttpDownloadClient : DownloadClient
|
||||
{
|
||||
private static readonly HttpClient Client = new()
|
||||
{
|
||||
Timeout = TimeSpan.FromSeconds(10)
|
||||
};
|
||||
|
||||
public HttpDownloadClient(GlobalBase clone) : base(clone)
|
||||
{
|
||||
Client.DefaultRequestHeaders.TryAddWithoutValidation("User-Agent", TrangaSettings.userAgent);
|
||||
}
|
||||
|
||||
internal override RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null)
|
||||
{
|
||||
if(clickButton is not null)
|
||||
Log("Can not click button on static site.");
|
||||
HttpResponseMessage? response = null;
|
||||
while (response is null)
|
||||
{
|
||||
HttpRequestMessage requestMessage = new(HttpMethod.Get, url);
|
||||
if (referrer is not null)
|
||||
requestMessage.Headers.Referrer = new Uri(referrer);
|
||||
//Log($"Requesting {requestType} {url}");
|
||||
try
|
||||
{
|
||||
response = Client.Send(requestMessage);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
switch (e)
|
||||
{
|
||||
case TaskCanceledException:
|
||||
Log($"Request timed out {url}.\n\r{e}");
|
||||
return new RequestResult(HttpStatusCode.RequestTimeout, null, Stream.Null);
|
||||
case HttpRequestException:
|
||||
Log($"Request failed {url}\n\r{e}");
|
||||
return new RequestResult(HttpStatusCode.BadRequest, null, Stream.Null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
Log($"Request-Error {response.StatusCode}: {url}");
|
||||
return new RequestResult(response.StatusCode, null, Stream.Null);
|
||||
}
|
||||
|
||||
Stream stream = response.Content.ReadAsStream();
|
||||
|
||||
HtmlDocument? document = null;
|
||||
|
||||
if (response.Content.Headers.ContentType?.MediaType == "text/html")
|
||||
{
|
||||
StreamReader reader = new (stream);
|
||||
document = new ();
|
||||
document.LoadHtml(reader.ReadToEnd());
|
||||
stream.Position = 0;
|
||||
}
|
||||
|
||||
// Request has been redirected to another page. For example, it redirects directly to the results when there is only 1 result
|
||||
if (response.RequestMessage is not null && response.RequestMessage.RequestUri is not null)
|
||||
{
|
||||
return new RequestResult(response.StatusCode, document, stream, true,
|
||||
response.RequestMessage.RequestUri.AbsoluteUri);
|
||||
}
|
||||
|
||||
return new RequestResult(response.StatusCode, document, stream);
|
||||
}
|
||||
}
|
@ -1,260 +0,0 @@
|
||||
using System.IO.Compression;
|
||||
using System.Net;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Text.RegularExpressions;
|
||||
using SixLabors.ImageSharp;
|
||||
using SixLabors.ImageSharp.Formats.Jpeg;
|
||||
using SixLabors.ImageSharp.Processing;
|
||||
using SixLabors.ImageSharp.Processing.Processors.Binarization;
|
||||
using Tranga.Jobs;
|
||||
using static System.IO.UnixFileMode;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
/// <summary>
|
||||
/// Base-Class for all Connectors
|
||||
/// Provides some methods to be used by all Connectors, as well as a DownloadClient
|
||||
/// </summary>
|
||||
public abstract class MangaConnector : GlobalBase
|
||||
{
|
||||
internal DownloadClient downloadClient { get; init; } = null!;
|
||||
public string[] SupportedLanguages;
|
||||
public string[] BaseUris;
|
||||
|
||||
protected MangaConnector(GlobalBase clone, string name, string[] supportedLanguages, string[] baseUris) : base(clone)
|
||||
{
|
||||
this.name = name;
|
||||
this.SupportedLanguages = supportedLanguages;
|
||||
this.BaseUris = baseUris;
|
||||
Directory.CreateDirectory(TrangaSettings.coverImageCache);
|
||||
}
|
||||
|
||||
public string name { get; } //Name of the Connector (e.g. Website)
|
||||
|
||||
/// <summary>
|
||||
/// Returns all Publications with the given string.
|
||||
/// If the string is empty or null, returns all Publication of the Connector
|
||||
/// </summary>
|
||||
/// <param name="publicationTitle">Search-Query</param>
|
||||
/// <returns>Publications matching the query</returns>
|
||||
public abstract Manga[] GetManga(string publicationTitle = "");
|
||||
|
||||
public abstract Manga? GetMangaFromUrl(string url);
|
||||
|
||||
public abstract Manga? GetMangaFromId(string publicationId);
|
||||
|
||||
/// <summary>
|
||||
/// Returns all Chapters of the publication in the provided language.
|
||||
/// If the language is empty or null, returns all Chapters in all Languages.
|
||||
/// </summary>
|
||||
/// <param name="manga">Publication to get Chapters for</param>
|
||||
/// <param name="language">Language of the Chapters</param>
|
||||
/// <returns>Array of Chapters matching Publication and Language</returns>
|
||||
public abstract Chapter[] GetChapters(Manga manga, string language="en");
|
||||
|
||||
/// <summary>
|
||||
/// Updates the available Chapters of a Publication
|
||||
/// </summary>
|
||||
/// <param name="manga">Publication to check</param>
|
||||
/// <param name="language">Language to receive chapters for</param>
|
||||
/// <returns>List of Chapters that were previously not in collection</returns>
|
||||
public Chapter[] GetNewChapters(Manga manga, string language = "en")
|
||||
{
|
||||
Log($"Getting new Chapters for {manga}");
|
||||
Chapter[] allChapters = this.GetChapters(manga, language);
|
||||
if (allChapters.Length < 1)
|
||||
return Array.Empty<Chapter>();
|
||||
|
||||
Log($"Checking for duplicates {manga}");
|
||||
List<Chapter> newChaptersList = allChapters.Where(nChapter => nChapter.chapterNumber > manga.ignoreChaptersBelow
|
||||
&& !nChapter.CheckChapterIsDownloaded()).ToList();
|
||||
Log($"{newChaptersList.Count} new chapters. {manga}");
|
||||
try
|
||||
{
|
||||
Chapter latestChapterAvailable =
|
||||
allChapters.Max();
|
||||
manga.latestChapterAvailable =
|
||||
Convert.ToSingle(latestChapterAvailable.chapterNumber, numberFormatDecimalPoint);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log(e.ToString());
|
||||
Log($"Failed getting new Chapters for {manga}");
|
||||
}
|
||||
|
||||
return newChaptersList.ToArray();
|
||||
}
|
||||
|
||||
public abstract HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null);
|
||||
|
||||
/// <summary>
|
||||
/// Copies the already downloaded cover from cache to downloadLocation
|
||||
/// </summary>
|
||||
/// <param name="manga">Publication to retrieve Cover for</param>
|
||||
/// <param name="retries">Number of times to retry to copy the cover (or download it first)</param>
|
||||
public void CopyCoverFromCacheToDownloadLocation(Manga manga, int? retries = 1)
|
||||
{
|
||||
Log($"Copy cover {manga}");
|
||||
//Check if Publication already has a Folder and cover
|
||||
string publicationFolder = manga.CreatePublicationFolder(TrangaSettings.downloadLocation);
|
||||
DirectoryInfo dirInfo = new (publicationFolder);
|
||||
if (dirInfo.EnumerateFiles().Any(info => info.Name.Contains("cover", StringComparison.InvariantCultureIgnoreCase)))
|
||||
{
|
||||
Log($"Cover exists {manga}");
|
||||
return;
|
||||
}
|
||||
|
||||
string? fileInCache = manga.coverFileNameInCache;
|
||||
if (fileInCache is null || !File.Exists(fileInCache))
|
||||
{
|
||||
Log($"Cloning cover failed: File missing {fileInCache}.");
|
||||
if (retries > 0 && manga.coverUrl is not null)
|
||||
{
|
||||
Log($"Trying {retries} more times");
|
||||
SaveCoverImageToCache(manga.coverUrl, manga.internalId, 0);
|
||||
CopyCoverFromCacheToDownloadLocation(manga, --retries);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
string newFilePath = Path.Join(publicationFolder, $"cover.{Path.GetFileName(fileInCache).Split('.')[^1]}" );
|
||||
Log($"Cloning cover {fileInCache} -> {newFilePath}");
|
||||
File.Copy(fileInCache, newFilePath, true);
|
||||
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
File.SetUnixFileMode(newFilePath, GroupRead | GroupWrite | UserRead | UserWrite);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Downloads Image from URL and saves it to the given path(incl. fileName)
|
||||
/// </summary>
|
||||
/// <param name="imageUrl"></param>
|
||||
/// <param name="fullPath"></param>
|
||||
/// <param name="requestType">RequestType for Rate-Limit</param>
|
||||
/// <param name="referrer">referrer used in html request header</param>
|
||||
private HttpStatusCode DownloadImage(string imageUrl, string fullPath, RequestType requestType, string? referrer = null)
|
||||
{
|
||||
RequestResult requestResult = downloadClient.MakeRequest(imageUrl, requestType, referrer);
|
||||
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return requestResult.statusCode;
|
||||
if (requestResult.result == Stream.Null)
|
||||
return HttpStatusCode.NotFound;
|
||||
|
||||
FileStream fs = new (fullPath, FileMode.Create);
|
||||
requestResult.result.CopyTo(fs);
|
||||
fs.Close();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
|
||||
private void ProcessImage(string imagePath)
|
||||
{
|
||||
if (!TrangaSettings.bwImages && TrangaSettings.compression == 100)
|
||||
return;
|
||||
DateTime start = DateTime.Now;
|
||||
using Image image = Image.Load(imagePath);
|
||||
File.Delete(imagePath);
|
||||
if(TrangaSettings.bwImages)
|
||||
image.Mutate(i => i.ApplyProcessor(new AdaptiveThresholdProcessor()));
|
||||
image.SaveAsJpeg(imagePath, new JpegEncoder()
|
||||
{
|
||||
Quality = TrangaSettings.compression
|
||||
});
|
||||
Log($"Image processing took {DateTime.Now.Subtract(start):s\\.fff} B/W:{TrangaSettings.bwImages} Compression: {TrangaSettings.compression}");
|
||||
}
|
||||
|
||||
protected HttpStatusCode DownloadChapterImages(string[] imageUrls, Chapter chapter, RequestType requestType, string? referrer = null, ProgressToken? progressToken = null)
|
||||
{
|
||||
string saveArchiveFilePath = chapter.GetArchiveFilePath();
|
||||
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
Log($"Downloading Images for {saveArchiveFilePath}");
|
||||
if (progressToken is not null)
|
||||
progressToken.increments += imageUrls.Length;
|
||||
//Check if Publication Directory already exists
|
||||
string directoryPath = Path.GetDirectoryName(saveArchiveFilePath)!;
|
||||
if (!Directory.Exists(directoryPath))
|
||||
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
Directory.CreateDirectory(directoryPath,
|
||||
UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute );
|
||||
else
|
||||
Directory.CreateDirectory(directoryPath);
|
||||
|
||||
if (File.Exists(saveArchiveFilePath)) //Don't download twice.
|
||||
{
|
||||
progressToken?.Complete();
|
||||
return HttpStatusCode.Created;
|
||||
}
|
||||
|
||||
//Create a temporary folder to store images
|
||||
string tempFolder = Directory.CreateTempSubdirectory("trangatemp").FullName;
|
||||
|
||||
int chapterNum = 0;
|
||||
//Download all Images to temporary Folder
|
||||
if (imageUrls.Length == 0)
|
||||
{
|
||||
Log("No images found");
|
||||
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
File.SetUnixFileMode(saveArchiveFilePath, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute);
|
||||
Directory.Delete(tempFolder, true);
|
||||
progressToken?.Complete();
|
||||
return HttpStatusCode.NoContent;
|
||||
}
|
||||
|
||||
foreach (string imageUrl in imageUrls)
|
||||
{
|
||||
string extension = imageUrl.Split('.')[^1].Split('?')[0];
|
||||
Log($"Downloading image {chapterNum + 1:000}/{imageUrls.Length:000}");
|
||||
string imagePath = Path.Join(tempFolder, $"{chapterNum++}.{extension}");
|
||||
HttpStatusCode status = DownloadImage(imageUrl, imagePath, requestType, referrer);
|
||||
ProcessImage(imagePath);
|
||||
Log($"{saveArchiveFilePath} {chapterNum + 1:000}/{imageUrls.Length:000} {status}");
|
||||
if ((int)status < 200 || (int)status >= 300)
|
||||
{
|
||||
progressToken?.Complete();
|
||||
return status;
|
||||
}
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Complete();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
progressToken?.Increment();
|
||||
}
|
||||
|
||||
File.WriteAllText(Path.Join(tempFolder, "ComicInfo.xml"), chapter.GetComicInfoXmlString());
|
||||
|
||||
Log($"Creating archive {saveArchiveFilePath}");
|
||||
//ZIP-it and ship-it
|
||||
ZipFile.CreateFromDirectory(tempFolder, saveArchiveFilePath);
|
||||
chapter.CreateChapterMarker();
|
||||
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
File.SetUnixFileMode(saveArchiveFilePath, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute | OtherRead | OtherExecute);
|
||||
Directory.Delete(tempFolder, true); //Cleanup
|
||||
|
||||
Log("Created archive.");
|
||||
progressToken?.Complete();
|
||||
Log("Download complete.");
|
||||
return HttpStatusCode.OK;
|
||||
}
|
||||
|
||||
protected string SaveCoverImageToCache(string url, string mangaInternalId, RequestType requestType)
|
||||
{
|
||||
Regex urlRex = new (@"https?:\/\/((?:[a-zA-Z0-9-]+\.)+[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+))");
|
||||
//https?:\/\/[a-zA-Z0-9-]+\.([a-zA-Z0-9-]+\.[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+)) for only second level domains
|
||||
Match match = urlRex.Match(url);
|
||||
string filename = $"{match.Groups[1].Value}-{mangaInternalId}.{match.Groups[3].Value}";
|
||||
string saveImagePath = Path.Join(TrangaSettings.coverImageCache, filename);
|
||||
|
||||
if (File.Exists(saveImagePath))
|
||||
return saveImagePath;
|
||||
|
||||
RequestResult coverResult = downloadClient.MakeRequest(url, requestType);
|
||||
using MemoryStream ms = new();
|
||||
coverResult.result.CopyTo(ms);
|
||||
Directory.CreateDirectory(TrangaSettings.coverImageCache);
|
||||
File.WriteAllBytes(saveImagePath, ms.ToArray());
|
||||
Log($"Saving cover to {saveImagePath}");
|
||||
return saveImagePath;
|
||||
}
|
||||
}
|
@ -1,56 +0,0 @@
|
||||
using System.Data;
|
||||
using System.Diagnostics;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class MangaConnectorJsonConverter : JsonConverter
|
||||
{
|
||||
private GlobalBase _clone;
|
||||
private readonly HashSet<MangaConnector> _connectors;
|
||||
|
||||
internal MangaConnectorJsonConverter(GlobalBase clone, HashSet<MangaConnector> connectors)
|
||||
{
|
||||
this._clone = clone;
|
||||
this._connectors = connectors;
|
||||
}
|
||||
|
||||
public override bool CanConvert(Type objectType)
|
||||
{
|
||||
return (objectType == typeof(MangaConnector));
|
||||
}
|
||||
|
||||
public override object ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer)
|
||||
{
|
||||
JObject jo = JObject.Load(reader);
|
||||
string? connectorName = jo.Value<string>("name");
|
||||
if (connectorName is null)
|
||||
throw new ConstraintException("Name can not be null.");
|
||||
return connectorName switch
|
||||
{
|
||||
"MangaDex" => this._connectors.First(c => c is MangaDex),
|
||||
"Manganato" => this._connectors.First(c => c is Manganato),
|
||||
"MangaKatana" => this._connectors.First(c => c is MangaKatana),
|
||||
"Mangasee" => this._connectors.First(c => c is Mangasee),
|
||||
"Mangaworld" => this._connectors.First(c => c is Mangaworld),
|
||||
"Bato" => this._connectors.First(c => c is Bato),
|
||||
"Manga4Life" => this._connectors.First(c => c is MangaLife),
|
||||
"ManhuaPlus" => this._connectors.First(c => c is ManhuaPlus),
|
||||
"MangaHere" => this._connectors.First(c => c is MangaHere),
|
||||
"AsuraToon" => this._connectors.First(c => c is AsuraToon),
|
||||
"Weebcentral" => this._connectors.First(c => c is Weebcentral),
|
||||
_ => throw new UnreachableException($"Could not find Connector with name {connectorName}")
|
||||
};
|
||||
}
|
||||
|
||||
public override bool CanWrite => false;
|
||||
|
||||
/// <summary>
|
||||
/// Don't call this
|
||||
/// </summary>
|
||||
public override void WriteJson(JsonWriter writer, object? value, JsonSerializer serializer)
|
||||
{
|
||||
throw new Exception("Dont call this");
|
||||
}
|
||||
}
|
@ -1,305 +0,0 @@
|
||||
using System.Net;
|
||||
using System.Text.Json.Nodes;
|
||||
using System.Text.RegularExpressions;
|
||||
using Tranga.Jobs;
|
||||
using JsonSerializer = System.Text.Json.JsonSerializer;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
public class MangaDex : MangaConnector
|
||||
{
|
||||
//https://api.mangadex.org/docs/3-enumerations/#language-codes--localization
|
||||
//https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes
|
||||
//https://gist.github.com/Josantonius/b455e315bc7f790d14b136d61d9ae469
|
||||
public MangaDex(GlobalBase clone) : base(clone, "MangaDex", ["en","pt","pt-br","it","de","ru","aa","ab","ae","af","ak","am","an","ar-ae","ar-bh","ar-dz","ar-eg","ar-iq","ar-jo","ar-kw","ar-lb","ar-ly","ar-ma","ar-om","ar-qa","ar-sa","ar-sy","ar-tn","ar-ye","ar","as","av","ay","az","ba","be","bg","bh","bi","bm","bn","bo","br","bs","ca","ce","ch","co","cr","cs","cu","cv","cy","da","de-at","de-ch","de-de","de-li","de-lu","div","dv","dz","ee","el","en-au","en-bz","en-ca","en-cb","en-gb","en-ie","en-jm","en-nz","en-ph","en-tt","en-us","en-za","en-zw","eo","es-ar","es-bo","es-cl","es-co","es-cr","es-do","es-ec","es-es","es-gt","es-hn","es-la","es-mx","es-ni","es-pa","es-pe","es-pr","es-py","es-sv","es-us","es-uy","es-ve","es","et","eu","fa","ff","fi","fj","fo","fr-be","fr-ca","fr-ch","fr-fr","fr-lu","fr-mc","fr","fy","ga","gd","gl","gn","gu","gv","ha","he","hi","ho","hr-ba","hr-hr","hr","ht","hu","hy","hz","ia","id","ie","ig","ii","ik","in","io","is","it-ch","it-it","iu","iw","ja","ja-ro","ji","jv","jw","ka","kg","ki","kj","kk","kl","km","kn","ko","ko-ro","kr","ks","ku","kv","kw","ky","kz","la","lb","lg","li","ln","lo","ls","lt","lu","lv","mg","mh","mi","mk","ml","mn","mo","mr","ms-bn","ms-my","ms","mt","my","na","nb","nd","ne","ng","nl-be","nl-nl","nl","nn","no","nr","ns","nv","ny","oc","oj","om","or","os","pa","pi","pl","ps","pt-pt","qu-bo","qu-ec","qu-pe","qu","rm","rn","ro","rw","sa","sb","sc","sd","se-fi","se-no","se-se","se","sg","sh","si","sk","sl","sm","sn","so","sq","sr-ba","sr-sp","sr","ss","st","su","sv-fi","sv-se","sv","sw","sx","syr","ta","te","tg","th","ti","tk","tl","tn","to","tr","ts","tt","tw","ty","ug","uk","ur","us","uz","ve","vi","vo","wa","wo","xh","yi","yo","za","zh-cn","zh-hk","zh-mo","zh-ro","zh-sg","zh-tw","zh","zu"], ["mangadex.org"])
|
||||
{
|
||||
this.downloadClient = new HttpDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term={publicationTitle}");
|
||||
const int limit = 100; //How many values we want returned at once
|
||||
int offset = 0; //"Page"
|
||||
int total = int.MaxValue; //How many total results are there, is updated on first request
|
||||
HashSet<Manga> retManga = new();
|
||||
int loadedPublicationData = 0;
|
||||
List<JsonNode> results = new();
|
||||
|
||||
//Request all search-results
|
||||
while (offset < total) //As long as we haven't requested all "Pages"
|
||||
{
|
||||
//Request next Page
|
||||
RequestResult requestResult = downloadClient.MakeRequest(
|
||||
$"https://api.mangadex.org/manga?limit={limit}&title={publicationTitle}&offset={offset}" +
|
||||
$"&contentRating%5B%5D=safe&contentRating%5B%5D=suggestive&contentRating%5B%5D=erotica" +
|
||||
$"&contentRating%5B%5D=pornographic" +
|
||||
$"&includes%5B%5D=manga&includes%5B%5D=cover_art&includes%5B%5D=author" +
|
||||
$"&includes%5B%5D=artist&includes%5B%5D=tag", RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
break;
|
||||
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
|
||||
|
||||
offset += limit;
|
||||
if (result is null)
|
||||
break;
|
||||
|
||||
if(result.ContainsKey("total"))
|
||||
total = result["total"]!.GetValue<int>(); //Update the total number of Publications
|
||||
else continue;
|
||||
|
||||
if (result.ContainsKey("data"))
|
||||
results.AddRange(result["data"]!.AsArray()!);//Manga-data-Array
|
||||
}
|
||||
|
||||
foreach (JsonNode mangaNode in results)
|
||||
{
|
||||
Log($"Getting publication data. {++loadedPublicationData}/{total}");
|
||||
if(MangaFromJsonObject(mangaNode.AsObject()) is { } manga)
|
||||
retManga.Add(manga); //Add Publication (Manga) to result
|
||||
}
|
||||
Log($"Retrieved {retManga.Count} publications. Term={publicationTitle}");
|
||||
return retManga.ToArray();
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest($"https://api.mangadex.org/manga/{publicationId}?includes%5B%5D=manga&includes%5B%5D=cover_art&includes%5B%5D=author&includes%5B%5D=artist&includes%5B%5D=tag", RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return null;
|
||||
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
|
||||
if(result is not null)
|
||||
return MangaFromJsonObject(result["data"]!.AsObject());
|
||||
return null;
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
Regex idRex = new (@"https:\/\/mangadex.org\/title\/([A-z0-9-]*)\/.*");
|
||||
string id = idRex.Match(url).Groups[1].Value;
|
||||
Log($"Got id {id} from {url}");
|
||||
return GetMangaFromId(id);
|
||||
}
|
||||
|
||||
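//Maps one MangaDex "manga" JSON object to a Manga: prefers the English title and description,
//collects alt-titles, external links, tags, the cover file name and author/artist relationships.
//Returns null when a required field (id, attributes, title, description, relationships, cover) is missing.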
private Manga? MangaFromJsonObject(JsonObject manga)
|
||||
{
|
||||
if (!manga.TryGetPropertyValue("id", out JsonNode? idNode))
|
||||
return null;
|
||||
string publicationId = idNode!.GetValue<string>();
|
||||
|
||||
if (!manga.TryGetPropertyValue("attributes", out JsonNode? attributesNode))
|
||||
return null;
|
||||
JsonObject attributes = attributesNode!.AsObject();
|
||||
|
||||
if (!attributes.TryGetPropertyValue("title", out JsonNode? titleNode))
|
||||
return null;
|
||||
string title = titleNode!.AsObject().ContainsKey("en") switch
|
||||
{
|
||||
true => titleNode.AsObject()["en"]!.GetValue<string>(),
|
||||
false => titleNode.AsObject().First().Value!.GetValue<string>()
|
||||
};
|
||||
|
||||
Dictionary<string, string> altTitlesDict = new();
|
||||
if (attributes.TryGetPropertyValue("altTitles", out JsonNode? altTitlesNode))
|
||||
{
|
||||
foreach (JsonNode? altTitleNode in altTitlesNode!.AsArray())
|
||||
{
|
||||
JsonObject altTitleNodeObject = altTitleNode!.AsObject();
|
||||
altTitlesDict.TryAdd(altTitleNodeObject.First().Key, altTitleNodeObject.First().Value!.GetValue<string>());
|
||||
}
|
||||
}
|
||||
|
||||
if (!attributes.TryGetPropertyValue("description", out JsonNode? descriptionNode))
|
||||
return null;
|
||||
string description = descriptionNode!.AsObject().ContainsKey("en") switch
|
||||
{
|
||||
true => descriptionNode.AsObject()["en"]!.GetValue<string>(),
|
||||
false => descriptionNode.AsObject().FirstOrDefault().Value?.GetValue<string>() ?? ""
|
||||
};
|
||||
|
||||
Dictionary<string, string> linksDict = new();
|
||||
if (attributes.TryGetPropertyValue("links", out JsonNode? linksNode) && linksNode is not null)
|
||||
foreach (KeyValuePair<string, JsonNode?> linkKv in linksNode!.AsObject())
|
||||
linksDict.TryAdd(linkKv.Key, linkKv.Value.GetValue<string>());
|
||||
|
||||
string? originalLanguage =
|
||||
attributes.TryGetPropertyValue("originalLanguage", out JsonNode? originalLanguageNode) switch
|
||||
{
|
||||
true => originalLanguageNode?.GetValue<string>(),
|
||||
false => null
|
||||
};
|
||||
|
||||
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
|
||||
if (attributes.TryGetPropertyValue("status", out JsonNode? statusNode))
|
||||
{
|
||||
releaseStatus = statusNode?.GetValue<string>().ToLower() switch
|
||||
{
|
||||
"ongoing" => Manga.ReleaseStatusByte.Continuing,
|
||||
"completed" => Manga.ReleaseStatusByte.Completed,
|
||||
"hiatus" => Manga.ReleaseStatusByte.OnHiatus,
|
||||
"cancelled" => Manga.ReleaseStatusByte.Cancelled,
|
||||
_ => Manga.ReleaseStatusByte.Unreleased
|
||||
};
|
||||
}
|
||||
|
||||
int? year = attributes.TryGetPropertyValue("year", out JsonNode? yearNode) switch
|
||||
{
|
||||
true => yearNode?.GetValue<int>(),
|
||||
false => null
|
||||
};
|
||||
|
||||
HashSet<string> tags = new(128);
|
||||
if (attributes.TryGetPropertyValue("tags", out JsonNode? tagsNode))
|
||||
foreach (JsonNode? tagNode in tagsNode!.AsArray())
|
||||
tags.Add(tagNode!["attributes"]!["name"]!["en"]!.GetValue<string>());
|
||||
|
||||
|
||||
if (!manga.TryGetPropertyValue("relationships", out JsonNode? relationshipsNode))
|
||||
return null;
|
||||
|
||||
JsonNode? coverNode = relationshipsNode!.AsArray()
|
||||
.FirstOrDefault(rel => rel!["type"]!.GetValue<string>().Equals("cover_art"));
|
||||
if (coverNode is null)
|
||||
return null;
|
||||
string fileName = coverNode["attributes"]!["fileName"]!.GetValue<string>();
|
||||
string coverUrl = $"https://uploads.mangadex.org/covers/{publicationId}/{fileName}";
|
||||
string coverCacheName = SaveCoverImageToCache(coverUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
List<string> authors = new();
|
||||
JsonNode?[] authorNodes = relationshipsNode.AsArray()
|
||||
.Where(rel => rel!["type"]!.GetValue<string>().Equals("author") || rel!["type"]!.GetValue<string>().Equals("artist")).ToArray();
|
||||
foreach (JsonNode? authorNode in authorNodes)
|
||||
{
|
||||
string authorName = authorNode!["attributes"]!["name"]!.GetValue<string>();
|
||||
if(!authors.Contains(authorName))
|
||||
authors.Add(authorName);
|
||||
}
|
||||
|
||||
Manga pub = new(
|
||||
this,
|
||||
title,
|
||||
authors,
|
||||
description,
|
||||
altTitlesDict,
|
||||
tags.ToArray(),
|
||||
coverUrl,
|
||||
coverCacheName,
|
||||
linksDict,
|
||||
year,
|
||||
originalLanguage,
|
||||
publicationId,
|
||||
releaseStatus,
|
||||
$"https://mangadex.org/title/{publicationId}"
|
||||
);
|
||||
AddMangaToCache(pub);
|
||||
return pub;
|
||||
}
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
const int limit = 100; //How many values we want returned at once
|
||||
int offset = 0; //"Page"
|
||||
int total = int.MaxValue; //How many total results are there, is updated on first request
|
||||
List<Chapter> chapters = new();
|
||||
//As long as we haven't requested all "Pages"
|
||||
while (offset < total)
|
||||
{
|
||||
//Request next "Page"
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(
|
||||
$"https://api.mangadex.org/manga/{manga.publicationId}/feed?limit={limit}&offset={offset}&translatedLanguage%5B%5D={language}&contentRating%5B%5D=safe&contentRating%5B%5D=suggestive&contentRating%5B%5D=erotica&contentRating%5B%5D=pornographic", RequestType.MangaDexFeed);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
break;
|
||||
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
|
||||
|
||||
offset += limit;
|
||||
if (result is null)
|
||||
break;
|
||||
|
||||
total = result["total"]!.GetValue<int>();
|
||||
JsonArray chaptersInResult = result["data"]!.AsArray();
|
||||
//Loop through all Chapters in result and extract information from JSON
|
||||
foreach (JsonNode? jsonNode in chaptersInResult)
|
||||
{
|
||||
JsonObject chapter = (JsonObject)jsonNode!;
|
||||
JsonObject attributes = chapter["attributes"]!.AsObject();
|
||||
|
||||
string chapterId = chapter["id"]!.GetValue<string>();
|
||||
|
||||
string? title = attributes.ContainsKey("title") && attributes["title"] is not null
|
||||
? attributes["title"]!.GetValue<string>()
|
||||
: null;
|
||||
|
||||
string? volume = attributes.ContainsKey("volume") && attributes["volume"] is not null
|
||||
? attributes["volume"]!.GetValue<string>()
|
||||
: null;
|
||||
|
||||
string chapterNum = attributes.ContainsKey("chapter") && attributes["chapter"] is not null
|
||||
? attributes["chapter"]!.GetValue<string>()
|
||||
: "null";
|
||||
|
||||
|
||||
if (attributes.ContainsKey("pages") && attributes["pages"] is not null &&
|
||||
attributes["pages"]!.GetValue<int>() < 1)
|
||||
{
|
||||
Log($"Skipping {chapterId} Vol.{volume} Ch.{chapterNum} {title} because it has no pages or is externally linked.");
|
||||
continue;
|
||||
}
|
||||
|
||||
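//Parse volume/chapter numbers and only add a chapter if its volume+chapter combination
//has not been added yet.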
try
|
||||
{
|
||||
if(!chapters.Any(chp =>
|
||||
chp.volumeNumber.Equals(float.Parse(volume??"0", numberFormatDecimalPoint)) &&
|
||||
chp.chapterNumber.Equals(float.Parse(chapterNum, numberFormatDecimalPoint))))
|
||||
chapters.Add(new Chapter(manga, title, volume, chapterNum, chapterId, chapterId));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNum}: {e.Message}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
//Request URLs for Chapter-Images
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest($"https://api.mangadex.org/at-home/server/{chapter.url}?forcePort443=false", RequestType.MangaDexImage);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
|
||||
if (result is null)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return HttpStatusCode.NoContent;
|
||||
}
|
||||
|
||||
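//The at-home response contains a baseUrl, a chapter hash and the image file names;
//full image urls are assembled as {baseUrl}/data/{hash}/{fileName} below.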
string baseUrl = result["baseUrl"]!.GetValue<string>();
|
||||
string hash = result["chapter"]!["hash"]!.GetValue<string>();
|
||||
JsonArray imageFileNames = result["chapter"]!["data"]!.AsArray();
|
||||
//Loop through all imageNames and construct urls (imageUrl)
|
||||
HashSet<string> imageUrls = new();
|
||||
foreach (JsonNode? image in imageFileNames)
|
||||
imageUrls.Add($"{baseUrl}/data/{hash}/{image!.GetValue<string>()}");
|
||||
|
||||
//Download Chapter-Images
|
||||
return DownloadChapterImages(imageUrls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
|
||||
}
|
||||
}
|
@ -1,208 +0,0 @@
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class MangaHere : MangaConnector
|
||||
{
|
||||
public MangaHere(GlobalBase clone) : base(clone, "MangaHere", ["en"], ["www.mangahere.cc"])
|
||||
{
|
||||
this.downloadClient = new ChromiumDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string sanitizedTitle = string.Join('+', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
|
||||
string requestUrl = $"https://www.mangahere.cc/search?title={sanitizedTitle}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
|
||||
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
|
||||
return publications;
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
{
|
||||
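//A result container with a "search-keywords" child is treated as the "no results" page.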
if (document.DocumentNode.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),' container ')]").Any(node => node.ChildNodes.Any(cNode => cNode.HasClass("search-keywords"))))
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
List<string> urls = document.DocumentNode
|
||||
.SelectNodes("//a[contains(@href, '/manga/') and not(contains(@href, '.html'))]")
|
||||
.Select(thumb => $"https://www.mangahere.cc{thumb.GetAttributeValue("href", "")}").Distinct().ToList();
|
||||
|
||||
HashSet<Manga> ret = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Manga? manga = GetMangaFromUrl(url);
|
||||
if (manga is not null)
|
||||
ret.Add((Manga)manga);
|
||||
}
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
return GetMangaFromUrl($"https://www.mangahere.cc/manga/{publicationId}");
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
|
||||
return null;
|
||||
|
||||
Regex idRex = new (@"https:\/\/www\.mangahere\.[a-z]{0,63}\/manga\/([0-9A-z\-]+).*");
|
||||
string id = idRex.Match(url).Groups[1].Value;
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, id, url);
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
string originalLanguage = "", status = "";
|
||||
Dictionary<string, string> altTitles = new(), links = new();
|
||||
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
|
||||
|
||||
//We don't fetch the real poster because of same-origin restrictions; a placeholder image is used instead.
//HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//img[contains(concat(' ',normalize-space(@class),' '),' detail-info-cover-img ')]");
string posterUrl = "http://static.mangahere.cc/v20230914/mangahere/images/nopicture.jpg";
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//span[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-title-font ')]");
|
||||
string sortName = titleNode.InnerText;
|
||||
|
||||
List<string> authors = document.DocumentNode
|
||||
.SelectNodes("//p[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-say ')]/a")
|
||||
.Select(node => node.InnerText)
|
||||
.ToList();
|
||||
|
||||
HashSet<string> tags = document.DocumentNode
|
||||
.SelectNodes("//p[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-tag-list ')]/a")
|
||||
.Select(node => node.InnerText)
|
||||
.ToHashSet();
|
||||
|
||||
status = document.DocumentNode.SelectSingleNode("//span[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-title-tip ')]").InnerText;
|
||||
switch (status.ToLower())
|
||||
{
|
||||
case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
|
||||
case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
|
||||
case "discontinued": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
|
||||
case "complete": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
|
||||
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
|
||||
}
|
||||
|
||||
HtmlNode descriptionNode = document.DocumentNode
|
||||
.SelectSingleNode("//p[contains(concat(' ',normalize-space(@class),' '),' fullcontent ')]");
|
||||
string description = descriptionNode.InnerText;
|
||||
|
||||
Manga manga = new(this, sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
|
||||
coverFileNameInCache, links,
|
||||
null, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
string requestUrl = $"https://www.mangahere.cc/manga/{manga.publicationId}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
|
||||
return Array.Empty<Chapter>();
|
||||
|
||||
List<string> urls = requestResult.htmlDocument.DocumentNode.SelectNodes("//div[@id='list-1']/ul//li//a[contains(@href, '/manga/')]")
|
||||
.Select(node => node.GetAttributeValue("href", "")).ToList();
|
||||
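//Chapter urls have the form /manga/<id>/v<volume>/c<chapter>/...; a volume of "TBD" is mapped to "0".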
Regex chapterRex = new(@".*\/manga\/[a-zA-Z0-9\-\._\~\!\$\&\'\(\)\*\+\,\;\=\:\@]+\/v([0-9(TBD)]+)\/c([0-9\.]+)\/.*");
|
||||
|
||||
List<Chapter> chapters = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Match rexMatch = chapterRex.Match(url);
|
||||
|
||||
string volumeNumber = rexMatch.Groups[1].Value == "TBD" ? "0" : rexMatch.Groups[1].Value;
|
||||
string chapterNumber = rexMatch.Groups[2].Value;
|
||||
string fullUrl = $"https://www.mangahere.cc{url}";
|
||||
|
||||
try
|
||||
{
|
||||
chapters.Add(new Chapter(manga, "", volumeNumber, chapterNumber, fullUrl));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
|
||||
}
|
||||
}
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
|
||||
List<string> imageUrls = new();
|
||||
|
||||
int downloaded = 1;
|
||||
int images = 1;
|
||||
string url = string.Join('/', chapter.url.Split('/')[..^1]);
|
||||
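//MangaHere serves one reader page per image ({chapterUrl}/{n}.html), so each page is requested in turn;
//the total page count is read from the highest data-page attribute found on the page.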
do
|
||||
{
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest($"{url}/{downloaded}.html", RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return HttpStatusCode.InternalServerError;
|
||||
}
|
||||
|
||||
imageUrls.AddRange(ParseImageUrlsFromHtml(requestResult.htmlDocument));
|
||||
|
||||
images = requestResult.htmlDocument.DocumentNode
|
||||
.SelectNodes("//a[contains(@href, '/manga/')]")
|
||||
.MaxBy(node => node.GetAttributeValue("data-page", 0))!.GetAttributeValue("data-page", 0);
|
||||
logger?.WriteLine($"MangaHere speciality: Get Image-url {downloaded}/{images}");
|
||||
if (progressToken is not null)
|
||||
{
|
||||
progressToken.increments = images * 2;//we also have to download the images later
|
||||
progressToken.Increment();
|
||||
}
|
||||
} while (downloaded++ <= images);
|
||||
|
||||
if (progressToken is not null)
progressToken.increments = images;//reset to the page count; DownloadChapterImages increases it again by the number of image urls
return DownloadChapterImages(imageUrls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
|
||||
}
|
||||
|
||||
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
|
||||
{
|
||||
return document.DocumentNode
|
||||
.SelectNodes("//img[contains(concat(' ',normalize-space(@class),' '),' reader-main-img ')]")
|
||||
.Select(node =>
|
||||
{
|
||||
string url = node.GetAttributeValue("src", "");
|
||||
return url.StartsWith("//") ? $"https:{url}" : url;
|
||||
})
|
||||
.ToArray();
|
||||
}
|
||||
}
|
@ -1,245 +0,0 @@
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class MangaKatana : MangaConnector
|
||||
{
|
||||
public MangaKatana(GlobalBase clone) : base(clone, "MangaKatana", ["en"], ["mangakatana.com"])
|
||||
{
|
||||
this.downloadClient = new HttpDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string sanitizedTitle = string.Join("%20", Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
|
||||
string requestUrl = $"https://mangakatana.com/?search={sanitizedTitle}&search_by=book_name";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
// ReSharper disable once MergeIntoPattern
|
||||
// If a single result is found, the user will be redirected to the results directly instead of a result page
|
||||
if(requestResult.hasBeenRedirected
|
||||
&& requestResult.redirectedToUrl is not null
|
||||
&& requestResult.redirectedToUrl.Contains("mangakatana.com/manga"))
|
||||
{
|
||||
return new [] { ParseSinglePublicationFromHtml(requestResult.result, requestResult.redirectedToUrl.Split('/')[^1], requestResult.redirectedToUrl) };
|
||||
}
|
||||
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.result);
|
||||
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
|
||||
return publications;
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
return GetMangaFromUrl($"https://mangakatana.com/manga/{publicationId}");
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return null;
|
||||
return ParseSinglePublicationFromHtml(requestResult.result, url.Split('/')[^1], url);
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(Stream html)
|
||||
{
|
||||
StreamReader reader = new(html);
|
||||
string htmlString = reader.ReadToEnd();
|
||||
HtmlDocument document = new();
|
||||
document.LoadHtml(htmlString);
|
||||
IEnumerable<HtmlNode> searchResults = document.DocumentNode.SelectNodes("//*[@id='book_list']/div");
|
||||
if (searchResults is null || !searchResults.Any())
|
||||
return Array.Empty<Manga>();
|
||||
List<string> urls = new();
|
||||
foreach (HtmlNode mangaResult in searchResults)
|
||||
{
|
||||
urls.Add(mangaResult.Descendants("a").First().GetAttributes()
|
||||
.First(a => a.Name == "href").Value);
|
||||
}
|
||||
|
||||
HashSet<Manga> ret = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Manga? manga = GetMangaFromUrl(url);
|
||||
if (manga is not null)
|
||||
ret.Add((Manga)manga);
|
||||
}
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(Stream html, string publicationId, string websiteUrl)
|
||||
{
|
||||
StreamReader reader = new(html);
|
||||
string htmlString = reader.ReadToEnd();
|
||||
HtmlDocument document = new();
|
||||
document.LoadHtml(htmlString);
|
||||
Dictionary<string, string> altTitles = new();
|
||||
Dictionary<string, string>? links = null;
|
||||
HashSet<string> tags = new();
|
||||
string[] authors = Array.Empty<string>();
|
||||
string originalLanguage = "";
|
||||
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
|
||||
|
||||
HtmlNode infoNode = document.DocumentNode.SelectSingleNode("//*[@id='single_book']");
|
||||
string sortName = infoNode.Descendants("h1").First(n => n.HasClass("heading")).InnerText;
|
||||
HtmlNode infoTable = infoNode.SelectSingleNode("//*[@id='single_book']/div[2]/div/ul");
|
||||
|
||||
foreach (HtmlNode row in infoTable.Descendants("li"))
|
||||
{
|
||||
string key = row.SelectNodes("div").First().InnerText.ToLower();
|
||||
string value = row.SelectNodes("div").Last().InnerText;
|
||||
string keySanitized = string.Concat(Regex.Matches(key, "[a-z]"));
|
||||
|
||||
switch (keySanitized)
|
||||
{
|
||||
case "altnames":
|
||||
string[] alts = value.Split(" ; ");
|
||||
for (int i = 0; i < alts.Length; i++)
|
||||
altTitles.Add(i.ToString(), alts[i]);
|
||||
break;
|
||||
case "authorsartists":
|
||||
authors = value.Split(',');
|
||||
break;
|
||||
case "status":
|
||||
switch (value.ToLower())
|
||||
{
|
||||
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
|
||||
case "completed": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
|
||||
}
|
||||
break;
|
||||
case "genres":
|
||||
tags = row.SelectNodes("div").Last().Descendants("a").Select(a => a.InnerText).ToHashSet();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
string posterUrl = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[1]/div").Descendants("img").First()
|
||||
.GetAttributes().First(a => a.Name == "src").Value;
|
||||
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
string description = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[3]/p").InnerText;
|
||||
while (description.StartsWith('\n'))
|
||||
description = description.Substring(1);
|
||||
|
||||
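//The "updateAt" cell holds either a relative time ("... ago") or an absolute date whose last
//'-'-separated token is the year; for relative times the current year is used as a fallback.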
int year = DateTime.Now.Year;
|
||||
string yearString = infoTable.Descendants("div").First(d => d.HasClass("updateAt"))
|
||||
.InnerText.Split('-')[^1];
|
||||
|
||||
if(yearString.Contains("ago") == false)
|
||||
{
|
||||
year = Convert.ToInt32(yearString);
|
||||
}
|
||||
|
||||
Manga manga = new (this, sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
string requestUrl = $"https://mangakatana.com/manga/{manga.publicationId}";
|
||||
// Leaving this in for verification if the page exists
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Chapter>();
|
||||
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestUrl);
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
private List<Chapter> ParseChaptersFromHtml(Manga manga, string mangaUrl)
|
||||
{
|
||||
// Using HtmlWeb will include the chapters since they are loaded with js
|
||||
HtmlWeb web = new();
|
||||
HtmlDocument document = web.Load(mangaUrl);
|
||||
|
||||
List<Chapter> ret = new();
|
||||
|
||||
HtmlNode chapterList = document.DocumentNode.SelectSingleNode("//div[contains(@class, 'chapters')]/table/tbody");
|
||||
|
||||
Regex volumeRex = new(@"[0-9a-z\-\.]+\/[0-9a-z\-]*v([0-9\.]+)");
|
||||
Regex chapterNumRex = new(@"[0-9a-z\-\.]+\/[0-9a-z\-]*c([0-9\.]+)");
|
||||
Regex chapterNameRex = new(@"Chapter [0-9\.]+:? (.*)");
|
||||
|
||||
foreach (HtmlNode chapterInfo in chapterList.Descendants("tr"))
|
||||
{
|
||||
string fullString = chapterInfo.Descendants("a").First().InnerText;
|
||||
string url = chapterInfo.Descendants("a").First()
|
||||
.GetAttributeValue("href", "");
|
||||
|
||||
string? volumeNumber = volumeRex.IsMatch(url) ? volumeRex.Match(url).Groups[1].Value : null;
|
||||
string chapterNumber = chapterNumRex.Match(url).Groups[1].Value;
|
||||
string chapterName = chapterNameRex.Match(fullString).Groups[1].Value;
|
||||
try
|
||||
{
|
||||
ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
string requestUrl = chapter.url;
|
||||
// Leaving this in to check if the page exists
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
|
||||
string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
|
||||
|
||||
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken);
|
||||
}
|
||||
|
||||
private string[] ParseImageUrlsFromHtml(string mangaUrl)
|
||||
{
|
||||
HtmlWeb web = new();
|
||||
HtmlDocument document = web.Load(mangaUrl);
|
||||
|
||||
// Images are loaded dynamically, but the urls are present in a piece of js code on the page
|
||||
string js = document.DocumentNode.SelectSingleNode("//script[contains(., 'data-src')]").InnerText
|
||||
.Replace("\r", "")
|
||||
.Replace("\n", "")
|
||||
.Replace("\t", "");
|
||||
|
||||
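//The image urls are embedded in a js array literal (var thzq=['...',...]); the regex below captures
//the array contents, quotes are stripped and the result is split on commas.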
// ReSharper disable once StringLiteralTypo
|
||||
string regexPat = @"(var thzq=\[')(.*)(,];function)";
|
||||
var group = Regex.Matches(js, regexPat).First().Groups[2].Value.Replace("'", "");
|
||||
var urls = group.Split(',');
|
||||
|
||||
return urls;
|
||||
}
|
||||
}
|
@ -1,206 +0,0 @@
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class MangaLife : MangaConnector
|
||||
{
|
||||
public MangaLife(GlobalBase clone) : base(clone, "Manga4Life", ["en"], ["manga4life.com"])
|
||||
{
|
||||
this.downloadClient = new ChromiumDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string sanitizedTitle = WebUtility.UrlEncode(publicationTitle);
|
||||
string requestUrl = $"https://manga4life.com/search/?name={sanitizedTitle}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
return Array.Empty<Manga>();
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
|
||||
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
|
||||
return publications;
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
return GetMangaFromUrl($"https://manga4life.com/manga/{publicationId}");
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
Regex publicationIdRex = new(@"https:\/\/(www\.)?manga4life.com\/manga\/(.*)(\/.*)*");
|
||||
string publicationId = publicationIdRex.Match(url).Groups[2].Value;
|
||||
|
||||
RequestResult requestResult = this.downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if(requestResult.htmlDocument is not null)
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
|
||||
return null;
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
{
|
||||
HtmlNode resultsNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']/div[last()]/div[1]/div");
|
||||
if (resultsNode.Descendants("div").Count() == 1 && resultsNode.Descendants("div").First().HasClass("NoResults"))
|
||||
{
|
||||
Log("No results.");
|
||||
return Array.Empty<Manga>();
|
||||
}
|
||||
Log($"{resultsNode.SelectNodes("div").Count} items.");
|
||||
|
||||
HashSet<Manga> ret = new();
|
||||
|
||||
foreach (HtmlNode resultNode in resultsNode.SelectNodes("div"))
|
||||
{
|
||||
string url = resultNode.Descendants().First(d => d.HasClass("SeriesName")).GetAttributeValue("href", "");
|
||||
Manga? manga = GetMangaFromUrl($"https://manga4life.com{url}");
|
||||
if (manga is not null)
|
||||
ret.Add((Manga)manga);
|
||||
}
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
string originalLanguage = "", status = "";
|
||||
Dictionary<string, string> altTitles = new(), links = new();
|
||||
HashSet<string> tags = new();
|
||||
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
|
||||
|
||||
HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//img");
|
||||
string posterUrl = posterNode.GetAttributeValue("src", "");
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//h1");
|
||||
string sortName = titleNode.InnerText;
|
||||
|
||||
HtmlNode[] authorsNodes = document.DocumentNode
|
||||
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Author(s):']/..").Descendants("a")
|
||||
.ToArray();
|
||||
List<string> authors = new();
|
||||
foreach (HtmlNode authorNode in authorsNodes)
|
||||
authors.Add(authorNode.InnerText);
|
||||
|
||||
HtmlNode[] genreNodes = document.DocumentNode
|
||||
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Genre(s):']/..").Descendants("a")
|
||||
.ToArray();
|
||||
foreach (HtmlNode genreNode in genreNodes)
|
||||
tags.Add(genreNode.InnerText);
|
||||
|
||||
HtmlNode yearNode = document.DocumentNode
|
||||
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Released:']/..").Descendants("a")
|
||||
.First();
|
||||
int year = Convert.ToInt32(yearNode.InnerText);
|
||||
|
||||
HtmlNode[] statusNodes = document.DocumentNode
|
||||
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Status:']/..").Descendants("a")
|
||||
.ToArray();
|
||||
foreach (HtmlNode statusNode in statusNodes)
|
||||
if (statusNode.InnerText.Contains("publish", StringComparison.CurrentCultureIgnoreCase))
|
||||
status = statusNode.InnerText.Split(' ')[0];
|
||||
switch (status.ToLower())
|
||||
{
|
||||
case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
|
||||
case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
|
||||
case "discontinued": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
|
||||
case "complete": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
|
||||
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
|
||||
}
|
||||
|
||||
HtmlNode descriptionNode = document.DocumentNode
|
||||
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Description:']/..")
|
||||
.Descendants("div").First();
|
||||
string description = descriptionNode.InnerText;
|
||||
|
||||
Manga manga = new(this, sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
|
||||
coverFileNameInCache, links, year, originalLanguage, publicationId, releaseStatus, websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
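//The Chromium client clicks the "Show All Chapters" button so that every ChapterLink anchor
//is present in the rendered document.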
RequestResult result = downloadClient.MakeRequest($"https://manga4life.com/manga/{manga.publicationId}", RequestType.Default, clickButton:"[class*='ShowAllChapters']");
|
||||
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
|
||||
{
|
||||
return Array.Empty<Chapter>();
|
||||
}
|
||||
|
||||
HtmlNodeCollection chapterNodes = result.htmlDocument.DocumentNode.SelectNodes(
|
||||
"//a[contains(concat(' ',normalize-space(@class),' '),' ChapterLink ')]");
|
||||
string[] urls = chapterNodes.Select(node => node.GetAttributeValue("href", "")).ToArray();
|
||||
Regex urlRex = new (@"-chapter-([0-9\\.]+)(-index-([0-9\\.]+))?");
|
||||
|
||||
List<Chapter> chapters = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Match rexMatch = urlRex.Match(url);
|
||||
|
||||
string volumeNumber = "1";
|
||||
if (rexMatch.Groups[3].Value.Length > 0)
|
||||
volumeNumber = rexMatch.Groups[3].Value;
|
||||
string chapterNumber = rexMatch.Groups[1].Value;
|
||||
string fullUrl = $"https://manga4life.com{url}";
|
||||
fullUrl = fullUrl.Replace(Regex.Match(url,"(-page-[0-9])").Value,"");
|
||||
try
|
||||
{
|
||||
chapters.Add(new Chapter(manga, "", volumeNumber, chapterNumber, fullUrl));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
|
||||
}
|
||||
}
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
|
||||
RequestResult requestResult = this.downloadClient.MakeRequest(chapter.url, RequestType.Default);
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
HtmlDocument document = requestResult.htmlDocument;
|
||||
|
||||
HtmlNode gallery = document.DocumentNode.Descendants("div").First(div => div.HasClass("ImageGallery"));
|
||||
HtmlNode[] images = gallery.Descendants("img").Where(img => img.HasClass("img-fluid")).ToArray();
|
||||
List<string> urls = new();
|
||||
foreach(HtmlNode galleryImage in images)
|
||||
urls.Add(galleryImage.GetAttributeValue("src", ""));
|
||||
|
||||
string comicInfoPath = Path.GetTempFileName();
|
||||
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
|
||||
|
||||
return DownloadChapterImages(urls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
|
||||
}
|
||||
}
|
@ -1,238 +0,0 @@
|
||||
using System.Globalization;
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class Manganato : MangaConnector
|
||||
{
|
||||
public Manganato(GlobalBase clone) : base(clone, "Manganato", ["en"], ["manganato.com"])
|
||||
{
|
||||
this.downloadClient = new HttpDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string sanitizedTitle = string.Join('_', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
|
||||
string requestUrl = $"https://manganato.com/search/story/{sanitizedTitle}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
return Array.Empty<Manga>();
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
|
||||
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
|
||||
return publications;
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
{
|
||||
List<HtmlNode> searchResults = document.DocumentNode.Descendants("div").Where(n => n.HasClass("search-story-item")).ToList();
|
||||
Log($"{searchResults.Count} items.");
|
||||
List<string> urls = new();
|
||||
foreach (HtmlNode mangaResult in searchResults)
|
||||
{
|
||||
urls.Add(mangaResult.Descendants("a").First(n => n.HasClass("item-title")).GetAttributes()
|
||||
.First(a => a.Name == "href").Value);
|
||||
}
|
||||
|
||||
HashSet<Manga> ret = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Manga? manga = GetMangaFromUrl(url);
|
||||
if (manga is not null)
|
||||
ret.Add((Manga)manga);
|
||||
}
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
return GetMangaFromUrl($"https://chapmanganato.com/{publicationId}");
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return null;
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
return null;
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
Dictionary<string, string> altTitles = new();
|
||||
Dictionary<string, string>? links = null;
|
||||
HashSet<string> tags = new();
|
||||
string[] authors = Array.Empty<string>();
|
||||
string originalLanguage = "";
|
||||
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
|
||||
|
||||
HtmlNode infoNode = document.DocumentNode.Descendants("div").First(d => d.HasClass("story-info-right"));
|
||||
|
||||
string sortName = infoNode.Descendants("h1").First().InnerText;
|
||||
|
||||
HtmlNode infoTable = infoNode.Descendants().First(d => d.Name == "table");
|
||||
|
||||
foreach (HtmlNode row in infoTable.Descendants("tr"))
|
||||
{
|
||||
string key = row.SelectNodes("td").First().InnerText.ToLower();
|
||||
string value = row.SelectNodes("td").Last().InnerText;
|
||||
string keySanitized = string.Concat(Regex.Matches(key, "[a-z]"));
|
||||
|
||||
switch (keySanitized)
|
||||
{
|
||||
case "alternative":
|
||||
string[] alts = value.Split(" ; ");
|
||||
for(int i = 0; i < alts.Length; i++)
|
||||
altTitles.Add(i.ToString(), alts[i]);
|
||||
break;
|
||||
case "authors":
|
||||
authors = value.Split('-');
|
||||
for (int i = 0; i < authors.Length; i++)
|
||||
authors[i] = authors[i].Replace("\r\n", "");
|
||||
break;
|
||||
case "status":
|
||||
switch (value.ToLower())
|
||||
{
|
||||
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
|
||||
case "completed": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
|
||||
}
|
||||
break;
|
||||
case "genres":
|
||||
string[] genres = value.Split(" - ");
|
||||
for (int i = 0; i < genres.Length; i++)
|
||||
genres[i] = genres[i].Replace("\r\n", "");
|
||||
tags = genres.ToHashSet();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
string posterUrl = document.DocumentNode.Descendants("span").First(s => s.HasClass("info-image")).Descendants("img").First()
|
||||
.GetAttributes().First(a => a.Name == "src").Value;
|
||||
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
string description = document.DocumentNode.Descendants("div").First(d => d.HasClass("panel-story-info-description"))
|
||||
.InnerText.Replace("Description :", "");
|
||||
while (description.StartsWith('\n'))
|
||||
description = description.Substring(1);
|
||||
|
||||
string pattern = "MMM dd,yyyy HH:mm";
|
||||
|
||||
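//Determine the publication year from the upload timestamp ("title" attribute, "MMM dd,yyyy HH:mm")
//of the oldest listed chapter; nodes without a timestamp fall back to a far-future default and are effectively ignored.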
HtmlNode? oldestChapter = document.DocumentNode
.SelectNodes("//span[contains(concat(' ',normalize-space(@class),' '),' chapter-time ')]").MinBy(
node => DateTime.ParseExact(node.GetAttributeValue("title", "Dec 31 2400, 23:59"), pattern,
CultureInfo.InvariantCulture));

int year = DateTime.ParseExact(oldestChapter?.GetAttributeValue("title", "Dec 31 2400, 23:59")??"Dec 31 2400, 23:59", pattern,
CultureInfo.InvariantCulture).Year;
|
||||
|
||||
Manga manga = new (this, sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
string requestUrl = $"https://chapmanganato.com/{manga.publicationId}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Chapter>();
|
||||
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
if (requestResult.htmlDocument is null)
|
||||
return Array.Empty<Chapter>();
|
||||
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
|
||||
{
|
||||
List<Chapter> ret = new();
|
||||
|
||||
HtmlNode chapterList = document.DocumentNode.Descendants("ul").First(l => l.HasClass("row-content-chapter"));
|
||||
|
||||
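//Volume is read from the chapter title ("Vol.X ..."), the chapter number from the url,
//and the display name from the "Chapter N: name" part of the title.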
Regex volRex = new(@"Vol\.([0-9]+).*");
|
||||
Regex chapterRex = new(@"https:\/\/chapmanganato.[A-z]+\/manga-[A-z0-9]+\/chapter-([0-9\.]+)");
|
||||
Regex nameRex = new(@"Chapter ([0-9]+(\.[0-9]+)*){1}:? (.*)");
|
||||
|
||||
foreach (HtmlNode chapterInfo in chapterList.Descendants("li"))
|
||||
{
|
||||
string fullString = chapterInfo.Descendants("a").First(d => d.HasClass("chapter-name")).InnerText;
|
||||
|
||||
string url = chapterInfo.Descendants("a").First(d => d.HasClass("chapter-name"))
|
||||
.GetAttributeValue("href", "");
|
||||
string? volumeNumber = volRex.IsMatch(fullString) ? volRex.Match(fullString).Groups[1].Value : null;
|
||||
string chapterNumber = chapterRex.Match(url).Groups[1].Value;
|
||||
string chapterName = nameRex.Match(fullString).Groups[3].Value;
|
||||
try
|
||||
{
|
||||
ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
|
||||
}
|
||||
}
|
||||
ret.Reverse();
|
||||
return ret;
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
string requestUrl = chapter.url;
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return HttpStatusCode.InternalServerError;
|
||||
}
|
||||
|
||||
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
|
||||
|
||||
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, "https://chapmanganato.com/", progressToken:progressToken);
|
||||
}
|
||||
|
||||
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
|
||||
{
|
||||
List<string> ret = new();
|
||||
|
||||
HtmlNode imageContainer =
|
||||
document.DocumentNode.Descendants("div").First(i => i.HasClass("container-chapter-reader"));
|
||||
foreach(HtmlNode imageNode in imageContainer.Descendants("img"))
|
||||
ret.Add(imageNode.GetAttributeValue("src", ""));
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
}
|
@ -1,233 +0,0 @@
|
||||
using System.Data;
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Xml.Linq;
|
||||
using HtmlAgilityPack;
|
||||
using Newtonsoft.Json;
|
||||
using Soenneker.Utils.String.NeedlemanWunsch;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class Mangasee : MangaConnector
|
||||
{
|
||||
public Mangasee(GlobalBase clone) : base(clone, "Mangasee", ["en"], ["mangasee123.com"])
|
||||
{
|
||||
this.downloadClient = new ChromiumDownloadClient(clone);
|
||||
}
|
||||
|
||||
private struct SearchResult
|
||||
{
|
||||
public string i { get; set; }
|
||||
public string s { get; set; }
|
||||
public string[] a { get; set; }
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string requestUrl = "https://mangasee123.com/_search.php";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
Log($"Failed to retrieve search: {requestResult.statusCode}");
|
||||
return Array.Empty<Manga>();
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
SearchResult[] searchResults = JsonConvert.DeserializeObject<SearchResult[]>(requestResult.htmlDocument!.DocumentNode.InnerText) ??
|
||||
throw new NoNullAllowedException();
|
||||
SearchResult[] filteredResults = FilteredResults(publicationTitle, searchResults);
|
||||
Log($"Total available manga: {searchResults.Length} Filtered down to: {filteredResults.Length}");
|
||||
|
||||
|
||||
string[] urls = filteredResults.Select(result => $"https://mangasee123.com/manga/{result.i}").ToArray();
|
||||
List<Manga> searchResultManga = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Manga? newManga = GetMangaFromUrl(url);
|
||||
if(newManga is { } manga)
|
||||
searchResultManga.Add(manga);
|
||||
}
|
||||
Log($"Retrieved {searchResultManga.Count} publications. Term=\"{publicationTitle}\"");
|
||||
return searchResultManga.ToArray();
|
||||
}
|
||||
catch (NoNullAllowedException)
|
||||
{
|
||||
Log("Failed to retrieve search");
|
||||
return Array.Empty<Manga>();
|
||||
}
|
||||
}
|
||||
|
||||
private readonly string[] _filterWords = {"a", "the", "of", "as", "to", "no", "for", "on", "with", "be", "and", "in", "wa", "at", "ni"};
private string ToFilteredString(string input) => string.Join(' ', input.ToLower().Split(' ').Where(word => _filterWords.Contains(word) == false));
|
||||
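//Ranks search results by Needleman-Wunsch similarity between the stop-word-filtered search term
//and each result's title and alt-titles (averaged), then keeps the ten best-scoring entries.
//This assumes CalculateSimilarity returns a distance-like score where lower means more similar.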
private SearchResult[] FilteredResults(string publicationTitle, SearchResult[] unfilteredSearchResults)
|
||||
{
|
||||
Dictionary<SearchResult, int> similarity = new();
|
||||
foreach (SearchResult sr in unfilteredSearchResults)
|
||||
{
|
||||
List<int> scores = new();
|
||||
string filteredPublicationString = ToFilteredString(publicationTitle);
|
||||
string filteredSString = ToFilteredString(sr.s);
|
||||
scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(filteredSString, filteredPublicationString));
|
||||
foreach (string srA in sr.a)
|
||||
{
|
||||
string filteredAString = ToFilteredString(srA);
|
||||
scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(filteredAString, filteredPublicationString));
|
||||
}
|
||||
similarity.Add(sr, scores.Sum() / scores.Count);
|
||||
}
|
||||
|
||||
List<SearchResult> ret = similarity.OrderBy(s => s.Value).Take(10).Select(s => s.Key).ToList();
|
||||
return ret.ToArray();
|
||||
}

    public override Manga? GetMangaFromId(string publicationId)
    {
        return GetMangaFromUrl($"https://mangasee123.com/manga/{publicationId}");
    }

    public override Manga? GetMangaFromUrl(string url)
    {
        Regex publicationIdRex = new(@"https:\/\/mangasee123.com\/manga\/(.*)(\/.*)*");
        string publicationId = publicationIdRex.Match(url).Groups[1].Value;

        RequestResult requestResult = this.downloadClient.MakeRequest(url, RequestType.MangaInfo);
        if((int)requestResult.statusCode < 300 && (int)requestResult.statusCode >= 200 && requestResult.htmlDocument is not null)
            return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
        return null;
    }

    private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
    {
        string originalLanguage = "", status = "";
        Dictionary<string, string> altTitles = new(), links = new();
        HashSet<string> tags = new();
        Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;

        HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//img");
        string posterUrl = posterNode.GetAttributeValue("src", "");
        string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);

        HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//h1");
        string sortName = titleNode.InnerText;

        HtmlNode[] authorsNodes = document.DocumentNode
            .SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Author(s):']/..").Descendants("a")
            .ToArray();
        List<string> authors = new();
        foreach (HtmlNode authorNode in authorsNodes)
            authors.Add(authorNode.InnerText);

        HtmlNode[] genreNodes = document.DocumentNode
            .SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Genre(s):']/..").Descendants("a")
            .ToArray();
        foreach (HtmlNode genreNode in genreNodes)
            tags.Add(genreNode.InnerText);

        HtmlNode yearNode = document.DocumentNode
            .SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Released:']/..").Descendants("a")
            .First();
        int year = Convert.ToInt32(yearNode.InnerText);

        HtmlNode[] statusNodes = document.DocumentNode
            .SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Status:']/..").Descendants("a")
            .ToArray();
        foreach (HtmlNode statusNode in statusNodes)
            if (statusNode.InnerText.Contains("publish", StringComparison.CurrentCultureIgnoreCase))
                status = statusNode.InnerText.Split(' ')[0];
        switch (status.ToLower())
        {
            case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
            case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
            case "discontinued": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
            case "complete": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
            case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
        }

        HtmlNode descriptionNode = document.DocumentNode
            .SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Description:']/..")
            .Descendants("div").First();
        string description = descriptionNode.InnerText;

        Manga manga = new(this, sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
            coverFileNameInCache, links, year, originalLanguage, publicationId, releaseStatus, websiteUrl);
        AddMangaToCache(manga);
        return manga;
    }

    public override Chapter[] GetChapters(Manga manga, string language="en")
    {
        Log($"Getting chapters {manga}");
        try
        {
            XDocument doc = XDocument.Load($"https://mangasee123.com/rss/{manga.publicationId}.xml");
            XElement[] chapterItems = doc.Descendants("item").ToArray();
            List<Chapter> chapters = new();
            Regex chVolRex = new(@".*chapter-([0-9\.]+)(?:-index-([0-9\.]+))?.*");
            foreach (XElement chapter in chapterItems)
            {
                string url = chapter.Descendants("link").First().Value;
                Match m = chVolRex.Match(url);
                string? volumeNumber = m.Groups[2].Success ? m.Groups[2].Value : "1";
                string chapterNumber = m.Groups[1].Value;

                string chapterUrl = Regex.Replace(url, @"-page-[0-9]+(\.html)", ".html");
                try
                {
                    chapters.Add(new Chapter(manga, "", volumeNumber, chapterNumber, chapterUrl));
                }
                catch (Exception e)
                {
                    Log($"Failed to load chapter {chapterNumber}: {e.Message}");
                }
            }

            //Return Chapters ordered by Chapter-Number
            Log($"Got {chapters.Count} chapters. {manga}");
            return chapters.Order().ToArray();
        }
        catch (HttpRequestException e)
        {
            Log($"Failed to load https://mangasee123.com/rss/{manga.publicationId}.xml \n\r{e}");
            return Array.Empty<Chapter>();
        }
    }
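
    // Editor's sketch, not part of the original file: what GetChapters above extracts from a
    // single RSS <link> value. The URL is a hypothetical example of the shape chVolRex expects;
    // group 1 is the chapter number and the optional "-index-" group becomes the volume number
    // (defaulting to "1").
    private static void ExampleChapterLinkParsing()
    {
        Regex chVolRex = new(@".*chapter-([0-9\.]+)(?:-index-([0-9\.]+))?.*");
        Match m = chVolRex.Match("https://mangasee123.com/read-online/Example-chapter-12-index-2-page-1.html");
        string chapterNumber = m.Groups[1].Value; // "12"
        string volumeNumber = m.Groups[2].Success ? m.Groups[2].Value : "1"; // "2"
        _ = (chapterNumber, volumeNumber);
    }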

    public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
    {
        if (progressToken?.cancellationRequested ?? false)
        {
            progressToken.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        Manga chapterParentManga = chapter.parentManga;
        if (progressToken?.cancellationRequested ?? false)
        {
            progressToken.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        Log($"Retrieving chapter-info {chapter} {chapterParentManga}");

        RequestResult requestResult = this.downloadClient.MakeRequest(chapter.url, RequestType.Default);
        if (requestResult.htmlDocument is null)
        {
            progressToken?.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        HtmlDocument document = requestResult.htmlDocument;

        HtmlNode gallery = document.DocumentNode.Descendants("div").First(div => div.HasClass("ImageGallery"));
        HtmlNode[] images = gallery.Descendants("img").Where(img => img.HasClass("img-fluid")).ToArray();
        List<string> urls = new();
        foreach(HtmlNode galleryImage in images)
            urls.Add(galleryImage.GetAttributeValue("src", ""));

        return DownloadChapterImages(urls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
    }
}
@ -1,240 +0,0 @@
using System.Net;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using Tranga.Jobs;

namespace Tranga.MangaConnectors;

public class Mangaworld: MangaConnector
{
    public Mangaworld(GlobalBase clone) : base(clone, "Mangaworld", ["it"], ["www.mangaworld.ac"])
    {
        this.downloadClient = new HttpDownloadClient(clone);
    }

    public override Manga[] GetManga(string publicationTitle = "")
    {
        Log($"Searching Publications. Term=\"{publicationTitle}\"");
        string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
        string requestUrl = $"https://www.mangaworld.ac/archive?keyword={sanitizedTitle}";
        RequestResult requestResult =
            downloadClient.MakeRequest(requestUrl, RequestType.Default);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
            return Array.Empty<Manga>();

        if (requestResult.htmlDocument is null)
            return Array.Empty<Manga>();
        Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
        Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
        return publications;
    }

    private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
    {
        if (!document.DocumentNode.SelectSingleNode("//div[@class='comics-grid']").ChildNodes
                .Any(node => node.HasClass("entry")))
            return Array.Empty<Manga>();

        List<string> urls = document.DocumentNode
            .SelectNodes(
                "//div[@class='comics-grid']//div[@class='entry']//a[contains(concat(' ',normalize-space(@class),' '),'thumb')]")
            .Select(thumb => thumb.GetAttributeValue("href", "")).ToList();

        HashSet<Manga> ret = new();
        foreach (string url in urls)
        {
            Manga? manga = GetMangaFromUrl(url);
            if (manga is not null)
                ret.Add((Manga)manga);
        }

        return ret.ToArray();
    }

    public override Manga? GetMangaFromId(string publicationId)
    {
        return GetMangaFromUrl($"https://www.mangaworld.ac/manga/{publicationId}");
    }

    public override Manga? GetMangaFromUrl(string url)
    {
        RequestResult requestResult =
            downloadClient.MakeRequest(url, RequestType.MangaInfo);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
            return null;

        if (requestResult.htmlDocument is null)
            return null;

        Regex idRex = new (@"https:\/\/www\.mangaworld\.[a-z]{0,63}\/manga\/([0-9]+\/[0-9A-z\-]+).*");
        string id = idRex.Match(url).Groups[1].Value;
        return ParseSinglePublicationFromHtml(requestResult.htmlDocument, id, url);
    }

    private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
    {
        Dictionary<string, string> altTitles = new();
        Dictionary<string, string>? links = null;
        string originalLanguage = "";
        Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;

        HtmlNode infoNode = document.DocumentNode.Descendants("div").First(d => d.HasClass("info"));

        string sortName = infoNode.Descendants("h1").First().InnerText;

        HtmlNode metadata = infoNode.Descendants().First(d => d.HasClass("meta-data"));

        HtmlNode altTitlesNode = metadata.SelectSingleNode("//span[text()='Titoli alternativi: ' or text()='Titolo alternativo: ']/..").ChildNodes[1];

        string[] alts = altTitlesNode.InnerText.Split(", ");
        for(int i = 0; i < alts.Length; i++)
            altTitles.Add(i.ToString(), alts[i]);

        HtmlNode genresNode =
            metadata.SelectSingleNode("//span[text()='Generi: ' or text()='Genero: ']/..");
        HashSet<string> tags = genresNode.SelectNodes("a").Select(node => node.InnerText).ToHashSet();

        HtmlNode authorsNode =
            metadata.SelectSingleNode("//span[text()='Autore: ' or text()='Autori: ']/..");
        string[] authors = authorsNode.SelectNodes("a").Select(node => node.InnerText).ToArray();

        string status = metadata.SelectSingleNode("//span[text()='Stato: ']/..").SelectNodes("a").First().InnerText;
        // ReSharper disable 5 times StringLiteralTypo
        switch (status.ToLower())
        {
            case "cancellato": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
            case "in pausa": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
            case "droppato": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
            case "finito": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
            case "in corso": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
        }

        string posterUrl = document.DocumentNode.SelectSingleNode("//img[@class='rounded']").GetAttributeValue("src", "");

        string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId.Replace('/', '-'), RequestType.MangaCover);

        string description = document.DocumentNode.SelectSingleNode("//div[@id='noidungm']").InnerText;

        string yearString = metadata.SelectSingleNode("//span[text()='Anno di uscita: ']/..").SelectNodes("a").First().InnerText;
        int year = Convert.ToInt32(yearString);

        Manga manga = new (this, sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
            year, originalLanguage, publicationId, releaseStatus, websiteUrl);
        AddMangaToCache(manga);
        return manga;
    }

    public override Chapter[] GetChapters(Manga manga, string language="en")
    {
        Log($"Getting chapters {manga}");
        string requestUrl = $"https://www.mangaworld.ac/manga/{manga.publicationId}";
        RequestResult requestResult =
            downloadClient.MakeRequest(requestUrl, RequestType.Default);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
            return Array.Empty<Chapter>();

        //Return Chapters ordered by Chapter-Number
        if (requestResult.htmlDocument is null)
            return Array.Empty<Chapter>();
        List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
        Log($"Got {chapters.Count} chapters. {manga}");
        return chapters.Order().ToArray();
    }

    private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
    {
        List<Chapter> ret = new();

        HtmlNode chaptersWrapper =
            document.DocumentNode.SelectSingleNode(
                "//div[contains(concat(' ',normalize-space(@class),' '),'chapters-wrapper')]");

        Regex volumeRex = new(@"[Vv]olume ([0-9]+).*");
        Regex chapterRex = new(@"[Cc]apitolo ([0-9]+(?:\.[0-9]+)?).*");
        Regex idRex = new(@".*\/read\/([a-z0-9]+)(?:[?\/].*)?");
        if (chaptersWrapper.Descendants("div").Any(descendant => descendant.HasClass("volume-element")))
        {
            foreach (HtmlNode volNode in document.DocumentNode.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),'volume-element')]"))
            {
                string volume = volumeRex.Match(volNode.SelectNodes("div").First(node => node.HasClass("volume")).SelectSingleNode("p").InnerText).Groups[1].Value;
                foreach (HtmlNode chNode in volNode.SelectNodes("div").First(node => node.HasClass("volume-chapters")).SelectNodes("div"))
                {
                    string number = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
                    string url = chNode.SelectSingleNode("a").GetAttributeValue("href", "");
                    string id = idRex.Match(chNode.SelectSingleNode("a").GetAttributeValue("href", "")).Groups[1].Value;
                    try
                    {
                        ret.Add(new Chapter(manga, null, volume, number, url, id));
                    }
                    catch (Exception e)
                    {
                        Log($"Failed to load chapter {number}: {e.Message}");
                    }
                }
            }
        }
        else
        {
            foreach (HtmlNode chNode in chaptersWrapper.SelectNodes("div").Where(node => node.HasClass("chapter")))
            {
                string number = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
                string url = chNode.SelectSingleNode("a").GetAttributeValue("href", "");
                string id = idRex.Match(chNode.SelectSingleNode("a").GetAttributeValue("href", "")).Groups[1].Value;
                try
                {
                    ret.Add(new Chapter(manga, null, null, number, url, id));
                }
                catch (Exception e)
                {
                    Log($"Failed to load chapter {number}: {e.Message}");
                }
            }
        }

        ret.Reverse();
        return ret;
    }
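
    // Editor's sketch, not part of the original file: the Italian volume/chapter labels that
    // ParseChaptersFromHtml above matches. The input strings are hypothetical examples of the
    // shape the volumeRex and chapterRex patterns expect.
    private static void ExampleChapterLabelParsing()
    {
        string volume = new Regex(@"[Vv]olume ([0-9]+).*").Match("Volume 03").Groups[1].Value; // "03"
        string chapter = new Regex(@"[Cc]apitolo ([0-9]+(?:\.[0-9]+)?).*").Match("Capitolo 12.5").Groups[1].Value; // "12.5"
        _ = (volume, chapter);
    }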

    public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
    {
        if (progressToken?.cancellationRequested ?? false)
        {
            progressToken.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        Manga chapterParentManga = chapter.parentManga;
        Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
        string requestUrl = $"{chapter.url}?style=list";
        RequestResult requestResult =
            downloadClient.MakeRequest(requestUrl, RequestType.Default);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
        {
            progressToken?.Cancel();
            return requestResult.statusCode;
        }

        if (requestResult.htmlDocument is null)
        {
            progressToken?.Cancel();
            return HttpStatusCode.InternalServerError;
        }

        string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);

        return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage,"https://www.mangaworld.bz/", progressToken:progressToken);
    }

    private string[] ParseImageUrlsFromHtml(HtmlDocument document)
    {
        List<string> ret = new();

        HtmlNode imageContainer =
            document.DocumentNode.SelectSingleNode("//div[@id='page']");
        foreach(HtmlNode imageNode in imageContainer.Descendants("img"))
            ret.Add(imageNode.GetAttributeValue("src", ""));

        return ret.ToArray();
    }
}
@ -1,203 +0,0 @@
using System.Net;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using Tranga.Jobs;

namespace Tranga.MangaConnectors;

public class ManhuaPlus : MangaConnector
{
    public ManhuaPlus(GlobalBase clone) : base(clone, "ManhuaPlus", ["en"], ["manhuaplus.org"])
    {
        this.downloadClient = new ChromiumDownloadClient(clone);
    }

    public override Manga[] GetManga(string publicationTitle = "")
    {
        Log($"Searching Publications. Term=\"{publicationTitle}\"");
        string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
        string requestUrl = $"https://manhuaplus.org/search?keyword={sanitizedTitle}";
        RequestResult requestResult =
            downloadClient.MakeRequest(requestUrl, RequestType.Default);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
            return Array.Empty<Manga>();

        if (requestResult.htmlDocument is null)
            return Array.Empty<Manga>();
        Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
        Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
        return publications;
    }

    private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
    {
        if (document.DocumentNode.SelectSingleNode("//h1/../..").ChildNodes//I already want to not.
                .Any(node => node.InnerText.Contains("No manga found")))
            return Array.Empty<Manga>();

        List<string> urls = document.DocumentNode
            .SelectNodes("//h1/../..//a[contains(@href, 'https://manhuaplus.org/manga/') and contains(concat(' ',normalize-space(@class),' '),' clamp ') and not(contains(@href, '/chapter'))]")
            .Select(mangaNode => mangaNode.GetAttributeValue("href", "")).ToList();
        logger?.WriteLine($"Got {urls.Count} urls.");

        HashSet<Manga> ret = new();
        foreach (string url in urls)
        {
            Manga? manga = GetMangaFromUrl(url);
            if (manga is not null)
                ret.Add((Manga)manga);
        }

        return ret.ToArray();
    }

    public override Manga? GetMangaFromId(string publicationId)
    {
        return GetMangaFromUrl($"https://manhuaplus.org/manga/{publicationId}");
    }

    public override Manga? GetMangaFromUrl(string url)
    {
        Regex publicationIdRex = new(@"https:\/\/manhuaplus.org\/manga\/(.*)(\/.*)*");
        string publicationId = publicationIdRex.Match(url).Groups[1].Value;

        RequestResult requestResult = this.downloadClient.MakeRequest(url, RequestType.MangaInfo);
        if((int)requestResult.statusCode < 300 && (int)requestResult.statusCode >= 200 && requestResult.htmlDocument is not null && requestResult.redirectedToUrl != "https://manhuaplus.org/home") //When manga doesnt exists it redirects to home
            return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
        return null;
    }

    private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
    {
        string originalLanguage = "", status = "";
        Dictionary<string, string> altTitles = new(), links = new();
        HashSet<string> tags = new();
        Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;

        HtmlNode posterNode = document.DocumentNode.SelectSingleNode("/html/body/main/div/div/div[2]/div[1]/figure/a/img");//BRUH
        Regex posterRex = new(@".*(\/uploads/covers/[a-zA-Z0-9\-\._\~\!\$\&\'\(\)\*\+\,\;\=\:\@]+).*");
        string posterUrl = $"https://manhuaplus.org/{posterRex.Match(posterNode.GetAttributeValue("src", "")).Groups[1].Value}";
        string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);

        HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//h1");
        string sortName = titleNode.InnerText.Replace("\n", "");

        List<string> authors = new();
        try
        {
            HtmlNode[] authorsNodes = document.DocumentNode
                .SelectNodes("//a[contains(@href, 'https://manhuaplus.org/authors/')]")
                .ToArray();
            foreach (HtmlNode authorNode in authorsNodes)
                authors.Add(authorNode.InnerText);
        }
        catch (ArgumentNullException e)
        {
            Log("No authors found.");
        }

        try
        {
            HtmlNode[] genreNodes = document.DocumentNode
                .SelectNodes("//a[contains(@href, 'https://manhuaplus.org/genres/')]").ToArray();
            foreach (HtmlNode genreNode in genreNodes)
                tags.Add(genreNode.InnerText.Replace("\n", ""));
        }
        catch (ArgumentNullException e)
        {
            Log("No genres found");
        }

        Regex yearRex = new(@"(?:[0-9]{1,2}\/){2}([0-9]{2,4}) [0-9]{1,2}:[0-9]{1,2}");
        HtmlNode yearNode = document.DocumentNode.SelectSingleNode("//aside//i[contains(concat(' ',normalize-space(@class),' '),' fa-clock ')]/../span");
        Match match = yearRex.Match(yearNode.InnerText);
        int year = match.Success && match.Groups[1].Success ? int.Parse(match.Groups[1].Value) : 1960;

        status = document.DocumentNode.SelectSingleNode("//aside//i[contains(concat(' ',normalize-space(@class),' '),' fa-rss ')]/../span").InnerText.Replace("\n", "");
        switch (status.ToLower())
        {
            case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
            case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
            case "discontinued": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
            case "complete": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
            case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
        }

        HtmlNode descriptionNode = document.DocumentNode
            .SelectSingleNode("//div[@id='syn-target']");
        string description = descriptionNode.InnerText;

        Manga manga = new(this, sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
            coverFileNameInCache, links,
            year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
        AddMangaToCache(manga);
        return manga;
    }

    public override Chapter[] GetChapters(Manga manga, string language="en")
    {
        Log($"Getting chapters {manga}");
        RequestResult result = downloadClient.MakeRequest($"https://manhuaplus.org/manga/{manga.publicationId}", RequestType.Default);
        if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
        {
            return Array.Empty<Chapter>();
        }

        HtmlNodeCollection chapterNodes = result.htmlDocument.DocumentNode.SelectNodes("//li[contains(concat(' ',normalize-space(@class),' '),' chapter ')]//a");
        string[] urls = chapterNodes.Select(node => node.GetAttributeValue("href", "")).ToArray();
        Regex urlRex = new (@".*\/chapter-([0-9\-]+).*");

        List<Chapter> chapters = new();
        foreach (string url in urls)
        {
            Match rexMatch = urlRex.Match(url);

            string volumeNumber = "1";
            string chapterNumber = rexMatch.Groups[1].Value;
            string fullUrl = url;
            try
            {
                chapters.Add(new Chapter(manga, "", volumeNumber, chapterNumber, fullUrl));
            }
            catch (Exception e)
            {
                Log($"Failed to load chapter {chapterNumber}: {e.Message}");
            }
        }
        //Return Chapters ordered by Chapter-Number
        Log($"Got {chapters.Count} chapters. {manga}");
        return chapters.Order().ToArray();
    }
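
    // Editor's sketch, not part of the original file: the chapter-number extraction used by
    // GetChapters above, applied to a hypothetical chapter URL of the shape urlRex expects.
    // ManhuaPlus chapter links carry no volume, so GetChapters always uses "1".
    private static void ExampleChapterUrlParsing()
    {
        Regex urlRex = new(@".*\/chapter-([0-9\-]+).*");
        string chapterNumber = urlRex.Match("https://manhuaplus.org/manga/example-title/chapter-105").Groups[1].Value; // "105"
        _ = chapterNumber;
    }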

    public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
    {
        if (progressToken?.cancellationRequested ?? false)
        {
            progressToken.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        Manga chapterParentManga = chapter.parentManga;
        if (progressToken?.cancellationRequested ?? false)
        {
            progressToken.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        Log($"Retrieving chapter-info {chapter} {chapterParentManga}");

        RequestResult requestResult = this.downloadClient.MakeRequest(chapter.url, RequestType.Default);
        if (requestResult.htmlDocument is null)
        {
            progressToken?.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        HtmlDocument document = requestResult.htmlDocument;

        HtmlNode[] images = document.DocumentNode.SelectNodes("//a[contains(concat(' ',normalize-space(@class),' '),' readImg ')]/img").ToArray();
        List<string> urls = images.Select(node => node.GetAttributeValue("src", "")).ToList();

        return DownloadChapterImages(urls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
    }
}
@ -1,27 +0,0 @@
using System.Net;
using HtmlAgilityPack;

namespace Tranga.MangaConnectors;

public struct RequestResult
{
    public HttpStatusCode statusCode { get; }
    public Stream result { get; }
    public bool hasBeenRedirected { get; }
    public string? redirectedToUrl { get; }
    public HtmlDocument? htmlDocument { get; }

    public RequestResult(HttpStatusCode statusCode, HtmlDocument? htmlDocument, Stream result)
    {
        this.statusCode = statusCode;
        this.htmlDocument = htmlDocument;
        this.result = result;
    }

    public RequestResult(HttpStatusCode statusCode, HtmlDocument? htmlDocument, Stream result, bool hasBeenRedirected, string redirectedTo)
        : this(statusCode, htmlDocument, result)
    {
        this.hasBeenRedirected = hasBeenRedirected;
        redirectedToUrl = redirectedTo;
    }
}
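
// Editor's note, not part of the original file: a hedged usage sketch. Presumably the download
// clients build a RequestResult after each request, along the lines of (hypothetical values):
//   new RequestResult(HttpStatusCode.OK, htmlDocument, contentStream);
//   new RequestResult(HttpStatusCode.OK, htmlDocument, contentStream, true, "https://example.org/final");
// Connectors then branch on statusCode and htmlDocument, as the classes above do.
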
@ -1,11 +0,0 @@
namespace Tranga.MangaConnectors;

public enum RequestType : byte
{
    Default = 0,
    MangaDexFeed = 1,
    MangaImage = 2,
    MangaCover = 3,
    MangaDexImage = 5,
    MangaInfo = 6
}
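
// Editor's note, not part of the original file: RequestType is the second argument of
// downloadClient.MakeRequest and categorises the request, e.g. the connectors in this commit call
//   downloadClient.MakeRequest(url, RequestType.MangaInfo)
// for detail pages and pass RequestType.MangaImage to DownloadChapterImages for page images.
// Note that the underlying value 4 is skipped in this enum.
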
@ -1,243 +0,0 @@
using System.Net;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using Soenneker.Utils.String.NeedlemanWunsch;
using Tranga.Jobs;

namespace Tranga.MangaConnectors;

public class Weebcentral : MangaConnector
{
    private readonly string _baseUrl = "https://weebcentral.com";

    private readonly string[] _filterWords =
        { "a", "the", "of", "as", "to", "no", "for", "on", "with", "be", "and", "in", "wa", "at", "be", "ni" };

    public Weebcentral(GlobalBase clone) : base(clone, "Weebcentral", ["en"])
    {
        downloadClient = new ChromiumDownloadClient(clone);
    }

    public override Manga[] GetManga(string publicationTitle = "")
    {
        Log($"Searching Publications. Term=\"{publicationTitle}\"");
        const int limit = 32; //How many values we want returned at once
        var offset = 0; //"Page"
        var requestUrl =
            $"{_baseUrl}/search/data?limit={limit}&offset={offset}&text={publicationTitle}&sort=Best+Match&order=Ascending&official=Any&display_mode=Minimal%20Display";
        var requestResult =
            downloadClient.MakeRequest(requestUrl, RequestType.Default);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 ||
            requestResult.htmlDocument == null)
        {
            Log($"Failed to retrieve search: {requestResult.statusCode}");
            return [];
        }

        var publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
        Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");

        return publications;
    }

    private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
    {
        if (document.DocumentNode.SelectNodes("//article") == null)
            return Array.Empty<Manga>();

        var urls = document.DocumentNode.SelectNodes("/html/body/article/a[@class='link link-hover']")
            .Select(elem => elem.GetAttributeValue("href", "")).ToList();

        HashSet<Manga> ret = new();
        foreach (var url in urls)
        {
            var manga = GetMangaFromUrl(url);
            if (manga is not null)
                ret.Add((Manga)manga);
        }

        return ret.ToArray();
    }

    public override Manga? GetMangaFromUrl(string url)
    {
        Regex publicationIdRex = new(@"https:\/\/weebcentral\.com\/series\/(\w*)\/(.*)");
        var publicationId = publicationIdRex.Match(url).Groups[1].Value;

        var requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
        if ((int)requestResult.statusCode < 300 && (int)requestResult.statusCode >= 200 &&
            requestResult.htmlDocument is not null)
            return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
        return null;
    }

    private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
    {
        var posterNode =
            document.DocumentNode.SelectSingleNode("//section[@class='flex items-center justify-center']/picture/img");
        var posterUrl = posterNode?.GetAttributeValue("src", "") ?? "";
        var coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);

        var titleNode = document.DocumentNode.SelectSingleNode("//section/h1");
        var sortName = titleNode?.InnerText ?? "Undefined";

        HtmlNode[] authorsNodes =
            document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Author(s): ']/span")?.ToArray() ?? [];
        var authors = authorsNodes.Select(n => n.InnerText).ToList();

        HtmlNode[] genreNodes =
            document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Tags(s): ']/span")?.ToArray() ?? [];
        HashSet<string> tags = genreNodes.Select(n => n.InnerText).ToHashSet();

        var statusNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Status: ']/a");
        var status = statusNode?.InnerText ?? "";
        Log("unable to parse status");
        var releaseStatus = Manga.ReleaseStatusByte.Unreleased;
        switch (status.ToLower())
        {
            case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
            case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
            case "complete": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
            case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
        }

        var yearNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Released: ']/span");
        var year = Convert.ToInt32(yearNode?.InnerText ?? "0");

        var descriptionNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Description']/p");
        var description = descriptionNode?.InnerText ?? "Undefined";

        HtmlNode[] altTitleNodes = document.DocumentNode
            .SelectNodes("//ul/li[strong/text() = 'Associated Name(s)']/ul/li")?.ToArray() ?? [];
        Dictionary<string, string> altTitles = new(), links = new();
        for (var i = 0; i < altTitleNodes.Length; i++)
            altTitles.Add(i.ToString(), altTitleNodes[i].InnerText);

        var originalLanguage = "";

        Manga manga = new(sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
            coverFileNameInCache, links,
            year, originalLanguage, publicationId, releaseStatus, websiteUrl);
        AddMangaToCache(manga);
        return manga;
    }

    public override Manga? GetMangaFromId(string publicationId)
    {
        return GetMangaFromUrl($"https://weebcentral.com/series/{publicationId}");
    }

    private string ToFilteredString(string input)
    {
        return string.Join(' ', input.ToLower().Split(' ').Where(word => _filterWords.Contains(word) == false));
    }
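
    // Editor's sketch, not part of the original file: what ToFilteredString above produces for a
    // hypothetical title before the Needleman-Wunsch comparison in FilteredResults below.
    private string ExampleFilteredTitle()
    {
        return ToFilteredString("The Tale of a Hero"); // -> "tale hero"
    }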

    private SearchResult[] FilteredResults(string publicationTitle, SearchResult[] unfilteredSearchResults)
    {
        Dictionary<SearchResult, int> similarity = new();
        foreach (var sr in unfilteredSearchResults)
        {
            List<int> scores = new();
            var filteredPublicationString = ToFilteredString(publicationTitle);
            var filteredSString = ToFilteredString(sr.s);
            scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(filteredSString, filteredPublicationString));
            foreach (var srA in sr.a)
            {
                var filteredAString = ToFilteredString(srA);
                scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(filteredAString, filteredPublicationString));
            }

            similarity.Add(sr, scores.Sum() / scores.Count);
        }

        var ret = similarity.OrderBy(s => s.Value).Take(10).Select(s => s.Key).ToList();
        return ret.ToArray();
    }

    public override Chapter[] GetChapters(Manga manga, string language = "en")
    {
        Log($"Getting chapters {manga}");
        var requestUrl = $"{_baseUrl}/series/{manga.publicationId}/full-chapter-list";
        var requestResult =
            downloadClient.MakeRequest(requestUrl, RequestType.Default);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
            return Array.Empty<Chapter>();

        //Return Chapters ordered by Chapter-Number
        if (requestResult.htmlDocument is null)
            return Array.Empty<Chapter>();
        var chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
        Log($"Got {chapters.Count} chapters. {manga}");
        return chapters.Order().ToArray();
    }

    private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
    {
        var chaptersWrapper = document.DocumentNode.SelectSingleNode("/html/body");

        Regex chapterRex = new(@".* (\d+)");
        Regex idRex = new(@"https:\/\/weebcentral\.com\/chapters\/(\w*)");

        var ret = chaptersWrapper.Descendants("a").Select(elem =>
        {
            var url = elem.GetAttributeValue("href", "") ?? "Undefined";

            if (!url.StartsWith("https://") && !url.StartsWith("http://"))
                return new Chapter(manga, null, null, "-1", "undefined");

            var idMatch = idRex.Match(url);
            var id = idMatch.Success ? idMatch.Groups[1].Value : null;

            var chapterNode = elem.SelectSingleNode("span[@class='grow flex items-center gap-2']/span")?.InnerText ??
                              "Undefined";

            var chapterNumberMatch = chapterRex.Match(chapterNode);
            var chapterNumber = chapterNumberMatch.Success ? chapterNumberMatch.Groups[1].Value : "-1";

            return new Chapter(manga, null, null, chapterNumber, url, id);
        }).Where(elem => elem.chapterNumber != -1 && elem.url != "undefined").ToList();

        ret.Reverse();
        return ret;
    }

    public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
    {
        if (progressToken?.cancellationRequested ?? false)
        {
            progressToken.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        var chapterParentManga = chapter.parentManga;
        if (progressToken?.cancellationRequested ?? false)
        {
            progressToken.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        Log($"Retrieving chapter-info {chapter} {chapterParentManga}");

        var requestResult = downloadClient.MakeRequest(chapter.url, RequestType.Default);
        if (requestResult.htmlDocument is null)
        {
            progressToken?.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        var document = requestResult.htmlDocument;

        var imageNodes =
            document.DocumentNode.SelectNodes($"//section[@hx-get='{chapter.url}/images']/img")?.ToArray() ?? [];
        var urls = imageNodes.Select(imgNode => imgNode.GetAttributeValue("src", "")).ToArray();

        return DownloadChapterImages(urls, chapter, RequestType.MangaImage, progressToken: progressToken);
    }

    private struct SearchResult
    {
        public string i { get; set; }
        public string s { get; set; }
        public string[] a { get; set; }
    }
}
@ -14,20 +14,6 @@ public partial class Tranga : GlobalBase
    {
        Log("\n\n _______ \n|_ _|.----..---.-..-----..-----..---.-.\n | | | _|| _ || || _ || _ |\n |___| |__| |___._||__|__||___ ||___._|\n |_____| \n\n");
        keepRunning = true;
        _connectors = new HashSet<MangaConnector>()
        {
            new Manganato(this),
            new Mangasee(this),
            new MangaDex(this),
            new MangaKatana(this),
            new Mangaworld(this),
            new Bato(this),
            new MangaLife(this),
            new ManhuaPlus(this),
            new MangaHere(this),
            new AsuraToon(this),
            new Weebcentral(this)
        };
        foreach(DirectoryInfo dir in new DirectoryInfo(Path.GetTempPath()).GetDirectories("trangatemp"))//Cleanup old temp folders
            dir.Delete();
        jobBoss = new(this, this._connectors);