#68 Because XML is sometimes broken, we parse from somewhere else

Also fixed the faulty URL completion.
glax 2023-10-20 15:01:55 +02:00
parent f63851d95d
commit 7219641859

@@ -58,7 +58,7 @@ public class MangaLife : MangaConnector
 foreach (HtmlNode resultNode in resultsNode.SelectNodes("div"))
 {
     string url = resultNode.Descendants().First(d => d.HasClass("SeriesName")).GetAttributeValue("href", "");
-    Manga? manga = GetMangaFromUrl($"https://mangasee123.com{url}");
+    Manga? manga = GetMangaFromUrl($"https://manga4life.com{url}");
     if (manga is not null)
         ret.Add((Manga)manga);
 }
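
The search-result fix is purely in the URL completion: the href read from the SeriesName node is site-relative, so it has to be prefixed with the host this connector actually scrapes, manga4life.com, rather than the mangasee123.com host used before. A minimal sketch of that completion, assuming HtmlAgilityPack (the library behind HtmlNode, HasClass and GetAttributeValue here); the markup and title are made up for illustration:

// Rough sketch of the corrected href completion, assuming HtmlAgilityPack;
// the markup and the title "Some Manga" are made up for illustration.
using System;
using System.Linq;
using HtmlAgilityPack;

class SearchHrefSketch
{
    static void Main()
    {
        HtmlDocument doc = new();
        doc.LoadHtml("<div><a class=\"SeriesName\" href=\"/manga/Some-Manga\">Some Manga</a></div>");

        // Same lookup as the connector: the first descendant carrying the SeriesName class.
        string href = doc.DocumentNode.Descendants()
            .First(d => d.HasClass("SeriesName"))
            .GetAttributeValue("href", "");

        // The href is site-relative, so complete it against manga4life.com,
        // not the mangasee123.com host the old code used.
        Console.WriteLine($"https://manga4life.com{href}");   // -> https://manga4life.com/manga/Some-Manga
    }
}
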
@@ -120,23 +120,23 @@ public class MangaLife : MangaConnector
 public override Chapter[] GetChapters(Manga manga, string language="en")
 {
     Log($"Getting chapters {manga}");
-    DownloadClient.RequestResult result = downloadClient.MakeRequest($"https://manga4life.com/rss/{manga.publicationId}.xml", 1);
-    if ((int)result.statusCode < 200 || (int)result.statusCode >= 300)
+    DownloadClient.RequestResult result = downloadClient.MakeRequest($"https://manga4life.com/manga/{manga.publicationId}", 1);
+    if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
     {
+        Log("Failed to load chapterinfo");
         return Array.Empty<Chapter>();
     }
-    StreamReader sr = new (result.result);
-    string unformattedString = sr.ReadToEnd();
-    Regex urlRex = new(@"(https:\/\/manga4life.com/read-online/[A-z0-9\-]+\.html)");
-    string[] urls = urlRex.Matches(unformattedString).Select(match => match.Groups[1].Value).ToArray();
+    HtmlNodeCollection chapterNodes = result.htmlDocument.DocumentNode.SelectNodes(
+        "//a[contains(concat(' ',normalize-space(@class),' '),' ChapterLink ')]");
+    string[] urls = chapterNodes.Select(node => node.GetAttributeValue("href", "")).ToArray();
     List<Chapter> chapters = new();
     foreach (string url in urls)
     {
         string volumeNumber = "1";
         string chapterNumber = Regex.Match(url, @"-chapter-([0-9\.]+)").Groups[1].ToString();
-        string fullUrl = url.Replace(Regex.Match(url,"(-page-[0-9])").Value,"");
+        string fullUrl = $"https://manga4life.com{url}";
+        fullUrl = fullUrl.Replace(Regex.Match(url,"(-page-[0-9])").Value,"");
         chapters.Add(new Chapter(manga, "", volumeNumber, chapterNumber, fullUrl));
     }
     //Return Chapters ordered by Chapter-Number
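
The rewritten GetChapters walks the manga page itself instead of the RSS feed: it selects every anchor carrying the ChapterLink class, completes the relative hrefs against manga4life.com, strips the trailing -page-N so the link points at the chapter rather than a single page of it, and pulls the chapter number out of the slug. The following is a self-contained sketch of that flow, assuming HtmlAgilityPack and substituting HtmlWeb for the project's DownloadClient; the publication id "Some-Manga" is hypothetical:

// Minimal sketch of the new chapter discovery, assuming HtmlAgilityPack;
// "Some-Manga" is a hypothetical publicationId used only for illustration.
using System;
using System.Linq;
using System.Text.RegularExpressions;
using HtmlAgilityPack;

class ChapterLinkSketch
{
    static void Main()
    {
        HtmlWeb web = new();                     // stand-in for the project's DownloadClient
        HtmlDocument doc = web.Load("https://manga4life.com/manga/Some-Manga");

        // Select every anchor whose class list contains the ChapterLink token.
        HtmlNodeCollection chapterNodes = doc.DocumentNode.SelectNodes(
            "//a[contains(concat(' ',normalize-space(@class),' '),' ChapterLink ')]");
        if (chapterNodes is null)
            return;                              // layout changed or the request was blocked

        foreach (string href in chapterNodes.Select(n => n.GetAttributeValue("href", "")))
        {
            // hrefs are site-relative: complete them, then drop the "-page-N" suffix
            // so the URL points at the chapter rather than a single page of it.
            string fullUrl = $"https://manga4life.com{href}";
            string pageSuffix = Regex.Match(href, "-page-[0-9]").Value;
            if (pageSuffix.Length > 0)           // String.Replace rejects an empty oldValue
                fullUrl = fullUrl.Replace(pageSuffix, "");

            string chapterNumber = Regex.Match(href, @"-chapter-([0-9\.]+)").Groups[1].Value;
            Console.WriteLine($"{chapterNumber}\t{fullUrl}");
        }
    }
}

The concat(' ', normalize-space(@class), ' ') wrapper in the XPath matches ChapterLink as a whole class token rather than a substring, so an unrelated class that merely contains that text would not be picked up.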