Merge pull request #199 from C9Glax/cuttingedge
Merge cuttingedge to latest.
Commit: eba511749b
@ -59,7 +59,6 @@ Notifications can be sent to your devices using [Gotify](https://gotify.net/) an
Tranga (this git-repo) will open a port (standard 6531) and listen for requests to add Jobs to Monitor and/or download specific Manga.
The configuration is all done through HTTP-Requests.
The frontend in this repo is **CLI**-based.
_**For a web-frontend use [tranga-website](https://github.com/C9Glax/tranga-website).**_

This project downloads the images for a Manga from the specified Scanlation-Website and packages them with some metadata - from that same website - in a .cbz-archive (per chapter).
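Because the configuration happens over plain HTTP, any client that can reach the listening port can add jobs. The sketch below is only an illustration: the route `/Jobs/MonitorManga` and its query parameters are hypothetical placeholders (the real route names live in `Tranga.Server` and are consumed by tranga-website); only the default port 6531 comes from the text above.

```csharp
// Hedged sketch: the route and parameter names below are hypothetical,
// not Tranga's documented API; only the default port 6531 is from the README.
using System;
using System.Net.Http;

using HttpClient client = new();
HttpResponseMessage response = await client.PostAsync(
    "http://localhost:6531/Jobs/MonitorManga?connector=MangaDex&internalId=some-manga-id&interval=03:00:00",
    content: null);
Console.WriteLine(response.StatusCode);
```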
@ -86,6 +85,7 @@ That is why I wanted to create my own project, in a language I understand, and t
- Newtonsoft.JSON
- [PuppeteerSharp](https://www.puppeteersharp.com/)
- [Html Agility Pack (HAP)](https://html-agility-pack.net/)
- [Soenneker.Utils.String.NeedlemanWunsch](https://github.com/soenneker/soenneker.utils.string.needlemanwunsch)
- 💙 Blåhaj 🦈

<p align="right">(<a href="#readme-top">back to top</a>)</p>

@ -91,18 +91,22 @@ public readonly struct Chapter : IComparable
{
if (!Directory.Exists(Path.Join(downloadLocation, parentManga.folderName)))
return false;
FileInfo[] archives = new DirectoryInfo(Path.Join(downloadLocation, parentManga.folderName)).GetFiles();
FileInfo[] archives = new DirectoryInfo(Path.Join(downloadLocation, parentManga.folderName)).GetFiles().Where(file => file.Name.Split('.')[^1] == "cbz").ToArray();
Regex volChRex = new(@"(?:Vol(?:ume)?\.([0-9]+)\D*)?Ch(?:apter)?\.([0-9]+(?:\.[0-9]+)*)");

Chapter t = this;
return archives.Select(archive => archive.Name).Any(archiveFileName =>
string thisPath = GetArchiveFilePath(downloadLocation);
FileInfo? archive = archives.FirstOrDefault(archive =>
{
Match m = volChRex.Match(archiveFileName);
Match m = volChRex.Match(archive.Name);
string archiveVolNum = m.Groups[1].Success ? m.Groups[1].Value : "0";
string archiveChNum = m.Groups[2].Value;
return archiveVolNum == t.volumeNumber &&
archiveChNum == t.chapterNumber;
return archiveVolNum == t.volumeNumber && archiveChNum == t.chapterNumber ||
archiveVolNum == "0" && archiveChNum == t.chapterNumber;
});
if(archive is not null && thisPath != archive.FullName)
archive.MoveTo(thisPath);
return archive is not null;
}
/// <summary>
/// Creates full file path of chapter-archive
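The rewritten check now filters the directory to `.cbz` files, looks for at most one archive whose volume/chapter groups match (treating a missing volume as "0"), and renames that archive onto the canonical path from `GetArchiveFilePath`. A stand-alone sketch of how the `volChRex` pattern groups file names; the sample names are invented, only the pattern is taken from the diff:

```csharp
using System;
using System.Text.RegularExpressions;

// Only the pattern below comes from the diff; the file names are invented samples.
Regex volChRex = new(@"(?:Vol(?:ume)?\.([0-9]+)\D*)?Ch(?:apter)?\.([0-9]+(?:\.[0-9]+)*)");
foreach (string name in new[] { "SomeManga Vol.3 Ch.12.5.cbz", "SomeManga Ch.7.cbz" })
{
    Match m = volChRex.Match(name);
    string vol = m.Groups[1].Success ? m.Groups[1].Value : "0"; // missing volume falls back to "0"
    string ch = m.Groups[2].Value;
    Console.WriteLine($"{name} -> volume {vol}, chapter {ch}");  // 3 / 12.5, then 0 / 7
}
```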

@ -14,7 +14,7 @@ public abstract class GlobalBase
protected TrangaSettings settings { get; init; }
protected HashSet<NotificationConnector> notificationConnectors { get; init; }
protected HashSet<LibraryConnector> libraryConnectors { get; init; }
protected List<Manga> cachedPublications { get; init; }
private Dictionary<string, Manga> cachedPublications { get; init; }
public static readonly NumberFormatInfo numberFormatDecimalPoint = new (){ NumberDecimalSeparator = "." };
protected static readonly Regex baseUrlRex = new(@"https?:\/\/[0-9A-z\.-]+(:[0-9]+)?");

@ -36,6 +36,29 @@ public abstract class GlobalBase
this.cachedPublications = new();
}

protected void AddMangaToCache(Manga manga)
{
if (!this.cachedPublications.TryAdd(manga.internalId, manga))
{
Log($"Overwriting Manga {manga.internalId}");
this.cachedPublications[manga.internalId] = manga;
}
}

protected Manga? GetCachedManga(string internalId)
{
return cachedPublications.TryGetValue(internalId, out Manga manga) switch
{
true => manga,
_ => null
};
}

protected IEnumerable<Manga> GetAllCachedManga()
{
return cachedPublications.Values;
}

protected void Log(string message)
{
logger?.WriteLine(this.GetType().Name, message);
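Replacing the `List<Manga>` with a `Dictionary<string, Manga>` keyed by `internalId` makes cache lookups constant-time and gives overwrite semantics when a manga is re-parsed. A reduced sketch of the `TryAdd`-then-overwrite pattern used by `AddMangaToCache`; the value type is simplified to `string` and the ids are made up:

```csharp
using System;
using System.Collections.Generic;

Dictionary<string, string> cache = new();

void AddToCache(string internalId, string manga)
{
    if (!cache.TryAdd(internalId, manga)) // TryAdd returns false when the key already exists...
        cache[internalId] = manga;        // ...so the entry is overwritten (and logged in the real code)
}

AddToCache("mangadex-1234", "first parse");
AddToCache("mangadex-1234", "re-parse with fresh metadata");
Console.WriteLine(cache["mangadex-1234"]); // re-parse with fresh metadata
```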
@ -150,39 +150,53 @@ public class JobBoss : GlobalBase
|
||||
//Load json-job-files
|
||||
foreach (FileInfo file in new DirectoryInfo(settings.jobsFolderPath).EnumerateFiles().Where(fileInfo => idRex.IsMatch(fileInfo.Name)))
|
||||
{
|
||||
Job job = JsonConvert.DeserializeObject<Job>(File.ReadAllText(file.FullName),
|
||||
new JobJsonConverter(this, new MangaConnectorJsonConverter(this, connectors)))!;
|
||||
Log($"Adding {file.Name}");
|
||||
Job? job = JsonConvert.DeserializeObject<Job>(File.ReadAllText(file.FullName),
|
||||
new JobJsonConverter(this, new MangaConnectorJsonConverter(this, connectors)));
|
||||
if (job is null)
|
||||
{
|
||||
string newName = file.FullName + ".failed";
|
||||
Log($"Failed loading file {file.Name}.\nMoving to {newName}");
|
||||
File.Move(file.FullName, newName);
|
||||
}
|
||||
else
|
||||
{
|
||||
Log($"Adding Job {job}");
|
||||
this.jobs.Add(job);
|
||||
}
|
||||
}
|
||||
|
||||
//Connect jobs to parent-jobs and add Publications to cache
|
||||
foreach (Job job in this.jobs)
|
||||
{
|
||||
this.jobs.FirstOrDefault(jjob => jjob.id == job.parentJobId)?.AddSubJob(job);
|
||||
Log($"Loading Job {job}");
|
||||
Job? parentJob = this.jobs.FirstOrDefault(jjob => jjob.id == job.parentJobId);
|
||||
if (parentJob is not null)
|
||||
{
|
||||
parentJob.AddSubJob(job);
|
||||
Log($"Parent Job {parentJob}");
|
||||
}
|
||||
if (job is DownloadNewChapters dncJob)
|
||||
cachedPublications.Add(dncJob.manga);
|
||||
AddMangaToCache(dncJob.manga);
|
||||
}
|
||||
|
||||
HashSet<string> coverFileNames = cachedPublications.Select(manga => manga.coverFileNameInCache!).ToHashSet();
|
||||
foreach (string fileName in Directory.GetFiles(settings.coverImageCache)) //Cleanup Unused Covers
|
||||
{
|
||||
if(!coverFileNames.Any(existingManga => fileName.Contains(existingManga)))
|
||||
string[] coverFiles = Directory.GetFiles(settings.coverImageCache);
|
||||
foreach(string fileName in coverFiles.Where(fileName => !GetAllCachedManga().Any(manga => manga.coverFileNameInCache == fileName)))
|
||||
File.Delete(fileName);
|
||||
}
|
||||
}
|
||||
|
||||
private void UpdateJobFile(Job job)
|
||||
internal void UpdateJobFile(Job job, string? oldFile = null)
|
||||
{
|
||||
string jobFilePath = Path.Join(settings.jobsFolderPath, $"{job.id}.json");
|
||||
string newJobFilePath = Path.Join(settings.jobsFolderPath, $"{job.id}.json");
|
||||
|
||||
if (!this.jobs.Any(jjob => jjob.id == job.id))
|
||||
{
|
||||
try
|
||||
{
|
||||
Log($"Deleting Job-file {jobFilePath}");
|
||||
while(IsFileInUse(jobFilePath))
|
||||
Log($"Deleting Job-file {newJobFilePath}");
|
||||
while(IsFileInUse(newJobFilePath))
|
||||
Thread.Sleep(10);
|
||||
File.Delete(jobFilePath);
|
||||
File.Delete(newJobFilePath);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
@ -191,11 +205,24 @@ public class JobBoss : GlobalBase
|
||||
}
|
||||
else
|
||||
{
|
||||
Log($"Exporting Job {jobFilePath}");
|
||||
string jobStr = JsonConvert.SerializeObject(job);
|
||||
while(IsFileInUse(jobFilePath))
|
||||
Log($"Exporting Job {newJobFilePath}");
|
||||
string jobStr = JsonConvert.SerializeObject(job, Formatting.Indented);
|
||||
while(IsFileInUse(newJobFilePath))
|
||||
Thread.Sleep(10);
|
||||
File.WriteAllText(jobFilePath, jobStr);
|
||||
File.WriteAllText(newJobFilePath, jobStr);
|
||||
}
|
||||
|
||||
if(oldFile is not null)
|
||||
try
|
||||
{
|
||||
Log($"Deleting old Job-file {oldFile}");
|
||||
while(IsFileInUse(oldFile))
|
||||
Thread.Sleep(10);
|
||||
File.Delete(oldFile);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log(e.ToString());
|
||||
}
|
||||
}
|
||||
|
||||
@ -245,7 +272,9 @@ public class JobBoss : GlobalBase
|
||||
Log($"Next job in {jobs.MinBy(job => job.nextExecution)?.nextExecution.Subtract(DateTime.Now)} {jobs.MinBy(job => job.nextExecution)?.id}");
|
||||
}else if (queueHead.progressToken.state is ProgressToken.State.Standby)
|
||||
{
|
||||
Job[] subJobs = jobQueue.Peek().ExecuteReturnSubTasks(this).ToArray();
|
||||
Job eJob = jobQueue.Peek();
|
||||
Job[] subJobs = eJob.ExecuteReturnSubTasks(this).ToArray();
|
||||
UpdateJobFile(eJob);
|
||||
AddJobs(subJobs);
|
||||
AddJobsToQueue(subJobs);
|
||||
}else if (queueHead.progressToken.state is ProgressToken.State.Running && DateTime.Now.Subtract(queueHead.progressToken.lastUpdate) > TimeSpan.FromMinutes(5))
|
||||
|
@ -33,8 +33,26 @@ public class UpdateMetadata : Job
|
||||
return Array.Empty<Job>();
|
||||
}
|
||||
|
||||
this.manga.UpdateMetadata(updatedManga);
|
||||
this.manga = manga.WithMetadata(updatedManga);
|
||||
this.manga.SaveSeriesInfoJson(settings.downloadLocation, true);
|
||||
this.mangaConnector.CopyCoverFromCacheToDownloadLocation(manga);
|
||||
foreach (Job job in jobBoss.GetJobsLike(publication: this.manga))
|
||||
{
|
||||
string oldFile;
|
||||
if (job is DownloadNewChapters dc)
|
||||
{
|
||||
oldFile = dc.id;
|
||||
dc.manga = this.manga;
|
||||
}
|
||||
else if (job is UpdateMetadata um)
|
||||
{
|
||||
oldFile = um.id;
|
||||
um.manga = this.manga;
|
||||
}
|
||||
else
|
||||
continue;
|
||||
jobBoss.UpdateJobFile(job, oldFile);
|
||||
}
|
||||
this.progressToken.Complete();
|
||||
}
|
||||
else
|
||||
|
@ -68,6 +68,14 @@ public class Kavita : LibraryConnector
|
||||
NetClient.MakePost($"{baseUrl}/api/Library/scan?libraryId={lib.id}", "Bearer", auth, logger);
|
||||
}
|
||||
|
||||
internal override bool Test()
|
||||
{
|
||||
foreach (KavitaLibrary lib in GetLibraries())
|
||||
if (NetClient.MakePost($"{baseUrl}/api/Library/scan?libraryId={lib.id}", "Bearer", auth, logger))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Fetches all libraries available to the user
|
||||
/// </summary>
|
||||
|
@ -32,6 +32,14 @@ public class Komga : LibraryConnector
|
||||
NetClient.MakePost($"{baseUrl}/api/v1/libraries/{lib.id}/scan", "Basic", auth, logger);
|
||||
}
|
||||
|
||||
internal override bool Test()
|
||||
{
|
||||
foreach (KomgaLibrary lib in GetLibraries())
|
||||
if (NetClient.MakePost($"{baseUrl}/api/v1/libraries/{lib.id}/scan", "Basic", auth, logger))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Fetches all libraries available to the user
|
||||
/// </summary>
|
||||
|
@ -30,6 +30,7 @@ public abstract class LibraryConnector : GlobalBase
|
||||
this.libraryType = libraryType;
|
||||
}
|
||||
public abstract void UpdateLibrary();
|
||||
internal abstract bool Test();
|
||||
|
||||
protected static class NetClient
|
||||
{
|
||||
|
@ -12,15 +12,15 @@ namespace Tranga;
|
||||
public struct Manga
|
||||
{
|
||||
public string sortName { get; private set; }
|
||||
public List<string> authors { get; }
|
||||
public List<string> authors { get; private set; }
|
||||
// ReSharper disable once UnusedAutoPropertyAccessor.Global
|
||||
public Dictionary<string,string> altTitles { get; }
|
||||
public Dictionary<string,string> altTitles { get; private set; }
|
||||
// ReSharper disable once MemberCanBePrivate.Global
|
||||
public string? description { get; private set; }
|
||||
public string[] tags { get; }
|
||||
public string[] tags { get; private set; }
|
||||
// ReSharper disable once UnusedAutoPropertyAccessor.Global
|
||||
public string? coverUrl { get; }
|
||||
public string? coverFileNameInCache { get; }
|
||||
public string? coverUrl { get; private set; }
|
||||
public string? coverFileNameInCache { get; private set; }
|
||||
// ReSharper disable once UnusedAutoPropertyAccessor.Global
|
||||
public Dictionary<string,string> links { get; }
|
||||
// ReSharper disable once MemberCanBePrivate.Global
|
||||
@ -28,7 +28,7 @@ public struct Manga
|
||||
public string? originalLanguage { get; }
|
||||
// ReSharper disable twice MemberCanBePrivate.Global
|
||||
public string status { get; private set; }
|
||||
public ReleaseStatusByte releaseStatus { get; }
|
||||
public ReleaseStatusByte releaseStatus { get; private set; }
|
||||
public enum ReleaseStatusByte : byte
|
||||
{
|
||||
Continuing = 0,
|
||||
@ -44,10 +44,12 @@ public struct Manga
|
||||
public float latestChapterDownloaded { get; set; }
|
||||
public float latestChapterAvailable { get; set; }
|
||||
|
||||
public string? websiteUrl { get; private set; }
|
||||
|
||||
private static readonly Regex LegalCharacters = new (@"[A-Za-zÀ-ÖØ-öø-ÿ0-9 \.\-,'\'\)\(~!\+]*");
|
||||
|
||||
[JsonConstructor]
|
||||
public Manga(string sortName, List<string> authors, string? description, Dictionary<string,string> altTitles, string[] tags, string? coverUrl, string? coverFileNameInCache, Dictionary<string,string>? links, int? year, string? originalLanguage, string status, string publicationId, ReleaseStatusByte releaseStatus = 0, string? websiteUrl = null, string? folderName = null, float? ignoreChaptersBelow = 0)
|
||||
public Manga(string sortName, List<string> authors, string? description, Dictionary<string,string> altTitles, string[] tags, string? coverUrl, string? coverFileNameInCache, Dictionary<string,string>? links, int? year, string? originalLanguage, string publicationId, ReleaseStatusByte releaseStatus, string? websiteUrl = null, string? folderName = null, float? ignoreChaptersBelow = 0)
|
||||
{
|
||||
this.sortName = sortName;
|
||||
this.authors = authors;
|
||||
@ -59,7 +61,6 @@ public struct Manga
|
||||
this.links = links ?? new Dictionary<string, string>();
|
||||
this.year = year;
|
||||
this.originalLanguage = originalLanguage;
|
||||
this.status = status;
|
||||
this.publicationId = publicationId;
|
||||
this.folderName = folderName ?? string.Concat(LegalCharacters.Matches(sortName));
|
||||
while (this.folderName.EndsWith('.'))
|
||||
@ -70,17 +71,26 @@ public struct Manga
|
||||
this.latestChapterDownloaded = 0;
|
||||
this.latestChapterAvailable = 0;
|
||||
this.releaseStatus = releaseStatus;
|
||||
this.status = Enum.GetName(releaseStatus) ?? "";
|
||||
this.websiteUrl = websiteUrl;
|
||||
}
|
||||
|
||||
public void UpdateMetadata(Manga newManga)
|
||||
public Manga WithMetadata(Manga newManga)
|
||||
{
|
||||
this.sortName = newManga.sortName;
|
||||
this.description = newManga.description;
|
||||
foreach (string author in newManga.authors)
|
||||
if(!this.authors.Contains(author))
|
||||
this.authors.Add(author);
|
||||
this.status = newManga.status;
|
||||
this.year = newManga.year;
|
||||
return this with
|
||||
{
|
||||
sortName = newManga.sortName,
|
||||
description = newManga.description,
|
||||
coverUrl = newManga.coverUrl,
|
||||
authors = authors.Union(newManga.authors).ToList(),
|
||||
altTitles = altTitles.UnionBy(newManga.altTitles, kv => kv.Key).ToDictionary(x => x.Key, x => x.Value),
|
||||
tags = tags.Union(newManga.tags).ToArray(),
|
||||
status = newManga.status,
|
||||
releaseStatus = newManga.releaseStatus,
|
||||
websiteUrl = newManga.websiteUrl,
|
||||
year = newManga.year,
|
||||
coverFileNameInCache = newManga.coverFileNameInCache
|
||||
};
|
||||
}
|
||||
|
||||
public override bool Equals(object? obj)
|
||||
@ -93,7 +103,10 @@ public struct Manga
|
||||
this.releaseStatus == compareManga.releaseStatus &&
|
||||
this.sortName == compareManga.sortName &&
|
||||
this.latestChapterAvailable.Equals(compareManga.latestChapterAvailable) &&
|
||||
this.tags.SequenceEqual(compareManga.tags);
|
||||
this.authors.All(a => compareManga.authors.Contains(a)) &&
|
||||
(this.coverFileNameInCache??"").Equals(compareManga.coverFileNameInCache) &&
|
||||
(this.websiteUrl??"").Equals(compareManga.websiteUrl) &&
|
||||
this.tags.All(t => compareManga.tags.Contains(t));
|
||||
}
|
||||
|
||||
public override string ToString()
|
||||
@ -168,38 +181,22 @@ public struct Manga
|
||||
[JsonRequired]public string year { get; }
|
||||
[JsonRequired]public string status { get; }
|
||||
[JsonRequired]public string description_text { get; }
|
||||
[JsonIgnore] public static string[] continuing = new[]
|
||||
{
|
||||
"ongoing",
|
||||
"hiatus",
|
||||
"in corso",
|
||||
"in pausa"
|
||||
};
|
||||
[JsonIgnore] public static string[] ended = new[]
|
||||
{
|
||||
"completed",
|
||||
"cancelled",
|
||||
"discontinued",
|
||||
"finito",
|
||||
"cancellato",
|
||||
"droppato"
|
||||
};
|
||||
|
||||
public Metadata(Manga manga) : this(manga.sortName, manga.year.ToString() ?? string.Empty, manga.status, manga.description ?? "")
|
||||
public Metadata(Manga manga) : this(manga.sortName, manga.year.ToString() ?? string.Empty, manga.releaseStatus, manga.description ?? "")
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
public Metadata(string name, string year, string status, string description_text)
|
||||
public Metadata(string name, string year, ReleaseStatusByte status, string description_text)
|
||||
{
|
||||
this.name = name;
|
||||
this.year = year;
|
||||
if(continuing.Contains(status.ToLower()))
|
||||
this.status = "Continuing";
|
||||
else if(ended.Contains(status.ToLower()))
|
||||
this.status = "Ended";
|
||||
else
|
||||
this.status = status;
|
||||
this.status = status switch
|
||||
{
|
||||
ReleaseStatusByte.Continuing => "Continuing",
|
||||
ReleaseStatusByte.Completed => "Ended",
|
||||
_ => Enum.GetName(status) ?? "Ended"
|
||||
};
|
||||
this.description_text = description_text;
|
||||
|
||||
//kill it with fire, but otherwise Komga will not parse
|
||||
|
@ -49,7 +49,7 @@ public class Bato : MangaConnector
|
||||
Log($"Failed to retrieve site");
|
||||
return null;
|
||||
}
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1]);
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
@ -72,7 +72,7 @@ public class Bato : MangaConnector
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId)
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
HtmlNode infoNode = document.DocumentNode.SelectSingleNode("/html/body/div/main/div[1]/div[2]");
|
||||
|
||||
@ -86,7 +86,7 @@ public class Bato : MangaConnector
|
||||
|
||||
string posterUrl = document.DocumentNode.SelectNodes("//img")
|
||||
.First(child => child.GetAttributeValue("data-hk", "") == "0-1-0").GetAttributeValue("src", "").Replace("&", "&");
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, RequestType.MangaCover);
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
List<HtmlNode> genreNodes = document.DocumentNode.SelectSingleNode("//b[text()='Genres:']/..").SelectNodes("span").ToList();
|
||||
string[] tags = genreNodes.Select(node => node.FirstChild.InnerText).ToArray();
|
||||
@ -115,8 +115,8 @@ public class Bato : MangaConnector
|
||||
}
|
||||
|
||||
Manga manga = new (sortName, authors, description, altTitles, tags, posterUrl, coverFileNameInCache, new Dictionary<string, string>(),
|
||||
year, originalLanguage, status, publicationId, releaseStatus);
|
||||
cachedPublications.Add(manga);
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
|
@ -175,14 +175,14 @@ public abstract class MangaConnector : GlobalBase
|
||||
return;
|
||||
}
|
||||
|
||||
string fileInCache = Path.Join(settings.coverImageCache, manga.coverFileNameInCache);
|
||||
if (!File.Exists(fileInCache))
|
||||
string? fileInCache = manga.coverFileNameInCache;
|
||||
if (fileInCache is null || !File.Exists(fileInCache))
|
||||
{
|
||||
Log($"Cloning cover failed: File missing {fileInCache}.");
|
||||
if (retries > 0 && manga.coverUrl is not null)
|
||||
{
|
||||
Log($"Trying {retries} more times");
|
||||
SaveCoverImageToCache(manga.coverUrl, 0);
|
||||
SaveCoverImageToCache(manga.coverUrl, manga.internalId, 0);
|
||||
CopyCoverFromCacheToDownloadLocation(manga, --retries);
|
||||
}
|
||||
|
||||
@ -285,20 +285,23 @@ public abstract class MangaConnector : GlobalBase
return HttpStatusCode.OK;
}

protected string SaveCoverImageToCache(string url, RequestType requestType)
protected string SaveCoverImageToCache(string url, string mangaInternalId, RequestType requestType)
{
string filetype = url.Split('/')[^1].Split('?')[0].Split('.')[^1];
string filename = $"{DateTime.Now.Ticks.ToString()}.{filetype}";
Regex urlRex = new (@"https?:\/\/((?:[a-zA-Z0-9-]+\.)+[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+))");
//https?:\/\/[a-zA-Z0-9-]+\.([a-zA-Z0-9-]+\.[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+)) for only second level domains
Match match = urlRex.Match(url);
string filename = $"{match.Groups[1].Value}-{mangaInternalId}.{match.Groups[3].Value}";
string saveImagePath = Path.Join(settings.coverImageCache, filename);

if (File.Exists(saveImagePath))
return filename;
return saveImagePath;

RequestResult coverResult = downloadClient.MakeRequest(url, requestType);
using MemoryStream ms = new();
coverResult.result.CopyTo(ms);
Directory.CreateDirectory(settings.coverImageCache);
File.WriteAllBytes(saveImagePath, ms.ToArray());
Log($"Saving cover to {saveImagePath}");
return filename;
return saveImagePath;
}
}
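Covers are now cached under a deterministic `{host}-{internalId}.{extension}` name instead of a `DateTime.Now.Ticks` name, so the `File.Exists` check can actually short-circuit repeat downloads, and the method returns the full path rather than just the file name. A quick sketch of what the URL regex yields; the URL and internal id are invented examples, only the regex comes from the diff:

```csharp
using System;
using System.Text.RegularExpressions;

// Only the regex is from the diff; the URL and internal id are invented examples.
Regex urlRex = new(@"https?:\/\/((?:[a-zA-Z0-9-]+\.)+[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+))");
Match match = urlRex.Match("https://uploads.mangadex.org/covers/some-id/cover.jpg");
string filename = $"{match.Groups[1].Value}-someInternalId.{match.Groups[3].Value}";
Console.WriteLine(filename); // uploads.mangadex.org-someInternalId.jpg
```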
@ -115,8 +115,8 @@ public class MangaDex : MangaConnector
|
||||
};
|
||||
|
||||
Dictionary<string, string> linksDict = new();
|
||||
if (attributes.TryGetPropertyValue("links", out JsonNode? linksNode))
|
||||
foreach (KeyValuePair<string, JsonNode> linkKv in linksNode!.AsObject())
|
||||
if (attributes.TryGetPropertyValue("links", out JsonNode? linksNode) && linksNode is not null)
|
||||
foreach (KeyValuePair<string, JsonNode?> linkKv in linksNode!.AsObject())
|
||||
linksDict.TryAdd(linkKv.Key, linkKv.Value.GetValue<string>());
|
||||
|
||||
string? originalLanguage =
|
||||
@ -160,7 +160,7 @@ public class MangaDex : MangaConnector
|
||||
return null;
|
||||
string fileName = coverNode["attributes"]!["fileName"]!.GetValue<string>();
|
||||
string coverUrl = $"https://uploads.mangadex.org/covers/{publicationId}/{fileName}";
|
||||
string coverCacheName = SaveCoverImageToCache(coverUrl, RequestType.MangaCover);
|
||||
string coverCacheName = SaveCoverImageToCache(coverUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
List<string> authors = new();
|
||||
JsonNode?[] authorNodes = relationshipsNode.AsArray()
|
||||
@ -183,11 +183,11 @@ public class MangaDex : MangaConnector
|
||||
linksDict,
|
||||
year,
|
||||
originalLanguage,
|
||||
Enum.GetName(status) ?? "",
|
||||
publicationId,
|
||||
status
|
||||
status,
|
||||
websiteUrl: $"https://mangadex.org/title/{publicationId}"
|
||||
);
|
||||
cachedPublications.Add(pub);
|
||||
AddMangaToCache(pub);
|
||||
return pub;
|
||||
}
|
||||
|
||||
|
@ -28,7 +28,7 @@ public class MangaKatana : MangaConnector
|
||||
&& requestResult.redirectedToUrl is not null
|
||||
&& requestResult.redirectedToUrl.Contains("mangakatana.com/manga"))
|
||||
{
|
||||
return new [] { ParseSinglePublicationFromHtml(requestResult.result, requestResult.redirectedToUrl.Split('/')[^1]) };
|
||||
return new [] { ParseSinglePublicationFromHtml(requestResult.result, requestResult.redirectedToUrl.Split('/')[^1], requestResult.redirectedToUrl) };
|
||||
}
|
||||
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.result);
|
||||
@ -47,7 +47,7 @@ public class MangaKatana : MangaConnector
|
||||
downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return null;
|
||||
return ParseSinglePublicationFromHtml(requestResult.result, url.Split('/')[^1]);
|
||||
return ParseSinglePublicationFromHtml(requestResult.result, url.Split('/')[^1], url);
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(Stream html)
|
||||
@ -77,13 +77,12 @@ public class MangaKatana : MangaConnector
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(Stream html, string publicationId)
|
||||
private Manga ParseSinglePublicationFromHtml(Stream html, string publicationId, string websiteUrl)
|
||||
{
|
||||
StreamReader reader = new(html);
|
||||
string htmlString = reader.ReadToEnd();
|
||||
HtmlDocument document = new();
|
||||
document.LoadHtml(htmlString);
|
||||
string status = "";
|
||||
Dictionary<string, string> altTitles = new();
|
||||
Dictionary<string, string>? links = null;
|
||||
HashSet<string> tags = new();
|
||||
@ -112,8 +111,7 @@ public class MangaKatana : MangaConnector
|
||||
authors = value.Split(',');
|
||||
break;
|
||||
case "status":
|
||||
status = value;
|
||||
switch (status.ToLower())
|
||||
switch (value.ToLower())
|
||||
{
|
||||
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
|
||||
case "completed": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
|
||||
@ -128,7 +126,7 @@ public class MangaKatana : MangaConnector
|
||||
string posterUrl = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[1]/div").Descendants("img").First()
|
||||
.GetAttributes().First(a => a.Name == "src").Value;
|
||||
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, RequestType.MangaCover);
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
string description = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[3]/p").InnerText;
|
||||
while (description.StartsWith('\n'))
|
||||
@ -144,8 +142,8 @@ public class MangaKatana : MangaConnector
|
||||
}
|
||||
|
||||
Manga manga = new (sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
|
||||
year, originalLanguage, status, publicationId, releaseStatus);
|
||||
cachedPublications.Add(manga);
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
|
@ -41,7 +41,7 @@ public class MangaLife : MangaConnector
|
||||
|
||||
RequestResult requestResult = this.downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if(requestResult.htmlDocument is not null)
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId);
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
|
||||
return null;
|
||||
}
|
||||
|
||||
@ -69,7 +69,7 @@ public class MangaLife : MangaConnector
|
||||
}
|
||||
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId)
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
string originalLanguage = "", status = "";
|
||||
Dictionary<string, string> altTitles = new(), links = new();
|
||||
@ -78,7 +78,7 @@ public class MangaLife : MangaConnector
|
||||
|
||||
HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//img");
|
||||
string posterUrl = posterNode.GetAttributeValue("src", "");
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, RequestType.MangaCover);
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//h1");
|
||||
string sortName = titleNode.InnerText;
|
||||
@ -122,8 +122,8 @@ public class MangaLife : MangaConnector
|
||||
string description = descriptionNode.InnerText;
|
||||
|
||||
Manga manga = new(sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
|
||||
coverFileNameInCache, links, year, originalLanguage, status, publicationId, releaseStatus);
|
||||
cachedPublications.Add(manga);
|
||||
coverFileNameInCache, links, year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
|
@ -65,12 +65,11 @@ public class Manganato : MangaConnector
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
return null;
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1]);
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId)
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
string status = "";
|
||||
Dictionary<string, string> altTitles = new();
|
||||
Dictionary<string, string>? links = null;
|
||||
HashSet<string> tags = new();
|
||||
@ -99,10 +98,11 @@ public class Manganato : MangaConnector
|
||||
break;
|
||||
case "authors":
|
||||
authors = value.Split('-');
|
||||
for (int i = 0; i < authors.Length; i++)
|
||||
authors[i] = authors[i].Replace("\r\n", "");
|
||||
break;
|
||||
case "status":
|
||||
status = value;
|
||||
switch (status.ToLower())
|
||||
switch (value.ToLower())
|
||||
{
|
||||
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
|
||||
case "completed": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
|
||||
@ -110,6 +110,8 @@ public class Manganato : MangaConnector
|
||||
break;
|
||||
case "genres":
|
||||
string[] genres = value.Split(" - ");
|
||||
for (int i = 0; i < genres.Length; i++)
|
||||
genres[i] = genres[i].Replace("\r\n", "");
|
||||
tags = genres.ToHashSet();
|
||||
break;
|
||||
}
|
||||
@ -118,7 +120,7 @@ public class Manganato : MangaConnector
|
||||
string posterUrl = document.DocumentNode.Descendants("span").First(s => s.HasClass("info-image")).Descendants("img").First()
|
||||
.GetAttributes().First(a => a.Name == "src").Value;
|
||||
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, RequestType.MangaCover);
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
string description = document.DocumentNode.Descendants("div").First(d => d.HasClass("panel-story-info-description"))
|
||||
.InnerText.Replace("Description :", "");
|
||||
@ -130,8 +132,8 @@ public class Manganato : MangaConnector
|
||||
int year = Convert.ToInt32(yearString.Split(',')[^1]) + 2000;
|
||||
|
||||
Manga manga = new (sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
|
||||
year, originalLanguage, status, publicationId, releaseStatus);
|
||||
cachedPublications.Add(manga);
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
|
@ -4,6 +4,7 @@ using System.Text.RegularExpressions;
|
||||
using System.Xml.Linq;
|
||||
using HtmlAgilityPack;
|
||||
using Newtonsoft.Json;
|
||||
using Soenneker.Utils.String.NeedlemanWunsch;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
@ -41,14 +42,6 @@ public class Mangasee : MangaConnector
|
||||
SearchResult[] filteredResults = FilteredResults(publicationTitle, searchResults);
|
||||
Log($"Total available manga: {searchResults.Length} Filtered down to: {filteredResults.Length}");
|
||||
|
||||
/*
|
||||
Dictionary<SearchResult, int> levenshteinRelation = filteredResults.ToDictionary(result => result,
|
||||
result =>
|
||||
{
|
||||
Log($"Levenshtein {result.s}");
|
||||
return LevenshteinDistance(publicationTitle.Replace(" ", "").ToLower(), result.s.Replace(" ", "").ToLower());
|
||||
});
|
||||
Log($"After levenshtein: {levenshteinRelation.Count}");*/
|
||||
|
||||
string[] urls = filteredResults.Select(result => $"https://mangasee123.com/manga/{result.i}").ToArray();
|
||||
List<Manga> searchResultManga = new();
|
||||
@ -70,42 +63,19 @@ public class Mangasee : MangaConnector
|
||||
|
||||
private SearchResult[] FilteredResults(string publicationTitle, SearchResult[] unfilteredSearchResults)
|
||||
{
|
||||
string[] bannedStrings = {"a", "the", "of", "as", "to", "no", "for", "on", "with", "be", "and", "in", "wa", "at"};
|
||||
string[] cleanSplitPublicationTitle = publicationTitle.Split(' ')
|
||||
.Where(part => part.Length > 0 && !bannedStrings.Contains(part.ToLower())).ToArray();
|
||||
|
||||
return unfilteredSearchResults.Where(usr =>
|
||||
Dictionary<SearchResult, int> similarity = new();
|
||||
foreach (SearchResult sr in unfilteredSearchResults)
|
||||
{
|
||||
string cleanSearchResultString = string.Join(' ', usr.s.Split(' ').Where(part => part.Length > 0 && !bannedStrings.Contains(part.ToLower())));
|
||||
foreach(string splitPublicationTitlePart in cleanSplitPublicationTitle)
|
||||
if (cleanSearchResultString.Contains(splitPublicationTitlePart, StringComparison.InvariantCultureIgnoreCase) ||
|
||||
cleanSearchResultString.Contains(splitPublicationTitlePart, StringComparison.InvariantCultureIgnoreCase))
|
||||
return true;
|
||||
return false;
|
||||
}).ToArray();
|
||||
List<int> scores = new();
|
||||
foreach (string se in sr.a)
|
||||
scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(se.ToLower(), publicationTitle.ToLower()));
|
||||
scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(sr.s.ToLower(), publicationTitle.ToLower()));
|
||||
similarity.Add(sr, scores.Sum() / scores.Count);
|
||||
}
|
||||
|
||||
private int LevenshteinDistance(string a, string b)
|
||||
{
|
||||
if (b.Length == 0)
|
||||
return a.Length;
|
||||
if (a.Length == 0)
|
||||
return b.Length;
|
||||
if (a[0] == b[0])
|
||||
return LevenshteinDistance(a[1..], b[1..]);
|
||||
SearchResult[] similarity90 = similarity.Where(s => s.Value < 10).Select(s => s.Key).ToArray();
|
||||
|
||||
int case1 = LevenshteinDistance(a, b[1..]);
|
||||
int case2 = LevenshteinDistance(a[1..], b[1..]);
|
||||
int case3 = LevenshteinDistance(a[1..], b);
|
||||
|
||||
if (case1 < case2)
|
||||
{
|
||||
return 1 + (case1 < case3 ? case1 : case3);
|
||||
}
|
||||
else
|
||||
{
|
||||
return 1 + (case2 < case3 ? case2 : case3);
|
||||
}
|
||||
return similarity90;
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
@ -120,11 +90,11 @@ public class Mangasee : MangaConnector
|
||||
|
||||
RequestResult requestResult = this.downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if((int)requestResult.statusCode < 300 && (int)requestResult.statusCode >= 200 && requestResult.htmlDocument is not null)
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId);
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
|
||||
return null;
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId)
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
string originalLanguage = "", status = "";
|
||||
Dictionary<string, string> altTitles = new(), links = new();
|
||||
@ -133,7 +103,7 @@ public class Mangasee : MangaConnector
|
||||
|
||||
HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//img");
|
||||
string posterUrl = posterNode.GetAttributeValue("src", "");
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, RequestType.MangaCover);
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//h1");
|
||||
string sortName = titleNode.InnerText;
|
||||
@ -178,8 +148,8 @@ public class Mangasee : MangaConnector
|
||||
|
||||
Manga manga = new(sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
|
||||
coverFileNameInCache, links,
|
||||
year, originalLanguage, status, publicationId, releaseStatus);
|
||||
cachedPublications.Add(manga);
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
|
@ -68,10 +68,10 @@ public class Mangaworld: MangaConnector
|
||||
|
||||
Regex idRex = new (@"https:\/\/www\.mangaworld\.[a-z]{0,63}\/manga\/([0-9]+\/[0-9A-z\-]+).*");
|
||||
string id = idRex.Match(url).Groups[1].Value;
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, id);
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, id, url);
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId)
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
Dictionary<string, string> altTitles = new();
|
||||
Dictionary<string, string>? links = null;
|
||||
@ -111,7 +111,7 @@ public class Mangaworld: MangaConnector
|
||||
|
||||
string posterUrl = document.DocumentNode.SelectSingleNode("//img[@class='rounded']").GetAttributeValue("src", "");
|
||||
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, RequestType.MangaCover);
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId.Replace('/', '-'), RequestType.MangaCover);
|
||||
|
||||
string description = document.DocumentNode.SelectSingleNode("//div[@id='noidungm']").InnerText;
|
||||
|
||||
@ -119,8 +119,8 @@ public class Mangaworld: MangaConnector
|
||||
int year = Convert.ToInt32(yearString);
|
||||
|
||||
Manga manga = new (sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
|
||||
year, originalLanguage, status, publicationId, releaseStatus);
|
||||
cachedPublications.Add(manga);
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
@ -153,10 +153,13 @@ public class Mangaworld: MangaConnector
|
||||
{
|
||||
foreach (HtmlNode volNode in document.DocumentNode.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),'volume-element')]"))
|
||||
{
|
||||
string volume = volNode.SelectNodes("div").First(node => node.HasClass("volume")).SelectSingleNode("p").InnerText.Split(' ')[^1];
|
||||
string volume = Regex.Match(volNode.SelectNodes("div").First(node => node.HasClass("volume")).SelectSingleNode("p").InnerText,
|
||||
@"[Vv]olume ([0-9]+).*").Groups[1].Value;
|
||||
foreach (HtmlNode chNode in volNode.SelectNodes("div").First(node => node.HasClass("volume-chapters")).SelectNodes("div"))
|
||||
{
|
||||
string number = chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText.Split(" ")[^1];
|
||||
|
||||
string number = Regex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText,
|
||||
@"[Cc]apitolo ([0-9]+).*").Groups[1].Value;
|
||||
string url = chNode.SelectSingleNode("a").GetAttributeValue("href", "");
|
||||
ret.Add(new Chapter(manga, null, volume, number, url));
|
||||
}
|
||||
|
@ -28,7 +28,7 @@ public class NotificationManagerJsonConverter : JsonConverter
|
||||
case (byte)NotificationConnector.NotificationConnectorType.LunaSea:
|
||||
return new LunaSea(this._clone, jo.GetValue("id")!.Value<string>()!);
|
||||
case (byte)NotificationConnector.NotificationConnectorType.Ntfy:
|
||||
return new Ntfy(this._clone, jo.GetValue("endpoint")!.Value<string>()!, jo.GetValue("auth")!.Value<string>()!);
|
||||
return new Ntfy(this._clone, jo.GetValue("endpoint")!.Value<string>()!, jo.GetValue("topic")!.Value<string>()!, jo.GetValue("auth")!.Value<string>()!);
|
||||
}
|
||||
|
||||
throw new Exception();
|
||||
|
@ -1,34 +1,63 @@
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
using Newtonsoft.Json;
|
||||
|
||||
namespace Tranga.NotificationConnectors;
|
||||
|
||||
public class Ntfy : NotificationConnector
|
||||
{
|
||||
// ReSharper disable once MemberCanBePrivate.Global
|
||||
// ReSharper disable twice MemberCanBePrivate.Global
|
||||
public string endpoint { get; init; }
|
||||
private string auth { get; init; }
|
||||
private const string Topic = "tranga";
|
||||
public string auth { get; init; }
|
||||
public string topic { get; init; }
|
||||
private readonly HttpClient _client = new();
|
||||
|
||||
[JsonConstructor]
|
||||
public Ntfy(GlobalBase clone, string endpoint, string auth) : base(clone, NotificationConnectorType.Ntfy)
|
||||
public Ntfy(GlobalBase clone, string endpoint, string topic, string auth) : base(clone, NotificationConnectorType.Ntfy)
|
||||
{
|
||||
if (!baseUrlRex.IsMatch(endpoint))
|
||||
throw new ArgumentException("endpoint does not match pattern");
|
||||
this.endpoint = endpoint;
|
||||
this.topic = topic;
|
||||
this.auth = auth;
|
||||
}
|
||||
|
||||
public Ntfy(GlobalBase clone, string endpoint, string username, string password, string? topic = null) :
|
||||
this(clone, EndpointAndTopicFromUrl(endpoint)[0], topic??EndpointAndTopicFromUrl(endpoint)[1], AuthFromUsernamePassword(username, password))
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
private static string AuthFromUsernamePassword(string username, string password)
|
||||
{
|
||||
string authHeader = "Basic " + Convert.ToBase64String(Encoding.UTF8.GetBytes($"{username}:{password}"));
|
||||
string authParam = Convert.ToBase64String(Encoding.UTF8.GetBytes(authHeader)).Replace("=","");
|
||||
return authParam;
|
||||
}
|
||||
|
||||
private static string[] EndpointAndTopicFromUrl(string url)
|
||||
{
|
||||
string[] ret = new string[2];
|
||||
if (!baseUrlRex.IsMatch(url))
|
||||
throw new ArgumentException("url does not match pattern");
|
||||
Regex rootUriRex = new(@"(https?:\/\/[a-zA-Z0-9-\.]+\.[a-zA-Z0-9]+)(?:\/([a-zA-Z0-9-\.]+))?.*");
|
||||
Match match = rootUriRex.Match(url);
|
||||
if(!match.Success)
|
||||
throw new ArgumentException($"Error getting URI from provided endpoint-URI: {url}");
|
||||
|
||||
ret[0] = match.Groups[1].Value;
|
||||
ret[1] = match.Groups[2].Success && match.Groups[2].Value.Length > 0 ? match.Groups[2].Value : "tranga";
|
||||
|
||||
return ret;
|
||||
}
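The connector now stores the topic explicitly instead of the hard-coded `tranga` constant, and the new convenience constructor derives endpoint, topic, and the base64 `auth` query parameter from a username/password pair. A stand-alone sketch of the URL split performed by `EndpointAndTopicFromUrl`; the example URL is an assumption, while the regex and the `tranga` fallback come from the diff:

```csharp
using System;
using System.Text.RegularExpressions;

Regex rootUriRex = new(@"(https?:\/\/[a-zA-Z0-9-\.]+\.[a-zA-Z0-9]+)(?:\/([a-zA-Z0-9-\.]+))?.*");
Match match = rootUriRex.Match("https://ntfy.example.com/manga-updates"); // invented example URL
string endpoint = match.Groups[1].Value;                                  // https://ntfy.example.com
string topic = match.Groups[2].Success && match.Groups[2].Value.Length > 0
    ? match.Groups[2].Value                                               // manga-updates
    : "tranga";                                                           // fallback topic when the URL has no path
Console.WriteLine($"{endpoint} -> {topic}");
```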
|
||||
|
||||
public override string ToString()
|
||||
{
|
||||
return $"Ntfy {endpoint} {Topic}";
|
||||
return $"Ntfy {endpoint} {topic}";
|
||||
}
|
||||
|
||||
public override void SendNotification(string title, string notificationText)
|
||||
{
|
||||
Log($"Sending notification: {title} - {notificationText}");
|
||||
MessageData message = new(title, notificationText);
|
||||
MessageData message = new(title, topic, notificationText);
|
||||
HttpRequestMessage request = new(HttpMethod.Post, $"{this.endpoint}?auth={this.auth}");
|
||||
request.Content = new StringContent(JsonConvert.SerializeObject(message, Formatting.None), Encoding.UTF8, "application/json");
|
||||
HttpResponseMessage response = _client.Send(request);
|
||||
@ -47,9 +76,9 @@ public class Ntfy : NotificationConnector
|
||||
public string message { get; }
|
||||
public int priority { get; }
|
||||
|
||||
public MessageData(string title, string message)
|
||||
public MessageData(string title, string topic, string message)
|
||||
{
|
||||
this.topic = Topic;
|
||||
this.topic = topic;
|
||||
this.title = title;
|
||||
this.message = message;
|
||||
this.priority = 3;
|
||||
|
@ -122,7 +122,7 @@ public class Server : GlobalBase
|
||||
break;
|
||||
}
|
||||
|
||||
string filePath = settings.GetFullCoverPath((Manga)manga!);
|
||||
string filePath = manga?.coverFileNameInCache ?? "";
|
||||
if (File.Exists(filePath))
|
||||
{
|
||||
FileStream coverStream = new(filePath, FileMode.Open);
|
||||
@ -410,7 +410,7 @@ public class Server : GlobalBase
|
||||
break;
|
||||
case "Settings/AprilFoolsMode":
|
||||
if (!requestVariables.TryGetValue("enabled", out string? aprilFoolsModeEnabledStr) ||
|
||||
bool.TryParse(aprilFoolsModeEnabledStr, out bool aprilFoolsModeEnabled))
|
||||
!bool.TryParse(aprilFoolsModeEnabledStr, out bool aprilFoolsModeEnabled))
|
||||
{
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
@ -492,12 +492,13 @@ public class Server : GlobalBase
|
||||
}else if (notificationConnectorType is NotificationConnector.NotificationConnectorType.Ntfy)
|
||||
{
|
||||
if (!requestVariables.TryGetValue("ntfyUrl", out string? ntfyUrl) ||
|
||||
!requestVariables.TryGetValue("ntfyAuth", out string? ntfyAuth))
|
||||
!requestVariables.TryGetValue("ntfyUser", out string? ntfyUser)||
|
||||
!requestVariables.TryGetValue("ntfyPass", out string? ntfyPass))
|
||||
{
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
}
|
||||
AddNotificationConnector(new Ntfy(this, ntfyUrl, ntfyAuth));
|
||||
AddNotificationConnector(new Ntfy(this, ntfyUrl, ntfyUser, ntfyPass, null));
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
}
|
||||
else
|
||||
@ -534,12 +535,13 @@ public class Server : GlobalBase
|
||||
}else if (notificationConnectorType is NotificationConnector.NotificationConnectorType.Ntfy)
|
||||
{
|
||||
if (!requestVariables.TryGetValue("ntfyUrl", out string? ntfyUrl) ||
|
||||
!requestVariables.TryGetValue("ntfyAuth", out string? ntfyAuth))
|
||||
!requestVariables.TryGetValue("ntfyUser", out string? ntfyUser)||
|
||||
!requestVariables.TryGetValue("ntfyPass", out string? ntfyPass))
|
||||
{
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
}
|
||||
notificationConnector = new Ntfy(this, ntfyUrl, ntfyAuth);
|
||||
notificationConnector = new Ntfy(this, ntfyUrl, ntfyUser, ntfyPass, null);
|
||||
}
|
||||
else
|
||||
{
|
||||
|
@ -54,12 +54,7 @@ public partial class Tranga : GlobalBase
|
||||
return _connectors;
|
||||
}
|
||||
|
||||
public Manga? GetPublicationById(string internalId)
|
||||
{
|
||||
if (cachedPublications.Exists(publication => publication.internalId == internalId))
|
||||
return cachedPublications.First(publication => publication.internalId == internalId);
|
||||
return null;
|
||||
}
|
||||
public Manga? GetPublicationById(string internalId) => GetCachedManga(internalId);
|
||||
|
||||
public bool TryGetPublicationById(string internalId, out Manga? manga)
|
||||
{
|
||||
|
@ -8,9 +8,11 @@
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="GlaxArguments" Version="1.1.0" />
|
||||
<PackageReference Include="HtmlAgilityPack" Version="1.11.46" />
|
||||
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
|
||||
<PackageReference Include="PuppeteerSharp" Version="10.0.0" />
|
||||
<PackageReference Include="Soenneker.Utils.String.NeedlemanWunsch" Version="2.1.301" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
@ -1,4 +1,5 @@
|
||||
using Logging;
|
||||
using GlaxArguments;
|
||||
|
||||
namespace Tranga;
|
||||
|
||||
@ -7,46 +8,53 @@ public partial class Tranga : GlobalBase
|
||||
|
||||
public static void Main(string[] args)
|
||||
{
|
||||
Console.WriteLine(string.Join(' ', args));
|
||||
string[]? help = GetArg(args, ArgEnum.Help);
|
||||
if (help is not null)
|
||||
{
|
||||
PrintHelp();
|
||||
return;
|
||||
}
|
||||
Argument downloadLocation = new (new[] { "-d", "--downloadLocation" }, 1, "Directory to which downloaded Manga are saved");
|
||||
Argument workingDirectory = new (new[] { "-w", "--workingDirectory" }, 1, "Directory in which application-data is saved");
|
||||
Argument consoleLogger = new (new []{"-c", "--consoleLogger"}, 0, "Enables the consoleLogger");
|
||||
Argument fileLogger = new (new []{"-f", "--fileLogger"}, 0, "Enables the fileLogger");
|
||||
Argument fPath = new (new []{"-l", "--fPath"}, 1, "Log Folder Path");
|
||||
|
||||
string[]? consoleLogger = GetArg(args, ArgEnum.ConsoleLogger);
|
||||
string[]? fileLogger = GetArg(args, ArgEnum.FileLogger);
|
||||
string? directoryPath = GetArg(args, ArgEnum.FileLoggerPath)?[0];
|
||||
Argument[] arguments = new[]
|
||||
{
|
||||
downloadLocation,
|
||||
workingDirectory,
|
||||
consoleLogger,
|
||||
fileLogger,
|
||||
fPath
|
||||
};
|
||||
ArgumentFetcher fetcher = new (arguments);
|
||||
Dictionary<Argument, string[]> fetched = fetcher.Fetch(args);
|
||||
|
||||
string? directoryPath = fetched.TryGetValue(fPath, out string[]? path) ? path[0] : null;
|
||||
if (directoryPath is not null && !Directory.Exists(directoryPath))
|
||||
Directory.CreateDirectory(directoryPath);
|
||||
|
||||
List<Logger.LoggerType> enabledLoggers = new();
|
||||
if(consoleLogger is not null)
|
||||
if(fetched.ContainsKey(consoleLogger))
|
||||
enabledLoggers.Add(Logger.LoggerType.ConsoleLogger);
|
||||
if (fileLogger is not null)
|
||||
if (fetched.ContainsKey(fileLogger))
|
||||
enabledLoggers.Add(Logger.LoggerType.FileLogger);
|
||||
Logger logger = new(enabledLoggers.ToArray(), Console.Out, Console.OutputEncoding, directoryPath);
|
||||
|
||||
TrangaSettings? settings = null;
|
||||
string[]? downloadLocationPath = GetArg(args, ArgEnum.DownloadLocation);
|
||||
string[]? workingDirectory = GetArg(args, ArgEnum.WorkingDirectory);
|
||||
bool dlp = fetched.TryGetValue(downloadLocation, out string[]? downloadLocationPath);
|
||||
bool wdp = fetched.TryGetValue(downloadLocation, out string[]? workingDirectoryPath);
|
||||
|
||||
if (downloadLocationPath is not null && workingDirectory is not null)
|
||||
if (dlp && wdp)
|
||||
{
|
||||
settings = new TrangaSettings(downloadLocationPath[0], workingDirectory[0]);
|
||||
}else if (downloadLocationPath is not null)
|
||||
settings = new TrangaSettings(downloadLocationPath![0], workingDirectoryPath![0]);
|
||||
}else if (dlp)
|
||||
{
|
||||
if (settings is null)
|
||||
settings = new TrangaSettings(downloadLocation: downloadLocationPath[0]);
|
||||
settings = new TrangaSettings(downloadLocation: downloadLocationPath![0]);
|
||||
else
|
||||
settings = new TrangaSettings(downloadLocation: downloadLocationPath[0], settings.workingDirectory);
|
||||
}else if (workingDirectory is not null)
|
||||
settings = new TrangaSettings(downloadLocation: downloadLocationPath![0], settings.workingDirectory);
|
||||
}else if (wdp)
|
||||
{
|
||||
if (settings is null)
|
||||
settings = new TrangaSettings(downloadLocation: workingDirectory[0]);
|
||||
settings = new TrangaSettings(downloadLocation: workingDirectoryPath![0]);
|
||||
else
|
||||
settings = new TrangaSettings(settings.downloadLocation, workingDirectory[0]);
|
||||
settings = new TrangaSettings(settings.downloadLocation, workingDirectoryPath![0]);
|
||||
}
|
||||
else
|
||||
{
|
||||
@ -58,84 +66,4 @@ public partial class Tranga : GlobalBase
|
||||
|
||||
Tranga _ = new (logger, settings);
|
||||
}
|
||||
|
||||
private static void PrintHelp()
|
||||
{
|
||||
Console.WriteLine("Tranga-Help:");
|
||||
foreach (Argument argument in Arguments.Values)
|
||||
{
|
||||
foreach(string name in argument.names)
|
||||
Console.Write("{0} ", name);
|
||||
if(argument.parameterCount > 0)
|
||||
Console.Write($"<{argument.parameterCount}>");
|
||||
Console.Write("\r\n {0}\r\n", argument.helpText);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns an array containing the parameters for the argument.
|
||||
/// </summary>
|
||||
/// <param name="args">List of argument-strings</param>
|
||||
/// <param name="arg">Requested parameter</param>
|
||||
/// <returns>
|
||||
/// If there are no parameters for an argument, returns an empty array.
|
||||
/// If the argument is not found returns null.
|
||||
/// </returns>
|
||||
private static string[]? GetArg(string[] args, ArgEnum arg)
|
||||
{
|
||||
List<string> argsList = args.ToList();
|
||||
List<string> ret = new();
|
||||
foreach (string name in Arguments[arg].names)
|
||||
{
|
||||
int argIndex = argsList.IndexOf(name);
|
||||
if (argIndex != -1)
|
||||
{
|
||||
if (Arguments[arg].parameterCount == 0)
|
||||
return ret.ToArray();
|
||||
for (int parameterIndex = 1; parameterIndex <= Arguments[arg].parameterCount; parameterIndex++)
|
||||
{
|
||||
if(argIndex + parameterIndex >= argsList.Count || args[argIndex + parameterIndex].Contains('-'))//End of arguments, or no parameter provided, when one is required
|
||||
Console.WriteLine($"No parameter provided for argument {name}. -h for help.");
|
||||
ret.Add(args[argIndex + parameterIndex]);
|
||||
}
|
||||
}
|
||||
}
|
||||
return ret.Any() ? ret.ToArray() : null;
|
||||
}
|
||||
|
||||
private static readonly Dictionary<ArgEnum, Argument> Arguments = new()
|
||||
{
|
||||
{ ArgEnum.DownloadLocation, new(new []{"-d", "--downloadLocation"}, 1, "Directory to which downloaded Manga are saved") },
|
||||
{ ArgEnum.WorkingDirectory, new(new []{"-w", "--workingDirectory"}, 1, "Directory in which application-data is saved") },
|
||||
{ ArgEnum.ConsoleLogger, new(new []{"-c", "--consoleLogger"}, 0, "Enables the consoleLogger") },
|
||||
{ ArgEnum.FileLogger, new(new []{"-f", "--fileLogger"}, 0, "Enables the fileLogger") },
|
||||
{ ArgEnum.FileLoggerPath, new (new []{"-l", "--fPath"}, 1, "Log Folder Path" ) },
|
||||
{ ArgEnum.Help, new(new []{"-h", "--help"}, 0, "Print this") }
|
||||
//{ ArgEnum., new(new []{""}, 1, "") }
|
||||
};
|
||||
|
||||
internal enum ArgEnum
|
||||
{
|
||||
TrangaSettings,
|
||||
DownloadLocation,
|
||||
WorkingDirectory,
|
||||
ConsoleLogger,
|
||||
FileLogger,
|
||||
FileLoggerPath,
|
||||
Help
|
||||
}
|
||||
|
||||
private struct Argument
|
||||
{
|
||||
public string[] names { get; }
|
||||
public byte parameterCount { get; }
|
||||
public string helpText { get; }
|
||||
|
||||
public Argument(string[] names, byte parameterCount, string helpText)
|
||||
{
|
||||
this.names = names;
|
||||
this.parameterCount = parameterCount;
|
||||
this.helpText = helpText;
|
||||
}
|
||||
}
|
||||
}
|