1275 Commits

Author SHA1 Message Date
6a8df2f5f8 TokenGen CreateTokenHash from array of strings. 2025-01-12 19:09:37 +01:00
524596ad85 Merge branch 'cuttingedge-merge-ServerV2' into postgres-Server-V2
# Conflicts:
#	API/Schema/MangaConnectors/Mangaworld.cs
2025-01-09 01:51:07 +01:00
6aa8413c40 Fix #311 MangaWorld now requires Javascript
2025-01-09 01:48:13 +01:00
94adefa8e6 Fix some Jobs 2025-01-09 01:34:03 +01:00
7cf7eb85d2 Fix #307 Chapternumbers
ChapterNumbers now can be sub-decimal, like version-numbers (x.y.z.a...)
2025-01-09 01:33:30 +01:00
55c0e2c4e7 Manga latest downloaded and available via SQL Queries 2024-12-18 16:42:59 +01:00
5494f2b754 Merge branch 'master' into postgres-Server-V2 2024-12-18 00:45:32 +01:00
b96ae4a2d2 Merge pull request #304 from C9Glax/dependabot/github_actions/docker/setup-buildx-action-3.8.0
Bump docker/setup-buildx-action from 3.7.1 to 3.8.0
2024-12-17 17:38:07 +01:00
80190e1286 Job Run pass context to add new Data 2024-12-17 17:24:25 +01:00
3a25c0b221 Bump docker/setup-buildx-action from 3.7.1 to 3.8.0
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3.7.1 to 3.8.0.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3.7.1...v3.8.0)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-12-17 05:59:19 +00:00
16dd1ffa97 Job Run pass context to add new Data 2024-12-16 23:29:57 +01:00
9cb5f636dd Fix MangaDex wrong Id in requests for new chapters 2024-12-16 23:29:42 +01:00
df319e9afb Jobs change RunningJobs to Dictionary of Thread and Job instead of just List of threads 2024-12-16 23:21:13 +01:00
84388a469a Remove unnecessary nullable in Job 2024-12-16 23:20:46 +01:00
d322445550 Add "GetAll" Paths to Manga and Jobcontroller 2024-12-16 23:20:31 +01:00
81d22bc022 Newtonsoft Enum Json Converter 2024-12-16 23:20:17 +01:00
6b0cefbc7c Simplify SearchPaths 2024-12-16 23:19:59 +01:00
519030861d Move GetConnectors to new MiscController 2024-12-16 23:19:47 +01:00
6940e6c64d Include Manga and Chapter in jobs 2024-12-16 23:05:12 +01:00
e66ab49e7d ConnectorController.cs -> SearchController.cs 2024-12-16 22:59:52 +01:00
67a15cec7f Path for SearchManga 2024-12-16 22:58:44 +01:00
ae11c31b9d ConnectorController.AddMangaToContext existing also check for connector 2024-12-16 22:55:52 +01:00
60b128fc30 Fix Arrays shall not be added to context 2024-12-16 22:54:23 +01:00
729f018712 Fix relation Manga -> Tags and Manga -> Authors with many-many 2024-12-16 21:52:35 +01:00
03e89913e3 Scoped PGSql Contexts for Threads 2024-12-16 21:24:00 +01:00
c4fc2f436b Notification-Thread Implemented 2024-12-16 21:02:55 +01:00
ebc30c85bf Notification Date notnull 2024-12-16 20:35:25 +01:00
d6b0e3a366 Notification Date notnull 2024-12-16 20:34:20 +01:00
e1bfdd675b NotificationConnector SendNotification public 2024-12-16 20:33:59 +01:00
e6f8853b49 Add NotificationUrgency.cs 2024-12-16 20:14:28 +01:00
99ddb06d6d TrangaSettings remove old properties 2024-12-16 20:08:23 +01:00
62876498d0 Notification add Date 2024-12-16 20:03:45 +01:00
1044821147 Fix relation Manga->Chapter (latest) 2024-12-16 20:03:38 +01:00
7f946da1c3 Remove docs/ we have SWAGger 2024-12-16 19:50:56 +01:00
79e7941dda Remove docs/ we have SWAGger 2024-12-16 19:49:56 +01:00
faa235783c remove leftover .dockerignore 2024-12-16 19:49:17 +01:00
87c5ad001d AutoInclude Navigation on Manga and Chapter 2024-12-16 19:49:03 +01:00
3b58e0498b Fix redundant keys, MangaSearch 2024-12-16 19:25:22 +01:00
87274aca19 Remove APISerializable and APIJsonSerializer 2024-12-16 18:55:52 +01:00
77c5903cf1 Fixup Docker files 2024-12-16 18:29:57 +01:00
0d32f15ee9 PGSqlContext add MangaConnector Discriminator
API use Newtonsoft Json
2024-12-16 18:28:58 +01:00
a0774841bc PrimaryKey Notification 2024-12-16 18:02:48 +01:00
3ee3a07565 Urgency 2024-12-16 18:02:14 +01:00
b9eecd3afd Remove excess 2024-12-16 17:47:12 +01:00
6534341fd5 Library Connector Constructors 2024-12-16 17:45:58 +01:00
6737be4a20 Notification Connector Methods 2024-12-16 17:42:16 +01:00
84833acdeb Schema add Notifications 2024-12-16 17:35:36 +01:00
538e6fa60b MangaConnectors in API 2024-12-15 23:00:35 +01:00
8c5bcd2665 Weebcentral 2024-12-14 22:02:32 +01:00
50dfd92c91 Merge branch 'Server-V2' into postgres-Server-V2 2024-12-14 21:57:47 +01:00
bf9fe517b0 Merge branch 'cuttingedge-merge-ServerV2' into Server-V2 2024-12-14 21:57:34 +01:00
e1f1a05724 Merge pull request #302 from ale-ben/feature/weebcentral_build_error
Fix build error in Weebcentral
2024-12-14 21:54:28 +01:00
1008da7ee8 Add API 2024-12-14 21:53:29 +01:00
72d9bda0e8 [feature/weebcentral_build_error] fix type in equality check 2024-12-14 20:44:43 +01:00
a40a9c84df Merge pull request #298 from ale-ben/feature/weebcentral
Weebcentral implementation
2024-12-14 18:42:47 +01:00
ec884f888f Merge branch 'Server-V2' into postgres-Server-V2 2024-12-14 18:07:27 +01:00
57df419d65 Merge branch 'cuttingedge-merge-ServerV2' into Server-V2 2024-12-14 18:07:22 +01:00
825b945ad1 AsuraToon: fix crash when there are no Artists or Authors
Fix #296
2024-12-14 18:02:41 +01:00
b8c624f3ea AsuraToon: fix crash when there are no search results #296 2024-12-14 17:55:20 +01:00
93cfdddd19 Possible fix #300 chromium startup "Failed to launch browser! chrome_crashpad_handler: --database is required" 2024-12-14 17:51:22 +01:00
4c8d9bfaf2 [feature/weebcentral] Added Weebcentral to readme 2024-12-14 16:29:43 +01:00
dd988658c0 [feature/weebcentral] Added Weebcentral to connectors 2024-12-14 16:18:15 +01:00
cf4c84a47f [feature/weebcentral] Working download logic 2024-12-14 00:58:52 +01:00
5d9bfc3adf [feature/weebcentral] Get chapters 2024-12-14 00:45:10 +01:00
5a770c8e9f [feature/weebcentral] Working search 2024-12-13 23:42:35 +01:00
395619acd3 Merge branch 'Server-V2' into postgres-Server-V2 2024-12-13 18:54:16 +01:00
502821c246 Merge branch 'cuttingedge-merge-ServerV2' into postgres-Server-V2 2024-12-13 18:54:02 +01:00
9d6a8ed686 Merge branch 'cuttingedge-merge-ServerV2' into Server-V2 2024-12-13 18:53:52 +01:00
e3bd7620aa Fix #296 AsuraToon
AsuraComic does not use static sites; use Chromium instead.
Make Puppeteer spam fewer logs
2024-12-13 18:53:25 +01:00
afcc2cacaf merge 2024-12-12 22:48:46 +01:00
4040b5845c Merge branch 'cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/Jobs/DownloadNewChapters.cs
#	Tranga/Jobs/JobBoss.cs
#	Tranga/Jobs/UpdateMetadata.cs
2024-12-12 22:47:47 +01:00
428d6e13d1 Fix UpdateJobFile with oldFile:
oldFilePath was fullname, not relative
2024-12-12 22:41:28 +01:00
1e6a65c0fd Chapter volume and chapternumber as float instead of string.
Possible fix #293
2024-12-12 22:33:13 +01:00
025d43b752 Fix duplicate job check.
We were still adding duplicate jobs if not *every* field in the Manga matched.
We now only compare publicationId.
2024-12-12 22:18:06 +01:00
113c0abba7 Merge pull request #294 from C9Glax/cuttingedge
Merge cuttingedge into master
2024-12-12 22:07:13 +01:00
7daebcb1c4 Merge branch 'cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/Tranga.csproj
2024-12-12 22:05:52 +01:00
747df0bde5 Add Puppeteer Logger 2024-12-12 21:42:21 +01:00
463f360808 Dependency updates 2024-12-12 21:28:58 +01:00
44ff158c66 Merge branch 'cuttingedge-merge-ServerV2' into Server-V2 2024-12-04 19:58:48 +01:00
85d7c07b13 Mangaworld add decimal-chapters (686.5) to regex
#289
2024-12-04 19:55:31 +01:00
b5b45d0801 Merge branch 'cuttingedge-merge-ServerV2' into Server-V2 2024-12-04 19:49:56 +01:00
553f56ecaf Longer ExceptionMessage when Chapter comparison fails
#289
2024-12-04 19:49:38 +01:00
9cc4f8c090 Merge pull request #283 from C9Glax/cuttingedge-merge-candiate
AsuraToon merge
2024-11-28 21:41:19 +01:00
29f3f1a16e Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-11-28 21:35:48 +01:00
204fb7614d Fix #281 Manganato errors when there is no chapters uploaded 2024-11-28 21:35:29 +01:00
d6e73ffcdf Merge pull request #276 from C9Glax/cuttingedge-merge-candiate
Cuttingedge merge candidate
2024-11-28 21:23:56 +01:00
5a8202f872 More logging 2024-11-11 17:59:48 +01:00
1bd914571c Asuratoon Server-V2 2024-11-11 17:09:19 +01:00
483dcc41df Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-11-02 17:52:26 +01:00
55cc2a2e84 Merge pull request #277 from C9Glax/asuratoon
Asuratoon
2024-11-02 17:51:12 +01:00
b619109ea1 fix #141 chapternames 2024-11-02 17:48:18 +01:00
72943330c3 Merge branch 'refs/heads/cuttingedge' into asuratoon 2024-11-02 17:45:13 +01:00
bc44a5333b Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-11-02 17:44:38 +01:00
38bc1e4d53 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-11-02 17:44:30 +01:00
47479f7a0d Fix chaptermarkers.
Don't create one if Chapter does not have an ID
2024-11-02 17:44:23 +01:00
b2381be860 #141 fix ParsePublicationsFromHtml, statusNode, titleNode, firstChapterNode
fix ParseChaptersFromHtml nodeCollection of ChapterUrls
fix ParseImageUrlsFromHtml xPath
fix Chapter parsing names
2024-11-02 17:42:26 +01:00
657e1b338b resolves #141 Asuratoon connector 2024-11-02 17:19:17 +01:00
5018800d09 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/Jobs/JobBoss.cs
2024-11-02 16:25:49 +01:00
ee265a7519 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-11-02 16:24:55 +01:00
5b0624654b rename duplicates to append ".duplicate" 2024-11-02 16:24:44 +01:00
a75549c699 Only try loading .json files on startup (exclude .failed for example) 2024-11-02 16:24:25 +01:00
c7dc5e75f2 Add "Expires" Header to image responses 2024-10-31 23:00:33 +01:00
3f37eefe72 Include modified date in image responses for cachecontrol 2024-10-31 22:53:05 +01:00
b7bc04a045 Add zstd compression to all API Traffic 2024-10-31 22:16:18 +01:00
f7daacf0d4 Use Robidoux algorithm for resizing covers 2024-10-31 21:50:46 +01:00
1cb8899195 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-10-31 20:43:21 +01:00
f46244cb9c Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-10-31 20:43:11 +01:00
9db3f1b0da Extend logging on startup 2024-10-31 20:42:56 +01:00
dc9cd4b1dd Append ".failed" to job-files that weren't successfully added. 2024-10-31 20:41:46 +01:00
3566ad774d Moved logging to actually say if we added a job to the list 2024-10-31 20:41:21 +01:00
94b81969c7 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-10-30 22:40:31 +01:00
3e581e2ddb Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/Jobs/JobBoss.cs
2024-10-30 22:34:17 +01:00
bd8cb86c52 Always set directory-permissions 2024-10-30 22:29:32 +01:00
34c5436b33 Always set directory-permissions 2024-10-30 22:29:16 +01:00
4690394437 Formatting 2024-10-30 22:27:55 +01:00
02cf8578c9 Explicitly set File/Directory permissions for jobs 2024-10-30 22:27:50 +01:00
067497ddd0 Delete duplicate files on startup. 2024-10-30 20:38:53 +01:00
4b88cdbd90 When updating Jobfiles, don't write a new file if we weren't able to successfully delete the old one 2024-10-30 20:31:16 +01:00
420013f07b Delete chapterMarkers if the file doesn't exist anymore. 2024-10-30 18:23:14 +01:00
8cee11aa22 Fix #272 Manhuaplus missing year string 2024-10-29 19:15:19 +01:00
07c6081c03 #236 2024-10-27 03:49:55 +01:00
585d7e3380 Fix order of startup: Load Manga first, then the jobs 2024-10-27 03:42:50 +01:00
febce6b92a Downloaded Image processing:
- Compression
- B/W threshold
2024-10-27 03:40:07 +01:00
fb7ed21d82 Update Types doc with last merge for Chapters 2024-10-27 03:39:40 +01:00
2db85e5070 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-10-27 02:09:16 +01:00
198bbdcf94 Set hidden Attribute to Markerfiles 2024-10-27 02:58:50 +02:00
c58adf64fa #271 Create Marker-files for Chapters.
If a Connector provides a unique ID for a chapter, Tranga will create a marker file containing the current name of the Chapter.
This should prevent duplicate or missing chapters.
2024-10-27 02:41:28 +02:00
957debea01 Mangahere change list-2 to list-1 in selector 2024-10-27 02:22:58 +02:00
5186ae66c9 Merge pull request #270 from C9Glax/dependabot/github_actions/docker/setup-buildx-action-3.7.1
Bump docker/setup-buildx-action from 3.6.1 to 3.7.1
2024-10-23 16:11:06 +02:00
c35e1ef517 Merge pull request #269 from C9Glax/dependabot/github_actions/docker/build-push-action-6.9.0
Bump docker/build-push-action from 6.7.0 to 6.9.0
2024-10-23 16:10:52 +02:00
8f6891142b Bump docker/setup-buildx-action from 3.6.1 to 3.7.1
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3.6.1 to 3.7.1.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3.6.1...v3.7.1)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-10-23 05:49:09 +00:00
b52e6d4908 Bump docker/build-push-action from 6.7.0 to 6.9.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.7.0 to 6.9.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.7.0...v6.9.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-10-23 05:49:07 +00:00
96b5921ed6 GET LibraryTypes Create and Test set url to lowercase
Set Komga to also require username and password
2024-10-23 02:29:08 +02:00
9d47445339 Assign numbers to ProgressToken.State
Update type docs
2024-10-20 20:58:59 +02:00
93696fbac1 docs Types documentation NotificationConnector 2024-10-20 20:43:40 +02:00
582b3af89c Add docs types LibraryConnector 2024-10-20 18:53:03 +02:00
f57667bc8f add documentation types settings 2024-10-20 18:33:19 +02:00
f9a30f2587 Types documentation add quotation marks 2024-10-20 18:32:49 +02:00
240af81fa9 Add doc types chapter 2024-10-20 02:16:22 +02:00
26b2910000 Add GET /v2/Jobs/Standby 2024-10-20 01:30:50 +02:00
a88b85e599 Add numbers to JobTypes (and type documentation) 2024-10-20 01:08:22 +02:00
27f823cfeb GET V2Manga with internalIds return distinct array. 2024-10-20 01:06:24 +02:00
70993a692a Add ReleaseStatus to docs/types.md 2024-10-18 19:31:09 +02:00
1a631362c9 Use Sixlabors.Imagesharp for resizing coverimages. 2024-10-18 19:30:57 +02:00
00c4f0533f Update documentation 2024-10-18 17:57:59 +02:00
8670863810 Add Job and ProgressToken Types to docs 2024-10-18 17:51:53 +02:00
2c9bd2532e Fix order of RequestPaths 2024-10-18 17:51:37 +02:00
575fb739cc typo 2024-10-18 00:48:58 +02:00
d4af068f0e Add BaseUris: string[] field to MangaConnector, to match Connector to uri 2024-10-18 00:48:46 +02:00
6a4d454a08 Extend Types.md documentation 2024-10-18 00:29:29 +02:00
225db8beda Change return type of api request to get Connectors to get connector-list instead of dictionary 2024-10-17 21:03:37 +02:00
d80fcd9039 Manga website url nullable 2024-09-30 23:19:17 +02:00
30c44760e7 Merge pull request #256 from C9Glax/cuttingedge-merge-candidate
Cuttingedge merge candidate
2024-09-29 01:13:56 +02:00
a3ae3c320d Merge branch 'refs/heads/cuttingedge' into cuttingedge-merge-candidate 2024-09-29 01:07:59 +02:00
4871bc801d Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-09-29 01:07:51 +02:00
ea262889e6 It's late. Set TARGETPLATFORM in base 2024-09-29 01:02:50 +02:00
445542b653 Set --platform to BUILDPLATFORM for dotnet 2024-09-29 00:58:24 +02:00
b7718220ef Merge branch 'refs/heads/cuttingedge' into cuttingedge-merge-candidate 2024-09-29 00:54:28 +02:00
34c62e8658 Remove cache step from cuttingedge workflow, set --platform to TARGETPLATFORM instead 2024-09-29 00:50:53 +02:00
a9fcc93670 Merge pull request #257 from C9Glax/master
Update docker-image-cuttingedge.yml
2024-09-29 00:44:17 +02:00
68d7ef258f Update docker-image-cuttingedge.yml
Clear Cache on build
2024-09-29 00:40:59 +02:00
fdea4f5ea5 Merge branch 'cuttingedge-merge-ServerV2' into cuttingedge 2024-09-27 17:09:19 +02:00
ac3039e587 Add Star-Graph to README 2024-09-27 17:08:59 +02:00
1c5f105a4d Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-09-27 15:04:03 +02:00
3829a1cf26 Merge branch 'refs/heads/cuttingedge' into cuttingedge-merge-candidate 2024-09-27 15:03:51 +02:00
c3daa0b751 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-27 15:03:44 +02:00
3a072beea3 Update Readme:
* Fix dotnet Version
* Link directly to new issue for new Connectors
* Add Ntfy as Notification Connector
* Remove Roadmap
2024-09-27 15:03:06 +02:00
8e6f2798a9 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge-merge-candidate 2024-09-27 14:58:07 +02:00
26a07f4a2f Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-09-27 14:58:02 +02:00
9cbde9a6b4 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-27 14:57:57 +02:00
0870aa9fdb Merge branch 'refs/heads/master' into cuttingedge-merge-ServerV2 2024-09-27 14:57:36 +02:00
172650e644 Merge pull request #254 from C9Glax/cuttingedge-merge-candidate
Cuttingedge merge candidate
2024-09-27 14:53:24 +02:00
48ab44c28d Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-09-27 14:51:20 +02:00
52ff2e54a8 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-27 14:51:11 +02:00
61d80a93cf Fix #255 MangaKatana sanitization. 2024-09-27 14:50:57 +02:00
32ecdcda76 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-09-23 15:41:02 +02:00
7be3ee52e9 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-23 15:40:53 +02:00
981eb0fd9f Fix notification batching:
Do not resend old notifications.
2024-09-23 15:40:43 +02:00
a92eba2d14 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/TrangaSettings.cs
2024-09-22 00:17:30 +02:00
47f3044a6d Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-22 00:15:59 +02:00
6d03cc5f8d Fix incorrect setting check for notificationsbuffer 2024-09-22 00:15:50 +02:00
290c405f52 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-22 00:09:54 +02:00
fcdbd32872 Include the number of notifications of each type in the title 2024-09-22 00:09:45 +02:00
eb6c37cc53 Output settings.json on startup 2024-09-22 00:05:09 +02:00
d922842186 Add NotificationBuffer, so Notifications are not spammed on every chapter. 2024-09-22 00:02:43 +02:00
69323d6d60 Add LibraryBuffer, so Libraries are not spammed with scans on every download. 2024-09-21 21:02:55 +02:00
46a0fb8c48 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-21 20:34:57 +02:00
ec8eb40941 Allow Versions to lose their volume number, if site no longer lists it. 2024-09-21 20:30:55 +02:00
d2074fae35 Readable CheckChapterIsDownloaded check 2024-09-21 20:23:21 +02:00
713bbc230f Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-18 18:56:09 +02:00
32ab9a552f Also delete files on UpdateJobFile if we don't provide a filepath 2024-09-18 18:56:01 +02:00
7b6724ad38 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-09-18 18:47:28 +02:00
c11c68d6d7 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-18 18:46:02 +02:00
09fdb6e5f1 Fix #250 old jobs getting re-exported. 2024-09-18 18:45:55 +02:00
be68ddc9b7 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-09-17 00:52:06 +02:00
e86ad03b1e Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-17 00:51:30 +02:00
9dfbe89e87 include --platform=$BUILDPLATFORM in Dockerfile 2024-09-17 00:51:22 +02:00
96e2845a5b Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-09-17 00:23:49 +02:00
98e75af486 Merge branch 'cuttingedge' of ssh://git.bernloehr.eu:222/glax/Tranga into cuttingedge 2024-09-16 23:21:13 +02:00
e2f5c3badc Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-16 23:18:57 +02:00
cda07bb9aa Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-16 23:09:43 +02:00
7c18466e95 Fix NETSDK1194 on build 2024-09-16 23:09:34 +02:00
c36204c7a8 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-09-16 22:51:57 +02:00
ce1c4d3f65 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-16 22:48:06 +02:00
52d0489a1b Fix duplicate mangas on startup 2024-09-16 22:47:55 +02:00
18edcef1c3 Resolve #247
Modify API call:
`/v2/Connector/Types`
Returns: Dictionary with Connector-Names and supported languages.
2024-09-16 21:25:08 +02:00
73ad881600 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-09-16 21:19:30 +02:00
f89aea6ac8 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-16 21:19:27 +02:00
5f05ba1049 Make SupportedLanguages public. 2024-09-16 21:19:19 +02:00
c6cfd9eb6c Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/Server.cs
2024-09-16 21:17:32 +02:00
a20ee01cfa Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-16 21:17:18 +02:00
cf5cbba9a8 #247 Add supported languages to Mangaconnectors 2024-09-16 21:17:07 +02:00
600b56033d Upgrade to Dotnet 8.0 LangVer 12 2024-09-16 21:11:50 +02:00
fdea3659f1 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-16 20:38:19 +02:00
7f3754fb64 Fix startup issue/issue with existing chapters: ProgressToken would not complete 2024-09-16 20:36:40 +02:00
2dac5db4da Create single Chromium Instance that is shared between all Connectors.
Fix pages staying open when page could not be loaded.
2024-09-16 20:30:23 +02:00
99df9a9dfd Fix #248
Move contents of old DownloadLocation and WorkingDirectory to new paths. Overwrite existing files, and add from oldPath.
2024-09-16 20:10:38 +02:00
77bb309dfa Fix #248 double closing OutputStream in response 2024-09-16 19:58:26 +02:00
3456fc6564 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-16 19:52:39 +02:00
35f2625f05 Fix #249 Manhuaplus where author/tags are not set. 2024-09-16 19:52:25 +02:00
0b9948e367 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-16 18:32:45 +02:00
96f3dbce65 Throw more readable exceptions if deserialization fails for Mangaconnectors.
#249
2024-09-16 18:32:34 +02:00
895128a462 Merge remote-tracking branch 'origin/cuttingedge-merge-ServerV2' into cuttingedge-merge-ServerV2 2024-09-16 18:24:39 +02:00
3b9d4a6735 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-09-11 14:41:49 +02:00
a94186455b Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-09-11 14:41:35 +02:00
7d3deee74c Remove unused constant 2024-09-11 14:40:28 +02:00
5980b64caa Readable Chapter comparison 2024-09-11 14:40:03 +02:00
cbecb257ef Remove unused constant 2024-09-11 14:39:16 +02:00
8316ed08a7 Merge pull request #245 from C9Glax/cuttingedge
Prod didn't break, nice
2024-09-09 10:10:36 +02:00
190fa8cba7 Fix #239 multiple enumeration on Export 2024-09-09 09:54:09 +02:00
217700d08d Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	.gitignore
2024-09-09 09:44:36 +02:00
7ff9ac53ee Build all docker images with new workflow #233 2024-09-09 09:42:52 +02:00
6faaaf4139 Fix #243 Moving Publication folders, overwrite files, merge folders 2024-09-09 09:23:25 +02:00
9b8b80cd24 Fix response closed on OPTIONS request 2024-09-07 20:44:15 +02:00
15f3e2b8ec Use current time as internalId for Manga instead of BASE64 string of title
#232
Fix #237
2024-09-07 20:33:03 +02:00
2be29e4019 MangaDex only download single release for chapter.
Fix #219
2024-09-07 20:16:05 +02:00
e8dbf7a718 Merge pull request #233 from vonProteus/arm64
Added support for ARM
2024-08-31 20:57:44 +02:00
a968f4328d Added support for ARM 2024-08-31 20:38:10 +02:00
398b6fff05 Merge pull request #230 from C9Glax/cuttingedge-merge-candidate
Cuttingedge merge candidate
2024-08-31 20:25:33 +02:00
f5da2f8526 Merge pull request #231 from C9Glax/dependabot/github_actions/docker/build-push-action-6.7.0
Bump docker/build-push-action from 6.6.1 to 6.7.0
2024-08-31 20:24:43 +02:00
73093ab86c Bump docker/build-push-action from 6.6.1 to 6.7.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.6.1 to 6.7.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.6.1...v6.7.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-08-27 05:55:58 +00:00
75eea8c761 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-08-26 20:47:19 +02:00
fccaf9fcbe Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-08-26 20:47:06 +02:00
3122aa32e8 fix #223 wrong selector 2024-08-26 20:46:50 +02:00
06cdbbd283 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-08-26 20:28:57 +02:00
02fad2dd44 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-08-26 20:28:51 +02:00
e0a7d1a187 Fix #220 Mangaworld Chapter number parsing 2024-08-26 20:28:40 +02:00
054c88712e Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-08-26 20:18:51 +02:00
d0f9a4102c Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-08-26 20:18:44 +02:00
9f178821b6 Fix #223 Manganato chapter relative dates. 2024-08-26 20:18:35 +02:00
e95eb0497c #229 Resize cover Images if requested 2024-08-26 19:34:29 +02:00
3c3f7bb95a Merge recent changes to TrangaSettings backend 2024-08-26 19:08:59 +02:00
032ee95716 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/Jobs/DownloadNewChapters.cs
#	Tranga/Jobs/JobBoss.cs
#	Tranga/Jobs/UpdateMetadata.cs
#	Tranga/Server.cs
#	Tranga/TrangaArgs.cs
#	Tranga/TrangaSettings.cs
2024-08-26 19:04:05 +02:00
682fd0bc2a Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-08-26 13:22:09 +02:00
dfa8e66f34 Fix try-block in Server.cs 2024-08-26 13:21:54 +02:00
8f51d22303 Fix try-block in Server.cs 2024-08-26 13:21:34 +02:00
d41de84262 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge
# Conflicts:
#	Tranga/Server.cs
2024-08-26 13:21:05 +02:00
1bd20791b8 Add Cache-Control headers 2024-08-26 13:18:48 +02:00
03aeab44cd Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-08-26 13:11:41 +02:00
6d723b6355 Fix Settings not returning as JSON 2024-08-26 13:11:00 +02:00
7b91bb699f Fix Settings not loading on reload 2024-08-26 13:10:47 +02:00
14e33cc496 Fix Settings not loading on reload 2024-08-26 13:09:33 +02:00
6f3bba99b0 Fix Settings not returning as JSON 2024-08-26 12:59:19 +02:00
2d848843d0 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-08-26 12:37:03 +02:00
63b493fa9c Rework TrangaSettings 2024-08-26 12:36:35 +02:00
001a37b8ef Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-08-26 11:18:12 +02:00
69d6884517 #227 Fix wrong filtering, only return top 10 results 2024-08-26 11:17:59 +02:00
db73af3bdd Fix crash when outputstream closes before response could be sent.
#227
2024-08-26 10:38:45 +02:00
59547efab2 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-08-26 10:35:37 +02:00
f4336f9777 #227 Mangasee Return results that have similarity over 95% or at least top ten results 2024-08-26 10:35:16 +02:00
bec3ac52a9 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-08-20 20:53:09 +02:00
ea37e81ece Fix last commit 2024-08-20 20:53:03 +02:00
6a20783d48 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-08-20 20:47:21 +02:00
21af75f410 Faster download for image URLs.
#224
2024-08-20 20:47:13 +02:00
fc884adc9f Fix HandleRequest trying to send more than one response 2024-08-10 21:52:14 +02:00
960d3f7c62 Fix Cover location 2024-08-10 21:45:47 +02:00
6520aebcdf Cleanup MangaCache 2024-08-10 21:42:09 +02:00
1ee9b644aa Fix Permissions for manga-directory 2024-08-10 21:37:43 +02:00
2f36701fef Reduce Logspam 2024-08-10 21:25:24 +02:00
b18f8e4059 Fix GET /v2/Job/Types 2024-08-10 21:00:38 +02:00
8145abb744 Fix working Directory in TrangaArgsMain 2024-08-10 18:53:18 +02:00
9dd52178b9 Update MangaHere and ManhuaPlus to v2 architecture 2024-08-10 17:34:45 +02:00
cf242f81e1 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/Manga.cs
2024-08-10 17:32:56 +02:00
a629792818 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-08-08 21:09:26 +02:00
34dd78810d Update README.md 2024-08-08 21:09:08 +02:00
e1c504226c Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-08-08 21:04:09 +02:00
200a22228f add log output for Mangahere
https://github.com/C9Glax/tranga/issues/69
2024-08-08 21:02:13 +02:00
bc10136331 MangaHere image download sucks, you have to iterate over all images one by one. Have some extra traffic then, idc. 2024-08-08 21:00:37 +02:00
https://github.com/C9Glax/tranga/issues/69
2024-08-08 21:00:37 +02:00
06df6e0767 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-08-08 19:00:26 +02:00
ba029b71f5 Merge branch 'refs/heads/manhuaplus' into cuttingedge-merge-ServerV2 2024-08-08 19:00:20 +02:00
082802ddbe Merge branch 'refs/heads/master' into cuttingedge-merge-ServerV2 2024-08-08 19:00:09 +02:00
d5f1df0400 Merge pull request #216 from C9Glax/dependabot/github_actions/docker/build-push-action-6.6.1
Bump docker/build-push-action from 6.5.0 to 6.6.1
2024-08-08 18:59:46 +02:00
d00881e611 Add Connector ManhuaPlus
https://github.com/C9Glax/tranga/issues/213
2024-08-08 18:58:40 +02:00
72bc7ec07b Bump docker/build-push-action from 6.5.0 to 6.6.1
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.5.0 to 6.6.1.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.5.0...v6.6.1)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-08-08 05:08:32 +00:00
89b5aa266e Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-07-31 19:25:03 +02:00
926c0d5833 fix #214 foldernames 2024-07-31 19:24:59 +02:00
80e2568113 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-07-31 17:48:21 +02:00
3b6417eff2 Fix #214 HTML encoded Characters 2024-07-31 17:48:15 +02:00
6b9ddca711 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-07-31 17:44:42 +02:00
2812a6dff1 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-07-31 17:44:37 +02:00
1991862a42 Merge remote-tracking branch 'refs/remotes/github/master' into cuttingedge-merge-ServerV2 2024-07-31 17:44:22 +02:00
40e4d5c203 Merge pull request #215 from C9Glax/dependabot/github_actions/docker/setup-buildx-action-3.6.1
Bump docker/setup-buildx-action from 3.4.0 to 3.6.1
2024-07-31 17:44:05 +02:00
49e9731184 Merge pull request #212 from C9Glax/dependabot/github_actions/docker/setup-qemu-action-3.2.0
Bump docker/setup-qemu-action from 3.1.0 to 3.2.0
2024-07-31 17:43:57 +02:00
a4e85f254f Merge pull request #210 from C9Glax/dependabot/github_actions/docker/build-push-action-6.5.0
Bump docker/build-push-action from 6.3.0 to 6.5.0
2024-07-31 17:43:48 +02:00
4f47aeadcf Bump docker/setup-buildx-action from 3.4.0 to 3.6.1
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3.4.0 to 3.6.1.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3.4.0...v3.6.1)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-30 05:45:04 +00:00
e0c1356fea Bump docker/setup-qemu-action from 3.1.0 to 3.2.0
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 3.1.0 to 3.2.0.
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
- [Commits](https://github.com/docker/setup-qemu-action/compare/v3.1.0...v3.2.0)

---
updated-dependencies:
- dependency-name: docker/setup-qemu-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-23 06:02:31 +00:00
0d9b3d2499 Bump docker/build-push-action from 6.3.0 to 6.5.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.3.0 to 6.5.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.3.0...v6.5.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-23 06:02:27 +00:00
d73bf70868 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/Server.cs
2024-07-11 15:46:35 +02:00
8e5d15ead9 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-07-11 15:46:27 +02:00
b8c28e6d21 Merge pull request #207 from C9Glax/master
Update active dev branch with changes to master
2024-07-11 15:45:33 +02:00
9ea5e436fe Merge pull request #204 from C9Glax/dependabot/github_actions/docker/setup-buildx-action-3.4.0
Bump docker/setup-buildx-action from 3.3.0 to 3.4.0
2024-07-11 15:44:39 +02:00
b4c310638a Merge pull request #205 from C9Glax/dependabot/github_actions/docker/build-push-action-6.3.0
Bump docker/build-push-action from 6.1.0 to 6.3.0
2024-07-11 15:44:17 +02:00
159341ff3c Merge pull request #206 from C9Glax/dependabot/github_actions/docker/setup-qemu-action-3.1.0
Bump docker/setup-qemu-action from 2.2.0 to 3.1.0
2024-07-11 15:43:58 +02:00
29338b9b17 Bump docker/setup-qemu-action from 2.2.0 to 3.1.0
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 2.2.0 to 3.1.0.
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
- [Commits](https://github.com/docker/setup-qemu-action/compare/v2.2.0...v3.1.0)

---
updated-dependencies:
- dependency-name: docker/setup-qemu-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-10 05:46:20 +00:00
0eda8913b0 Bump docker/build-push-action from 6.1.0 to 6.3.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.1.0 to 6.3.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.1.0...v6.3.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-10 05:46:17 +00:00
5ca50630e4 Bump docker/setup-buildx-action from 3.3.0 to 3.4.0
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3.3.0 to 3.4.0.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3.3.0...v3.4.0)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-10 05:46:15 +00:00
d0bfb262bf Merge remote-tracking branch 'refs/remotes/github/master' into cuttingedge-merge-ServerV2 2024-07-09 11:22:05 +02:00
4f14f15ade Merge pull request #200 from C9Glax/dependabot/github_actions/docker/setup-qemu-action-3.1.0
Bump docker/setup-qemu-action from 2.2.0 to 3.1.0
2024-07-09 11:20:29 +02:00
d89a24fd11 Merge pull request #201 from C9Glax/dependabot/github_actions/docker/build-push-action-6.3.0
Bump docker/build-push-action from 6.1.0 to 6.3.0
2024-07-09 11:20:14 +02:00
a5859e3c82 Merge pull request #203 from C9Glax/dependabot/github_actions/docker/setup-buildx-action-3.4.0
Bump docker/setup-buildx-action from 3.3.0 to 3.4.0
2024-07-09 11:19:55 +02:00
dd2fa3fbd7 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-07-09 11:17:58 +02:00
33e5d65785 fix Kavita GetLibraries 2024-07-09 11:17:50 +02:00
d60ed77dbe Bump docker/setup-buildx-action from 3.3.0 to 3.4.0
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3.3.0 to 3.4.0.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3.3.0...v3.4.0)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-05 05:11:09 +00:00
e15c6816b5 Bump docker/build-push-action from 6.1.0 to 6.3.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.1.0 to 6.3.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.1.0...v6.3.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-04 05:56:47 +00:00
4a4fe4b40d Bump docker/setup-qemu-action from 2.2.0 to 3.1.0
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 2.2.0 to 3.1.0.
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
- [Commits](https://github.com/docker/setup-qemu-action/compare/v2.2.0...v3.1.0)

---
updated-dependencies:
- dependency-name: docker/setup-qemu-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-04 05:56:42 +00:00
d221532e0d Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-06-29 22:50:14 +02:00
4881789970 Merge branch 'refs/heads/cuttingedge' 2024-06-29 22:50:07 +02:00
be1e6fe988 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-06-29 22:49:56 +02:00
f61e51e506 Fix crash when moving files, now overwrites. 2024-06-29 22:49:39 +02:00
eba511749b Merge pull request #199 from C9Glax/cuttingedge
Merge cuttingedge to latest.
2024-06-29 19:49:06 +02:00
5bc2a8909d Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/Server.cs
2024-06-29 19:38:01 +02:00
de4c57a0cd Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-06-29 19:37:09 +02:00
e368c3c98a Fix https://github.com/C9Glax/tranga/issues/193
Mangaworld Volume and Chapter number Parsing.
2024-06-29 19:37:02 +02:00
f3e0959be8 Merge pull request #198 from C9Glax/master
Merge Github Actions
2024-06-29 19:23:37 +02:00
d17ca1d97a Merge pull request #197 from C9Glax/master
Merge Github Actions
2024-06-29 19:22:59 +02:00
e9376e3782 Merge pull request #196 from C9Glax/master
Merge Github Actions
2024-06-29 19:21:41 +02:00
7c217a7e33 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-06-29 19:20:16 +02:00
a437fcbca1 Possible fix https://github.com/C9Glax/tranga/issues/185
Mangaworld publication id had invalid path characters.
2024-06-29 19:20:04 +02:00
1dcfecd66f Create CoverImageCache when saving coverimages. 2024-06-29 19:14:37 +02:00
6db4646336 Move/rename archives if volume number gets updated. 2024-06-29 19:11:18 +02:00
8a6298e3fd Merge pull request #157 from C9Glax/dependabot/github_actions/docker/setup-buildx-action-3.3.0
Bump docker/setup-buildx-action from 3.1.0 to 3.3.0
2024-06-27 00:08:31 +02:00
194705c124 Merge pull request #194 from C9Glax/dependabot/github_actions/docker/build-push-action-6.1.0
Bump docker/build-push-action from 5.3.0 to 6.1.0
2024-06-27 00:06:28 +02:00
f4d5969003 Bump docker/build-push-action from 5.3.0 to 6.1.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 5.3.0 to 6.1.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v5.3.0...v6.1.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-06-24 05:58:28 +00:00
8607bd2c89 #187 NTFY JsonConverter 2024-06-15 21:40:28 +02:00
9d92069a4b #187 NTFY JsonConverter 2024-06-15 21:39:53 +02:00
5614729eab #187 Server v1 NTFY username password 2024-06-15 21:33:42 +02:00
fab30dc5a7 Documentation
https://github.com/C9Glax/tranga/issues/187
2024-06-15 21:27:24 +02:00
fd20b9febf NTFY use Username and Password
https://github.com/C9Glax/tranga/issues/187
2024-06-15 21:26:23 +02:00
ee6de661c8 Merge branch 'refs/heads/C9Glax-tranga-issue-187' into Server-V2 2024-06-15 21:24:41 +02:00
d52ec8d36f NTFY username and password usage instead of auth. 2024-06-15 21:24:28 +02:00
790e77b00c Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-06-02 01:05:29 +02:00
37dfb4df02 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-06-02 01:05:20 +02:00
42feea3ad5 Fix covers returning wrong fileLocation if cover already exists. 2024-06-02 01:05:08 +02:00
4f14903538 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-06-02 00:23:29 +02:00
bbc750d731 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-06-02 00:23:23 +02:00
08dd01942f #183 Fix NTFY not exporting topic to notificationConnectors.json 2024-06-02 00:23:16 +02:00
6ae3918679 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-06-02 00:11:33 +02:00
351144e763 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-06-02 00:09:18 +02:00
aea4c0c61b Add GlaxArguments to fetch Runtime-Args 2024-06-02 00:09:03 +02:00
7b9e935db7 Commented optional second level only domains for cover-image-names 2024-06-01 22:10:09 +02:00
048b165d76 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-06-01 22:09:18 +02:00
ebe3012c69 NTFY check endpoint URI and add optional custom topic #183 2024-06-01 22:09:08 +02:00
8ccb6c0cb5 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-05-26 23:04:35 +02:00
a5dbed9525 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-05-26 23:04:27 +02:00
811ddd903f fix missing minus-sign from domain names in cover images 2024-05-26 23:04:16 +02:00
beb455308f Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-05-26 22:52:05 +02:00
f948809bcd Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-05-26 22:51:59 +02:00
7ceb9cd4cb #182 Changed filename from the remote filename to the format server-internalId.fileFormat 2024-05-26 22:51:46 +02:00
57f1e037ef Corrected check for whether cover exists 2024-05-26 22:45:39 +02:00
5c309131ad Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/Jobs/JobBoss.cs
2024-05-26 18:56:30 +02:00
6ca8d58e43 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-05-26 18:46:58 +02:00
e3211b95e2 #182 Remove covers that have no associated Manga 2024-05-26 18:46:40 +02:00
b5e9e03f64 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-05-26 18:34:57 +02:00
98bd8a983b Possible Fix #182 2024-05-26 18:34:45 +02:00
27a559834f Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/Jobs/JobBoss.cs
2024-05-26 18:26:06 +02:00
f4996659ef Fix loading file results in "null"-job and crashes. 2024-05-26 18:23:16 +02:00
e05684d5d1 Fix loading file results in "null"-job and crashes. 2024-05-26 18:22:51 +02:00
4a7d23c0d9 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-05-26 18:10:45 +02:00
1d44b6d9c6 Log added Jobs during Startup 2024-05-26 18:10:29 +02:00
2cfc7ac2c5 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-04-27 19:09:31 +02:00
811a183af2 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-04-27 19:09:22 +02:00
fb0755eb89 Use NeedlemanWunsch for string comparison on Mangasee.cs
Resolves #132
#167
2024-04-27 19:09:12 +02:00
2e8b896f3b Fix #178 wrong check on parsing variable aprilfoolsmode 2024-04-27 17:53:08 +02:00
017f31ca83 Clean 2024-04-26 16:39:39 +02:00
4021237888 Add Endpoint GET /v2/Manga/Search GlobalSearch
Resolves #124
#167
2024-04-26 00:51:18 +02:00
7ed3846c5f Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2 2024-04-26 00:49:08 +02:00
4692cc297a Fix MangaDex linksNode is null 2024-04-26 00:48:55 +02:00
7f95ab9439 Add Endpoint GET /v2/Manga to request multiple Manga from internalIds #167 2024-04-26 00:22:17 +02:00
49a9b7ccb0 Corrected Job->Manga in return 2024-04-26 00:19:38 +02:00
0735e2c588 Change GET /v2/Manga to /v2/Mangas 2024-04-26 00:16:28 +02:00
5b22246c41 Add Endpoint GET /v2/Job returns list of jobs specified by jobid 2024-04-26 00:14:46 +02:00
2e1f633f40 Add Endpoint POST /v2/Manga/internalId/moveFolder #167 2024-04-26 00:05:48 +02:00
8887cea718 Add Endpoint POST /v2/Manga/internalId/ignoreChaptersBelow #167 2024-04-26 00:03:46 +02:00
061da1b4bf Add field customFolder and startChapter to CreateJob Endpoint
https://github.com/C9Glax/tranga/pull/167#issuecomment-2077909075
#167
2024-04-25 23:55:31 +02:00
80dc8fbe65 Resolves #176 Return 409 conflict if job already exists. 2024-04-25 23:50:06 +02:00
28a0efe488 Add Endpoint /v2/Manga/internalId/Chapters/Latest 2024-04-25 23:45:13 +02:00
3d08b1f9f2 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/GlobalBase.cs
#	Tranga/Jobs/JobBoss.cs
#	Tranga/Jobs/UpdateMetadata.cs
#	Tranga/Manga.cs
#	Tranga/MangaConnectors/Bato.cs
#	Tranga/MangaConnectors/MangaKatana.cs
#	Tranga/MangaConnectors/MangaLife.cs
#	Tranga/MangaConnectors/Manganato.cs
#	Tranga/MangaConnectors/Mangasee.cs
#	Tranga/MangaConnectors/Mangaworld.cs
2024-04-25 23:34:56 +02:00
3d855020eb Export job files indented. 2024-04-25 21:32:48 +02:00
c6d0168d2f Fix #174 auth not being written to file for ntfy. 2024-04-25 21:29:05 +02:00
d52213002e Delete old jobfiles. 2024-04-25 21:24:29 +02:00
ec9290f41f Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge
# Conflicts:
#	Tranga/Jobs/UpdateMetadata.cs
2024-04-25 21:10:42 +02:00
6b91796e5a Update manga in DownloadNewChapters Jobs 2024-04-25 21:10:26 +02:00
9f9ea569d5 fix bug Manga.WithMetadata coverfilenameincache not being replaced. 2024-04-25 21:03:57 +02:00
4bd1150a0e fix bug Manga.WithMetadata coverfilenameincache not being replaced. 2024-04-25 21:03:44 +02:00
8b62e2c467 Possible fix #175 Export jobs when Manga-Metadata is updated. 2024-04-25 20:57:59 +02:00
7ec262a2e4 Possible fix #175 Export jobs when Manga-Metadata is updated. 2024-04-25 20:57:46 +02:00
d32d5976ee Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-04-25 20:46:32 +02:00
58cff6513a Possible fix #175 2024-04-25 20:46:26 +02:00
783f229a6a Add LibraryConnector.Test to see if requests can be made to endpoint. 2024-04-23 00:58:33 +02:00
2651a0c53b Implemented /v2/NotificationConnector/* 2024-04-23 00:58:19 +02:00
0ced3a7dd9 Implement /v2/LibraryConnector/* 2024-04-23 00:51:24 +02:00
a56555eee4 Add LibraryConnector.Test to see if requests can be made to endpoint. 2024-04-23 00:48:08 +02:00
cee7870aad Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/Server.cs
2024-04-23 00:21:18 +02:00
aaf06da8e1 Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into cuttingedge 2024-04-23 00:20:50 +02:00
51a26a3cba Fix https://github.com/C9Glax/tranga/issues/143
ImageCache could never find files, because they were not in the expected location.
2024-04-23 00:20:34 +02:00
bce77180bc Merge branch 'refs/heads/cuttingedge-merge-ServerV2' into Server-V2
# Conflicts:
#	Tranga/Jobs/UpdateMetadata.cs
#	Tranga/Manga.cs
2024-04-22 23:54:26 +02:00
8c66bbc89f Use publicationCache to store and update Manga 2024-04-22 23:45:51 +02:00
762da4c859 Make cachedPublications private with getter-setter 2024-04-22 22:43:42 +02:00
daba940b45 Make cachePublications a dictionary with internalId as key. 2024-04-22 22:38:23 +02:00
79e61a62c7 Export Jobfiles after execution, update metadata in jobfiles 2024-04-22 22:29:22 +02:00
06fe98323a Fix crashing when comparing old Manga (missing websiteUrl) 2024-04-22 22:09:43 +02:00
5f820c53f5 Update websiteUrl on metadata-refresh https://github.com/C9Glax/tranga-website/issues/60 2024-04-22 22:03:09 +02:00
c69f1f6569 Addresses #170 Manganato authors and genres include "\r\n" 2024-04-22 04:45:49 +02:00
e360037fda Add "(?:/?)" to the end of all Regex RequestPaths 2024-04-22 04:43:08 +02:00
ea866e0136 Added Endpoint /v2/Manga lists all known Manga
Implemented /v2/Manga/*
2024-04-22 04:42:10 +02:00
c3231327f9 nullable 2024-04-22 04:21:39 +02:00
03e90eccd3 No longer require connector name to create job 2024-04-22 04:21:30 +02:00
64482931a3 Implemented GET /v2/LogFile 2024-04-22 03:19:56 +02:00
cce4901a5d Implement all /v2/Settings 2024-04-22 03:03:17 +02:00
3adb103fc4 Fix API-Path prematurely triggering match. 2024-04-22 03:02:49 +02:00
b6ffb97a04 Merge branch 'refs/heads/cuttingedge' into Server-V2 2024-04-22 02:27:34 +02:00
5bdbd9e2e4 Hack to resolve #60 Website-URL.
Field will have same name, just acquisition will be better.
2024-04-22 02:25:39 +02:00
49cfff8a2f Changed the Creation Job API to a single Endpoint /v2/Job/Create/<Type>
Added and implemented GET /v2/Job/Types
Implemented /v2/Job/<jobId>
Implemented /v2/Job/<jobId>/StartNow
Implemented /v2/Job/<jobId>/Cancel
Implemented /v2/Job/<jobId>/SetInterval
2024-04-22 00:00:35 +02:00
6d48a100ca Implemented GET
/v2/Jobs
/v2/Jobs/Running
/v2/Jobs/Waiting
/v2/Jobs/Monitoring
/v2/Job/<jobId>
/v2/Job/<jobId>/Progress
2024-04-21 21:48:24 +02:00
4104169c19 Fix path excluding symbols that are used in requests 2024-04-21 21:46:52 +02:00
4cb7c941a2 Implemented /v2/Connector/<ConnectorName>/GetManga 2024-04-21 21:32:03 +02:00
b3fb53f6d8 Corrected link 2024-04-20 18:55:54 +02:00
f729c44f88 Merge branch 'refs/heads/master' into cuttingedge 2024-04-20 18:49:19 +02:00
8b9769b816 Merge branch 'refs/heads/master' into Server-V2 2024-04-20 18:49:08 +02:00
f4966b0348 Docker Image build 2024-04-20 18:48:51 +02:00
9a02859f6b Docker Image build 2024-04-20 18:46:00 +02:00
e96dd07521 Link API Documentation in README.md 2024-04-20 18:41:12 +02:00
a610eff8f0 Merge branch 'refs/heads/cuttingedge' into Server-V2 2024-04-20 18:39:56 +02:00
df2fc4a036 Remove README CLI reference 2024-04-20 18:39:49 +02:00
c41f04d92d All Valid Request Paths return "Not Implemented".
Ping returns Pong.
2024-04-20 18:34:20 +02:00
5e647099cd Spelling 2024-04-20 17:56:54 +02:00
011af9c7a8 #114 API Documentation 2024-04-20 16:59:51 +02:00
630e507564 #74 API Documentation 2024-04-20 16:59:32 +02:00
fa2598084f Hard cutover https://github.com/C9Glax/tranga/pull/167#issuecomment-2067689986 2024-04-20 16:54:58 +02:00
f79743ee93 actually use v2 API 2024-04-19 22:20:24 +02:00
2828fec316 Merge 2024-04-19 22:08:03 +02:00
bd14722791 Merge remote-tracking branch 'refs/remotes/db-2001/json-api' into Server-V2 2024-04-19 22:06:55 +02:00
d22b49cfa8 Change Method Header for Handlers to return the response to HandleRequest so we don't forget to send a response. 2024-04-19 21:58:29 +02:00
595051b0fe Merge remote-tracking branch 'origin/Server-V2' into Server-V2 2024-04-19 21:54:21 +02:00
238395a3da Return JobIds instead of full jobs.
/v2/Jobs
/v2/Jobs/Running
/v2/Jobs/Waiting
/v2/Jobs/Monitoring
2024-04-19 21:54:16 +02:00
0313d81204 Return JobIds instead of full jobs.
/v2/Jobs
/v2/Jobs/Running
/v2/Jobs/Waiting
/v2/Jobs/Monitoring
2024-04-19 21:40:31 +02:00
f5cecb9e30 Github Reference Link Style 2024-04-19 21:35:38 +02:00
7e5fa6ce41 API v2 2024-04-19 21:23:15 +02:00
0ab2ae03ce unionby instead of concat 2024-04-19 03:07:46 +02:00
95236daf41 Check if tags and authors are the same on Manga equals.
UpdateManga performs union/concat operation on alttitles, tags and authors
2024-04-19 03:00:31 +02:00
294ce01bc3 Set Manga.releaseStatus to new releaseStatus.
Fix #119
2024-04-19 02:37:17 +02:00
13565d1c7a Fixes #166 MangaDex crash on UpdateMetadata, needed to include cover_art in request 2024-04-19 02:21:20 +02:00
a8aa7d3370 Okay, actually write request variables to log. 2024-04-18 18:45:19 -04:00
01bab62190 Log request if unknown 2024-04-18 18:32:49 -04:00
2768ab38e6 Merge remote-tracking branch 'upstream/cuttingedge' into json-api 2024-04-18 18:24:26 -04:00
54b24ac37f Merge remote-tracking branch 'refs/remotes/db-2001/cuttingedge' into cuttingedge 2024-04-19 00:10:14 +02:00
c67e89f1dd null checks 2024-04-19 00:07:34 +02:00
4ba44d3ac3 Merge branch 'C9Glax:cuttingedge' into cuttingedge 2024-04-18 18:04:07 -04:00
33b8ede492 Use new requestParams variable for AprilFoolsMode setting 2024-04-18 17:58:23 -04:00
dbc1b94124 Merge branch 'cuttingedge' into json-api
Resolved merge conflicts with cuttingedge branch
2024-04-18 17:56:44 -04:00
8631cf6376 Merge pull request #161 from C9Glax/MangaDexRequestLimitChange
MangaDex request limit change
2024-04-18 23:54:44 +02:00
df4d547e2b Fix crash with old settings files 2024-04-18 23:52:52 +02:00
006b71b496 Merge remote-tracking branch 'upstream/cuttingedge' into cuttingedge 2024-04-18 17:48:43 -04:00
5f03b0d89c Closes #154 2024-04-18 23:05:04 +02:00
6dc1ea0030 Merge branch 'refs/heads/master' into cuttingedge 2024-04-18 22:52:51 +02:00
ff08754610 Bump docker/setup-buildx-action@v3.3.0
Bump docker/build-push-action@v5.3.0
2024-04-18 22:52:38 +02:00
d1a6c0ad3d Set Chromium Start Timeout to 30 seconds.
Resolves #135 ?
2024-04-18 22:13:10 +02:00
0260868968 Merge pull request #163 from C9Glax/cuttingedge
Connector Bugs, AprilFools Mode
2024-04-18 21:29:40 +02:00
b1f72dcb81 Legacy RateLimit remove 2024-04-18 19:00:28 +02:00
b0f353819b Legacy RateLimit 2024-04-18 18:58:42 +02:00
8f8d019861 Streamlined MangaDex information retrieval 2024-04-18 18:56:34 +02:00
21a7392493 Resolves #160, Rated Manga on Mangadex. 2024-04-18 18:01:02 +02:00
0d5db15f87 Merge remote-tracking branch 'upstream/cuttingedge' into cuttingedge 2024-04-16 21:51:58 -04:00
431fde0d76 Wrong April Fools check.
Resolves https://github.com/C9Glax/tranga/issues/159
2024-04-16 04:18:56 +02:00
e022bf3081 Merge branch 'cuttingedge' into dev 2024-04-15 15:02:52 +02:00
c25a4f69ec Cleanup 2024-04-15 14:51:01 +02:00
82bdb248b9 userAgent private set in settings 2024-04-15 14:50:44 +02:00
b27114eaad April Fools Mode
https://github.com/C9Glax/tranga/issues/155
2024-04-15 14:50:03 +02:00
051eb4a417 Merge pull request #158 from db-2001/cuttingedge
Reimplement Fix for Mangasee
2024-04-14 14:35:06 -04:00
482704af2c Merge remote-tracking branch 'upstream/cuttingedge' into cuttingedge 2024-04-14 14:29:30 -04:00
af4229920d Bump docker/setup-buildx-action from 3.1.0 to 3.3.0
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3.1.0 to 3.3.0.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3.1.0...v3.3.0)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-04-09 05:32:25 +00:00
6f5fb7e0bb API rewrite to parse JSON body for POST and DELETE 2024-04-07 18:20:28 -04:00
7628510b87 Documentation for API Calls 2024-04-06 18:13:31 -04:00
dd965d886a Revert "Added initial documentation for API Calls"
This reverts commit 7e54577c54.
2024-04-06 18:11:03 -04:00
7e54577c54 Added initial documentation for API Calls 2024-04-06 18:05:27 -04:00
537ad3a5f8 https://github.com/C9Glax/tranga/issues/142
Cleanup old temporary Folders and files
2024-04-01 20:35:47 +02:00
6a8697fc3a Manga4Life fix bug that made it impossible for Manga to be loaded if they did not have a "Load more Chapters" button.
https://github.com/C9Glax/tranga/issues/149
Created a check if the button exists before trying to click it.
2024-04-01 20:12:25 +02:00
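The fix described in the commit above amounts to a null-check before interacting with the page element. Below is a minimal sketch of that pattern, assuming a PuppeteerSharp-style headless-browser API; the selector string and method name are placeholders, not taken from the actual connector code.

```csharp
using System.Threading.Tasks;
using PuppeteerSharp;

public static class ChapterListLoader
{
    // Click "Load more Chapters" only if the button actually exists on the page.
    // The CSS selector below is a hypothetical placeholder.
    public static async Task ClickLoadMoreIfPresentAsync(IPage page)
    {
        IElementHandle loadMoreButton = await page.QuerySelectorAsync("button.load-more-chapters");
        if (loadMoreButton is null)
            return; // Manga with few chapters have no button; nothing to do.
        await loadMoreButton.ClickAsync();
    }
}
```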
94582496ef Mangadex do not try downloading externally linked chapters, or chapters that have no pages.
https://github.com/C9Glax/tranga/issues/153
2024-04-01 20:00:02 +02:00
17ef5eae0f Fix MangaDex request for new Chapter. 2024-03-30 21:53:11 +01:00
d5b6d4e8ee Fixes for https://github.com/C9Glax/tranga/issues/138 and bug fix for MDex 2024-03-29 23:59:16 -04:00
05190bc9e2 Holy moly a fix for Mangasee 2024-03-26 18:16:41 -04:00
d211dd2d01 Added check to prevent creation of empty chapter files 2024-03-18 22:32:26 -04:00
590547e407 Add Logline to print current logfilePath. 2024-03-05 02:55:10 +01:00
2ad04c5c46 Change LogFilePath to LogFolderPath
#139
2024-03-05 02:35:47 +01:00
189569ccdf dev image 2024-02-28 20:38:22 +01:00
2872eeea09 Merge pull request #134 from C9Glax/dependabot/github_actions/docker/setup-buildx-action-3.1.0
Bump docker/setup-buildx-action from 2.10.0 to 3.1.0
2024-02-28 07:03:31 +01:00
c0cfeaa35d Bump docker/setup-buildx-action from 2.10.0 to 3.1.0
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 2.10.0 to 3.1.0.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v2.10.0...v3.1.0)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-02-28 06:02:59 +00:00
2fd780996c Dockerfile maddnesssss 2024-02-28 04:03:53 +01:00
b390bb8ea5 LogFilePath 2024-02-28 03:59:09 +01:00
847829e617 Corrected DockerFile Arguments 2024-02-28 03:56:24 +01:00
0f29da00de Merge pull request #122 from C9Glax/tranga-website-41
Website Changes
2024-02-28 03:22:42 +01:00
9b2a6de841 Merge pull request #133 from C9Glax/cuttingedge
RateLimits, FileNames, Volume/Chapter Numbers
2024-02-28 02:49:48 +01:00
17a27c9922 Reset RequestLimits 2024-02-28 02:33:43 +01:00
6c9071b22b Reset UserAgent 2024-02-28 02:32:36 +01:00
abfe42b7c1 Reset UserAgent when Empty 2024-02-28 02:25:46 +01:00
72ae124418 Handle unauthorized kavita 2024-02-28 02:25:17 +01:00
bee6e7ba37 Export settings after updating rateLimits 2024-02-28 02:23:58 +01:00
8079ffc742 GlobalBase static is FileInUse 2024-02-28 02:17:48 +01:00
6d6e33491b Indented Json 2024-02-28 02:15:04 +01:00
a8697a14a3 GlobalBase static is FileInUse 2024-02-28 02:14:58 +01:00
e2adac937a Fix settings not being loaded from settingsfile 2024-02-28 02:13:18 +01:00
b4708c5d10 Encoding 850 issue for jsonconvert 2024-02-28 02:12:23 +01:00
597abde115 Fix wrong chapter (and volume) numbers for chapters 2024-02-27 22:04:14 +01:00
2a824bbb8d Correct "1" ChapterNumbers for Mangasee 2024-02-12 21:04:14 +01:00
9691eb0d08 Correct ChapterNumbers for Mangasee 2024-02-12 21:02:01 +01:00
4888e18fd2 Correct ChapterNumbers for Mangasee 2024-02-12 20:49:33 +01:00
0aa92a7913 Correct VolumeNumbers for Mangasee 2024-02-12 11:22:19 +01:00
db53e2156b API added POST
NotificationConnectors/Reset
LibraryConnectors/Reset
2024-02-11 20:44:27 +01:00
1cce0f204e API added POST
NotificationConnectors/Test
LibraryConnectors/Test
2024-02-11 20:41:55 +01:00
ce41c49a0e Merge branch 'master' into tranga-website-41 2024-02-11 01:11:41 +01:00
b8570e5eef Merge branch 'master' into cuttingedge 2024-02-11 01:11:34 +01:00
1f24a2349d Do not build latest/master on pull 2024-02-11 01:11:23 +01:00
ca95460218 https://github.com/C9Glax/tranga/pull/122
https://github.com/C9Glax/tranga-website/pull/41
LogFile
Enable LogFiles
2024-02-11 01:06:40 +01:00
e801cc4cbf #122 RateLimit GET
https://github.com/C9Glax/tranga-website/pull/41
2024-02-11 00:49:26 +01:00
2c4c8de8b5 Remove StyleSheet from TrangaSettings 2024-02-11 00:39:21 +01:00
0b4461265c #109 Rate Limits
Moved Config for RateLimits to TrangaSettings
Updated API: Settings/customRequestLimit
requestType in RequestType.cs
requestsPerMinute as int
2024-02-11 00:35:33 +01:00
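The commit above moves rate-limit configuration into TrangaSettings, keyed by request type, with requests-per-minute stored as an int. A minimal sketch of that shape follows; the enum members and class names are illustrative, not the actual Tranga definitions.

```csharp
using System.Collections.Generic;

public enum RequestType
{
    Default,
    MangaInfo,
    MangaCover,
    MangaDexFeed,
    MangaDexImage
}

public class TrangaSettingsSketch
{
    // Requests per minute, configurable per RequestType via Settings/customRequestLimit.
    public Dictionary<RequestType, int> requestLimits = new()
    {
        { RequestType.Default, 60 },
        { RequestType.MangaInfo, 60 },
        { RequestType.MangaCover, 60 },
        { RequestType.MangaDexFeed, 60 },
        { RequestType.MangaDexImage, 40 }
    };

    public void UpdateRequestLimit(RequestType requestType, int requestsPerMinute)
    {
        requestLimits[requestType] = requestsPerMinute;
        // The real implementation also exports the settings file after a change
        // (see "Export settings after updating rateLimits").
    }
}
```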
c008d55f26 #103 Regeeeeex 2024-02-08 11:05:44 +01:00
9b990aecea With a passion 2024-02-07 19:40:07 +01:00
299fa6afda I hate Regex 2024-02-07 19:37:35 +01:00
c03e927565 Fix Mangaworld #103 Plurals 2024-02-07 19:23:55 +01:00
bb6c553afa One more Regex... 2024-02-07 19:05:11 +01:00
33d78ed757 https://github.com/C9Glax/tranga/issues/111#issuecomment-1932447848 2024-02-07 18:18:33 +01:00
84272ddd1e https://github.com/C9Glax/tranga/issues/111#issuecomment-1932447848 2024-02-07 18:08:57 +01:00
2f0fbbd3cb #111 Fix renaming of chapters.
Fixed check if Chapter exists
2024-02-07 15:50:26 +01:00
5bc414fd59 #113 old formatting of fileNames 2024-02-07 15:34:20 +01:00
2eaeadb92c #113 whitespaces 2024-02-07 15:29:42 +01:00
d8df6eccb1 Mangasee fix cloudflare 520 2024-02-07 14:53:57 +01:00
db64b717eb Fix regex for parsing publicationId 2024-02-02 19:38:16 +01:00
1afe36a525 add todo 2024-02-02 18:46:09 +01:00
aa692f6978 #108 2024-02-02 18:45:12 +01:00
c706824222 Merge pull request #110 from C9Glax/cuttingedge
Update Master
2024-01-31 19:14:41 +01:00
3ca6245fc2 save Useragent as string and export settings after changing 2024-01-31 19:00:38 +01:00
2dd82aad13 https://datatracker.ietf.org/doc/html/rfc2616 2024-01-31 18:46:37 +01:00
3c4867a276 #105 2024-01-31 18:39:34 +01:00
bae157cdb4 Cleanup #90 2024-01-31 18:39:34 +01:00
3b818ff1af typo 2024-01-31 18:39:34 +01:00
5d12be2983 Fix crash when Request times out on ChromiumDownloadClient 2024-01-31 18:39:34 +01:00
31a4e693e0 Custom Request Limits #109 2024-01-31 18:39:34 +01:00
e49db9a4cb Change toplevel domain #103 2024-01-25 16:40:04 +01:00
54142e61fe Fix #103 2024-01-20 17:20:56 +01:00
cd5ca0e302 Fix #90 2024-01-20 16:44:22 +01:00
95da900213 Add url to Request-Error Output 2024-01-20 16:33:47 +01:00
b5be4e0dd8 Fixes #97 missing jobs.
Implemented Equals(obj) functions for Chapter, DownloadChapter and DownloadNewChapters to check if jobs already exist.
2024-01-11 20:19:04 +01:00
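As described above, duplicate jobs are detected by value-equality instead of reference-equality. A minimal sketch of such an Equals override is shown below; the Chapter and job shapes are simplified placeholders.

```csharp
public class ChapterSketch
{
    public string PublicationInternalId { get; init; } = "";
    public float ChapterNumber { get; init; }

    public override bool Equals(object obj) =>
        obj is ChapterSketch other &&
        PublicationInternalId == other.PublicationInternalId &&
        ChapterNumber.Equals(other.ChapterNumber);

    public override int GetHashCode() =>
        System.HashCode.Combine(PublicationInternalId, ChapterNumber);
}

public class DownloadChapterJobSketch
{
    public ChapterSketch Chapter { get; init; } = new();

    // Two download jobs are "the same job" when they target the same chapter,
    // so adding a job can first check whether an equal one already exists.
    public override bool Equals(object obj) =>
        obj is DownloadChapterJobSketch other && Chapter.Equals(other.Chapter);

    public override int GetHashCode() => Chapter.GetHashCode();
}
```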
0c135aa89e Fixes #97 because stupid 2024-01-06 17:12:36 +01:00
e11ee4dafe Fixes #98 VolumeNumber can not be null for comparison 2024-01-04 17:04:08 +01:00
05573f65f9 #96 Added single click to load all chapters. 2024-01-03 18:37:29 +01:00
d986c808e3 Chapter as Comparable 2024-01-03 18:37:12 +01:00
5df63b00c2 Moved Struct RequestResult to own file 2024-01-03 17:31:00 +01:00
903bb5af5e Resolves #97 Manga4Life Volume Numbers 2024-01-03 17:05:33 +01:00
cc8453d4a8 #85 included characters with accents, umlauts, and + 2023-12-24 16:52:24 +01:00
800d4c1ec1 Amend 29f6de2590
Fix #87, manga that return no chapters, crash when updating latest released chapter.
2023-12-24 16:43:49 +01:00
b4f97eefcf Fix comparisons 2023-12-24 16:34:54 +01:00
29f6de2590 Catch parsing error #93 to prevent crashes and restart loops 2023-12-24 16:27:20 +01:00
23e5c4a7b1 Fix #93 2023-12-24 16:20:06 +01:00
e15717cb04 Merge pull request #84 from arxae/mangakatana_input_string_not_correct_format
Fixed input string not being in correct format
2023-11-13 11:54:02 +01:00
b995fc568a Requested changes 2023-11-13 06:49:20 +01:00
442d949371 Fix #80 UpdateMetaData failing 2023-11-12 13:03:33 +01:00
263d0e6036 Fix #82 Tranga crashes when cover is missing from imageCache.
Retrying download of cover and copy
2023-11-12 12:39:32 +01:00
7c7d43021e Fixed input string not being in correct format 2023-11-12 05:38:06 +01:00
5cdc7d7207 Fix wrong jobtype 2023-11-05 16:14:23 +01:00
1bcbd1517f Addresses #81 2023-11-05 16:14:12 +01:00
b72da45ae9 Add GetMangaFromId for MangaWorld 2023-11-02 15:58:16 +01:00
01041e43ac Fix publicationId for MangaWorld 2023-11-02 15:58:04 +01:00
4c1a659f16 Add API: POST Jobs/UpdateMetadata 2023-11-02 15:48:46 +01:00
2e02f0b237 Exception message. 2023-11-02 15:48:31 +01:00
77f93d87f9 UpdateMetadata now finishes correctly. 2023-11-02 15:48:17 +01:00
45c0f19a9d Added override Manga.Equals 2023-11-02 15:48:03 +01:00
7c09deb143 Remove Manga.WebsiteUrl 2023-11-02 15:47:43 +01:00
449d406eab Add MangaConnector.GetMangaFromId 2023-11-02 15:47:16 +01:00
083ce238d8 Add UpdateMetadata Job to DownloadNewChapters 2023-11-02 15:20:34 +01:00
5f9ffb8aad Improved UpdateMetadata 2023-11-02 15:20:20 +01:00
92bc3d5aa8 Catch HttpRequestException in LibraryConnector 2023-11-02 15:19:56 +01:00
49ab8928b1 Add parameter JobBoss to Job.ExecuteTask (and Internal) 2023-11-02 15:19:36 +01:00
391efcb9bc Add Field jobType to Job 2023-11-02 15:18:41 +01:00
963ad375e8 Add Job UpdateMetadata --> untested! 2023-11-01 14:17:11 +01:00
0a5ded2036 Add field WebsiteUrl to Manga 2023-11-01 14:15:55 +01:00
4843c7f05c Overwrite SeriesInfo.json parameter in SaveSeriesInfoJson. 2023-11-01 14:04:35 +01:00
6adbda2359 #77 Added field releaseStatus to Manga 2023-11-01 13:59:21 +01:00
425cf7e0d6 Re-add forgotten seriesInfo.json to new downloads 2023-11-01 13:36:58 +01:00
8f5dd5aab5 #78 Manganato chapternumber parsing from url 2023-11-01 13:22:33 +01:00
733ae285f1 #76 debug 2023-10-31 16:46:41 +01:00
2e1c8ce34f #75 Reimplemented own search.
At the moment returns too many results, levenshtein distance still too inefficient.
2023-10-31 15:47:39 +01:00
c965bc38d1 https://github.com/C9Glax/tranga-website/issues/19
Wrong regex for URLs with ports
2023-10-30 19:30:51 +01:00
37266ea095 https://github.com/C9Glax/tranga-website/issues/19
Add exception handling if host doesn't exist
2023-10-30 13:48:25 +01:00
8caac538c9 https://github.com/C9Glax/tranga-website/issues/19 Send a badrequest response if not a valid libraryconnector 2023-10-30 13:39:50 +01:00
7c7f711bb4 https://github.com/C9Glax/tranga-website/pull/17 2023-10-28 12:47:13 +02:00
d78897eb74 #74 untested 2023-10-27 14:09:34 +02:00
438c11af4f #73 api side, untested 2023-10-27 13:47:37 +02:00
38df54baff Exception handling on request failed HttpDownloadClient 2023-10-25 18:22:00 +02:00
98d187d133 Possible fix #72
Volume Numbers broke Regex
Now can also parse volume numbers!
2023-10-25 18:16:26 +02:00
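The two fixes above revolve around a regex that must tolerate both "Vol. X Ch. Y" titles and bare chapter titles. A minimal, hypothetical parsing sketch in that spirit; the pattern is illustrative, not the connector's actual regex.

```csharp
using System.Globalization;
using System.Text.RegularExpressions;

public static class ChapterTitleParser
{
    // Matches e.g. "Volume 3 Chapter 12.5 - Title" or "Ch. 7"; the volume part is optional.
    private static readonly Regex Pattern = new(
        @"(?:Vol(?:ume)?\.?\s*(?<volume>\d+)\s*)?Ch(?:apter)?\.?\s*(?<chapter>\d+(?:\.\d+)?)",
        RegexOptions.IgnoreCase);

    public static (float? volume, float? chapter) Parse(string title)
    {
        Match m = Pattern.Match(title);
        if (!m.Success)
            return (null, null);
        float? volume = m.Groups["volume"].Success
            ? float.Parse(m.Groups["volume"].Value, CultureInfo.InvariantCulture)
            : null;
        float chapter = float.Parse(m.Groups["chapter"].Value, CultureInfo.InvariantCulture);
        return (volume, chapter);
    }
}
```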
5352cca058 Possible fix for #72
RegexMatching was off for last element sometimes on bato
2023-10-23 17:01:26 +02:00
3381909afd Fix #72 Chapternumber Parsing Bato 2023-10-21 15:44:37 +02:00
7219641859 #68 Because XML is sometimes broken, we parse from somewhere else
Also fixed the faulty url completion.
2023-10-20 15:01:55 +02:00
f63851d95d #68 JsonConverter 2023-10-20 14:50:26 +02:00
e72301d062 #68 and other chromium connectors: Wait for page to be fully loaded 2023-10-20 14:49:48 +02:00
2302e1009b Merge branch 'issue_70' into cuttingedge 2023-10-20 14:40:37 +02:00
40fea6cc7f Fix #70 invalid chapter numbers 2023-10-20 14:40:24 +02:00
5458c43f21 Merge branch 'timeout-bug' into cuttingedge 2023-10-19 13:00:03 +02:00
f78bec43d6 Fix an issue where a request-timeout would cause a restartloop. 2023-10-19 12:59:20 +02:00
88876fb8f4 #68 corrected url in GetChapters 2023-10-19 12:09:43 +02:00
c71aec8882 #68 Readme and Name 2023-10-19 12:08:49 +02:00
ddfba0d864 #68 MangaLife untested code, XML on site is broken 2023-10-19 12:06:03 +02:00
ca9c0b22c1 Merge pull request '#67 prevent crash if xml document does not exist' (!60) from cuttingedge into master
Reviewed-on: #60
2023-10-15 12:21:31 +02:00
6844d0a242 #67 prevent crash if xml document does not exist 2023-10-15 12:19:44 +02:00
fd9319de27 Merge pull request 'Fix #66 Mangasee search and parsing' (!59) from cuttingedge into master
Reviewed-on: #59
2023-10-14 13:07:46 +02:00
726be70af3 #66 Mangasee search sanitization 2023-10-14 12:59:35 +02:00
19c9ecb3e7 #66 Mangasee empty search breaks 2023-10-14 12:59:06 +02:00
f01a786e59 Merge pull request 'cuttingedge' (!58) from cuttingedge into master
Reviewed-on: #58
2023-10-12 20:48:15 +02:00
59f9bcc7d0 Merge remote-tracking branch 'origin/cuttingedge' into cuttingedge 2023-10-12 20:47:25 +02:00
2796a2adb5 Merge branch 'master' into cuttingedge 2023-10-12 20:47:16 +02:00
e07b191293 Merge branch 'master' into cuttingedge 2023-10-12 20:46:43 +02:00
9bf650f5fc New Issue Template: New Connector 2023-10-12 20:45:56 +02:00
334795b263 Update readme to reflect new connectors 2023-10-10 22:58:05 +02:00
51a6f216af Remove extraneous covers from imageCache. 2023-10-10 22:51:24 +02:00
238a2775f4 Author formatting bato 2023-10-10 22:45:11 +02:00
fec970d7d6 #64 fix empty search 2023-10-10 22:43:34 +02:00
e642d50c47 #64 Bato
Comment: This website suuuucks to scrape. There are gonna be so many issues
2023-10-10 22:40:44 +02:00
fafcdac00a Fix file-extension on image download 2023-10-10 22:40:07 +02:00
1785aa28ea Change coverCacheFilenames, to avoid conflicts and malformatted filenames 2023-10-10 22:34:47 +02:00
f22c332cab Merge pull request 'cuttingedge' (!57) from cuttingedge into master
Reviewed-on: #57
2023-10-10 21:21:34 +02:00
b3bf523e1e Fix #63 Chapter numbering. 2023-10-09 15:28:37 +02:00
06b2e11164 Add Mangaworld to dict. 2023-10-09 15:15:42 +02:00
7972f07801 housekeeping 2023-10-04 22:09:33 +02:00
d89af7cc5b Fix multiple enumeration 2023-10-04 22:09:27 +02:00
31a0c6ffb2 Fix build warnings 2023-10-04 18:14:46 +02:00
668a3b3a96 MangaDex nullchecking in response 2023-10-04 18:14:12 +02:00
3938c61297 #62 https://github.com/C9Glax/tranga/issues/62#issuecomment-1747064431
Parsing, parsing, parsing
2023-10-04 17:45:13 +02:00
4f3bcd245d #62 fix one bug, create another 2023-10-04 15:44:06 +02:00
129c95f123 Set timeout on chromiumclient
#62
2023-10-04 11:20:14 +02:00
e2cdf27d40 https://github.com/C9Glax/tranga/issues/62#issuecomment-1746422154
#62
ChapterNumber Parsing on Manganato
2023-10-04 11:15:24 +02:00
4156365b18 Improved logic on QueueContainsJob and AddJobToQueue
Added some documentation
2023-10-04 09:38:40 +02:00
d3ccddd8db Fix multiple enumeration 2023-10-04 09:33:11 +02:00
13075a8704 Improved logic in LoadJobsList 2023-10-04 09:31:03 +02:00
e7d9f53a93 Prevent override of List-jobs in AddJobsQueue-method 2023-10-04 09:30:42 +02:00
dc6dfd4aa1 Renamed method ExportJob(s) to UpdateJobFiles 2023-10-04 09:30:08 +02:00
0fba09b1e8 Logic removed unnecessary call 2023-10-04 09:24:21 +02:00
f08b9e85ec Add log message for inactive jobs 2023-10-03 20:46:59 +02:00
95fcc73c74 Cancel Running Jobs if inactive for more than 5 minutes 2023-10-03 20:46:21 +02:00
73492d8102 #62 even more debug logging 2023-10-03 20:38:45 +02:00
c69dd22ecf #62 more debug-logging
Instead of assigning buffer copy directly from result to filestream
2023-10-03 14:07:58 +02:00
17b6c523a2 Print results before downloading covers 2023-09-28 15:53:57 +02:00
6c3f7604fe Better Mangasee search 2023-09-28 15:53:40 +02:00
94f88f08e9 Update bug_report.yml
WHAT
2023-09-26 18:50:56 +02:00
47327524be body can not be empty? 2023-09-26 18:47:02 +02:00
3b96419739 will this work 2023-09-26 18:39:12 +02:00
b7c9b4e9b4 Update issue templates 2023-09-26 18:37:59 +02:00
13adb45444 File.extensions.matter 2023-09-26 18:32:44 +02:00
b8fbee578e Update readme 2023-09-26 18:30:52 +02:00
c1fb42b537 Update docker compose to latest 2023-09-26 18:29:49 +02:00
dcc12ec3ea Merge remote-tracking branch 'github/master' 2023-09-26 18:28:23 +02:00
8c554076b2 Merge branch 'cuttingedge' 2023-09-26 18:28:15 +02:00
a10fbdf3a5 Merge pull request #59 from C9Glax/C9Glax-patch-1
Update issue templates
2023-09-26 18:27:38 +02:00
f246209685 Changed to template 2023-09-26 18:26:42 +02:00
41c561bd1d Update issue templates 2023-09-26 18:18:06 +02:00
fc7d5463c3 Fix #58
Mangaworld: Manga without volumes crash
2023-09-26 18:03:18 +02:00
3c2ce266f6 Changed (fixed?) queuelogic 2023-09-20 21:59:39 +02:00
306cb87d67 Fix Check for subjobs 2023-09-20 21:34:04 +02:00
23cda74487 Fix wrong domain regex 2023-09-20 21:33:53 +02:00
3ceee63dfc Only send notification on successful downloads 2023-09-20 14:40:03 +02:00
4e5a6fe97b Export Library and notification connectors on deletion
Added logging
2023-09-20 14:11:31 +02:00
b3b1971dad Startup notification 2023-09-20 13:58:10 +02:00
2699f35b62 housekeeping 2023-09-20 13:33:13 +02:00
7a14583d6a Moved Regex for baseUrl to Globalbase 2023-09-20 13:30:52 +02:00
660f6a1648 Logmessages for creation of library and notification Connector 2023-09-20 13:28:09 +02:00
482fcb7102 better logging for removing files 2023-09-19 23:24:39 +02:00
b6cdb07e3f Remove filewrites 2023-09-19 23:15:18 +02:00
0875e7ee12 Remove log clutter and filewrites 2023-09-19 23:07:26 +02:00
cb6482ebae Add logmessage on startup for next job 2023-09-19 20:04:25 +02:00
87ea077281 Remove log clutter and filewrites 2023-09-19 20:02:56 +02:00
c1aa4cf6b5 Fix bug with exportjobslist not exporting updated jobs 2023-09-19 19:59:51 +02:00
f5b6b1785f small improvements 2023-09-19 19:57:35 +02:00
2553a150d1 Add log to see wait time 2023-09-19 19:54:26 +02:00
b149d377dc Add log to see wait time 2023-09-19 19:54:00 +02:00
0209159c5c Add log to see wait time 2023-09-19 19:50:39 +02:00
e31820eb00 Export Jobs list when finished. 2023-09-19 19:49:42 +02:00
c4d69c27a4 copy cover 2023-09-19 19:43:58 +02:00
3ee53b7436 copy cover 2023-09-19 19:43:39 +02:00
64ec0963e1 copy cover 2023-09-19 19:42:50 +02:00
27c4ed719c Cancel failed jobs 2023-09-19 19:33:43 +02:00
4f4b0cb3a8 LibraryConnector baseUrl regex 2023-09-19 19:22:49 +02:00
48d312da0b File Permissions 2023-09-19 19:21:37 +02:00
1fe4b75ac7 Folder permissions 2023-09-19 19:04:55 +02:00
c580fafc62 Added user tranga to container and set permissions 2023-09-19 19:00:00 +02:00
58040ecb10 Order of returned API Jobs/MonitorJobs And Jobs/Waiting 2023-09-19 18:06:08 +02:00
2960a9b8f0 Merge branch 'cuttingedge'
# Conflicts:
#	Tranga/Connectors/Mangasee.cs
2023-09-19 16:59:58 +02:00
f52bb8eb89 Get Readme ready for migration to master 2023-09-19 16:54:17 +02:00
ae0dc548ae Changed working directory on linux to /usr/share/tranga-api
Updated docker-compose to include settings-volume
2023-09-19 16:47:49 +02:00
051b85d08b Added contentType to response for images and logs 2023-09-19 16:43:08 +02:00
d89ca0a2ef Changed Jobs ToString 2023-09-19 16:30:55 +02:00
f1f640c1f6 Mangaworld fix volume and chapter numbers 2023-09-19 16:30:44 +02:00
9319aa7d1f Fix Mangaworld empty search-result crash 2023-09-19 16:24:07 +02:00
656e62628e Fix Mangaworld search 2023-09-19 16:23:52 +02:00
ba27adf255 Show startmessage and log settings 2023-09-19 16:08:00 +02:00
88ca75e883 Use lock statement instead of variable to lock logmessages 2023-09-19 15:59:52 +02:00
67c23b357f Add console-output to Dockerfile 2023-09-14 14:55:45 +02:00
4a5271e2a7 Added italian tags to series.json 2023-09-13 23:33:12 +02:00
fec5ad664c Fix possible nullreference 2023-09-13 23:02:36 +02:00
3cea5fb431 #50 Added Mangaworld.bz connector 2023-09-13 23:00:52 +02:00
7fa44fba54 Fix filename for coverimage if url contains parameters 2023-09-13 23:00:27 +02:00
d6b5a29fdc Fix Manganato kaguya-bug: volumenumber, chapternumber, chaptername match 2023-09-13 21:47:50 +02:00
a4a49d40f0 API GET LogMessages new optional parameter count 2023-09-13 14:40:23 +02:00
28fa85f05c #50 Added parameter translatedLanguage POST Jobs/DownloadNewChapters
POST Jobs/MonitorManga
2023-09-13 14:20:10 +02:00
1066e1ca2e #50 translated-language support (if connector supports it)
API GET Manga/Chapters new parameter "translatedLanguage"
2023-09-13 14:09:47 +02:00
39307f4313 Changed jobs.json to instead be a directory with one file per job
#48
2023-09-09 19:15:20 +02:00
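Splitting jobs.json into a directory with one file per job means only the changed job has to be rewritten. A minimal sketch under that assumption, using Newtonsoft.Json (the file-naming scheme and job shape are placeholders):

```csharp
using System.IO;
using Newtonsoft.Json;

public class JobFileStoreSketch
{
    private readonly string _jobsFolderPath;

    public JobFileStoreSketch(string jobsFolderPath)
    {
        _jobsFolderPath = jobsFolderPath;
        Directory.CreateDirectory(_jobsFolderPath);
    }

    // One file per job: only the changed job is rewritten, not the whole list.
    public void UpdateJobFile(string jobId, object job)
    {
        string path = Path.Combine(_jobsFolderPath, $"{jobId}.json");
        File.WriteAllText(path, JsonConvert.SerializeObject(job, Formatting.Indented));
    }

    public void RemoveJobFile(string jobId)
    {
        string path = Path.Combine(_jobsFolderPath, $"{jobId}.json");
        if (File.Exists(path))
            File.Delete(path);
    }
}
```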
a316ee3d48 Changed id creation for Jobs to be more descriptive 2023-09-09 19:14:47 +02:00
569622099d DownloadClient and MangaConnector improvements
DownloadClient is now abstract for HttpDownloadClient and ChromiumDownloadClient
The chromium client will exit the headless browser (on clean exit of the program).
The field "name" of MangaConnector is no longer abstract, instead set through constructor.
2023-09-08 23:27:09 +02:00
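A minimal sketch of the hierarchy described above: an abstract client with HTTP and Chromium implementations, and a connector whose name is passed through the constructor instead of being an abstract member. Signatures are simplified stand-ins, not the real ones.

```csharp
public abstract class DownloadClientSketch
{
    // Shared entry point; concrete clients decide how the request is actually made.
    public abstract string MakeRequest(string url);
}

public class HttpDownloadClientSketch : DownloadClientSketch
{
    public override string MakeRequest(string url) => $"GET {url} via HttpClient";
}

public class ChromiumDownloadClientSketch : DownloadClientSketch
{
    // The real client also closes the headless browser on clean program exit.
    public override string MakeRequest(string url) => $"GET {url} via headless Chromium";
}

public abstract class MangaConnectorSketch
{
    public string Name { get; }
    protected DownloadClientSketch DownloadClient { get; }

    // "name" is no longer abstract; each connector passes it up through the constructor.
    protected MangaConnectorSketch(string name, DownloadClientSketch downloadClient)
    {
        Name = name;
        DownloadClient = downloadClient;
    }
}
```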
017701867d Fixed logic on API GET Jobs/Progress 2023-09-08 19:58:44 +02:00
c3d62bd337 Added ProgressToken timeRemaining 2023-09-08 19:58:29 +02:00
dc9e9e705c Fix FileLogger filePath 2023-09-08 19:28:44 +02:00
9eee6683fa Add API GET Ping 2023-09-08 16:31:38 +02:00
1265c7a072 Added API: GET Manga 2023-09-05 20:26:31 +02:00
c601541249 Added API: GET LogMessages and LogFile
resolves #10
2023-09-05 20:02:24 +02:00
ae1184320f Added API: customFolderName to Jobs/MonitorManga and Jobs/DownloadNewChapters
resolves #30
2023-09-05 19:51:18 +02:00
384e4c4f43 Added parameter "ignoreBelowChapterNum" to API: Jobs/MonitorManga and Jobs/DownloadNewChapters 2023-09-05 19:44:14 +02:00
76a2b2498a Added numberFormatDecimalPoint to GlobalBase 2023-09-05 19:42:46 +02:00
2ab21b15cf Merge pull request #47 from C9Glax/dependabot/github_actions/actions/checkout-4
Bump actions/checkout from 3 to 4
2023-09-05 19:33:20 +02:00
7acdf7a19b Bump actions/checkout from 3 to 4
Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v3...v4)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-09-05 17:31:28 +00:00
af8716fcb1 Possible fix for #20 2023-09-05 19:28:43 +02:00
5f2c66b729 Merge remote-tracking branch 'origin/cuttingedge' into cuttingedge 2023-09-03 18:37:03 +02:00
e030f02431 Merge remote-tracking branch 'origin/cuttingedge' into cuttingedge
# Conflicts:
#	README.md
2023-09-03 18:36:57 +02:00
bdeb75f4e4 Merge remote-tracking branch 'origin/cuttingedge' into cuttingedge
# Conflicts:
#	README.md
2023-09-03 18:36:05 +02:00
4ce114986d Updated Readme to reflect the separation of tranga (the api) and the website. 2023-09-03 18:35:51 +02:00
8035bf3fcd Updated Readme to reflect the separation of tranga (the api) and the website. 2023-09-03 18:34:17 +02:00
85bf3ec7e8 Fix MangaSee publicationId 2023-09-02 23:14:31 +02:00
0f17615b10 Fix FileInUse 2023-09-02 23:14:16 +02:00
0c8145803e Possibly related to #20 2023-09-02 22:49:00 +02:00
b2e0c3db97 docker-compose to cuttingedge 2023-09-02 22:43:09 +02:00
ca283fcfff Fix Dockerfile, copy CLI 2023-09-02 22:39:54 +02:00
1d55070daf Merge branch '41_-_trash_everything' into cuttingedge
# Conflicts:
#	Tranga/MangaConnectors/DownloadClient.cs
2023-09-02 22:33:29 +02:00
32fd75bdae Add Manga to cached on parsing 2023-09-02 22:12:49 +02:00
99ad702163 Fixed MangaDex GetMangaFromUrl Regex-Group and resultobject 2023-09-02 22:12:34 +02:00
6e3a9c2a78 Added Lock to MemoryLogger 2023-09-02 21:53:09 +02:00
ad1d4dfe23 Fixed naming errors containing Manga
Added GetMangaFromUrl(url) to Mangaconnector
2023-09-02 21:52:48 +02:00
14ba71005f CheckJobs combined cancelled and completed checks,
added standby check
2023-09-02 16:16:00 +02:00
22c4c0eb2c Fixed GetJobsLike, for empty publication, but existing chapter 2023-09-02 16:15:06 +02:00
44f8d369c3 Added AddJobs to JobBoss 2023-09-02 16:14:36 +02:00
c0e6da144e Changed Job.ExecuteNow to ExecutionEnqueue
Instead of replacing progressToken, change Increments based on completed increments
2023-09-02 16:14:21 +02:00
51a1ae72ca Added parentJobId for deserialization
When creating Jobs with null as recurrence time, set it to zero
Job.NextExecution() removed the recurrence check
2023-09-02 16:12:10 +02:00
79bbc92467 Added lastExecution time on jobs.json parse 2023-09-02 15:05:15 +02:00
ae5be31c89 Fixed Jobs/StartNow 2023-09-02 14:49:31 +02:00
eebe25a378 Added check if jobQueue is empty 2023-09-02 14:46:38 +02:00
0f3da4ec81 Added check to read/write jobs.json if file is in use
Write jobs.json on change
2023-09-02 14:46:13 +02:00
0b77dc1172 Added ProgressToken state Cancelled 2023-09-02 14:45:46 +02:00
37cf47bc17 Reduced CheckJobs timer to 100ms 2023-09-02 14:45:02 +02:00
4cce2e04cb Renamed Job.Reset to ResetProgress 2023-09-02 14:13:30 +02:00
5465ac4e5c Removed DELETE Jobs/DownloadChapter and Jobs/MonitorManga. Can both be reached with DELETE Jobs (jobId)
Added POST Jobs/Cancel
CancelJob and RemoveJob cancels/removes subJobs
2023-09-02 14:13:15 +02:00
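The cancel/remove behaviour described above cascades to sub-jobs. A minimal sketch of that recursion follows; the Job shape and method names are placeholders.

```csharp
using System.Collections.Generic;

public class JobSketch
{
    public string Id { get; init; } = "";
    public bool Cancelled { get; private set; }
    public List<JobSketch> SubJobs { get; } = new();

    // Cancelling a job cancels every sub-job it spawned, recursively.
    public void Cancel()
    {
        Cancelled = true;
        foreach (JobSketch subJob in SubJobs)
            subJob.Cancel();
    }
}

public class JobBossSketch
{
    private readonly List<JobSketch> _jobs = new();

    public void AddJob(JobSketch job) => _jobs.Add(job);

    // Removing a job also cancels it and removes its sub-jobs from the job list.
    public void RemoveJob(JobSketch job)
    {
        job.Cancel();
        foreach (JobSketch subJob in job.SubJobs)
            RemoveJob(subJob);
        _jobs.Remove(job);
    }
}
```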
dd4d5a81ee Fix JobId variable in API requests 2023-09-02 14:11:44 +02:00
a05e1914e3 Log output changes 2023-09-02 14:11:11 +02:00
ed79ee5d0f Add Manga from Jobs to cachedManga 2023-09-01 23:41:50 +02:00
28e05e549d Added import and export for Jobs
Renamed tasksFilePath -> jobsFilePath and changed to jobs.json
2023-09-01 23:37:50 +02:00
eaab7c5235 Fixed jobs not starting at all 2023-09-01 23:08:31 +02:00
0552b3db82 Fix crash on null Logmessage 2023-09-01 22:53:38 +02:00
c813e1854d Do not add duplicate jobs 2023-09-01 22:39:22 +02:00
32036df057 Added API call to retrieve cover with internalId.
No need to mount imageCache over multiple containers.
2023-09-01 21:40:56 +02:00
394829ee36 Revert "Download Covers only when Downloading Chapters"
This reverts commit e663163d

Covers might be important
2023-09-01 21:17:46 +02:00
2a389f1ede Changed default download and working directories.
ExportSettings() now creates the folder
2023-08-31 17:07:54 +02:00
3167f6c3e6 Changed default log-folder path, and log-encoding to utf8 2023-08-31 17:07:17 +02:00
89c5f4b820 Added API-call GET Jobs/MonitorJobs 2023-08-31 16:40:08 +02:00
1c1169e5ce Renamed Managers to Connectors 2023-08-31 16:39:39 +02:00
d5d34c5381 Changed return-values of API: NotificationConnectors/Types and LibraryConnectors/Types 2023-08-31 15:52:47 +02:00
c0efbb22cc Fixed JsonParsing of NotificationConnector and LibraryConnector with GlobalBase 2023-08-31 15:41:02 +02:00
9f30e52713 Added new API-Calls:
POST: Jobs/StartNow
DELETE: Jobs
2023-08-31 13:12:03 +02:00
1fd36c91d6 Renamed Publication.cs to Manga.cs
Renamed Request-Paths "Tasks" to "Jobs"
2023-08-31 12:16:02 +02:00
e663163de8 Download Covers only when Downloading Chapters 2023-08-31 12:14:03 +02:00
4827b90c3d Merge pull request #45 from C9Glax/dependabot/github_actions/docker/setup-buildx-action-2.10.0
Bump docker/setup-buildx-action from 2.9.1 to 2.10.0
2023-08-29 19:09:40 +02:00
e274c864f9 CLI: Add Status Code to output 2023-08-29 14:11:46 +02:00
f4bc182954 CLI: Prompt directly for HttpMethod, ignore input when exiting log 2023-08-29 14:09:35 +02:00
3365be219c Logger: Logmessage time 2023-08-29 14:08:57 +02:00
10708b3abd Add CLI with basic functionality. 2023-08-29 14:00:55 +02:00
c1e939f1e3 Server correct shutdown/force shutdown 2023-08-29 12:40:10 +02:00
21d53dabec TrangaSettings corrected logic for loading settingsfile, and overwriting settings 2023-08-29 12:39:48 +02:00
a9417dbba6 Trangasettings fix infinite loop on load 2023-08-29 12:39:21 +02:00
4ca7b107eb Bump docker/setup-buildx-action from 2.9.1 to 2.10.0
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 2.9.1 to 2.10.0.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v2.9.1...v2.10.0)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-08-29 05:58:02 +00:00
61024bcee9 UserDictionary and variable readonly 2023-08-27 01:22:39 +02:00
ea1b8749a6 Removed unnecessary check 2023-08-27 01:22:21 +02:00
2fcab1f1b1 More Logging 2023-08-27 01:22:08 +02:00
bbd716383a Added ToString overrides 2023-08-27 01:21:23 +02:00
6e1a0ab06c Corrected order of constructor (GlobalBase clone) 2023-08-27 01:15:02 +02:00
181942153b Fixed some variables 2023-08-27 01:05:32 +02:00
fe04af4a2b Added most API-functions 2023-08-27 01:01:39 +02:00
4240a1eb6a Added methods to search for jobs, and remove multiple jobs. 2023-08-27 01:01:20 +02:00
32349c1ddf Added methods to Update Downloadlocation and WorkingDirectory 2023-08-27 01:00:42 +02:00
a94d3d6b40 Added method to delete Library/Notification-Connector 2023-08-27 01:00:13 +02:00
f916cda0f1 Corrected order of constructor (GlobalBase clone) 2023-08-27 00:59:54 +02:00
a8f0f1af15 More API Requests 2023-08-26 02:43:24 +02:00
0cf3a95f58 cachedPublications 2023-08-26 02:42:57 +02:00
a89a526fda Default language GetChapters: en 2023-08-26 02:42:31 +02:00
4d1e43e7b3 Job: add Id 2023-08-26 02:40:24 +02:00
4f9749d09e Fix bug with MangaDex, Useragent 2023-08-26 01:51:16 +02:00
7614f9aad3 Add User Agent to MangaConnectors 2023-08-26 01:50:31 +02:00
97c0e42512 Handle first requests, add parameter parser 2023-08-26 01:47:36 +02:00
565bc0775d Add Connectors to Tranga 2023-08-26 01:47:15 +02:00
e6a3fa2899 public GetPublications 2023-08-26 01:46:36 +02:00
2d82279d98 Added startup args, and first http-requesthandler 2023-08-24 13:35:07 +02:00
c5559a4ceb Save api-Portnumber in settings 2023-08-24 13:34:43 +02:00
2572a537ab Job Inherits from GlobalBase 2023-08-24 13:34:23 +02:00
58db049496 Merged MonitorJobs and CheckJobs in JobBoss 2023-08-24 13:34:09 +02:00
8f309fcfd7 Library- and NotificationConnectors in GlobalBase 2023-08-24 13:33:33 +02:00
11461051f3 Fixed missing filelogger crash 2023-08-24 12:13:34 +02:00
a4aa571870 Added Jobs and ProgressToken 2023-08-04 14:51:40 +02:00
e4086a8892 Rename TBaseObject -> GlobalBase
Remove Notification and Library Connectors from GlobalBase
2023-08-01 18:24:19 +02:00
c45e4ddf90 Rename Connectors -> MangaConnectors 2023-08-01 18:22:24 +02:00
675effd317 Trash everything and writing everything from scratch 2023-08-01 18:21:29 +02:00
a4f67c9ab4 Merge pull request 'Fixes for MangaKatana' (!53) from cuttingedge into master
Reviewed-on: #53
2023-07-31 23:09:24 +02:00
2538a29788 MangaKatana fix search result characters 2023-07-31 23:05:29 +02:00
81d5802092 MangaKatana fix bug where empty result in search would crash program 2023-07-31 23:03:46 +02:00
436edfde66 Fix issue where closed connection crashes api 2023-07-31 22:58:41 +02:00
00c1cd56b8 Merge pull request '#31 #40' (!52) from cuttingedge into master
Reviewed-on: #52
2023-07-31 22:50:22 +02:00
a63154b581 Fix startup issue where version would be null on new installs 2023-07-31 22:47:35 +02:00
53fe7ee983 Possible fix for #31
chapter regex
2023-07-31 22:47:14 +02:00
6fb4098c16 Merge pull request 'Missing logger, breaking version in settings.json' (!51) from cuttingedge into master
Reviewed-on: #51
2023-07-31 02:14:06 +02:00
7a024e8733 Add logger to CommonObjects on deserialization 2023-07-31 02:11:53 +02:00
835e239be5 Cleanup 2023-07-31 02:07:39 +02:00
df8538c3b4 Merge pull request 'version' (!50) from cuttingedge into master
Reviewed-on: #50
2023-07-31 01:59:42 +02:00
f832fe0de3 version 2023-07-31 01:58:00 +02:00
ebdb38bd57 Merge pull request 'Moving away from API/CLI model, combined into single executable.' (!49) from cuttingedge into master
Reviewed-on: #49
2023-07-31 01:53:50 +02:00
e3201a9b99 Ignore Logger 2023-07-31 01:50:26 +02:00
eb50b84266 Converters 2023-07-31 01:48:40 +02:00
b3d778ff56 accessibility 2023-07-31 01:45:55 +02:00
00861c406a added logging 2023-07-31 01:42:15 +02:00
01c8784bab wrong array 2023-07-31 01:30:32 +02:00
3aa299e48a deserialization of enum 2023-07-31 01:28:32 +02:00
d1ce244135 New Migration to new commonObjects 2023-07-31 01:26:38 +02:00
c91754614b weird env 2023-07-31 00:58:22 +02:00
70b1ae4812 isLinux 2023-07-31 00:52:27 +02:00
336e08aebf If not running cli add back console output 2023-07-31 00:46:14 +02:00
18134cdf01 If not running cli add back console output 2023-07-31 00:43:57 +02:00
5b89cbd042 Only run TaskMode on Windows 2023-07-31 00:41:25 +02:00
74aca86b62 Wrong entrypoint 2023-07-31 00:36:56 +02:00
e5abaa4549 Wrong entrypoint 2023-07-31 00:35:11 +02:00
eb0eb71e86 wrong dockerfile 2023-07-31 00:33:57 +02:00
4e73b0a4cf wrong dockerfile 2023-07-31 00:32:42 +02:00
140074208f Merged API and CLI into one. 2023-07-31 00:31:19 +02:00
fa19d3da14 Fix missing file on loading settings/commonobjects 2023-07-31 00:01:18 +02:00
3d6657b483 Moved libraryManagers, notificationManagers and logger to commonObjects class. 2023-07-30 23:31:25 +02:00
f9b5e05974 Merge pull request #39 from C9Glax/cuttingedge
Move Namespaces, move logger to TrangaSettings, move downloadClient to separate File, remove deprecated calls
2023-07-30 17:34:06 +02:00
ad4027779f Remove Deprecated CreateUpdateLibraryTask 2023-07-30 17:29:30 +02:00
98ec0b837f Remove Enter input from settings, instead update all settings on click of "Update" Button.
resolves #38
2023-07-30 17:27:47 +02:00
1afa3df316 Cleanup build warnings, ReShaper, Dictionary 2023-07-30 17:25:04 +02:00
d83aa1ef5b deprecated 2023-07-30 17:11:11 +02:00
b610ec734e Chapter readonly struct 2023-07-30 17:09:39 +02:00
abf587377c API: Changed uninstantiated class Program to static 2023-07-30 17:09:30 +02:00
437349bd27 TrangaSettings changed set directive 2023-07-30 17:09:10 +02:00
000539d6a6 Moved logger to Trangasettings 2023-07-30 17:08:43 +02:00
b4bef25a22 Moved downloadclient to separate file 2023-07-30 17:04:43 +02:00
579e400a5d Moved class to appropriate namespaces 2023-07-30 17:01:54 +02:00
8af2b12fc0 Moved class to appropriate namespaces 2023-07-30 16:26:29 +02:00
bad4330330 introduce branch cuttingedge 2023-07-30 16:21:04 +02:00
42596752d3 FIX: null Publications in tasks 2023-07-29 18:55:06 +02:00
16238c590b Remove UpdateLibrariesTask 2023-07-29 18:20:41 +02:00
9f38dc3b6a Revert "Remove UpdateLibrariesTask"
This reverts commit de14ff0b75.
2023-07-29 18:18:02 +02:00
485637d99a Added Min-Chapter-Number to API 2023-07-28 10:47:36 +02:00
de14ff0b75 Remove UpdateLibrariesTask 2023-07-28 10:41:20 +02:00
f947c37bd6 Change website context to revert location to / instead of /Website 2023-07-28 10:30:54 +02:00
77eec0f696 Fix wrong deserialization 2023-07-21 00:32:18 +02:00
18323f9f51 remove debug 2023-07-21 00:22:41 +02:00
2cd2b6842d arch armv7 fails to build 2023-07-21 00:20:13 +02:00
09f815903f arch arm64 fails to build 2023-07-21 00:18:08 +02:00
c108478039 context 2 2023-07-21 00:15:35 +02:00
74289e43b7 context 2023-07-21 00:14:07 +02:00
2779f9ba09 Merge remote-tracking branch 'origin/master' 2023-07-21 00:12:23 +02:00
59a8e556f0 wrong build path 2023-07-21 00:12:09 +02:00
074b137b5c Merge pull request 'dev' (!48) from dev into master
Reviewed-on: #48
2023-07-21 00:10:33 +02:00
3cb2540794 debugging 2023-07-21 00:09:59 +02:00
02c9934896 change context back to API 2023-07-21 00:09:51 +02:00
b2e1c95bca Merge remote-tracking branch 'origin/master' 2023-07-21 00:07:44 +02:00
8c9e3ea6b6 Merge pull request 'split into two actions, dont always build tranga-base' (!47) from dev into master
Reviewed-on: #47
2023-07-21 00:07:16 +02:00
db441607ad Merge branch 'master' into dev 2023-07-21 00:04:24 +02:00
91c56783dc restore absolute path 2023-07-21 00:03:32 +02:00
2c288eeeea Don't rebuild tranga-base every time. 2023-07-20 23:54:30 +02:00
57a1ea91fc Merge pull request 'dev' (!46) from dev into master
Reviewed-on: #46
2023-07-20 23:50:15 +02:00
06138a3927 Workflow change context 2023-07-20 23:49:33 +02:00
84b053e672 Merge remote-tracking branch 'origin/dev' 2023-07-20 23:44:17 +02:00
0fe0cbc4ad Merge pull request #34 from C9Glax/dev
Unsupported arch
2023-07-20 23:42:04 +02:00
62e6ce8363 remove unsupported platforms 2023-07-20 23:38:10 +02:00
a4f3ec6580 Merge pull request #33 from schklom/master
Automatic build of Docker images for many platforms (ARM too)
2023-07-20 23:25:10 +02:00
8b4e996b7e Create dependabot.yml 2023-07-20 23:10:46 +02:00
964540d30f Create docker-image.yml 2023-07-20 23:10:15 +02:00
fa69f4488f Removed UpdateLibraryTask (deprecated).
Libraries will be updated when new Chapters are downloaded.
Added Migrator, for future file-changes
2023-07-20 18:15:14 +02:00
42c2876188 Mangakatana chapter num fix 2023-07-16 20:22:33 +02:00
715244ff1b Mangasee more logging 2023-07-16 18:15:28 +02:00
2333cd9095 Mangasee more bad words 2023-07-16 18:15:11 +02:00
c8225db4fe #30 #31 2023-07-16 17:47:00 +02:00
6741ca096b Startup Message 2023-07-16 17:38:42 +02:00
a897a7b3a2 Better Logger.
Includes a formatted Console-Log
2023-07-16 17:33:15 +02:00
0f8932e712 Fixed missing logger for notificationManagers on deserialization 2023-07-09 21:38:49 +02:00
78023ef0fd resolves #21 lunasea 2023-07-09 21:35:15 +02:00
d171f34e4e Update README.md 2023-07-07 14:23:33 +02:00
aa0dc4fa35 Fixes single result redirect 2023-07-06 02:09:56 +02:00
25f48592c0 Added more badwords to filter out when searching mangasee, resolves #26 2023-07-04 22:44:01 +02:00
398ac304d2 Update Komga/Kavita immediately after new chapter is Downloaded 2023-07-03 00:01:08 +02:00
58a62f8272 Mangasee search all title-fields. 2023-07-02 23:54:02 +02:00
86752c9a7e Order of task Execution by due-time 2023-07-02 23:10:16 +02:00
f9a7828d02 Moved notification back to DownloadChapterTask
temp: Dont wait for childTasks to finish to finish parent task
2023-07-02 23:06:24 +02:00
c97ff69148 Fix for new publications: Add to collection 2023-07-02 22:46:01 +02:00
1735bbcf8a Fix wrong query from allTasks to runningTasks 2023-06-30 00:23:00 +02:00
9ae8ca65df resolves #25 characters encoding mistake 2023-06-29 21:09:42 +02:00
00599cd24e Infinite loop on unavailable chapters 2023-06-28 23:00:24 +02:00
6d5618a1f7 Infinite loop on unavailable chapters 2023-06-28 22:46:22 +02:00
a1202a875d Moved successState to TaskManager 2023-06-28 22:43:46 +02:00
98946b4aa3 Fixed null chapterNumber on mangadex 2023-06-28 22:43:24 +02:00
41b6bb77b6 Moved GetPublicationsFromConnector to connector.
Moved GetNewChaptersList to Connector.
Removed knownPublications file
Renamed chapterCollection to collection and only contains Publications
2023-06-28 22:43:03 +02:00
e70a14ca56 Only send notifications if more than 0 new chapters 2023-06-28 19:23:06 +02:00
b099da1156 Chapter fix RegexMatching on chapter number 2023-06-28 00:13:23 +02:00
01d1f922c2 MangaDex chapterNumber non-nullable 2023-06-28 00:13:09 +02:00
47a80d67a8 TrangaTask Success-State and child task deletion 2023-06-27 23:55:13 +02:00
16e3549455 Export Data after deleting task 2023-06-27 23:54:44 +02:00
be8c6b50ba Notification moved to TrangaTask 2023-06-27 23:37:13 +02:00
a38fcf50ca nullable types removed 2023-06-27 23:25:35 +02:00
82f6c7b3fe Moved GetArchiveFilePath, CheckChapterIsDownloaded and GetComicInfoXmlString to Chapter.cs 2023-06-27 23:22:23 +02:00
5586d2c104 Connector CheckChapterIsDownloaded more Regex 2023-06-27 23:14:22 +02:00
62dc9fee2a GetComicInfoXmlString: protected -> internal 2023-06-27 23:09:09 +02:00
ac96fca6dc Chapter illegalstring regex 2023-06-27 23:08:29 +02:00
25a6ceff10 Remove sortNumber-field from Chapter
API: Change Tasks/Progress chapterSortNumber to ChapterNumber
2023-06-27 23:06:37 +02:00
b3e1d39d0f Rename Connector.SearchChapters -> SelectChapters
Added "a(ll)"-option to SelectChapters
2023-06-27 23:02:55 +02:00
2833b7f22a Remove Legacy support for "DownloadNewChapters" 2023-06-27 22:59:33 +02:00
cbdd305b69 TaskManager AddTask make better use of GetTasksMatching and GetTasksMatching easier usage 2023-06-27 22:59:23 +02:00
b88890817e TaskManager _runningDownloadChapterTasks -> _runningTasks for all TrangaTasks 2023-06-27 22:58:40 +02:00
f66ab7d40b Connector use TrangaSettings instead of own values for imageCache and downloadLocation 2023-06-27 22:57:44 +02:00
4cb3694cd5 Re-add task timeout 2023-06-27 22:23:53 +02:00
a05d4c8bd9 Merge remote-tracking branch 'origin/master' 2023-06-27 22:23:23 +02:00
22f87a74b2 Re-add task timeout 2023-06-27 22:23:19 +02:00
ba57282879 Re-add task timeout 2023-06-27 22:19:06 +02:00
9ccba6fba6 Fix CheckChapterIsDownloaded Directory-does-not-exist exception returning 0 chapters 2023-06-25 23:56:22 +02:00
4f01c1166f Fix taskIds being changed during requests, no workaround this time 2023-06-25 23:56:00 +02:00
0a51e7ad3d Fix taskIds being changed during requests 2023-06-25 23:26:36 +02:00
e541b922dc Merge pull request #24 from arxae/master
Added MangaKatana connector
2023-06-25 21:38:18 +02:00
604abd5f9a Fix bug where ChildTasks hung parentTasks 2023-06-24 21:00:26 +02:00
7b311eae75 Will break: CheckChapterIsDownloaded 2023-06-24 20:46:35 +02:00
d4eb72cd99 Required changes 2023-06-23 22:14:27 +02:00
b515215f4b Fix taskIds being changed during requests 2023-06-22 23:09:59 +02:00
a16686dfbf Fix wrong taskNames 2023-06-22 22:52:26 +02:00
4275703941 Added MangaKatana connector 2023-06-22 14:22:21 +02:00
c3342984ea Server fixed bug where ?& in request url caused variables to not parse 2023-06-21 18:04:41 +02:00
ed4bdb5b33 TrangaSettings export after change 2023-06-21 18:04:12 +02:00
0f0902c932 LunaSea changed to id device/id or user/id instead of full url 2023-06-21 18:03:48 +02:00
6508055b43 API Fix closed response socket 2023-06-21 17:42:56 +02:00
abc66511d8 Fixed progress tracking this time for realsies. resolves #5 2023-06-21 17:30:31 +02:00
9ed36c47b5 Fixed taskId on init deserialization 2023-06-21 17:29:48 +02:00
fd1b2a8470 API Fix closed response socket 2023-06-21 17:29:20 +02:00
8058749ab5 Website fix wrong task on deletion 2023-06-21 16:53:56 +02:00
8737617e5f Fix deletion of successful child tasks 2023-06-21 16:53:41 +02:00
7e4f43f1e2 API fix CORS preflight 2023-06-21 16:53:07 +02:00
12b1b2afd6 Server fix interfaces on windows 2023-06-21 16:52:57 +02:00
0f9ac60fcd closes #11 readme update 2023-06-21 16:17:40 +02:00
8c87f2948c README updated screenshots 2023-06-21 16:08:36 +02:00
e0fb817256 Changed glax/tranga-base to latest 2023-06-20 23:26:49 +02:00
cdd2d94ba1 Wrote my own Http-Server.
ASP-NET can **** my **** and *** :)
2023-06-20 23:15:56 +02:00
d5b7645cd2 "Thread-safe" message adding.. 2023-06-20 23:15:22 +02:00
9af5c1603e Using HttpStatusCode to signify Task-Success
When DownloadChapterTask returns notfound, do not retry.
2023-06-20 15:46:54 +02:00
1035939309 Fix overflow 2023-06-20 15:18:58 +02:00
3b542c04f6 ReShaper cleanup,
Remove unnecessary using directives
2023-06-20 14:59:08 +02:00
a809b7c285 Added timeout to Connector DownloadClient 2023-06-20 14:58:02 +02:00
e883277400 Renamed DownloadNewChaptersTask to MonitorPublicationTask
Added TrangaTask.Clone() method
Rewrote TrangaTask.progress for the billionth+1 time.
Removed Increment and DecrementProgress methods
Removed TrangaTask.ReplaceFailedChildTask method
Changed return type of TrangaTask.ExecuteTask to bool, signifying success.
Added Failed Execution state to TrangaTask
Replaced taskManager failed-task logic
Removed TaskManager bulky AddTask and DeleteTask methods
Removed TaskManager bulky Constructor
2023-06-20 14:57:44 +02:00
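The rework above makes success explicit: ExecuteTask returns a bool and a Failed execution state replaces the old failed-task bookkeeping. A minimal sketch of that control flow, with simplified names:

```csharp
public enum ExecutionState { Waiting, Running, Failed }

public abstract class TrangaTaskSketch
{
    public ExecutionState State { get; private set; } = ExecutionState.Waiting;

    // Concrete tasks (e.g. a MonitorPublicationTask or DownloadChapterTask) implement the
    // actual work and report success as a bool instead of mutating shared progress counters.
    protected abstract bool ExecuteTask();

    public void Execute()
    {
        State = ExecutionState.Running;
        bool success = ExecuteTask();
        State = success ? ExecutionState.Waiting : ExecutionState.Failed;
    }
}
```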
23dfdc0933 Connector DownloadChapter, DownloadImage, DownloadChapterImages returns successState.
RequestResult replace HttpStatusCode with success-status boolean.
DownloadChapterTask: Only send Notification when Chapter download successful
2023-06-19 22:45:33 +02:00
edc24fff5b Moved notification to DownloadChapterTask, sends when parentTask exists. 2023-06-19 22:34:34 +02:00
6cdccdf66b Fix infinite loop of DownloadNewChaptersTask 2023-06-19 22:32:32 +02:00
a4c9168551 Selector for task-sanitizer 2023-06-19 17:17:47 +02:00
821a1b7c3a Unique IDs for TrangaTask now based on Random-generator 2023-06-19 17:17:24 +02:00
b2b4256972 Startup message api 2023-06-19 16:46:12 +02:00
d2f46e4637 #21 Deserialization of LunaSea Object 2023-06-19 11:27:07 +02:00
303fc293ba Fixed Bug on AddTask where no new UpdateLibraryTask would be added 2023-06-15 22:32:55 +02:00
36c145da26 Gotify change to normal priority 2023-06-15 21:24:01 +02:00
c822c74f42 website fix taskSelectOutput overflow issue 2023-06-15 21:16:56 +02:00
dda4054d34 API: Fix nullable bug on Getchapters 2023-06-15 21:15:44 +02:00
5b2546fdbc removed unnecessary log 2023-06-15 19:07:25 +02:00
c11e3993ea Added successmessage to NotificationManager 2023-06-15 19:06:53 +02:00
02a382a99a Website: Added connector NotificationManager LunaSea
Added Update Method for TrangaSettings for LunaSea
#21
2023-06-15 18:57:50 +02:00
c6c8f5cdf6 TrangaSettings nullable library and notificationManagers will initialize a new Hashset 2023-06-15 18:50:50 +02:00
84842aed3c Added connector NotificationManager LunaSea 2023-06-15 18:50:19 +02:00
d9ced11cd1 Website: Added gotify config 2023-06-15 18:38:47 +02:00
25c90782dc Moved UpdateSettings to TrangaSettings
Added NotificationManager
Added Gotify
Added Notification on MonitorTask download new chapters
2023-06-15 18:25:32 +02:00
e789c429cd TaskManager when deleting task also remove from parent. 2023-06-15 18:24:19 +02:00
93de471836 Added TrangaTask.RemoveChildTask 2023-06-15 18:22:59 +02:00
8b58e7dd13 Website: On Download Chapters only show chapters that have not yet been downloaded
API: Added new variables to /Publications/GetChapters: onlyNew and onlyExisting. API will return only new, only existing or all chapters depending on variables.
#19
2023-06-15 17:14:20 +02:00
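The API addition above filters the chapter list depending on two boolean parameters. A minimal sketch of that selection logic; the chapter model and download check are placeholders.

```csharp
using System.Collections.Generic;
using System.Linq;

public record ChapterInfoSketch(string Title, bool IsDownloaded);

public static class ChapterFilterSketch
{
    // onlyNew      -> chapters not yet on disk
    // onlyExisting -> chapters already downloaded
    // neither      -> everything
    // (Mirrors the described /Publications/GetChapters parameters.)
    public static IEnumerable<ChapterInfoSketch> Filter(
        IEnumerable<ChapterInfoSketch> chapters, bool onlyNew, bool onlyExisting)
    {
        if (onlyNew)
            return chapters.Where(c => !c.IsDownloaded);
        if (onlyExisting)
            return chapters.Where(c => c.IsDownloaded);
        return chapters;
    }
}
```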
b571bfa43d Moved GetNewChaptersList to taskManager and added GetExistingChaptersList 2023-06-15 17:07:32 +02:00
088d1c4647 Derived Constructor 2023-06-15 17:06:41 +02:00
f280c01802 Browser Version for both windows and linux 2023-06-15 16:30:07 +02:00
1be10b310d Fix Regex Bug on download volumes 2023-06-11 19:17:03 +02:00
a0469f3145 Cancel DownloadChapter-Task on removal 2023-06-11 19:16:05 +02:00
fcd81f03b3 resolves #17 no cover image 2023-06-11 19:05:08 +02:00
76604d84d8 Better way of handling progress, and childProgress. 2023-06-11 18:24:26 +02:00
af822febbe fixed nullable warning 2023-06-11 18:01:04 +02:00
8e207c3119 Better way of handling progress, and childProgress. 2023-06-11 17:27:33 +02:00
b6f8c8aab5 TaskType check 2023-06-11 17:05:24 +02:00
36f7cbd3e9 Better way of handling progress, and childProgress.
More reliable taskFinishTime
2023-06-11 17:04:33 +02:00
3b2643d949 Website show remaining time instead of percentage 2023-06-11 16:38:12 +02:00
9fd8bf1741 website uses taskId 2023-06-10 16:00:41 +02:00
d5c9c5ba96 Redid progress calculation on DownloadNewChaptersTask and DownloadChapterTask 2023-06-10 16:00:16 +02:00
c8e27921ab Added taskId to trangaTask and parentTaskId to DownloadChapterTask as unique identifier to attach ChildTasks to ParentTask on deserialization. 2023-06-10 15:59:42 +02:00
6eaba07801 Changed progress type from float to double 2023-06-10 15:58:11 +02:00
41929e0c72 DownloadChapterTask sets execution of parentTask 2023-06-10 15:04:37 +02:00
4fcaca1a6e Multiple authors resolves #7 2023-06-10 14:45:04 +02:00
0e3c7f32d7 Added CancellationToken to TrangaTask #14 2023-06-10 14:34:30 +02:00
1c94625840 Added CancellationToken to TrangaTask #14 2023-06-10 14:27:09 +02:00
32f89f9dce Multiple authors resolves #7 2023-06-10 14:05:23 +02:00
234735a562 Order of tasks closes #15
Also API /Queue/Get orders in order of nextExecution
2023-06-10 00:45:55 +02:00
8b916eb854 invalid Ids 2023-06-10 00:23:23 +02:00
29e1790c93 website tasks-width now max 95vw 2023-06-10 00:10:16 +02:00
ac4c799a74 Better indication if tasks have started. 2023-06-10 00:07:41 +02:00
7c62883c37 invalid id 2023-06-10 00:02:51 +02:00
02018253bf wrong nesting ... 2023-06-10 00:01:38 +02:00
2aec884009 Moved update interval for task-progress to own interval, progress gets continually updated. 2023-06-09 23:58:04 +02:00
b3321ff030 unnecessary log 2023-06-09 23:48:39 +02:00
16c1094875 Replaced Task-Progress-Tracking Window with more fancy one 2023-06-09 23:46:10 +02:00
5763d50409 #14 temporary workaround for disposing tasks 2023-06-09 23:45:53 +02:00
ad43297358 API: Updated /Tasks/GetProgress to return progress of specific task (by sortNumber) 2023-06-09 23:43:57 +02:00
b17800e0ef Decrement progress of parenttask when childtask fails 2023-06-09 23:43:19 +02:00
89c80d2997 Fixed bug where tasks would instantly failed when launched #14 2023-06-09 23:42:54 +02:00
6485b8744f API: Updated /Tasks/GetProgress to return progress of specific task (by sortNumber) 2023-06-09 23:42:18 +02:00
a3a96b6b55 Added DecrementProgress function to TrangaTask 2023-06-09 23:38:28 +02:00
5bce3c6fdd Website: Monitor task creation styling 2023-06-09 22:15:29 +02:00
5fa0c98d05 Documentation how to create tasks #11 2023-06-09 11:26:51 +02:00
b166013770 resolves #13 Website: Clear previous results 2023-06-09 11:12:43 +02:00
02fe849046 Better downloadChapter selection 2023-06-09 11:06:18 +02:00
d42393c83a Website + API ability to download specific volumes 2023-06-08 19:53:05 +02:00
c685bd622f Website:
New task-Creation dialog
Redesigned Settings dialog
2023-06-08 19:25:28 +02:00
dc83cc2194 Fixed Range on CLI downloadchaptertask creation 2023-06-08 19:25:03 +02:00
7784f2024e API changes:
/Tranga/GetAvailableControllers => /Controllers/Get
/Tranga/GetKnownPublications =>/Publications/GetKnown
/Tranga/GetPublicationsFromConnector => /Publications/GetFromConnector
/Tasks/GetTaskTypes => /Tasks/GetTypes
/Tasks/GetTaskProgress => /Tasks/GetProgress
/Tasks/Create is now split in 3:
    /Tasks/CreateMonitorTask
    /Tasks/CreateUpdateLibraryTask
    /Tasks/CreateDownloadChaptersTask
2023-06-08 19:24:46 +02:00
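Since several endpoints are renamed at once, a small mapping makes the relationship explicit. A hypothetical sketch of redirecting old paths to the new ones; Tranga's server does not necessarily keep such a table, this only restates the list above.

```csharp
using System.Collections.Generic;

public static class LegacyEndpointMapSketch
{
    // Old request path -> new request path, as listed in the commit message above.
    public static readonly IReadOnlyDictionary<string, string> OldToNew = new Dictionary<string, string>
    {
        ["/Tranga/GetAvailableControllers"]      = "/Controllers/Get",
        ["/Tranga/GetKnownPublications"]         = "/Publications/GetKnown",
        ["/Tranga/GetPublicationsFromConnector"] = "/Publications/GetFromConnector",
        ["/Tasks/GetTaskTypes"]                  = "/Tasks/GetTypes",
        ["/Tasks/GetTaskProgress"]               = "/Tasks/GetProgress"
    };

    public static string Resolve(string path) =>
        OldToNew.TryGetValue(path, out string newPath) ? newPath : path;
}
```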
4895079887 Remove DownloadChapterTask from _runningDownloadChapterTasks after completion 2023-06-07 15:01:24 +02:00
ab1ddc6dc8 Less cluttered log 2023-06-07 00:31:27 +02:00
87eade10cf #40 task timeout criteria 2023-06-07 00:27:53 +02:00
1f3ac41b30 removed unnecessary cast 2023-06-07 00:24:58 +02:00
6a304bb330 #40 task timeout 2023-06-07 00:24:27 +02:00
b0642d1251 removed unnecessary check 2023-06-06 22:11:57 +02:00
63b5139e93 Split error message for better logging 2023-06-06 22:11:38 +02:00
e938784388 Created own base image for tranga-api (to stop apt always updating) 2023-06-06 22:11:26 +02:00
c436389426 renamed wrong variable names publicationId -> internalId 2023-06-06 21:57:10 +02:00
5099e25f3f Fixed wrong comparison on add new task 2023-06-06 21:56:51 +02:00
cf6fc3b8f6 Update readme HtmlAgilityPack 2023-06-06 21:27:12 +02:00
f5141d0f8e removed unnecessary async 2023-06-06 21:27:02 +02:00
5c753e7a7d Added fields to TrangaTask:
executionStarted,
executionApproximatelyFinished,
executionApproximatelyRemaining
to track progress
2023-06-06 21:19:30 +02:00
17ce820cf3 Changed taskProgress increment to call method that updates progress accordingly (with parent-tasks being also updated) 2023-06-06 20:54:21 +02:00
5b4a3b9d7c README update 2023-06-06 15:41:26 +02:00
f73997e563 Fix browser startup 2023-06-05 21:38:32 +02:00
437136804d Also delete downloadChapterTask when deleting download-new-chapters tasks 2023-06-05 21:16:04 +02:00
e14683d21a Merge remote-tracking branch 'origin/master'
# Conflicts:
#	Tranga/TaskManager.cs
2023-06-05 21:11:10 +02:00
5ae02ee0ed Fix Bug where all tasks would be deleted... 2023-06-05 21:10:42 +02:00
a2e9a3f34a Fix Bug where all tasks would be deleted... 2023-06-05 20:52:21 +02:00
bbf05e3dec name and volume number 2023-06-05 20:37:44 +02:00
d95839e5df Chromium Version and Dependencies 2023-06-05 20:36:01 +02:00
5a303598fe Download progress updates every 5 seconds 2023-06-05 19:53:00 +02:00
db2103963e Merge remote-tracking branch 'origin/master' 2023-06-05 19:47:08 +02:00
2c1105527a Add Connector Mangasee #34 2023-06-05 19:47:04 +02:00
ed19dcb5c3 Add Connector Mangasee 2023-06-05 19:46:22 +02:00
46f06c2685 Duplicate logs 2023-06-05 19:46:10 +02:00
d4f47e057c Fix sortNumber when volume null 2023-06-05 19:45:50 +02:00
61712d0537 variable name 2023-06-05 01:03:03 +02:00
1f8f8c09e3 Wrong comparison publicationId -> internalId 2023-06-05 00:50:51 +02:00
0522fa6215 Fix wrong cast 2023-06-05 00:40:19 +02:00
0383a7d686 Merge remote-tracking branch 'origin/master' 2023-06-05 00:38:11 +02:00
bd189984a9 Rewrote entire Task-Structure:
TrangaTask now only contains essentials, derived classes contain specific information such as connectorName, publication, chapter, etc.
Removed taskQueue system, instead all tasks are kept in _allTasks.
Progress is being tracked in TrangaTask resolves #36 resolves #32
Added new TrangaTask: DownloadChapter to download single chapters. #35
Fixed duplicate file-access when writing settings.
2023-06-05 00:38:07 +02:00
58c01b2174 Rewrote entire Task-Structure:
TrangaTask now only contains essentials, derived classes contain specific information such as connectorName, publication, chapter, etc.
Removed taskQueue system, instead all tasks are kept in _allTasks.
Progress is being tracked in TrangaTask
Added new TrangaTask: DownloadChapter to download single chapters.
Fixed duplicate file-access when writing settings.
2023-06-05 00:35:57 +02:00
459558a514 Merge remote-tracking branch 'origin/master' 2023-06-04 21:15:37 +02:00
721b316209 Legal Characters in folder names #38 2023-06-04 21:15:33 +02:00
b1befa2ecc Legal Characters in folder names 2023-06-04 21:14:45 +02:00
57a4cc4ab5 #38 Filenames 2023-06-03 23:44:58 +02:00
655e8db2b6 #38 Publication naming scheme 2023-06-03 23:42:59 +02:00
7cdf77cbb9 Less logging clutter 2023-06-03 22:55:53 +02:00
5a9aed4969 Kavita naming convention 2023-06-03 22:39:27 +02:00
5b41f687d0 Kavita naming convention 2023-06-03 22:34:02 +02:00
d6a62dc315 Manganato download order 2023-06-03 22:25:34 +02:00
6d91788655 Kavita naming convention 2023-06-03 22:25:24 +02:00
14785e5672 increased refresh time 2023-06-03 21:37:21 +02:00
496d502cd2 Kavita Auth is a pain. 2023-06-03 21:26:29 +02:00
7612411917 #33 Update Website 2023-06-03 16:25:04 +02:00
ed1402b5ec TrangaSettings Return libraryManageres on serialization 2023-06-03 16:24:54 +02:00
5adaee4821 redundant calls 2023-06-03 16:24:30 +02:00
2d82fe1489 libraryType in constructor 2023-06-03 16:24:14 +02:00
783fd8129e API: Kavita Auth #33 2023-06-03 15:40:26 +02:00
4f29eff48e Kavita authorization 2023-06-03 15:39:48 +02:00
e0e7abb62b #33 Added custom serializer for LibraryManager
Added Code for Kavita
2023-06-03 15:32:54 +02:00
ae63a216a5 unnecessary params 2023-06-03 15:27:35 +02:00
5d98295c59 #33 Preparation:
TrangaSettings now stores Hashset of LibraryManagers
2023-06-03 15:17:08 +02:00
0c580933f9 #33 Preparation:
Abstracted class Komga into LibraryManager
Fixed logger not attaching to LibraryManager
2023-06-03 15:02:15 +02:00
06f735aadd #32 API endpoint 2023-06-01 23:08:43 +02:00
439d69d8e0 Sanitization Manganato 2023-06-01 22:59:51 +02:00
933df58712 Moved publicationFolder creation to Publication with Permissions 2023-06-01 22:59:04 +02:00
165bbc412e Adjusted Manganato ratelimit 2023-06-01 22:49:20 +02:00
6158fa072b File permissions 2023-06-01 22:32:11 +02:00
0d3799e00d Fix Bug when strings were shorter than 25 characters on logger.writeline
Fixed CLI output
2023-06-01 22:27:37 +02:00
e977bed5a5 #32 formatting length 2023-06-01 22:14:00 +02:00
cacd5fede2 removed unnecessary todo 2023-06-01 22:06:10 +02:00
1bca99cb6a #32 Added progress tracking to task (internal and log use for now) 2023-06-01 22:05:48 +02:00
15fc367263 logging 2023-06-01 21:16:57 +02:00
8bb6fb902b File Permissions 2023-06-01 18:28:58 +02:00
a57903cd5a readme update 2023-06-01 16:04:21 +02:00
1cd819b21d update docker-compose 2023-06-01 15:40:14 +02:00
27afedc1b4 year in series.json 2023-06-01 15:25:26 +02:00
fac0a3f7eb resolves #2 2023-06-01 15:08:32 +02:00
03ca480fe8 remove empty lines at start of description 2023-06-01 14:59:09 +02:00
c2915468a5 status .tolower 2023-06-01 14:58:58 +02:00
8805c53cb8 wrong url for manga info page 2023-06-01 13:37:06 +02:00
adbbe3f6cc logs 2023-06-01 13:27:58 +02:00
14b694d3be Description value duplicate #2 2023-06-01 13:25:58 +02:00
72ce75c6e0 #2 Multiple alt titles 2023-06-01 13:18:26 +02:00
8381951168 #2 First Attempt 2023-06-01 13:13:53 +02:00
b24032d124 remove env var 2023-06-01 11:29:47 +02:00
8bc23f7c69 Instead of relying on concrete tasks to do chores, create method in abstract class that calls the BL in concrete class and does chores before and after execution 2023-06-01 10:35:23 +02:00
48b7371a18 Issue missing parameter 2023-05-31 22:34:13 +02:00
61ecefb615 Logging and Chores in abstract class 2023-05-31 22:32:37 +02:00
8ff65bf400 compatibility with older tasks.json 2023-05-31 22:26:53 +02:00
932057cca0 update execution time 2023-05-31 22:20:33 +02:00
67d06cd887 resolves #23 website filter 2023-05-31 22:16:14 +02:00
cbb012a659 alt and label 2023-05-31 22:16:01 +02:00
e4f33bcca9 Set ExecutionState to waiting after finished 2023-05-31 21:52:50 +02:00
fbba7c45b9 annotation 2023-05-31 21:44:16 +02:00
d9b6062767 Custom JSON Deserializer for concrete classes of abstract class TrangaTask 2023-05-31 21:43:07 +02:00
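Deserializing concrete subclasses of an abstract type with Newtonsoft.Json typically means dispatching on a discriminator field. The sketch below illustrates the general technique; the "taskType" field name, the discriminator values, and the subclass names are assumptions, not Tranga's actual ones.

```csharp
using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

public abstract class TaskBaseSketch { public string taskType = ""; }
public class UpdateKomgaLibraryTaskSketch : TaskBaseSketch { }
public class DownloadNewChaptersTaskSketch : TaskBaseSketch { }

public class TaskJsonConverterSketch : JsonConverter
{
    public override bool CanConvert(Type objectType) => objectType == typeof(TaskBaseSketch);

    public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
    {
        JObject obj = JObject.Load(reader);
        // Pick the concrete class based on a discriminator field in the serialized task.
        TaskBaseSketch task = obj["taskType"]?.Value<string>() switch
        {
            "UpdateKomgaLibrary"  => new UpdateKomgaLibraryTaskSketch(),
            "DownloadNewChapters" => new DownloadNewChaptersTaskSketch(),
            _ => throw new JsonSerializationException("Unknown taskType")
        };
        serializer.Populate(obj.CreateReader(), task);
        return task;
    }

    public override bool CanWrite => false;

    public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) =>
        throw new NotImplementedException();
}
```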
d477cd1ccd corrected check for tasktype on due 2023-05-31 21:42:23 +02:00
f892db7dda Switch Execution State to Running when executing 2023-05-31 21:40:00 +02:00
16c1b5c506 Create imageCacheFolder 2023-05-31 21:39:18 +02:00
d5ecc1c37d Spelling, redundant calls 2023-05-31 21:18:41 +02:00
1b9ebd096b Removed TaskExecutor
TrangaTask is now abstract
TrangaTask implements Execute Method, that is now called instead of TaskExecutor
Created inheriting classes of TrangaTask: UpdateKomgaLibraryTask, DownloadNewChaptersTask
2023-05-31 21:15:32 +02:00
8619630269 Renamed ExportData to ExportDataAndSettings 2023-05-31 21:14:11 +02:00
2bc92556e3 Method description and Name CopyCoverFromCacheToDownloadLocation 2023-05-31 20:39:57 +02:00
f1ab823e7f Method description 2023-05-31 20:39:23 +02:00
8261d02cc7 Renamed methods, made some methods non-static in Connector, some more logging 2023-05-31 20:29:30 +02:00
8d3b8be95c settings placeholders for komga if configured 2023-05-31 17:55:09 +02:00
60519910de cursors 2023-05-31 17:54:54 +02:00
0940afe41f publication viewer tags and styling 2023-05-31 17:54:09 +02:00
3dc376c19f favicon 2023-05-31 17:52:47 +02:00
3e56ef842b Update docker-compose.yaml with version 2023-05-30 19:37:09 +02:00
5a44e3b8b9 #25 only replace settings if parameter actually contains value. 2023-05-30 19:32:22 +02:00
732c2f119c More logging 2023-05-26 15:09:26 +02:00
81638f4b4a Path.join joins paths ya know 2023-05-26 14:51:11 +02:00
c547aa6422 favicon <3 2023-05-26 14:49:17 +02:00
d80980512e #28 is a pain in the buttcheeks 2023-05-26 14:47:13 +02:00
f9f802155d #28 why was there a '!' 2023-05-26 14:43:47 +02:00
eef0955009 #28 wrong filesnames 2023-05-26 14:36:02 +02:00
ec25900ac0 resolves #29 start manual execution 2023-05-26 14:32:08 +02:00
e5fe14a09e #28 2023-05-26 14:31:34 +02:00
5dc91095f8 #28 2023-05-26 14:30:52 +02:00
985ac8fc7a Fix #28 coverimages 2023-05-26 14:07:11 +02:00
c9537a9963 #24 2023-05-26 13:39:42 +02:00
4fd3c03804 Styling 2023-05-26 13:30:20 +02:00
a1e9dd0232 resolves #27 Foldernames ending in '.' 2023-05-25 22:22:57 +02:00
aa1f9b1b56 background fade 2023-05-25 22:17:47 +02:00
6069578b6e style choices 2023-05-25 22:15:06 +02:00
a84b768e24 design choices 2023-05-25 22:05:29 +02:00
d1a21af15d resolves #26 2023-05-25 21:58:45 +02:00
7423ae6ace Update README.md 2023-05-25 18:36:21 +02:00
3aa7ba9d96 screenshots 2023-05-25 18:28:43 +02:00
fdbb4570be Adjusted settings style 2023-05-25 18:18:31 +02:00
b643a0c2a9 Fix Wrong API uri for GetRunningTasks
Added GetQueue function

Added display for running, queued, total tasks
2023-05-25 18:09:18 +02:00
6fa6f897aa More legal characters 2023-05-25 17:34:24 +02:00
2bfab0298d border-radius 2023-05-25 17:33:55 +02:00
147a20385b illegal filenames 2023-05-25 16:55:58 +02:00
afa18d6a2c Illegal characters on Linux 2023-05-25 16:47:24 +02:00
66980eef23 Position publication viewer always within display 2023-05-25 16:05:54 +02:00
65f468a30a popup position now "fixed"
changed publicationviewer width
2023-05-25 16:05:40 +02:00
a91c33ee4f image sizing 2023-05-25 15:50:00 +02:00
f39482fe4c Corrected image path in publication preview 2023-05-25 15:48:38 +02:00
41f47b4d6b styling 2023-05-25 15:47:59 +02:00
be40091102 Publication background fade 2023-05-25 15:41:24 +02:00
665092be6a image scaling 2023-05-25 15:40:03 +02:00
653cb699d0 Removed Sidebar
Moved settings tab to popup
Added footer
2023-05-25 15:34:10 +02:00
8dbc5446ad depends_on compose 2023-05-25 14:46:05 +02:00
750df4ed52 Wrong return value 2023-05-25 14:38:43 +02:00
4772ae0756 No unnecessary downloads of covers if they already exist 2023-05-25 14:35:33 +02:00
23f703d5a5 imageCache readonly for website 2023-05-25 14:30:33 +02:00
6aa0ea277b #22 2023-05-25 14:28:56 +02:00
780df1cd6e Created Image-Cache 2023-05-25 14:25:23 +02:00
0b7da2e9cb Merge remote-tracking branch 'origin/master'
# Conflicts:
#	Website/interaction.js
2023-05-25 13:59:06 +02:00
01a059d26b Base 64 images #22 2023-05-25 13:58:54 +02:00
a8dbece237 Base 64 images #22 2023-05-25 13:58:10 +02:00
5efa00e059 Added field posterBase64 to Publication #22 2023-05-25 13:50:48 +02:00
02075ed1b1 Renamed RequestType Cover to CoverUrl 2023-05-25 13:50:08 +02:00
fabd16ccea Remove unnecessary output from dockerfile 2023-05-25 13:49:29 +02:00
79928075b0 docker-compose.yaml 2023-05-25 10:49:24 +02:00
9b8eb6a197 add seconds field to addtask recurrence 2023-05-25 10:47:12 +02:00
1d263ef45a Configurable API-location 2023-05-25 10:42:19 +02:00
e0877add30 Paths for Linux 2023-05-25 10:25:24 +02:00
046cad8072 Dockerfile for Tranga 2023-05-25 10:25:11 +02:00
b2ce55be96 Port 2023-05-25 01:25:21 +02:00
a6e9013495 Latest alpine image 2023-05-25 00:05:31 +02:00
14c69631a6 Corrected port 2023-05-24 23:53:39 +02:00
ccc4e42a49 Komga update can now be configured in seconds 2023-05-24 23:53:32 +02:00
d6e75fda31 Fixed empty returns if some value were null 2023-05-24 23:52:40 +02:00
fc89537f63 Fixed Authorization on redirect 2023-05-24 23:52:25 +02:00
fd3423d03c Correct Port 2023-05-24 22:56:15 +02:00
878f77766f Fix CORS 2023-05-24 22:56:10 +02:00
08001fd684 SSL cert error 2023-05-24 22:55:32 +02:00
e2917d2f2e Changed CORS policy allow all origins
Added Dockerfile to website
Changed Ports
2023-05-24 22:30:11 +02:00
32dc58715e Merge branch 'Website' 2023-05-24 21:52:31 +02:00
add0583776 Changed default-download folder for API 2023-05-24 21:52:08 +02:00
6fed0e5473 removed console.log 2023-05-24 21:51:26 +02:00
a0636ac7a2 Finished Settings-Cart 2023-05-24 21:48:54 +02:00
7aeb78e2f6 Merge branch 'master' into Website 2023-05-24 21:04:52 +02:00
5cf512f2b2 API: /Tasks/GetList has become /Tasks/Get with options to search for specific tasks 2023-05-24 21:04:24 +02:00
7d96b0901f Search Button on AddTask 2023-05-24 20:57:41 +02:00
68e80bc066 Settings 2023-05-24 20:57:17 +02:00
ad971fb065 Code-Comments 2023-05-24 20:17:50 +02:00
86052472bc Merge pull request 'Website' (#21) from Website into master
Reviewed-on: #21
2023-05-23 18:53:40 +02:00
ec30bb40fa Merge pull request 'CORS, API-Path' (#20) from dev into master
Reviewed-on: #20
2023-05-23 18:53:03 +02:00
2fa96e9793 undo gitignore 2023-05-23 18:52:27 +02:00
78e44b7704 Fix Popup not closing bug
Fix wrong button (add) bug
2023-05-23 18:48:50 +02:00
8bf9df4419 Done Better Task-Adder 2023-05-23 18:46:06 +02:00
4bd54f096d WIP Better Task-Adder 2023-05-23 18:28:27 +02:00
877daf0a1e Fix bug with interval 2023-05-23 18:19:46 +02:00
6d0fcc13fb Only refresh items when tasks are added/removed #1 2023-05-23 18:17:39 +02:00
f0256494fd HidePopup after interaction 2023-05-23 18:12:45 +02:00
39fa905733 Access-Control-Allow-Methods 2023-05-23 18:11:18 +02:00
c557389967 Delete Task 2023-05-23 18:07:15 +02:00
201773af50 Created Publication Viewer 2023-05-23 17:57:48 +02:00
f85e02fb0a Empty results when opening addtaskmenu and when searching. 2023-05-23 16:59:45 +02:00
73d98b9c0f Add Task Window styling 2023-05-23 16:54:39 +02:00
b0ee888c82 Exit popup by clicking outside of it 2023-05-23 16:29:09 +02:00
5c4431778e Task can now be added 2023-05-23 16:27:09 +02:00
ccfa213b77 some bugfixes 2023-05-23 15:19:09 +02:00
22d6389d38 Fix wrong API path for create task 2023-05-23 15:17:47 +02:00
f53dfb0822 update task select window #1 2023-05-23 15:15:29 +02:00
a966bd788d Return array for GetAvailableControllers 2023-05-23 14:45:51 +02:00
dd651adc15 Add Task window 2023-05-23 14:44:59 +02:00
ba5ae67aa7 Fix wrong API path for GetTaskTypes 2023-05-23 14:44:45 +02:00
da4a5bed09 All API-calls #1 2023-05-23 13:52:35 +02:00
947b521163 Changed API: GetAvailableControllers, GetKnownPublications, GetPublicationsFromConnector to Tranga/* 2023-05-23 13:17:05 +02:00
5674adbd5e Added CORS for localhost 2023-05-23 13:16:37 +02:00
290819de09 Created first api-calls #1 2023-05-23 13:15:29 +02:00
0d0b68a8f9 add Website to .gitignore for dev-branch 2023-05-23 12:52:09 +02:00
87d2357b41 CORS Error 2023-05-23 12:51:21 +02:00
e3186aebb0 Merge branch 'master' into Website 2023-05-23 00:17:25 +02:00
1cd37e2b1b Update gitignore 2023-05-23 00:16:48 +02:00
9c267f395f But I like this! #1 2023-05-23 00:12:30 +02:00
e2b8888130 #1 Basic layout and colors 2023-05-22 23:52:35 +02:00
b6ac2682f6 #1 First commit
It do be uglyyyyy
2023-05-22 22:25:50 +02:00
eddf50483f Fixed some nullable types 2023-05-22 21:44:52 +02:00
a71d65e666 Fix negative sleep time 2023-05-22 21:41:11 +02:00
9a640aed27 Rewrote CoverDownload check if exists. 2023-05-22 21:38:44 +02:00
30b6c4680b Better Rate-Limits
Added Logger to DownloadClient
2023-05-22 21:38:23 +02:00
7b6253de0f Create Publication Folder at start of DownloadNewChapters 2023-05-22 21:37:30 +02:00
5aa3214ce5 TrangaTask.ToString() rewrite for logs-readability.
LogMessages only include class-name without path
2023-05-22 21:37:02 +02:00
9b70994f71 Adjusted RateLimit 2023-05-22 18:55:26 +02:00
93cf341f2d Fixed Publication.InternalId 2023-05-22 18:28:42 +02:00
01cb74c088 First attempt at #18 Rate Limits 2023-05-22 18:15:59 +02:00
ec480dffad Merge pull request 'closes #7' (#17) from Issue_7 into master
Reviewed-on: #17
2023-05-22 17:21:42 +02:00
b7014cbff5 Merge pull request 'fixes #14' (#16) from Issue_14_ChapterIsDownlaoded into master
Reviewed-on: #16
2023-05-22 17:21:19 +02:00
0cab921402 Merge pull request 'fixes #11' (#15) from Issue_11 into master
Reviewed-on: #15
2023-05-22 17:20:54 +02:00
0e0ba1796e closes #7 2023-05-22 17:20:07 +02:00
27d8565dc1 fixes #14 2023-05-22 17:09:47 +02:00
79dc44d707 fixes #11 2023-05-22 17:04:31 +02:00
bb6a0ad0d4 Merge pull request 'fixes #9' (#13) from Issue_9 into master
Reviewed-on: #13
2023-05-22 16:53:40 +02:00
43db463ba6 fixes #9 2023-05-22 16:52:52 +02:00
9eb8ddbc40 Changed Publication:
downloadUrl is now publicationId, internal to Connector
posterUrl is now a URL to the file, instead of an id
2023-05-22 16:45:55 +02:00
972cba69ec JsonIgnore
And better working directory stuff
2023-05-22 02:06:49 +02:00
962fe9529e Merge remote-tracking branch 'origin/master' 2023-05-22 01:53:36 +02:00
da1b0cb1cd Change to CommonApplicationFolder as applicationPath 2023-05-22 01:53:27 +02:00
7f88e57e47 Change to CommonApplicationFolder as applicationPath 2023-05-22 01:49:53 +02:00
8865bf284f Corrected applicationFolder in API 2023-05-22 01:42:53 +02:00
5fc2de5fcb logging 2023-05-22 01:20:32 +02:00
4bae223d95 Custom UniqueIdentifier. 2023-05-22 00:33:58 +02:00
0486168b43 AddMangaTaskToQueue Shortcut 2023-05-22 00:15:08 +02:00
b64ab5c6d4 Created TrangaSettings
Different files for settings, tasks, and known publications
Komga connector is stored in TrangaSettings
2023-05-22 00:13:24 +02:00
578fa5e6be JsonIgnore 2023-05-21 23:27:28 +02:00
4d33e78123 unused variable 2023-05-21 22:24:23 +02:00
52ac3e4e4e Proper Mapping for deleting and dequeueing 2023-05-21 22:24:12 +02:00
105 changed files with 8165 additions and 2332 deletions


@@ -1,25 +0,0 @@
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/.idea
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/azds.yaml
**/bin
**/charts
**/docker-compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
LICENSE
README.md

.github/ISSUE_TEMPLATE/bug_report.yml vendored Normal file

@@ -0,0 +1,21 @@
name: Bug Report
description: File a bug report
title: "[It broke]: "
labels: ["bug"]
body:
- type: textarea
attributes:
label: What is broken?
description: What happened? How did we get here?
placeholder: The place where you tell me what you expected to happen, and what happened instead.
validations:
required: true
- type: textarea
attributes:
label: Log-output
description: The output of `docker logs tranga-api`
render: C#
- type: textarea
attributes:
label: Additional stuff
description: Screenshots, anything you think might help


@@ -0,0 +1,23 @@
name: New Connector Request
description: Request a new site to be added
title: "[New Connector]: "
labels: ["New Connector"]
body:
- type: input
attributes:
label: Website-Link
placeholder: https://
validations:
required: true
- type: checkboxes
attributes:
label: Is the Website free to access?
description: We can't support pay-to-use sites.
options:
- label: The Website is freely accessible.
required: true
- type: textarea
attributes:
label: Anything else?
validations:
required: false

.github/dependabot.yml vendored Normal file

@@ -0,0 +1,7 @@
version: 2
updates:
# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"


@@ -0,0 +1,45 @@
name: Docker Image CI
on:
push:
branches: [ "cuttingedge" ]
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
# https://github.com/docker/setup-qemu-action#usage
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
# https://github.com/marketplace/actions/docker-setup-buildx
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3.8.0
# https://github.com/docker/login-action#docker-hub
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# https://github.com/docker/build-push-action#multi-platform-image
- name: Build and push API
uses: docker/build-push-action@v6.9.0
with:
context: ./
file: ./Dockerfile
#platforms: linux/amd64,linux/arm64,linux/riscv64,linux/ppc64le,linux/s390x,linux/386,linux/mips64le,linux/mips64,linux/arm/v7,linux/arm/v6
platforms: linux/amd64,linux/arm64
pull: true
push: true
tags: |
glax/tranga-api:cuttingedge

.github/workflows/docker-image-dev.yml vendored Normal file

@@ -0,0 +1,45 @@
name: Docker Image CI
on:
push:
branches: [ "dev" ]
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
# https://github.com/docker/setup-qemu-action#usage
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
# https://github.com/marketplace/actions/docker-setup-buildx
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3.8.0
# https://github.com/docker/login-action#docker-hub
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# https://github.com/docker/build-push-action#multi-platform-image
- name: Build and push API
uses: docker/build-push-action@v6.9.0
with:
context: ./
file: ./Dockerfile
#platforms: linux/amd64,linux/arm64,linux/riscv64,linux/ppc64le,linux/s390x,linux/386,linux/mips64le,linux/mips64,linux/arm/v7,linux/arm/v6
platforms: linux/amd64,linux/arm64
pull: true
push: true
tags: |
glax/tranga-api:dev


@@ -0,0 +1,45 @@
name: Docker Image CI
on:
push:
branches: [ "master" ]
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
# https://github.com/docker/setup-qemu-action#usage
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
# https://github.com/marketplace/actions/docker-setup-buildx
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3.8.0
# https://github.com/docker/login-action#docker-hub
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# https://github.com/docker/build-push-action#multi-platform-image
- name: Build and push API
uses: docker/build-push-action@v6.9.0
with:
context: ./
file: ./Dockerfile
#platforms: linux/amd64,linux/arm64,linux/riscv64,linux/ppc64le,linux/s390x,linux/386,linux/mips64le,linux/mips64,linux/arm/v7,linux/arm/v6
platforms: linux/amd64,linux/arm64
pull: true
push: true
tags: |
glax/tranga-api:latest


@@ -0,0 +1,45 @@
name: Docker Image CI
on:
push:
branches: [ "Server-V2" ]
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
# https://github.com/docker/setup-qemu-action#usage
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
# https://github.com/marketplace/actions/docker-setup-buildx
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3.8.0
# https://github.com/docker/login-action#docker-hub
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# https://github.com/docker/build-push-action#multi-platform-image
- name: Build and push API
uses: docker/build-push-action@v6.9.0
with:
context: ./
file: ./Dockerfile
#platforms: linux/amd64,linux/arm64,linux/riscv64,linux/ppc64le,linux/s390x,linux/386,linux/mips64le,linux/mips64,linux/arm/v7,linux/arm/v6
platforms: linux/amd64,linux/arm64
pull: true
push: true
tags: |
glax/tranga-api:Server-V2

.gitignore vendored

@@ -16,4 +16,12 @@ riderModule.iml
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
/.idea
/.idea
cover.jpg
cover.png
/.vscode
/.vs/
Tranga/Properties/launchSettings.json
/Manga
/settings
*.DotSettings.user

API/API.csproj Normal file

@@ -0,0 +1,37 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<NoWarn>$(NoWarn);1591</NoWarn>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Asp.Versioning.Mvc.ApiExplorer" Version="8.1.0" />
<PackageReference Include="HtmlAgilityPack" Version="1.11.71" />
<PackageReference Include="log4net" Version="3.0.3" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.NewtonsoftJson" Version="9.0.0" />
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.0">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="Npgsql" Version="9.0.2" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.2" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.Design" Version="1.1.0" />
<PackageReference Include="PuppeteerSharp" Version="20.0.5" />
<PackageReference Include="SixLabors.ImageSharp" Version="3.1.5" />
<PackageReference Include="Soenneker.Utils.String.NeedlemanWunsch" Version="3.0.697" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="7.2.0" />
<PackageReference Include="System.Drawing.Common" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<Folder Include="Migrations\" />
</ItemGroup>
</Project>

API/API.http Normal file

@@ -0,0 +1,6 @@
@API_HostAddress = http://localhost:5105
GET {{API_HostAddress}}/weatherforecast/
Accept: application/json
###


@@ -0,0 +1,223 @@
using API.Schema;
using API.Schema.Jobs;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{version:apiVersion}/[controller]")]
public class JobController(PgsqlContext context) : Controller
{
/// <summary>
/// Returns all Jobs
/// </summary>
/// <returns>Array of Jobs</returns>
[HttpGet]
[ProducesResponseType<Job[]>(Status200OK)]
public IActionResult GetAllJobs()
{
Job[] ret = context.Jobs.ToArray();
return Ok(ret);
}
/// <summary>
/// Returns Jobs with requested Job-IDs
/// </summary>
/// <param name="ids">Array of Job-IDs</param>
/// <returns>Array of Jobs</returns>
[HttpPost("WithIDs")]
[ProducesResponseType<Job[]>(Status200OK)]
public IActionResult GetJobs([FromBody]string[] ids)
{
Job[] ret = context.Jobs.Where(job => ids.Contains(job.JobId)).ToArray();
return Ok(ret);
}
/// <summary>
/// Get all Jobs in requested State
/// </summary>
/// <param name="state">Requested Job-State</param>
/// <returns>Array of Jobs</returns>
[HttpGet("State/{state}")]
[ProducesResponseType<Job[]>(Status200OK)]
public IActionResult GetJobsInState(JobState state)
{
Job[] jobsInState = context.Jobs.Where(job => job.state == state).ToArray();
return Ok(jobsInState);
}
/// <summary>
/// Returns all Jobs of requested Type
/// </summary>
/// <param name="type">Requested Job-Type</param>
/// <returns>Array of Jobs</returns>
[HttpGet("Type/{type}")]
[ProducesResponseType<Job[]>(Status200OK)]
public IActionResult GetJobsOfType(JobType type)
{
Job[] jobsOfType = context.Jobs.Where(job => job.JobType == type).ToArray();
return Ok(jobsOfType);
}
/// <summary>
/// Return Job with ID
/// </summary>
/// <param name="id">Job-ID</param>
/// <returns>Job</returns>
[HttpGet("{id}")]
[ProducesResponseType<Job>(Status200OK)]
[ProducesResponseType(Status404NotFound)]
public IActionResult GetJob(string id)
{
Job? ret = context.Jobs.Find(id);
return (ret is not null) switch
{
true => Ok(ret),
false => NotFound()
};
}
/// <summary>
/// Create a new DownloadNewChaptersJob
/// </summary>
/// <param name="mangaId">ID of the Manga</param>
/// <param name="recurrenceTime">Interval between checks for new chapters</param>
/// <returns>Nothing</returns>
[HttpPut("NewDownloadChapterJob/{mangaId}")]
[ProducesResponseType(Status201Created)]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult CreateNewDownloadChapterJob(string mangaId, [FromBody]ulong recurrenceTime)
{
Job job = new DownloadNewChaptersJob(recurrenceTime, mangaId);
return AddJob(job);
}
/// <summary>
/// Create a new DownloadSingleChapterJob
/// </summary>
/// <param name="chapterId">ID of the Chapter</param>
/// <returns>Nothing</returns>
[HttpPut("DownloadSingleChapterJob/{chapterId}")]
[ProducesResponseType(Status201Created)]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult CreateNewDownloadChapterJob(string chapterId)
{
Job job = new DownloadSingleChapterJob(chapterId);
return AddJob(job);
}
/// <summary>
/// Create a new UpdateMetadataJob
/// </summary>
/// <param name="mangaId">ID of the Manga</param>
/// <returns>Nothing</returns>
[HttpPut("UpdateMetadataJob/{mangaId}")]
[ProducesResponseType(Status201Created)]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult CreateUpdateMetadataJob(string mangaId)
{
Job job = new UpdateMetadataJob(0, mangaId);
return AddJob(job);
}
/// <summary>
/// Create a new UpdateMetadataJob for all Manga
/// </summary>
/// <returns>Nothing</returns>
[HttpPut("UpdateMetadataJob")]
[ProducesResponseType(Status201Created)]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult CreateUpdateAllMetadataJob()
{
List<string> ids = context.Manga.Select(m => m.MangaId).ToList();
List<UpdateMetadataJob> jobs = ids.Select(id => new UpdateMetadataJob(0, id)).ToList();
try
{
context.Jobs.AddRange(jobs);
context.SaveChanges();
return Created();
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
private IActionResult AddJob(Job job)
{
try
{
context.Jobs.Add(job);
context.SaveChanges();
return Created();
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
/// <summary>
/// Delete Job with ID
/// </summary>
/// <param name="id">Job-ID</param>
/// <returns>Nothing</returns>
[HttpDelete("{id}")]
[ProducesResponseType(Status200OK)]
[ProducesResponseType(Status404NotFound)]
[ProducesResponseType(Status500InternalServerError)]
public IActionResult DeleteJob(string id)
{
try
{
Job? ret = context.Jobs.Find(id);
switch (ret is not null)
{
case true:
context.Remove(ret);
context.SaveChanges();
return Ok();
case false: return NotFound();
}
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
/// <summary>
/// Starts the Job with the requested ID
/// </summary>
/// <param name="id">Job-ID</param>
/// <returns>Nothing</returns>
[HttpPost("{id}/Start")]
[ProducesResponseType(Status202Accepted)]
[ProducesResponseType(Status404NotFound)]
[ProducesResponseType(Status500InternalServerError)]
public IActionResult StartJob(string id)
{
Job? ret = context.Jobs.Find(id);
if (ret is null)
return NotFound();
try
{
context.Update(ret);
context.SaveChanges();
return Accepted();
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
[HttpPost("{id}/Stop")]
public IActionResult StopJob(string id)
{
return NotFound(new ProblemResponse("Not implemented")); //TODO
}
}
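For orientation, a client call against the JobController above could look like the following minimal sketch; it is not part of the commit, the host address is simply the value from the API.http template, and "some-manga-id" is a placeholder.
using System;
using System.Net.Http;
using System.Net.Http.Json;
// Minimal sketch: exercising two JobController endpoints over HTTP.
// Host/port come from the API.http template; "some-manga-id" is a placeholder, not a real ID.
HttpClient client = new() { BaseAddress = new Uri("http://localhost:5105/") };
// PUT v2/Job/NewDownloadChapterJob/{mangaId} -- body is the recurrence time (ulong)
HttpResponseMessage created = await client.PutAsJsonAsync("v2/Job/NewDownloadChapterJob/some-manga-id", 3600000UL);
Console.WriteLine(created.StatusCode); // 201 Created on success, 500 with the error message otherwise
// GET v2/Job -- returns all Jobs as a JSON array
Console.WriteLine(await client.GetStringAsync("v2/Job"));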


@@ -0,0 +1,95 @@
using API.Schema;
using API.Schema.LibraryConnectors;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{v:apiVersion}/[controller]")]
public class LibraryConnectorController(PgsqlContext context) : Controller
{
/// <summary>
/// Gets all configured Library-Connectors
/// </summary>
/// <returns>Array of configured Library-Connectors</returns>
[HttpGet]
[ProducesResponseType<LibraryConnector[]>(Status200OK)]
public IActionResult GetAllConnectors()
{
LibraryConnector[] connectors = context.LibraryConnectors.ToArray();
return Ok(connectors);
}
/// <summary>
/// Returns Library-Connector with requested ID
/// </summary>
/// <param name="id">Library-Connector-ID</param>
/// <returns>Library-Connector</returns>
[HttpGet("{id}")]
[ProducesResponseType<LibraryConnector>(Status200OK)]
[ProducesResponseType(Status404NotFound)]
public IActionResult GetConnector(string id)
{
LibraryConnector? ret = context.LibraryConnectors.Find(id);
return (ret is not null) switch
{
true => Ok(ret),
false => NotFound()
};
}
/// <summary>
/// Creates a new Library-Connector
/// </summary>
/// <param name="libraryConnector">Library-Connector</param>
/// <returns>Nothing</returns>
[HttpPut]
[ProducesResponseType(Status200OK)]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult CreateConnector([FromBody]LibraryConnector libraryConnector)
{
try
{
context.LibraryConnectors.Add(libraryConnector);
context.SaveChanges();
return Created();
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
/// <summary>
/// Deletes the Library-Connector with the requested ID
/// </summary>
/// <param name="id">Library-Connector-ID</param>
/// <returns>Nothing</returns>
[HttpDelete("{id}")]
[ProducesResponseType(Status200OK)]
[ProducesResponseType(Status404NotFound)]
[ProducesResponseType(Status500InternalServerError)]
public IActionResult DeleteConnector(string id)
{
try
{
LibraryConnector? ret = context.LibraryConnectors.Find(id);
switch (ret is not null)
{
case true:
context.Remove(ret);
context.SaveChanges();
return Ok();
case false: return NotFound();
}
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
}


@@ -0,0 +1,163 @@
using API.Schema;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{v:apiVersion}/[controller]")]
public class MangaController(PgsqlContext context) : Controller
{
/// <summary>
/// Returns all cached Manga
/// </summary>
/// <returns>Array of Manga</returns>
[HttpGet]
[ProducesResponseType<Manga[]>(Status200OK)]
public IActionResult GetAllManga()
{
Manga[] ret = context.Manga.ToArray();
return Ok(ret);
}
/// <summary>
/// Returns all cached Manga with IDs
/// </summary>
/// <param name="ids">Array of Manga-IDs</param>
/// <returns>Array of Manga</returns>
[HttpPost("WithIDs")]
[ProducesResponseType<Manga[]>(Status200OK)]
public IActionResult GetManga([FromBody]string[] ids)
{
Manga[] ret = context.Manga.Where(m => ids.Contains(m.MangaId)).ToArray();
return Ok(ret);
}
/// <summary>
/// Return Manga with ID
/// </summary>
/// <param name="id">Manga-ID</param>
/// <returns>Manga</returns>
[HttpGet("{id}")]
[ProducesResponseType<Manga>(Status200OK)]
[ProducesResponseType(Status404NotFound)]
public IActionResult GetManga(string id)
{
Manga? ret = context.Manga.Find(id);
return (ret is not null) switch
{
true => Ok(ret),
false => NotFound()
};
}
/// <summary>
/// Delete Manga with ID
/// </summary>
/// <param name="id">Manga-ID</param>
/// <returns>Nothing</returns>
[HttpDelete("{id}")]
[ProducesResponseType(Status200OK)]
[ProducesResponseType(Status404NotFound)]
[ProducesResponseType(Status500InternalServerError)]
public IActionResult DeleteManga(string id)
{
try
{
Manga? ret = context.Manga.Find(id);
switch (ret is not null)
{
case true:
context.Remove(ret);
context.SaveChanges();
return Ok();
case false: return NotFound();
}
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
/// <summary>
/// Returns URL of Cover of Manga
/// </summary>
/// <param name="id">Manga-ID</param>
/// <returns>URL of Cover</returns>
[HttpGet("{id}/Cover")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetCover(string id)
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Returns all Chapters of Manga
/// </summary>
/// <param name="id">Manga-ID</param>
/// <returns>Array of Chapters</returns>
[HttpGet("{id}/Chapters")]
[ProducesResponseType<Chapter[]>(Status200OK)]
[ProducesResponseType<string>(Status404NotFound)]
public IActionResult GetChapters(string id)
{
Manga? m = context.Manga.Find(id);
if (m is null)
return NotFound("Manga could not be found");
Chapter[] ret = context.Chapters.Where(c => c.ParentManga.MangaId == m.MangaId).ToArray();
return Ok(ret);
}
/// <summary>
/// Returns the latest Chapter of requested Manga
/// </summary>
/// <param name="id">Manga-ID</param>
/// <returns>Latest Chapter</returns>
[HttpGet("{id}/Chapter/Latest")]
[ProducesResponseType<Chapter>(Status200OK)]
[ProducesResponseType<string>(Status404NotFound)]
public IActionResult GetLatestChapter(string id)
{
Manga? m = context.Manga.Find(id);
if (m is null)
return NotFound("Manga could not be found");
List<Chapter> chapters = context.Chapters.Where(c => c.ParentManga.MangaId == m.MangaId).ToList();
Chapter? max = chapters.Max();
if (max is null)
return NotFound("Chapter could not be found");
return Ok(max);
}
/// <summary>
/// Configure the cut-off for Manga
/// </summary>
/// <remarks>This is important for the DownloadNewChapters-Job</remarks>
/// <param name="id">Manga-ID</param>
/// <returns>Nothing</returns>
[HttpPatch("{id}/IgnoreChaptersBefore")]
[ProducesResponseType<float>(Status200OK)]
public IActionResult IgnoreChaptersBefore(string id)
{
Manga? m = context.Manga.Find(id);
if (m is null)
return NotFound("Manga could not be found");
return Ok(m.IgnoreChapterBefore);
}
/// <summary>
/// Move the Directory the .cbz-files are located in
/// </summary>
/// <param name="id">Manga-ID</param>
/// <param name="folder">New Directory-Path</param>
/// <returns>Nothing</returns>
[HttpPost("{id}/MoveFolder")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult MoveFolder(string id, [FromBody]string folder)
{
return StatusCode(500, "Not implemented"); //TODO
}
}


@@ -0,0 +1,26 @@
using API.Schema;
using API.Schema.MangaConnectors;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{v:apiVersion}")]
public class MiscController(PgsqlContext context) : Controller
{
/// <summary>
/// Get all available Connectors (Scanlation-Sites)
/// </summary>
/// <returns>Array of MangaConnector</returns>
[HttpGet("GetConnectors")]
[ProducesResponseType<MangaConnector[]>(Status200OK)]
public IActionResult GetConnectors()
{
MangaConnector[] connectors = context.MangaConnectors.ToArray();
return Ok(connectors);
}
}


@@ -0,0 +1,95 @@
using API.Schema;
using API.Schema.NotificationConnectors;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{v:apiVersion}/[controller]")]
public class NotificationConnectorController(PgsqlContext context) : Controller
{
/// <summary>
/// Gets all configured Notification-Connectors
/// </summary>
/// <returns>Array of configured Notification-Connectors</returns>
[HttpGet]
[ProducesResponseType<NotificationConnector[]>(Status200OK)]
public IActionResult GetAllConnectors()
{
NotificationConnector[] ret = context.NotificationConnectors.ToArray();
return Ok(ret);
}
/// <summary>
/// Returns Notification-Connector with requested ID
/// </summary>
/// <param name="id">Notification-Connector-ID</param>
/// <returns>Notification-Connector</returns>
[HttpGet("{id}")]
[ProducesResponseType<NotificationConnector>(Status200OK)]
[ProducesResponseType(Status404NotFound)]
public IActionResult GetConnector(string id)
{
NotificationConnector? ret = context.NotificationConnectors.Find(id);
return (ret is not null) switch
{
true => Ok(ret),
false => NotFound()
};
}
/// <summary>
/// Creates a new Notification-Connector
/// </summary>
/// <param name="notificationConnector">Notification-Connector</param>
/// <returns>Nothing</returns>
[HttpPut]
[ProducesResponseType<NotificationConnector[]>(Status200OK)]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult CreateConnector([FromBody]NotificationConnector notificationConnector)
{
try
{
context.NotificationConnectors.Add(notificationConnector);
context.SaveChanges();
return Created();
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
/// <summary>
/// Deletes the Notification-Connector with the requested ID
/// </summary>
/// <param name="id">Notification-Connector-ID</param>
/// <returns>Nothing</returns>
[HttpDelete("{id}")]
[ProducesResponseType(Status200OK)]
[ProducesResponseType(Status404NotFound)]
[ProducesResponseType(Status500InternalServerError)]
public IActionResult DeleteConnector(string id)
{
try
{
NotificationConnector? ret = context.NotificationConnectors.Find(id);
switch (ret is not null)
{
case true:
context.Remove(ret);
context.SaveChanges();
return Ok();
case false: return NotFound();
}
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
}


@@ -0,0 +1,150 @@
using API.Schema;
using API.Schema.MangaConnectors;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{v:apiVersion}/[controller]")]
public class SearchController(PgsqlContext context) : Controller
{
/// <summary>
/// Initiate a search for a Manga on all Connectors
/// </summary>
/// <param name="name">Name/Title of the Manga</param>
/// <returns>Array of Manga</returns>
[HttpPost("{name}")]
[ProducesResponseType<Manga[]>(Status500InternalServerError)]
public IActionResult SearchMangaGlobal(string name)
{
List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> allManga = new();
foreach (MangaConnector contextMangaConnector in context.MangaConnectors)
allManga.AddRange(contextMangaConnector.GetManga(name));
List<Manga> retMangas = new();
foreach ((Manga? manga, List<Author>? authors, List<MangaTag>? tags, List<Link>? links, List<MangaAltTitle>? altTitles) in allManga)
{
try
{
Manga? add = AddMangaToContext(manga, authors, tags, links, altTitles);
if(add is not null)
retMangas.Add(add);
}
catch (DbUpdateException)
{
return StatusCode(500, new ProblemResponse("An error occurred while processing your request."));
}
}
return Ok(retMangas.ToArray());
}
/// <summary>
/// Initiate a search for a Manga on a specific Connector
/// </summary>
/// <param name="id">Manga-Connector-ID</param>
/// <param name="name">Name/Title of the Manga</param>
/// <returns>Manga</returns>
[HttpPost("{id}/{name}")]
[ProducesResponseType<Manga[]>(Status200OK)]
[ProducesResponseType<ProblemResponse>(Status404NotFound)]
[ProducesResponseType<ProblemResponse>(Status500InternalServerError)]
public IActionResult SearchManga(string id, string name)
{
MangaConnector? connector = context.MangaConnectors.Find(id);
if (connector is null)
return NotFound(new ProblemResponse("Connector not found."));
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] mangas = connector.GetManga(name);
List<Manga> retMangas = new();
foreach ((Manga? manga, List<Author>? authors, List<MangaTag>? tags, List<Link>? links, List<MangaAltTitle>? altTitles) in mangas)
{
try
{
Manga? add = AddMangaToContext(manga, authors, tags, links, altTitles);
if(add is not null)
retMangas.Add(add);
}
catch (DbUpdateException e)
{
return StatusCode(500, new ProblemResponse("An error occurred while processing your request.", e.Message));
}
}
return Ok(retMangas.ToArray());
}
private Manga? AddMangaToContext(Manga? manga, List<Author>? authors, List<MangaTag>? tags, List<Link>? links,
List<MangaAltTitle>? altTitles)
{
if (manga is null)
return null;
Manga? existing = context.Manga.FirstOrDefault(m =>
m.MangaConnector == manga.MangaConnector && m.ConnectorId == manga.ConnectorId);
if (tags is not null)
{
IEnumerable<MangaTag> mergedTags = tags.Select(mt =>
{
MangaTag? inDb = context.Tags.FirstOrDefault(t => t.Equals(mt));
return inDb ?? mt;
});
manga.Tags = mergedTags.ToList();
IEnumerable<MangaTag> newTags = manga.Tags.Where(mt => !context.Tags.Any(t => t.Tag.Equals(mt.Tag)));
context.Tags.AddRange(newTags);
}
if (authors is not null)
{
IEnumerable<Author> mergedAuthors = authors.Select(ma =>
{
Author? inDb = context.Authors.FirstOrDefault(a => a.AuthorName == ma.AuthorName);
return inDb ?? ma;
});
manga.Authors = mergedAuthors.ToList();
IEnumerable<Author> newAuthors = manga.Authors.Where(ma => !context.Authors.Any(a =>
a.AuthorName == ma.AuthorName));
context.Authors.AddRange(newAuthors);
}
if (links is not null)
{
IEnumerable<Link> mergedLinks = links.Select(ml =>
{
Link? inDb = context.Link.FirstOrDefault(l =>
l.LinkProvider == ml.LinkProvider && l.LinkUrl == ml.LinkUrl);
return inDb ?? ml;
});
manga.Links = mergedLinks.ToList();
IEnumerable<Link> newLinks = manga.Links.Where(ml => !context.Link.Any(l =>
l.LinkProvider == ml.LinkProvider && l.LinkUrl == ml.LinkUrl));
context.Link.AddRange(newLinks);
}
if (altTitles is not null)
{
IEnumerable<MangaAltTitle> mergedAltTitles = altTitles.Select(mat =>
{
MangaAltTitle? inDb = context.AltTitles.FirstOrDefault(at =>
at.Language == mat.Language && at.Title == mat.Title);
return inDb ?? mat;
});
manga.AltTitles = mergedAltTitles.ToList();
IEnumerable<MangaAltTitle> newAltTitles = manga.AltTitles.Where(mat =>
!context.AltTitles.Any(at => at.Language == mat.Language && at.Title == mat.Title));
context.AltTitles.AddRange(newAltTitles);
}
existing?.UpdateWithInfo(manga);
if(existing is not null)
context.Manga.Update(existing);
else
context.Manga.Add(manga);
context.SaveChanges();
return existing ?? manga;
}
}
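Stripped of EF Core, the merge logic in AddMangaToContext boils down to "reuse the row the context already tracks, otherwise take the incoming value, and only add what is genuinely new". A small in-memory sketch with plain tag names (hypothetical data; a List stands in for context.Tags):
using System;
using System.Collections.Generic;
using System.Linq;
List<string> knownTags = new() { "Action" };              // plays the role of context.Tags
List<string> incoming  = new() { "Action", "Isekai" };    // tags returned by a connector
// Prefer the already-known value, otherwise keep the incoming one...
List<string> merged  = incoming.Select(t => knownTags.FirstOrDefault(k => k == t) ?? t).ToList();
// ...and only the genuinely new entries would be added to the database.
List<string> newTags = merged.Where(t => !knownTags.Contains(t)).ToList();
Console.WriteLine(string.Join(", ", newTags));            // Isekai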


@@ -0,0 +1,161 @@
using API.Schema;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{v:apiVersion}/[controller]")]
public class SettingsController(PgsqlContext context) : Controller
{
/// <summary>
/// Get all Settings
/// </summary>
/// <returns></returns>
[HttpGet]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetSettings()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Get the current UserAgent used by Tranga
/// </summary>
/// <returns>UserAgent as string</returns>
[HttpGet("UserAgent")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetUserAgent()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Set a new UserAgent
/// </summary>
/// <returns>Nothing</returns>
[HttpPatch("UserAgent")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult SetUserAgent()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Reset the UserAgent to default
/// </summary>
/// <returns>Nothing</returns>
[HttpDelete("UserAgent")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult ResetUserAgent()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Get all Request-Limits
/// </summary>
/// <returns></returns>
[HttpGet("RequestLimits")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetRequestLimits()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Update all Request-Limits to new values
/// </summary>
/// <returns>Nothing</returns>
[HttpPatch("RequestLimits")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult SetRequestLimits()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Reset all Request-Limits
/// </summary>
/// <returns>Nothing</returns>
[HttpDelete("RequestLimits")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult ResetRequestLimits()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Returns Level of Image-Compression for Images
/// </summary>
/// <returns></returns>
[HttpGet("ImageCompression")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetImageCompression()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Set the Image-Compression-Level for Images
/// </summary>
/// <param name="percentage">100 to disable, 0-99 for JPEG compression-Level</param>
/// <returns>Nothing</returns>
[HttpPatch("ImageCompression")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult SetImageCompression(int percentage)
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Get state of Black/White-Image setting
/// </summary>
/// <returns>True if enabled</returns>
[HttpGet("BWImages")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetBwImagesToggle()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Enable/Disable conversion of Images to Black and White
/// </summary>
/// <param name="enabled">true to enable</param>
/// <returns>Nothing</returns>
[HttpPatch("BWImages")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult SetBwImagesToggle(bool enabled)
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Get state of April Fools Mode
/// </summary>
/// <remarks>April Fools Mode disables all downloads on April 1st</remarks>
/// <returns>True if enabled</returns>
[HttpGet("AprilFoolsMode")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetAprilFoolsMode()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Enable/Disable April Fools Mode
/// </summary>
/// <remarks>April Fools Mode disables all downloads on April 1st</remarks>
/// <param name="enabled">true to enable</param>
/// <returns>Nothing</returns>
[HttpPatch("AprilFoolsMode")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult SetAprilFoolsMode(bool enabled)
{
return StatusCode(500, "Not implemented"); //TODO
}
}


@@ -0,0 +1,110 @@
using System.Net;
using System.Text;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using PuppeteerSharp;
namespace API.MangaDownloadClients;
internal class ChromiumDownloadClient : DownloadClient
{
private static IBrowser? _browser;
private const int StartTimeoutMs = 10000;
private readonly HttpDownloadClient _httpDownloadClient;
private static async Task<IBrowser> StartBrowser()
{
return await Puppeteer.LaunchAsync(new LaunchOptions
{
Headless = true,
Args = new [] {
"--disable-gpu",
"--disable-dev-shm-usage",
"--disable-setuid-sandbox",
"--no-sandbox"},
Timeout = StartTimeoutMs
}, new LoggerFactory([new LogProvider()])); //TODO
}
private class LogProvider : ILoggerProvider
{
//TODO
public void Dispose() { }
public ILogger CreateLogger(string categoryName) => new Logger();
}
private class Logger : ILogger
{
public Logger() : base() { }
public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)
{
if (logLevel <= LogLevel.Information)
return;
//TODO
}
public bool IsEnabled(LogLevel logLevel) => true;
public IDisposable? BeginScope<TState>(TState state) where TState : notnull => null;
}
public ChromiumDownloadClient()
{
_httpDownloadClient = new();
if(_browser is null)
_browser = StartBrowser().Result;
}
private readonly Regex _imageUrlRex = new(@"https?:\/\/.*\.(?:p?jpe?g|gif|a?png|bmp|avif|webp)(\?.*)?");
internal override RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null)
{
return _imageUrlRex.IsMatch(url)
? _httpDownloadClient.MakeRequestInternal(url, referrer)
: MakeRequestBrowser(url, referrer, clickButton);
}
private RequestResult MakeRequestBrowser(string url, string? referrer = null, string? clickButton = null)
{
IPage page = _browser.NewPageAsync().Result;
page.DefaultTimeout = 10000;
IResponse response;
try
{
response = page.GoToAsync(url, WaitUntilNavigation.Networkidle0).Result;
}
catch (Exception e)
{
page.CloseAsync();
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
}
Stream stream = Stream.Null;
HtmlDocument? document = null;
if (response.Headers.TryGetValue("Content-Type", out string? content))
{
if (content.Contains("text/html"))
{
if (clickButton is not null && page.QuerySelectorAsync(clickButton).Result is not null)
page.ClickAsync(clickButton).Wait();
string htmlString = page.GetContentAsync().Result;
stream = new MemoryStream(Encoding.Default.GetBytes(htmlString));
document = new ();
document.LoadHtml(htmlString);
}else if (content.Contains("image"))
{
stream = new MemoryStream(response.BufferAsync().Result);
}
}
else
{
page.CloseAsync();
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
}
page.CloseAsync();
return new RequestResult(response.Status, document, stream, false, "");
}
}
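Worth noting: MakeRequestInternal above only spins up a browser page when the URL does not look like a direct image link; everything matching _imageUrlRex goes through the plain HttpDownloadClient. A quick check of that pattern (the sample URLs are made up for illustration):
using System;
using System.Text.RegularExpressions;
// Same pattern as _imageUrlRex above; only the URLs are invented.
Regex imageUrlRex = new(@"https?:\/\/.*\.(?:p?jpe?g|gif|a?png|bmp|avif|webp)(\?.*)?");
Console.WriteLine(imageUrlRex.IsMatch("https://cdn.example.com/chapter/page-01.webp"));    // True  -> HttpDownloadClient
Console.WriteLine(imageUrlRex.IsMatch("https://example.com/title/some-manga/chapter-12")); // False -> headless browser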


@@ -0,0 +1,42 @@
using System.Net;
using API.Schema;
namespace API.MangaDownloadClients;
internal abstract class DownloadClient
{
private readonly Dictionary<RequestType, DateTime> _lastExecutedRateLimit;
protected DownloadClient()
{
this._lastExecutedRateLimit = new();
}
public RequestResult MakeRequest(string url, RequestType requestType, string? referrer = null, string? clickButton = null)
{
if (!TrangaSettings.requestLimits.ContainsKey(requestType))
{
return new RequestResult(HttpStatusCode.NotAcceptable, null, Stream.Null);
}
int rateLimit = TrangaSettings.userAgent == TrangaSettings.DefaultUserAgent
? TrangaSettings.DefaultRequestLimits[requestType]
: TrangaSettings.requestLimits[requestType];
TimeSpan timeBetweenRequests = TimeSpan.FromMinutes(1).Divide(rateLimit);
_lastExecutedRateLimit.TryAdd(requestType, DateTime.Now.Subtract(timeBetweenRequests));
TimeSpan rateLimitTimeout = timeBetweenRequests.Subtract(DateTime.Now.Subtract(_lastExecutedRateLimit[requestType]));
if (rateLimitTimeout > TimeSpan.Zero)
{
Thread.Sleep(rateLimitTimeout);
}
RequestResult result = MakeRequestInternal(url, referrer, clickButton);
_lastExecutedRateLimit[requestType] = DateTime.Now;
return result;
}
internal abstract RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null);
}
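The throttling in MakeRequest above spaces successive calls of the same RequestType at least one minute divided by the configured limit apart. A standalone sketch of that arithmetic (the limit of 250 requests per minute is an assumed example value, not a Tranga default):
using System;
using System.Threading;
// Illustrative only: how the wait between two requests of one RequestType falls out.
int rateLimit = 250;                                                        // assumed requests per minute
TimeSpan timeBetweenRequests = TimeSpan.FromMinutes(1).Divide(rateLimit);   // 240 ms
DateTime lastExecuted = DateTime.Now.AddMilliseconds(-100);                 // pretend the last call was 100 ms ago
TimeSpan rateLimitTimeout = timeBetweenRequests.Subtract(DateTime.Now.Subtract(lastExecuted));
if (rateLimitTimeout > TimeSpan.Zero)
    Thread.Sleep(rateLimitTimeout);                                         // sleeps roughly the remaining 140 ms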


@@ -0,0 +1,73 @@
using System.Net;
using API.Schema;
using HtmlAgilityPack;
namespace API.MangaDownloadClients;
internal class HttpDownloadClient : DownloadClient
{
private static readonly HttpClient Client = new()
{
Timeout = TimeSpan.FromSeconds(10)
};
public HttpDownloadClient()
{
Client.DefaultRequestHeaders.TryAddWithoutValidation("User-Agent", TrangaSettings.userAgent);
}
internal override RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null)
{
//TODO
//if (clickButton is not null)
//Log("Can not click button on static site.");
HttpResponseMessage? response = null;
while (response is null)
{
HttpRequestMessage requestMessage = new(HttpMethod.Get, url);
if (referrer is not null)
requestMessage.Headers.Referrer = new Uri(referrer);
//Log($"Requesting {requestType} {url}");
try
{
response = Client.Send(requestMessage);
}
catch (Exception e)
{
switch (e)
{
case TaskCanceledException:
return new RequestResult(HttpStatusCode.RequestTimeout, null, Stream.Null);
case HttpRequestException:
return new RequestResult(HttpStatusCode.BadRequest, null, Stream.Null);
}
}
}
if (!response.IsSuccessStatusCode)
{
return new RequestResult(response.StatusCode, null, Stream.Null);
}
Stream stream = response.Content.ReadAsStream();
HtmlDocument? document = null;
if (response.Content.Headers.ContentType?.MediaType == "text/html")
{
StreamReader reader = new (stream);
document = new ();
document.LoadHtml(reader.ReadToEnd());
stream.Position = 0;
}
// Request has been redirected to another page. For example, it redirects directly to the results when there is only 1 result
if (response.RequestMessage is not null && response.RequestMessage.RequestUri is not null)
{
return new RequestResult(response.StatusCode, document, stream, true,
response.RequestMessage.RequestUri.AbsoluteUri);
}
return new RequestResult(response.StatusCode, document, stream);
}
}


@@ -0,0 +1,27 @@
using System.Net;
using HtmlAgilityPack;
namespace API.MangaDownloadClients;
public struct RequestResult
{
public HttpStatusCode statusCode { get; }
public Stream result { get; }
public bool hasBeenRedirected { get; }
public string? redirectedToUrl { get; }
public HtmlDocument? htmlDocument { get; }
public RequestResult(HttpStatusCode statusCode, HtmlDocument? htmlDocument, Stream result)
{
this.statusCode = statusCode;
this.htmlDocument = htmlDocument;
this.result = result;
}
public RequestResult(HttpStatusCode statusCode, HtmlDocument? htmlDocument, Stream result, bool hasBeenRedirected, string redirectedTo)
: this(statusCode, htmlDocument, result)
{
this.hasBeenRedirected = hasBeenRedirected;
redirectedToUrl = redirectedTo;
}
}


@@ -0,0 +1,11 @@
namespace API.MangaDownloadClients;
public enum RequestType : byte
{
Default = 0,
MangaDexFeed = 1,
MangaImage = 2,
MangaCover = 3,
MangaDexImage = 5,
MangaInfo = 6
}


@@ -0,0 +1,781 @@
// <auto-generated />
using System;
using API.Schema;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace API.Migrations
{
[DbContext(typeof(PgsqlContext))]
[Migration("20241201235443_Initial")]
partial class Initial
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.0")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("API.Schema.Author", b =>
{
b.Property<string>("AuthorId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("AuthorName")
.IsRequired()
.HasColumnType("text");
b.HasKey("AuthorId");
b.ToTable("Authors");
});
modelBuilder.Entity("API.Schema.Chapter", b =>
{
b.Property<string>("ChapterId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("ArchiveFileName")
.IsRequired()
.HasColumnType("text");
b.Property<string>("ChapterIds")
.IsRequired()
.HasColumnType("text");
b.Property<float>("ChapterNumber")
.HasColumnType("real");
b.Property<bool>("Downloaded")
.HasColumnType("boolean");
b.Property<string>("ParentMangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("Title")
.HasColumnType("text");
b.Property<string>("Url")
.IsRequired()
.HasColumnType("text");
b.Property<float?>("VolumeNumber")
.HasColumnType("real");
b.HasKey("ChapterId");
b.HasIndex("ParentMangaId");
b.ToTable("Chapters");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.Property<string>("JobId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.PrimitiveCollection<string[]>("DependsOnJobIds")
.HasMaxLength(64)
.HasColumnType("text[]");
b.Property<string>("JobId1")
.HasColumnType("character varying(64)");
b.Property<byte>("JobType")
.HasColumnType("smallint");
b.Property<DateTime>("LastExecution")
.HasColumnType("timestamp with time zone");
b.Property<DateTime>("NextExecution")
.HasColumnType("timestamp with time zone");
b.Property<string>("ParentJobId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<decimal>("RecurrenceMs")
.HasColumnType("numeric(20,0)");
b.Property<int>("state")
.HasColumnType("integer");
b.HasKey("JobId");
b.HasIndex("JobId1");
b.ToTable("Jobs");
b.HasDiscriminator<byte>("JobType");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("API.Schema.LibraryConnectors.LibraryConnector", b =>
{
b.Property<string>("LibraryConnectorId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("Auth")
.IsRequired()
.HasColumnType("text");
b.Property<string>("BaseUrl")
.IsRequired()
.HasColumnType("text");
b.Property<byte>("LibraryType")
.HasColumnType("smallint");
b.HasKey("LibraryConnectorId");
b.ToTable("LibraryConnectors");
b.HasDiscriminator<byte>("LibraryType");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("API.Schema.Link", b =>
{
b.Property<string>("LinkId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("LinkIds")
.HasColumnType("text");
b.Property<string>("LinkProvider")
.IsRequired()
.HasColumnType("text");
b.Property<string>("LinkUrl")
.IsRequired()
.HasColumnType("text");
b.Property<string>("MangaId")
.IsRequired()
.HasColumnType("character varying(64)");
b.HasKey("LinkId");
b.HasIndex("MangaId");
b.ToTable("Link");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.Property<string>("MangaId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.PrimitiveCollection<string[]>("AltTitleIds")
.IsRequired()
.HasColumnType("text[]");
b.PrimitiveCollection<string[]>("AuthorIds")
.IsRequired()
.HasColumnType("text[]");
b.Property<string>("ConnectorId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("CoverFileNameInCache")
.HasColumnType("text");
b.Property<string>("CoverUrl")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Description")
.IsRequired()
.HasColumnType("text");
b.Property<string>("FolderName")
.IsRequired()
.HasColumnType("text");
b.Property<float>("IgnoreChapterBefore")
.HasColumnType("real");
b.Property<string>("LatestChapterAvailableId")
.HasColumnType("character varying(64)");
b.Property<string>("LatestChapterDownloadedId")
.HasColumnType("character varying(64)");
b.PrimitiveCollection<string[]>("LinkIds")
.IsRequired()
.HasColumnType("text[]");
b.Property<string>("MangaConnectorName")
.IsRequired()
.HasColumnType("character varying(32)");
b.Property<string>("MangaIds")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("text");
b.Property<string>("OriginalLanguage")
.HasColumnType("text");
b.Property<byte>("ReleaseStatus")
.HasColumnType("smallint");
b.PrimitiveCollection<string[]>("TagIds")
.IsRequired()
.HasColumnType("text[]");
b.Property<long>("year")
.HasColumnType("bigint");
b.HasKey("MangaId");
b.HasIndex("LatestChapterAvailableId")
.IsUnique();
b.HasIndex("LatestChapterDownloadedId")
.IsUnique();
b.HasIndex("MangaConnectorName");
b.ToTable("Manga");
});
modelBuilder.Entity("API.Schema.MangaAltTitle", b =>
{
b.Property<string>("AltTitleId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("AltTitleIds")
.HasColumnType("text");
b.Property<string>("Language")
.IsRequired()
.HasMaxLength(8)
.HasColumnType("character varying(8)");
b.Property<string>("MangaId")
.IsRequired()
.HasColumnType("character varying(64)");
b.Property<string>("Title")
.IsRequired()
.HasColumnType("text");
b.HasKey("AltTitleId");
b.HasIndex("MangaId");
b.ToTable("AltTitles");
});
modelBuilder.Entity("API.Schema.MangaConnector", b =>
{
b.Property<string>("Name")
.HasMaxLength(32)
.HasColumnType("character varying(32)");
b.PrimitiveCollection<string[]>("BaseUris")
.IsRequired()
.HasColumnType("text[]");
b.PrimitiveCollection<string[]>("SupportedLanguages")
.IsRequired()
.HasColumnType("text[]");
b.HasKey("Name");
b.ToTable("MangaConnectors");
});
modelBuilder.Entity("API.Schema.MangaTag", b =>
{
b.Property<string>("Tag")
.HasColumnType("text");
b.HasKey("Tag");
b.ToTable("Tags");
});
modelBuilder.Entity("API.Schema.NotificationConnectors.NotificationConnector", b =>
{
b.Property<string>("NotificationConnectorId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<byte>("NotificationConnectorType")
.HasColumnType("smallint");
b.HasKey("NotificationConnectorId");
b.ToTable("NotificationConnectors");
b.HasDiscriminator<byte>("NotificationConnectorType");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("MangaAuthor", b =>
{
b.Property<string>("MangaId")
.HasColumnType("character varying(64)");
b.Property<string>("AuthorId")
.HasColumnType("character varying(64)");
b.Property<string>("AuthorIds")
.HasColumnType("text");
b.Property<string>("MangaIds")
.HasColumnType("text");
b.HasKey("MangaId", "AuthorId");
b.HasIndex("AuthorId");
b.ToTable("MangaAuthor");
});
modelBuilder.Entity("MangaTag", b =>
{
b.Property<string>("MangaId")
.HasColumnType("character varying(64)");
b.Property<string>("Tag")
.HasColumnType("text");
b.Property<string>("MangaIds")
.IsRequired()
.HasColumnType("text");
b.Property<string>("TagIds")
.HasColumnType("text");
b.HasKey("MangaId", "Tag");
b.HasIndex("MangaIds");
b.HasIndex("Tag");
b.ToTable("MangaTag");
});
modelBuilder.Entity("API.Schema.Jobs.CreateArchiveJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("ChapterId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("ComicInfoLocation")
.IsRequired()
.HasColumnType("text");
b.Property<string>("ImagesLocation")
.IsRequired()
.HasColumnType("text");
b.HasIndex("ChapterId");
b.ToTable("Jobs", t =>
{
t.Property("ChapterId")
.HasColumnName("CreateArchiveJob_ChapterId");
});
b.HasDiscriminator().HasValue((byte)4);
});
modelBuilder.Entity("API.Schema.Jobs.CreateComicInfoXmlJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("ChapterId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("Path")
.IsRequired()
.HasColumnType("text");
b.HasIndex("ChapterId");
b.ToTable("Jobs", t =>
{
t.Property("ChapterId")
.HasColumnName("CreateComicInfoXmlJob_ChapterId");
});
b.HasDiscriminator().HasValue((byte)6);
});
modelBuilder.Entity("API.Schema.Jobs.DownloadNewChaptersJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.HasDiscriminator().HasValue((byte)1);
});
modelBuilder.Entity("API.Schema.Jobs.DownloadSingleChapterJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("ChapterId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("ChapterId");
b.HasDiscriminator().HasValue((byte)0);
});
modelBuilder.Entity("API.Schema.Jobs.MoveFileOrFolderJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("FromLocation")
.IsRequired()
.HasColumnType("text");
b.Property<string>("ToLocation")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)3);
});
modelBuilder.Entity("API.Schema.Jobs.ProcessImagesJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<bool>("Bw")
.HasColumnType("boolean");
b.Property<int>("Compression")
.HasColumnType("integer");
b.Property<string>("Path")
.IsRequired()
.HasColumnType("text");
b.ToTable("Jobs", t =>
{
t.Property("Path")
.HasColumnName("ProcessImagesJob_Path");
});
b.HasDiscriminator().HasValue((byte)5);
});
modelBuilder.Entity("API.Schema.Jobs.SearchMangaJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaConnectorName")
.IsRequired()
.HasColumnType("text");
b.Property<string>("SearchString")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)7);
});
modelBuilder.Entity("API.Schema.Jobs.UpdateMetadataJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.ToTable("Jobs", t =>
{
t.Property("MangaId")
.HasColumnName("UpdateMetadataJob_MangaId");
});
b.HasDiscriminator().HasValue((byte)2);
});
modelBuilder.Entity("API.Schema.LibraryConnectors.Kavita", b =>
{
b.HasBaseType("API.Schema.LibraryConnectors.LibraryConnector");
b.HasDiscriminator().HasValue((byte)1);
});
modelBuilder.Entity("API.Schema.LibraryConnectors.Komga", b =>
{
b.HasBaseType("API.Schema.LibraryConnectors.LibraryConnector");
b.HasDiscriminator().HasValue((byte)0);
});
modelBuilder.Entity("API.Schema.NotificationConnectors.Gotify", b =>
{
b.HasBaseType("API.Schema.NotificationConnectors.NotificationConnector");
b.Property<string>("AppToken")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Endpoint")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)0);
});
modelBuilder.Entity("API.Schema.NotificationConnectors.Lunasea", b =>
{
b.HasBaseType("API.Schema.NotificationConnectors.NotificationConnector");
b.Property<string>("Id")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)1);
});
modelBuilder.Entity("API.Schema.NotificationConnectors.Ntfy", b =>
{
b.HasBaseType("API.Schema.NotificationConnectors.NotificationConnector");
b.Property<string>("Auth")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Endpoint")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Topic")
.IsRequired()
.HasColumnType("text");
b.ToTable("NotificationConnectors", t =>
{
t.Property("Endpoint")
.HasColumnName("Ntfy_Endpoint");
});
b.HasDiscriminator().HasValue((byte)2);
});
modelBuilder.Entity("API.Schema.Chapter", b =>
{
b.HasOne("API.Schema.Manga", "ParentManga")
.WithMany("Chapters")
.HasForeignKey("ParentMangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("ParentManga");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.HasOne("API.Schema.Jobs.Job", null)
.WithMany("DependsOnJobs")
.HasForeignKey("JobId1");
});
modelBuilder.Entity("API.Schema.Link", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany("Links")
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.HasOne("API.Schema.Chapter", "LatestChapterAvailable")
.WithOne()
.HasForeignKey("API.Schema.Manga", "LatestChapterAvailableId");
b.HasOne("API.Schema.Chapter", "LatestChapterDownloaded")
.WithOne()
.HasForeignKey("API.Schema.Manga", "LatestChapterDownloadedId");
b.HasOne("API.Schema.MangaConnector", "MangaConnector")
.WithMany("Mangas")
.HasForeignKey("MangaConnectorName")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("LatestChapterAvailable");
b.Navigation("LatestChapterDownloaded");
b.Navigation("MangaConnector");
});
modelBuilder.Entity("API.Schema.MangaAltTitle", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany("AltTitles")
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("MangaAuthor", b =>
{
b.HasOne("API.Schema.Author", null)
.WithMany()
.HasForeignKey("AuthorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.Manga", null)
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("MangaTag", b =>
{
b.HasOne("API.Schema.Manga", null)
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.MangaTag", null)
.WithMany()
.HasForeignKey("MangaIds")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.MangaTag", null)
.WithMany()
.HasForeignKey("Tag")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("API.Schema.Jobs.CreateArchiveJob", b =>
{
b.HasOne("API.Schema.Chapter", "Chapter")
.WithMany()
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Schema.Jobs.CreateComicInfoXmlJob", b =>
{
b.HasOne("API.Schema.Chapter", "Chapter")
.WithMany()
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Schema.Jobs.DownloadNewChaptersJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Jobs.DownloadSingleChapterJob", b =>
{
b.HasOne("API.Schema.Chapter", "Chapter")
.WithMany()
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Schema.Jobs.UpdateMetadataJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.Navigation("DependsOnJobs");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.Navigation("AltTitles");
b.Navigation("Chapters");
b.Navigation("Links");
});
modelBuilder.Entity("API.Schema.MangaConnector", b =>
{
b.Navigation("Mangas");
});
#pragma warning restore 612, 618
}
}
}

@ -0,0 +1,447 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Migrations
{
/// <inheritdoc />
public partial class Initial : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "Authors",
columns: table => new
{
AuthorId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
AuthorName = table.Column<string>(type: "text", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Authors", x => x.AuthorId);
});
migrationBuilder.CreateTable(
name: "LibraryConnectors",
columns: table => new
{
LibraryConnectorId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
LibraryType = table.Column<byte>(type: "smallint", nullable: false),
BaseUrl = table.Column<string>(type: "text", nullable: false),
Auth = table.Column<string>(type: "text", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_LibraryConnectors", x => x.LibraryConnectorId);
});
migrationBuilder.CreateTable(
name: "MangaConnectors",
columns: table => new
{
Name = table.Column<string>(type: "character varying(32)", maxLength: 32, nullable: false),
SupportedLanguages = table.Column<string[]>(type: "text[]", nullable: false),
BaseUris = table.Column<string[]>(type: "text[]", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_MangaConnectors", x => x.Name);
});
migrationBuilder.CreateTable(
name: "NotificationConnectors",
columns: table => new
{
NotificationConnectorId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
NotificationConnectorType = table.Column<byte>(type: "smallint", nullable: false),
Endpoint = table.Column<string>(type: "text", nullable: true),
AppToken = table.Column<string>(type: "text", nullable: true),
Id = table.Column<string>(type: "text", nullable: true),
Ntfy_Endpoint = table.Column<string>(type: "text", nullable: true),
Auth = table.Column<string>(type: "text", nullable: true),
Topic = table.Column<string>(type: "text", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_NotificationConnectors", x => x.NotificationConnectorId);
});
migrationBuilder.CreateTable(
name: "Tags",
columns: table => new
{
Tag = table.Column<string>(type: "text", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Tags", x => x.Tag);
});
migrationBuilder.CreateTable(
name: "AltTitles",
columns: table => new
{
AltTitleId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
Language = table.Column<string>(type: "character varying(8)", maxLength: 8, nullable: false),
Title = table.Column<string>(type: "text", nullable: false),
MangaId = table.Column<string>(type: "character varying(64)", nullable: false),
AltTitleIds = table.Column<string>(type: "text", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_AltTitles", x => x.AltTitleId);
});
migrationBuilder.CreateTable(
name: "Chapters",
columns: table => new
{
ChapterId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
VolumeNumber = table.Column<float>(type: "real", nullable: true),
ChapterNumber = table.Column<float>(type: "real", nullable: false),
Url = table.Column<string>(type: "text", nullable: false),
Title = table.Column<string>(type: "text", nullable: true),
ArchiveFileName = table.Column<string>(type: "text", nullable: false),
Downloaded = table.Column<bool>(type: "boolean", nullable: false),
ParentMangaId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
ChapterIds = table.Column<string>(type: "text", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Chapters", x => x.ChapterId);
});
migrationBuilder.CreateTable(
name: "Manga",
columns: table => new
{
MangaId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
ConnectorId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
Name = table.Column<string>(type: "text", nullable: false),
Description = table.Column<string>(type: "text", nullable: false),
CoverUrl = table.Column<string>(type: "text", nullable: false),
CoverFileNameInCache = table.Column<string>(type: "text", nullable: true),
year = table.Column<long>(type: "bigint", nullable: false),
OriginalLanguage = table.Column<string>(type: "text", nullable: true),
ReleaseStatus = table.Column<byte>(type: "smallint", nullable: false),
FolderName = table.Column<string>(type: "text", nullable: false),
IgnoreChapterBefore = table.Column<float>(type: "real", nullable: false),
LatestChapterDownloadedId = table.Column<string>(type: "character varying(64)", nullable: true),
LatestChapterAvailableId = table.Column<string>(type: "character varying(64)", nullable: true),
MangaConnectorName = table.Column<string>(type: "character varying(32)", nullable: false),
AuthorIds = table.Column<string[]>(type: "text[]", nullable: false),
TagIds = table.Column<string[]>(type: "text[]", nullable: false),
LinkIds = table.Column<string[]>(type: "text[]", nullable: false),
AltTitleIds = table.Column<string[]>(type: "text[]", nullable: false),
MangaIds = table.Column<string>(type: "text", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Manga", x => x.MangaId);
table.ForeignKey(
name: "FK_Manga_Chapters_LatestChapterAvailableId",
column: x => x.LatestChapterAvailableId,
principalTable: "Chapters",
principalColumn: "ChapterId");
table.ForeignKey(
name: "FK_Manga_Chapters_LatestChapterDownloadedId",
column: x => x.LatestChapterDownloadedId,
principalTable: "Chapters",
principalColumn: "ChapterId");
table.ForeignKey(
name: "FK_Manga_MangaConnectors_MangaConnectorName",
column: x => x.MangaConnectorName,
principalTable: "MangaConnectors",
principalColumn: "Name",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "Jobs",
columns: table => new
{
JobId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
ParentJobId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: true),
DependsOnJobIds = table.Column<string[]>(type: "text[]", maxLength: 64, nullable: true),
JobType = table.Column<byte>(type: "smallint", nullable: false),
RecurrenceMs = table.Column<decimal>(type: "numeric(20,0)", nullable: false),
LastExecution = table.Column<DateTime>(type: "timestamp with time zone", nullable: false),
NextExecution = table.Column<DateTime>(type: "timestamp with time zone", nullable: false),
state = table.Column<int>(type: "integer", nullable: false),
JobId1 = table.Column<string>(type: "character varying(64)", nullable: true),
ImagesLocation = table.Column<string>(type: "text", nullable: true),
ComicInfoLocation = table.Column<string>(type: "text", nullable: true),
CreateArchiveJob_ChapterId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: true),
Path = table.Column<string>(type: "text", nullable: true),
CreateComicInfoXmlJob_ChapterId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: true),
MangaId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: true),
ChapterId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: true),
FromLocation = table.Column<string>(type: "text", nullable: true),
ToLocation = table.Column<string>(type: "text", nullable: true),
ProcessImagesJob_Path = table.Column<string>(type: "text", nullable: true),
Bw = table.Column<bool>(type: "boolean", nullable: true),
Compression = table.Column<int>(type: "integer", nullable: true),
SearchString = table.Column<string>(type: "text", nullable: true),
MangaConnectorName = table.Column<string>(type: "text", nullable: true),
UpdateMetadataJob_MangaId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_Jobs", x => x.JobId);
table.ForeignKey(
name: "FK_Jobs_Chapters_ChapterId",
column: x => x.ChapterId,
principalTable: "Chapters",
principalColumn: "ChapterId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_Jobs_Chapters_CreateArchiveJob_ChapterId",
column: x => x.CreateArchiveJob_ChapterId,
principalTable: "Chapters",
principalColumn: "ChapterId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_Jobs_Chapters_CreateComicInfoXmlJob_ChapterId",
column: x => x.CreateComicInfoXmlJob_ChapterId,
principalTable: "Chapters",
principalColumn: "ChapterId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_Jobs_Jobs_JobId1",
column: x => x.JobId1,
principalTable: "Jobs",
principalColumn: "JobId");
table.ForeignKey(
name: "FK_Jobs_Manga_MangaId",
column: x => x.MangaId,
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_Jobs_Manga_UpdateMetadataJob_MangaId",
column: x => x.UpdateMetadataJob_MangaId,
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "Link",
columns: table => new
{
LinkId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
LinkProvider = table.Column<string>(type: "text", nullable: false),
LinkUrl = table.Column<string>(type: "text", nullable: false),
MangaId = table.Column<string>(type: "character varying(64)", nullable: false),
LinkIds = table.Column<string>(type: "text", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_Link", x => x.LinkId);
table.ForeignKey(
name: "FK_Link_Manga_MangaId",
column: x => x.MangaId,
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "MangaAuthor",
columns: table => new
{
MangaId = table.Column<string>(type: "character varying(64)", nullable: false),
AuthorId = table.Column<string>(type: "character varying(64)", nullable: false),
AuthorIds = table.Column<string>(type: "text", nullable: true),
MangaIds = table.Column<string>(type: "text", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_MangaAuthor", x => new { x.MangaId, x.AuthorId });
table.ForeignKey(
name: "FK_MangaAuthor_Authors_AuthorId",
column: x => x.AuthorId,
principalTable: "Authors",
principalColumn: "AuthorId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_MangaAuthor_Manga_MangaId",
column: x => x.MangaId,
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "MangaTag",
columns: table => new
{
MangaId = table.Column<string>(type: "character varying(64)", nullable: false),
Tag = table.Column<string>(type: "text", nullable: false),
MangaIds = table.Column<string>(type: "text", nullable: false),
TagIds = table.Column<string>(type: "text", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_MangaTag", x => new { x.MangaId, x.Tag });
table.ForeignKey(
name: "FK_MangaTag_Manga_MangaId",
column: x => x.MangaId,
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_MangaTag_Tags_MangaIds",
column: x => x.MangaIds,
principalTable: "Tags",
principalColumn: "Tag",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_MangaTag_Tags_Tag",
column: x => x.Tag,
principalTable: "Tags",
principalColumn: "Tag",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateIndex(
name: "IX_AltTitles_MangaId",
table: "AltTitles",
column: "MangaId");
migrationBuilder.CreateIndex(
name: "IX_Chapters_ParentMangaId",
table: "Chapters",
column: "ParentMangaId");
migrationBuilder.CreateIndex(
name: "IX_Jobs_ChapterId",
table: "Jobs",
column: "ChapterId");
migrationBuilder.CreateIndex(
name: "IX_Jobs_CreateArchiveJob_ChapterId",
table: "Jobs",
column: "CreateArchiveJob_ChapterId");
migrationBuilder.CreateIndex(
name: "IX_Jobs_CreateComicInfoXmlJob_ChapterId",
table: "Jobs",
column: "CreateComicInfoXmlJob_ChapterId");
migrationBuilder.CreateIndex(
name: "IX_Jobs_JobId1",
table: "Jobs",
column: "JobId1");
migrationBuilder.CreateIndex(
name: "IX_Jobs_MangaId",
table: "Jobs",
column: "MangaId");
migrationBuilder.CreateIndex(
name: "IX_Jobs_UpdateMetadataJob_MangaId",
table: "Jobs",
column: "UpdateMetadataJob_MangaId");
migrationBuilder.CreateIndex(
name: "IX_Link_MangaId",
table: "Link",
column: "MangaId");
migrationBuilder.CreateIndex(
name: "IX_Manga_LatestChapterAvailableId",
table: "Manga",
column: "LatestChapterAvailableId",
unique: true);
migrationBuilder.CreateIndex(
name: "IX_Manga_LatestChapterDownloadedId",
table: "Manga",
column: "LatestChapterDownloadedId",
unique: true);
migrationBuilder.CreateIndex(
name: "IX_Manga_MangaConnectorName",
table: "Manga",
column: "MangaConnectorName");
migrationBuilder.CreateIndex(
name: "IX_MangaAuthor_AuthorId",
table: "MangaAuthor",
column: "AuthorId");
migrationBuilder.CreateIndex(
name: "IX_MangaTag_MangaIds",
table: "MangaTag",
column: "MangaIds");
migrationBuilder.CreateIndex(
name: "IX_MangaTag_Tag",
table: "MangaTag",
column: "Tag");
migrationBuilder.AddForeignKey(
name: "FK_AltTitles_Manga_MangaId",
table: "AltTitles",
column: "MangaId",
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
migrationBuilder.AddForeignKey(
name: "FK_Chapters_Manga_ParentMangaId",
table: "Chapters",
column: "ParentMangaId",
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropForeignKey(
name: "FK_Chapters_Manga_ParentMangaId",
table: "Chapters");
migrationBuilder.DropTable(
name: "AltTitles");
migrationBuilder.DropTable(
name: "Jobs");
migrationBuilder.DropTable(
name: "LibraryConnectors");
migrationBuilder.DropTable(
name: "Link");
migrationBuilder.DropTable(
name: "MangaAuthor");
migrationBuilder.DropTable(
name: "MangaTag");
migrationBuilder.DropTable(
name: "NotificationConnectors");
migrationBuilder.DropTable(
name: "Authors");
migrationBuilder.DropTable(
name: "Tags");
migrationBuilder.DropTable(
name: "Manga");
migrationBuilder.DropTable(
name: "Chapters");
migrationBuilder.DropTable(
name: "MangaConnectors");
}
}
}

@ -0,0 +1,688 @@
// <auto-generated />
using System;
using API.Schema;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace API.Migrations
{
[DbContext(typeof(PgsqlContext))]
partial class PgsqlContextModelSnapshot : ModelSnapshot
{
protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.0")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("API.Schema.Author", b =>
{
b.Property<string>("AuthorId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("AuthorName")
.IsRequired()
.HasColumnType("text");
b.HasKey("AuthorId");
b.ToTable("Authors");
});
modelBuilder.Entity("API.Schema.Chapter", b =>
{
b.Property<string>("ChapterId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("ArchiveFileName")
.IsRequired()
.HasColumnType("text");
b.Property<float>("ChapterNumber")
.HasColumnType("real");
b.Property<bool>("Downloaded")
.HasColumnType("boolean");
b.Property<string>("ParentMangaId")
.IsRequired()
.HasColumnType("character varying(64)");
b.Property<string>("Title")
.HasColumnType("text");
b.Property<string>("Url")
.IsRequired()
.HasColumnType("text");
b.Property<float?>("VolumeNumber")
.HasColumnType("real");
b.HasKey("ChapterId");
b.HasIndex("ParentMangaId");
b.ToTable("Chapters");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.Property<string>("JobId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.PrimitiveCollection<string[]>("DependsOnJobsIds")
.HasMaxLength(64)
.HasColumnType("text[]");
b.Property<string>("JobId1")
.HasColumnType("character varying(64)");
b.Property<byte>("JobType")
.HasColumnType("smallint");
b.Property<DateTime>("LastExecution")
.HasColumnType("timestamp with time zone");
b.Property<string>("ParentJobId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<decimal>("RecurrenceMs")
.HasColumnType("numeric(20,0)");
b.Property<int>("state")
.HasColumnType("integer");
b.HasKey("JobId");
b.HasIndex("JobId1");
b.HasIndex("ParentJobId");
b.ToTable("Jobs");
b.HasDiscriminator<byte>("JobType");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("API.Schema.LibraryConnectors.LibraryConnector", b =>
{
b.Property<string>("LibraryConnectorId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("Auth")
.IsRequired()
.HasColumnType("text");
b.Property<string>("BaseUrl")
.IsRequired()
.HasColumnType("text");
b.Property<byte>("LibraryType")
.HasColumnType("smallint");
b.HasKey("LibraryConnectorId");
b.ToTable("LibraryConnectors");
b.HasDiscriminator<byte>("LibraryType");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("API.Schema.Link", b =>
{
b.Property<string>("LinkId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("LinkProvider")
.IsRequired()
.HasColumnType("text");
b.Property<string>("LinkUrl")
.IsRequired()
.HasColumnType("text");
b.Property<string>("MangaId")
.HasColumnType("character varying(64)");
b.HasKey("LinkId");
b.HasIndex("MangaId");
b.ToTable("Link");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.Property<string>("MangaId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("ConnectorId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("CoverFileNameInCache")
.HasColumnType("text");
b.Property<string>("CoverUrl")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Description")
.IsRequired()
.HasColumnType("text");
b.Property<string>("FolderName")
.IsRequired()
.HasColumnType("text");
b.Property<float>("IgnoreChapterBefore")
.HasColumnType("real");
b.Property<string>("MangaConnectorId")
.IsRequired()
.HasColumnType("character varying(32)");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("text");
b.Property<string>("OriginalLanguage")
.HasColumnType("text");
b.Property<byte>("ReleaseStatus")
.HasColumnType("smallint");
b.Property<string>("WebsiteUrl")
.IsRequired()
.HasColumnType("text");
b.Property<long>("Year")
.HasColumnType("bigint");
b.HasKey("MangaId");
b.HasIndex("MangaConnectorId");
b.ToTable("Manga");
});
modelBuilder.Entity("API.Schema.MangaAltTitle", b =>
{
b.Property<string>("AltTitleId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("Language")
.IsRequired()
.HasMaxLength(8)
.HasColumnType("character varying(8)");
b.Property<string>("MangaId")
.HasColumnType("character varying(64)");
b.Property<string>("Title")
.IsRequired()
.HasColumnType("text");
b.HasKey("AltTitleId");
b.HasIndex("MangaId");
b.ToTable("AltTitles");
});
modelBuilder.Entity("API.Schema.MangaConnectors.MangaConnector", b =>
{
b.Property<string>("Name")
.HasMaxLength(32)
.HasColumnType("character varying(32)");
b.PrimitiveCollection<string[]>("BaseUris")
.IsRequired()
.HasColumnType("text[]");
b.PrimitiveCollection<string[]>("SupportedLanguages")
.IsRequired()
.HasColumnType("text[]");
b.HasKey("Name");
b.ToTable("MangaConnectors");
b.HasDiscriminator<string>("Name").HasValue("MangaConnector");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("API.Schema.MangaTag", b =>
{
b.Property<string>("Tag")
.HasColumnType("text");
b.HasKey("Tag");
b.ToTable("Tags");
});
modelBuilder.Entity("API.Schema.Notification", b =>
{
b.Property<string>("NotificationId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<DateTime>("Date")
.HasColumnType("timestamp with time zone");
b.Property<string>("Message")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Title")
.IsRequired()
.HasColumnType("text");
b.Property<byte>("Urgency")
.HasColumnType("smallint");
b.HasKey("NotificationId");
b.ToTable("Notifications");
});
modelBuilder.Entity("API.Schema.NotificationConnectors.NotificationConnector", b =>
{
b.Property<string>("NotificationConnectorId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<byte>("NotificationConnectorType")
.HasColumnType("smallint");
b.HasKey("NotificationConnectorId");
b.ToTable("NotificationConnectors");
b.HasDiscriminator<byte>("NotificationConnectorType");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("AuthorManga", b =>
{
b.Property<string>("AuthorsAuthorId")
.HasColumnType("character varying(64)");
b.Property<string>("MangaId")
.HasColumnType("character varying(64)");
b.HasKey("AuthorsAuthorId", "MangaId");
b.HasIndex("MangaId");
b.ToTable("AuthorManga");
});
modelBuilder.Entity("MangaMangaTag", b =>
{
b.Property<string>("MangaId")
.HasColumnType("character varying(64)");
b.Property<string>("TagsTag")
.HasColumnType("text");
b.HasKey("MangaId", "TagsTag");
b.HasIndex("TagsTag");
b.ToTable("MangaMangaTag");
});
modelBuilder.Entity("API.Schema.Jobs.DownloadNewChaptersJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.HasDiscriminator().HasValue((byte)1);
});
modelBuilder.Entity("API.Schema.Jobs.DownloadSingleChapterJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("ChapterId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("ChapterId");
b.HasDiscriminator().HasValue((byte)0);
});
modelBuilder.Entity("API.Schema.Jobs.MoveFileOrFolderJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("FromLocation")
.IsRequired()
.HasColumnType("text");
b.Property<string>("ToLocation")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)3);
});
modelBuilder.Entity("API.Schema.Jobs.UpdateMetadataJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.ToTable("Jobs", t =>
{
t.Property("MangaId")
.HasColumnName("UpdateMetadataJob_MangaId");
});
b.HasDiscriminator().HasValue((byte)2);
});
modelBuilder.Entity("API.Schema.LibraryConnectors.Kavita", b =>
{
b.HasBaseType("API.Schema.LibraryConnectors.LibraryConnector");
b.HasDiscriminator().HasValue((byte)1);
});
modelBuilder.Entity("API.Schema.LibraryConnectors.Komga", b =>
{
b.HasBaseType("API.Schema.LibraryConnectors.LibraryConnector");
b.HasDiscriminator().HasValue((byte)0);
});
modelBuilder.Entity("API.Schema.MangaConnectors.AsuraToon", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("AsuraToon");
});
modelBuilder.Entity("API.Schema.MangaConnectors.Bato", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("Bato");
});
modelBuilder.Entity("API.Schema.MangaConnectors.MangaDex", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("MangaDex");
});
modelBuilder.Entity("API.Schema.MangaConnectors.MangaHere", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("MangaHere");
});
modelBuilder.Entity("API.Schema.MangaConnectors.MangaKatana", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("MangaKatana");
});
modelBuilder.Entity("API.Schema.MangaConnectors.MangaLife", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("Manga4Life");
});
modelBuilder.Entity("API.Schema.MangaConnectors.Manganato", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("Manganato");
});
modelBuilder.Entity("API.Schema.MangaConnectors.Mangasee", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("Mangasee");
});
modelBuilder.Entity("API.Schema.MangaConnectors.Mangaworld", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("Mangaworld");
});
modelBuilder.Entity("API.Schema.MangaConnectors.ManhuaPlus", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("ManhuaPlus");
});
modelBuilder.Entity("API.Schema.MangaConnectors.Weebcentral", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("Weebcentral");
});
modelBuilder.Entity("API.Schema.NotificationConnectors.Gotify", b =>
{
b.HasBaseType("API.Schema.NotificationConnectors.NotificationConnector");
b.Property<string>("AppToken")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Endpoint")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)0);
});
modelBuilder.Entity("API.Schema.NotificationConnectors.Lunasea", b =>
{
b.HasBaseType("API.Schema.NotificationConnectors.NotificationConnector");
b.Property<string>("Id")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)1);
});
modelBuilder.Entity("API.Schema.NotificationConnectors.Ntfy", b =>
{
b.HasBaseType("API.Schema.NotificationConnectors.NotificationConnector");
b.Property<string>("Auth")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Endpoint")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Topic")
.IsRequired()
.HasColumnType("text");
b.ToTable("NotificationConnectors", t =>
{
t.Property("Endpoint")
.HasColumnName("Ntfy_Endpoint");
});
b.HasDiscriminator().HasValue((byte)2);
});
modelBuilder.Entity("API.Schema.Chapter", b =>
{
b.HasOne("API.Schema.Manga", "ParentManga")
.WithMany()
.HasForeignKey("ParentMangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("ParentManga");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.HasOne("API.Schema.Jobs.Job", null)
.WithMany("DependsOnJobs")
.HasForeignKey("JobId1");
b.HasOne("API.Schema.Jobs.Job", "ParentJob")
.WithMany()
.HasForeignKey("ParentJobId");
b.Navigation("ParentJob");
});
modelBuilder.Entity("API.Schema.Link", b =>
{
b.HasOne("API.Schema.Manga", null)
.WithMany("Links")
.HasForeignKey("MangaId");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.HasOne("API.Schema.MangaConnectors.MangaConnector", "MangaConnector")
.WithMany()
.HasForeignKey("MangaConnectorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("MangaConnector");
});
modelBuilder.Entity("API.Schema.MangaAltTitle", b =>
{
b.HasOne("API.Schema.Manga", null)
.WithMany("AltTitles")
.HasForeignKey("MangaId");
});
modelBuilder.Entity("AuthorManga", b =>
{
b.HasOne("API.Schema.Author", null)
.WithMany()
.HasForeignKey("AuthorsAuthorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.Manga", null)
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("MangaMangaTag", b =>
{
b.HasOne("API.Schema.Manga", null)
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.MangaTag", null)
.WithMany()
.HasForeignKey("TagsTag")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("API.Schema.Jobs.DownloadNewChaptersJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Jobs.DownloadSingleChapterJob", b =>
{
b.HasOne("API.Schema.Chapter", "Chapter")
.WithMany()
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Schema.Jobs.UpdateMetadataJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.Navigation("DependsOnJobs");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.Navigation("AltTitles");
b.Navigation("Links");
});
#pragma warning restore 612, 618
}
}
}

@ -0,0 +1,46 @@
using Asp.Versioning.ApiExplorer;
using Microsoft.Extensions.Options;
using Microsoft.OpenApi.Models;
using Swashbuckle.AspNetCore.SwaggerGen;
namespace API;
public class NamedSwaggerGenOptions : IConfigureNamedOptions<SwaggerGenOptions>
{
private readonly IApiVersionDescriptionProvider provider;
public NamedSwaggerGenOptions(IApiVersionDescriptionProvider provider)
{
this.provider = provider;
}
public void Configure(string? name, SwaggerGenOptions options)
{
Configure(options);
}
public void Configure(SwaggerGenOptions options)
{
// add swagger document for every API version discovered
foreach (var description in provider.ApiVersionDescriptions)
{
options.SwaggerDoc(
description.GroupName,
CreateVersionInfo(description));
}
}
private OpenApiInfo CreateVersionInfo(
ApiVersionDescription description)
{
var info = new OpenApiInfo()
{
Title = "Test API " + description.GroupName,
Version = description.ApiVersion.ToString()
};
if (description.IsDeprecated)
{
info.Description += " This API version has been deprecated.";
}
return info;
}
}

API/ProblemResponse.cs
@ -0,0 +1,3 @@
namespace API;
public record ProblemResponse(string title, string? message = null);

API/Program.cs
@ -0,0 +1,133 @@
using System.Reflection;
using System.Text.Json.Serialization;
using API;
using API.Schema;
using API.Schema.Jobs;
using API.Schema.MangaConnectors;
using Asp.Versioning;
using Asp.Versioning.Builder;
using Asp.Versioning.Conventions;
using Microsoft.EntityFrameworkCore;
using Newtonsoft.Json.Converters;
var builder = WebApplication.CreateBuilder(args);
builder.Services.AddCors(options =>
{
options.AddPolicy("AllowAll",
policy =>
{
policy
.AllowAnyOrigin()
.AllowAnyMethod()
.AllowAnyHeader();
});
});
builder.Services.AddApiVersioning(option =>
{
option.AssumeDefaultVersionWhenUnspecified = true;
option.DefaultApiVersion = new ApiVersion(2);
option.ReportApiVersions = true;
option.ApiVersionReader = ApiVersionReader.Combine(
new UrlSegmentApiVersionReader(),
new QueryStringApiVersionReader("api-version"),
new HeaderApiVersionReader("X-Version"),
new MediaTypeApiVersionReader("x-version"));
})
.AddMvc(options =>
{
options.Conventions.Add(new VersionByNamespaceConvention());
})
.AddApiExplorer(options => {
options.GroupNameFormat = "'v'V";
options.SubstituteApiVersionInUrl = true;
});
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddSwaggerGen(opt =>
{
var xmlFilename = $"{Assembly.GetExecutingAssembly().GetName().Name}.xml";
opt.IncludeXmlComments(Path.Combine(AppContext.BaseDirectory, xmlFilename));
});
builder.Services.ConfigureOptions<NamedSwaggerGenOptions>();
builder.Services.AddDbContext<PgsqlContext>(options =>
options.UseNpgsql($"Host={Environment.GetEnvironmentVariable("POSTGRES_HOST")??"localhost:5432"}; " +
$"Database={Environment.GetEnvironmentVariable("POSTGRES_DB")??"postgres"}; " +
$"Username={Environment.GetEnvironmentVariable("POSTGRES_USER")??"postgres"}; " +
$"Password={Environment.GetEnvironmentVariable("POSTGRES_PASSWORD")??"postgres"}"));
builder.Services.AddControllers().AddNewtonsoftJson(opts =>
{
opts.SerializerSettings.Converters.Add(new StringEnumConverter());
});
builder.WebHost.UseUrls("http://*:6531");
var app = builder.Build();
ApiVersionSet apiVersionSet = app.NewApiVersionSet()
.HasApiVersion(new ApiVersion(2))
.ReportApiVersions()
.Build();
app.UseCors("AllowAll");
app.MapControllers()
.WithApiVersionSet(apiVersionSet)
.MapToApiVersion(2);
app.UseSwagger();
app.UseSwaggerUI(options =>
{
options.SwaggerEndpoint(
$"/swagger/v2/swagger.json", "v2");
});
app.UseHttpsRedirection();
using (var scope = app.Services.CreateScope())
{
var db = scope.ServiceProvider.GetRequiredService<PgsqlContext>();
db.Database.Migrate();
}
using (var scope = app.Services.CreateScope())
{
PgsqlContext context = scope.ServiceProvider.GetService<PgsqlContext>()!;
MangaConnector[] connectors =
[
new AsuraToon(),
new Bato(),
new MangaDex(),
new MangaHere(),
new MangaKatana(),
new MangaLife(),
new Manganato(),
new Mangasee(),
new Mangaworld(),
new ManhuaPlus(),
new Weebcentral()
];
MangaConnector[] newConnectors = connectors.Where(c => !context.MangaConnectors.Contains(c)).ToArray();
context.MangaConnectors.AddRange(newConnectors);
context.Jobs.RemoveRange(context.Jobs.Where(j => j.state == JobState.Completed && j.RecurrenceMs < 1));
string[] emojis = { "(•‿•)", "(づ \u25d5‿\u25d5 )づ", "( \u02d8\u25bd\u02d8)っ\u2668", "=\uff3e\u25cf \u22cf \u25cf\uff3e=", "(ΦωΦ)", "(\u272a\u3268\u272a)", "( ノ・o・ )ノ", "(〜^\u2207^ )〜", "~(\u2267ω\u2266)~","૮ \u00b4• ﻌ \u00b4• ა", "(\u02c3ᆺ\u02c2)", "(=\ud83d\udf66 \u0f1d \ud83d\udf66=)"};
context.Notifications.Add(new Notification("Tranga Started", emojis[Random.Shared.Next(0, emojis.Length)], NotificationUrgency.High)); // Next's upper bound is exclusive, so pass Length to include the last emoji
context.SaveChanges();
}
TrangaSettings.Load();
Tranga.StartLogger();
Tranga.JobStarterThread.Start(app.Services.CreateScope().ServiceProvider.GetService<PgsqlContext>());
Tranga.NotificationSenderThread.Start(app.Services.CreateScope().ServiceProvider.GetService<PgsqlContext>());
app.UseCors("AllowAll");
app.Run();

@ -0,0 +1,47 @@
{
"$schema": "http://json.schemastore.org/launchsettings.json",
"iisSettings": {
"windowsAuthentication": false,
"anonymousAuthentication": true,
"iisExpress": {
"applicationUrl": "http://localhost:5976",
"sslPort": 44332,
"environmentVariables": {
"POSTGRES_Host": "localhost:5432"
}
}
},
"profiles": {
"http": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
"launchUrl": "swagger",
"applicationUrl": "http://localhost:5287",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development",
"POSTGRES_Host": "localhost:5432"
}
},
"https": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
"launchUrl": "swagger",
"applicationUrl": "https://localhost:7206;http://localhost:5287",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development",
"POSTGRES_Host": "localhost:5432"
}
},
"IIS Express": {
"commandName": "IISExpress",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development",
"POSTGRES_Host": "localhost:5432"
}
}
}
}

API/Schema/Author.cs
@ -0,0 +1,12 @@
using System.ComponentModel.DataAnnotations;
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
[PrimaryKey("AuthorId")]
public class Author(string authorName)
{
[MaxLength(64)]
public string AuthorId { get; init; } = TokenGen.CreateToken(typeof(Author), 64);
public string AuthorName { get; init; } = authorName;
}

API/Schema/Chapter.cs
@ -0,0 +1,113 @@
using System.ComponentModel.DataAnnotations;
using System.Xml.Linq;
using API.Schema.Jobs;
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
[PrimaryKey("ChapterId")]
public class Chapter : IComparable<Chapter>
{
[MaxLength(64)]
public string ChapterId { get; init; } = TokenGen.CreateToken(typeof(Chapter), 64);
public int? VolumeNumber { get; private set; }
public ChapterNumber ChapterNumber { get; private set; }
public string Url { get; internal set; }
public string? Title { get; private set; }
public string ArchiveFileName { get; private set; }
public bool Downloaded { get; internal set; } = false;
public string ParentMangaId { get; internal set; }
public Manga? ParentManga { get; init; }
public Chapter(Manga parentManga, string url, ChapterNumber chapterNumber, int? volumeNumber = null, string? title = null)
: this(parentManga.MangaId, url, chapterNumber, volumeNumber, title)
{
this.ParentManga = parentManga;
}
public Chapter(string parentMangaId, string url, ChapterNumber chapterNumber,
int? volumeNumber = null, string? title = null)
{
this.ParentMangaId = parentMangaId;
this.Url = url;
this.ChapterNumber = chapterNumber;
this.VolumeNumber = volumeNumber;
this.Title = title;
this.ArchiveFileName = BuildArchiveFileName();
}
public MoveFileOrFolderJob? UpdateChapterNumber(ChapterNumber chapterNumber)
{
this.ChapterNumber = chapterNumber;
return UpdateArchiveFileName();
}
public MoveFileOrFolderJob? UpdateVolumeNumber(int? volumeNumber)
{
this.VolumeNumber = volumeNumber;
return UpdateArchiveFileName();
}
public MoveFileOrFolderJob? UpdateTitle(string? title)
{
this.Title = title;
return UpdateArchiveFileName();
}
private string BuildArchiveFileName()
{
return $"{this.ParentManga.Name} - Vol.{this.VolumeNumber ?? 0} Ch.{this.ChapterNumber}{(this.Title is null ? "" : $" - {this.Title}")}.cbz";
}
private MoveFileOrFolderJob? UpdateArchiveFileName()
{
string oldPath = GetArchiveFilePath();
this.ArchiveFileName = BuildArchiveFileName();
if (Downloaded)
{
return new MoveFileOrFolderJob(oldPath, GetArchiveFilePath());
}
return null;
}
/// <summary>
/// Creates full file path of chapter-archive
/// </summary>
/// <returns>Filepath</returns>
internal string GetArchiveFilePath()
{
return Path.Join(TrangaSettings.downloadLocation, ParentManga.FolderName, ArchiveFileName);
}
public bool IsDownloaded()
{
string path = GetArchiveFilePath();
return File.Exists(path);
}
public int CompareTo(Chapter? other)
{
if(other is not { } otherChapter)
throw new ArgumentException($"{other} can not be compared to {this}");
return this.VolumeNumber?.CompareTo(otherChapter.VolumeNumber) switch
{
<0 => -1,
>0 => 1,
_ => this.ChapterNumber.CompareTo(otherChapter.ChapterNumber)
};
}
internal string GetComicInfoXmlString()
{
XElement comicInfo = new XElement("ComicInfo",
new XElement("Tags", string.Join(',', ParentManga.Tags.Select(tag => tag.Tag))),
new XElement("LanguageISO", ParentManga.OriginalLanguage),
new XElement("Title", this.Title),
new XElement("Writer", string.Join(',', ParentManga.Authors.Select(author => author.AuthorName))),
new XElement("Volume", this.VolumeNumber),
new XElement("Number", this.ChapterNumber)
);
return comicInfo.ToString();
}
}
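The Update* methods above do not touch the filesystem themselves; when the chapter is already downloaded they hand back a MoveFileOrFolderJob describing the rename. A minimal sketch of how a caller might consume that result, using only the types shown in this file (the helper name and the job collection are illustrative, not part of the repository):
// Illustrative sketch: renaming a downloaded chapter yields a follow-up move job.
static void RenameChapter(Chapter chapter, ICollection<Job> jobQueue)
{
    MoveFileOrFolderJob? moveJob = chapter.UpdateTitle("New Title");
    if (moveJob is not null)
        jobQueue.Add(moveJob); // the archive on disk still carries the old name; this job moves it
}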

API/Schema/ChapterNumber.cs
@ -0,0 +1,305 @@
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Numerics;
using System.Text.RegularExpressions;
namespace API.Schema;
public readonly struct ChapterNumber : INumber<ChapterNumber>
{
private readonly uint[] _numbers;
private readonly bool _naN;
private ChapterNumber(uint[] numbers, bool naN = false)
{
this._numbers = numbers;
this._naN = naN;
}
public ChapterNumber(string number)
{
if (!CanParse(number))
{
// Not a parsable chapter number: mark as NaN instead of letting uint.Parse throw below.
this._numbers = [];
this._naN = true;
return;
}
this._numbers = number.Split('.').Select(uint.Parse).ToArray();
this._naN = false;
}
public ChapterNumber(float number) : this(number.ToString("F", CultureInfo.InvariantCulture)) {} // invariant culture keeps '.' as the separator expected by the string constructor
public ChapterNumber(double number) : this((float)number) {}
public ChapterNumber(uint number)
{
this._numbers = [number];
this._naN = false;
}
public ChapterNumber(int number)
{
if (int.IsNegative(number))
{
// Negative chapter numbers cannot be represented: mark as NaN.
this._numbers = [];
this._naN = true;
return;
}
this._numbers = [(uint)number];
this._naN = false;
}
public int CompareTo(ChapterNumber other)
{
// Compare component-wise like a version number; when one number is a prefix of the other, the shorter one sorts first.
int index = 0;
while (index < this._numbers.Length && index < other._numbers.Length)
{
if (this._numbers[index] < other._numbers[index])
return -1;
if (this._numbers[index] > other._numbers[index])
return 1;
index++;
}
if (index >= this._numbers.Length && index >= other._numbers.Length)
return 0;
return index >= this._numbers.Length ? -1 : 1;
}
private static readonly Regex Pattern = new(@"[0-9]+(?:\.[0-9]+)*");
public static bool CanParse(string? number) => number is not null && Pattern.Match(number).Length == number.Length && number.Length > 0;
public bool Equals(ChapterNumber other) => CompareTo(other) == 0;
public string ToString(string? format, IFormatProvider? formatProvider)
{
return string.Join('.', _numbers);
}
public override bool Equals(object? obj)
{
return obj is ChapterNumber other && Equals(other);
}
public override int GetHashCode()
{
return HashCode.Combine(_numbers, _naN);
}
public bool TryFormat(Span<char> destination, out int charsWritten, ReadOnlySpan<char> format, IFormatProvider? provider)
{
throw new NotImplementedException();
}
public int CompareTo(object? obj)
{
if(obj is ChapterNumber other)
return CompareTo(other);
throw new ArgumentException();
}
public static ChapterNumber Parse(string s, IFormatProvider? provider)
{
if(!CanParse(s))
throw new FormatException($"Invalid ChapterNumber-String: {s}");
return new ChapterNumber(s);
}
public static bool TryParse([NotNullWhen(true)] string? s, IFormatProvider? provider, out ChapterNumber result)
{
result = new ChapterNumber([], true);
if (!CanParse(s))
return false;
if (s is null)
return false;
result = new ChapterNumber(s);
return true;
}
public static ChapterNumber Parse(ReadOnlySpan<char> s, IFormatProvider? provider) => Parse(s.ToString(), provider);
public static bool TryParse(ReadOnlySpan<char> s, IFormatProvider? provider, out ChapterNumber result) => TryParse(s.ToString(), provider, out result);
public static ChapterNumber operator +(ChapterNumber left, ChapterNumber right)
{
if (IsNaN(left) || IsNaN(right))
return new ChapterNumber([], true);
int size = left._numbers.Length > right._numbers.Length ? left._numbers.Length : right._numbers.Length;
uint[] numbers = new uint[size];
for (int i = 0; i < size; i++)
{
if(left._numbers.Length <= i)
numbers[i] = right._numbers[i];
else if(right._numbers.Length <= i)
numbers[i] = left._numbers[i];
else
numbers[i] = left._numbers[i] + right._numbers[i];
}
return new ChapterNumber(numbers);
}
private static bool BothNotNaN(ChapterNumber left, ChapterNumber right) => !IsNaN(left) && !IsNaN(right);
public static ChapterNumber AdditiveIdentity => Zero;
public static bool operator ==(ChapterNumber left, ChapterNumber right) => BothNotNaN(left, right) && left.Equals(right);
public static bool operator !=(ChapterNumber left, ChapterNumber right) => !(left == right);
public static bool operator >(ChapterNumber left, ChapterNumber right) => BothNotNaN(left, right) && left.CompareTo(right) > 0;
public static bool operator >=(ChapterNumber left, ChapterNumber right) => BothNotNaN(left, right) && left.CompareTo(right) >= 0;
public static bool operator <(ChapterNumber left, ChapterNumber right) => BothNotNaN(left, right) && left.CompareTo(right) < 0;
public static bool operator <=(ChapterNumber left, ChapterNumber right) => BothNotNaN(left, right) && left.CompareTo(right) <= 0;
public static ChapterNumber operator %(ChapterNumber left, ChapterNumber right) => throw new ArithmeticException();
public static ChapterNumber operator +(ChapterNumber value) => throw new InvalidOperationException();
public static ChapterNumber operator --(ChapterNumber value)
{
if (IsNaN(value))
return value;
uint[] numbers = value._numbers.ToArray(); // copy so the operand's backing array is not mutated
numbers[0]--;
return new ChapterNumber(numbers);
}
public static ChapterNumber operator /(ChapterNumber left, ChapterNumber right) => throw new InvalidOperationException();
public static ChapterNumber operator ++(ChapterNumber value)
{
if (IsNaN(value))
return value;
uint[] numbers = value._numbers.ToArray(); // copy so the operand's backing array is not mutated
numbers[0]++;
return new ChapterNumber(numbers);
}
public static ChapterNumber MultiplicativeIdentity => One;
public static ChapterNumber operator *(ChapterNumber left, ChapterNumber right) => throw new InvalidOperationException();
public static ChapterNumber operator -(ChapterNumber left, ChapterNumber right) => throw new InvalidOperationException();
public static ChapterNumber operator -(ChapterNumber value) => throw new InvalidOperationException();
public static ChapterNumber Abs(ChapterNumber value) => value;
public static bool IsCanonical(ChapterNumber value) => true;
public static bool IsComplexNumber(ChapterNumber value) => false;
public static bool IsEvenInteger(ChapterNumber value) => IsInteger(value) && uint.IsEvenInteger(value._numbers[0]);
public static bool IsFinite(ChapterNumber value) => true;
public static bool IsImaginaryNumber(ChapterNumber value) => false;
public static bool IsInfinity(ChapterNumber value) => false;
public static bool IsInteger(ChapterNumber value) => !IsNaN(value) && value._numbers.Length == 1;
public static bool IsNaN(ChapterNumber value) => value._naN;
public static bool IsNegative(ChapterNumber value) => false;
public static bool IsNegativeInfinity(ChapterNumber value) => false;
public static bool IsNormal(ChapterNumber value) => true;
public static bool IsOddInteger(ChapterNumber value) => false;
public static bool IsPositive(ChapterNumber value) => true;
public static bool IsPositiveInfinity(ChapterNumber value) => false;
public static bool IsRealNumber(ChapterNumber value) => false;
public static bool IsSubnormal(ChapterNumber value) => false;
public static bool IsZero(ChapterNumber value) => value._numbers.All(n => n == 0);
public static ChapterNumber MaxMagnitude(ChapterNumber x, ChapterNumber y)
{
if(IsNaN(x))
return new ChapterNumber([], true);
if (IsNaN(y))
return new ChapterNumber([], true);
return x >= y ? x : y;
}
public static ChapterNumber MaxMagnitudeNumber(ChapterNumber x, ChapterNumber y)
{
if (IsNaN(x))
return y;
if (IsNaN(y))
return x;
return x >= y ? x : y;
}
public static ChapterNumber MinMagnitude(ChapterNumber x, ChapterNumber y)
{
if(IsNaN(x))
return new ChapterNumber([], true);
if (IsNaN(y))
return new ChapterNumber([], true);
return x <= y ? x : y;
}
public static ChapterNumber MinMagnitudeNumber(ChapterNumber x, ChapterNumber y)
{
if (IsNaN(x))
return y;
if (IsNaN(y))
return x;
return x <= y ? x : y;
}
public static ChapterNumber Parse(ReadOnlySpan<char> s, NumberStyles style, IFormatProvider? provider) => throw new NotImplementedException();
public static ChapterNumber Parse(string s, NumberStyles style, IFormatProvider? provider) => throw new NotImplementedException();
public static bool TryConvertFromChecked<TOther>(TOther value, out ChapterNumber result) where TOther : INumberBase<TOther>
{
throw new NotImplementedException();
}
public static bool TryConvertFromSaturating<TOther>(TOther value, out ChapterNumber result) where TOther : INumberBase<TOther>
{
throw new NotImplementedException();
}
public static bool TryConvertFromTruncating<TOther>(TOther value, out ChapterNumber result) where TOther : INumberBase<TOther>
{
throw new NotImplementedException();
}
public static bool TryConvertToChecked<TOther>(ChapterNumber value, [MaybeNullWhen(false)] out TOther result) where TOther : INumberBase<TOther>
{
throw new NotImplementedException();
}
public static bool TryConvertToSaturating<TOther>(ChapterNumber value, [MaybeNullWhen(false)] out TOther result) where TOther : INumberBase<TOther>
{
throw new NotImplementedException();
}
public static bool TryConvertToTruncating<TOther>(ChapterNumber value, [MaybeNullWhen(false)] out TOther result) where TOther : INumberBase<TOther>
{
throw new NotImplementedException();
}
public static bool TryParse(ReadOnlySpan<char> s, NumberStyles style, IFormatProvider? provider, out ChapterNumber result)
=> TryParse(s.ToString(), style, provider, out result);
public static bool TryParse([NotNullWhen(true)] string? s, NumberStyles style, IFormatProvider? provider, out ChapterNumber result)
=> TryParse(s, provider, out result);
public static ChapterNumber One => new(1);
public static int Radix => 10;
public static ChapterNumber Zero => new(0);
}
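A brief usage sketch (not part of the repository) of the sub-decimal, version-style comparison this struct implements; only members defined above are used:
ChapterNumber a = new("10.5");
ChapterNumber b = new("10.5.1");  // sub-decimal chapter, sorts after 10.5
ChapterNumber bad = new("ch. 3"); // fails CanParse and becomes NaN
Console.WriteLine(a < b);                        // True: 10.5 is a prefix of 10.5.1
Console.WriteLine(ChapterNumber.IsNaN(bad));     // True
Console.WriteLine(ChapterNumber.CanParse("7"));  // True: plain integers parse as well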

@ -0,0 +1,136 @@
using System.ComponentModel.DataAnnotations;
using System.IO.Compression;
using System.Runtime.InteropServices;
using API.MangaDownloadClients;
using API.Schema.MangaConnectors;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.Formats.Jpeg;
using SixLabors.ImageSharp.Processing;
using SixLabors.ImageSharp.Processing.Processors.Binarization;
using static System.IO.UnixFileMode;
namespace API.Schema.Jobs;
public class DownloadMangaCoverJob(string chapterId, string? parentJobId = null, ICollection<string>? dependsOnJobsIds = null)
: Job(TokenGen.CreateToken(typeof(DownloadMangaCoverJob), 64), JobType.DownloadMangaCoverJob, 0, parentJobId, dependsOnJobsIds)
{
[MaxLength(64)]
public string ChapterId { get; init; } = chapterId;
public Chapter? Chapter { get; init; }
protected override IEnumerable<Job> RunInternal(PgsqlContext context)
{
MangaConnector connector = Chapter.ParentManga?.MangaConnector ?? context.MangaConnectors.Find(context.Manga.Find(Chapter.ParentMangaId)?.MangaConnectorId)!; // look up the connector by its key (the manga's connector id), not by the manga's own id
DownloadChapterImages(Chapter, connector);
return [];
}
private bool DownloadChapterImages(Chapter chapter, MangaConnector connector)
{
string[] imageUrls = connector.GetChapterImageUrls(Chapter);
string saveArchiveFilePath = chapter.GetArchiveFilePath();
//Check if Publication Directory already exists
string directoryPath = Path.GetDirectoryName(saveArchiveFilePath)!;
if (!Directory.Exists(directoryPath))
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
Directory.CreateDirectory(directoryPath,
UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute );
else
Directory.CreateDirectory(directoryPath);
if (File.Exists(saveArchiveFilePath)) //Delete any existing archive so the chapter is re-downloaded fresh
File.Delete(saveArchiveFilePath);
//Create a temporary folder to store images
string tempFolder = Directory.CreateTempSubdirectory("trangatemp").FullName;
int chapterNum = 0;
//Download all Images to temporary Folder
if (imageUrls.Length == 0)
{
Directory.Delete(tempFolder, true);
return false;
}
foreach (string imageUrl in imageUrls)
{
string extension = imageUrl.Split('.')[^1].Split('?')[0];
string imagePath = Path.Join(tempFolder, $"{chapterNum++}.{extension}");
bool status = DownloadImage(imageUrl, imagePath);
if (status is false)
return false;
}
CopyCoverFromCacheToDownloadLocation();
File.WriteAllText(Path.Join(tempFolder, "ComicInfo.xml"), chapter.GetComicInfoXmlString());
//ZIP-it and ship-it
ZipFile.CreateFromDirectory(tempFolder, saveArchiveFilePath);
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(saveArchiveFilePath, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute | OtherRead | OtherExecute);
Directory.Delete(tempFolder, true); //Cleanup
return true;
}
private void ProcessImage(string imagePath)
{
if (!TrangaSettings.bwImages && TrangaSettings.compression == 100)
return;
DateTime start = DateTime.Now;
using Image image = Image.Load(imagePath);
File.Delete(imagePath);
if(TrangaSettings.bwImages)
image.Mutate(i => i.ApplyProcessor(new AdaptiveThresholdProcessor()));
image.SaveAsJpeg(imagePath, new JpegEncoder()
{
Quality = TrangaSettings.compression
});
}
private void CopyCoverFromCacheToDownloadLocation(int? retries = 1)
{
//Check if Publication already has a Folder and cover
string publicationFolder = Chapter.ParentManga.CreatePublicationFolder();
DirectoryInfo dirInfo = new (publicationFolder);
if (dirInfo.EnumerateFiles().Any(info => info.Name.Contains("cover", StringComparison.InvariantCultureIgnoreCase)))
{
return;
}
string? fileInCache = Chapter.ParentManga.CoverFileNameInCache;
if (fileInCache is null || !File.Exists(fileInCache))
{
if (retries > 0 && Chapter.ParentManga.CoverUrl is not null)
{
Chapter.ParentManga.SaveCoverImageToCache();
CopyCoverFromCacheToDownloadLocation(--retries);
}
return;
}
string newFilePath = Path.Join(publicationFolder, $"cover.{Path.GetFileName(fileInCache).Split('.')[^1]}" );
File.Copy(fileInCache, newFilePath, true);
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(newFilePath, GroupRead | GroupWrite | UserRead | UserWrite);
}
private bool DownloadImage(string imageUrl, string savePath)
{
HttpDownloadClient downloadClient = new();
RequestResult requestResult = downloadClient.MakeRequest(imageUrl, RequestType.MangaImage);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return false;
if (requestResult.result == Stream.Null)
return false;
FileStream fs = new (savePath, FileMode.Create);
requestResult.result.CopyTo(fs);
fs.Close();
ProcessImage(savePath);
return true;
}
}


@ -0,0 +1,22 @@
using System.ComponentModel.DataAnnotations;
using API.Schema.MangaConnectors;
namespace API.Schema.Jobs;
public class DownloadNewChaptersJob(ulong recurrenceMs, string mangaId, string? parentJobId = null, ICollection<string>? dependsOnJobsIds = null)
: Job(TokenGen.CreateToken(typeof(DownloadNewChaptersJob), 64), JobType.DownloadNewChaptersJob, recurrenceMs, parentJobId, dependsOnJobsIds)
{
[MaxLength(64)]
public string MangaId { get; init; } = mangaId;
public Manga? Manga { get; init; }
protected override IEnumerable<Job> RunInternal(PgsqlContext context)
{
Manga m = Manga ?? context.Manga.Find(MangaId)!;
MangaConnector connector = m.MangaConnector ?? context.MangaConnectors.Find(m.MangaConnectorId)!;
Chapter[] newChapters = connector.GetNewChapters(m);
context.Chapters.AddRange(newChapters);
context.SaveChanges();
return newChapters.Select(chapter => new DownloadSingleChapterJob(chapter.ChapterId, this.JobId));
}
}
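Run on this job returns one DownloadSingleChapterJob per new chapter; persisting those follow-ups is left to the caller. A minimal sketch of that caller (storing follow-up jobs back through the context is an assumption, not something this diff shows):

// Hypothetical caller: execute a due job and queue the follow-up jobs it returns.
using API.Schema;      // assumed namespace of PgsqlContext
using API.Schema.Jobs;

static void RunAndQueueFollowUps(PgsqlContext context, Job job)
{
    IEnumerable<Job> followUps = job.Run(context);
    context.AddRange(followUps); // e.g. one DownloadSingleChapterJob per new chapter
    context.SaveChanges();
}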


@ -0,0 +1,138 @@
using System.ComponentModel.DataAnnotations;
using System.IO.Compression;
using System.Runtime.InteropServices;
using API.MangaDownloadClients;
using API.Schema.MangaConnectors;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.Formats.Jpeg;
using SixLabors.ImageSharp.Processing;
using SixLabors.ImageSharp.Processing.Processors.Binarization;
using static System.IO.UnixFileMode;
namespace API.Schema.Jobs;
public class DownloadSingleChapterJob(string chapterId, string? parentJobId = null, ICollection<string>? dependsOnJobsIds = null)
: Job(TokenGen.CreateToken(typeof(DownloadSingleChapterJob), 64), JobType.DownloadSingleChapterJob, 0, parentJobId, dependsOnJobsIds)
{
[MaxLength(64)]
public string ChapterId { get; init; } = chapterId;
public Chapter? Chapter { get; init; }
protected override IEnumerable<Job> RunInternal(PgsqlContext context)
{
Chapter c = Chapter ?? context.Chapters.Find(ChapterId)!;
Manga m = c.ParentManga ?? context.Manga.Find(c.ParentMangaId)!;
MangaConnector connector = m.MangaConnector ?? context.MangaConnectors.Find(m.MangaConnectorId)!;
DownloadChapterImages(c, connector, m);
return [];
}
private bool DownloadChapterImages(Chapter chapter, MangaConnector connector, Manga manga)
{
string[] imageUrls = connector.GetChapterImageUrls(chapter);
string saveArchiveFilePath = chapter.GetArchiveFilePath();
//Check if Publication Directory already exists
string directoryPath = Path.GetDirectoryName(saveArchiveFilePath)!;
if (!Directory.Exists(directoryPath))
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
Directory.CreateDirectory(directoryPath,
UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute );
else
Directory.CreateDirectory(directoryPath);
if (File.Exists(saveArchiveFilePath)) //Delete any existing archive so the chapter is re-downloaded fresh
File.Delete(saveArchiveFilePath);
//Create a temporary folder to store images
string tempFolder = Directory.CreateTempSubdirectory("trangatemp").FullName;
int chapterNum = 0;
//Download all Images to temporary Folder
if (imageUrls.Length == 0)
{
Directory.Delete(tempFolder, true);
return false;
}
foreach (string imageUrl in imageUrls)
{
string extension = imageUrl.Split('.')[^1].Split('?')[0];
string imagePath = Path.Join(tempFolder, $"{chapterNum++}.{extension}");
bool status = DownloadImage(imageUrl, imagePath);
if (status is false)
return false;
}
CopyCoverFromCacheToDownloadLocation(manga);
File.WriteAllText(Path.Join(tempFolder, "ComicInfo.xml"), chapter.GetComicInfoXmlString());
//ZIP-it and ship-it
ZipFile.CreateFromDirectory(tempFolder, saveArchiveFilePath);
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(saveArchiveFilePath, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute | OtherRead | OtherExecute);
Directory.Delete(tempFolder, true); //Cleanup
return true;
}
private void ProcessImage(string imagePath)
{
if (!TrangaSettings.bwImages && TrangaSettings.compression == 100)
return;
DateTime start = DateTime.Now;
using Image image = Image.Load(imagePath);
File.Delete(imagePath);
if(TrangaSettings.bwImages)
image.Mutate(i => i.ApplyProcessor(new AdaptiveThresholdProcessor()));
image.SaveAsJpeg(imagePath, new JpegEncoder()
{
Quality = TrangaSettings.compression
});
}
private void CopyCoverFromCacheToDownloadLocation(Manga manga, int? retries = 1)
{
//Check if Publication already has a Folder and cover
string publicationFolder = manga.CreatePublicationFolder();
DirectoryInfo dirInfo = new (publicationFolder);
if (dirInfo.EnumerateFiles().Any(info => info.Name.Contains("cover", StringComparison.InvariantCultureIgnoreCase)))
{
return;
}
string? fileInCache = manga.CoverFileNameInCache;
if (fileInCache is null || !File.Exists(fileInCache))
{
if (retries > 0)
{
manga.SaveCoverImageToCache();
CopyCoverFromCacheToDownloadLocation(manga, --retries);
}
return;
}
string newFilePath = Path.Join(publicationFolder, $"cover.{Path.GetFileName(fileInCache).Split('.')[^1]}" );
File.Copy(fileInCache, newFilePath, true);
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(newFilePath, GroupRead | GroupWrite | UserRead | UserWrite);
}
private bool DownloadImage(string imageUrl, string savePath)
{
HttpDownloadClient downloadClient = new();
RequestResult requestResult = downloadClient.MakeRequest(imageUrl, RequestType.MangaImage);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return false;
if (requestResult.result == Stream.Null)
return false;
FileStream fs = new (savePath, FileMode.Create);
requestResult.result.CopyTo(fs);
fs.Close();
ProcessImage(savePath);
return true;
}
}

API/Schema/Jobs/Job.cs

@ -0,0 +1,55 @@
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using Microsoft.EntityFrameworkCore;
using Newtonsoft.Json;
namespace API.Schema.Jobs;
[PrimaryKey("JobId")]
public abstract class Job
{
[MaxLength(64)]
public string JobId { get; init; }
[MaxLength(64)]
public string? ParentJobId { get; init; }
public Job? ParentJob { get; init; }
[MaxLength(64)]
public ICollection<string>? DependsOnJobsIds { get; init; }
public ICollection<Job>? DependsOnJobs { get; init; }
public JobType JobType { get; init; }
public ulong RecurrenceMs { get; set; }
public DateTime LastExecution { get; internal set; } = DateTime.UnixEpoch;
[NotMapped]
public DateTime NextExecution => LastExecution.AddMilliseconds(RecurrenceMs);
public JobState state { get; internal set; } = JobState.Waiting;
public Job(string jobId, JobType jobType, ulong recurrenceMs, Job? parentJob = null, ICollection<Job>? dependsOnJobs = null)
: this(jobId, jobType, recurrenceMs, parentJob?.JobId, dependsOnJobs?.Select(j => j.JobId).ToList())
{
this.ParentJob = parentJob;
this.DependsOnJobs = dependsOnJobs;
}
public Job(string jobId, JobType jobType, ulong recurrenceMs, string? parentJobId = null, ICollection<string>? dependsOnJobsIds = null)
{
JobId = jobId;
ParentJobId = parentJobId;
DependsOnJobsIds = dependsOnJobsIds;
JobType = jobType;
RecurrenceMs = recurrenceMs;
}
public IEnumerable<Job> Run(PgsqlContext context)
{
this.state = JobState.Running;
IEnumerable<Job> newJobs = RunInternal(context);
this.state = JobState.Completed;
return newJobs;
}
protected abstract IEnumerable<Job> RunInternal(PgsqlContext context);
}
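A small sketch of how NextExecution and state combine into a due-check; the choice of clock (UtcNow here) is an assumption, since the diff does not show how LastExecution is stamped:

// Sketch: a recurring job is due once NextExecution has passed and it is not already running.
using API.Schema.Jobs;

static bool IsDue(Job job) =>
    job.state != JobState.Running && job.NextExecution <= DateTime.UtcNow;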


@ -0,0 +1,29 @@
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using JsonSerializer = Newtonsoft.Json.JsonSerializer;
namespace API.Schema.Jobs;
public class JobJsonDeserializer : JsonConverter<Job>
{
public override bool CanWrite { get; } = false;
public override void WriteJson(JsonWriter writer, Job? value, JsonSerializer serializer)
{
throw new NotImplementedException();
}
public override Job? ReadJson(JsonReader reader, Type objectType, Job? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
JObject j = JObject.Load(reader);
JobType? type = Enum.Parse<JobType>(j.GetValue("jobType")!.Value<string>()!);
return type switch
{
JobType.DownloadSingleChapterJob => j.ToObject<DownloadSingleChapterJob>(),
JobType.DownloadNewChaptersJob => j.ToObject<DownloadNewChaptersJob>(),
JobType.DownloadMangaCoverJob => j.ToObject<DownloadMangaCoverJob>(),
JobType.UpdateMetaDataJob => j.ToObject<UpdateMetadataJob>(),
JobType.MoveFileOrFolderJob => j.ToObject<MoveFileOrFolderJob>(),
_ => null
};
}
}
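The converter registers with Newtonsoft.Json in the usual way; a minimal sketch (where the settings object is wired up is not shown in this diff):

// Sketch: deserialize a polymorphic Job payload using the converter above.
using API.Schema.Jobs;
using Newtonsoft.Json;

static Job? DeserializeJob(string json)
{
    JsonSerializerSettings settings = new() { Converters = { new JobJsonDeserializer() } };
    return JsonConvert.DeserializeObject<Job>(json, settings);
}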


@ -0,0 +1,8 @@
namespace API.Schema.Jobs;
public enum JobState
{
Waiting,
Running,
Completed
}


@ -0,0 +1,11 @@
namespace API.Schema.Jobs;
public enum JobType : byte
{
DownloadSingleChapterJob = 0,
DownloadNewChaptersJob = 1,
UpdateMetaDataJob = 2,
MoveFileOrFolderJob = 3,
DownloadMangaCoverJob = 4
}


@ -0,0 +1,13 @@
namespace API.Schema.Jobs;
public class MoveFileOrFolderJob(string fromLocation, string toLocation, string? parentJobId = null, ICollection<string>? dependsOnJobsIds = null)
: Job(TokenGen.CreateToken(typeof(MoveFileOrFolderJob), 64), JobType.MoveFileOrFolderJob, 0, parentJobId, dependsOnJobsIds)
{
public string FromLocation { get; init; } = fromLocation;
public string ToLocation { get; init; } = toLocation;
protected override IEnumerable<Job> RunInternal(PgsqlContext context)
{
throw new NotImplementedException();
}
}
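RunInternal is still a stub here; purely as an illustration (an assumption, not the author's implementation), the move step could look like this:

// Hypothetical helper: move whichever exists at FromLocation, file or directory.
using API.Schema.Jobs;

static void Execute(MoveFileOrFolderJob job)
{
    if (File.Exists(job.FromLocation))
        File.Move(job.FromLocation, job.ToLocation);
    else if (Directory.Exists(job.FromLocation))
        Directory.Move(job.FromLocation, job.ToLocation);
}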


@ -0,0 +1,16 @@
using System.ComponentModel.DataAnnotations;
namespace API.Schema.Jobs;
public class UpdateMetadataJob(ulong recurrenceMs, string mangaId, string? parentJobId = null, ICollection<string>? dependsOnJobsIds = null)
: Job(TokenGen.CreateToken(typeof(UpdateMetadataJob), 64), JobType.UpdateMetaDataJob, recurrenceMs, parentJobId, dependsOnJobsIds)
{
[MaxLength(64)]
public string MangaId { get; init; } = mangaId;
public virtual Manga? Manga { get; init; }
protected override IEnumerable<Job> RunInternal(PgsqlContext context)
{
throw new NotImplementedException();
}
}


@ -0,0 +1,111 @@
using System.Text.Json;
using System.Text.Json.Nodes;
namespace API.Schema.LibraryConnectors;
public class Kavita : LibraryConnector
{
public Kavita(string baseUrl, string auth) : base(TokenGen.CreateToken(typeof(Kavita), 64), LibraryType.Kavita, baseUrl, auth)
{
}
public Kavita(string baseUrl, string username, string password) :
this(baseUrl, GetToken(baseUrl, username, password))
{
}
private static string GetToken(string baseUrl, string username, string password)
{
HttpClient client = new()
{
DefaultRequestHeaders =
{
{ "Accept", "application/json" }
}
};
HttpRequestMessage requestMessage = new ()
{
Method = HttpMethod.Post,
RequestUri = new Uri($"{baseUrl}/api/Account/login"),
Content = new StringContent($"{{\"username\":\"{username}\",\"password\":\"{password}\"}}", System.Text.Encoding.UTF8, "application/json")
};
try
{
HttpResponseMessage response = client.Send(requestMessage);
if (response.IsSuccessStatusCode)
{
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(response.Content.ReadAsStream());
if (result is not null)
return result["token"]!.GetValue<string>();
}
}
catch (HttpRequestException)
{
//Login request failed: fall through and return an empty token
}
return "";
}
protected override void UpdateLibraryInternal()
{
foreach (KavitaLibrary lib in GetLibraries())
NetClient.MakePost($"{BaseUrl}/api/Library/scan?libraryId={lib.id}", "Bearer", Auth);
}
internal override bool Test()
{
foreach (KavitaLibrary lib in GetLibraries())
if (NetClient.MakePost($"{BaseUrl}/api/Library/scan?libraryId={lib.id}", "Bearer", Auth))
return true;
return false;
}
/// <summary>
/// Fetches all libraries available to the user
/// </summary>
/// <returns>Array of KavitaLibrary</returns>
private IEnumerable<KavitaLibrary> GetLibraries()
{
Stream data = NetClient.MakeRequest($"{BaseUrl}/api/Library/libraries", "Bearer", Auth);
if (data == Stream.Null)
{
return Array.Empty<KavitaLibrary>();
}
JsonArray? result = JsonSerializer.Deserialize<JsonArray>(data);
if (result is null)
{
return Array.Empty<KavitaLibrary>();
}
List<KavitaLibrary> ret = new();
foreach (JsonNode? jsonNode in result)
{
JsonObject? jObject = (JsonObject?)jsonNode;
if(jObject is null)
continue;
int libraryId = jObject!["id"]!.GetValue<int>();
string libraryName = jObject["name"]!.GetValue<string>();
ret.Add(new KavitaLibrary(libraryId, libraryName));
}
return ret;
}
private struct KavitaLibrary
{
public int id { get; }
// ReSharper disable once UnusedAutoPropertyAccessor.Local
public string name { get; }
public KavitaLibrary(int id, string name)
{
this.id = id;
this.name = name;
}
}
}
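Usage sketch (Test() is internal, so a call like this would sit inside the API assembly; URL and credentials are placeholders):

// Sketch: authenticate against a Kavita instance and trigger a library scan.
using API.Schema.LibraryConnectors;

Kavita kavita = new("http://kavita.local:5000", "demo-user", "demo-password");
bool reachable = kavita.Test(); // POSTs a scan request to the first library found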


@ -0,0 +1,74 @@
using System.Text.Json;
using System.Text.Json.Nodes;
namespace API.Schema.LibraryConnectors;
public class Komga : LibraryConnector
{
public Komga(string baseUrl, string auth) : base(TokenGen.CreateToken(typeof(Komga), 64), LibraryType.Komga,
baseUrl, auth)
{
}
public Komga(string baseUrl, string username, string password)
: this(baseUrl, Convert.ToBase64String(System.Text.Encoding.ASCII.GetBytes($"{username}:{password}")))
{
}
protected override void UpdateLibraryInternal()
{
foreach (KomgaLibrary lib in GetLibraries())
NetClient.MakePost($"{BaseUrl}/api/v1/libraries/{lib.id}/scan", "Basic", Auth);
}
internal override bool Test()
{
foreach (KomgaLibrary lib in GetLibraries())
if (NetClient.MakePost($"{BaseUrl}/api/v1/libraries/{lib.id}/scan", "Basic", Auth))
return true;
return false;
}
/// <summary>
/// Fetches all libraries available to the user
/// </summary>
/// <returns>Array of KomgaLibraries</returns>
private IEnumerable<KomgaLibrary> GetLibraries()
{
Stream data = NetClient.MakeRequest($"{BaseUrl}/api/v1/libraries", "Basic", Auth);
if (data == Stream.Null)
{
return Array.Empty<KomgaLibrary>();
}
JsonArray? result = JsonSerializer.Deserialize<JsonArray>(data);
if (result is null)
{
return Array.Empty<KomgaLibrary>();
}
HashSet<KomgaLibrary> ret = new();
foreach (JsonNode? jsonNode in result)
{
var jObject = (JsonObject?)jsonNode;
string libraryId = jObject!["id"]!.GetValue<string>();
string libraryName = jObject["name"]!.GetValue<string>();
ret.Add(new KomgaLibrary(libraryId, libraryName));
}
return ret;
}
private struct KomgaLibrary
{
public string id { get; }
// ReSharper disable once UnusedAutoPropertyAccessor.Local
public string name { get; }
public KomgaLibrary(string id, string name)
{
this.id = id;
this.name = name;
}
}
}


@ -0,0 +1,18 @@
using System.ComponentModel.DataAnnotations;
using Microsoft.EntityFrameworkCore;
namespace API.Schema.LibraryConnectors;
[PrimaryKey("LibraryConnectorId")]
public abstract class LibraryConnector(string libraryConnectorId, LibraryType libraryType, string baseUrl, string auth)
{
[MaxLength(64)]
public string LibraryConnectorId { get; } = libraryConnectorId;
public LibraryType LibraryType { get; init; } = libraryType;
public string BaseUrl { get; init; } = baseUrl;
public string Auth { get; init; } = auth;
protected abstract void UpdateLibraryInternal();
internal abstract bool Test();
}


@ -0,0 +1,7 @@
namespace API.Schema.LibraryConnectors;
public enum LibraryType : byte
{
Komga = 0,
Kavita = 1
}


@ -0,0 +1,69 @@
using System.Net;
using System.Net.Http.Headers;
namespace API.Schema.LibraryConnectors;
public class NetClient
{
public static Stream MakeRequest(string url, string authScheme, string auth)
{
HttpClient client = new();
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(authScheme, auth);
HttpRequestMessage requestMessage = new ()
{
Method = HttpMethod.Get,
RequestUri = new Uri(url)
};
try
{
HttpResponseMessage response = client.Send(requestMessage);
if (response.StatusCode is HttpStatusCode.Unauthorized &&
response.RequestMessage!.RequestUri!.AbsoluteUri != url)
return MakeRequest(response.RequestMessage!.RequestUri!.AbsoluteUri, authScheme, auth);
else if (response.IsSuccessStatusCode)
return response.Content.ReadAsStream();
else
return Stream.Null;
}
catch (Exception e)
{
switch (e)
{
case HttpRequestException:
break;
default:
throw;
}
return Stream.Null;
}
}
public static bool MakePost(string url, string authScheme, string auth)
{
HttpClient client = new()
{
DefaultRequestHeaders =
{
{ "Accept", "application/json" },
{ "Authorization", new AuthenticationHeaderValue(authScheme, auth).ToString() }
}
};
HttpRequestMessage requestMessage = new ()
{
Method = HttpMethod.Post,
RequestUri = new Uri(url)
};
HttpResponseMessage response = client.Send(requestMessage);
if(response.StatusCode is HttpStatusCode.Unauthorized && response.RequestMessage!.RequestUri!.AbsoluteUri != url)
return MakePost(response.RequestMessage!.RequestUri!.AbsoluteUri, authScheme, auth);
else if (response.IsSuccessStatusCode)
return true;
else
return false;
}
}
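Both library connectors funnel their HTTP calls through NetClient; a direct-call sketch (endpoint and Base64 token are placeholders):

// Sketch: authenticated GET via NetClient, mirroring the Komga/Kavita usage above.
using System.Text.Json;
using System.Text.Json.Nodes;
using API.Schema.LibraryConnectors;

Stream data = NetClient.MakeRequest("http://komga.local:25600/api/v1/libraries", "Basic", "dXNlcjpwYXNz");
if (data != Stream.Null)
{
    JsonArray? libraries = JsonSerializer.Deserialize<JsonArray>(data);
    Console.WriteLine(libraries?.Count ?? 0);
}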

API/Schema/Link.cs

@ -0,0 +1,20 @@
using System.ComponentModel.DataAnnotations;
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
[PrimaryKey("LinkId")]
public class Link(string linkProvider, string linkUrl)
{
[MaxLength(64)]
public string LinkId { get; init; } = TokenGen.CreateToken(typeof(Link), 64);
public string LinkProvider { get; init; } = linkProvider;
public string LinkUrl { get; init; } = linkUrl;
public override bool Equals(object? obj)
{
if (obj is not Link other)
return false;
return other.LinkProvider == LinkProvider && other.LinkUrl == LinkUrl;
}
public override int GetHashCode() => HashCode.Combine(LinkProvider, LinkUrl);
}

API/Schema/Manga.cs

@ -0,0 +1,130 @@
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using API.Schema.Jobs;
using API.Schema.MangaConnectors;
using Microsoft.EntityFrameworkCore;
using static System.IO.UnixFileMode;
namespace API.Schema;
[PrimaryKey("MangaId")]
public class Manga
{
[MaxLength(64)]
public string MangaId { get; init; } = TokenGen.CreateToken(typeof(Manga), 64);
[MaxLength(64)]
public string ConnectorId { get; init; }
public string Name { get; internal set; }
public string Description { get; internal set; }
public string WebsiteUrl { get; internal set; }
public string CoverUrl { get; internal set; }
public string? CoverFileNameInCache { get; internal set; }
public uint Year { get; internal set; }
public string? OriginalLanguage { get; internal set; }
public MangaReleaseStatus ReleaseStatus { get; internal set; }
public string FolderName { get; private set; }
public float IgnoreChapterBefore { get; internal set; }
public string MangaConnectorId { get; private set; }
public MangaConnector? MangaConnector { get; private set; }
public ICollection<Author>? Authors { get; internal set; }
public ICollection<MangaTag>? Tags { get; internal set; }
public ICollection<Link>? Links { get; internal set; }
public ICollection<MangaAltTitle>? AltTitles { get; internal set; }
public Manga(string connectorId, string name, string description, string websiteUrl, string coverUrl,
string? coverFileNameInCache, uint year, string? originalLanguage, MangaReleaseStatus releaseStatus,
float ignoreChapterBefore, MangaConnector mangaConnector, ICollection<Author> authors,
ICollection<MangaTag> tags, ICollection<Link> links, ICollection<MangaAltTitle> altTitles)
: this(connectorId, name, description, websiteUrl, coverUrl, coverFileNameInCache, year, originalLanguage,
releaseStatus, ignoreChapterBefore, mangaConnector.Name)
{
this.Authors = authors;
this.Tags = tags;
this.Links = links;
this.AltTitles = altTitles;
}
public Manga(string connectorId, string name, string description, string websiteUrl, string coverUrl,
string? coverFileNameInCache, uint year, string? originalLanguage, MangaReleaseStatus releaseStatus,
float ignoreChapterBefore, string mangaConnectorId)
{
ConnectorId = connectorId;
Name = name;
Description = description;
WebsiteUrl = websiteUrl;
CoverUrl = coverUrl;
CoverFileNameInCache = coverFileNameInCache;
Year = year;
OriginalLanguage = originalLanguage;
ReleaseStatus = releaseStatus;
IgnoreChapterBefore = ignoreChapterBefore;
MangaConnectorId = mangaConnectorId;
FolderName = BuildFolderName(name);
}
public MoveFileOrFolderJob UpdateFolderName(string downloadLocation, string newName)
{
string oldName = this.FolderName;
this.FolderName = newName;
return new MoveFileOrFolderJob(Path.Join(downloadLocation, oldName), Path.Join(downloadLocation, this.FolderName));
}
internal void UpdateWithInfo(Manga other)
{
this.Name = other.Name;
this.Year = other.Year;
this.Description = other.Description;
this.CoverUrl = other.CoverUrl;
this.OriginalLanguage = other.OriginalLanguage;
this.Authors = other.Authors;
this.Links = other.Links;
this.Tags = other.Tags;
this.AltTitles = other.AltTitles;
this.ReleaseStatus = other.ReleaseStatus;
}
private static string BuildFolderName(string mangaName)
{
return mangaName;
}
internal string SaveCoverImageToCache()
{
Regex urlRex = new (@"https?:\/\/((?:[a-zA-Z0-9-]+\.)+[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+))");
//https?:\/\/[a-zA-Z0-9-]+\.([a-zA-Z0-9-]+\.[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+)) for only second level domains
Match match = urlRex.Match(CoverUrl);
string filename = $"{match.Groups[1].Value}-{MangaId}.{match.Groups[3].Value}";
string saveImagePath = Path.Join(TrangaSettings.coverImageCache, filename);
if (File.Exists(saveImagePath))
{
this.CoverFileNameInCache = saveImagePath;
return saveImagePath;
}
RequestResult coverResult = new HttpDownloadClient().MakeRequest(CoverUrl, RequestType.MangaCover);
using MemoryStream ms = new();
coverResult.result.CopyTo(ms);
Directory.CreateDirectory(TrangaSettings.coverImageCache);
File.WriteAllBytes(saveImagePath, ms.ToArray());
//Remember the cached file so the cover-copy retry logic in the download jobs can find it
this.CoverFileNameInCache = saveImagePath;
return saveImagePath;
}
public string CreatePublicationFolder()
{
string publicationFolder = Path.Join(TrangaSettings.downloadLocation, this.FolderName);
if(!Directory.Exists(publicationFolder))
Directory.CreateDirectory(publicationFolder);
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(publicationFolder, GroupRead | GroupWrite | GroupExecute | OtherRead | OtherWrite | OtherExecute | UserRead | UserWrite | UserExecute);
return publicationFolder;
}
//TODO onchanges create job to update metadata files in archives, etc.
}
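UpdateFolderName intentionally returns the corrective MoveFileOrFolderJob rather than moving anything itself; a sketch of the intended call pattern (persisting the job through the context is an assumption about the caller):

// Sketch: renaming a manga's folder yields a MoveFileOrFolderJob to be queued.
using API.Schema;
using API.Schema.Jobs;

static void Rename(PgsqlContext context, Manga manga, string downloadLocation, string newName)
{
    MoveFileOrFolderJob moveJob = manga.UpdateFolderName(downloadLocation, newName);
    context.Add(moveJob);
    context.SaveChanges();
}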


@ -0,0 +1,15 @@
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
[PrimaryKey("AltTitleId")]
public class MangaAltTitle(string language, string title)
{
[MaxLength(64)]
public string AltTitleId { get; init; } = TokenGen.CreateToken("AltTitle", 64);
[MaxLength(8)]
public string Language { get; init; } = language;
public string Title { get; set; } = title;
}


@ -0,0 +1,192 @@
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using HtmlAgilityPack;
namespace API.Schema.MangaConnectors;
public class AsuraToon : MangaConnector
{
public AsuraToon() : base("AsuraToon", ["en"], ["https://asuracomic.net"])
{
this.downloadClient = new ChromiumDownloadClient();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
string requestUrl = $"https://asuracomic.net/series?name={sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return [];
if (requestResult.htmlDocument is null)
{
return [];
}
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://asuracomic.net/series/{publicationId}");
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return null;
if (requestResult.htmlDocument is null)
{
return null;
}
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
{
HtmlNodeCollection mangaList = document.DocumentNode.SelectNodes("//a[starts-with(@href,'series')]");
if (mangaList is null || mangaList.Count < 1)
return [];
IEnumerable<string> urls = mangaList.Select(a => $"https://asuracomic.net/{a.GetAttributeValue("href", "")}");
List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
foreach (string url in urls)
{
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}
return ret.ToArray();
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string? originalLanguage = null;
Dictionary<string, string> altTitles = new(), links = new();
HtmlNodeCollection genreNodes = document.DocumentNode.SelectNodes("//h3[text()='Genres']/../div/button");
string[] tags = genreNodes.Select(b => b.InnerText).ToArray();
List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
HtmlNode statusNode = document.DocumentNode.SelectSingleNode("//h3[text()='Status']/../h3[2]");
MangaReleaseStatus releaseStatus = statusNode.InnerText.ToLower() switch
{
"ongoing" => MangaReleaseStatus.Continuing,
"hiatus" => MangaReleaseStatus.OnHiatus,
"completed" => MangaReleaseStatus.Completed,
"dropped" => MangaReleaseStatus.Cancelled,
"season end" => MangaReleaseStatus.Continuing,
"coming soon" => MangaReleaseStatus.Unreleased,
_ => MangaReleaseStatus.Unreleased
};
HtmlNode coverNode =
document.DocumentNode.SelectSingleNode("//img[@alt='poster']");
string coverUrl = coverNode.GetAttributeValue("src", "");
HtmlNode titleNode =
document.DocumentNode.SelectSingleNode("//title");
string sortName = Regex.Match(titleNode.InnerText, @"(.*) - Asura Scans").Groups[1].Value;
HtmlNode descriptionNode =
document.DocumentNode.SelectSingleNode("//h3[starts-with(text(),'Synopsis')]/../span");
string description = descriptionNode?.InnerText??"";
HtmlNodeCollection authorNodes = document.DocumentNode.SelectNodes("//h3[text()='Author']/../h3[not(text()='Author' or text()='_')]");
HtmlNodeCollection artistNodes = document.DocumentNode.SelectNodes("//h3[text()='Artist']/../h3[not(text()='Artist' or text()='_')]");
IEnumerable<string> authorNames = authorNodes is null ? [] : authorNodes.Select(a => a.InnerText);
IEnumerable<string> artistNames = artistNodes is null ? [] : artistNodes.Select(a => a.InnerText);
List<string> authorStrings = authorNames.Concat(artistNames).ToList();
List<Author> authors = authorStrings.Select(author => new Author(author)).ToList();
HtmlNode? firstChapterNode = document.DocumentNode.SelectSingleNode("//a[contains(@href, 'chapter/1')]/../following-sibling::h3");
uint year = uint.Parse(firstChapterNode?.InnerText.Split(' ')[^1] ?? "2000");
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
[],
[]);
return (manga, authors, mangaTags, [], []);
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
string requestUrl = $"https://asuracomic.net/series/{manga.MangaId}";
// Leaving this in to verify that the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return [];
//Return Chapters ordered by Chapter-Number
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestUrl);
return chapters.Order().ToArray();
}
private List<Chapter> ParseChaptersFromHtml(Manga manga, string mangaUrl)
{
RequestResult result = downloadClient.MakeRequest(mangaUrl, RequestType.Default);
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
{
return new List<Chapter>();
}
List<Chapter> ret = new();
HtmlNodeCollection chapterURLNodes = result.htmlDocument.DocumentNode.SelectNodes("//a[contains(@href, '/chapter/')]");
Regex infoRex = new(@"Chapter ([0-9]+)(.*)?");
foreach (HtmlNode chapterInfo in chapterURLNodes)
{
string chapterUrl = chapterInfo.GetAttributeValue("href", "");
Match match = infoRex.Match(chapterInfo.InnerText);
if(!ChapterNumber.CanParse(match.Groups[1].Value))
continue;
ChapterNumber chapterNumber = new(match.Groups[1].Value);
string? chapterName = match.Groups[2].Success && match.Groups[2].Length > 1 ? match.Groups[2].Value : null;
string url = $"https://asuracomic.net/series/{chapterUrl}";
try
{
ret.Add(new Chapter(manga, url, chapterNumber, null, chapterName));
}
catch (Exception)
{
//Chapter could not be constructed from the parsed values; skip it
}
}
return ret;
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
string requestUrl = chapter.Url;
// Leaving this in to check if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
{
return [];
}
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
return imageUrls;
}
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
{
HtmlNodeCollection images = document.DocumentNode.SelectNodes("//img[contains(@alt, 'chapter page')]");
return images.Select(i => i.GetAttributeValue("src", "")).ToArray();
}
}


@ -0,0 +1,205 @@
using System.Net;
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using HtmlAgilityPack;
namespace API.Schema.MangaConnectors;
public class Bato : MangaConnector
{
public Bato() : base("Bato", ["en"], ["bato.to"])
{
this.downloadClient = new HttpDownloadClient();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
string requestUrl = $"https://bato.to/v3x-search?word={sanitizedTitle}&lang=en";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return [];
if (requestResult.htmlDocument is null)
{
return [];
}
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://bato.to/title/{publicationId}");
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return null;
if (requestResult.htmlDocument is null)
{
return null;
}
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
{
HtmlNode mangaList = document.DocumentNode.SelectSingleNode("//div[@data-hk='0-0-2']");
if (!mangaList.ChildNodes.Any(node => node.Name == "div"))
return [];
List<string> urls = mangaList.ChildNodes
.Select(node => $"https://bato.to{node.Descendants("div").First().FirstChild.GetAttributeValue("href", "")}").ToList();
HashSet<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
foreach (string url in urls)
{
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}
return ret.ToArray();
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
HtmlNode infoNode = document.DocumentNode.SelectSingleNode("/html/body/div/main/div[1]/div[2]");
string sortName = infoNode.Descendants("h3").First().InnerText;
string description = document.DocumentNode
.SelectSingleNode("//div[contains(concat(' ',normalize-space(@class),' '),'prose')]").InnerText;
string[] altTitlesList = infoNode.ChildNodes[1].ChildNodes[2].InnerText.Split('/');
int i = 0;
List<MangaAltTitle> altTitles = altTitlesList.Select(a => new MangaAltTitle(i++.ToString(), a)).ToList();
string coverUrl = document.DocumentNode.SelectNodes("//img")
.First(child => child.GetAttributeValue("data-hk", "") == "0-1-0").GetAttributeValue("src", "").Replace("&amp;", "&");
List<HtmlNode> genreNodes = document.DocumentNode.SelectSingleNode("//b[text()='Genres:']/..").SelectNodes("span").ToList();
string[] tags = genreNodes.Select(node => node.FirstChild.InnerText).ToArray();
List<MangaTag> mangaTags = tags.Select(s => new MangaTag(s)).ToList();
List<HtmlNode> authorsNodes = infoNode.ChildNodes[1].ChildNodes[3].Descendants("a").ToList();
List<string> authorNames = authorsNodes.Select(node => node.InnerText.Replace("amp;", "")).ToList();
List<Author> authors = authorNames.Select(n => new Author(n)).ToList();
HtmlNode? originalLanguageNode = document.DocumentNode.SelectSingleNode("//span[text()='Tr From']/..");
string originalLanguage = originalLanguageNode is not null ? originalLanguageNode.LastChild.InnerText : "";
if (!uint.TryParse(
document.DocumentNode.SelectSingleNode("//span[text()='Original Publication:']/..").LastChild.InnerText.Split('-')[0],
out uint year))
year = (uint)DateTime.Now.Year;
string status = document.DocumentNode.SelectSingleNode("//span[text()='Original Publication:']/..")
.ChildNodes[2].InnerText;
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
switch (status.ToLower())
{
case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
case "completed": releaseStatus = MangaReleaseStatus.Completed; break;
case "hiatus": releaseStatus = MangaReleaseStatus.OnHiatus; break;
case "cancelled": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "pending": releaseStatus = MangaReleaseStatus.Unreleased; break;
}
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
[],
altTitles);
return (manga, authors, mangaTags, [], altTitles);
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
string requestUrl = $"https://bato.to/title/{manga.MangaId}";
// Leaving this in to verify that the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return [];
//Return Chapters ordered by Chapter-Number
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestUrl);
return chapters.Order().ToArray();
}
private List<Chapter> ParseChaptersFromHtml(Manga manga, string mangaUrl)
{
RequestResult result = downloadClient.MakeRequest(mangaUrl, RequestType.Default);
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
{
return new List<Chapter>();
}
List<Chapter> ret = new();
HtmlNode chapterList =
result.htmlDocument.DocumentNode.SelectSingleNode("/html/body/div/main/div[3]/astro-island/div/div[2]/div/div/astro-slot");
Regex numberRex = new(@"\/title\/.+\/([0-9])+(?:-vol_([0-9]+))?-ch_([0-9\.]+)");
foreach (HtmlNode chapterInfo in chapterList.SelectNodes("div"))
{
HtmlNode infoNode = chapterInfo.FirstChild.FirstChild;
string chapterUrl = infoNode.GetAttributeValue("href", "");
Match match = numberRex.Match(chapterUrl);
string id = match.Groups[1].Value;
int? volumeNumber = match.Groups[2].Success ? int.Parse(match.Groups[2].Value) : null;
if(!ChapterNumber.CanParse(match.Groups[3].Value))
continue;
ChapterNumber chapterNumber = new(match.Groups[3].Value);
string url = $"https://bato.to{chapterUrl}?load=2";
try
{
ret.Add(new Chapter(manga, url, chapterNumber, volumeNumber, null));
}
catch (Exception)
{
//Chapter could not be constructed from the parsed values; skip it
}
}
return ret;
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
string requestUrl = chapter.Url;
// Leaving this in to check if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
{
return [];
}
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
return imageUrls;
}
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
{
HtmlNode images = document.DocumentNode.SelectNodes("//astro-island").First(node =>
node.GetAttributeValue("component-url", "").Contains("/_astro/ImageList."));
string weirdString = images.OuterHtml;
string weirdString2 = Regex.Match(weirdString, @"props=\""(.*)}\""").Groups[1].Value;
string[] urls = Regex.Matches(weirdString2, @"(https:\/\/[A-z\-0-9\.\?\&\;\=\/]+)\\")
.Select(match => match.Groups[1].Value.Replace("&amp;", "&")).ToArray();
return urls;
}
}


@ -0,0 +1,39 @@
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using API.MangaDownloadClients;
using Microsoft.EntityFrameworkCore;
using Newtonsoft.Json;
namespace API.Schema.MangaConnectors;
[PrimaryKey("Name")]
public abstract class MangaConnector(string name, string[] supportedLanguages, string[] baseUris)
{
[MaxLength(32)]
public string Name { get; init; } = name;
public string[] SupportedLanguages { get; init; } = supportedLanguages;
public string[] BaseUris { get; init; } = baseUris;
public abstract (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "");
public abstract (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url);
public abstract (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId);
public abstract Chapter[] GetChapters(Manga manga, string language="en");
[JsonIgnore]
[NotMapped]
internal DownloadClient downloadClient { get; init; } = null!;
public Chapter[] GetNewChapters(Manga manga)
{
Chapter[] allChapters = GetChapters(manga);
if (allChapters.Length < 1)
return [];
return allChapters.Where(chapter => !chapter.IsDownloaded()).ToArray();
}
internal abstract string[] GetChapterImageUrls(Chapter chapter);
}
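A sketch of the search-then-fetch flow the concrete connectors below implement (MangaDex is used only as an example instance; the search string is a placeholder):

// Sketch: search a connector and list chapters for the first hit.
using API.Schema;
using API.Schema.MangaConnectors;

MangaConnector connector = new MangaDex();
var results = connector.GetManga("some title");
if (results.Length > 0)
{
    (Manga manga, _, _, _, _) = results[0];
    Chapter[] chapters = connector.GetChapters(manga, "en");
    Console.WriteLine($"{manga.Name}: {chapters.Length} chapters");
}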


@ -0,0 +1,281 @@
using System.Net;
using System.Text.Json.Nodes;
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using JsonSerializer = System.Text.Json.JsonSerializer;
namespace API.Schema.MangaConnectors;
public class MangaDex : MangaConnector
{
//https://api.mangadex.org/docs/3-enumerations/#language-codes--localization
//https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes
//https://gist.github.com/Josantonius/b455e315bc7f790d14b136d61d9ae469
public MangaDex() : base("MangaDex", ["en","pt","pt-br","it","de","ru","aa","ab","ae","af","ak","am","an","ar-ae","ar-bh","ar-dz","ar-eg","ar-iq","ar-jo","ar-kw","ar-lb","ar-ly","ar-ma","ar-om","ar-qa","ar-sa","ar-sy","ar-tn","ar-ye","ar","as","av","ay","az","ba","be","bg","bh","bi","bm","bn","bo","br","bs","ca","ce","ch","co","cr","cs","cu","cv","cy","da","de-at","de-ch","de-de","de-li","de-lu","div","dv","dz","ee","el","en-au","en-bz","en-ca","en-cb","en-gb","en-ie","en-jm","en-nz","en-ph","en-tt","en-us","en-za","en-zw","eo","es-ar","es-bo","es-cl","es-co","es-cr","es-do","es-ec","es-es","es-gt","es-hn","es-la","es-mx","es-ni","es-pa","es-pe","es-pr","es-py","es-sv","es-us","es-uy","es-ve","es","et","eu","fa","ff","fi","fj","fo","fr-be","fr-ca","fr-ch","fr-fr","fr-lu","fr-mc","fr","fy","ga","gd","gl","gn","gu","gv","ha","he","hi","ho","hr-ba","hr-hr","hr","ht","hu","hy","hz","ia","id","ie","ig","ii","ik","in","io","is","it-ch","it-it","iu","iw","ja","ja-ro","ji","jv","jw","ka","kg","ki","kj","kk","kl","km","kn","ko","ko-ro","kr","ks","ku","kv","kw","ky","kz","la","lb","lg","li","ln","lo","ls","lt","lu","lv","mg","mh","mi","mk","ml","mn","mo","mr","ms-bn","ms-my","ms","mt","my","na","nb","nd","ne","ng","nl-be","nl-nl","nl","nn","no","nr","ns","nv","ny","oc","oj","om","or","os","pa","pi","pl","ps","pt-pt","qu-bo","qu-ec","qu-pe","qu","rm","rn","ro","rw","sa","sb","sc","sd","se-fi","se-no","se-se","se","sg","sh","si","sk","sl","sm","sn","so","sq","sr-ba","sr-sp","sr","ss","st","su","sv-fi","sv-se","sv","sw","sx","syr","ta","te","tg","th","ti","tk","tl","tn","to","tr","ts","tt","tw","ty","ug","uk","ur","us","uz","ve","vi","vo","wa","wo","xh","yi","yo","za","zh-cn","zh-hk","zh-mo","zh-ro","zh-sg","zh-tw","zh","zu"], ["mangadex.org"])
{
this.downloadClient = new HttpDownloadClient();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
const int limit = 100; //How many values we want returned at once
int offset = 0; //"Page"
int total = int.MaxValue; //How many total results are there, is updated on first request
HashSet<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> retManga = new();
int loadedPublicationData = 0;
List<JsonNode> results = new();
//Request all search-results
while (offset < total) //As long as we haven't requested all "Pages"
{
//Request next Page
RequestResult requestResult = downloadClient.MakeRequest(
$"https://api.mangadex.org/manga?limit={limit}&title={publicationTitle}&offset={offset}" +
$"&contentRating%5B%5D=safe&contentRating%5B%5D=suggestive&contentRating%5B%5D=erotica" +
$"&contentRating%5B%5D=pornographic" +
$"&includes%5B%5D=manga&includes%5B%5D=cover_art&includes%5B%5D=author" +
$"&includes%5B%5D=artist&includes%5B%5D=tag", RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
break;
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
offset += limit;
if (result is null)
break;
if(result.ContainsKey("total"))
total = result["total"]!.GetValue<int>(); //Update the total number of Publications
else continue;
if (result.ContainsKey("data"))
results.AddRange(result["data"]!.AsArray()!);//Manga-data-Array
}
foreach (JsonNode mangaNode in results)
{
if(MangaFromJsonObject(mangaNode.AsObject()) is { } manga)
retManga.Add(manga); //Add Publication (Manga) to result
}
return retManga.ToArray();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
RequestResult requestResult =
downloadClient.MakeRequest($"https://api.mangadex.org/manga/{publicationId}?includes%5B%5D=manga&includes%5B%5D=cover_art&includes%5B%5D=author&includes%5B%5D=artist&includes%5B%5D=tag", RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return null;
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
if(result is not null)
return MangaFromJsonObject(result["data"]!.AsObject());
return null;
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
Regex idRex = new (@"https:\/\/mangadex.org\/title\/([A-z0-9-]*)\/.*");
string id = idRex.Match(url).Groups[1].Value;
return GetMangaFromId(id);
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? MangaFromJsonObject(JsonObject manga)
{
if (!manga.TryGetPropertyValue("id", out JsonNode? idNode))
return null;
string publicationId = idNode!.GetValue<string>();
if (!manga.TryGetPropertyValue("attributes", out JsonNode? attributesNode))
return null;
JsonObject attributes = attributesNode!.AsObject();
if (!attributes.TryGetPropertyValue("title", out JsonNode? titleNode))
return null;
string sortName = titleNode!.AsObject().ContainsKey("en") switch
{
true => titleNode.AsObject()["en"]!.GetValue<string>(),
false => titleNode.AsObject().First().Value!.GetValue<string>()
};
Dictionary<string, string> altTitlesDict = new();
if (attributes.TryGetPropertyValue("altTitles", out JsonNode? altTitlesNode))
{
foreach (JsonNode? altTitleNode in altTitlesNode!.AsArray())
{
JsonObject altTitleNodeObject = altTitleNode!.AsObject();
altTitlesDict.TryAdd(altTitleNodeObject.First().Key, altTitleNodeObject.First().Value!.GetValue<string>());
}
}
List<MangaAltTitle> altTitles = altTitlesDict.Select(t => new MangaAltTitle(t.Key, t.Value)).ToList();
if (!attributes.TryGetPropertyValue("description", out JsonNode? descriptionNode))
return null;
string description = descriptionNode!.AsObject().ContainsKey("en") switch
{
true => descriptionNode.AsObject()["en"]!.GetValue<string>(),
false => descriptionNode.AsObject().FirstOrDefault().Value?.GetValue<string>() ?? ""
};
Dictionary<string, string> linksDict = new();
if (attributes.TryGetPropertyValue("links", out JsonNode? linksNode) && linksNode is not null)
foreach (KeyValuePair<string, JsonNode?> linkKv in linksNode!.AsObject())
linksDict.TryAdd(linkKv.Key, linkKv.Value.GetValue<string>());
List<Link> links = linksDict.Select(x => new Link(x.Key, x.Value)).ToList();
string? originalLanguage =
attributes.TryGetPropertyValue("originalLanguage", out JsonNode? originalLanguageNode) switch
{
true => originalLanguageNode?.GetValue<string>(),
false => null
};
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
if (attributes.TryGetPropertyValue("status", out JsonNode? statusNode))
{
releaseStatus = statusNode?.GetValue<string>().ToLower() switch
{
"ongoing" => MangaReleaseStatus.Continuing,
"completed" => MangaReleaseStatus.Completed,
"hiatus" => MangaReleaseStatus.OnHiatus,
"cancelled" => MangaReleaseStatus.Cancelled,
_ => MangaReleaseStatus.Unreleased
};
}
uint year = attributes.TryGetPropertyValue("year", out JsonNode? yearNode) switch
{
true => yearNode?.GetValue<uint>()??0,
false => 0
};
HashSet<string> tags = new(128);
if (attributes.TryGetPropertyValue("tags", out JsonNode? tagsNode))
foreach (JsonNode? tagNode in tagsNode!.AsArray())
tags.Add(tagNode!["attributes"]!["name"]!["en"]!.GetValue<string>());
List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
if (!manga.TryGetPropertyValue("relationships", out JsonNode? relationshipsNode))
return null;
JsonNode? coverNode = relationshipsNode!.AsArray()
.FirstOrDefault(rel => rel!["type"]!.GetValue<string>().Equals("cover_art"));
if (coverNode is null)
return null;
string fileName = coverNode["attributes"]!["fileName"]!.GetValue<string>();
string coverUrl = $"https://uploads.mangadex.org/covers/{publicationId}/{fileName}";
List<string> authorNames = new();
JsonNode?[] authorNodes = relationshipsNode.AsArray()
.Where(rel => rel!["type"]!.GetValue<string>().Equals("author") || rel!["type"]!.GetValue<string>().Equals("artist")).ToArray();
foreach (JsonNode? authorNode in authorNodes)
{
string authorName = authorNode!["attributes"]!["name"]!.GetValue<string>();
if(!authorNames.Contains(authorName))
authorNames.Add(authorName);
}
List<Author> authors = authorNames.Select(a => new Author(a)).ToList();
Manga pub = new (publicationId, sortName, description, $"https://mangadex.org/title/{publicationId}", coverUrl, null, year,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
links,
altTitles);
return (pub, authors, mangaTags, links, altTitles);
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
const int limit = 100; //How many values we want returned at once
int offset = 0; //"Page"
int total = int.MaxValue; //How many total results are there, is updated on first request
List<Chapter> chapters = new();
//As long as we haven't requested all "Pages"
while (offset < total)
{
//Request next "Page"
RequestResult requestResult =
downloadClient.MakeRequest(
$"https://api.mangadex.org/manga/{manga.ConnectorId}/feed?limit={limit}&offset={offset}&translatedLanguage%5B%5D={language}&contentRating%5B%5D=safe&contentRating%5B%5D=suggestive&contentRating%5B%5D=erotica&contentRating%5B%5D=pornographic", RequestType.MangaDexFeed);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
break;
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
offset += limit;
if (result is null)
break;
total = result["total"]!.GetValue<int>();
JsonArray chaptersInResult = result["data"]!.AsArray();
//Loop through all Chapters in result and extract information from JSON
foreach (JsonNode? jsonNode in chaptersInResult)
{
JsonObject chapter = (JsonObject)jsonNode!;
JsonObject attributes = chapter["attributes"]!.AsObject();
string chapterId = chapter["id"]!.GetValue<string>();
string url = $"https://mangadex.org/chapter/{chapterId}";
string? title = attributes.ContainsKey("title") && attributes["title"] is not null
? attributes["title"]!.GetValue<string>()
: null;
int? volume = attributes.ContainsKey("volume") && attributes["volume"] is not null
? int.Parse(attributes["volume"]!.GetValue<string>())
: null;
string? chapterNumStr = attributes.ContainsKey("chapter") && attributes["chapter"] is not null
? attributes["chapter"]!.GetValue<string>()
: null;
if(chapterNumStr is null || !ChapterNumber.CanParse(chapterNumStr))
continue;
ChapterNumber chapterNumber = new(chapterNumStr);
if (attributes.ContainsKey("pages") && attributes["pages"] is not null &&
attributes["pages"]!.GetValue<int>() < 1)
{
continue;
}
try
{
Chapter newChapter = new Chapter(manga, url, chapterNumber, volume, title);
if(!chapters.Contains(newChapter))
chapters.Add(newChapter);
}
catch (Exception)
{
//Chapter could not be constructed from the parsed values; skip it
}
}
}
//Return Chapters ordered by Chapter-Number
return chapters.Order().ToArray();
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{//Request URLs for Chapter-Images
RequestResult requestResult =
downloadClient.MakeRequest($"https://api.mangadex.org/at-home/server/{chapter.ChapterId}?forcePort443=false", RequestType.MangaDexImage);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
return [];
}
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
if (result is null)
{
return [];
}
string baseUrl = result["baseUrl"]!.GetValue<string>();
string hash = result["chapter"]!["hash"]!.GetValue<string>();
JsonArray imageFileNames = result["chapter"]!["data"]!.AsArray();
//Loop through all imageNames and construct urls (imageUrl)
List<string> imageUrls = new();
foreach (JsonNode? image in imageFileNames)
imageUrls.Add($"{baseUrl}/data/{hash}/{image!.GetValue<string>()}");
return imageUrls.ToArray();
}
}


@ -0,0 +1,185 @@
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using HtmlAgilityPack;
namespace API.Schema.MangaConnectors;
public class MangaHere : MangaConnector
{
public MangaHere() : base("MangaHere", ["en"], ["www.mangahere.cc"])
{
this.downloadClient = new ChromiumDownloadClient();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join('+', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
string requestUrl = $"https://www.mangahere.cc/search?title={sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
return [];
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
{
if (document.DocumentNode.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),' container ')]").Any(node => node.ChildNodes.Any(cNode => cNode.HasClass("search-keywords"))))
return [];
List<string> urls = document.DocumentNode
.SelectNodes("//a[contains(@href, '/manga/') and not(contains(@href, '.html'))]")
.Select(thumb => $"https://www.mangahere.cc{thumb.GetAttributeValue("href", "")}").Distinct().ToList();
HashSet<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
foreach (string url in urls)
{
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}
return ret.ToArray();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://www.mangahere.cc/manga/{publicationId}");
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
RequestResult requestResult =
downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
return null;
Regex idRex = new (@"https:\/\/www\.mangahere\.[a-z]{0,63}\/manga\/([0-9A-z\-]+).*");
string id = idRex.Match(url).Groups[1].Value;
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, id, url);
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string originalLanguage = "", status = "";
Dictionary<string, string> altTitles = new(), links = new();
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
//We don't fetch posters because of same-origin restrictions. The original selector was:
//HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//img[contains(concat(' ',normalize-space(@class),' '),' detail-info-cover-img ')]");
string coverUrl = "http://static.mangahere.cc/v20230914/mangahere/images/nopicture.jpg";
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//span[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-title-font ')]");
string sortName = titleNode.InnerText;
List<string> authorNames = document.DocumentNode
.SelectNodes("//p[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-say ')]/a")
.Select(node => node.InnerText)
.ToList();
List<Author> authors = authorNames.Select(n => new Author(n)).ToList();
HashSet<string> tags = document.DocumentNode
.SelectNodes("//p[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-tag-list ')]/a")
.Select(node => node.InnerText)
.ToHashSet();
List<MangaTag> mangaTags = tags.Select(n => new MangaTag(n)).ToList();
status = document.DocumentNode.SelectSingleNode("//span[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-title-tip ')]").InnerText;
switch (status.ToLower())
{
case "cancelled": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "hiatus": releaseStatus = MangaReleaseStatus.OnHiatus; break;
case "discontinued": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "complete": releaseStatus = MangaReleaseStatus.Completed; break;
case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
}
HtmlNode descriptionNode = document.DocumentNode
.SelectSingleNode("//p[contains(concat(' ',normalize-space(@class),' '),' fullcontent ')]");
string description = descriptionNode.InnerText;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, 0,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
[],
[]);
return (manga, authors, mangaTags, [], []);
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
string requestUrl = $"https://www.mangahere.cc/manga/{manga.MangaId}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
return Array.Empty<Chapter>();
List<string> urls = requestResult.htmlDocument.DocumentNode.SelectNodes("//div[@id='list-1']/ul//li//a[contains(@href, '/manga/')]")
.Select(node => node.GetAttributeValue("href", "")).ToList();
Regex chapterRex = new(@".*\/manga\/[a-zA-Z0-9\-\._\~\!\$\&\'\(\)\*\+\,\;\=\:\@]+\/v([0-9(TBD)]+)\/c([0-9\.]+)\/.*");
List<Chapter> chapters = new();
foreach (string url in urls)
{
Match rexMatch = chapterRex.Match(url);
int? volumeNumber = rexMatch.Groups[1].Value == "TBD" ? null : int.Parse(rexMatch.Groups[1].Value);
if(!ChapterNumber.CanParse(rexMatch.Groups[2].Value))
continue;
ChapterNumber chapterNumber = new(rexMatch.Groups[2].Value);
string fullUrl = $"https://www.mangahere.cc{url}";
try
{
chapters.Add(new Chapter(manga, fullUrl, chapterNumber, volumeNumber, null));
}
catch (Exception)
{
//Skip chapters that fail to construct and continue with the rest.
}
}
//Return Chapters ordered by Chapter-Number
return chapters.Order().ToArray();
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
List<string> imageUrls = new();
int downloaded = 1;
int images = 1;
string url = string.Join('/', chapter.Url.Split('/')[..^1]);
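//MangaHere serves one image per reader page (1.html, 2.html, ...); keep requesting pages until the highest 'data-page' value reported on the page has been fetched.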
do
{
RequestResult requestResult =
downloadClient.MakeRequest($"{url}/{downloaded}.html", RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
{
return [];
}
imageUrls.AddRange(ParseImageUrlsFromHtml(requestResult.htmlDocument));
images = requestResult.htmlDocument.DocumentNode
.SelectNodes("//a[contains(@href, '/manga/')]")
.MaxBy(node => node.GetAttributeValue("data-page", 0))!.GetAttributeValue("data-page", 0);
} while (downloaded++ < images); //Pages are 1-based; stop once the last reported page has been fetched.
return imageUrls.ToArray();
}
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
{
return document.DocumentNode
.SelectNodes("//img[contains(concat(' ',normalize-space(@class),' '),' reader-main-img ')]")
.Select(node =>
{
string url = node.GetAttributeValue("src", "");
return url.StartsWith("//") ? $"https:{url}" : url;
})
.ToArray();
}
}


@@ -0,0 +1,234 @@
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using HtmlAgilityPack;
namespace API.Schema.MangaConnectors;
public class MangaKatana : MangaConnector
{
public MangaKatana() : base("MangaKatana", ["en"], ["mangakatana.com"])
{
this.downloadClient = new HttpDownloadClient();
}
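//MangaKatana is scraped over plain HTTP; a single search hit redirects straight to the manga page, and chapter image URLs are embedded in an inline script on the reader page.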
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join("%20", Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
string requestUrl = $"https://mangakatana.com/?search={sanitizedTitle}&search_by=book_name";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return [];
// ReSharper disable once MergeIntoPattern
// If only a single result is found, the request is redirected straight to that manga's page instead of a results page
if(requestResult.hasBeenRedirected
&& requestResult.redirectedToUrl is not null
&& requestResult.redirectedToUrl.Contains("mangakatana.com/manga"))
{
return new [] { ParseSinglePublicationFromHtml(requestResult.result, requestResult.redirectedToUrl.Split('/')[^1], requestResult.redirectedToUrl) };
}
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.result);
return publications;
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://mangakatana.com/manga/{publicationId}");
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
RequestResult requestResult =
downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return null;
return ParseSinglePublicationFromHtml(requestResult.result, url.Split('/')[^1], url);
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(Stream html)
{
StreamReader reader = new(html);
string htmlString = reader.ReadToEnd();
HtmlDocument document = new();
document.LoadHtml(htmlString);
IEnumerable<HtmlNode> searchResults = document.DocumentNode.SelectNodes("//*[@id='book_list']/div");
if (searchResults is null || !searchResults.Any())
return [];
List<string> urls = new();
foreach (HtmlNode mangaResult in searchResults)
{
urls.Add(mangaResult.Descendants("a").First().GetAttributes()
.First(a => a.Name == "href").Value);
}
HashSet<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
foreach (string url in urls)
{
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}
return ret.ToArray();
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(Stream html, string publicationId, string websiteUrl)
{
StreamReader reader = new(html);
string htmlString = reader.ReadToEnd();
HtmlDocument document = new();
document.LoadHtml(htmlString);
Dictionary<string, string> altTitlesDict = new();
Dictionary<string, string>? links = null;
HashSet<string> tags = new();
string[] authorNames = [];
string originalLanguage = "";
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
HtmlNode infoNode = document.DocumentNode.SelectSingleNode("//*[@id='single_book']");
string sortName = infoNode.Descendants("h1").First(n => n.HasClass("heading")).InnerText;
HtmlNode infoTable = infoNode.SelectSingleNode("//*[@id='single_book']/div[2]/div/ul");
foreach (HtmlNode row in infoTable.Descendants("li"))
{
string key = row.SelectNodes("div").First().InnerText.ToLower();
string value = row.SelectNodes("div").Last().InnerText;
string keySanitized = string.Concat(Regex.Matches(key, "[a-z]"));
switch (keySanitized)
{
case "altnames":
string[] alts = value.Split(" ; ");
for (int i = 0; i < alts.Length; i++)
altTitlesDict.Add(i.ToString(), alts[i]);
break;
case "authorsartists":
authorNames = value.Split(',');
break;
case "status":
switch (value.ToLower())
{
case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
case "completed": releaseStatus = MangaReleaseStatus.Completed; break;
}
break;
case "genres":
tags = row.SelectNodes("div").Last().Descendants("a").Select(a => a.InnerText).ToHashSet();
break;
}
}
string coverUrl = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[1]/div").Descendants("img").First()
.GetAttributes().First(a => a.Name == "src").Value;
string description = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[3]/p").InnerText;
while (description.StartsWith('\n'))
description = description.Substring(1);
uint year = (uint)DateTime.Now.Year;
string yearString = infoTable.Descendants("div").First(d => d.HasClass("updateAt"))
.InnerText.Split('-')[^1];
if(yearString.Contains("ago") == false)
{
year = uint.Parse(yearString);
}
List<Author> authors = authorNames.Select(n => new Author(n)).ToList();
List<MangaTag> mangaTags = tags.Select(n => new MangaTag(n)).ToList();
List<MangaAltTitle> altTitles = altTitlesDict.Select(x => new MangaAltTitle(x.Key, x.Value)).ToList();
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
[],
altTitles);
return (manga, authors, mangaTags, [], altTitles);
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
string requestUrl = $"https://mangakatana.com/manga/{manga.MangaId}";
// Leaving this request in to verify that the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Chapter>();
//Return Chapters ordered by Chapter-Number
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestUrl);
return chapters.Order().ToArray();
}
private List<Chapter> ParseChaptersFromHtml(Manga manga, string mangaUrl)
{
// Using HtmlWeb will include the chapters since they are loaded with js
HtmlWeb web = new();
HtmlDocument document = web.Load(mangaUrl);
List<Chapter> ret = new();
HtmlNode chapterList = document.DocumentNode.SelectSingleNode("//div[contains(@class, 'chapters')]/table/tbody");
Regex volumeRex = new(@"[0-9a-z\-\.]+\/[0-9a-z\-]*v([0-9\.]+)");
Regex chapterNumRex = new(@"[0-9a-z\-\.]+\/[0-9a-z\-]*c([0-9\.]+)");
Regex chapterNameRex = new(@"Chapter [0-9\.]+:? (.*)");
foreach (HtmlNode chapterInfo in chapterList.Descendants("tr"))
{
string fullString = chapterInfo.Descendants("a").First().InnerText;
string url = chapterInfo.Descendants("a").First()
.GetAttributeValue("href", "");
int? volumeNumber = volumeRex.IsMatch(url) ? int.Parse(volumeRex.Match(url).Groups[1].Value) : null;
if(!ChapterNumber.CanParse(chapterNumRex.Match(url).Groups[1].Value))
continue;
ChapterNumber chapterNumber = new(chapterNumRex.Match(url).Groups[1].Value);
string chapterName = chapterNameRex.Match(fullString).Groups[1].Value;
try
{
ret.Add(new Chapter(manga, url, chapterNumber, volumeNumber, chapterName));
}
catch (Exception)
{
//Skip chapters that fail to construct and continue with the rest.
}
}
return ret;
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
string requestUrl = chapter.Url;
// Leaving this in to check if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
{
return [];
}
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
return imageUrls;
}
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
{
// Images are loaded dynamically, but the urls are present in a piece of js code on the page
string js = document.DocumentNode.SelectSingleNode("//script[contains(., 'data-src')]").InnerText
.Replace("\r", "")
.Replace("\n", "")
.Replace("\t", "");
// ReSharper disable once StringLiteralTypo
string regexPat = @"(var thzq=\[')(.*)(,];function)";
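//The capture group extracts the comma-separated, quoted URL list assigned to the 'thzq' variable.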
var group = Regex.Matches(js, regexPat).First().Groups[2].Value.Replace("'", "");
var urls = group.Split(',');
return urls;
}
}


@@ -0,0 +1,188 @@
using System.Net;
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using HtmlAgilityPack;
namespace API.Schema.MangaConnectors;
public class MangaLife : MangaConnector
{
public MangaLife() : base("Manga4Life", ["en"], ["manga4life.com"])
{
this.downloadClient = new ChromiumDownloadClient();
}
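//Manga4Life relies on client-side rendering, so a Chromium-based client is used for search, metadata and chapter scraping.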
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = WebUtility.UrlEncode(publicationTitle);
string requestUrl = $"https://manga4life.com/search/?name={sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return [];
if (requestResult.htmlDocument is null)
return [];
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://manga4life.com/manga/{publicationId}");
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
Regex publicationIdRex = new(@"https:\/\/(www\.)?manga4life.com\/manga\/(.*)(\/.*)*");
string publicationId = publicationIdRex.Match(url).Groups[2].Value;
RequestResult requestResult = this.downloadClient.MakeRequest(url, RequestType.MangaInfo);
if(requestResult.htmlDocument is not null)
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
return null;
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
{
HtmlNode resultsNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']/div[last()]/div[1]/div");
if (resultsNode.Descendants("div").Count() == 1 && resultsNode.Descendants("div").First().HasClass("NoResults"))
{
return [];
}
List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
foreach (HtmlNode resultNode in resultsNode.SelectNodes("div"))
{
string url = resultNode.Descendants().First(d => d.HasClass("SeriesName")).GetAttributeValue("href", "");
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl($"https://manga4life.com{url}");
if (manga is { } x)
ret.Add(x);
}
return ret.ToArray();
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string originalLanguage = "", status = "";
Dictionary<string, string> altTitles = new(), links = new();
HashSet<string> tags = new();
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//img");
string coverUrl = posterNode.GetAttributeValue("src", "");
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//h1");
string sortName = titleNode.InnerText;
HtmlNode[] authorsNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Author(s):']/..").Descendants("a")
.ToArray();
List<string> authorNames = new();
foreach (HtmlNode authorNode in authorsNodes)
authorNames.Add(authorNode.InnerText);
List<Author> authors = authorNames.Select(a => new Author(a)).ToList();
HtmlNode[] genreNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Genre(s):']/..").Descendants("a")
.ToArray();
foreach (HtmlNode genreNode in genreNodes)
tags.Add(genreNode.InnerText);
List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
HtmlNode yearNode = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Released:']/..").Descendants("a")
.First();
uint year = uint.Parse(yearNode.InnerText);
HtmlNode[] statusNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Status:']/..").Descendants("a")
.ToArray();
foreach (HtmlNode statusNode in statusNodes)
if (statusNode.InnerText.Contains("publish", StringComparison.CurrentCultureIgnoreCase))
status = statusNode.InnerText.Split(' ')[0];
switch (status.ToLower())
{
case "cancelled": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "hiatus": releaseStatus = MangaReleaseStatus.OnHiatus; break;
case "discontinued": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "complete": releaseStatus = MangaReleaseStatus.Completed; break;
case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
}
HtmlNode descriptionNode = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Description:']/..")
.Descendants("div").First();
string description = descriptionNode.InnerText;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
[],
[]);
return (manga, authors, mangaTags, [], []);
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
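//The full chapter list is collapsed behind a 'Show All Chapters' button, so the request clicks it before the page is parsed.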
RequestResult result = downloadClient.MakeRequest($"https://manga4life.com/manga/{manga.MangaId}", RequestType.Default, clickButton:"[class*='ShowAllChapters']");
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
{
return Array.Empty<Chapter>();
}
HtmlNodeCollection chapterNodes = result.htmlDocument.DocumentNode.SelectNodes(
"//a[contains(concat(' ',normalize-space(@class),' '),' ChapterLink ')]");
string[] urls = chapterNodes.Select(node => node.GetAttributeValue("href", "")).ToArray();
Regex urlRex = new (@"-chapter-([0-9\\.]+)(-index-([0-9\\.]+))?");
List<Chapter> chapters = new();
foreach (string url in urls)
{
Match rexMatch = urlRex.Match(url);
int? volumeNumber = rexMatch.Groups[3].Success && rexMatch.Groups[3].Value.Length > 0
? int.Parse(rexMatch.Groups[3].Value)
: null;
if(!ChapterNumber.CanParse(rexMatch.Groups[1].Value))
continue;
ChapterNumber chapterNumber = new(rexMatch.Groups[1].Value);
string fullUrl = $"https://manga4life.com{url}";
fullUrl = fullUrl.Replace(Regex.Match(url,"(-page-[0-9])").Value,"");
try
{
chapters.Add(new Chapter(manga, fullUrl, chapterNumber, volumeNumber, null));
}
catch (Exception)
{
//Skip chapters that fail to construct and continue with the rest.
}
}
//Return Chapters ordered by Chapter-Number
return chapters.Order().ToArray();
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
RequestResult requestResult = this.downloadClient.MakeRequest(chapter.Url, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
{
return [];
}
HtmlDocument document = requestResult.htmlDocument;
HtmlNode gallery = document.DocumentNode.Descendants("div").First(div => div.HasClass("ImageGallery"));
HtmlNode[] images = gallery.Descendants("img").Where(img => img.HasClass("img-fluid")).ToArray();
List<string> urls = new();
foreach(HtmlNode galleryImage in images)
urls.Add(galleryImage.GetAttributeValue("src", ""));
return urls.ToArray();
}
}


@@ -0,0 +1,226 @@
using System.Globalization;
using System.Net;
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using HtmlAgilityPack;
namespace API.Schema.MangaConnectors;
public class Manganato : MangaConnector
{
public Manganato() : base("Manganato", ["en"], ["manganato.com"])
{
this.downloadClient = new HttpDownloadClient();
}
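//Search runs against manganato.com, while manga pages and chapter lists are fetched from chapmanganato.com.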
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join('_', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
string requestUrl = $"https://manganato.com/search/story/{sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 ||requestResult.htmlDocument is null)
return [];
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
{
List<HtmlNode> searchResults = document.DocumentNode.Descendants("div").Where(n => n.HasClass("search-story-item")).ToList();
List<string> urls = new();
foreach (HtmlNode mangaResult in searchResults)
{
urls.Add(mangaResult.Descendants("a").First(n => n.HasClass("item-title")).GetAttributes()
.First(a => a.Name == "href").Value);
}
List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
foreach (string url in urls)
{
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}
return ret.ToArray();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://chapmanganato.com/{publicationId}");
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
RequestResult requestResult =
downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return null;
if (requestResult.htmlDocument is null)
return null;
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
Dictionary<string, string> altTitlesDict = new();
Dictionary<string, string>? links = null;
HashSet<string> tags = new();
string[] authorNames = [];
string originalLanguage = "";
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
HtmlNode infoNode = document.DocumentNode.Descendants("div").First(d => d.HasClass("story-info-right"));
string sortName = infoNode.Descendants("h1").First().InnerText;
HtmlNode infoTable = infoNode.Descendants().First(d => d.Name == "table");
foreach (HtmlNode row in infoTable.Descendants("tr"))
{
string key = row.SelectNodes("td").First().InnerText.ToLower();
string value = row.SelectNodes("td").Last().InnerText;
string keySanitized = string.Concat(Regex.Matches(key, "[a-z]"));
switch (keySanitized)
{
case "alternative":
string[] alts = value.Split(" ; ");
for(int i = 0; i < alts.Length; i++)
altTitlesDict.Add(i.ToString(), alts[i]);
break;
case "authors":
authorNames = value.Split('-');
for (int i = 0; i < authorNames.Length; i++)
authorNames[i] = authorNames[i].Replace("\r\n", "");
break;
case "status":
switch (value.ToLower())
{
case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
case "completed": releaseStatus = MangaReleaseStatus.Completed; break;
}
break;
case "genres":
string[] genres = value.Split(" - ");
for (int i = 0; i < genres.Length; i++)
genres[i] = genres[i].Replace("\r\n", "");
tags = genres.ToHashSet();
break;
}
}
List<Author> authors = authorNames.Select(n => new Author(n)).ToList();
List<MangaTag> mangaTags = tags.Select(n => new MangaTag(n)).ToList();
List<MangaAltTitle> mangaAltTitles = altTitlesDict.Select(a => new MangaAltTitle(a.Key, a.Value)).ToList();
string coverUrl = document.DocumentNode.Descendants("span").First(s => s.HasClass("info-image")).Descendants("img").First()
.GetAttributes().First(a => a.Name == "src").Value;
string description = document.DocumentNode.Descendants("div").First(d => d.HasClass("panel-story-info-description"))
.InnerText.Replace("Description :", "");
while (description.StartsWith('\n'))
description = description.Substring(1);
string pattern = "MMM dd,yyyy HH:mm";
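//The release year is approximated from the upload timestamp of the earliest listed chapter (parsed from the 'title' attribute).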
HtmlNode? oldestChapter = document.DocumentNode
.SelectNodes("//span[contains(concat(' ',normalize-space(@class),' '),' chapter-time ')]").MinBy(
node => DateTime.ParseExact(node.GetAttributeValue("title", "Dec 31 2400, 23:59"), pattern,
CultureInfo.InvariantCulture));
uint year = (uint)DateTime.ParseExact(oldestChapter?.GetAttributeValue("title", "Dec 31 2400, 23:59")??"Dec 31 2400, 23:59", pattern,
CultureInfo.InvariantCulture).Year;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
[],
mangaAltTitles);
return (manga, authors, mangaTags, [], mangaAltTitles);
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
string requestUrl = $"https://chapmanganato.com/{manga.MangaId}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return [];
//Return Chapters ordered by Chapter-Number
if (requestResult.htmlDocument is null)
return [];
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
return chapters.Order().ToArray();
}
private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
{
List<Chapter> ret = new();
HtmlNode chapterList = document.DocumentNode.Descendants("ul").First(l => l.HasClass("row-content-chapter"));
Regex volRex = new(@"Vol\.([0-9]+).*");
Regex chapterRex = new(@"https:\/\/chapmanganato.[A-z]+\/manga-[A-z0-9]+\/chapter-([0-9\.]+)");
Regex nameRex = new(@"Chapter ([0-9]+(\.[0-9]+)*){1}:? (.*)");
foreach (HtmlNode chapterInfo in chapterList.Descendants("li"))
{
string fullString = chapterInfo.Descendants("a").First(d => d.HasClass("chapter-name")).InnerText;
string url = chapterInfo.Descendants("a").First(d => d.HasClass("chapter-name"))
.GetAttributeValue("href", "");
int? volumeNumber = volRex.IsMatch(fullString)
? int.Parse(volRex.Match(fullString).Groups[1].Value)
: null;
if(!ChapterNumber.CanParse(chapterRex.Match(url).Groups[1].Value))
continue;
ChapterNumber chapterNumber = new(chapterRex.Match(url).Groups[1].Value);
string chapterName = nameRex.Match(fullString).Groups[3].Value;
try
{
ret.Add(new Chapter(manga, url, chapterNumber, volumeNumber, chapterName));
}
catch (Exception)
{
//Skip chapters that fail to construct and continue with the rest.
}
}
ret.Reverse();
return ret;
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
string requestUrl = chapter.Url;
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 ||
requestResult.htmlDocument is null)
{
return [];
}
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
return imageUrls;
}
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
{
List<string> ret = new();
HtmlNode imageContainer =
document.DocumentNode.Descendants("div").First(i => i.HasClass("container-chapter-reader"));
foreach(HtmlNode imageNode in imageContainer.Descendants("img"))
ret.Add(imageNode.GetAttributeValue("src", ""));
return ret.ToArray();
}
}


@@ -0,0 +1,215 @@
using System.Data;
using System.Net;
using System.Text.RegularExpressions;
using System.Xml.Linq;
using API.MangaDownloadClients;
using HtmlAgilityPack;
using Newtonsoft.Json;
using Soenneker.Utils.String.NeedlemanWunsch;
namespace API.Schema.MangaConnectors;
public class Mangasee : MangaConnector
{
public Mangasee() : base("Mangasee", ["en"], ["mangasee123.com"])
{
this.downloadClient = new ChromiumDownloadClient();
}
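//Search results come from the site's _search.php JSON endpoint and are ranked with Needleman-Wunsch string similarity; chapter lists are read from per-manga RSS feeds.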
private struct SearchResult
{
public string i { get; set; }
public string s { get; set; }
public string[] a { get; set; }
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
string requestUrl = "https://mangasee123.com/_search.php";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
{
return [];
}
try
{
SearchResult[] searchResults = JsonConvert.DeserializeObject<SearchResult[]>(requestResult.htmlDocument!.DocumentNode.InnerText) ??
throw new NoNullAllowedException();
SearchResult[] filteredResults = FilteredResults(publicationTitle, searchResults);
string[] urls = filteredResults.Select(result => $"https://mangasee123.com/manga/{result.i}").ToArray();
List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> searchResultManga = new();
foreach (string url in urls)
{
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? newManga = GetMangaFromUrl(url);
if(newManga is { } manga)
searchResultManga.Add(manga);
}
return searchResultManga.ToArray();
}
catch (NoNullAllowedException)
{
return [];
}
}
private readonly string[] _filterWords = {"a", "the", "of", "as", "to", "no", "for", "on", "with", "be", "and", "in", "wa", "at", "be", "ni"};
private string ToFilteredString(string input) => string.Join(' ', input.ToLower().Split(' ').Where(word => _filterWords.Contains(word) == false));
private SearchResult[] FilteredResults(string publicationTitle, SearchResult[] unfilteredSearchResults)
{
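//Score every result (title and alt-titles) against the query and keep the ten entries with the lowest aggregate score.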
Dictionary<SearchResult, int> similarity = new();
foreach (SearchResult sr in unfilteredSearchResults)
{
List<int> scores = new();
string filteredPublicationString = ToFilteredString(publicationTitle);
string filteredSString = ToFilteredString(sr.s);
scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(filteredSString, filteredPublicationString));
foreach (string srA in sr.a)
{
string filteredAString = ToFilteredString(srA);
scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(filteredAString, filteredPublicationString));
}
similarity.Add(sr, scores.Sum() / scores.Count);
}
List<SearchResult> ret = similarity.OrderBy(s => s.Value).Take(10).Select(s => s.Key).ToList();
return ret.ToArray();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://mangasee123.com/manga/{publicationId}");
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
Regex publicationIdRex = new(@"https:\/\/mangasee123.com\/manga\/(.*)(\/.*)*");
string publicationId = publicationIdRex.Match(url).Groups[1].Value;
RequestResult requestResult = this.downloadClient.MakeRequest(url, RequestType.MangaInfo);
if((int)requestResult.statusCode < 300 && (int)requestResult.statusCode >= 200 && requestResult.htmlDocument is not null)
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
return null;
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string originalLanguage = "", status = "";
Dictionary<string, string> altTitles = new(), links = new();
HashSet<string> tags = new();
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//img");
string coverUrl = posterNode.GetAttributeValue("src", "");
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//h1");
string sortName = titleNode.InnerText;
HtmlNode[] authorsNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Author(s):']/..").Descendants("a")
.ToArray();
List<string> authorNames = new();
foreach (HtmlNode authorNode in authorsNodes)
authorNames.Add(authorNode.InnerText);
List<Author> authors = authorNames.Select(a => new Author(a)).ToList();
HtmlNode[] genreNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Genre(s):']/..").Descendants("a")
.ToArray();
foreach (HtmlNode genreNode in genreNodes)
tags.Add(genreNode.InnerText);
List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
HtmlNode yearNode = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Released:']/..").Descendants("a")
.First();
uint year = uint.Parse(yearNode.InnerText);
HtmlNode[] statusNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Status:']/..").Descendants("a")
.ToArray();
foreach (HtmlNode statusNode in statusNodes)
if (statusNode.InnerText.Contains("publish", StringComparison.CurrentCultureIgnoreCase))
status = statusNode.InnerText.Split(' ')[0];
switch (status.ToLower())
{
case "cancelled": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "hiatus": releaseStatus = MangaReleaseStatus.OnHiatus; break;
case "discontinued": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "complete": releaseStatus = MangaReleaseStatus.Completed; break;
case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
}
HtmlNode descriptionNode = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Description:']/..")
.Descendants("div").First();
string description = descriptionNode.InnerText;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
[],
[]);
return (manga, authors, mangaTags, [], []);
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
try
{
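//Chapters are taken from the manga's RSS feed; volume and chapter numbers are parsed out of each item's link.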
XDocument doc = XDocument.Load($"https://mangasee123.com/rss/{manga.MangaId}.xml");
XElement[] chapterItems = doc.Descendants("item").ToArray();
List<Chapter> chapters = new();
Regex chVolRex = new(@".*chapter-([0-9\.]+)(?:-index-([0-9\.]+))?.*");
foreach (XElement chapter in chapterItems)
{
string url = chapter.Descendants("link").First().Value;
Match m = chVolRex.Match(url);
int? volumeNumber = m.Groups[2].Success ? int.Parse(m.Groups[2].Value) : null;
if(!ChapterNumber.CanParse(m.Groups[1].Value))
continue;
ChapterNumber chapterNumber = new(m.Groups[1].Value);
string chapterUrl = Regex.Replace(url, @"-page-[0-9]+(\.html)", ".html");
try
{
chapters.Add(new Chapter(manga, chapterUrl,chapterNumber, volumeNumber, null));
}
catch (Exception)
{
//Skip chapters that fail to construct and continue with the rest.
}
}
//Return Chapters ordered by Chapter-Number
return chapters.Order().ToArray();
}
catch (HttpRequestException)
{
//The RSS feed could not be loaded; report no chapters.
return [];
}
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
RequestResult requestResult = this.downloadClient.MakeRequest(chapter.Url, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
{
return [];
}
HtmlDocument document = requestResult.htmlDocument;
HtmlNode gallery = document.DocumentNode.Descendants("div").First(div => div.HasClass("ImageGallery"));
HtmlNode[] images = gallery.Descendants("img").Where(img => img.HasClass("img-fluid")).ToArray();
List<string> urls = new();
foreach(HtmlNode galleryImage in images)
urls.Add(galleryImage.GetAttributeValue("src", ""));
return urls.ToArray();
}
}


@@ -0,0 +1,225 @@
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using HtmlAgilityPack;
namespace API.Schema.MangaConnectors;
public class Mangaworld : MangaConnector
{
public Mangaworld() : base("Mangaworld", ["it"], ["www.mangaworld.ac"])
{
this.downloadClient = new ChromiumDownloadClient();
}
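//Mangaworld is an Italian site; metadata labels are matched in Italian, and chapters may be grouped by volume or listed flat.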
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
string requestUrl = $"https://www.mangaworld.ac/archive?keyword={sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return [];
if (requestResult.htmlDocument is null)
return [];
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
{
if (!document.DocumentNode.SelectSingleNode("//div[@class='comics-grid']").ChildNodes
.Any(node => node.HasClass("entry")))
return [];
List<string> urls = document.DocumentNode
.SelectNodes(
"//div[@class='comics-grid']//div[@class='entry']//a[contains(concat(' ',normalize-space(@class),' '),'thumb')]")
.Select(thumb => thumb.GetAttributeValue("href", "")).ToList();
List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
foreach (string url in urls)
{
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}
return ret.ToArray();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://www.mangaworld.ac/manga/{publicationId}");
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
RequestResult requestResult =
downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return null;
if (requestResult.htmlDocument is null)
return null;
Regex idRex = new (@"https:\/\/www\.mangaworld\.[a-z]{0,63}\/manga\/([0-9]+\/[0-9A-z\-]+).*");
string id = idRex.Match(url).Groups[1].Value;
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, id, url);
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
Dictionary<string, string> altTitlesDict = new();
string originalLanguage = "";
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
HtmlNode infoNode = document.DocumentNode.Descendants("div").First(d => d.HasClass("info"));
string sortName = infoNode.Descendants("h1").First().InnerText;
HtmlNode metadata = infoNode.Descendants().First(d => d.HasClass("meta-data"));
HtmlNode altTitlesNode = metadata.SelectSingleNode("//span[text()='Titoli alternativi: ' or text()='Titolo alternativo: ']/..").ChildNodes[1];
string[] alts = altTitlesNode.InnerText.Split(", ");
for(int i = 0; i < alts.Length; i++)
altTitlesDict.Add(i.ToString(), alts[i]);
List<MangaAltTitle> altTitles = altTitlesDict.Select(a => new MangaAltTitle(a.Key, a.Value)).ToList();
HtmlNode genresNode =
metadata.SelectSingleNode("//span[text()='Generi: ' or text()='Genero: ']/..");
HashSet<string> tags = genresNode.SelectNodes("a").Select(node => node.InnerText).ToHashSet();
List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
HtmlNode authorsNode =
metadata.SelectSingleNode("//span[text()='Autore: ' or text()='Autori: ']/..");
string[] authorNames = authorsNode.SelectNodes("a").Select(node => node.InnerText).ToArray();
List<Author> authors = authorNames.Select(n => new Author(n)).ToList();
string status = metadata.SelectSingleNode("//span[text()='Stato: ']/..").SelectNodes("a").First().InnerText;
// ReSharper disable 5 times StringLiteralTypo
switch (status.ToLower())
{
case "cancellato": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "in pausa": releaseStatus = MangaReleaseStatus.OnHiatus; break;
case "droppato": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "finito": releaseStatus = MangaReleaseStatus.Completed; break;
case "in corso": releaseStatus = MangaReleaseStatus.Continuing; break;
}
string coverUrl = document.DocumentNode.SelectSingleNode("//img[@class='rounded']").GetAttributeValue("src", "");
string description = document.DocumentNode.SelectSingleNode("//div[@id='noidungm']").InnerText;
string yearString = metadata.SelectSingleNode("//span[text()='Anno di uscita: ']/..").SelectNodes("a").First().InnerText;
uint year = uint.Parse(yearString);
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
[],
altTitles);
return (manga, authors, mangaTags, [], altTitles);
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
string requestUrl = $"https://www.mangaworld.ac/manga/{manga.MangaId}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
return [];
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
return chapters.Order().ToArray();
}
private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
{
List<Chapter> ret = new();
HtmlNode chaptersWrapper =
document.DocumentNode.SelectSingleNode(
"//div[contains(concat(' ',normalize-space(@class),' '),'chapters-wrapper')]");
Regex volumeRex = new(@"[Vv]olume ([0-9]+).*");
Regex chapterRex = new(@"[Cc]apitolo ([0-9]+(?:\.[0-9]+)?).*");
Regex idRex = new(@".*\/read\/([a-z0-9]+)(?:[?\/].*)?");
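//Handle both layouts: chapters grouped under 'volume-element' blocks, or a flat list of 'chapter' entries.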
if (chaptersWrapper.Descendants("div").Any(descendant => descendant.HasClass("volume-element")))
{
foreach (HtmlNode volNode in document.DocumentNode.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),'volume-element')]"))
{
string volumeStr = volumeRex.Match(volNode.SelectNodes("div").First(node => node.HasClass("volume")).SelectSingleNode("p").InnerText).Groups[1].Value;
int volume = int.Parse(volumeStr);
foreach (HtmlNode chNode in volNode.SelectNodes("div").First(node => node.HasClass("volume-chapters")).SelectNodes("div"))
{
string numberStr = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
if(!ChapterNumber.CanParse(numberStr))
continue;
ChapterNumber chapterNumber = new(numberStr);
string url = chNode.SelectSingleNode("a").GetAttributeValue("href", "");
string id = idRex.Match(chNode.SelectSingleNode("a").GetAttributeValue("href", "")).Groups[1].Value;
try
{
ret.Add(new Chapter(manga, url, chapterNumber, volume, null));
}
catch (Exception)
{
//Skip chapters that fail to construct and continue with the rest.
}
}
}
}
else
{
foreach (HtmlNode chNode in chaptersWrapper.SelectNodes("div").Where(node => node.HasClass("chapter")))
{
string numberStr = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
if(!ChapterNumber.CanParse(numberStr))
continue;
ChapterNumber chapterNumber = new(numberStr);
string url = chNode.SelectSingleNode("a").GetAttributeValue("href", "");
string id = idRex.Match(chNode.SelectSingleNode("a").GetAttributeValue("href", "")).Groups[1].Value;
try
{
ret.Add(new Chapter(manga, url, chapterNumber, null, null));
}
catch (Exception)
{
//Skip chapters that fail to construct and continue with the rest.
}
}
}
ret.Reverse();
return ret;
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
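//Request the chapter in list mode ('?style=list') so that all page images are present in a single document.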
string requestUrl = $"{chapter.Url}?style=list";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
{
return [];
}
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
return imageUrls;
}
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
{
List<string> ret = new();
HtmlNode imageContainer =
document.DocumentNode.SelectSingleNode("//div[@id='page']");
foreach(HtmlNode imageNode in imageContainer.Descendants("img"))
ret.Add(imageNode.GetAttributeValue("src", ""));
return ret.ToArray();
}
}


@@ -0,0 +1,181 @@
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using HtmlAgilityPack;
namespace API.Schema.MangaConnectors;
public class ManhuaPlus : MangaConnector
{
public ManhuaPlus() : base("ManhuaPlus", ["en"], ["manhuaplus.org"])
{
this.downloadClient = new ChromiumDownloadClient();
}
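//ManhuaPlus markup offers few stable hooks, so some selectors below fall back to positional XPath.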
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
string requestUrl = $"https://manhuaplus.org/search?keyword={sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
return [];
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
{
if (document.DocumentNode.SelectSingleNode("//h1/../..").ChildNodes//Anchor on the <h1>'s grandparent to reach the results container.
.Any(node => node.InnerText.Contains("No manga found")))
return [];
List<string> urls = document.DocumentNode
.SelectNodes("//h1/../..//a[contains(@href, 'https://manhuaplus.org/manga/') and contains(concat(' ',normalize-space(@class),' '),' clamp ') and not(contains(@href, '/chapter'))]")
.Select(mangaNode => mangaNode.GetAttributeValue("href", "")).ToList();
List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
foreach (string url in urls)
{
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}
return ret.ToArray();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://manhuaplus.org/manga/{publicationId}");
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
Regex publicationIdRex = new(@"https:\/\/manhuaplus.org\/manga\/(.*)(\/.*)*");
string publicationId = publicationIdRex.Match(url).Groups[1].Value;
RequestResult requestResult = this.downloadClient.MakeRequest(url, RequestType.MangaInfo);
if((int)requestResult.statusCode < 300 && (int)requestResult.statusCode >= 200 && requestResult.htmlDocument is not null && requestResult.redirectedToUrl != "https://manhuaplus.org/home") //When a manga doesn't exist, the site redirects to the home page
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
return null;
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string originalLanguage = "", status = "";
Dictionary<string, string> altTitles = new(), links = new();
HashSet<string> tags = new();
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
HtmlNode posterNode = document.DocumentNode.SelectSingleNode("/html/body/main/div/div/div[2]/div[1]/figure/a/img");//BRUH
Regex posterRex = new(@".*(\/uploads/covers/[a-zA-Z0-9\-\._\~\!\$\&\'\(\)\*\+\,\;\=\:\@]+).*");
string coverUrl = $"https://manhuaplus.org/{posterRex.Match(posterNode.GetAttributeValue("src", "")).Groups[1].Value}";
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//h1");
string sortName = titleNode.InnerText.Replace("\n", "");
List<string> authorNames = new();
try
{
HtmlNode[] authorsNodes = document.DocumentNode
.SelectNodes("//a[contains(@href, 'https://manhuaplus.org/authors/')]")
.ToArray();
foreach (HtmlNode authorNode in authorsNodes)
authorNames.Add(authorNode.InnerText);
}
catch (ArgumentNullException)
{
//No author links found on the page; leave the author list empty.
}
List<Author> authors = authorNames.Select(a => new Author(a)).ToList();
try
{
HtmlNode[] genreNodes = document.DocumentNode
.SelectNodes("//a[contains(@href, 'https://manhuaplus.org/genres/')]").ToArray();
foreach (HtmlNode genreNode in genreNodes)
tags.Add(genreNode.InnerText.Replace("\n", ""));
}
catch (ArgumentNullException)
{
//No genre links found on the page; leave the tag set empty.
}
List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
Regex yearRex = new(@"(?:[0-9]{1,2}\/){2}([0-9]{2,4}) [0-9]{1,2}:[0-9]{1,2}");
HtmlNode yearNode = document.DocumentNode.SelectSingleNode("//aside//i[contains(concat(' ',normalize-space(@class),' '),' fa-clock ')]/../span");
Match match = yearRex.Match(yearNode.InnerText);
uint year = match.Success && match.Groups[1].Success ? uint.Parse(match.Groups[1].Value) : 0;
status = document.DocumentNode.SelectSingleNode("//aside//i[contains(concat(' ',normalize-space(@class),' '),' fa-rss ')]/../span").InnerText.Replace("\n", "");
switch (status.ToLower())
{
case "cancelled": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "hiatus": releaseStatus = MangaReleaseStatus.OnHiatus; break;
case "discontinued": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "complete": releaseStatus = MangaReleaseStatus.Completed; break;
case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
}
HtmlNode descriptionNode = document.DocumentNode
.SelectSingleNode("//div[@id='syn-target']");
string description = descriptionNode.InnerText;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
[],
[]);
return (manga, authors, mangaTags, [], []);
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
RequestResult result = downloadClient.MakeRequest($"https://manhuaplus.org/manga/{manga.MangaId}", RequestType.Default);
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
{
return Array.Empty<Chapter>();
}
HtmlNodeCollection chapterNodes = result.htmlDocument.DocumentNode.SelectNodes("//li[contains(concat(' ',normalize-space(@class),' '),' chapter ')]//a");
string[] urls = chapterNodes.Select(node => node.GetAttributeValue("href", "")).ToArray();
Regex urlRex = new (@".*\/chapter-([0-9\-]+).*");
List<Chapter> chapters = new();
foreach (string url in urls)
{
Match rexMatch = urlRex.Match(url);
if(!ChapterNumber.CanParse(rexMatch.Groups[1].Value))
continue;
ChapterNumber chapterNumber = new(rexMatch.Groups[1].Value);
string fullUrl = url;
try
{
chapters.Add(new Chapter(manga, fullUrl, chapterNumber, null, null));
}
catch (Exception)
{
//Skip chapters that fail to construct and continue with the rest.
}
}
//Return Chapters ordered by Chapter-Number
return chapters.Order().ToArray();
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
RequestResult requestResult = this.downloadClient.MakeRequest(chapter.Url, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
{
return [];
}
HtmlDocument document = requestResult.htmlDocument;
HtmlNode[] images = document.DocumentNode.SelectNodes("//a[contains(concat(' ',normalize-space(@class),' '),' readImg ')]/img").ToArray();
List<string> urls = images.Select(node => node.GetAttributeValue("src", "")).ToList();
return urls.ToArray();
}
}


@@ -0,0 +1,228 @@
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using HtmlAgilityPack;
using Soenneker.Utils.String.NeedlemanWunsch;
namespace API.Schema.MangaConnectors;
public class Weebcentral : MangaConnector
{
private readonly string _baseUrl = "https://weebcentral.com";
private readonly string[] _filterWords =
{ "a", "the", "of", "as", "to", "no", "for", "on", "with", "be", "and", "in", "wa", "at", "be", "ni" };
public Weebcentral() : base("Weebcentral", ["en"], ["https://weebcentral.com"])
{
downloadClient = new ChromiumDownloadClient();
}
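//Search results and chapter lists are fetched as HTML fragments from dedicated endpoints (/search/data and /series/{id}/full-chapter-list).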
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
const int limit = 32; //How many values we want returned at once
var offset = 0; //"Page"
var requestUrl =
$"{_baseUrl}/search/data?limit={limit}&offset={offset}&text={publicationTitle}&sort=Best+Match&order=Ascending&official=Any&display_mode=Minimal%20Display";
var requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 ||
requestResult.htmlDocument == null)
{
return [];
}
var publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
{
if (document.DocumentNode.SelectNodes("//article") == null)
return [];
var urls = document.DocumentNode.SelectNodes("/html/body/article/a[@class='link link-hover']")
.Select(elem => elem.GetAttributeValue("href", "")).ToList();
List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
foreach (var url in urls)
{
var manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}
return ret.ToArray();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
Regex publicationIdRex = new(@"https:\/\/weebcentral\.com\/series\/(\w*)\/(.*)");
var publicationId = publicationIdRex.Match(url).Groups[1].Value;
var requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 300 && (int)requestResult.statusCode >= 200 &&
requestResult.htmlDocument is not null)
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
return null;
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
var posterNode =
document.DocumentNode.SelectSingleNode("//section[@class='flex items-center justify-center']/picture/img");
var coverUrl = posterNode?.GetAttributeValue("src", "") ?? "";
var titleNode = document.DocumentNode.SelectSingleNode("//section/h1");
var sortName = titleNode?.InnerText ?? "Undefined";
HtmlNode[] authorsNodes =
document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Author(s): ']/span")?.ToArray() ?? [];
var authorNames = authorsNodes.Select(n => n.InnerText).ToList();
List<Author> authors = authorNames.Select(n => new Author(n)).ToList();
HtmlNode[] genreNodes =
document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Tags(s): ']/span")?.ToArray() ?? [];
HashSet<string> tags = genreNodes.Select(n => n.InnerText).ToHashSet();
List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
var statusNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Status: ']/a");
var status = statusNode?.InnerText ?? "";
var releaseStatus = MangaReleaseStatus.Unreleased;
switch (status.ToLower())
{
case "cancelled": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "hiatus": releaseStatus = MangaReleaseStatus.OnHiatus; break;
case "complete": releaseStatus = MangaReleaseStatus.Completed; break;
case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
}
var yearNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Released: ']/span");
var year = uint.Parse(yearNode?.InnerText ?? "0");
var descriptionNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Description']/p");
var description = descriptionNode?.InnerText ?? "Undefined";
HtmlNode[] altTitleNodes = document.DocumentNode
.SelectNodes("//ul/li[strong/text() = 'Associated Name(s)']/ul/li")?.ToArray() ?? [];
Dictionary<string, string> altTitlesDict = new(), links = new();
for (var i = 0; i < altTitleNodes.Length; i++)
altTitlesDict.Add(i.ToString(), altTitleNodes[i].InnerText);
List<MangaAltTitle> altTitles = altTitlesDict.Select(a => new MangaAltTitle(a.Key, a.Value)).ToList();
var originalLanguage = "";
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
[],
altTitles);
return (manga, authors, mangaTags, [], altTitles);
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://weebcentral.com/series/{publicationId}");
}
private string ToFilteredString(string input)
{
return string.Join(' ', input.ToLower().Split(' ').Where(word => _filterWords.Contains(word) == false));
}
private SearchResult[] FilteredResults(string publicationTitle, SearchResult[] unfilteredSearchResults)
{
Dictionary<SearchResult, int> similarity = new();
foreach (var sr in unfilteredSearchResults)
{
List<int> scores = new();
var filteredPublicationString = ToFilteredString(publicationTitle);
var filteredSString = ToFilteredString(sr.s);
scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(filteredSString, filteredPublicationString));
foreach (var srA in sr.a)
{
var filteredAString = ToFilteredString(srA);
scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(filteredAString, filteredPublicationString));
}
similarity.Add(sr, scores.Sum() / scores.Count);
}
var ret = similarity.OrderBy(s => s.Value).Take(10).Select(s => s.Key).ToList();
return ret.ToArray();
}
public override Chapter[] GetChapters(Manga manga, string language = "en")
{
var requestUrl = $"{_baseUrl}/series/{manga.MangaId}/full-chapter-list";
var requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Chapter>();
//Return Chapters ordered by Chapter-Number
if (requestResult.htmlDocument is null)
return Array.Empty<Chapter>();
var chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
return chapters.Order().ToArray();
}
private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
{
var chaptersWrapper = document.DocumentNode.SelectSingleNode("/html/body");
Regex chapterRex = new(@".* (\d+)");
Regex idRex = new(@"https:\/\/weebcentral\.com\/chapters\/(\w*)");
var ret = chaptersWrapper.Descendants("a").Select(elem =>
{
var url = elem.GetAttributeValue("href", "") ?? "Undefined";
if (!url.StartsWith("https://") && !url.StartsWith("http://"))
return new Chapter(manga, "undefined", new ChapterNumber(-1), null, null);
var idMatch = idRex.Match(url);
var id = idMatch.Success ? idMatch.Groups[1].Value : null;
var chapterNode = elem.SelectSingleNode("span[@class='grow flex items-center gap-2']/span")?.InnerText ??
"Undefined";
var chapterNumberMatch = chapterRex.Match(chapterNode);
if(!chapterNumberMatch.Success || !ChapterNumber.CanParse(chapterNumberMatch.Groups[1].Value))
return new Chapter(manga, "undefined", new ChapterNumber(-1), null, null);
ChapterNumber chapterNumber = new(chapterNumberMatch.Groups[1].Value);
return new Chapter(manga, url, chapterNumber, null, null);
}).Where(elem => !(elem.ChapterNumber < ChapterNumber.Zero) && elem.Url != "undefined").ToList(); //keep only chapters with a valid (non-negative) number and a resolvable URL
ret.Reverse();
return ret;
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
var requestResult = downloadClient.MakeRequest(chapter.Url, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 ||requestResult.htmlDocument is null)
{
return [];
}
var document = requestResult.htmlDocument;
var imageNodes =
document.DocumentNode.SelectNodes($"//section[@hx-get='{chapter.Url}/images']/img")?.ToArray() ?? [];
var urls = imageNodes.Select(imgNode => imgNode.GetAttributeValue("src", "")).ToArray();
return urls;
}
private struct SearchResult
{
public string i { get; set; }
public string s { get; set; }
public string[] a { get; set; }
}
}
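A minimal usage sketch of this connector, assuming an already-constructed Weebcentral instance named `connector` and an illustrative (placeholder) series URL; this is not part of the diff itself:
// Hedged sketch: resolve a series by URL, then list its chapters via the methods shown above.
var result = connector.GetMangaFromUrl("https://weebcentral.com/series/<series-id>/<slug>");
if (result is not null)
{
    (Manga manga, _, _, _, _) = result.Value; //only the Manga itself is needed here
    Chapter[] chapters = connector.GetChapters(manga, "en");
    Console.WriteLine($"{manga.MangaId}: {chapters.Length} chapters found.");
}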

View File

@ -0,0 +1,10 @@
namespace API.Schema;
public enum MangaReleaseStatus : byte
{
Continuing = 0,
Completed = 1,
OnHiatus = 2,
Cancelled = 3,
Unreleased = 4
}

9
API/Schema/MangaTag.cs Normal file
View File

@ -0,0 +1,9 @@
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
[PrimaryKey("Tag")]
public class MangaTag(string tag)
{
public string Tag { get; init; } = tag;
}

View File

@ -0,0 +1,21 @@
using System.ComponentModel.DataAnnotations;
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
[PrimaryKey("NotificationId")]
public class Notification(string title, string message = "", NotificationUrgency urgency = NotificationUrgency.Normal, DateTime? date = null)
{
[MaxLength(64)]
public string NotificationId { get; init; } = TokenGen.CreateToken("Notification", 64);
public NotificationUrgency Urgency { get; init; } = urgency;
public string Title { get; init; } = title;
public string Message { get; init; } = message;
public DateTime Date { get; init; } = date ?? DateTime.UtcNow;
public Notification() : this("") { }
}

View File

@ -0,0 +1,42 @@
using System.Text;
using Newtonsoft.Json;
namespace API.Schema.NotificationConnectors;
public class Gotify(string endpoint, string appToken)
: NotificationConnector(TokenGen.CreateToken(typeof(Gotify), 64), NotificationConnectorType.Gotify)
{
public string Endpoint { get; init; } = endpoint;
public string AppToken { get; init; } = appToken;
public override void SendNotification(string title, string notificationText)
{
MessageData message = new(title, notificationText);
HttpRequestMessage request = new(HttpMethod.Post, $"{endpoint}/message");
request.Headers.Add("X-Gotify-Key", this.AppToken);
request.Content = new StringContent(JsonConvert.SerializeObject(message, Formatting.None), Encoding.UTF8, "application/json");
HttpResponseMessage response = _client.Send(request);
if (!response.IsSuccessStatusCode)
{
StreamReader sr = new (response.Content.ReadAsStream());
//TODO
}
}
private class MessageData
{
// ReSharper disable four times UnusedAutoPropertyAccessor.Local
public string message { get; }
public long priority { get; }
public string title { get; }
public Dictionary<string, object> extras { get; }
public MessageData(string title, string message)
{
this.title = title;
this.message = message;
this.extras = new();
this.priority = 4;
}
}
}
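For context, a hedged usage sketch of this connector; the endpoint and token values are placeholders, not taken from this diff:
// Hedged sketch: create a Gotify connector and push a message to its /message endpoint.
NotificationConnector gotify = new Gotify("https://gotify.example.com", "<app-token>");
gotify.SendNotification("New chapter", "A new chapter was downloaded.");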

View File

@ -0,0 +1,35 @@
using System.Text;
using Newtonsoft.Json;
namespace API.Schema.NotificationConnectors;
public class Lunasea(string id)
: NotificationConnector(TokenGen.CreateToken(typeof(Lunasea), 64), NotificationConnectorType.LunaSea)
{
public string Id { get; init; } = id;
public override void SendNotification(string title, string notificationText)
{
MessageData message = new(title, notificationText);
HttpRequestMessage request = new(HttpMethod.Post, $"https://notify.lunasea.app/v1/custom/{id}");
request.Content = new StringContent(JsonConvert.SerializeObject(message, Formatting.None), Encoding.UTF8, "application/json");
HttpResponseMessage response = _client.Send(request);
if (!response.IsSuccessStatusCode)
{
StreamReader sr = new (response.Content.ReadAsStream());
//TODO
}
}
private class MessageData
{
// ReSharper disable twice UnusedAutoPropertyAccessor.Local
public string title { get; }
public string body { get; }
public MessageData(string title, string body)
{
this.title = title;
this.body = body;
}
}
}

View File

@ -0,0 +1,20 @@
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using Microsoft.EntityFrameworkCore;
using Newtonsoft.Json;
namespace API.Schema.NotificationConnectors;
[PrimaryKey("NotificationConnectorId")]
public abstract class NotificationConnector(string notificationConnectorId, NotificationConnectorType notificationConnectorType)
{
[MaxLength(64)]
public string NotificationConnectorId { get; } = notificationConnectorId;
public NotificationConnectorType NotificationConnectorType { get; init; } = notificationConnectorType;
[JsonIgnore]
[NotMapped]
protected readonly HttpClient _client = new();
public abstract void SendNotification(string title, string notificationText);
}

View File

@ -0,0 +1,9 @@
namespace API.Schema.NotificationConnectors;
public enum NotificationConnectorType : byte
{
Gotify = 0,
LunaSea = 1,
Ntfy = 2
}

View File

@ -0,0 +1,77 @@
using System.Text;
using System.Text.RegularExpressions;
using Newtonsoft.Json;
namespace API.Schema.NotificationConnectors;
public class Ntfy : NotificationConnector
{
public string Endpoint { get; init; }
public string Auth { get; init; }
public string Topic { get; init; }
public Ntfy(string endpoint, string auth, string topic): base(TokenGen.CreateToken(typeof(Ntfy), 64), NotificationConnectorType.Ntfy)
{
Endpoint = endpoint;
Auth = auth;
Topic = topic;
}
public Ntfy(string endpoint, string username, string password, string? topic = null) :
this(EndpointAndTopicFromUrl(endpoint)[0], AuthFromUsernamePassword(username, password), topic ?? EndpointAndTopicFromUrl(endpoint)[1])
{
}
private static string AuthFromUsernamePassword(string username, string password)
{
string authHeader = "Basic " + Convert.ToBase64String(Encoding.UTF8.GetBytes($"{username}:{password}"));
string authParam = Convert.ToBase64String(Encoding.UTF8.GetBytes(authHeader)).Replace("=","");
return authParam;
}
private static string[] EndpointAndTopicFromUrl(string url)
{
string[] ret = new string[2];
Regex rootUriRex = new(@"(https?:\/\/[a-zA-Z0-9-\.]+\.[a-zA-Z0-9]+)(?:\/([a-zA-Z0-9-\.]+))?.*");
Match match = rootUriRex.Match(url);
if(!match.Success)
throw new ArgumentException($"Error getting URI from provided endpoint-URI: {url}");
ret[0] = match.Groups[1].Value;
ret[1] = match.Groups[2].Success && match.Groups[2].Value.Length > 0 ? match.Groups[2].Value : "tranga";
return ret;
}
public override void SendNotification(string title, string notificationText)
{
MessageData message = new(title, Topic, notificationText);
HttpRequestMessage request = new(HttpMethod.Post, $"{this.Endpoint}?auth={this.Auth}");
request.Content = new StringContent(JsonConvert.SerializeObject(message, Formatting.None), Encoding.UTF8, "application/json");
HttpResponseMessage response = _client.Send(request);
if (!response.IsSuccessStatusCode)
{
StreamReader sr = new (response.Content.ReadAsStream());
//TODO
}
}
private class MessageData
{
// ReSharper disable UnusedAutoPropertyAccessor.Local
public string topic { get; }
public string title { get; }
public string message { get; }
public int priority { get; }
public MessageData(string title, string topic, string message)
{
this.topic = topic;
this.title = title;
this.message = message;
this.priority = 3;
}
}
}
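A hedged sketch of the URL-based constructor above (all values are placeholders): the endpoint URL is split into server and topic, and the credentials are encoded into the auth query parameter used by SendNotification.
// Hedged sketch: "https://ntfy.example.com/manga" becomes endpoint "https://ntfy.example.com"
// and topic "manga"; username/password become the base64-encoded "auth" parameter.
NotificationConnector ntfy = new Ntfy("https://ntfy.example.com/manga", "<username>", "<password>");
ntfy.SendNotification("New chapter", "A new chapter was downloaded.");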

View File

@ -0,0 +1,8 @@
namespace API.Schema;
public enum NotificationUrgency : byte
{
Low = 1,
Normal = 3,
High = 5
}

109
API/Schema/PgsqlContext.cs Normal file
View File

@ -0,0 +1,109 @@
using API.Schema.Jobs;
using API.Schema.LibraryConnectors;
using API.Schema.MangaConnectors;
using API.Schema.NotificationConnectors;
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
public class PgsqlContext(DbContextOptions<PgsqlContext> options) : DbContext(options)
{
public DbSet<Job> Jobs { get; set; }
public DbSet<MangaConnector> MangaConnectors { get; set; }
public DbSet<Manga> Manga { get; set; }
public DbSet<Chapter> Chapters { get; set; }
public DbSet<Author> Authors { get; set; }
public DbSet<Link> Link { get; set; }
public DbSet<MangaTag> Tags { get; set; }
public DbSet<MangaAltTitle> AltTitles { get; set; }
public DbSet<LibraryConnector> LibraryConnectors { get; set; }
public DbSet<NotificationConnector> NotificationConnectors { get; set; }
public DbSet<Notification> Notifications { get; set; }
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.Entity<MangaConnector>()
.HasDiscriminator(c => c.Name)
.HasValue<AsuraToon>("AsuraToon")
.HasValue<Bato>("Bato")
.HasValue<MangaHere>("MangaHere")
.HasValue<MangaKatana>("MangaKatana")
.HasValue<MangaLife>("Manga4Life")
.HasValue<Manganato>("Manganato")
.HasValue<Mangasee>("Mangasee")
.HasValue<Mangaworld>("Mangaworld")
.HasValue<ManhuaPlus>("ManhuaPlus")
.HasValue<Weebcentral>("Weebcentral")
.HasValue<MangaDex>("MangaDex");
modelBuilder.Entity<LibraryConnector>()
.HasDiscriminator<LibraryType>(l => l.LibraryType)
.HasValue<Komga>(LibraryType.Komga)
.HasValue<Kavita>(LibraryType.Kavita);
modelBuilder.Entity<NotificationConnector>()
.HasDiscriminator<NotificationConnectorType>(n => n.NotificationConnectorType)
.HasValue<Gotify>(NotificationConnectorType.Gotify)
.HasValue<Ntfy>(NotificationConnectorType.Ntfy)
.HasValue<Lunasea>(NotificationConnectorType.LunaSea);
modelBuilder.Entity<Job>()
.HasDiscriminator<JobType>(j => j.JobType)
.HasValue<MoveFileOrFolderJob>(JobType.MoveFileOrFolderJob)
.HasValue<DownloadNewChaptersJob>(JobType.DownloadNewChaptersJob)
.HasValue<DownloadSingleChapterJob>(JobType.DownloadSingleChapterJob)
.HasValue<UpdateMetadataJob>(JobType.UpdateMetaDataJob);
modelBuilder.Entity<Job>()
.HasOne<Job>(j => j.ParentJob)
.WithMany()
.HasForeignKey(j => j.ParentJobId);
modelBuilder.Entity<Job>()
.HasMany<Job>(j => j.DependsOnJobs);
modelBuilder.Entity<DownloadNewChaptersJob>()
.Navigation(dncj => dncj.Manga)
.AutoInclude();
modelBuilder.Entity<DownloadSingleChapterJob>()
.Navigation(dscj => dscj.Chapter)
.AutoInclude();
modelBuilder.Entity<UpdateMetadataJob>()
.Navigation(umj => umj.Manga)
.AutoInclude();
modelBuilder.Entity<Manga>()
.HasOne<MangaConnector>(m => m.MangaConnector)
.WithMany()
.HasForeignKey(m => m.MangaConnectorId);
modelBuilder.Entity<Manga>()
.Navigation(m => m.MangaConnector)
.AutoInclude();
modelBuilder.Entity<Manga>()
.HasMany<Author>(m => m.Authors)
.WithMany();
modelBuilder.Entity<Manga>()
.Navigation(m => m.Authors)
.AutoInclude();
modelBuilder.Entity<Manga>()
.HasMany<MangaTag>(m => m.Tags)
.WithMany();
modelBuilder.Entity<Manga>()
.Navigation(m => m.Tags)
.AutoInclude();
modelBuilder.Entity<Manga>()
.HasMany<Link>(m => m.Links)
.WithOne();
modelBuilder.Entity<Manga>()
.Navigation(m => m.Links)
.AutoInclude();
modelBuilder.Entity<Manga>()
.HasMany<MangaAltTitle>(m => m.AltTitles)
.WithOne();
modelBuilder.Entity<Manga>()
.Navigation(m => m.AltTitles)
.AutoInclude();
modelBuilder.Entity<Chapter>()
.HasOne<Manga>(c => c.ParentManga)
.WithMany()
.HasForeignKey(c => c.ParentMangaId);
modelBuilder.Entity<Chapter>()
.Navigation(c => c.ParentManga)
.AutoInclude();
}
}
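Wiring this context into the application host is not part of this diff; a minimal sketch, assuming the Npgsql EF Core provider (Npgsql.EntityFrameworkCore.PostgreSQL), a `using Microsoft.EntityFrameworkCore;`, and an ASP.NET Core `WebApplicationBuilder` named `builder`; the connection-string values are placeholders:
// Hedged sketch: register PgsqlContext against a PostgreSQL database.
builder.Services.AddDbContext<PgsqlContext>(options =>
    options.UseNpgsql("Host=<host>;Database=<db>;Username=<user>;Password=<password>"));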

40
API/TokenGen.cs Normal file
View File

@ -0,0 +1,40 @@
using System.Security.Cryptography;
using System.Text;
namespace API;
public static class TokenGen
{
private const uint MinimumLength = 8;
private const string Chars = "abcdefghijklmnopqrstuvwxyz0123456789";
public static string CreateToken(Type t, uint fullLength) => CreateToken(t.Name, fullLength);
public static string CreateToken(string prefix, uint fullLength)
{
if (prefix.Length + 1 >= fullLength - MinimumLength)
throw new ArgumentException("Prefix to long to create Token of meaningful length.");
long l = fullLength - prefix.Length - 1;
byte[] rng = new byte[l];
RandomNumberGenerator.Create().GetBytes(rng);
string key = new (rng.Select(b => Chars[b % Chars.Length]).ToArray());
key = string.Join('-', prefix, key);
return key;
}
public static string CreateTokenHash(string prefix, uint fullLength, string[] keys)
{
if (prefix.Length + 1 >= fullLength - MinimumLength)
throw new ArgumentException("Prefix to long to create Token of meaningful length.");
int l = (int)(fullLength - prefix.Length - 1);
MD5 md5 = MD5.Create();
byte[][] hashes = keys.Select(key => md5.ComputeHash(Encoding.UTF8.GetBytes(key))).ToArray();
byte[] xOrHash = new byte[l];
foreach (byte[] hash in hashes)
for (int i = 0; i < xOrHash.Length; i++)
xOrHash[i] = (byte)(xOrHash[i] ^ (i < hash.Length ? hash[i] : 0)); //guard against hashes shorter or longer than the requested token length
string key = new (xOrHash.Select(b => Chars[b % Chars.Length]).ToArray());
key = string.Join('-', prefix, key);
return key;
}
}
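For orientation, a hedged usage sketch of the two helpers above; the argument values are illustrative:
// CreateToken: "<prefix>-" followed by random [a-z0-9] characters, fullLength characters in total.
string notificationId = TokenGen.CreateToken("Notification", 64);
// CreateTokenHash: deterministic - the same prefix and key set always yield the same token.
string mergedId = TokenGen.CreateTokenHash("Chapter", 64, new[] { "mangaId", "chapterNumber" });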

104
API/Tranga.cs Normal file
View File

@ -0,0 +1,104 @@
using API.Schema;
using API.Schema.Jobs;
using API.Schema.NotificationConnectors;
using log4net;
using log4net.Config;
namespace API;
public static class Tranga
{
public static Thread NotificationSenderThread { get; } = new (NotificationSender);
public static Thread JobStarterThread { get; } = new (JobStarter);
private static readonly Dictionary<Thread, Job> RunningJobs = new();
private static readonly ILog Log = LogManager.GetLogger(typeof(Tranga));
internal static void StartLogger()
{
BasicConfigurator.Configure();
}
private static void NotificationSender(object? pgsqlContext)
{
if(pgsqlContext is null) return;
PgsqlContext context = (PgsqlContext)pgsqlContext;
IQueryable<Notification> staleNotifications = context.Notifications.Where(n => n.Urgency < NotificationUrgency.Normal);
context.Notifications.RemoveRange(staleNotifications);
context.SaveChanges();
while (true)
{
SendNotifications(context, NotificationUrgency.High);
SendNotifications(context, NotificationUrgency.Normal);
SendNotifications(context, NotificationUrgency.Low);
context.SaveChanges();
Thread.Sleep(2000);
}
}
private static void SendNotifications(PgsqlContext context, NotificationUrgency urgency)
{
List<Notification> notifications = context.Notifications.Where(n => n.Urgency == urgency).ToList();
if (notifications.Any())
{
DateTime max = notifications.MaxBy(n => n.Date)!.Date;
if (DateTime.UtcNow.Subtract(max) > TrangaSettings.NotificationUrgencyDelay(urgency)) //Notification.Date is stored as UTC
{
foreach (NotificationConnector notificationConnector in context.NotificationConnectors)
{
foreach (Notification notification in notifications)
notificationConnector.SendNotification(notification.Title, notification.Message);
}
context.Notifications.RemoveRange(notifications);
}
}
context.SaveChanges();
}
private static void JobStarter(object? pgsqlContext)
{
if(pgsqlContext is null) return;
PgsqlContext context = (PgsqlContext)pgsqlContext;
string TRANGA = "\n\n _______ \n|_ _|.----..---.-..-----..-----..---.-.\n | | | _|| _ || || _ || _ |\n |___| |__| |___._||__|__||___ ||___._|\n |_____| \n\n";
Log.Info(TRANGA);
while (true)
{
List<Job> completedJobs = context.Jobs.Where(j => j.state == JobState.Completed).ToList();
foreach (Job job in completedJobs)
if(job.RecurrenceMs <= 0)
context.Jobs.Remove(job);
else
{
job.LastExecution = DateTime.UtcNow;
job.state = JobState.Waiting;
context.Jobs.Update(job);
}
List<Job> runJobs = context.Jobs.Where(j => j.state <= JobState.Running).ToList().Where(j => j.NextExecution < DateTime.UtcNow).ToList();
foreach (Job job in runJobs)
{
Thread t = new (() =>
{
IEnumerable<Job> newJobs = job.Run(context);
context.Jobs.AddRange(newJobs);
});
RunningJobs.Add(t, job);
t.Start();
context.Jobs.Update(job);
}
(Thread, Job)[] removeFromThreadsList = RunningJobs.Where(t => !t.Key.IsAlive)
.Select(t => (t.Key, t.Value)).ToArray();
foreach ((Thread thread, Job job) thread in removeFromThreadsList)
{
RunningJobs.Remove(thread.thread);
context.Jobs.Update(thread.job);
}
context.SaveChanges();
Thread.Sleep(2000);
}
}
}
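Starting these loops is not shown in this diff; a minimal sketch from within the API project, assuming a `PgsqlContext` instance named `context` is available and passed as the thread argument (matching the `object?` parameters above):
// Hedged sketch: both loops expect a PgsqlContext as their ParameterizedThreadStart argument.
Tranga.StartLogger();
Tranga.NotificationSenderThread.Start(context);
Tranga.JobStarterThread.Start(context);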

191
API/TrangaSettings.cs Normal file
View File

@ -0,0 +1,191 @@
using System.Runtime.InteropServices;
using API.MangaDownloadClients;
using API.Schema;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using static System.IO.UnixFileMode;
namespace API;
public static class TrangaSettings
{
public static string downloadLocation { get; private set; } = (RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? "/Manga" : Path.Join(Directory.GetCurrentDirectory(), "Downloads"));
public static string workingDirectory { get; private set; } = Path.Join(RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? "/usr/share" : Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "tranga-api");
public static int apiPortNumber { get; private set; } = 6531;
[JsonIgnore]
internal static readonly string DefaultUserAgent = $"Tranga ({Enum.GetName(Environment.OSVersion.Platform)}; {(Environment.Is64BitOperatingSystem ? "x64" : "")}) / 1.0";
public static string userAgent { get; private set; } = DefaultUserAgent;
public static int compression{ get; private set; } = 40;
public static bool bwImages { get; private set; } = false;
[JsonIgnore]
public static string settingsFilePath => Path.Join(workingDirectory, "settings.json");
[JsonIgnore]
public static string coverImageCache => Path.Join(workingDirectory, "imageCache");
public static bool aprilFoolsMode { get; private set; } = true;
[JsonIgnore]
internal static readonly Dictionary<RequestType, int> DefaultRequestLimits = new ()
{
{RequestType.MangaInfo, 250},
{RequestType.MangaDexFeed, 250},
{RequestType.MangaDexImage, 40},
{RequestType.MangaImage, 60},
{RequestType.MangaCover, 250},
{RequestType.Default, 60}
};
public static Dictionary<RequestType, int> requestLimits { get; set; } = DefaultRequestLimits;
public static TimeSpan NotificationUrgencyDelay(NotificationUrgency urgency) => urgency switch
{
NotificationUrgency.High => TimeSpan.Zero,
NotificationUrgency.Normal => TimeSpan.FromMinutes(5),
NotificationUrgency.Low => TimeSpan.FromMinutes(10),
_ => TimeSpan.FromHours(1)
};
public static void Load()
{
if(File.Exists(settingsFilePath))
Deserialize(File.ReadAllText(settingsFilePath));
else return;
Directory.CreateDirectory(downloadLocation);
ExportSettings();
}
public static void UpdateAprilFoolsMode(bool enabled)
{
aprilFoolsMode = enabled;
ExportSettings();
}
public static void UpdateCompressImages(int value)
{
compression = int.Clamp(value, 1, 100);
ExportSettings();
}
public static void UpdateBwImages(bool enabled)
{
bwImages = enabled;
ExportSettings();
}
public static void UpdateDownloadLocation(string newPath, bool moveFiles = true)
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
Directory.CreateDirectory(newPath, GroupRead | GroupWrite | None | OtherRead | OtherWrite | UserRead | UserWrite);
else
Directory.CreateDirectory(newPath);
if (moveFiles)
MoveContentsOfDirectoryTo(TrangaSettings.downloadLocation, newPath);
TrangaSettings.downloadLocation = newPath;
ExportSettings();
}
private static void MoveContentsOfDirectoryTo(string oldDir, string newDir)
{
string[] directoryPaths = Directory.GetDirectories(oldDir);
string[] filePaths = Directory.GetFiles(oldDir);
foreach (string file in filePaths)
{
string newPath = Path.Join(newDir, Path.GetFileName(file));
File.Move(file, newPath, true);
}
foreach(string directory in directoryPaths)
{
string dirName = new DirectoryInfo(directory).Name; //use the subdirectory's own name, not its parent path
string newPath = Path.Join(newDir, dirName);
if(Directory.Exists(newPath))
MoveContentsOfDirectoryTo(directory, newPath);
else
Directory.Move(directory, newPath);
}
}
public static void UpdateUserAgent(string? customUserAgent)
{
userAgent = customUserAgent ?? DefaultUserAgent;
ExportSettings();
}
public static void UpdateRateLimit(RequestType requestType, int newLimit)
{
requestLimits[requestType] = newLimit;
ExportSettings();
}
public static void ResetRateLimits()
{
requestLimits = DefaultRequestLimits;
ExportSettings();
}
public static void ExportSettings()
{
if (File.Exists(settingsFilePath))
{
while(IsFileInUse(settingsFilePath))
Thread.Sleep(100);
}
else
Directory.CreateDirectory(new FileInfo(settingsFilePath).DirectoryName!);
File.WriteAllText(settingsFilePath, Serialize());
}
internal static bool IsFileInUse(string filePath)
{
if (!File.Exists(filePath))
return false;
try
{
using FileStream stream = new (filePath, FileMode.Open, FileAccess.Read, FileShare.None);
stream.Close();
return false;
}
catch (IOException)
{
return true;
}
}
public static JObject AsJObject()
{
JObject jobj = new JObject();
jobj.Add("downloadLocation", JToken.FromObject(downloadLocation));
jobj.Add("workingDirectory", JToken.FromObject(workingDirectory));
jobj.Add("apiPortNumber", JToken.FromObject(apiPortNumber));
jobj.Add("userAgent", JToken.FromObject(userAgent));
jobj.Add("aprilFoolsMode", JToken.FromObject(aprilFoolsMode));
jobj.Add("requestLimits", JToken.FromObject(requestLimits));
jobj.Add("compression", JToken.FromObject(compression));
jobj.Add("bwImages", JToken.FromObject(bwImages));
return jobj;
}
public static string Serialize() => AsJObject().ToString();
public static void Deserialize(string serialized)
{
JObject jobj = JObject.Parse(serialized);
if (jobj.TryGetValue("downloadLocation", out JToken? dl))
downloadLocation = dl.Value<string>()!;
if (jobj.TryGetValue("workingDirectory", out JToken? wd))
workingDirectory = wd.Value<string>()!;
if (jobj.TryGetValue("apiPortNumber", out JToken? apn))
apiPortNumber = apn.Value<int>();
if (jobj.TryGetValue("userAgent", out JToken? ua))
userAgent = ua.Value<string>()!;
if (jobj.TryGetValue("aprilFoolsMode", out JToken? afm))
aprilFoolsMode = afm.Value<bool>()!;
if (jobj.TryGetValue("requestLimits", out JToken? rl))
requestLimits = rl.ToObject<Dictionary<RequestType, int>>()!;
if (jobj.TryGetValue("compression", out JToken? ci))
compression = ci.Value<int>()!;
if (jobj.TryGetValue("bwImages", out JToken? bwi))
bwImages = bwi.Value<bool>()!;
}
}
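A hedged sketch of typical calls against these settings (values are illustrative; `RequestType` comes from API.MangaDownloadClients as per the usings above):
// Hedged sketch: load settings.json if present, then adjust and persist individual values.
TrangaSettings.Load();
TrangaSettings.UpdateRateLimit(RequestType.MangaImage, 100);
TrangaSettings.UpdateUserAgent("MyTranga/1.0");
TrangaSettings.UpdateDownloadLocation("/Manga", moveFiles: false);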

42
Dockerfile Normal file
View File

@ -0,0 +1,42 @@
# syntax=docker/dockerfile:1
ARG DOTNET=9.0
FROM --platform=$TARGETPLATFORM mcr.microsoft.com/dotnet/aspnet:$DOTNET AS base
WORKDIR /publish
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium
ENV XDG_CONFIG_HOME=/tmp/.chromium
ENV XDG_CACHE_HOME=/tmp/.chromium
RUN apt-get update \
&& apt-get install -y libx11-6 libx11-xcb1 libatk1.0-0 libgtk-3-0 libcups2 libdrm2 libxkbcommon0 libxcomposite1 libxdamage1 libxrandr2 libgbm1 libpango-1.0-0 libcairo2 libasound2 libxshmfence1 libnss3 chromium \
&& apt-get autopurge -y \
&& apt-get autoclean -y
FROM --platform=$BUILDPLATFORM mcr.microsoft.com/dotnet/sdk:$DOTNET AS build-env
WORKDIR /src
COPY Tranga.sln /src
COPY API/API.csproj /src/API/API.csproj
RUN dotnet restore /src/Tranga.sln
COPY . /src/
RUN dotnet publish -c Release --property:OutputPath=/publish -maxcpucount:1
FROM --platform=$TARGETPLATFORM base AS runtime
EXPOSE 6531
ARG UNAME=tranga
ARG UID=1000
ARG GID=1000
RUN groupadd -g $GID -o $UNAME \
&& useradd -m -u $UID -g $GID -o -s /bin/bash $UNAME \
&& mkdir /usr/share/tranga-api \
&& mkdir /Manga \
&& chown 1000:1000 /usr/share/tranga-api \
&& chown 1000:1000 /Manga
USER $UNAME
WORKDIR /publish
COPY --chown=1000:1000 --from=build-env /publish .
USER 0
ENTRYPOINT ["dotnet", "/publish/API.dll"]
CMD ["-f", "-c", "-l", "/usr/share/tranga-api/logs"]

View File

@ -1,32 +0,0 @@
using System.Text;
using System.Text.Json.Serialization;
namespace Logging;
public class FileLogger : LoggerBase
{
private string logFilePath { get; }
private const int MaxNumberOfLogFiles = 5;
public FileLogger(string logFilePath, TextWriter? stdOut, Encoding? encoding = null) : base (stdOut, encoding)
{
this.logFilePath = logFilePath;
//Remove oldest logfile if more than MaxNumberOfLogFiles
string parentFolderPath = Path.GetDirectoryName(logFilePath)!;
for (int fileCount = new DirectoryInfo(parentFolderPath).EnumerateFiles().Count(); fileCount > MaxNumberOfLogFiles - 1; fileCount--) //-1 because we create own logfile later
File.Delete(new DirectoryInfo(parentFolderPath).EnumerateFiles().MinBy(file => file.LastWriteTime)!.FullName);
}
protected override void Write(LogMessage logMessage)
{
try
{
File.AppendAllText(logFilePath, logMessage.ToString());
}
catch (Exception e)
{
stdOut?.WriteLine(e);
}
}
}

View File

@ -1,17 +0,0 @@
using System.Text;
namespace Logging;
public class FormattedConsoleLogger : LoggerBase
{
public FormattedConsoleLogger(TextWriter? stdOut, Encoding? encoding = null) : base(stdOut, encoding)
{
}
protected override void Write(LogMessage message)
{
//Nothing to do yet
}
}

View File

@ -1,63 +0,0 @@
using System.Net.Mime;
using System.Text;
namespace Logging;
public class Logger : TextWriter
{
public override Encoding Encoding { get; }
public enum LoggerType
{
FileLogger,
ConsoleLogger
}
private FileLogger? _fileLogger;
private FormattedConsoleLogger? _formattedConsoleLogger;
private MemoryLogger _memoryLogger;
private TextWriter? stdOut;
public Logger(LoggerType[] enabledLoggers, TextWriter? stdOut, Encoding? encoding, string? logFilePath)
{
this.Encoding = encoding ?? Encoding.ASCII;
this.stdOut = stdOut ?? null;
if (enabledLoggers.Contains(LoggerType.FileLogger) && logFilePath is not null)
_fileLogger = new FileLogger(logFilePath, null, encoding);
else
{
_fileLogger = null;
throw new ArgumentException($"logFilePath can not be null for LoggerType {LoggerType.FileLogger}");
}
_formattedConsoleLogger = enabledLoggers.Contains(LoggerType.ConsoleLogger) ? new FormattedConsoleLogger(null, encoding) : null;
_memoryLogger = new MemoryLogger(null, encoding);
}
public void WriteLine(string caller, string? value)
{
value = value is null ? Environment.NewLine : string.Concat(value, Environment.NewLine);
Write(caller, value);
}
public void Write(string caller, string? value)
{
if (value is null)
return;
_fileLogger?.Write(caller, value);
_formattedConsoleLogger?.Write(caller, value);
_memoryLogger.Write(caller, value);
stdOut?.Write(value);
}
public string[] Tail(uint? lines)
{
return _memoryLogger.Tail(lines);
}
public string[] GetNewLines()
{
return _memoryLogger.GetNewLines();
}
}

View File

@ -1,58 +0,0 @@
using System.Text;
namespace Logging;
public abstract class LoggerBase : TextWriter
{
public override Encoding Encoding { get; }
protected TextWriter? stdOut { get; }
public LoggerBase(TextWriter? stdOut, Encoding? encoding = null)
{
this.Encoding = encoding ?? Encoding.ASCII;
this.stdOut = stdOut;
}
public void WriteLine(string caller, string? value)
{
value = value is null ? Environment.NewLine : string.Join(value, Environment.NewLine);
LogMessage message = new LogMessage(DateTime.Now, caller, value);
Write(message);
}
public void Write(string caller, string? value)
{
if (value is null)
return;
LogMessage message = new LogMessage(DateTime.Now, caller, value);
stdOut?.Write(message.ToString());
Write(message);
}
protected abstract void Write(LogMessage message);
public class LogMessage
{
public DateTime logTime { get; }
public string caller { get; }
public string value { get; }
public LogMessage(DateTime now, string caller, string value)
{
this.logTime = now;
this.caller = caller;
this.value = value;
}
public override string ToString()
{
string dateTimeString = $"{logTime.ToShortDateString()} {logTime.ToLongTimeString()}";
return $"[{dateTimeString}] {caller,30} | {value}";
}
}
}

View File

@ -1,9 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net7.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
</Project>

View File

@ -1,57 +0,0 @@
using System.Text;
namespace Logging;
public class MemoryLogger : LoggerBase
{
private readonly SortedList<DateTime, LogMessage> _logMessages = new();
private int _lastLogMessageIndex = 0;
public MemoryLogger(TextWriter? stdOut, Encoding? encoding = null) : base(stdOut, encoding)
{
}
protected override void Write(LogMessage value)
{
_logMessages.Add(value.logTime, value);
}
public string[] GetLogMessage()
{
return Tail(Convert.ToUInt32(_logMessages.Count));
}
public string[] Tail(uint? length)
{
int retLength;
if (length is null || length > _logMessages.Count)
retLength = _logMessages.Count;
else
retLength = (int)length;
string[] ret = new string[retLength];
for (int retIndex = 0; retIndex < ret.Length; retIndex++)
{
ret[retIndex] = _logMessages.GetValueAtIndex(_logMessages.Count - retLength + retIndex).ToString();
}
_lastLogMessageIndex = _logMessages.Count - 1;
return ret;
}
public string[] GetNewLines()
{
int logMessageCount = _logMessages.Count;
string[] ret = new string[logMessageCount - _lastLogMessageIndex];
for (int retIndex = 0; retIndex < ret.Length; retIndex++)
{
ret[retIndex] = _logMessages.GetValueAtIndex(_lastLogMessageIndex + retIndex).ToString();
}
_lastLogMessageIndex = logMessageCount;
return ret;
}
}

View File

@ -1,25 +1,17 @@
<!-- PROJECT SHIELDS -->
<!--
*** I'm using markdown "reference style" links for readability.
*** Reference links are enclosed in brackets [ ] instead of parentheses ( ).
*** See the bottom of this document for the declaration of the reference variables
*** for contributors-url, forks-url, etc. This is an optional, concise syntax you may use.
*** https://www.markdownguide.org/basic-syntax/#reference-style-links
-->
<!-- PROJECT LOGO -->
<br />
<div align="center">
<h3 align="center">Tranga</h3>
<h3 align="center">Tranga v2</h3>
<p align="center">
Automatic Manga and Metadata downloader
</p>
<p align="center">
This is the API for <a href="https://github.com/C9Glax/tranga-website">Tranga-Website</a>
</p>
</div>
<!-- TABLE OF CONTENTS -->
<details>
<summary>Table of Contents</summary>
@ -33,6 +25,7 @@
<li>
<a href="#getting-started">Getting Started</a>
<ul>
<li><a href="#prerequisites">Usage</a></li>
<li><a href="#prerequisites">Prerequisites</a></li>
</ul>
</li>
@ -48,54 +41,92 @@
<!-- ABOUT THE PROJECT -->
## About The Project
Tranga can download Chapters and Metadata from Scanlation sites such as
Tranga can download Chapters and Metadata from "Scanlation" sites such as
- [MangaDex.org](https://mangadex.org/)
- [MangaDex.org](https://mangadex.org/) (Multilingual)
- [Manganato.com](https://manganato.com/) (en)
- [Mangasee.com](https://mangasee123.com/) (en)
- [MangaKatana.com](https://mangakatana.com) (en)
- [Mangaworld.bz](https://www.mangaworld.bz/) (it)
- [Bato.to](https://bato.to/v3x) (en)
- [Manga4Life](https://manga4life.com) (en)
- [ManhuaPlus](https://manhuaplus.org/) (en)
- [MangaHere](https://www.mangahere.cc/) (en) (Their covers aren't scrapeable.)
- [Weebcentral](https://weebcentral.com) (en)
- ❓ Open an [issue](https://github.com/C9Glax/tranga/issues/new?assignees=&labels=New+Connector&projects=&template=new_connector.yml&title=%5BNew+Connector%5D%3A+)
and trigger a library-scan with [Komga](https://komga.org/) and [Kavita](https://www.kavitareader.com/).
Notifications can be sent to your devices using [Gotify](https://gotify.net/), [LunaSea](https://www.lunasea.app/) or [Ntfy](https://ntfy.sh/).
### What this does and doesn't do
Tranga (this git-repo) will open a port (default 6531) and listen for requests to add Jobs to monitor and/or download specific Manga.
The configuration is all done through HTTP-Requests. [Documentation](docs/API_Calls_v2.md)
_**For a web-frontend use [tranga-website](https://github.com/C9Glax/tranga-website).**_
This project downloads the images for a Manga from the specified Scanlation-Website and packages them with some metadata - from that same website - in a .cbz-archive (per chapter).
It does this on an interval and checks for any Chapters (.cbz-Archives) not already existing in your specified Download-Location. (If you rename or move files, it will download them again.)
Tranga can (if configured) trigger a scan in Komga or Kavita, however the directory in which the Manga reside has to be available to both Tranga and Komga/Kavita.
The project doesn't manage metadata, and doesn't curate, change or enhance any information that isn't available on the selected Scanlation-Site.
It will blindly use whatever it scrapes (yes, this is a glorified web-scraper).
and automatically start updates in [Komga](https://komga.org/) to import them.
### Inspiration:
Because [Kaizoku](https://github.com/oae/kaizoku) was relying on [mangal](https://github.com/metafates/mangal) and mangal
hasn't received bugfixes for it's issues with Titles not showing up, or throwing errors because of illegal characters,
there were no alternatives for automatic downloads. However [Kaizoku](https://github.com/oae/kaizoku) certainly had a great Web-UI.
hasn't received bugfixes for its issues with Titles not showing up, or throwing errors because of illegal characters,
there were no alternatives for automatic downloads. However, [Kaizoku](https://github.com/oae/kaizoku) certainly had a great Web-UI.
That is why I wanted to create my own project, in a language I understand, and that I am able to maintain myself.
<p align="right">(<a href="#readme-top">back to top</a>)</p>
### Built With
- .NET-Core
- Newtonsoft.JSON
- Love <3
- [PuppeteerSharp](https://www.puppeteersharp.com/)
- [Html Agility Pack (HAP)](https://html-agility-pack.net/)
- [Soenneker.Utils.String.NeedlemanWunsch](https://github.com/soenneker/soenneker.utils.string.needlemanwunsch)
- [Sixlabors.ImageSharp](https://docs-v2.sixlabors.com/articles/imagesharp/index.html#license)
- [zstd-wrapper](https://github.com/oleg-st/ZstdSharp) [zstd](https://github.com/facebook/zstd)
- 💙 Blåhaj 🦈
<p align="right">(<a href="#readme-top">back to top</a>)</p>
## Star History
<a href="https://star-history.com/#c9glax/tranga&Date">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=c9glax/tranga&type=Date&theme=dark" />
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=c9glax/tranga&type=Date" />
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=c9glax/tranga&type=Date" />
</picture>
</a>
<!-- GETTING STARTED -->
## Getting Started
To use, head over to [releases](https://git.bernloehr.eu/glax/Tranga/releases) and download a release.
### Docker
A CLI will guide you through setup.
Download [docker-compose.yaml](https://git.bernloehr.eu/glax/Tranga/src/branch/master/docker-compose.yaml) and configure to your needs.
Mount `/Manga` to wherever you want your chapters (`.cbz`-Archives) downloaded (where Komga/Kavita can access them).
The `docker-compose` also includes [tranga-website](https://github.com/C9Glax/tranga-website) as frontend. For its configuration refer to the repo README.
For compatibility, do not run the compose as root (which you should not do anyway) but as a user that can access the folder.
### Prerequisites
[.NET-Core 7.0](https://dotnet.microsoft.com/en-us/download/dotnet/7.0)
#### To Build
[.NET-Core 8.0 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/8.0)
#### To Run
[.NET-Core 8.0 Runtime](https://dotnet.microsoft.com/en-us/download/dotnet/8.0) (scroll down a bit; it should be the second item on the right).
<!-- ROADMAP -->
## Roadmap
- [ ] Web-UI #1
- [ ] More Connectors
- [ ] Manganato #2
- [ ] ?
See the [open issues](https://git.bernloehr.eu/glax/Tranga/issues) for a full list of proposed features (and known issues).
See the [open issues](https://github.com/C9Glax/tranga/issues) for a full list of proposed features (and known issues).
<p align="right">(<a href="#readme-top">back to top</a>)</p>

View File

@ -1,20 +0,0 @@
FROM mcr.microsoft.com/dotnet/aspnet:7.0 AS base
WORKDIR /app
EXPOSE 80
EXPOSE 443
FROM mcr.microsoft.com/dotnet/sdk:7.0 AS build
WORKDIR /src
COPY ["Tranga-API/Tranga-API.csproj", "Tranga-API/"]
RUN dotnet restore "Tranga-API/Tranga-API.csproj"
COPY . .
WORKDIR "/src/Tranga-API"
RUN dotnet build "Tranga-API.csproj" -c Release -o /app/build
FROM build AS publish
RUN dotnet publish "Tranga-API.csproj" -c Release -o /app/publish /p:UseAppHost=false
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "Tranga-API.dll"]

View File

@ -1,122 +0,0 @@
using Logging;
using Tranga;
string applicationFolderPath = Path.Join(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "Tranga-API");
string logsFolderPath = Path.Join(applicationFolderPath, "logs");
string logFilePath = Path.Join(logsFolderPath, $"log-{DateTime.Now:dd-M-yyyy-HH-mm-ss}.txt");
string settingsFilePath = Path.Join(applicationFolderPath, "data.json");
Directory.CreateDirectory(applicationFolderPath);
Directory.CreateDirectory(logsFolderPath);
Console.WriteLine($"Logfile-Path: {logFilePath}");
Console.WriteLine($"Settings-File-Path: {settingsFilePath}");
Logger logger = new(new[] { Logger.LoggerType.FileLogger }, null, null, logFilePath);
logger.WriteLine("Tranga_CLI", "Loading Taskmanager.");
TaskManager.SettingsData settings;
if (File.Exists(settingsFilePath))
settings = TaskManager.LoadData(settingsFilePath);
else
settings = new TaskManager.SettingsData(Directory.GetCurrentDirectory(), settingsFilePath, null, new HashSet<TrangaTask>());
TaskManager taskManager = new (settings, logger);
var builder = WebApplication.CreateBuilder(args);
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddSwaggerGen();
builder.Services.AddControllers().AddNewtonsoftJson();
var app = builder.Build();
app.UseSwagger();
app.UseSwaggerUI();
app.UseSwagger();
app.UseSwaggerUI();
app.MapGet("/GetAvailableControllers", () => taskManager.GetAvailableConnectors());
app.MapGet("/GetKnownPublications", () => taskManager.GetAllPublications());
app.MapGet("/GetPublicationsFromConnector", (string connectorName, string title) =>
{
Connector? connector = taskManager.GetAvailableConnectors().FirstOrDefault(con => con.Key == connectorName).Value;
if (connector is null)
return Array.Empty<Publication>();
if(title.Length < 4)
return Array.Empty<Publication>();
return taskManager.GetPublicationsFromConnector(connector, title);
});
app.MapGet("/Tasks/GetTaskTypes", () => Enum.GetNames(typeof(TrangaTask.Task)));
app.MapPost("/Tasks/Create", (string taskType, string? connectorName, string? publicationId, string reoccurrenceTime, string? language) =>
{
Publication? publication = taskManager.GetAllPublications().FirstOrDefault(pub => pub.internalId == publicationId);
TrangaTask.Task task = Enum.Parse<TrangaTask.Task>(taskType);
taskManager.AddTask(task, connectorName, publication, TimeSpan.Parse(reoccurrenceTime), language??"");
});
app.MapPost("/Tasks/Delete", (string taskType, string? connectorName, string? publicationId) =>
{
Publication? publication = taskManager.GetAllPublications().FirstOrDefault(pub => pub.internalId == publicationId);
TrangaTask.Task task = Enum.Parse<TrangaTask.Task>(taskType);
taskManager.DeleteTask(task, connectorName, publication);
});
app.MapGet("/Tasks/GetList", () => taskManager.GetAllTasks());
app.MapPost("/Tasks/Start", (string taskType, string? connectorName, string? publicationId) =>
{
TrangaTask.Task pTask = Enum.Parse<TrangaTask.Task>(taskType);
TrangaTask? task = taskManager.GetAllTasks().FirstOrDefault(tTask =>
tTask.task == pTask && tTask.publication?.internalId == publicationId && tTask.connectorName == connectorName);
if (task is null)
return;
taskManager.ExecuteTaskNow(task);
});
app.MapGet("/Tasks/GetRunningTasks",
() => taskManager.GetAllTasks().Where(task => task.state is TrangaTask.ExecutionState.Running));
app.MapGet("/Queue/GetList",
() => taskManager.GetAllTasks().Where(task => task.state is TrangaTask.ExecutionState.Enqueued));
app.MapPost("/Queue/Enqueue", (string taskType, string? connectorName, string? publicationId) =>
{
TrangaTask.Task pTask = Enum.Parse<TrangaTask.Task>(taskType);
TrangaTask? task = taskManager.GetAllTasks().FirstOrDefault(tTask =>
tTask.task == pTask && tTask.publication?.internalId == publicationId && tTask.connectorName == connectorName);
if (task is null)
return;
taskManager.AddTaskToQueue(task);
});
app.MapPost("/Queue/Dequeue", (string taskType, string? connectorName, string? publicationId) =>
{
TrangaTask.Task pTask = Enum.Parse<TrangaTask.Task>(taskType);
TrangaTask? task = taskManager.GetAllTasks().FirstOrDefault(tTask =>
tTask.task == pTask && tTask.publication?.internalId == publicationId && tTask.connectorName == connectorName);
if (task is null)
return;
taskManager.RemoveTaskFromQueue(task);
});
app.MapGet("/Settings/Get", () => new Settings(taskManager.settings));
app.MapPost("/Settings/Update", (string? downloadLocation, string? komgaUrl, string? komgaAuth) => taskManager.UpdateSettings(downloadLocation, komgaUrl, komgaAuth) );
app.Run();
class Settings
{
public string downloadLocation { get; }
public Komga? komga { get; }
public Settings(TaskManager.SettingsData settings)
{
this.downloadLocation = settings.downloadLocation;
this.komga = settings.komga;
}
}

View File

@ -1,37 +0,0 @@
{
"iisSettings": {
"windowsAuthentication": false,
"anonymousAuthentication": true,
"iisExpress": {
"applicationUrl": "http://localhost:1716",
"sslPort": 44391
}
},
"profiles": {
"http": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
"applicationUrl": "http://localhost:5177",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"https": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
"applicationUrl": "https://localhost:7036;http://localhost:5177",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"IIS Express": {
"commandName": "IISExpress",
"launchBrowser": true,
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
}
}
}

View File

@ -1,28 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net7.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<RootNamespace>Tranga_API</RootNamespace>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
</PropertyGroup>
<ItemGroup>
<Content Include="..\.dockerignore">
<Link>.dockerignore</Link>
</Content>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Logging\Logging.csproj" />
<ProjectReference Include="..\Tranga\Tranga.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Mvc.NewtonsoftJson" Version="7.0.5" />
<PackageReference Include="Microsoft.VisualStudio.Web.CodeGeneration.Design" Version="7.0.6" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
</ItemGroup>
</Project>

View File

@ -1,18 +0,0 @@
FROM mcr.microsoft.com/dotnet/runtime:7.0 AS base
WORKDIR /app
FROM mcr.microsoft.com/dotnet/sdk:7.0 AS build
WORKDIR /src
COPY ["Tranga-CLI/Tranga-CLI.csproj", "Tranga-CLI/"]
RUN dotnet restore "Tranga-CLI/Tranga-CLI.csproj"
COPY . .
WORKDIR "/src/Tranga-CLI"
RUN dotnet build "Tranga-CLI.csproj" -c Release -o /app/build
FROM build AS publish
RUN dotnet publish "Tranga-CLI.csproj" -c Release -o /app/publish /p:UseAppHost=false
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "Tranga-CLI.dll"]

View File

@ -1,22 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net7.0</TargetFramework>
<RootNamespace>Tranga_CLI</RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
</PropertyGroup>
<ItemGroup>
<Content Include="..\.dockerignore">
<Link>.dockerignore</Link>
</Content>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Tranga\Tranga.csproj" />
</ItemGroup>
</Project>

View File

@ -1,502 +0,0 @@
using System.Globalization;
using Logging;
using Tranga;
namespace Tranga_CLI;
/*
* This is written with pure hatred for readability.
* At some point do this properly.
* Read at own risk.
*/
public static class Tranga_Cli
{
public static void Main(string[] args)
{
string applicationFolderPath = Path.Join(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "Tranga");
string logsFolderPath = Path.Join(applicationFolderPath, "logs");
string logFilePath = Path.Join(logsFolderPath, $"log-{DateTime.Now:dd-M-yyyy-HH-mm-ss}.txt");
string settingsFilePath = Path.Join(applicationFolderPath, "data.json");
Directory.CreateDirectory(applicationFolderPath);
Directory.CreateDirectory(logsFolderPath);
Console.WriteLine($"Logfile-Path: {logFilePath}");
Console.WriteLine($"Settings-File-Path: {settingsFilePath}");
Logger logger = new(new[] { Logger.LoggerType.FileLogger }, null, null, logFilePath);
logger.WriteLine("Tranga_CLI", "Loading Taskmanager.");
TaskManager.SettingsData settings;
if (File.Exists(settingsFilePath))
settings = TaskManager.LoadData(settingsFilePath);
else
settings = new TaskManager.SettingsData(Directory.GetCurrentDirectory(), settingsFilePath, null, new HashSet<TrangaTask>());
logger.WriteLine("Tranga_CLI", "User Input");
Console.WriteLine($"Output folder path [{settings.downloadLocation}]:");
string? tmpPath = Console.ReadLine();
while(tmpPath is null)
tmpPath = Console.ReadLine();
if(tmpPath.Length > 0)
settings.UpdateSettings(pDownloadLocation: tmpPath, null);
Console.WriteLine($"Komga BaseURL [{settings.komga?.baseUrl}]:");
string? tmpUrl = Console.ReadLine();
while (tmpUrl is null)
tmpUrl = Console.ReadLine();
if (tmpUrl.Length > 0)
{
Console.WriteLine("Username:");
string? tmpUser = Console.ReadLine();
while (tmpUser is null || tmpUser.Length < 1)
tmpUser = Console.ReadLine();
Console.WriteLine("Password:");
string tmpPass = string.Empty;
ConsoleKey key;
do
{
var keyInfo = Console.ReadKey(intercept: true);
key = keyInfo.Key;
if (key == ConsoleKey.Backspace && tmpPass.Length > 0)
{
Console.Write("\b \b");
tmpPass = tmpPass[0..^1];
}
else if (!char.IsControl(keyInfo.KeyChar))
{
Console.Write("*");
tmpPass += keyInfo.KeyChar;
}
} while (key != ConsoleKey.Enter);
settings.UpdateSettings(null, new Komga(tmpUrl, tmpUser, tmpPass, logger));
}
logger.WriteLine("Tranga_CLI", "Loaded.");
TaskMode(settings, logger);
}
private static void TaskMode(TaskManager.SettingsData settings, Logger logger)
{
TaskManager taskManager = new (settings, logger);
ConsoleKey selection = ConsoleKey.EraseEndOfFile;
PrintMenu(taskManager, taskManager.settings.downloadLocation, logger);
while (selection != ConsoleKey.Q)
{
int taskCount = taskManager.GetAllTasks().Length;
int taskRunningCount = taskManager.GetAllTasks().Count(task => task.state == TrangaTask.ExecutionState.Running);
int taskEnqueuedCount =
taskManager.GetAllTasks().Count(task => task.state == TrangaTask.ExecutionState.Enqueued);
Console.SetCursorPosition(0,1);
Console.WriteLine($"Tasks (Running/Queue/Total)): {taskRunningCount}/{taskEnqueuedCount}/{taskCount}");
if (Console.KeyAvailable)
{
selection = Console.ReadKey().Key;
switch (selection)
{
case ConsoleKey.L:
PrintTasks(taskManager.GetAllTasks(), logger);
Console.WriteLine("Press any key.");
Console.ReadKey();
break;
case ConsoleKey.C:
CreateTask(taskManager, taskManager.settings, logger);
Console.WriteLine("Press any key.");
Console.ReadKey();
break;
case ConsoleKey.D:
DeleteTask(taskManager, logger);
Console.WriteLine("Press any key.");
Console.ReadKey();
break;
case ConsoleKey.E:
ExecuteTaskNow(taskManager, logger);
Console.WriteLine("Press any key.");
Console.ReadKey();
break;
case ConsoleKey.S:
SearchTasks(taskManager, logger);
Console.WriteLine("Press any key.");
Console.ReadKey();
break;
case ConsoleKey.R:
PrintTasks(
taskManager.GetAllTasks().Where(eTask => eTask.state == TrangaTask.ExecutionState.Running)
.ToArray(), logger);
Console.WriteLine("Press any key.");
Console.ReadKey();
break;
case ConsoleKey.K:
PrintTasks(
taskManager.GetAllTasks().Where(qTask => qTask.state is TrangaTask.ExecutionState.Enqueued)
.ToArray(), logger);
Console.WriteLine("Press any key.");
Console.ReadKey();
break;
case ConsoleKey.F:
TailLog(logger);
Console.ReadKey();
break;
case ConsoleKey.M:
RemoveTaskFromQueue(taskManager, logger);
Console.WriteLine("Press any key.");
Console.ReadKey();
break;
case ConsoleKey.B:
AddTaskToQueue(taskManager, logger);
Console.WriteLine("Press any key.");
Console.ReadKey();
break;
}
PrintMenu(taskManager, taskManager.settings.downloadLocation, logger);
}
Thread.Sleep(200);
}
logger.WriteLine("Tranga_CLI", "Exiting.");
Console.Clear();
Console.WriteLine("Exiting.");
if (taskManager.GetAllTasks().Any(task => task.state == TrangaTask.ExecutionState.Running))
{
Console.WriteLine("Force quit (Even with running tasks?) y/N");
selection = Console.ReadKey().Key;
while(selection != ConsoleKey.Y && selection != ConsoleKey.N)
selection = Console.ReadKey().Key;
taskManager.Shutdown(selection == ConsoleKey.Y);
}else
// ReSharper disable once RedundantArgumentDefaultValue Better readability
taskManager.Shutdown(false);
}
private static void PrintMenu(TaskManager taskManager, string folderPath, Logger logger)
{
int taskCount = taskManager.GetAllTasks().Length;
int taskRunningCount = taskManager.GetAllTasks().Count(task => task.state == TrangaTask.ExecutionState.Running);
int taskEnqueuedCount =
taskManager.GetAllTasks().Count(task => task.state == TrangaTask.ExecutionState.Enqueued);
Console.Clear();
Console.WriteLine($"Download Folder: {folderPath}");
Console.WriteLine($"Tasks (Running/Queue/Total)): {taskRunningCount}/{taskEnqueuedCount}/{taskCount}");
Console.WriteLine();
Console.WriteLine($"{"C: Create Task",-30}{"L: List tasks",-30}{"B: Enqueue Task", -30}");
Console.WriteLine($"{"D: Delete Task",-30}{"S: Search Tasks", -30}{"K: List Task Queue", -30}");
Console.WriteLine($"{"E: Execute Task now",-30}{"R: List Running Tasks", -30}{"M: Remove Task from Queue", -30}");
Console.WriteLine();
Console.WriteLine($"{"",-30}{"F: Show Log",-30}{"Q: Exit",-30}");
}
private static void PrintTasks(TrangaTask[] tasks, Logger logger)
{
logger.WriteLine("Tranga_CLI", "Printing Tasks");
int taskCount = tasks.Length;
int taskRunningCount = tasks.Count(task => task.state == TrangaTask.ExecutionState.Running);
int taskEnqueuedCount = tasks.Count(task => task.state == TrangaTask.ExecutionState.Enqueued);
Console.Clear();
int tIndex = 0;
Console.WriteLine($"Tasks (Running/Queue/Total): {taskRunningCount}/{taskEnqueuedCount}/{taskCount}");
string header =
$"{"",-5}{"Task",-20} | {"Last Executed",-20} | {"Reoccurrence",-12} | {"State",-10} | {"Connector",-15} | Publication/Manga";
Console.WriteLine(header);
Console.WriteLine(new string('-', header.Length));
foreach(TrangaTask trangaTask in tasks)
Console.WriteLine($"{tIndex++:000}: {trangaTask}");
}
private static TrangaTask? SelectTask(TrangaTask[] tasks, Logger logger)
{
logger.WriteLine("Tranga_CLI", "Menu: Select task");
if (tasks.Length < 1)
{
Console.Clear();
Console.WriteLine("There are no available Tasks.");
logger.WriteLine("Tranga_CLI", "No available Tasks.");
return null;
}
PrintTasks(tasks, logger);
logger.WriteLine("Tranga_CLI", "Selecting Task to Remove (from queue)");
Console.WriteLine("Enter q to abort");
Console.WriteLine($"Select Task (0-{tasks.Length - 1}):");
string? selectedTask = Console.ReadLine();
while(selectedTask is null || selectedTask.Length < 1)
selectedTask = Console.ReadLine();
if (selectedTask.Length == 1 && selectedTask.ToLower() == "q")
{
Console.Clear();
Console.WriteLine("aborted.");
logger.WriteLine("Tranga_CLI", "aborted");
return null;
}
try
{
int selectedTaskIndex = Convert.ToInt32(selectedTask);
logger.WriteLine("Tranga_CLI", "Sending Task to TaskManager");
return tasks[selectedTaskIndex];
}
catch (Exception e)
{
Console.WriteLine($"Exception: {e.Message}");
logger.WriteLine("Tranga_CLI", e.Message);
}
return null;
}
private static void AddTaskToQueue(TaskManager taskManager, Logger logger)
{
Console.Clear();
logger.WriteLine("Tranga_CLI", "Menu: Add Task to queue");
TrangaTask[] tasks = taskManager.GetAllTasks().Where(rTask =>
rTask.state is not TrangaTask.ExecutionState.Enqueued and not TrangaTask.ExecutionState.Running).ToArray();
TrangaTask? selectedTask = SelectTask(tasks, logger);
if (selectedTask is null)
return;
logger.WriteLine("Tranga_CLI", "Sending Task to TaskManager");
taskManager.AddTaskToQueue(selectedTask);
}
private static void RemoveTaskFromQueue(TaskManager taskManager, Logger logger)
{
Console.Clear();
logger.WriteLine("Tranga_CLI", "Menu: Remove Task from queue");
TrangaTask[] tasks = taskManager.GetAllTasks().Where(rTask => rTask.state is TrangaTask.ExecutionState.Enqueued).ToArray();
TrangaTask? selectedTask = SelectTask(tasks, logger);
if (selectedTask is null)
return;
logger.WriteLine("Tranga_CLI", "Sending Task to TaskManager");
taskManager.RemoveTaskFromQueue(selectedTask);
}
private static void TailLog(Logger logger)
{
logger.WriteLine("Tranga_CLI", "Menu: Show Log-lines");
Console.Clear();
string[] lines = logger.Tail(20);
foreach (string message in lines)
Console.Write(message);
while (!Console.KeyAvailable)
{
string[] newLines = logger.GetNewLines();
foreach(string message in newLines)
Console.Write(message);
Thread.Sleep(40);
}
}
private static void CreateTask(TaskManager taskManager, TaskManager.SettingsData settings, Logger logger)
{
logger.WriteLine("Tranga_CLI", "Menu: Creating Task");
TrangaTask.Task? tmpTask = SelectTaskType(logger);
if (tmpTask is null)
return;
TrangaTask.Task task = (TrangaTask.Task)tmpTask!;
Connector? connector = null;
if (task != TrangaTask.Task.UpdateKomgaLibrary)
{
connector = SelectConnector(settings.downloadLocation, taskManager.GetAvailableConnectors().Values.ToArray(), logger);
if (connector is null)
return;
}
Publication? publication = null;
if (task != TrangaTask.Task.UpdatePublications && task != TrangaTask.Task.UpdateKomgaLibrary)
{
publication = SelectPublication(connector!, logger);
if (publication is null)
return;
}
TimeSpan reoccurrence = SelectReoccurrence(logger);
logger.WriteLine("Tranga_CLI", "Sending Task to TaskManager");
TrangaTask newTask = taskManager.AddTask(task, connector?.name, publication, reoccurrence, "en");
Console.WriteLine(newTask);
}
private static void ExecuteTaskNow(TaskManager taskManager, Logger logger)
{
logger.WriteLine("Tranga_CLI", "Menu: Executing Task");
TrangaTask[] tasks = taskManager.GetAllTasks().Where(nTask => nTask.state is not TrangaTask.ExecutionState.Running).ToArray();
TrangaTask? selectedTask = SelectTask(tasks, logger);
if (selectedTask is null)
return;
logger.WriteLine("Tranga_CLI", "Sending Task to TaskManager");
taskManager.ExecuteTaskNow(selectedTask);
}
private static void DeleteTask(TaskManager taskManager, Logger logger)
{
logger.WriteLine("Tranga_CLI", "Menu: Delete Task");
TrangaTask[] tasks = taskManager.GetAllTasks();
TrangaTask? selectedTask = SelectTask(tasks, logger);
if (selectedTask is null)
return;
logger.WriteLine("Tranga_CLI", "Sending Task to TaskManager");
taskManager.DeleteTask(selectedTask.task, selectedTask.connectorName, selectedTask.publication);
}
private static TrangaTask.Task? SelectTaskType(Logger logger)
{
logger.WriteLine("Tranga_CLI", "Menu: Select TaskType");
Console.Clear();
string[] taskNames = Enum.GetNames<TrangaTask.Task>();
int tIndex = 0;
Console.WriteLine("Available Tasks:");
foreach (string taskName in taskNames)
Console.WriteLine($"{tIndex++}: {taskName}");
Console.WriteLine("Enter q to abort");
Console.WriteLine($"Select Task (0-{taskNames.Length - 1}):");
string? selectedTask = Console.ReadLine();
while(selectedTask is null || selectedTask.Length < 1)
selectedTask = Console.ReadLine();
if (selectedTask.Length == 1 && selectedTask.ToLower() == "q")
{
Console.Clear();
Console.WriteLine("aborted.");
logger.WriteLine("Tranga_CLI", "aborted.");
return null;
}
try
{
int selectedTaskIndex = Convert.ToInt32(selectedTask);
string selectedTaskName = taskNames[selectedTaskIndex];
return Enum.Parse<TrangaTask.Task>(selectedTaskName);
}
catch (Exception e)
{
Console.WriteLine($"Exception: {e.Message}");
logger.WriteLine("Tranga_CLI", e.Message);
}
return null;
}
private static TimeSpan SelectReoccurrence(Logger logger)
{
logger.WriteLine("Tranga_CLI", "Menu: Select Reoccurrence");
Console.WriteLine("Select reoccurrence Timer (Format hh:mm:ss):");
return TimeSpan.Parse(Console.ReadLine()!, new CultureInfo("en-US"));
}
private static Connector? SelectConnector(string folderPath, Connector[] connectors, Logger logger)
{
logger.WriteLine("Tranga_CLI", "Menu: Select Connector");
Console.Clear();
int cIndex = 0;
Console.WriteLine("Connectors:");
foreach (Connector connector in connectors)
Console.WriteLine($"{cIndex++}: {connector.name}");
Console.WriteLine("Enter q to abort");
Console.WriteLine($"Select Connector (0-{connectors.Length - 1}):");
string? selectedConnector = Console.ReadLine();
while(selectedConnector is null || selectedConnector.Length < 1)
selectedConnector = Console.ReadLine();
if (selectedConnector.Length == 1 && selectedConnector.ToLower() == "q")
{
Console.Clear();
Console.WriteLine("aborted.");
logger.WriteLine("Tranga_CLI", "aborted.");
return null;
}
try
{
int selectedConnectorIndex = Convert.ToInt32(selectedConnector);
return connectors[selectedConnectorIndex];
}
catch (Exception e)
{
Console.WriteLine($"Exception: {e.Message}");
logger.WriteLine("Tranga_CLI", e.Message);
}
return null;
}
private static Publication? SelectPublication(Connector connector, Logger logger)
{
logger.WriteLine("Tranga_CLI", "Menu: Select Publication");
Console.Clear();
Console.WriteLine($"Connector: {connector.name}");
Console.WriteLine("Publication search query (leave empty for all):");
string? query = Console.ReadLine();
Publication[] publications = connector.GetPublications(query ?? "");
int pIndex = 0;
Console.WriteLine("Publications:");
foreach(Publication publication in publications)
Console.WriteLine($"{pIndex++}: {publication.sortName}");
Console.WriteLine("Enter q to abort");
Console.WriteLine($"Select publication to Download (0-{publications.Length - 1}):");
string? selectedPublication = Console.ReadLine();
while(selectedPublication is null || selectedPublication.Length < 1)
selectedPublication = Console.ReadLine();
if (selectedPublication.Length == 1 && selectedPublication.ToLower() == "q")
{
Console.Clear();
Console.WriteLine("aborted.");
logger.WriteLine("Tranga_CLI", "aborted.");
return null;
}
try
{
int selectedPublicationIndex = Convert.ToInt32(selectedPublication);
return publications[selectedPublicationIndex];
}
catch (Exception e)
{
Console.WriteLine($"Exception: {e.Message}");
logger.WriteLine("Tranga_CLI", e.Message);
}
return null;
}
private static void SearchTasks(TaskManager taskManager, Logger logger)
{
logger.WriteLine("Tranga_CLI", "Menu: Search task");
Console.Clear();
Console.WriteLine("Enter search query:");
string? query = Console.ReadLine();
while (query is null || query.Length < 4)
query = Console.ReadLine();
PrintTasks(taskManager.GetAllTasks().Where(qTask =>
qTask.ToString().Contains(query, StringComparison.OrdinalIgnoreCase)).ToArray(), logger);
}
}

View File

@@ -1,12 +1,6 @@

Microsoft Visual Studio Solution File, Format Version 12.00
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Tranga", ".\Tranga\Tranga.csproj", "{545E81B9-D96B-4C8F-A97F-2C02414DE566}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Tranga-CLI", "Tranga-CLI\Tranga-CLI.csproj", "{4899E3B2-B259-479A-B43E-042D043E9501}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Logging", "Logging\Logging.csproj", "{415BE889-BB7D-426F-976F-8D977876A462}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Tranga-API", "Tranga-API\Tranga-API.csproj", "{48F4E495-75BC-4402-8E03-DEC5B79D7E83}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "API", "API\API.csproj", "{EDB07E7B-351F-4FCC-9AEF-777838E5551E}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -14,21 +8,9 @@ Global
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{545E81B9-D96B-4C8F-A97F-2C02414DE566}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{545E81B9-D96B-4C8F-A97F-2C02414DE566}.Debug|Any CPU.Build.0 = Debug|Any CPU
{545E81B9-D96B-4C8F-A97F-2C02414DE566}.Release|Any CPU.ActiveCfg = Release|Any CPU
{545E81B9-D96B-4C8F-A97F-2C02414DE566}.Release|Any CPU.Build.0 = Release|Any CPU
{4899E3B2-B259-479A-B43E-042D043E9501}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{4899E3B2-B259-479A-B43E-042D043E9501}.Debug|Any CPU.Build.0 = Debug|Any CPU
{4899E3B2-B259-479A-B43E-042D043E9501}.Release|Any CPU.ActiveCfg = Release|Any CPU
{4899E3B2-B259-479A-B43E-042D043E9501}.Release|Any CPU.Build.0 = Release|Any CPU
{415BE889-BB7D-426F-976F-8D977876A462}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{415BE889-BB7D-426F-976F-8D977876A462}.Debug|Any CPU.Build.0 = Debug|Any CPU
{415BE889-BB7D-426F-976F-8D977876A462}.Release|Any CPU.ActiveCfg = Release|Any CPU
{415BE889-BB7D-426F-976F-8D977876A462}.Release|Any CPU.Build.0 = Release|Any CPU
{48F4E495-75BC-4402-8E03-DEC5B79D7E83}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{48F4E495-75BC-4402-8E03-DEC5B79D7E83}.Debug|Any CPU.Build.0 = Debug|Any CPU
{48F4E495-75BC-4402-8E03-DEC5B79D7E83}.Release|Any CPU.ActiveCfg = Release|Any CPU
{48F4E495-75BC-4402-8E03-DEC5B79D7E83}.Release|Any CPU.Build.0 = Release|Any CPU
{EDB07E7B-351F-4FCC-9AEF-777838E5551E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{EDB07E7B-351F-4FCC-9AEF-777838E5551E}.Debug|Any CPU.Build.0 = Debug|Any CPU
{EDB07E7B-351F-4FCC-9AEF-777838E5551E}.Release|Any CPU.ActiveCfg = Release|Any CPU
{EDB07E7B-351F-4FCC-9AEF-777838E5551E}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal

View File

@@ -1,3 +1,14 @@
<wpf:ResourceDictionary xml:space="preserve" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml" xmlns:s="clr-namespace:System;assembly=mscorlib" xmlns:ss="urn:shemas-jetbrains-com:settings-storage-xaml" xmlns:wpf="http://schemas.microsoft.com/winfx/2006/xaml/presentation">
<s:Boolean x:Key="/Default/UserDictionary/Words/=altnames/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=authorsartists/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Gotify/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=jjob/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Komga/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=lunasea/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=mangakatana/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Manganato/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Mangasee/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Mangaworld/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Ntfy/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Taskmanager/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Tranga/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>

View File

@@ -1,33 +0,0 @@
using System.Globalization;
namespace Tranga;
/// <summary>
/// Has to be part of a Publication.
/// Includes the chapter name, volume number, chapter number, the location of the chapter on the internet, and the name of the local file.
/// </summary>
public struct Chapter
{
public string? name { get; }
public string? volumeNumber { get; }
public string? chapterNumber { get; }
public string url { get; }
public string fileName { get; }
public string sortNumber { get; }
public Chapter(string? name, string? volumeNumber, string? chapterNumber, string url)
{
this.name = name;
this.volumeNumber = volumeNumber is { Length: > 0 } ? volumeNumber : "1";
this.chapterNumber = chapterNumber;
this.url = url;
string chapterName = string.Concat((name ?? "").Split(Path.GetInvalidFileNameChars()));
NumberFormatInfo nfi = new NumberFormatInfo()
{
NumberDecimalSeparator = "."
};
sortNumber = decimal.Round(Convert.ToDecimal(this.volumeNumber, nfi) * Convert.ToDecimal(this.chapterNumber, nfi), 1)
.ToString(nfi);
this.fileName = $"{chapterName} - V{volumeNumber}C{chapterNumber} - {sortNumber}";
}
}
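A quick worked example of the constructor above (illustrative only, not part of the original source; all values are hypothetical):

//new Chapter("Example Chapter", "2", "10.5", "https://example.org/chapter")
//  => volumeNumber = "2", chapterNumber = "10.5",
//     sortNumber   = "21.0"  (decimal.Round(2 * 10.5, 1)),
//     fileName     = "Example Chapter - V2C10.5 - 21.0"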

View File

@@ -1,212 +0,0 @@
using System.IO.Compression;
using System.Net;
using System.Xml.Linq;
using Logging;
namespace Tranga;
/// <summary>
/// Base-Class for all Connectors
/// Provides some methods to be used by all Connectors, as well as a DownloadClient
/// </summary>
public abstract class Connector
{
internal string downloadLocation { get; } //Location of local files
protected DownloadClient downloadClient { get; }
protected Logger? logger;
protected Connector(string downloadLocation, uint downloadDelay, Logger? logger)
{
this.downloadLocation = downloadLocation;
this.downloadClient = new DownloadClient(downloadDelay);
this.logger = logger;
}
public abstract string name { get; } //Name of the Connector (e.g. Website)
/// <summary>
/// Returns all Publications matching the given string.
/// If the string is empty or null, returns all Publications of the Connector.
/// </summary>
/// <param name="publicationTitle">Search-Query</param>
/// <returns>Publications matching the query</returns>
public abstract Publication[] GetPublications(string publicationTitle = "");
/// <summary>
/// Returns all Chapters of the publication in the provided language.
/// If the language is empty or null, returns all Chapters in all Languages.
/// </summary>
/// <param name="publication">Publication to get Chapters for</param>
/// <param name="language">Language of the Chapters</param>
/// <returns>Array of Chapters matching Publication and Language</returns>
public abstract Chapter[] GetChapters(Publication publication, string language = "");
/// <summary>
/// Retrieves the Chapter (+Images) from the website.
/// Should later call DownloadChapterImages to retrieve the individual Images of the Chapter.
/// </summary>
/// <param name="publication">Publication that contains Chapter</param>
/// <param name="chapter">Chapter with Images to retrieve</param>
public abstract void DownloadChapter(Publication publication, Chapter chapter);
/// <summary>
/// Retrieves the Cover from the Website
/// </summary>
/// <param name="publication">Publication to retrieve Cover for</param>
public abstract void DownloadCover(Publication publication);
/// <summary>
/// Saves the series-info to series.json in the Publication Folder
/// </summary>
/// <param name="publication">Publication to save series.json for</param>
public void SaveSeriesInfo(Publication publication)
{
logger?.WriteLine(this.GetType().ToString(), $"Saving series.json for {publication.sortName}");
//Check if Publication already has a Folder and a series.json
string publicationFolder = Path.Join(downloadLocation, publication.folderName);
if(!Directory.Exists(publicationFolder))
Directory.CreateDirectory(publicationFolder);
string seriesInfoPath = Path.Join(publicationFolder, "series.json");
if(!File.Exists(seriesInfoPath))
File.WriteAllText(seriesInfoPath,publication.GetSeriesInfoJson());
}
/// <summary>
/// Creates a string containing XML of publication and chapter.
/// See ComicInfo.xml
/// </summary>
/// <returns>XML-string</returns>
protected static string CreateComicInfo(Publication publication, Chapter chapter, Logger? logger)
{
logger?.WriteLine("Connector", $"Creating ComicInfo.Xml for {publication.sortName} Chapter {chapter.volumeNumber} {chapter.chapterNumber}");
XElement comicInfo = new XElement("ComicInfo",
new XElement("Tags", string.Join(',',publication.tags)),
new XElement("LanguageISO", publication.originalLanguage),
new XElement("Title", chapter.name),
new XElement("Volume", chapter.volumeNumber),
new XElement("Number", chapter.chapterNumber) //TODO check if this is correct at some point
);
return comicInfo.ToString();
}
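//Illustrative output of CreateComicInfo (comment not in the original source; publication/chapter values are hypothetical):
//<ComicInfo>
//  <Tags>Action,Comedy</Tags>
//  <LanguageISO>ja</LanguageISO>
//  <Title>Example Chapter</Title>
//  <Volume>2</Volume>
//  <Number>10.5</Number>
//</ComicInfo>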
/// <summary>
/// Checks if a chapter-archive is already present
/// </summary>
/// <returns>true if chapter is present</returns>
public bool ChapterIsDownloaded(Publication publication, Chapter chapter)
{
return File.Exists(CreateFullFilepath(publication, chapter));
}
/// <summary>
/// Creates full file path of chapter-archive
/// </summary>
/// <returns>Filepath</returns>
protected string CreateFullFilepath(Publication publication, Chapter chapter)
{
return Path.Join(downloadLocation, publication.folderName, chapter.fileName);
}
/// <summary>
/// Downloads an Image from a URL and saves it to the given path (incl. fileName)
/// </summary>
/// <param name="imageUrl">URL of the Image to download</param>
/// <param name="fullPath">Full path (incl. fileName) to save the Image to</param>
/// <param name="downloadClient">DownloadClient of the connector</param>
protected static void DownloadImage(string imageUrl, string fullPath, DownloadClient downloadClient)
{
DownloadClient.RequestResult requestResult = downloadClient.MakeRequest(imageUrl);
byte[] buffer = new byte[requestResult.result.Length];
requestResult.result.ReadExactly(buffer, 0, buffer.Length);
File.WriteAllBytes(fullPath, buffer);
}
/// <summary>
/// Downloads all Images from URLs, Compresses to zip(cbz) and saves.
/// </summary>
/// <param name="imageUrls">List of URLs to download Images from</param>
/// <param name="saveArchiveFilePath">Full path to save archive to (without file ending .cbz)</param>
/// <param name="downloadClient">DownloadClient of the connector</param>
/// <param name="comicInfoPath">Path of the generate Chapter ComicInfo.xml, if it was generated</param>
protected static void DownloadChapterImages(string[] imageUrls, string saveArchiveFilePath, DownloadClient downloadClient, Logger? logger, string? comicInfoPath = null)
{
logger?.WriteLine("Connector", "Downloading Images");
//Check if Publication Directory already exists
string[] splitPath = saveArchiveFilePath.Split(Path.DirectorySeparatorChar);
string directoryPath = Path.Combine(splitPath.Take(splitPath.Length - 1).ToArray());
if (!Directory.Exists(directoryPath))
Directory.CreateDirectory(directoryPath);
string fullPath = $"{saveArchiveFilePath}.cbz";
if (File.Exists(fullPath)) //Don't download twice.
return;
//Create a temporary folder to store images
string tempFolder = Directory.CreateTempSubdirectory().FullName;
int chapter = 0;
//Download all Images to temporary Folder
foreach (string imageUrl in imageUrls)
{
string[] split = imageUrl.Split('.');
string extension = split[^1];
DownloadImage(imageUrl, Path.Join(tempFolder, $"{chapter++}.{extension}"), downloadClient);
}
if(comicInfoPath is not null)
File.Copy(comicInfoPath, Path.Join(tempFolder, "ComicInfo.xml"));
logger?.WriteLine("Connector", "Creating archive");
//ZIP-it and ship-it
ZipFile.CreateFromDirectory(tempFolder, fullPath);
Directory.Delete(tempFolder, true); //Cleanup
}
protected class DownloadClient
{
private readonly TimeSpan _requestSpeed;
private DateTime _lastRequest;
private static readonly HttpClient Client = new();
/// <summary>
/// Creates a rate-limited download client
/// </summary>
/// <param name="delay">Minimum delay in milliseconds between requests (to avoid spamming the server)</param>
public DownloadClient(uint delay)
{
_requestSpeed = TimeSpan.FromMilliseconds(delay);
_lastRequest = DateTime.Now.Subtract(_requestSpeed);
}
/// <summary>
/// Request Webpage
/// </summary>
/// <param name="url"></param>
/// <returns>RequestResult with StatusCode and Stream of received data</returns>
public RequestResult MakeRequest(string url)
{
while((DateTime.Now - _lastRequest) < _requestSpeed)
Thread.Sleep(10);
_lastRequest = DateTime.Now;
HttpRequestMessage requestMessage = new(HttpMethod.Get, url);
HttpResponseMessage response = Client.Send(requestMessage);
Stream resultString = response.IsSuccessStatusCode ? response.Content.ReadAsStream() : Stream.Null;
return new RequestResult(response.StatusCode, resultString);
}
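//Illustrative behaviour (comment not in the original source): with delay = 750
//(the default used by the MangaDex connector), two consecutive MakeRequest calls
//are spaced at least ~750 ms apart, because the second call sleeps until
//_requestSpeed has elapsed since _lastRequest.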
public struct RequestResult
{
public HttpStatusCode statusCode { get; }
public Stream result { get; }
public RequestResult(HttpStatusCode statusCode, Stream result)
{
this.statusCode = statusCode;
this.result = result;
}
}
}
}

View File

@@ -1,242 +0,0 @@
using System.Globalization;
using System.Net;
using System.Text.Json;
using System.Text.Json.Nodes;
using Logging;
namespace Tranga.Connectors;
public class MangaDex : Connector
{
public override string name { get; }
public MangaDex(string downloadLocation, uint downloadDelay, Logger? logger) : base(downloadLocation, downloadDelay, logger)
{
name = "MangaDex";
}
public MangaDex(string downloadLocation, Logger? logger) : base(downloadLocation, 750, logger)
{
name = "MangaDex";
}
public override Publication[] GetPublications(string publicationTitle = "")
{
logger?.WriteLine(this.GetType().ToString(), $"Getting Publications (title={publicationTitle})");
const int limit = 100; //How many values we want returned at once
int offset = 0; //"Page"
int total = int.MaxValue; //How many total results are there, is updated on first request
HashSet<Publication> publications = new();
while (offset < total) //As long as we haven't requested all "Pages"
{
//Request next Page
DownloadClient.RequestResult requestResult =
downloadClient.MakeRequest(
$"https://api.mangadex.org/manga?limit={limit}&title={publicationTitle}&offset={offset}");
if (requestResult.statusCode != HttpStatusCode.OK)
break;
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
offset += limit;
if (result is null)
break;
total = result["total"]!.GetValue<int>(); //Update the total number of Publications
JsonArray mangaInResult = result["data"]!.AsArray(); //Manga-data-Array
//Loop each Manga and extract information from JSON
foreach (JsonNode? mangaNode in mangaInResult)
{
JsonObject manga = (JsonObject)mangaNode!;
JsonObject attributes = manga["attributes"]!.AsObject();
string title = attributes["title"]!.AsObject().ContainsKey("en") && attributes["title"]!["en"] is not null
? attributes["title"]!["en"]!.GetValue<string>()
: attributes["title"]![((IDictionary<string, JsonNode?>)attributes["title"]!.AsObject()).Keys.First()]!.GetValue<string>();
string? description = attributes["description"]!.AsObject().ContainsKey("en") && attributes["description"]!["en"] is not null
? attributes["description"]!["en"]!.GetValue<string?>()
: null;
JsonArray altTitlesObject = attributes["altTitles"]!.AsArray();
Dictionary<string, string> altTitlesDict = new();
foreach (JsonNode? altTitleNode in altTitlesObject)
{
JsonObject altTitleObject = (JsonObject)altTitleNode!;
string key = ((IDictionary<string, JsonNode?>)altTitleObject).Keys.ToArray()[0];
altTitlesDict.TryAdd(key, altTitleObject[key]!.GetValue<string>());
}
JsonArray tagsObject = attributes["tags"]!.AsArray();
HashSet<string> tags = new();
foreach (JsonNode? tagNode in tagsObject)
{
JsonObject tagObject = (JsonObject)tagNode!;
if(tagObject["attributes"]!["name"]!.AsObject().ContainsKey("en"))
tags.Add(tagObject["attributes"]!["name"]!["en"]!.GetValue<string>());
}
string? poster = null;
if (manga.ContainsKey("relationships") && manga["relationships"] is not null)
{
JsonArray relationships = manga["relationships"]!.AsArray();
poster = relationships.FirstOrDefault(relationship => relationship!["type"]!.GetValue<string>() == "cover_art")!["id"]!.GetValue<string>();
}
Dictionary<string, string> linksDict = new();
if (attributes.ContainsKey("links") && attributes["links"] is not null)
{
JsonObject linksObject = attributes["links"]!.AsObject();
foreach (string key in ((IDictionary<string, JsonNode?>)linksObject).Keys)
{
linksDict.Add(key, linksObject[key]!.GetValue<string>());
}
}
int? year = attributes.ContainsKey("year") && attributes["year"] is not null
? attributes["year"]!.GetValue<int?>()
: null;
string? originalLanguage = attributes.ContainsKey("originalLanguage") && attributes["originalLanguage"] is not null
? attributes["originalLanguage"]!.GetValue<string?>()
: null;
string status = attributes["status"]!.GetValue<string>();
Publication pub = new Publication(
title,
description,
altTitlesDict,
tags.ToArray(),
poster,
linksDict,
year,
originalLanguage,
status,
manga["id"]!.GetValue<string>()
);
publications.Add(pub); //Add Publication (Manga) to result
}
}
return publications.ToArray();
}
public override Chapter[] GetChapters(Publication publication, string language = "")
{
logger?.WriteLine(this.GetType().ToString(), $"Getting Chapters {publication.sortName} (language={language})");
const int limit = 100; //How many values we want returned at once
int offset = 0; //"Page"
int total = int.MaxValue; //How many total results are there, is updated on first request
List<Chapter> chapters = new();
//As long as we haven't requested all "Pages"
while (offset < total)
{
//Request next "Page"
DownloadClient.RequestResult requestResult =
downloadClient.MakeRequest(
$"https://api.mangadex.org/manga/{publication.downloadUrl}/feed?limit={limit}&offset={offset}&translatedLanguage%5B%5D={language}");
if (requestResult.statusCode != HttpStatusCode.OK)
break;
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
offset += limit;
if (result is null)
break;
total = result["total"]!.GetValue<int>();
JsonArray chaptersInResult = result["data"]!.AsArray();
//Loop through all Chapters in result and extract information from JSON
foreach (JsonNode? jsonNode in chaptersInResult)
{
JsonObject chapter = (JsonObject)jsonNode!;
JsonObject attributes = chapter["attributes"]!.AsObject();
string chapterId = chapter["id"]!.GetValue<string>();
string? title = attributes.ContainsKey("title") && attributes["title"] is not null
? attributes["title"]!.GetValue<string>()
: null;
string? volume = attributes.ContainsKey("volume") && attributes["volume"] is not null
? attributes["volume"]!.GetValue<string>()
: null;
string? chapterNum = attributes.ContainsKey("chapter") && attributes["chapter"] is not null
? attributes["chapter"]!.GetValue<string>()
: null;
chapters.Add(new Chapter(title, volume, chapterNum, chapterId));
}
}
//Return Chapters ordered by Chapter-Number
NumberFormatInfo chapterNumberFormatInfo = new()
{
NumberDecimalSeparator = "."
};
return chapters.OrderBy(chapter => Convert.ToSingle(chapter.chapterNumber, chapterNumberFormatInfo)).ToArray();
}
public override void DownloadChapter(Publication publication, Chapter chapter)
{
logger?.WriteLine(this.GetType().ToString(), $"Download Chapter {publication.sortName} {chapter.volumeNumber}-{chapter.chapterNumber}");
//Request URLs for Chapter-Images
DownloadClient.RequestResult requestResult =
downloadClient.MakeRequest($"https://api.mangadex.org/at-home/server/{chapter.url}?forcePort443=false'");
if (requestResult.statusCode != HttpStatusCode.OK)
return;
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
if (result is null)
return;
string baseUrl = result["baseUrl"]!.GetValue<string>();
string hash = result["chapter"]!["hash"]!.GetValue<string>();
JsonArray imageFileNames = result["chapter"]!["data"]!.AsArray();
//Loop through all imageNames and construct urls (imageUrl)
HashSet<string> imageUrls = new();
foreach (JsonNode? image in imageFileNames)
imageUrls.Add($"{baseUrl}/data/{hash}/{image!.GetValue<string>()}");
string comicInfoPath = Path.GetTempFileName();
File.WriteAllText(comicInfoPath, CreateComicInfo(publication, chapter, logger));
//Download Chapter-Images
DownloadChapterImages(imageUrls.ToArray(), CreateFullFilepath(publication, chapter), downloadClient, logger, comicInfoPath);
}
public override void DownloadCover(Publication publication)
{
logger?.WriteLine(this.GetType().ToString(), $"Download cover {publication.sortName}");
//Check if Publication already has a Folder and cover
string publicationFolder = Path.Join(downloadLocation, publication.folderName);
if(!Directory.Exists(publicationFolder))
Directory.CreateDirectory(publicationFolder);
DirectoryInfo dirInfo = new (publicationFolder);
foreach(FileInfo fileInfo in dirInfo.EnumerateFiles())
if (fileInfo.Name.Contains("cover."))
return;
//Request information where to download Cover
DownloadClient.RequestResult requestResult =
downloadClient.MakeRequest($"https://api.mangadex.org/cover/{publication.posterUrl}");
if (requestResult.statusCode != HttpStatusCode.OK)
return;
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
if (result is null)
return;
string fileName = result["data"]!["attributes"]!["fileName"]!.GetValue<string>();
string coverUrl = $"https://uploads.mangadex.org/covers/{publication.downloadUrl}/{fileName}";
//Get file-extension (jpg, png)
string[] split = coverUrl.Split('.');
string extension = split[^1];
string outFolderPath = Path.Join(downloadLocation, publication.folderName);
Directory.CreateDirectory(outFolderPath);
//Download cover-Image
DownloadImage(coverUrl, Path.Join(downloadLocation, publication.folderName, $"cover.{extension}"), this.downloadClient);
}
}

View File

@@ -1,125 +0,0 @@
using System.Net.Http.Headers;
using System.Text.Json.Nodes;
using Logging;
using Newtonsoft.Json;
using JsonSerializer = System.Text.Json.JsonSerializer;
namespace Tranga;
/// <summary>
/// Provides connectivity to Komga-API
/// Can fetch and update libraries
/// </summary>
public class Komga
{
public string baseUrl { get; }
public string auth { get; } //Base64 encoded; if you use your password everywhere, you have problems
private Logger? logger;
/// <param name="baseUrl">Base-URL of Komga instance, no trailing slashes(/)</param>
/// <param name="username">Komga Username</param>
/// <param name="password">Komga password, will be base64 encoded. yea</param>
public Komga(string baseUrl, string username, string password, Logger? logger)
{
this.baseUrl = baseUrl;
this.auth = Convert.ToBase64String(System.Text.Encoding.ASCII.GetBytes($"{username}:{password}"));
this.logger = logger;
}
/// <param name="baseUrl">Base-URL of Komga instance, no trailing slashes(/)</param>
/// <param name="auth">Base64 string of username and password (username):(password)</param>
[JsonConstructor]
public Komga(string baseUrl, string auth, Logger? logger)
{
this.baseUrl = baseUrl;
this.auth = auth;
this.logger = logger;
}
/// <summary>
/// Fetches all libraries available to the user
/// </summary>
/// <returns>Array of KomgaLibraries</returns>
public KomgaLibrary[] GetLibraries()
{
logger?.WriteLine(this.GetType().ToString(), $"Getting Libraries");
Stream data = NetClient.MakeRequest($"{baseUrl}/api/v1/libraries", auth);
JsonArray? result = JsonSerializer.Deserialize<JsonArray>(data);
if (result is null)
return Array.Empty<KomgaLibrary>();
HashSet<KomgaLibrary> ret = new();
foreach (JsonNode? jsonNode in result)
{
var jObject = (JsonObject?)jsonNode;
string libraryId = jObject!["id"]!.GetValue<string>();
string libraryName = jObject!["name"]!.GetValue<string>();
ret.Add(new KomgaLibrary(libraryId, libraryName));
}
return ret.ToArray();
}
/// <summary>
/// Updates library with given id
/// </summary>
/// <param name="libraryId">Id of the Komga-Library</param>
/// <returns>true if successful</returns>
public bool UpdateLibrary(string libraryId)
{
logger?.WriteLine(this.GetType().ToString(), $"Updating Libraries");
return NetClient.MakePost($"{baseUrl}/api/v1/libraries/{libraryId}/scan", auth);
}
public struct KomgaLibrary
{
public string id { get; }
public string name { get; }
public KomgaLibrary(string id, string name)
{
this.id = id;
this.name = name;
}
}
private static class NetClient
{
public static Stream MakeRequest(string url, string auth)
{
HttpClient client = new();
HttpRequestMessage requestMessage = new HttpRequestMessage
{
Method = HttpMethod.Get,
RequestUri = new Uri(url),
Headers =
{
{ "Accept", "application/json" },
{ "Authorization", new AuthenticationHeaderValue("Basic", auth).ToString() }
}
};
HttpResponseMessage response = client.Send(requestMessage);
Stream resultString = response.IsSuccessStatusCode ? response.Content.ReadAsStream() : Stream.Null;
return resultString;
}
public static bool MakePost(string url, string auth)
{
HttpClient client = new();
HttpRequestMessage requestMessage = new HttpRequestMessage
{
Method = HttpMethod.Post,
RequestUri = new Uri(url),
Headers =
{
{ "Accept", "application/json" },
{ "Authorization", new AuthenticationHeaderValue("Basic", auth).ToString() }
}
};
HttpResponseMessage response = client.Send(requestMessage);
return response.IsSuccessStatusCode;
}
}
}
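A minimal usage sketch of the Komga class above (illustrative only, not part of the original source; the URL and credentials are placeholders):

Komga komga = new Komga("http://localhost:25600", "admin@example.com", "hunter2", logger: null);
foreach (Komga.KomgaLibrary library in komga.GetLibraries())
    komga.UpdateLibrary(library.id); //trigger a scan of every library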

View File

@@ -1,101 +0,0 @@
using Newtonsoft.Json;
namespace Tranga;
/// <summary>
/// Contains information on a Publication (Manga)
/// </summary>
public readonly struct Publication
{
public string sortName { get; }
// ReSharper disable UnusedAutoPropertyAccessor.Global we need it, trust
[JsonIgnore]public Dictionary<string,string> altTitles { get; }
// ReSharper disable trice MemberCanBePrivate.Global, trust
public string? description { get; }
public string[] tags { get; }
public string? posterUrl { get; }
[JsonIgnore]public Dictionary<string,string> links { get; }
public int? year { get; }
public string? originalLanguage { get; }
public string status { get; }
public string folderName { get; }
public string downloadUrl { get; }
public string internalId { get; }
public readonly struct ValueTuple
{
}
public Publication(string sortName, string? description, Dictionary<string,string> altTitles, string[] tags, string? posterUrl, Dictionary<string,string>? links, int? year, string? originalLanguage, string status, string downloadUrl)
{
this.sortName = sortName;
this.description = description;
this.altTitles = altTitles;
this.tags = tags;
this.posterUrl = posterUrl;
this.links = links ?? new Dictionary<string, string>();
this.year = year;
this.originalLanguage = originalLanguage;
this.status = status;
this.downloadUrl = downloadUrl;
this.folderName = string.Concat(sortName.Split(Path.GetInvalidPathChars().Concat(Path.GetInvalidFileNameChars()).ToArray()));
this.internalId = Guid.NewGuid().ToString();
}
/// <returns>Serialized JSON String for series.json</returns>
public string GetSeriesInfoJson()
{
SeriesInfo si = new (new Metadata(this.sortName, this.year.ToString() ?? string.Empty, this.status, this.description ?? ""));
return System.Text.Json.JsonSerializer.Serialize(si);
}
//Only for series.json
private struct SeriesInfo
{
// ReSharper disable once UnusedAutoPropertyAccessor.Local we need it, trust
[JsonRequired]public Metadata metadata { get; }
public SeriesInfo(Metadata metadata) => this.metadata = metadata;
}
//Only for series.json. What an abomination; why are all the fields not-null?
private struct Metadata
{
// ReSharper disable UnusedAutoPropertyAccessor.Local we need them all, trust me
[JsonRequired] public string type { get; }
[JsonRequired] public string publisher { get; }
// ReSharper disable twice IdentifierTypo
[JsonRequired] public int comicid { get; }
[JsonRequired] public string booktype { get; }
// ReSharper disable InconsistentNaming This one property is capitalized. Why?
[JsonRequired] public string ComicImage { get; }
[JsonRequired] public int total_issues { get; }
[JsonRequired] public string publication_run { get; }
[JsonRequired]public string name { get; }
[JsonRequired]public string year { get; }
[JsonRequired]public string status { get; }
[JsonRequired]public string description_text { get; }
public Metadata(string name, string year, string status, string description_text)
{
this.name = name;
this.year = year;
if(status == "ongoing" || status == "hiatus")
this.status = "Continuing";
else if (status == "completed" || status == "cancelled")
this.status = "Ended";
else
this.status = status;
this.description_text = description_text;
//kill it with fire, but otherwise Komga will not parse
type = "Manga";
publisher = "";
comicid = 0;
booktype = "";
ComicImage = "";
total_issues = 0;
publication_run = "";
}
}
}
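For reference, GetSeriesInfoJson above should produce output roughly like the following (illustrative only, not from the original source; name, year and description are placeholder values, and an "ongoing" status is mapped to "Continuing" by the Metadata constructor):

{"metadata":{"type":"Manga","publisher":"","comicid":0,"booktype":"","ComicImage":"","total_issues":0,"publication_run":"","name":"Example Manga","year":"2020","status":"Continuing","description_text":"Example description"}}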

View File

@@ -1,129 +0,0 @@
using Logging;
namespace Tranga;
/// <summary>
/// Executes TrangaTasks
/// Based on the TrangaTask.Task a method is called.
/// The chapterCollection is updated with new Publications/Chapters.
/// </summary>
public static class TaskExecutor
{
/// <summary>
/// Executes TrangaTask.
/// </summary>
/// <param name="taskManager">Parent</param>
/// <param name="trangaTask">Task to execute</param>
/// <param name="chapterCollection">Current chapterCollection to update</param>
/// <param name="logger"></param>
/// <exception cref="ArgumentException">Is thrown when there is no Connector available with the name of the TrangaTask.connectorName</exception>
public static void Execute(TaskManager taskManager, TrangaTask trangaTask, Logger? logger)
{
//Only execute task if it is not already being executed.
if (trangaTask.state == TrangaTask.ExecutionState.Running)
{
logger?.WriteLine("TaskExecutor", $"Task already running {trangaTask}");
return;
}
trangaTask.state = TrangaTask.ExecutionState.Running;
logger?.WriteLine("TaskExecutor", $"Starting Task {trangaTask}");
//Connector is not needed for all tasks
Connector? connector = null;
if (trangaTask.task != TrangaTask.Task.UpdateKomgaLibrary)
connector = taskManager.GetConnector(trangaTask.connectorName!);
//Call appropriate Method based on TrangaTask.Task
switch (trangaTask.task)
{
case TrangaTask.Task.DownloadNewChapters:
DownloadNewChapters(connector!, (Publication)trangaTask.publication!, trangaTask.language, ref taskManager._chapterCollection);
break;
case TrangaTask.Task.UpdateChapters:
UpdateChapters(connector!, (Publication)trangaTask.publication!, trangaTask.language, ref taskManager._chapterCollection);
break;
case TrangaTask.Task.UpdatePublications:
UpdatePublications(connector!, ref taskManager._chapterCollection);
break;
case TrangaTask.Task.UpdateKomgaLibrary:
UpdateKomgaLibrary(taskManager);
break;
}
logger?.WriteLine("TaskExecutor", $"Task finished! {trangaTask}");
trangaTask.lastExecuted = DateTime.Now;
trangaTask.state = TrangaTask.ExecutionState.Waiting;
}
/// <summary>
/// Updates all Komga-Libraries
/// </summary>
/// <param name="taskManager">Parent</param>
private static void UpdateKomgaLibrary(TaskManager taskManager)
{
if (taskManager.komga is null)
return;
Komga komga = taskManager.komga;
Komga.KomgaLibrary[] allLibraries = komga.GetLibraries();
foreach (Komga.KomgaLibrary lib in allLibraries)
komga.UpdateLibrary(lib.id);
}
/// <summary>
/// Updates the available Publications from a Connector (all of them)
/// </summary>
/// <param name="connector">Connector to receive Publications from</param>
/// <param name="chapterCollection"></param>
private static void UpdatePublications(Connector connector, ref Dictionary<Publication, List<Chapter>> chapterCollection)
{
Publication[] publications = connector.GetPublications();
foreach (Publication publication in publications)
chapterCollection.TryAdd(publication, new List<Chapter>());
}
/// <summary>
/// Checks for new Chapters and downloads them.
/// Also downloads the cover and creates series.json if they are not yet present.
/// </summary>
/// <param name="connector">Connector to use</param>
/// <param name="publication">Publication to check</param>
/// <param name="language">Language to receive chapters for</param>
/// <param name="chapterCollection"></param>
private static void DownloadNewChapters(Connector connector, Publication publication, string language, ref Dictionary<Publication, List<Chapter>> chapterCollection)
{
List<Chapter> newChapters = UpdateChapters(connector, publication, language, ref chapterCollection);
connector.DownloadCover(publication);
//Check if Publication already has a Folder and a series.json
string publicationFolder = Path.Join(connector.downloadLocation, publication.folderName);
if(!Directory.Exists(publicationFolder))
Directory.CreateDirectory(publicationFolder);
string seriesInfoPath = Path.Join(publicationFolder, "series.json");
if(!File.Exists(seriesInfoPath))
File.WriteAllText(seriesInfoPath,publication.GetSeriesInfoJson());
foreach(Chapter newChapter in newChapters)
connector.DownloadChapter(publication, newChapter);
}
/// <summary>
/// Updates the available Chapters of a Publication
/// </summary>
/// <param name="connector">Connector to use</param>
/// <param name="publication">Publication to check</param>
/// <param name="language">Language to receive chapters for</param>
/// <param name="chapterCollection"></param>
/// <returns>List of Chapters that were previously not in collection</returns>
private static List<Chapter> UpdateChapters(Connector connector, Publication publication, string language, ref Dictionary<Publication, List<Chapter>> chapterCollection)
{
List<Chapter> newChaptersList = new();
chapterCollection.TryAdd(publication, newChaptersList); //To ensure publication is actually in collection
Chapter[] newChapters = connector.GetChapters(publication, language);
newChaptersList = newChapters.Where(nChapter => !connector.ChapterIsDownloaded(publication, nChapter)).ToList();
return newChaptersList;
}
}

Some files were not shown because too many files have changed in this diff.