87 Commits

Author SHA1 Message Date
f0de0a29da Merge pull request #400 from C9Glax/dependabot/github_actions/docker/build-push-action-6.18.0
Some checks failed
Docker Image CI / build (push) Has been cancelled
Bump docker/build-push-action from 6.17.0 to 6.18.0
2025-05-28 15:50:47 +02:00
d4227f2b8f Bump docker/build-push-action from 6.17.0 to 6.18.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.17.0 to 6.18.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.17.0...v6.18.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-version: 6.18.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-05-28 05:59:24 +00:00
cd00d35f22 Merge pull request #395 from C9Glax/dependabot/github_actions/docker/build-push-action-6.17.0
Some checks failed
Docker Image CI / build (push) Has been cancelled
Bump docker/build-push-action from 6.16.0 to 6.17.0
2025-05-16 16:15:40 +02:00
4ef3e877ce Bump docker/build-push-action from 6.16.0 to 6.17.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.16.0 to 6.17.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.16.0...v6.17.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-version: 6.17.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-05-16 05:31:47 +00:00
7dba2518f9 Merge pull request #388 from C9Glax/dependabot/github_actions/docker/build-push-action-6.16.0
Some checks failed
Docker Image CI / build (push) Has been cancelled
Bump docker/build-push-action from 6.15.0 to 6.16.0
2025-04-25 09:01:00 +02:00
7506a0201e Bump docker/build-push-action from 6.15.0 to 6.16.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.15.0 to 6.16.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.15.0...v6.16.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-version: 6.16.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-04-25 05:51:29 +00:00
91fb815153 Update README.md
Some checks failed
Docker Image CI / build (push) Has been cancelled
2025-03-29 21:17:22 +01:00
6faf8bc733 Merge pull request #376 from C9Glax/cuttingedge
Some checks failed
Docker Image CI / build (push) Has been cancelled
Weebcentral fixes
2025-03-18 18:13:47 +01:00
bdff5b7aec Merge pull request #375 from TheyCallMeTravis/webtoons-search_regex_fix
Some checks failed
Docker Image CI / build (push) Has been cancelled
webtoons - fix search regex
2025-03-18 18:12:35 +01:00
5af8060d7b Merge pull request #374 from TheyCallMeTravis/weebcentral-fixsearch
Weebcentral - Fix Search Results Parse
2025-03-18 18:12:23 +01:00
6ed8ff1d52 webtoons - fix search regex parsing 2025-03-18 10:12:42 -05:00
3324ed6e4a Weebcentral - Fix Search Results Parse 2025-03-17 14:29:09 -05:00
67fd9d284b Merge pull request #369 from TheyCallMeTravis/WeebCentral-add_referrer
Some checks failed
Docker Image CI / build (push) Has been cancelled
WeebCentral - add referer to DownloadChapterImages
2025-03-15 10:24:28 +01:00
08f26dd21d add referer to DownloadChapterImages 2025-03-14 21:18:51 -05:00
89ed500751 Update actions for Server-V2
Some checks failed
Docker Image CI / build (push) Has been cancelled
2025-03-08 19:02:14 +01:00
b00b0ee030 Merge branch 'master' into cuttingedge-merge-ServerV2
Some checks failed
Docker Image CI / build (push) Has been cancelled
2025-03-08 19:00:42 +01:00
e47c52ad48 Merge pull request #367 from C9Glax/cuttingedge
Cuttingedge merge
2025-03-08 18:58:40 +01:00
293f0af8e3 Merge pull request #366 from merlinmarijn/manganato-domain-switch
Some checks are pending
Docker Image CI / build (push) Waiting to run
Manganato connector search fix
2025-03-08 07:40:11 +01:00
ebfa34e386 Update Manganato.cs 2025-03-07 22:33:24 +01:00
14524407f9 Update Manganato.cs 2025-03-07 22:29:40 +01:00
d56f0b383a Merge pull request #365 from merlinmarijn/manganato-domain-switch
Some checks are pending
Docker Image CI / build (push) Waiting to run
Manganato fix chapter naming format in CBZ files (i am sorry)
2025-03-07 21:54:06 +01:00
70391c83c1 Update Manganato.cs
i found out, i am stupid
2025-03-07 21:39:17 +01:00
dc7696ee26 Merge pull request #364 from merlinmarijn/manganato-domain-switch
Some checks are pending
Docker Image CI / build (push) Waiting to run
Enforce correct referrer check for access to Manganato
2025-03-07 20:19:52 +01:00
49dab9a670 Referrer policy changed
- Updated: the image hosting platform seems to have changed its policy and now requires the referrer to be sent from the actual site, instead of allowing any connection regardless of the referrer address (see the sketch below)
2025-03-07 19:57:27 +01:00
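A minimal sketch of what such a referrer-aware image download can look like; this is not the project's actual implementation, and the type and method names here are illustrative only:

using System;
using System.Net.Http;
using System.Threading.Tasks;

// Sketch only: send the chapter/site page as the Referer so the image host
// accepts the request (it now rejects missing or foreign referrers).
public static class ImageDownloader
{
    private static readonly HttpClient Client = new();

    public static async Task<byte[]> DownloadImageAsync(string imageUrl, string referrer)
    {
        using HttpRequestMessage request = new(HttpMethod.Get, imageUrl);
        request.Headers.Referrer = new Uri(referrer);
        using HttpResponseMessage response = await Client.SendAsync(request);
        response.EnsureSuccessStatusCode();
        return await response.Content.ReadAsByteArrayAsync();
    }
}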
c9bc79fbd5 Update new_connector.yml
Some checks failed
Docker Image CI / build (push) Has been cancelled
2025-03-07 10:19:08 +01:00
83ce315f87 Merge pull request #357 from merlinmarijn/manganato-domain-switch
Some checks are pending
Docker Image CI / build (push) Waiting to run
Update Connector for Manganato connector: Migrate from .com to .gg & Adjust HTML Parsing
#358 @merlinmarijn
2025-03-07 10:06:44 +01:00
59511056d0 added try around getting urls 2025-03-03 23:43:35 +01:00
ed3ca5dba8 removed leftover comment 2025-03-03 23:04:43 +01:00
8df05d7e8a fixed image referrer 2025-03-03 22:59:25 +01:00
95d1e37b47 Update Manganato.cs 2025-03-03 22:27:37 +01:00
b6494ab7f9 Merge pull request #354 from C9Glax/dependabot/github_actions/docker/setup-qemu-action-3.6.0
Some checks failed
Docker Image CI / build (push) Has been cancelled
Bump docker/setup-qemu-action from 3.5.0 to 3.6.0
2025-03-03 13:54:59 +01:00
1d1d01b6e5 Bump docker/setup-qemu-action from 3.5.0 to 3.6.0
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 3.5.0 to 3.6.0.
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
- [Commits](https://github.com/docker/setup-qemu-action/compare/v3.5.0...v3.6.0)

---
updated-dependencies:
- dependency-name: docker/setup-qemu-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-03-03 05:09:43 +00:00
5bb4977876 Merge pull request #353 from ale-ben/cuttingedge
Some checks failed
Docker Image CI / build (push) Has been cancelled
Weebcentral: File name also depends on original chapter name
2025-03-02 16:20:07 +01:00
c6bb1c9180 [cuttingedge] fix(Chapter): Minor logic change to account for all chapterName cases 2025-03-02 16:06:21 +01:00
9a066e7ac7 [cuttingedge] fix(Weebcentral): Updated CheckChapterIsDownloaded logic to also consider chapter name if present 2025-03-02 15:57:09 +01:00
4bafffded4 [cuttingedge] feat(Weebcentral): When ordering chapters, order name desc to put special chapters first 2025-03-02 15:56:12 +01:00
942b43da67 Merge branch 'refs/heads/cuttingedge' into cuttingedge-merge-ServerV2 2025-03-02 10:06:48 +01:00
ce5538b352 Merge pull request #341 from Makhuta/cuttingedge
Some checks are pending
Docker Image CI / build (push) Waiting to run
Added support for connector languages to HandleGet
2025-03-02 09:51:11 +01:00
0cfdf17bd4 Merge pull request #350 from C9Glax/dependabot/github_actions/docker/setup-buildx-action-3.10.0
Some checks failed
Docker Image CI / build (push) Has been cancelled
Bump docker/setup-buildx-action from 3.9.0 to 3.10.0
2025-03-02 09:50:21 +01:00
0c48c1e020 Merge pull request #351 from C9Glax/dependabot/github_actions/docker/build-push-action-6.15.0
Bump docker/build-push-action from 6.14.0 to 6.15.0
2025-03-02 09:50:17 +01:00
0638e75ed6 Merge pull request #349 from C9Glax/dependabot/github_actions/docker/setup-qemu-action-3.5.0
Bump docker/setup-qemu-action from 3.4.0 to 3.5.0
2025-03-02 09:49:16 +01:00
5a4bc1c6de [cuttingedge] fix(Weebcentral): Handle case of chapter name with multiple number parts 2025-03-01 12:06:51 +01:00
71f663ca2f [cuttingedge] fix(Weebcentral): File name also depends on original chapter name 2025-03-01 11:39:12 +01:00
1b61a16061 Bump docker/build-push-action from 6.14.0 to 6.15.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.14.0 to 6.15.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.14.0...v6.15.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-27 05:25:23 +00:00
db81fdce39 Bump docker/setup-buildx-action from 3.9.0 to 3.10.0
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3.9.0 to 3.10.0.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3.9.0...v3.10.0)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-27 05:25:21 +00:00
fdb5451162 Bump docker/setup-qemu-action from 3.4.0 to 3.5.0
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 3.4.0 to 3.5.0.
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
- [Commits](https://github.com/docker/setup-qemu-action/compare/v3.4.0...v3.5.0)

---
updated-dependencies:
- dependency-name: docker/setup-qemu-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-27 05:25:19 +00:00
6b7632b071 Merge pull request #344 from C9Glax/dependabot/github_actions/docker/build-push-action-6.14.0
Some checks failed
Docker Image CI / build (push) Has been cancelled
Bump docker/build-push-action from 6.13.0 to 6.14.0
2025-02-20 16:49:19 +01:00
06c080dfce Bump docker/build-push-action from 6.13.0 to 6.14.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.13.0 to 6.14.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.13.0...v6.14.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-20 05:50:25 +00:00
8130e11a9c Merge pull request #342 from C9Glax/cuttingedge
Some checks failed
Docker Image CI / build (push) Has been cancelled
Cuttingedge master merge
2025-02-14 15:51:42 +01:00
659a42d370 Add
- ability to get the supported languages of a connector, for use in the monitoring language selector (see the sketch below)
2025-02-12 14:07:13 +01:00
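A minimal sketch, with purely illustrative names (the actual change lives in the server's HandleGet handler, which is not shown here), of exposing each connector's supported languages so a language selector can be populated:

using System;
using System.Collections.Generic;

// Sketch only: map each connector name to the languages it supports, e.g. to
// feed a monitoring-job language selector. The types here are illustrative.
public interface IMangaConnector
{
    string Name { get; }
    string[] SupportedLanguages { get; }
}

public static class ConnectorLanguages
{
    public static IReadOnlyDictionary<string, string[]> ByConnector(IEnumerable<IMangaConnector> connectors)
    {
        Dictionary<string, string[]> languages = new(StringComparer.OrdinalIgnoreCase);
        foreach (IMangaConnector connector in connectors)
            languages[connector.Name] = connector.SupportedLanguages;
        return languages;
    }
}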
9cef068785 Merge pull request #340 from Makhuta/cuttingedge
Some checks failed
Docker Image CI / build (push) Has been cancelled
Fix the Webtoons connector fetching some chapters multiple times
2025-02-11 21:24:14 +01:00
4ad3149523 Fix
- fixed chapter parsing: the page count was parsed incorrectly, so chapters from the last page were added multiple times (downloads still completed, but the chapters from the last page were queued repeatedly; see the sketch below)
2025-02-11 20:16:30 +01:00
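A minimal sketch of the kind of guard that prevents this, assuming a hypothetical per-page chapter fetcher; it simply de-duplicates chapters by URL while walking the listing pages:

using System;
using System.Collections.Generic;

// Sketch only: hypothetical types and helper. Walk the listing pages once and
// de-duplicate by chapter URL, so a mis-parsed page count cannot queue the
// same chapters twice.
public record Chapter(string Url, string Title);

public static class ChapterListing
{
    public static List<Chapter> CollectChapters(int lastPage, Func<int, IEnumerable<Chapter>> getChaptersOnPage)
    {
        List<Chapter> chapters = new();
        HashSet<string> seenUrls = new();              // chapter URLs already queued
        for (int page = 1; page <= lastPage; page++)
            foreach (Chapter chapter in getChaptersOnPage(page))
                if (seenUrls.Add(chapter.Url))         // Add returns false for duplicates
                    chapters.Add(chapter);
        return chapters;
    }
}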
e6d40a7b36 Remove unused code from Weebcentral
Some checks failed
Docker Image CI / build (push) Has been cancelled
2025-02-09 18:37:55 +01:00
a95cb90561 Update Nuget packages 2025-02-09 18:37:41 +01:00
603e1b41d9 Add Chromium referer header 2025-02-09 18:37:33 +01:00
bb8a514830 Do not create .duplicate files anymore.
Just warn in the log and delete (or attempt to delete) the duplicate (see the sketch below)
2025-02-09 17:57:08 +01:00
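A minimal sketch of that behaviour, with an illustrative helper and logger; the real change lives in the chapter download path, which is not shown here:

using System;
using System.IO;

// Sketch only: when a chapter archive already exists, warn and try to delete
// the stale copy instead of writing a ".duplicate" marker file next to it.
public static class DuplicateHandling
{
    public static void RemoveExistingArchive(string archivePath, Action<string> logWarning)
    {
        if (!File.Exists(archivePath))
            return;
        logWarning($"Duplicate archive found, deleting: {archivePath}");
        try
        {
            File.Delete(archivePath);
        }
        catch (Exception e)
        {
            // Deletion can fail (file locked, missing permissions); warn and move on.
            logWarning($"Could not delete {archivePath}: {e.Message}");
        }
    }
}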
edacaaba8a Update Readme 2025-02-09 17:38:54 +01:00
d97da26994 spelling error 2025-02-09 17:37:53 +01:00
8b923d73c4 Merge pull request #337 from Makhuta/cuttingedge
Add Manga Connector
2025-02-09 17:34:08 +01:00
814efd3528 Merge remote-tracking branch 'origin/cuttingedge' into cuttingedge 2025-02-09 17:28:03 +01:00
2cd5d8bc4f Merge branch 'cuttingedge-merge-ServerV2' into cuttingedge 2025-02-09 17:27:42 +01:00
5a864ab9b7 Remove Manga4Life 2025-02-09 17:27:35 +01:00
c700974693 Merge pull request #339 from C9Glax/dependabot/github_actions/docker/setup-qemu-action-3.4.0
Some checks failed
Docker Image CI / build (push) Has been cancelled
Bump docker/setup-qemu-action from 3.3.0 to 3.4.0
2025-02-09 17:18:42 +01:00
553b5558d3 Merge pull request #338 from C9Glax/dependabot/github_actions/docker/setup-buildx-action-3.9.0
Bump docker/setup-buildx-action from 3.8.0 to 3.9.0
2025-02-09 17:18:25 +01:00
c9bbfee26b Merge pull request #331 from C9Glax/dependabot/github_actions/docker/build-push-action-6.13.0
Bump docker/build-push-action from 6.12.0 to 6.13.0
2025-02-09 17:18:10 +01:00
6e869eeb0d Bump docker/setup-qemu-action from 3.3.0 to 3.4.0
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 3.3.0 to 3.4.0.
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
- [Commits](https://github.com/docker/setup-qemu-action/compare/v3.3.0...v3.4.0)

---
updated-dependencies:
- dependency-name: docker/setup-qemu-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-07 05:13:18 +00:00
be7da69dbd Bump docker/setup-buildx-action from 3.8.0 to 3.9.0
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3.8.0 to 3.9.0.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3.8.0...v3.9.0)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-07 05:13:16 +00:00
7f13d9b1e6 Fix
- forgotten comma
2025-02-06 15:39:06 +01:00
0c9e3205c2 Add Manga Connector
- added [Webtoon](https://www.webtoons.com) manga connector
- modified/added support for saving covers with a referrer
2025-02-06 15:37:30 +01:00
8c3b70b32e Bump docker/build-push-action from 6.12.0 to 6.13.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.12.0 to 6.13.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.12.0...v6.13.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-01-27 06:02:55 +00:00
4f7031ecfc Merge pull request #328 from ale-ben/cuttingedge
Some checks failed
Docker Image CI / build (push) Has been cancelled
fix: Add escape to Weebcentral regex
2025-01-25 23:26:48 +01:00
f7a285aabd [cuttingedge] fix: Add escape to Weebcentral regex 2025-01-25 11:40:00 +01:00
786482398c Merge pull request #327 from ale-ben/cuttingedge
Some checks are pending
Docker Image CI / build (push) Waiting to run
Fix bug that prevented download of chapter 0
2025-01-24 22:09:53 +01:00
7921dcb1cb [cuttingedge] fix: Change condition for newChapters. Should solve #323 2025-01-24 21:52:52 +01:00
d0c9313279 Merge pull request #322 from C9Glax/dependabot/github_actions/docker/build-push-action-6.12.0
Some checks failed
Docker Image CI / build (push) Has been cancelled
Bump docker/build-push-action from 6.11.0 to 6.12.0
2025-01-16 17:00:36 +01:00
58cf4cf4e0 Bump docker/build-push-action from 6.11.0 to 6.12.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.11.0 to 6.12.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.11.0...v6.12.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-01-16 05:52:33 +00:00
280d715a7c Merge branch 'cuttingedge-merge-ServerV2' into cuttingedge
Some checks failed
Docker Image CI / build (push) Has been cancelled
2025-01-15 23:14:20 +01:00
b4edcccafe Merge branch 'cuttingedge-merge-ServerV2' into cuttingedge 2025-01-15 23:02:55 +01:00
1701881f4b Merge branch 'cuttingedge-merge-ServerV2' into cuttingedge 2025-01-15 22:53:47 +01:00
e5be5703f8 Merge branch 'cuttingedge-merge-ServerV2' into cuttingedge 2025-01-15 22:25:16 +01:00
ce217aae4f Merge remote-tracking branch 'origin/cuttingedge' into cuttingedge 2025-01-15 22:18:11 +01:00
3abf7224d0 Merge pull request #316 from ale-ben/cuttingedge
Some checks failed
Docker Image CI / build (push) Has been cancelled
Fixed regex to capture chapters with decimal (1.5, ..)
2025-01-11 00:41:20 +01:00
b39dbd5671 [cuttingedge] fix(weebcentral): Fixed regex to capture chapters with decimal (1.5, ..) 2025-01-10 22:10:34 +01:00
375fad0c21 Merge pull request #314 from C9Glax/dependabot/github_actions/docker/setup-qemu-action-3.3.0
Some checks failed
Docker Image CI / build (push) Has been cancelled
Bump docker/setup-qemu-action from 3.2.0 to 3.3.0
2025-01-10 11:02:46 +01:00
ee0d17c24f Merge pull request #315 from C9Glax/dependabot/github_actions/docker/build-push-action-6.11.0
Bump docker/build-push-action from 6.9.0 to 6.11.0
2025-01-10 11:02:26 +01:00
36ab3c3fdb Bump docker/build-push-action from 6.9.0 to 6.11.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.9.0 to 6.11.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.9.0...v6.11.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-01-10 05:46:13 +00:00
c3d60c6586 Bump docker/setup-qemu-action from 3.2.0 to 3.3.0
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 3.2.0 to 3.3.0.
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
- [Commits](https://github.com/docker/setup-qemu-action/compare/v3.2.0...v3.3.0)

---
updated-dependencies:
- dependency-name: docker/setup-qemu-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-01-10 05:46:10 +00:00
112 changed files with 4928 additions and 5993 deletions

.dockerignore (new file, 27 lines added)
View File

@ -0,0 +1,27 @@
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/.idea
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/azds.yaml
**/bin
**/charts
**/docker-compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
LICENSE
README.md
Manga
settings

View File

@ -12,7 +12,7 @@ body:
   - type: checkboxes
     attributes:
       label: Is the Website free to access?
-      description: We can't support pay-to-use sites.
+      description: We can't support pay-to-use sites, or captcha-proxied sites as Cloudflare.
       options:
         - label: The Website is freely accessible.
           required: true
@ -20,4 +20,4 @@ body:
     attributes:
       label: Anything else?
     validations:
       required: false

View File

@ -17,12 +17,12 @@ jobs:
       # https://github.com/docker/setup-qemu-action#usage
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.2.0
+        uses: docker/setup-qemu-action@v3.6.0
       # https://github.com/marketplace/actions/docker-setup-buildx
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@v3.8.0
+        uses: docker/setup-buildx-action@v3.10.0
       # https://github.com/docker/login-action#docker-hub
       - name: Login to Docker Hub
@ -33,7 +33,7 @@ jobs:
       # https://github.com/docker/build-push-action#multi-platform-image
       - name: Build and push API
-        uses: docker/build-push-action@v6.9.0
+        uses: docker/build-push-action@v6.18.0
         with:
           context: ./
           file: ./Dockerfile

View File

@ -1,45 +0,0 @@
name: Docker Image CI
on:
push:
branches: [ "dev" ]
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
# https://github.com/docker/setup-qemu-action#usage
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
# https://github.com/marketplace/actions/docker-setup-buildx
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3.8.0
# https://github.com/docker/login-action#docker-hub
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# https://github.com/docker/build-push-action#multi-platform-image
- name: Build and push API
uses: docker/build-push-action@v6.9.0
with:
context: ./
file: ./Dockerfile
#platforms: linux/amd64,linux/arm64,linux/riscv64,linux/ppc64le,linux/s390x,linux/386,linux/mips64le,linux/mips64,linux/arm/v7,linux/arm/v6
platforms: linux/amd64,linux/arm64
pull: true
push: true
tags: |
glax/tranga-api:dev

View File

@ -17,12 +17,12 @@ jobs:
       # https://github.com/docker/setup-qemu-action#usage
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.2.0
+        uses: docker/setup-qemu-action@v3.6.0
       # https://github.com/marketplace/actions/docker-setup-buildx
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@v3.8.0
+        uses: docker/setup-buildx-action@v3.10.0
       # https://github.com/docker/login-action#docker-hub
       - name: Login to Docker Hub
@ -33,7 +33,7 @@ jobs:
       # https://github.com/docker/build-push-action#multi-platform-image
       - name: Build and push API
-        uses: docker/build-push-action@v6.9.0
+        uses: docker/build-push-action@v6.18.0
         with:
           context: ./
           file: ./Dockerfile

View File

@ -2,7 +2,7 @@ name: Docker Image CI
 on:
   push:
-    branches: [ "Server-V2" ]
+    branches: [ "postgres-Server-V2" ]
   workflow_dispatch:
 jobs:
@ -17,12 +17,12 @@ jobs:
       # https://github.com/docker/setup-qemu-action#usage
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.2.0
+        uses: docker/setup-qemu-action@v3.6.0
       # https://github.com/marketplace/actions/docker-setup-buildx
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@v3.8.0
+        uses: docker/setup-buildx-action@v3.10.0
       # https://github.com/docker/login-action#docker-hub
       - name: Login to Docker Hub
@ -33,7 +33,7 @@ jobs:
       # https://github.com/docker/build-push-action#multi-platform-image
       - name: Build and push API
-        uses: docker/build-push-action@v6.9.0
+        uses: docker/build-push-action@v6.18.0
         with:
           context: ./
           file: ./Dockerfile

.gitignore (vendored, 2 lines changed)
View File

@ -20,8 +20,6 @@ riderModule.iml
 cover.jpg
 cover.png
 /.vscode
-/.vs/
-Tranga/Properties/launchSettings.json
 /Manga
 /settings
 *.DotSettings.user

View File

@ -1,37 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<NoWarn>$(NoWarn);1591</NoWarn>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Asp.Versioning.Mvc.ApiExplorer" Version="8.1.0" />
<PackageReference Include="HtmlAgilityPack" Version="1.11.71" />
<PackageReference Include="log4net" Version="3.0.3" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.NewtonsoftJson" Version="9.0.0" />
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.0">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="Npgsql" Version="9.0.2" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="9.0.2" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL.Design" Version="1.1.0" />
<PackageReference Include="PuppeteerSharp" Version="20.0.5" />
<PackageReference Include="SixLabors.ImageSharp" Version="3.1.5" />
<PackageReference Include="Soenneker.Utils.String.NeedlemanWunsch" Version="3.0.697" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="7.2.0" />
<PackageReference Include="System.Drawing.Common" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<Folder Include="Migrations\" />
</ItemGroup>
</Project>

View File

@ -1,6 +0,0 @@
@API_HostAddress = http://localhost:5105
GET {{API_HostAddress}}/weatherforecast/
Accept: application/json
###

View File

@ -1,223 +0,0 @@
using API.Schema;
using API.Schema.Jobs;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{version:apiVersion}/[controller]")]
public class JobController(PgsqlContext context) : Controller
{
/// <summary>
/// Returns all Jobs
/// </summary>
/// <returns>Array of Jobs</returns>
[HttpGet]
[ProducesResponseType<Job[]>(Status200OK)]
public IActionResult GetAllJobs()
{
Job[] ret = context.Jobs.ToArray();
return Ok(ret);
}
/// <summary>
/// Returns Jobs with requested Job-IDs
/// </summary>
/// <param name="ids">Array of Job-IDs</param>
/// <returns>Array of Jobs</returns>
[HttpPost("WithIDs")]
[ProducesResponseType<Job[]>(Status200OK)]
public IActionResult GetJobs([FromBody]string[] ids)
{
Job[] ret = context.Jobs.Where(job => ids.Contains(job.JobId)).ToArray();
return Ok(ret);
}
/// <summary>
/// Get all Jobs in requested State
/// </summary>
/// <param name="state">Requested Job-State</param>
/// <returns>Array of Jobs</returns>
[HttpGet("State/{state}")]
[ProducesResponseType<Job[]>(Status200OK)]
public IActionResult GetJobsInState(JobState state)
{
Job[] jobsInState = context.Jobs.Where(job => job.state == state).ToArray();
return Ok(jobsInState);
}
/// <summary>
/// Returns all Jobs of requested Type
/// </summary>
/// <param name="type">Requested Job-Type</param>
/// <returns>Array of Jobs</returns>
[HttpGet("Type/{type}")]
[ProducesResponseType<Job[]>(Status200OK)]
public IActionResult GetJobsOfType(JobType type)
{
Job[] jobsOfType = context.Jobs.Where(job => job.JobType == type).ToArray();
return Ok(jobsOfType);
}
/// <summary>
/// Return Job with ID
/// </summary>
/// <param name="id">Job-ID</param>
/// <returns>Job</returns>
[HttpGet("{id}")]
[ProducesResponseType<Job>(Status200OK)]
[ProducesResponseType(Status404NotFound)]
public IActionResult GetJob(string id)
{
Job? ret = context.Jobs.Find(id);
return (ret is not null) switch
{
true => Ok(ret),
false => NotFound()
};
}
/// <summary>
/// Create a new CreateNewDownloadChapterJob
/// </summary>
/// <param name="request">ID of the Manga, and how often we check again</param>
/// <returns>Nothing</returns>
[HttpPut("NewDownloadChapterJob/{mangaId}")]
[ProducesResponseType(Status201Created)]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult CreateNewDownloadChapterJob(string mangaId, [FromBody]ulong recurrenceTime)
{
Job job = new DownloadNewChaptersJob(recurrenceTime, mangaId);
return AddJob(job);
}
/// <summary>
/// Create a new DownloadSingleChapterJob
/// </summary>
/// <param name="chapterId">ID of the Chapter</param>
/// <returns>Nothing</returns>
[HttpPut("DownloadSingleChapterJob/{chapterId}")]
[ProducesResponseType(Status201Created)]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult CreateNewDownloadChapterJob(string chapterId)
{
Job job = new DownloadSingleChapterJob(chapterId);
return AddJob(job);
}
/// <summary>
/// Create a new UpdateMetadataJob
/// </summary>
/// <param name="mangaId">ID of the Manga</param>
/// <returns>Nothing</returns>
[HttpPut("UpdateMetadataJob/{mangaId}")]
[ProducesResponseType(Status201Created)]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult CreateUpdateMetadataJob(string mangaId)
{
Job job = new UpdateMetadataJob(0, mangaId);
return AddJob(job);
}
/// <summary>
/// Create a new UpdateMetadataJob for all Manga
/// </summary>
/// <returns>Nothing</returns>
[HttpPut("UpdateMetadataJob")]
[ProducesResponseType(Status201Created)]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult CreateUpdateAllMetadataJob()
{
List<string> ids = context.Manga.Select(m => m.MangaId).ToList();
List<UpdateMetadataJob> jobs = ids.Select(id => new UpdateMetadataJob(0, id)).ToList();
try
{
context.Jobs.AddRange(jobs);
context.SaveChanges();
return Created();
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
private IActionResult AddJob(Job job)
{
try
{
context.Jobs.Add(job);
context.SaveChanges();
return Created();
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
/// <summary>
/// Delete Job with ID
/// </summary>
/// <param name="id">Job-ID</param>
/// <returns>Nothing</returns>
[HttpDelete("{id}")]
[ProducesResponseType(Status200OK)]
[ProducesResponseType(Status404NotFound)]
[ProducesResponseType(Status500InternalServerError)]
public IActionResult DeleteJob(string id)
{
try
{
Job? ret = context.Jobs.Find(id);
switch (ret is not null)
{
case true:
context.Remove(ret);
context.SaveChanges();
return Ok();
case false: return NotFound();
}
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
/// <summary>
/// Starts the Job with the requested ID
/// </summary>
/// <param name="id">Job-ID</param>
/// <returns>Nothing</returns>
[HttpPost("{id}/Start")]
[ProducesResponseType(Status202Accepted)]
[ProducesResponseType(Status404NotFound)]
[ProducesResponseType(Status500InternalServerError)]
public IActionResult StartJob(string id)
{
Job? ret = context.Jobs.Find(id);
if (ret is null)
return NotFound();
try
{
context.Update(ret);
context.SaveChanges();
return Accepted();
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
[HttpPost("{id}/Stop")]
public IActionResult StopJob(string id)
{
return NotFound(new ProblemResponse("Not implemented")); //TODO
}
}

View File

@ -1,95 +0,0 @@
using API.Schema;
using API.Schema.LibraryConnectors;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{v:apiVersion}/[controller]")]
public class LibraryConnectorController(PgsqlContext context) : Controller
{
/// <summary>
/// Gets all configured Library-Connectors
/// </summary>
/// <returns>Array of configured Library-Connectors</returns>
[HttpGet]
[ProducesResponseType<LibraryConnector[]>(Status200OK)]
public IActionResult GetAllConnectors()
{
LibraryConnector[] connectors = context.LibraryConnectors.ToArray();
return Ok(connectors);
}
/// <summary>
/// Returns Library-Connector with requested ID
/// </summary>
/// <param name="id">Library-Connector-ID</param>
/// <returns>Library-Connector</returns>
[HttpGet("{id}")]
[ProducesResponseType<LibraryConnector>(Status200OK)]
[ProducesResponseType(Status404NotFound)]
public IActionResult GetConnector(string id)
{
LibraryConnector? ret = context.LibraryConnectors.Find(id);
return (ret is not null) switch
{
true => Ok(ret),
false => NotFound()
};
}
/// <summary>
/// Creates a new Library-Connector
/// </summary>
/// <param name="libraryConnector">Library-Connector</param>
/// <returns>Nothing</returns>
[HttpPut]
[ProducesResponseType(Status200OK)]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult CreateConnector([FromBody]LibraryConnector libraryConnector)
{
try
{
context.LibraryConnectors.Add(libraryConnector);
context.SaveChanges();
return Created();
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
/// <summary>
/// Deletes the Library-Connector with the requested ID
/// </summary>
/// <param name="id">Library-Connector-ID</param>
/// <returns>Nothing</returns>
[HttpDelete("{id}")]
[ProducesResponseType(Status200OK)]
[ProducesResponseType(Status404NotFound)]
[ProducesResponseType(Status500InternalServerError)]
public IActionResult DeleteConnector(string id)
{
try
{
LibraryConnector? ret = context.LibraryConnectors.Find(id);
switch (ret is not null)
{
case true:
context.Remove(ret);
context.SaveChanges();
return Ok();
case false: return NotFound();
}
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
}

View File

@ -1,163 +0,0 @@
using API.Schema;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{v:apiVersion}/[controller]")]
public class MangaController(PgsqlContext context) : Controller
{
/// <summary>
/// Returns all cached Manga
/// </summary>
/// <returns>Array of Manga</returns>
[HttpGet]
[ProducesResponseType<Manga[]>(Status200OK)]
public IActionResult GetAllManga()
{
Manga[] ret = context.Manga.ToArray();
return Ok(ret);
}
/// <summary>
/// Returns all cached Manga with IDs
/// </summary>
/// <param name="ids">Array of Manga-IDs</param>
/// <returns>Array of Manga</returns>
[HttpPost("WithIDs")]
[ProducesResponseType<Manga[]>(Status200OK)]
public IActionResult GetManga([FromBody]string[] ids)
{
Manga[] ret = context.Manga.Where(m => ids.Contains(m.MangaId)).ToArray();
return Ok(ret);
}
/// <summary>
/// Return Manga with ID
/// </summary>
/// <param name="id">Manga-ID</param>
/// <returns>Manga</returns>
[HttpGet("{id}")]
[ProducesResponseType<Manga>(Status200OK)]
[ProducesResponseType(Status404NotFound)]
public IActionResult GetManga(string id)
{
Manga? ret = context.Manga.Find(id);
return (ret is not null) switch
{
true => Ok(ret),
false => NotFound()
};
}
/// <summary>
/// Delete Manga with ID
/// </summary>
/// <param name="id">Manga-ID</param>
/// <returns>Nothing</returns>
[HttpDelete("{id}")]
[ProducesResponseType(Status200OK)]
[ProducesResponseType(Status404NotFound)]
[ProducesResponseType(Status500InternalServerError)]
public IActionResult DeleteManga(string id)
{
try
{
Manga? ret = context.Manga.Find(id);
switch (ret is not null)
{
case true:
context.Remove(ret);
context.SaveChanges();
return Ok();
case false: return NotFound();
}
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
/// <summary>
/// Returns URL of Cover of Manga
/// </summary>
/// <param name="id">Manga-ID</param>
/// <returns>URL of Cover</returns>
[HttpGet("{id}/Cover")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetCover(string id)
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Returns all Chapters of Manga
/// </summary>
/// <param name="id">Manga-ID</param>
/// <returns>Array of Chapters</returns>
[HttpGet("{id}/Chapters")]
[ProducesResponseType<Chapter[]>(Status200OK)]
[ProducesResponseType<string>(Status404NotFound)]
public IActionResult GetChapters(string id)
{
Manga? m = context.Manga.Find(id);
if (m is null)
return NotFound("Manga could not be found");
Chapter[] ret = context.Chapters.Where(c => c.ParentManga.MangaId == m.MangaId).ToArray();
return Ok(ret);
}
/// <summary>
/// Returns the latest Chapter of requested Manga
/// </summary>
/// <param name="id">Manga-ID</param>
/// <returns>Latest Chapter</returns>
[HttpGet("{id}/Chapter/Latest")]
[ProducesResponseType<Chapter>(Status200OK)]
[ProducesResponseType<string>(Status404NotFound)]
public IActionResult GetLatestChapter(string id)
{
Manga? m = context.Manga.Find(id);
if (m is null)
return NotFound("Manga could not be found");
List<Chapter> chapters = context.Chapters.Where(c => c.ParentManga.MangaId == m.MangaId).ToList();
Chapter? max = chapters.Max();
if (max is null)
return NotFound("Chapter could not be found");
return Ok(max);
}
/// <summary>
/// Configure the cut-off for Manga
/// </summary>
/// <remarks>This is important for the DownloadNewChapters-Job</remarks>
/// <param name="id">Manga-ID</param>
/// <returns>Nothing</returns>
[HttpPatch("{id}/IgnoreChaptersBefore")]
[ProducesResponseType<float>(Status200OK)]
public IActionResult IgnoreChaptersBefore(string id)
{
Manga? m = context.Manga.Find(id);
if (m is null)
return NotFound("Manga could not be found");
return Ok(m.IgnoreChapterBefore);
}
/// <summary>
/// Move the Directory the .cbz-files are located in
/// </summary>
/// <param name="id">Manga-ID</param>
/// <param name="folder">New Directory-Path</param>
/// <returns>Nothing</returns>
[HttpPost("{id}/MoveFolder")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult MoveFolder(string id, [FromBody]string folder)
{
return StatusCode(500, "Not implemented"); //TODO
}
}

View File

@ -1,26 +0,0 @@
using API.Schema;
using API.Schema.MangaConnectors;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{v:apiVersion}")]
public class MiscController(PgsqlContext context) : Controller
{
/// <summary>
/// Get all available Connectors (Scanlation-Sites)
/// </summary>
/// <returns>Array of MangaConnector</returns>
[HttpGet("GetConnectors")]
[ProducesResponseType<MangaConnector[]>(Status200OK)]
public IActionResult GetConnectors()
{
MangaConnector[] connectors = context.MangaConnectors.ToArray();
return Ok(connectors);
}
}

View File

@ -1,95 +0,0 @@
using API.Schema;
using API.Schema.NotificationConnectors;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{v:apiVersion}/[controller]")]
public class NotificationConnectorController(PgsqlContext context) : Controller
{
/// <summary>
/// Gets all configured Notification-Connectors
/// </summary>
/// <returns>Array of configured Notification-Connectors</returns>
[HttpGet]
[ProducesResponseType<NotificationConnector[]>(Status200OK)]
public IActionResult GetAllConnectors()
{
NotificationConnector[] ret = context.NotificationConnectors.ToArray();
return Ok(ret);
}
/// <summary>
/// Returns Notification-Connector with requested ID
/// </summary>
/// <param name="id">Notification-Connector-ID</param>
/// <returns>Notification-Connector</returns>
[HttpGet("{id}")]
[ProducesResponseType<NotificationConnector>(Status200OK)]
[ProducesResponseType(Status404NotFound)]
public IActionResult GetConnector(string id)
{
NotificationConnector? ret = context.NotificationConnectors.Find(id);
return (ret is not null) switch
{
true => Ok(ret),
false => NotFound()
};
}
/// <summary>
/// Creates a new Notification-Connector
/// </summary>
/// <param name="notificationConnector">Notification-Connector</param>
/// <returns>Nothing</returns>
[HttpPut]
[ProducesResponseType<NotificationConnector[]>(Status200OK)]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult CreateConnector([FromBody]NotificationConnector notificationConnector)
{
try
{
context.NotificationConnectors.Add(notificationConnector);
context.SaveChanges();
return Created();
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
/// <summary>
/// Deletes the Notification-Connector with the requested ID
/// </summary>
/// <param name="id">Notification-Connector-ID</param>
/// <returns>Nothing</returns>
[HttpDelete("{id}")]
[ProducesResponseType(Status200OK)]
[ProducesResponseType(Status404NotFound)]
[ProducesResponseType(Status500InternalServerError)]
public IActionResult DeleteConnector(string id)
{
try
{
NotificationConnector? ret = context.NotificationConnectors.Find(id);
switch (ret is not null)
{
case true:
context.Remove(ret);
context.SaveChanges();
return Ok();
case false: return NotFound();
}
}
catch (Exception e)
{
return StatusCode(500, e.Message);
}
}
}

View File

@ -1,150 +0,0 @@
using API.Schema;
using API.Schema.MangaConnectors;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{v:apiVersion}/[controller]")]
public class SearchController(PgsqlContext context) : Controller
{
/// <summary>
/// Initiate a search for a Manga on all Connectors
/// </summary>
/// <param name="name">Name/Title of the Manga</param>
/// <returns>Array of Manga</returns>
[HttpPost("{name}")]
[ProducesResponseType<Manga[]>(Status500InternalServerError)]
public IActionResult SearchMangaGlobal(string name)
{
List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> allManga = new();
foreach (MangaConnector contextMangaConnector in context.MangaConnectors)
allManga.AddRange(contextMangaConnector.GetManga(name));
List<Manga> retMangas = new();
foreach ((Manga? manga, List<Author>? authors, List<MangaTag>? tags, List<Link>? links, List<MangaAltTitle>? altTitles) in allManga)
{
try
{
Manga? add = AddMangaToContext(manga, authors, tags, links, altTitles);
if(add is not null)
retMangas.Add(add);
}
catch (DbUpdateException)
{
return StatusCode(500, new ProblemResponse("An error occurred while processing your request."));
}
}
return Ok(retMangas.ToArray());
}
/// <summary>
/// Initiate a search for a Manga on a specific Connector
/// </summary>
/// <param name="id">Manga-Connector-ID</param>
/// <param name="name">Name/Title of the Manga</param>
/// <returns>Manga</returns>
[HttpPost("{id}/{name}")]
[ProducesResponseType<Manga[]>(Status200OK)]
[ProducesResponseType<ProblemResponse>(Status404NotFound)]
[ProducesResponseType<ProblemResponse>(Status500InternalServerError)]
public IActionResult SearchManga(string id, string name)
{
MangaConnector? connector = context.MangaConnectors.Find(id);
if (connector is null)
return NotFound(new ProblemResponse("Connector not found."));
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] mangas = connector.GetManga(name);
List<Manga> retMangas = new();
foreach ((Manga? manga, List<Author>? authors, List<MangaTag>? tags, List<Link>? links, List<MangaAltTitle>? altTitles) in mangas)
{
try
{
Manga? add = AddMangaToContext(manga, authors, tags, links, altTitles);
if(add is not null)
retMangas.Add(add);
}
catch (DbUpdateException e)
{
return StatusCode(500, new ProblemResponse("An error occurred while processing your request.", e.Message));
}
}
return Ok(retMangas.ToArray());
}
private Manga? AddMangaToContext(Manga? manga, List<Author>? authors, List<MangaTag>? tags, List<Link>? links,
List<MangaAltTitle>? altTitles)
{
if (manga is null)
return null;
Manga? existing = context.Manga.FirstOrDefault(m =>
m.MangaConnector == manga.MangaConnector && m.ConnectorId == manga.ConnectorId);
if (tags is not null)
{
IEnumerable<MangaTag> mergedTags = tags.Select(mt =>
{
MangaTag? inDb = context.Tags.FirstOrDefault(t => t.Equals(mt));
return inDb ?? mt;
});
manga.Tags = mergedTags.ToList();
IEnumerable<MangaTag> newTags = manga.Tags.Where(mt => !context.Tags.Any(t => t.Tag.Equals(mt.Tag)));
context.Tags.AddRange(newTags);
}
if (authors is not null)
{
IEnumerable<Author> mergedAuthors = authors.Select(ma =>
{
Author? inDb = context.Authors.FirstOrDefault(a => a.AuthorName == ma.AuthorName);
return inDb ?? ma;
});
manga.Authors = mergedAuthors.ToList();
IEnumerable<Author> newAuthors = manga.Authors.Where(ma => !context.Authors.Any(a =>
a.AuthorName == ma.AuthorName));
context.Authors.AddRange(newAuthors);
}
if (links is not null)
{
IEnumerable<Link> mergedLinks = links.Select(ml =>
{
Link? inDb = context.Link.FirstOrDefault(l =>
l.LinkProvider == ml.LinkProvider && l.LinkUrl == ml.LinkUrl);
return inDb ?? ml;
});
manga.Links = mergedLinks.ToList();
IEnumerable<Link> newLinks = manga.Links.Where(ml => !context.Link.Any(l =>
l.LinkProvider == ml.LinkProvider && l.LinkUrl == ml.LinkUrl));
context.Link.AddRange(newLinks);
}
if (altTitles is not null)
{
IEnumerable<MangaAltTitle> mergedAltTitles = altTitles.Select(mat =>
{
MangaAltTitle? inDb = context.AltTitles.FirstOrDefault(at =>
at.Language == mat.Language && at.Title == mat.Title);
return inDb ?? mat;
});
manga.AltTitles = mergedAltTitles.ToList();
IEnumerable<MangaAltTitle> newAltTitles = manga.AltTitles.Where(mat =>
!context.AltTitles.Any(at => at.Language == mat.Language && at.Title == mat.Title));
context.AltTitles.AddRange(newAltTitles);
}
existing?.UpdateWithInfo(manga);
if(existing is not null)
context.Manga.Update(existing);
else
context.Manga.Add(manga);
context.SaveChanges();
return existing ?? manga;
}
}

View File

@ -1,161 +0,0 @@
using API.Schema;
using Asp.Versioning;
using Microsoft.AspNetCore.Mvc;
using static Microsoft.AspNetCore.Http.StatusCodes;
namespace API.Controllers;
[ApiVersion(2)]
[ApiController]
[Produces("application/json")]
[Route("v{v:apiVersion}/[controller]")]
public class SettingsController(PgsqlContext context) : Controller
{
/// <summary>
/// Get all Settings
/// </summary>
/// <returns></returns>
[HttpGet]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetSettings()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Get the current UserAgent used by Tranga
/// </summary>
/// <returns>UserAgent as string</returns>
[HttpGet("UserAgent")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetUserAgent()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Set a new UserAgent
/// </summary>
/// <returns>Nothing</returns>
[HttpPatch("UserAgent")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult SetUserAgent()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Reset the UserAgent to default
/// </summary>
/// <returns>Nothing</returns>
[HttpDelete("UserAgent")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult ResetUserAgent()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Get all Request-Limits
/// </summary>
/// <returns></returns>
[HttpGet("RequestLimits")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetRequestLimits()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Update all Request-Limits to new values
/// </summary>
/// <returns>Nothing</returns>
[HttpPatch("RequestLimits")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult SetRequestLimits()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Reset all Request-Limits
/// </summary>
/// <returns>Nothing</returns>
[HttpDelete("RequestLimits")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult ResetRequestLimits()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Returns Level of Image-Compression for Images
/// </summary>
/// <returns></returns>
[HttpGet("ImageCompression")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetImageCompression()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Set the Image-Compression-Level for Images
/// </summary>
/// <param name="percentage">100 to disable, 0-99 for JPEG compression-Level</param>
/// <returns>Nothing</returns>
[HttpPatch("ImageCompression")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult SetImageCompression(int percentage)
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Get state of Black/White-Image setting
/// </summary>
/// <returns>True if enabled</returns>
[HttpGet("BWImages")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetBwImagesToggle()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Enable/Disable conversion of Images to Black and White
/// </summary>
/// <param name="enabled">true to enable</param>
/// <returns>Nothing</returns>
[HttpPatch("BWImages")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult SetBwImagesToggle(bool enabled)
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Get state of April Fools Mode
/// </summary>
/// <remarks>April Fools Mode disables all downloads on April 1st</remarks>
/// <returns>True if enabled</returns>
[HttpGet("AprilFoolsMode")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult GetAprilFoolsMode()
{
return StatusCode(500, "Not implemented"); //TODO
}
/// <summary>
/// Enable/Disable April Fools Mode
/// </summary>
/// <remarks>April Fools Mode disables all downloads on April 1st</remarks>
/// <param name="enabled">true to enable</param>
/// <returns>Nothing</returns>
[HttpPatch("AprilFoolsMode")]
[ProducesResponseType<string>(Status500InternalServerError)]
public IActionResult SetAprilFoolsMode(bool enabled)
{
return StatusCode(500, "Not implemented"); //TODO
}
}

View File

@ -1,781 +0,0 @@
// <auto-generated />
using System;
using API.Schema;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace API.Migrations
{
[DbContext(typeof(PgsqlContext))]
[Migration("20241201235443_Initial")]
partial class Initial
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.0")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("API.Schema.Author", b =>
{
b.Property<string>("AuthorId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("AuthorName")
.IsRequired()
.HasColumnType("text");
b.HasKey("AuthorId");
b.ToTable("Authors");
});
modelBuilder.Entity("API.Schema.Chapter", b =>
{
b.Property<string>("ChapterId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("ArchiveFileName")
.IsRequired()
.HasColumnType("text");
b.Property<string>("ChapterIds")
.IsRequired()
.HasColumnType("text");
b.Property<float>("ChapterNumber")
.HasColumnType("real");
b.Property<bool>("Downloaded")
.HasColumnType("boolean");
b.Property<string>("ParentMangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("Title")
.HasColumnType("text");
b.Property<string>("Url")
.IsRequired()
.HasColumnType("text");
b.Property<float?>("VolumeNumber")
.HasColumnType("real");
b.HasKey("ChapterId");
b.HasIndex("ParentMangaId");
b.ToTable("Chapters");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.Property<string>("JobId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.PrimitiveCollection<string[]>("DependsOnJobIds")
.HasMaxLength(64)
.HasColumnType("text[]");
b.Property<string>("JobId1")
.HasColumnType("character varying(64)");
b.Property<byte>("JobType")
.HasColumnType("smallint");
b.Property<DateTime>("LastExecution")
.HasColumnType("timestamp with time zone");
b.Property<DateTime>("NextExecution")
.HasColumnType("timestamp with time zone");
b.Property<string>("ParentJobId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<decimal>("RecurrenceMs")
.HasColumnType("numeric(20,0)");
b.Property<int>("state")
.HasColumnType("integer");
b.HasKey("JobId");
b.HasIndex("JobId1");
b.ToTable("Jobs");
b.HasDiscriminator<byte>("JobType");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("API.Schema.LibraryConnectors.LibraryConnector", b =>
{
b.Property<string>("LibraryConnectorId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("Auth")
.IsRequired()
.HasColumnType("text");
b.Property<string>("BaseUrl")
.IsRequired()
.HasColumnType("text");
b.Property<byte>("LibraryType")
.HasColumnType("smallint");
b.HasKey("LibraryConnectorId");
b.ToTable("LibraryConnectors");
b.HasDiscriminator<byte>("LibraryType");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("API.Schema.Link", b =>
{
b.Property<string>("LinkId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("LinkIds")
.HasColumnType("text");
b.Property<string>("LinkProvider")
.IsRequired()
.HasColumnType("text");
b.Property<string>("LinkUrl")
.IsRequired()
.HasColumnType("text");
b.Property<string>("MangaId")
.IsRequired()
.HasColumnType("character varying(64)");
b.HasKey("LinkId");
b.HasIndex("MangaId");
b.ToTable("Link");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.Property<string>("MangaId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.PrimitiveCollection<string[]>("AltTitleIds")
.IsRequired()
.HasColumnType("text[]");
b.PrimitiveCollection<string[]>("AuthorIds")
.IsRequired()
.HasColumnType("text[]");
b.Property<string>("ConnectorId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("CoverFileNameInCache")
.HasColumnType("text");
b.Property<string>("CoverUrl")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Description")
.IsRequired()
.HasColumnType("text");
b.Property<string>("FolderName")
.IsRequired()
.HasColumnType("text");
b.Property<float>("IgnoreChapterBefore")
.HasColumnType("real");
b.Property<string>("LatestChapterAvailableId")
.HasColumnType("character varying(64)");
b.Property<string>("LatestChapterDownloadedId")
.HasColumnType("character varying(64)");
b.PrimitiveCollection<string[]>("LinkIds")
.IsRequired()
.HasColumnType("text[]");
b.Property<string>("MangaConnectorName")
.IsRequired()
.HasColumnType("character varying(32)");
b.Property<string>("MangaIds")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("text");
b.Property<string>("OriginalLanguage")
.HasColumnType("text");
b.Property<byte>("ReleaseStatus")
.HasColumnType("smallint");
b.PrimitiveCollection<string[]>("TagIds")
.IsRequired()
.HasColumnType("text[]");
b.Property<long>("year")
.HasColumnType("bigint");
b.HasKey("MangaId");
b.HasIndex("LatestChapterAvailableId")
.IsUnique();
b.HasIndex("LatestChapterDownloadedId")
.IsUnique();
b.HasIndex("MangaConnectorName");
b.ToTable("Manga");
});
modelBuilder.Entity("API.Schema.MangaAltTitle", b =>
{
b.Property<string>("AltTitleId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("AltTitleIds")
.HasColumnType("text");
b.Property<string>("Language")
.IsRequired()
.HasMaxLength(8)
.HasColumnType("character varying(8)");
b.Property<string>("MangaId")
.IsRequired()
.HasColumnType("character varying(64)");
b.Property<string>("Title")
.IsRequired()
.HasColumnType("text");
b.HasKey("AltTitleId");
b.HasIndex("MangaId");
b.ToTable("AltTitles");
});
modelBuilder.Entity("API.Schema.MangaConnector", b =>
{
b.Property<string>("Name")
.HasMaxLength(32)
.HasColumnType("character varying(32)");
b.PrimitiveCollection<string[]>("BaseUris")
.IsRequired()
.HasColumnType("text[]");
b.PrimitiveCollection<string[]>("SupportedLanguages")
.IsRequired()
.HasColumnType("text[]");
b.HasKey("Name");
b.ToTable("MangaConnectors");
});
modelBuilder.Entity("API.Schema.MangaTag", b =>
{
b.Property<string>("Tag")
.HasColumnType("text");
b.HasKey("Tag");
b.ToTable("Tags");
});
modelBuilder.Entity("API.Schema.NotificationConnectors.NotificationConnector", b =>
{
b.Property<string>("NotificationConnectorId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<byte>("NotificationConnectorType")
.HasColumnType("smallint");
b.HasKey("NotificationConnectorId");
b.ToTable("NotificationConnectors");
b.HasDiscriminator<byte>("NotificationConnectorType");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("MangaAuthor", b =>
{
b.Property<string>("MangaId")
.HasColumnType("character varying(64)");
b.Property<string>("AuthorId")
.HasColumnType("character varying(64)");
b.Property<string>("AuthorIds")
.HasColumnType("text");
b.Property<string>("MangaIds")
.HasColumnType("text");
b.HasKey("MangaId", "AuthorId");
b.HasIndex("AuthorId");
b.ToTable("MangaAuthor");
});
modelBuilder.Entity("MangaTag", b =>
{
b.Property<string>("MangaId")
.HasColumnType("character varying(64)");
b.Property<string>("Tag")
.HasColumnType("text");
b.Property<string>("MangaIds")
.IsRequired()
.HasColumnType("text");
b.Property<string>("TagIds")
.HasColumnType("text");
b.HasKey("MangaId", "Tag");
b.HasIndex("MangaIds");
b.HasIndex("Tag");
b.ToTable("MangaTag");
});
modelBuilder.Entity("API.Schema.Jobs.CreateArchiveJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("ChapterId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("ComicInfoLocation")
.IsRequired()
.HasColumnType("text");
b.Property<string>("ImagesLocation")
.IsRequired()
.HasColumnType("text");
b.HasIndex("ChapterId");
b.ToTable("Jobs", t =>
{
t.Property("ChapterId")
.HasColumnName("CreateArchiveJob_ChapterId");
});
b.HasDiscriminator().HasValue((byte)4);
});
modelBuilder.Entity("API.Schema.Jobs.CreateComicInfoXmlJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("ChapterId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("Path")
.IsRequired()
.HasColumnType("text");
b.HasIndex("ChapterId");
b.ToTable("Jobs", t =>
{
t.Property("ChapterId")
.HasColumnName("CreateComicInfoXmlJob_ChapterId");
});
b.HasDiscriminator().HasValue((byte)6);
});
modelBuilder.Entity("API.Schema.Jobs.DownloadNewChaptersJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.HasDiscriminator().HasValue((byte)1);
});
modelBuilder.Entity("API.Schema.Jobs.DownloadSingleChapterJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("ChapterId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("ChapterId");
b.HasDiscriminator().HasValue((byte)0);
});
modelBuilder.Entity("API.Schema.Jobs.MoveFileOrFolderJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("FromLocation")
.IsRequired()
.HasColumnType("text");
b.Property<string>("ToLocation")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)3);
});
modelBuilder.Entity("API.Schema.Jobs.ProcessImagesJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<bool>("Bw")
.HasColumnType("boolean");
b.Property<int>("Compression")
.HasColumnType("integer");
b.Property<string>("Path")
.IsRequired()
.HasColumnType("text");
b.ToTable("Jobs", t =>
{
t.Property("Path")
.HasColumnName("ProcessImagesJob_Path");
});
b.HasDiscriminator().HasValue((byte)5);
});
modelBuilder.Entity("API.Schema.Jobs.SearchMangaJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaConnectorName")
.IsRequired()
.HasColumnType("text");
b.Property<string>("SearchString")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)7);
});
modelBuilder.Entity("API.Schema.Jobs.UpdateMetadataJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.ToTable("Jobs", t =>
{
t.Property("MangaId")
.HasColumnName("UpdateMetadataJob_MangaId");
});
b.HasDiscriminator().HasValue((byte)2);
});
modelBuilder.Entity("API.Schema.LibraryConnectors.Kavita", b =>
{
b.HasBaseType("API.Schema.LibraryConnectors.LibraryConnector");
b.HasDiscriminator().HasValue((byte)1);
});
modelBuilder.Entity("API.Schema.LibraryConnectors.Komga", b =>
{
b.HasBaseType("API.Schema.LibraryConnectors.LibraryConnector");
b.HasDiscriminator().HasValue((byte)0);
});
modelBuilder.Entity("API.Schema.NotificationConnectors.Gotify", b =>
{
b.HasBaseType("API.Schema.NotificationConnectors.NotificationConnector");
b.Property<string>("AppToken")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Endpoint")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)0);
});
modelBuilder.Entity("API.Schema.NotificationConnectors.Lunasea", b =>
{
b.HasBaseType("API.Schema.NotificationConnectors.NotificationConnector");
b.Property<string>("Id")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)1);
});
modelBuilder.Entity("API.Schema.NotificationConnectors.Ntfy", b =>
{
b.HasBaseType("API.Schema.NotificationConnectors.NotificationConnector");
b.Property<string>("Auth")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Endpoint")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Topic")
.IsRequired()
.HasColumnType("text");
b.ToTable("NotificationConnectors", t =>
{
t.Property("Endpoint")
.HasColumnName("Ntfy_Endpoint");
});
b.HasDiscriminator().HasValue((byte)2);
});
modelBuilder.Entity("API.Schema.Chapter", b =>
{
b.HasOne("API.Schema.Manga", "ParentManga")
.WithMany("Chapters")
.HasForeignKey("ParentMangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("ParentManga");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.HasOne("API.Schema.Jobs.Job", null)
.WithMany("DependsOnJobs")
.HasForeignKey("JobId1");
});
modelBuilder.Entity("API.Schema.Link", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany("Links")
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.HasOne("API.Schema.Chapter", "LatestChapterAvailable")
.WithOne()
.HasForeignKey("API.Schema.Manga", "LatestChapterAvailableId");
b.HasOne("API.Schema.Chapter", "LatestChapterDownloaded")
.WithOne()
.HasForeignKey("API.Schema.Manga", "LatestChapterDownloadedId");
b.HasOne("API.Schema.MangaConnector", "MangaConnector")
.WithMany("Mangas")
.HasForeignKey("MangaConnectorName")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("LatestChapterAvailable");
b.Navigation("LatestChapterDownloaded");
b.Navigation("MangaConnector");
});
modelBuilder.Entity("API.Schema.MangaAltTitle", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany("AltTitles")
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("MangaAuthor", b =>
{
b.HasOne("API.Schema.Author", null)
.WithMany()
.HasForeignKey("AuthorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.Manga", null)
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("MangaTag", b =>
{
b.HasOne("API.Schema.Manga", null)
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.MangaTag", null)
.WithMany()
.HasForeignKey("MangaIds")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.MangaTag", null)
.WithMany()
.HasForeignKey("Tag")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("API.Schema.Jobs.CreateArchiveJob", b =>
{
b.HasOne("API.Schema.Chapter", "Chapter")
.WithMany()
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Schema.Jobs.CreateComicInfoXmlJob", b =>
{
b.HasOne("API.Schema.Chapter", "Chapter")
.WithMany()
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Schema.Jobs.DownloadNewChaptersJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Jobs.DownloadSingleChapterJob", b =>
{
b.HasOne("API.Schema.Chapter", "Chapter")
.WithMany()
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Schema.Jobs.UpdateMetadataJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.Navigation("DependsOnJobs");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.Navigation("AltTitles");
b.Navigation("Chapters");
b.Navigation("Links");
});
modelBuilder.Entity("API.Schema.MangaConnector", b =>
{
b.Navigation("Mangas");
});
#pragma warning restore 612, 618
}
}
}

@ -1,447 +0,0 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Migrations
{
/// <inheritdoc />
public partial class Initial : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "Authors",
columns: table => new
{
AuthorId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
AuthorName = table.Column<string>(type: "text", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Authors", x => x.AuthorId);
});
migrationBuilder.CreateTable(
name: "LibraryConnectors",
columns: table => new
{
LibraryConnectorId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
LibraryType = table.Column<byte>(type: "smallint", nullable: false),
BaseUrl = table.Column<string>(type: "text", nullable: false),
Auth = table.Column<string>(type: "text", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_LibraryConnectors", x => x.LibraryConnectorId);
});
migrationBuilder.CreateTable(
name: "MangaConnectors",
columns: table => new
{
Name = table.Column<string>(type: "character varying(32)", maxLength: 32, nullable: false),
SupportedLanguages = table.Column<string[]>(type: "text[]", nullable: false),
BaseUris = table.Column<string[]>(type: "text[]", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_MangaConnectors", x => x.Name);
});
migrationBuilder.CreateTable(
name: "NotificationConnectors",
columns: table => new
{
NotificationConnectorId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
NotificationConnectorType = table.Column<byte>(type: "smallint", nullable: false),
Endpoint = table.Column<string>(type: "text", nullable: true),
AppToken = table.Column<string>(type: "text", nullable: true),
Id = table.Column<string>(type: "text", nullable: true),
Ntfy_Endpoint = table.Column<string>(type: "text", nullable: true),
Auth = table.Column<string>(type: "text", nullable: true),
Topic = table.Column<string>(type: "text", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_NotificationConnectors", x => x.NotificationConnectorId);
});
migrationBuilder.CreateTable(
name: "Tags",
columns: table => new
{
Tag = table.Column<string>(type: "text", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Tags", x => x.Tag);
});
migrationBuilder.CreateTable(
name: "AltTitles",
columns: table => new
{
AltTitleId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
Language = table.Column<string>(type: "character varying(8)", maxLength: 8, nullable: false),
Title = table.Column<string>(type: "text", nullable: false),
MangaId = table.Column<string>(type: "character varying(64)", nullable: false),
AltTitleIds = table.Column<string>(type: "text", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_AltTitles", x => x.AltTitleId);
});
migrationBuilder.CreateTable(
name: "Chapters",
columns: table => new
{
ChapterId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
VolumeNumber = table.Column<float>(type: "real", nullable: true),
ChapterNumber = table.Column<float>(type: "real", nullable: false),
Url = table.Column<string>(type: "text", nullable: false),
Title = table.Column<string>(type: "text", nullable: true),
ArchiveFileName = table.Column<string>(type: "text", nullable: false),
Downloaded = table.Column<bool>(type: "boolean", nullable: false),
ParentMangaId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
ChapterIds = table.Column<string>(type: "text", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Chapters", x => x.ChapterId);
});
migrationBuilder.CreateTable(
name: "Manga",
columns: table => new
{
MangaId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
ConnectorId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
Name = table.Column<string>(type: "text", nullable: false),
Description = table.Column<string>(type: "text", nullable: false),
CoverUrl = table.Column<string>(type: "text", nullable: false),
CoverFileNameInCache = table.Column<string>(type: "text", nullable: true),
year = table.Column<long>(type: "bigint", nullable: false),
OriginalLanguage = table.Column<string>(type: "text", nullable: true),
ReleaseStatus = table.Column<byte>(type: "smallint", nullable: false),
FolderName = table.Column<string>(type: "text", nullable: false),
IgnoreChapterBefore = table.Column<float>(type: "real", nullable: false),
LatestChapterDownloadedId = table.Column<string>(type: "character varying(64)", nullable: true),
LatestChapterAvailableId = table.Column<string>(type: "character varying(64)", nullable: true),
MangaConnectorName = table.Column<string>(type: "character varying(32)", nullable: false),
AuthorIds = table.Column<string[]>(type: "text[]", nullable: false),
TagIds = table.Column<string[]>(type: "text[]", nullable: false),
LinkIds = table.Column<string[]>(type: "text[]", nullable: false),
AltTitleIds = table.Column<string[]>(type: "text[]", nullable: false),
MangaIds = table.Column<string>(type: "text", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Manga", x => x.MangaId);
table.ForeignKey(
name: "FK_Manga_Chapters_LatestChapterAvailableId",
column: x => x.LatestChapterAvailableId,
principalTable: "Chapters",
principalColumn: "ChapterId");
table.ForeignKey(
name: "FK_Manga_Chapters_LatestChapterDownloadedId",
column: x => x.LatestChapterDownloadedId,
principalTable: "Chapters",
principalColumn: "ChapterId");
table.ForeignKey(
name: "FK_Manga_MangaConnectors_MangaConnectorName",
column: x => x.MangaConnectorName,
principalTable: "MangaConnectors",
principalColumn: "Name",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "Jobs",
columns: table => new
{
JobId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
ParentJobId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: true),
DependsOnJobIds = table.Column<string[]>(type: "text[]", maxLength: 64, nullable: true),
JobType = table.Column<byte>(type: "smallint", nullable: false),
RecurrenceMs = table.Column<decimal>(type: "numeric(20,0)", nullable: false),
LastExecution = table.Column<DateTime>(type: "timestamp with time zone", nullable: false),
NextExecution = table.Column<DateTime>(type: "timestamp with time zone", nullable: false),
state = table.Column<int>(type: "integer", nullable: false),
JobId1 = table.Column<string>(type: "character varying(64)", nullable: true),
ImagesLocation = table.Column<string>(type: "text", nullable: true),
ComicInfoLocation = table.Column<string>(type: "text", nullable: true),
CreateArchiveJob_ChapterId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: true),
Path = table.Column<string>(type: "text", nullable: true),
CreateComicInfoXmlJob_ChapterId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: true),
MangaId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: true),
ChapterId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: true),
FromLocation = table.Column<string>(type: "text", nullable: true),
ToLocation = table.Column<string>(type: "text", nullable: true),
ProcessImagesJob_Path = table.Column<string>(type: "text", nullable: true),
Bw = table.Column<bool>(type: "boolean", nullable: true),
Compression = table.Column<int>(type: "integer", nullable: true),
SearchString = table.Column<string>(type: "text", nullable: true),
MangaConnectorName = table.Column<string>(type: "text", nullable: true),
UpdateMetadataJob_MangaId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_Jobs", x => x.JobId);
table.ForeignKey(
name: "FK_Jobs_Chapters_ChapterId",
column: x => x.ChapterId,
principalTable: "Chapters",
principalColumn: "ChapterId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_Jobs_Chapters_CreateArchiveJob_ChapterId",
column: x => x.CreateArchiveJob_ChapterId,
principalTable: "Chapters",
principalColumn: "ChapterId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_Jobs_Chapters_CreateComicInfoXmlJob_ChapterId",
column: x => x.CreateComicInfoXmlJob_ChapterId,
principalTable: "Chapters",
principalColumn: "ChapterId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_Jobs_Jobs_JobId1",
column: x => x.JobId1,
principalTable: "Jobs",
principalColumn: "JobId");
table.ForeignKey(
name: "FK_Jobs_Manga_MangaId",
column: x => x.MangaId,
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_Jobs_Manga_UpdateMetadataJob_MangaId",
column: x => x.UpdateMetadataJob_MangaId,
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "Link",
columns: table => new
{
LinkId = table.Column<string>(type: "character varying(64)", maxLength: 64, nullable: false),
LinkProvider = table.Column<string>(type: "text", nullable: false),
LinkUrl = table.Column<string>(type: "text", nullable: false),
MangaId = table.Column<string>(type: "character varying(64)", nullable: false),
LinkIds = table.Column<string>(type: "text", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_Link", x => x.LinkId);
table.ForeignKey(
name: "FK_Link_Manga_MangaId",
column: x => x.MangaId,
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "MangaAuthor",
columns: table => new
{
MangaId = table.Column<string>(type: "character varying(64)", nullable: false),
AuthorId = table.Column<string>(type: "character varying(64)", nullable: false),
AuthorIds = table.Column<string>(type: "text", nullable: true),
MangaIds = table.Column<string>(type: "text", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_MangaAuthor", x => new { x.MangaId, x.AuthorId });
table.ForeignKey(
name: "FK_MangaAuthor_Authors_AuthorId",
column: x => x.AuthorId,
principalTable: "Authors",
principalColumn: "AuthorId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_MangaAuthor_Manga_MangaId",
column: x => x.MangaId,
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "MangaTag",
columns: table => new
{
MangaId = table.Column<string>(type: "character varying(64)", nullable: false),
Tag = table.Column<string>(type: "text", nullable: false),
MangaIds = table.Column<string>(type: "text", nullable: false),
TagIds = table.Column<string>(type: "text", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_MangaTag", x => new { x.MangaId, x.Tag });
table.ForeignKey(
name: "FK_MangaTag_Manga_MangaId",
column: x => x.MangaId,
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_MangaTag_Tags_MangaIds",
column: x => x.MangaIds,
principalTable: "Tags",
principalColumn: "Tag",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_MangaTag_Tags_Tag",
column: x => x.Tag,
principalTable: "Tags",
principalColumn: "Tag",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateIndex(
name: "IX_AltTitles_MangaId",
table: "AltTitles",
column: "MangaId");
migrationBuilder.CreateIndex(
name: "IX_Chapters_ParentMangaId",
table: "Chapters",
column: "ParentMangaId");
migrationBuilder.CreateIndex(
name: "IX_Jobs_ChapterId",
table: "Jobs",
column: "ChapterId");
migrationBuilder.CreateIndex(
name: "IX_Jobs_CreateArchiveJob_ChapterId",
table: "Jobs",
column: "CreateArchiveJob_ChapterId");
migrationBuilder.CreateIndex(
name: "IX_Jobs_CreateComicInfoXmlJob_ChapterId",
table: "Jobs",
column: "CreateComicInfoXmlJob_ChapterId");
migrationBuilder.CreateIndex(
name: "IX_Jobs_JobId1",
table: "Jobs",
column: "JobId1");
migrationBuilder.CreateIndex(
name: "IX_Jobs_MangaId",
table: "Jobs",
column: "MangaId");
migrationBuilder.CreateIndex(
name: "IX_Jobs_UpdateMetadataJob_MangaId",
table: "Jobs",
column: "UpdateMetadataJob_MangaId");
migrationBuilder.CreateIndex(
name: "IX_Link_MangaId",
table: "Link",
column: "MangaId");
migrationBuilder.CreateIndex(
name: "IX_Manga_LatestChapterAvailableId",
table: "Manga",
column: "LatestChapterAvailableId",
unique: true);
migrationBuilder.CreateIndex(
name: "IX_Manga_LatestChapterDownloadedId",
table: "Manga",
column: "LatestChapterDownloadedId",
unique: true);
migrationBuilder.CreateIndex(
name: "IX_Manga_MangaConnectorName",
table: "Manga",
column: "MangaConnectorName");
migrationBuilder.CreateIndex(
name: "IX_MangaAuthor_AuthorId",
table: "MangaAuthor",
column: "AuthorId");
migrationBuilder.CreateIndex(
name: "IX_MangaTag_MangaIds",
table: "MangaTag",
column: "MangaIds");
migrationBuilder.CreateIndex(
name: "IX_MangaTag_Tag",
table: "MangaTag",
column: "Tag");
migrationBuilder.AddForeignKey(
name: "FK_AltTitles_Manga_MangaId",
table: "AltTitles",
column: "MangaId",
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
migrationBuilder.AddForeignKey(
name: "FK_Chapters_Manga_ParentMangaId",
table: "Chapters",
column: "ParentMangaId",
principalTable: "Manga",
principalColumn: "MangaId",
onDelete: ReferentialAction.Cascade);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropForeignKey(
name: "FK_Chapters_Manga_ParentMangaId",
table: "Chapters");
migrationBuilder.DropTable(
name: "AltTitles");
migrationBuilder.DropTable(
name: "Jobs");
migrationBuilder.DropTable(
name: "LibraryConnectors");
migrationBuilder.DropTable(
name: "Link");
migrationBuilder.DropTable(
name: "MangaAuthor");
migrationBuilder.DropTable(
name: "MangaTag");
migrationBuilder.DropTable(
name: "NotificationConnectors");
migrationBuilder.DropTable(
name: "Authors");
migrationBuilder.DropTable(
name: "Tags");
migrationBuilder.DropTable(
name: "Manga");
migrationBuilder.DropTable(
name: "Chapters");
migrationBuilder.DropTable(
name: "MangaConnectors");
}
}
}

@ -1,688 +0,0 @@
// <auto-generated />
using System;
using API.Schema;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace API.Migrations
{
[DbContext(typeof(PgsqlContext))]
partial class PgsqlContextModelSnapshot : ModelSnapshot
{
protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "9.0.0")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("API.Schema.Author", b =>
{
b.Property<string>("AuthorId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("AuthorName")
.IsRequired()
.HasColumnType("text");
b.HasKey("AuthorId");
b.ToTable("Authors");
});
modelBuilder.Entity("API.Schema.Chapter", b =>
{
b.Property<string>("ChapterId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("ArchiveFileName")
.IsRequired()
.HasColumnType("text");
b.Property<float>("ChapterNumber")
.HasColumnType("real");
b.Property<bool>("Downloaded")
.HasColumnType("boolean");
b.Property<string>("ParentMangaId")
.IsRequired()
.HasColumnType("character varying(64)");
b.Property<string>("Title")
.HasColumnType("text");
b.Property<string>("Url")
.IsRequired()
.HasColumnType("text");
b.Property<float?>("VolumeNumber")
.HasColumnType("real");
b.HasKey("ChapterId");
b.HasIndex("ParentMangaId");
b.ToTable("Chapters");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.Property<string>("JobId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.PrimitiveCollection<string[]>("DependsOnJobsIds")
.HasMaxLength(64)
.HasColumnType("text[]");
b.Property<string>("JobId1")
.HasColumnType("character varying(64)");
b.Property<byte>("JobType")
.HasColumnType("smallint");
b.Property<DateTime>("LastExecution")
.HasColumnType("timestamp with time zone");
b.Property<string>("ParentJobId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<decimal>("RecurrenceMs")
.HasColumnType("numeric(20,0)");
b.Property<int>("state")
.HasColumnType("integer");
b.HasKey("JobId");
b.HasIndex("JobId1");
b.HasIndex("ParentJobId");
b.ToTable("Jobs");
b.HasDiscriminator<byte>("JobType");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("API.Schema.LibraryConnectors.LibraryConnector", b =>
{
b.Property<string>("LibraryConnectorId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("Auth")
.IsRequired()
.HasColumnType("text");
b.Property<string>("BaseUrl")
.IsRequired()
.HasColumnType("text");
b.Property<byte>("LibraryType")
.HasColumnType("smallint");
b.HasKey("LibraryConnectorId");
b.ToTable("LibraryConnectors");
b.HasDiscriminator<byte>("LibraryType");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("API.Schema.Link", b =>
{
b.Property<string>("LinkId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("LinkProvider")
.IsRequired()
.HasColumnType("text");
b.Property<string>("LinkUrl")
.IsRequired()
.HasColumnType("text");
b.Property<string>("MangaId")
.HasColumnType("character varying(64)");
b.HasKey("LinkId");
b.HasIndex("MangaId");
b.ToTable("Link");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.Property<string>("MangaId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("ConnectorId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("CoverFileNameInCache")
.HasColumnType("text");
b.Property<string>("CoverUrl")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Description")
.IsRequired()
.HasColumnType("text");
b.Property<string>("FolderName")
.IsRequired()
.HasColumnType("text");
b.Property<float>("IgnoreChapterBefore")
.HasColumnType("real");
b.Property<string>("MangaConnectorId")
.IsRequired()
.HasColumnType("character varying(32)");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("text");
b.Property<string>("OriginalLanguage")
.HasColumnType("text");
b.Property<byte>("ReleaseStatus")
.HasColumnType("smallint");
b.Property<string>("WebsiteUrl")
.IsRequired()
.HasColumnType("text");
b.Property<long>("Year")
.HasColumnType("bigint");
b.HasKey("MangaId");
b.HasIndex("MangaConnectorId");
b.ToTable("Manga");
});
modelBuilder.Entity("API.Schema.MangaAltTitle", b =>
{
b.Property<string>("AltTitleId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<string>("Language")
.IsRequired()
.HasMaxLength(8)
.HasColumnType("character varying(8)");
b.Property<string>("MangaId")
.HasColumnType("character varying(64)");
b.Property<string>("Title")
.IsRequired()
.HasColumnType("text");
b.HasKey("AltTitleId");
b.HasIndex("MangaId");
b.ToTable("AltTitles");
});
modelBuilder.Entity("API.Schema.MangaConnectors.MangaConnector", b =>
{
b.Property<string>("Name")
.HasMaxLength(32)
.HasColumnType("character varying(32)");
b.PrimitiveCollection<string[]>("BaseUris")
.IsRequired()
.HasColumnType("text[]");
b.PrimitiveCollection<string[]>("SupportedLanguages")
.IsRequired()
.HasColumnType("text[]");
b.HasKey("Name");
b.ToTable("MangaConnectors");
b.HasDiscriminator<string>("Name").HasValue("MangaConnector");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("API.Schema.MangaTag", b =>
{
b.Property<string>("Tag")
.HasColumnType("text");
b.HasKey("Tag");
b.ToTable("Tags");
});
modelBuilder.Entity("API.Schema.Notification", b =>
{
b.Property<string>("NotificationId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<DateTime>("Date")
.HasColumnType("timestamp with time zone");
b.Property<string>("Message")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Title")
.IsRequired()
.HasColumnType("text");
b.Property<byte>("Urgency")
.HasColumnType("smallint");
b.HasKey("NotificationId");
b.ToTable("Notifications");
});
modelBuilder.Entity("API.Schema.NotificationConnectors.NotificationConnector", b =>
{
b.Property<string>("NotificationConnectorId")
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.Property<byte>("NotificationConnectorType")
.HasColumnType("smallint");
b.HasKey("NotificationConnectorId");
b.ToTable("NotificationConnectors");
b.HasDiscriminator<byte>("NotificationConnectorType");
b.UseTphMappingStrategy();
});
modelBuilder.Entity("AuthorManga", b =>
{
b.Property<string>("AuthorsAuthorId")
.HasColumnType("character varying(64)");
b.Property<string>("MangaId")
.HasColumnType("character varying(64)");
b.HasKey("AuthorsAuthorId", "MangaId");
b.HasIndex("MangaId");
b.ToTable("AuthorManga");
});
modelBuilder.Entity("MangaMangaTag", b =>
{
b.Property<string>("MangaId")
.HasColumnType("character varying(64)");
b.Property<string>("TagsTag")
.HasColumnType("text");
b.HasKey("MangaId", "TagsTag");
b.HasIndex("TagsTag");
b.ToTable("MangaMangaTag");
});
modelBuilder.Entity("API.Schema.Jobs.DownloadNewChaptersJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.HasDiscriminator().HasValue((byte)1);
});
modelBuilder.Entity("API.Schema.Jobs.DownloadSingleChapterJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("ChapterId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("ChapterId");
b.HasDiscriminator().HasValue((byte)0);
});
modelBuilder.Entity("API.Schema.Jobs.MoveFileOrFolderJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("FromLocation")
.IsRequired()
.HasColumnType("text");
b.Property<string>("ToLocation")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)3);
});
modelBuilder.Entity("API.Schema.Jobs.UpdateMetadataJob", b =>
{
b.HasBaseType("API.Schema.Jobs.Job");
b.Property<string>("MangaId")
.IsRequired()
.HasMaxLength(64)
.HasColumnType("character varying(64)");
b.HasIndex("MangaId");
b.ToTable("Jobs", t =>
{
t.Property("MangaId")
.HasColumnName("UpdateMetadataJob_MangaId");
});
b.HasDiscriminator().HasValue((byte)2);
});
modelBuilder.Entity("API.Schema.LibraryConnectors.Kavita", b =>
{
b.HasBaseType("API.Schema.LibraryConnectors.LibraryConnector");
b.HasDiscriminator().HasValue((byte)1);
});
modelBuilder.Entity("API.Schema.LibraryConnectors.Komga", b =>
{
b.HasBaseType("API.Schema.LibraryConnectors.LibraryConnector");
b.HasDiscriminator().HasValue((byte)0);
});
modelBuilder.Entity("API.Schema.MangaConnectors.AsuraToon", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("AsuraToon");
});
modelBuilder.Entity("API.Schema.MangaConnectors.Bato", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("Bato");
});
modelBuilder.Entity("API.Schema.MangaConnectors.MangaDex", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("MangaDex");
});
modelBuilder.Entity("API.Schema.MangaConnectors.MangaHere", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("MangaHere");
});
modelBuilder.Entity("API.Schema.MangaConnectors.MangaKatana", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("MangaKatana");
});
modelBuilder.Entity("API.Schema.MangaConnectors.MangaLife", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("Manga4Life");
});
modelBuilder.Entity("API.Schema.MangaConnectors.Manganato", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("Manganato");
});
modelBuilder.Entity("API.Schema.MangaConnectors.Mangasee", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("Mangasee");
});
modelBuilder.Entity("API.Schema.MangaConnectors.Mangaworld", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("Mangaworld");
});
modelBuilder.Entity("API.Schema.MangaConnectors.ManhuaPlus", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("ManhuaPlus");
});
modelBuilder.Entity("API.Schema.MangaConnectors.Weebcentral", b =>
{
b.HasBaseType("API.Schema.MangaConnectors.MangaConnector");
b.HasDiscriminator().HasValue("Weebcentral");
});
modelBuilder.Entity("API.Schema.NotificationConnectors.Gotify", b =>
{
b.HasBaseType("API.Schema.NotificationConnectors.NotificationConnector");
b.Property<string>("AppToken")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Endpoint")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)0);
});
modelBuilder.Entity("API.Schema.NotificationConnectors.Lunasea", b =>
{
b.HasBaseType("API.Schema.NotificationConnectors.NotificationConnector");
b.Property<string>("Id")
.IsRequired()
.HasColumnType("text");
b.HasDiscriminator().HasValue((byte)1);
});
modelBuilder.Entity("API.Schema.NotificationConnectors.Ntfy", b =>
{
b.HasBaseType("API.Schema.NotificationConnectors.NotificationConnector");
b.Property<string>("Auth")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Endpoint")
.IsRequired()
.HasColumnType("text");
b.Property<string>("Topic")
.IsRequired()
.HasColumnType("text");
b.ToTable("NotificationConnectors", t =>
{
t.Property("Endpoint")
.HasColumnName("Ntfy_Endpoint");
});
b.HasDiscriminator().HasValue((byte)2);
});
modelBuilder.Entity("API.Schema.Chapter", b =>
{
b.HasOne("API.Schema.Manga", "ParentManga")
.WithMany()
.HasForeignKey("ParentMangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("ParentManga");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.HasOne("API.Schema.Jobs.Job", null)
.WithMany("DependsOnJobs")
.HasForeignKey("JobId1");
b.HasOne("API.Schema.Jobs.Job", "ParentJob")
.WithMany()
.HasForeignKey("ParentJobId");
b.Navigation("ParentJob");
});
modelBuilder.Entity("API.Schema.Link", b =>
{
b.HasOne("API.Schema.Manga", null)
.WithMany("Links")
.HasForeignKey("MangaId");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.HasOne("API.Schema.MangaConnectors.MangaConnector", "MangaConnector")
.WithMany()
.HasForeignKey("MangaConnectorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("MangaConnector");
});
modelBuilder.Entity("API.Schema.MangaAltTitle", b =>
{
b.HasOne("API.Schema.Manga", null)
.WithMany("AltTitles")
.HasForeignKey("MangaId");
});
modelBuilder.Entity("AuthorManga", b =>
{
b.HasOne("API.Schema.Author", null)
.WithMany()
.HasForeignKey("AuthorsAuthorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.Manga", null)
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("MangaMangaTag", b =>
{
b.HasOne("API.Schema.Manga", null)
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Schema.MangaTag", null)
.WithMany()
.HasForeignKey("TagsTag")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("API.Schema.Jobs.DownloadNewChaptersJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Jobs.DownloadSingleChapterJob", b =>
{
b.HasOne("API.Schema.Chapter", "Chapter")
.WithMany()
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Schema.Jobs.UpdateMetadataJob", b =>
{
b.HasOne("API.Schema.Manga", "Manga")
.WithMany()
.HasForeignKey("MangaId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Manga");
});
modelBuilder.Entity("API.Schema.Jobs.Job", b =>
{
b.Navigation("DependsOnJobs");
});
modelBuilder.Entity("API.Schema.Manga", b =>
{
b.Navigation("AltTitles");
b.Navigation("Links");
});
#pragma warning restore 612, 618
}
}
}

@ -1,46 +0,0 @@
using Asp.Versioning.ApiExplorer;
using Microsoft.Extensions.Options;
using Microsoft.OpenApi.Models;
using Swashbuckle.AspNetCore.SwaggerGen;
namespace API;
public class NamedSwaggerGenOptions : IConfigureNamedOptions<SwaggerGenOptions>
{
private readonly IApiVersionDescriptionProvider provider;
public NamedSwaggerGenOptions(IApiVersionDescriptionProvider provider)
{
this.provider = provider;
}
public void Configure(string? name, SwaggerGenOptions options)
{
Configure(options);
}
public void Configure(SwaggerGenOptions options)
{
// add swagger document for every API version discovered
foreach (var description in provider.ApiVersionDescriptions)
{
options.SwaggerDoc(
description.GroupName,
CreateVersionInfo(description));
}
}
private OpenApiInfo CreateVersionInfo(
ApiVersionDescription description)
{
var info = new OpenApiInfo()
{
Title = "Test API " + description.GroupName,
Version = description.ApiVersion.ToString()
};
if (description.IsDeprecated)
{
info.Description += " This API version has been deprecated.";
}
return info;
}
}

@ -1,3 +0,0 @@
namespace API;
public record ProblemResponse(string title, string? message = null);
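Purely as a sketch (not part of the repository): a hypothetical controller action showing how this record could serve as an error body. The controller name, route, and lookup below are assumptions for illustration.

using API.Schema;
using Microsoft.AspNetCore.Mvc;

namespace API.Controllers;

// Hypothetical controller for illustration only; route and action are assumptions.
[ApiController]
[Route("v{version:apiVersion}/[controller]")]
public class MangaController(PgsqlContext context) : ControllerBase
{
    [HttpGet("{MangaId}")]
    public IActionResult GetManga(string MangaId)
    {
        Manga? manga = context.Manga.Find(MangaId);
        return manga is null
            ? NotFound(new ProblemResponse("Manga not found.", $"No Manga with id {MangaId}."))
            : Ok(manga);
    }
}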

@ -1,132 +0,0 @@
using System.Reflection;
using System.Text.Json.Serialization;
using API;
using API.Schema;
using API.Schema.Jobs;
using API.Schema.MangaConnectors;
using Asp.Versioning;
using Asp.Versioning.Builder;
using Asp.Versioning.Conventions;
using Microsoft.EntityFrameworkCore;
using Newtonsoft.Json.Converters;
var builder = WebApplication.CreateBuilder(args);
builder.Services.AddCors(options =>
{
options.AddPolicy("AllowAll",
policy =>
{
policy
.AllowAnyOrigin()
.AllowAnyMethod()
.AllowAnyHeader();
});
});
builder.Services.AddApiVersioning(option =>
{
option.AssumeDefaultVersionWhenUnspecified = true;
option.DefaultApiVersion = new ApiVersion(2);
option.ReportApiVersions = true;
option.ApiVersionReader = ApiVersionReader.Combine(
new UrlSegmentApiVersionReader(),
new QueryStringApiVersionReader("api-version"),
new HeaderApiVersionReader("X-Version"),
new MediaTypeApiVersionReader("x-version"));
})
.AddMvc(options =>
{
options.Conventions.Add(new VersionByNamespaceConvention());
})
.AddApiExplorer(options => {
options.GroupNameFormat = "'v'V";
options.SubstituteApiVersionInUrl = true;
});
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddSwaggerGen(opt =>
{
var xmlFilename = $"{Assembly.GetExecutingAssembly().GetName().Name}.xml";
opt.IncludeXmlComments(Path.Combine(AppContext.BaseDirectory, xmlFilename));
});
builder.Services.ConfigureOptions<NamedSwaggerGenOptions>();
builder.Services.AddDbContext<PgsqlContext>(options =>
options.UseNpgsql($"Host={Environment.GetEnvironmentVariable("POSTGRES_HOST")??"localhost:5432"}; " +
$"Database={Environment.GetEnvironmentVariable("POSTGRES_DB")??"postgres"}; " +
$"Username={Environment.GetEnvironmentVariable("POSTGRES_USER")??"postgres"}; " +
$"Password={Environment.GetEnvironmentVariable("POSTGRES_PASSWORD")??"postgres"}"));
builder.Services.AddControllers().AddNewtonsoftJson(opts =>
{
opts.SerializerSettings.Converters.Add(new StringEnumConverter());
});
builder.WebHost.UseUrls("http://*:6531");
var app = builder.Build();
ApiVersionSet apiVersionSet = app.NewApiVersionSet()
.HasApiVersion(new ApiVersion(2))
.ReportApiVersions()
.Build();
app.UseCors("AllowAll");
app.MapControllers()
.WithApiVersionSet(apiVersionSet)
.MapToApiVersion(2);
app.UseSwagger();
app.UseSwaggerUI(options =>
{
options.SwaggerEndpoint(
$"/swagger/v2/swagger.json", "v2");
});
app.UseHttpsRedirection();
using (var scope = app.Services.CreateScope())
{
var db = scope.ServiceProvider.GetRequiredService<PgsqlContext>();
db.Database.Migrate();
}
using (var scope = app.Services.CreateScope())
{
PgsqlContext context = scope.ServiceProvider.GetService<PgsqlContext>()!;
MangaConnector[] connectors =
[
new AsuraToon(),
new Bato(),
new MangaDex(),
new MangaHere(),
new MangaKatana(),
new MangaLife(),
new Manganato(),
new Mangaworld(),
new ManhuaPlus(),
new Weebcentral()
];
MangaConnector[] newConnectors = connectors.Where(c => !context.MangaConnectors.Contains(c)).ToArray();
context.MangaConnectors.AddRange(newConnectors);
context.Jobs.RemoveRange(context.Jobs.Where(j => j.state == JobState.Completed && j.RecurrenceMs < 1));
string[] emojis = { "(•‿•)", "(づ \u25d5‿\u25d5 )づ", "( \u02d8\u25bd\u02d8)っ\u2668", "=\uff3e\u25cf \u22cf \u25cf\uff3e=", "(ΦωΦ)", "(\u272a\u3268\u272a)", "( ノ・o・ )ノ", "(〜^\u2207^ )〜", "~(\u2267ω\u2266)~","૮ \u00b4• ﻌ \u00b4• ა", "(\u02c3ᆺ\u02c2)", "(=\ud83d\udf66 \u0f1d \ud83d\udf66=)"};
context.Notifications.Add(new Notification("Tranga Started", emojis[Random.Shared.Next(emojis.Length)], NotificationUrgency.High)); //upper bound of Next is exclusive, so Length (not Length - 1) includes the last emoji
context.SaveChanges();
}
TrangaSettings.Load();
Tranga.StartLogger();
Tranga.JobStarterThread.Start(app.Services.CreateScope().ServiceProvider.GetService<PgsqlContext>());
Tranga.NotificationSenderThread.Start(app.Services.CreateScope().ServiceProvider.GetService<PgsqlContext>());
app.UseCors("AllowAll");
app.Run();
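For illustration only, assuming the service listens on http://localhost:6531 as configured by UseUrls above and exposes some controller route (the /Manga path below is a placeholder): the combined ApiVersionReader lets a client select version 2 via the URL segment, the api-version query parameter, or the X-Version header.

using System.Net.Http;

// Standalone client sketch; endpoint paths are placeholders, not actual routes from this repository.
using HttpClient client = new() { BaseAddress = new Uri("http://localhost:6531") };

// 1. URL segment (UrlSegmentApiVersionReader)
await client.GetAsync("/v2/Manga");

// 2. Query string (QueryStringApiVersionReader("api-version"))
await client.GetAsync("/Manga?api-version=2");

// 3. Header (HeaderApiVersionReader("X-Version"))
using HttpRequestMessage req = new(HttpMethod.Get, "/Manga");
req.Headers.Add("X-Version", "2");
await client.SendAsync(req);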

@ -1,47 +0,0 @@
{
"$schema": "http://json.schemastore.org/launchsettings.json",
"iisSettings": {
"windowsAuthentication": false,
"anonymousAuthentication": true,
"iisExpress": {
"applicationUrl": "http://localhost:5976",
"sslPort": 44332,
"environmentVariables": {
"POSTGRES_Host": "localhost:5432"
}
}
},
"profiles": {
"http": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
"launchUrl": "swagger",
"applicationUrl": "http://localhost:5287",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development",
"POSTGRES_Host": "localhost:5432"
}
},
"https": {
"commandName": "Project",
"dotnetRunMessages": true,
"launchBrowser": true,
"launchUrl": "swagger",
"applicationUrl": "https://localhost:7206;http://localhost:5287",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development",
"POSTGRES_Host": "localhost:5432"
}
},
"IIS Express": {
"commandName": "IISExpress",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development",
"POSTGRES_Host": "localhost:5432"
}
}
}
}

@ -1,12 +0,0 @@
using System.ComponentModel.DataAnnotations;
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
[PrimaryKey("AuthorId")]
public class Author(string authorName)
{
[MaxLength(64)]
public string AuthorId { get; init; } = TokenGen.CreateToken(typeof(Author), 64);
public string AuthorName { get; init; } = authorName;
}
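Illustrative only: constructing an Author yields an id produced by TokenGen.CreateToken, which is the primary key stored in the Authors table created by the migration above.

// Placeholder name; AuthorId is generated by TokenGen.CreateToken(typeof(Author), 64).
Author author = new("Some Author");
Console.WriteLine($"{author.AuthorId}: {author.AuthorName}");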

@ -1,113 +0,0 @@
using System.ComponentModel.DataAnnotations;
using System.Xml.Linq;
using API.Schema.Jobs;
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
[PrimaryKey("ChapterId")]
public class Chapter : IComparable<Chapter>
{
[MaxLength(64)]
public string ChapterId { get; init; } = TokenGen.CreateToken(typeof(Chapter), 64);
public int? VolumeNumber { get; private set; }
public ChapterNumber ChapterNumber { get; private set; }
public string Url { get; internal set; }
public string? Title { get; private set; }
public string ArchiveFileName { get; private set; }
public bool Downloaded { get; internal set; } = false;
public string ParentMangaId { get; internal set; }
public Manga? ParentManga { get; init; }
public Chapter(Manga parentManga, string url, ChapterNumber chapterNumber, int? volumeNumber = null, string? title = null)
: this(parentManga.MangaId, url, chapterNumber, volumeNumber, title)
{
this.ParentManga = parentManga;
}
public Chapter(string parentMangaId, string url, ChapterNumber chapterNumber,
int? volumeNumber = null, string? title = null)
{
this.ParentMangaId = parentMangaId;
this.Url = url;
this.ChapterNumber = chapterNumber;
this.VolumeNumber = volumeNumber;
this.Title = title;
this.ArchiveFileName = BuildArchiveFileName();
}
public MoveFileOrFolderJob? UpdateChapterNumber(ChapterNumber chapterNumber)
{
this.ChapterNumber = chapterNumber;
return UpdateArchiveFileName();
}
public MoveFileOrFolderJob? UpdateVolumeNumber(int? volumeNumber)
{
this.VolumeNumber = volumeNumber;
return UpdateArchiveFileName();
}
public MoveFileOrFolderJob? UpdateTitle(string? title)
{
this.Title = title;
return UpdateArchiveFileName();
}
private string BuildArchiveFileName()
{
return $"{this.ParentManga.Name} - Vol.{this.VolumeNumber ?? 0} Ch.{this.ChapterNumber}{(this.Title is null ? "" : $" - {this.Title}")}.cbz";
}
private MoveFileOrFolderJob? UpdateArchiveFileName()
{
string oldPath = GetArchiveFilePath();
this.ArchiveFileName = BuildArchiveFileName();
if (Downloaded)
{
return new MoveFileOrFolderJob(oldPath, GetArchiveFilePath());
}
return null;
}
/// <summary>
/// Creates full file path of chapter-archive
/// </summary>
/// <returns>Filepath</returns>
internal string GetArchiveFilePath()
{
return Path.Join(TrangaSettings.downloadLocation, ParentManga.FolderName, ArchiveFileName);
}
public bool IsDownloaded()
{
string path = GetArchiveFilePath();
return File.Exists(path);
}
public int CompareTo(Chapter? other)
{
if(other is not { } otherChapter)
throw new ArgumentException($"{other} can not be compared to {this}");
return this.VolumeNumber?.CompareTo(otherChapter.VolumeNumber) switch
{
<0 => -1,
>0 => 1,
_ => this.ChapterNumber.CompareTo(otherChapter.ChapterNumber)
};
}
internal string GetComicInfoXmlString()
{
XElement comicInfo = new XElement("ComicInfo",
new XElement("Tags", string.Join(',', ParentManga.Tags.Select(tag => tag.Tag))),
new XElement("LanguageISO", ParentManga.OriginalLanguage),
new XElement("Title", this.Title),
new XElement("Writer", string.Join(',', ParentManga.Authors.Select(author => author.AuthorName))),
new XElement("Volume", this.VolumeNumber),
new XElement("Number", this.ChapterNumber)
);
return comicInfo.ToString();
}
}

@ -1,305 +0,0 @@
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Numerics;
using System.Text.RegularExpressions;
namespace API.Schema;
public readonly struct ChapterNumber : INumber<ChapterNumber>
{
private readonly uint[] _numbers;
private readonly bool _naN;
private ChapterNumber(uint[] numbers, bool naN = false)
{
this._numbers = numbers;
this._naN = naN;
}
public ChapterNumber(string number)
{
if (!CanParse(number))
{
this._numbers = [];
this._naN = true;
return;
}
this._numbers = number.Split('.').Select(uint.Parse).ToArray();
this._naN = false;
}
public ChapterNumber(float number) : this(number.ToString("F")) {}
public ChapterNumber(double number) : this((float)number) {}
public ChapterNumber(uint number)
{
this._numbers = [number];
this._naN = false;
}
public ChapterNumber(int number)
{
if (int.IsNegative(number))
{
this._numbers = [];
this._naN = true;
return;
}
this._numbers = [(uint)number];
this._naN = false;
}
public int CompareTo(ChapterNumber other)
{
int index = 0;
while (index < this._numbers.Length && index < other._numbers.Length)
{
if (this._numbers[index] < other._numbers[index])
return -1;
if (this._numbers[index] > other._numbers[index])
return 1;
index++;
}
if (index >= this._numbers.Length && index >= other._numbers.Length)
return 0;
else if (index >= this._numbers.Length)
return -1;
else if (index >= other._numbers.Length)
return 1;
throw new UnreachableException();
}
private static readonly Regex Pattern = new(@"[0-9]+(?:\.[0-9]+)*");
public static bool CanParse(string? number) => number is not null && Pattern.Match(number).Length == number.Length && number.Length > 0;
public bool Equals(ChapterNumber other) => CompareTo(other) == 0;
public string ToString(string? format, IFormatProvider? formatProvider)
{
return string.Join('.', _numbers);
}
public override bool Equals(object? obj)
{
return obj is ChapterNumber other && Equals(other);
}
public override int GetHashCode()
{
return HashCode.Combine(_numbers, _naN);
}
public bool TryFormat(Span<char> destination, out int charsWritten, ReadOnlySpan<char> format, IFormatProvider? provider)
{
throw new NotImplementedException();
}
public int CompareTo(object? obj)
{
if(obj is ChapterNumber other)
return CompareTo(other);
throw new ArgumentException();
}
public static ChapterNumber Parse(string s, IFormatProvider? provider)
{
if(!CanParse(s))
throw new FormatException($"Invalid ChapterNumber-String: {s}");
return new ChapterNumber(s);
}
public static bool TryParse([NotNullWhen(true)] string? s, IFormatProvider? provider, out ChapterNumber result)
{
result = new ChapterNumber([], true);
if (!CanParse(s))
return false;
if (s is null)
return false;
result = new ChapterNumber(s);
return true;
}
public static ChapterNumber Parse(ReadOnlySpan<char> s, IFormatProvider? provider) => Parse(s.ToString(), provider);
public static bool TryParse(ReadOnlySpan<char> s, IFormatProvider? provider, out ChapterNumber result) => TryParse(s.ToString(), provider, out result);
public static ChapterNumber operator +(ChapterNumber left, ChapterNumber right)
{
if (IsNaN(left) || IsNaN(right))
return new ChapterNumber([], true);
int size = left._numbers.Length > right._numbers.Length ? left._numbers.Length : right._numbers.Length;
uint[] numbers = new uint[size];
for (int i = 0; i < size; i++)
{
if(i >= left._numbers.Length)
numbers[i] = right._numbers[i];
else if(i >= right._numbers.Length)
numbers[i] = left._numbers[i];
else
numbers[i] = left._numbers[i] + right._numbers[i];
}
return new ChapterNumber(numbers);
}
private static bool BothNotNaN(ChapterNumber left, ChapterNumber right) => !IsNaN(left) && !IsNaN(right);
public static ChapterNumber AdditiveIdentity => Zero;
public static bool operator ==(ChapterNumber left, ChapterNumber right) => BothNotNaN(left, right) && left.Equals(right);
public static bool operator !=(ChapterNumber left, ChapterNumber right) => !(left == right);
public static bool operator >(ChapterNumber left, ChapterNumber right) => BothNotNaN(left, right) && left.CompareTo(right) > 0;
public static bool operator >=(ChapterNumber left, ChapterNumber right) => BothNotNaN(left, right) && left.CompareTo(right) >= 0;
public static bool operator <(ChapterNumber left, ChapterNumber right) => BothNotNaN(left, right) && left.CompareTo(right) < 0;
public static bool operator <=(ChapterNumber left, ChapterNumber right) => BothNotNaN(left, right) && left.CompareTo(right) <= 0;
public static ChapterNumber operator %(ChapterNumber left, ChapterNumber right) => throw new ArithmeticException();
public static ChapterNumber operator +(ChapterNumber value) => throw new InvalidOperationException();
public static ChapterNumber operator --(ChapterNumber value)
{
if (IsNaN(value))
return value;
uint[] numbers = value._numbers.ToArray(); //copy so the original value's backing array is not mutated
numbers[0]--;
return new ChapterNumber(numbers);
}
public static ChapterNumber operator /(ChapterNumber left, ChapterNumber right) => throw new InvalidOperationException();
public static ChapterNumber operator ++(ChapterNumber value)
{
if (IsNaN(value))
return value;
uint[] numbers = value._numbers.ToArray(); //copy so the original value's backing array is not mutated
numbers[0]++;
return new ChapterNumber(numbers);
}
public static ChapterNumber MultiplicativeIdentity => One;
public static ChapterNumber operator *(ChapterNumber left, ChapterNumber right) => throw new InvalidOperationException();
public static ChapterNumber operator -(ChapterNumber left, ChapterNumber right) => throw new InvalidOperationException();
public static ChapterNumber operator -(ChapterNumber value) => throw new InvalidOperationException();
public static ChapterNumber Abs(ChapterNumber value) => value;
public static bool IsCanonical(ChapterNumber value) => true;
public static bool IsComplexNumber(ChapterNumber value) => false;
public static bool IsEvenInteger(ChapterNumber value) => IsInteger(value) && uint.IsEvenInteger(value._numbers[0]);
public static bool IsFinite(ChapterNumber value) => true;
public static bool IsImaginaryNumber(ChapterNumber value) => false;
public static bool IsInfinity(ChapterNumber value) => false;
public static bool IsInteger(ChapterNumber value) => !IsNaN(value) && value._numbers.Length == 1;
public static bool IsNaN(ChapterNumber value) => value._naN;
public static bool IsNegative(ChapterNumber value) => false;
public static bool IsNegativeInfinity(ChapterNumber value) => false;
public static bool IsNormal(ChapterNumber value) => true;
public static bool IsOddInteger(ChapterNumber value) => IsInteger(value) && uint.IsOddInteger(value._numbers[0]);
public static bool IsPositive(ChapterNumber value) => true;
public static bool IsPositiveInfinity(ChapterNumber value) => false;
public static bool IsRealNumber(ChapterNumber value) => false;
public static bool IsSubnormal(ChapterNumber value) => false;
public static bool IsZero(ChapterNumber value) => value._numbers.All(n => n == 0);
public static ChapterNumber MaxMagnitude(ChapterNumber x, ChapterNumber y)
{
if(IsNaN(x))
return new ChapterNumber([], true);
if (IsNaN(y))
return new ChapterNumber([], true);
return x >= y ? x : y;
}
public static ChapterNumber MaxMagnitudeNumber(ChapterNumber x, ChapterNumber y)
{
if (IsNaN(x))
return y;
if (IsNaN(y))
return x;
return x >= y ? x : y;
}
public static ChapterNumber MinMagnitude(ChapterNumber x, ChapterNumber y)
{
if(IsNaN(x))
return new ChapterNumber([], true);
if (IsNaN(y))
return new ChapterNumber([], true);
return x <= y ? x : y;
}
public static ChapterNumber MinMagnitudeNumber(ChapterNumber x, ChapterNumber y)
{
if (IsNaN(x))
return y;
if (IsNaN(y))
return x;
return x <= y ? x : y;
}
public static ChapterNumber Parse(ReadOnlySpan<char> s, NumberStyles style, IFormatProvider? provider) => throw new NotImplementedException();
public static ChapterNumber Parse(string s, NumberStyles style, IFormatProvider? provider) => throw new NotImplementedException();
public static bool TryConvertFromChecked<TOther>(TOther value, out ChapterNumber result) where TOther : INumberBase<TOther>
{
throw new NotImplementedException();
}
public static bool TryConvertFromSaturating<TOther>(TOther value, out ChapterNumber result) where TOther : INumberBase<TOther>
{
throw new NotImplementedException();
}
public static bool TryConvertFromTruncating<TOther>(TOther value, out ChapterNumber result) where TOther : INumberBase<TOther>
{
throw new NotImplementedException();
}
public static bool TryConvertToChecked<TOther>(ChapterNumber value, [MaybeNullWhen(false)] out TOther result) where TOther : INumberBase<TOther>
{
throw new NotImplementedException();
}
public static bool TryConvertToSaturating<TOther>(ChapterNumber value, [MaybeNullWhen(false)] out TOther result) where TOther : INumberBase<TOther>
{
throw new NotImplementedException();
}
public static bool TryConvertToTruncating<TOther>(ChapterNumber value, [MaybeNullWhen(false)] out TOther result) where TOther : INumberBase<TOther>
{
throw new NotImplementedException();
}
public static bool TryParse(ReadOnlySpan<char> s, NumberStyles style, IFormatProvider? provider, out ChapterNumber result)
=> TryParse(s.ToString(), style, provider, out result);
public static bool TryParse([NotNullWhen(true)] string? s, NumberStyles style, IFormatProvider? provider, out ChapterNumber result)
=> TryParse(s, provider, out result);
public static ChapterNumber One => new(1);
public static int Radix => 10;
public static ChapterNumber Zero => new(0);
}
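A short usage sketch (illustrative, not from the repository) of the parsing and segment-wise comparison defined above.

ChapterNumber a = new("10.5");                     // segments [10, 5]
ChapterNumber b = ChapterNumber.Parse("10.11", null);
Console.WriteLine(a < b);                          // True: segment-wise, 5 < 11
Console.WriteLine(ChapterNumber.CanParse("abc"));  // False: no digits
if (ChapterNumber.TryParse("3.1.2", null, out ChapterNumber c))
    Console.WriteLine(c.ToString(null, null));     // 3.1.2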

@ -1,136 +0,0 @@
using System.ComponentModel.DataAnnotations;
using System.IO.Compression;
using System.Runtime.InteropServices;
using API.MangaDownloadClients;
using API.Schema.MangaConnectors;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.Formats.Jpeg;
using SixLabors.ImageSharp.Processing;
using SixLabors.ImageSharp.Processing.Processors.Binarization;
using static System.IO.UnixFileMode;
namespace API.Schema.Jobs;
public class DownloadMangaCoverJob(string chapterId, string? parentJobId = null, ICollection<string>? dependsOnJobsIds = null)
: Job(TokenGen.CreateToken(typeof(DownloadMangaCoverJob), 64), JobType.DownloadMangaCoverJob, 0, parentJobId, dependsOnJobsIds)
{
[MaxLength(64)]
public string ChapterId { get; init; } = chapterId;
public Chapter? Chapter { get; init; }
protected override IEnumerable<Job> RunInternal(PgsqlContext context)
{
MangaConnector connector = Chapter.ParentManga?.MangaConnector ?? context.MangaConnectors.Find(context.Manga.Find(Chapter.ParentMangaId)?.MangaConnectorId)!;
DownloadChapterImages(Chapter, connector);
return [];
}
private bool DownloadChapterImages(Chapter chapter, MangaConnector connector)
{
string[] imageUrls = connector.GetChapterImageUrls(chapter);
string saveArchiveFilePath = chapter.GetArchiveFilePath();
//Check if Publication Directory already exists
string directoryPath = Path.GetDirectoryName(saveArchiveFilePath)!;
if (!Directory.Exists(directoryPath))
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
Directory.CreateDirectory(directoryPath,
UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute );
else
Directory.CreateDirectory(directoryPath);
if (File.Exists(saveArchiveFilePath)) //Archive already exists: delete it and redownload the chapter
File.Delete(saveArchiveFilePath);
//Create a temporary folder to store images
string tempFolder = Directory.CreateTempSubdirectory("trangatemp").FullName;
int chapterNum = 0;
//Download all Images to temporary Folder
if (imageUrls.Length == 0)
{
Directory.Delete(tempFolder, true);
return false;
}
foreach (string imageUrl in imageUrls)
{
string extension = imageUrl.Split('.')[^1].Split('?')[0];
string imagePath = Path.Join(tempFolder, $"{chapterNum++}.{extension}");
bool status = DownloadImage(imageUrl, imagePath);
if (status is false)
return false;
}
CopyCoverFromCacheToDownloadLocation();
File.WriteAllText(Path.Join(tempFolder, "ComicInfo.xml"), chapter.GetComicInfoXmlString());
//ZIP-it and ship-it
ZipFile.CreateFromDirectory(tempFolder, saveArchiveFilePath);
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(saveArchiveFilePath, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute | OtherRead | OtherExecute);
Directory.Delete(tempFolder, true); //Cleanup
return true;
}
private void ProcessImage(string imagePath)
{
if (!TrangaSettings.bwImages && TrangaSettings.compression == 100)
return;
using Image image = Image.Load(imagePath);
File.Delete(imagePath);
if(TrangaSettings.bwImages)
image.Mutate(i => i.ApplyProcessor(new AdaptiveThresholdProcessor()));
image.SaveAsJpeg(imagePath, new JpegEncoder()
{
Quality = TrangaSettings.compression
});
}
private void CopyCoverFromCacheToDownloadLocation(int? retries = 1)
{
//Check if Publication already has a Folder and cover
string publicationFolder = Chapter.ParentManga.CreatePublicationFolder();
DirectoryInfo dirInfo = new (publicationFolder);
if (dirInfo.EnumerateFiles().Any(info => info.Name.Contains("cover", StringComparison.InvariantCultureIgnoreCase)))
{
return;
}
string? fileInCache = Chapter.ParentManga.CoverFileNameInCache;
if (fileInCache is null || !File.Exists(fileInCache))
{
if (retries > 0 && Chapter.ParentManga.CoverUrl is not null)
{
Chapter.ParentManga.SaveCoverImageToCache();
CopyCoverFromCacheToDownloadLocation(--retries);
}
return;
}
string newFilePath = Path.Join(publicationFolder, $"cover.{Path.GetFileName(fileInCache).Split('.')[^1]}" );
File.Copy(fileInCache, newFilePath, true);
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(newFilePath, GroupRead | GroupWrite | UserRead | UserWrite);
}
private bool DownloadImage(string imageUrl, string savePath)
{
HttpDownloadClient downloadClient = new();
RequestResult requestResult = downloadClient.MakeRequest(imageUrl, RequestType.MangaImage);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return false;
if (requestResult.result == Stream.Null)
return false;
FileStream fs = new (savePath, FileMode.Create);
requestResult.result.CopyTo(fs);
fs.Close();
ProcessImage(savePath);
return true;
}
}


@ -1,22 +0,0 @@
using System.ComponentModel.DataAnnotations;
using API.Schema.MangaConnectors;
namespace API.Schema.Jobs;
public class DownloadNewChaptersJob(ulong recurrenceMs, string mangaId, string? parentJobId = null, ICollection<string>? dependsOnJobsIds = null)
: Job(TokenGen.CreateToken(typeof(DownloadNewChaptersJob), 64), JobType.DownloadNewChaptersJob, recurrenceMs, parentJobId, dependsOnJobsIds)
{
[MaxLength(64)]
public string MangaId { get; init; } = mangaId;
public Manga? Manga { get; init; }
protected override IEnumerable<Job> RunInternal(PgsqlContext context)
{
Manga m = Manga ?? context.Manga.Find(MangaId)!;
MangaConnector connector = m.MangaConnector ?? context.MangaConnectors.Find(m.MangaConnectorId)!;
Chapter[] newChapters = connector.GetNewChapters(m);
context.Chapters.AddRange(newChapters);
context.SaveChanges();
return newChapters.Select(chapter => new DownloadSingleChapterJob(chapter.ChapterId, this.JobId));
}
}


@ -1,138 +0,0 @@
using System.ComponentModel.DataAnnotations;
using System.IO.Compression;
using System.Runtime.InteropServices;
using API.MangaDownloadClients;
using API.Schema.MangaConnectors;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.Formats.Jpeg;
using SixLabors.ImageSharp.Processing;
using SixLabors.ImageSharp.Processing.Processors.Binarization;
using static System.IO.UnixFileMode;
namespace API.Schema.Jobs;
public class DownloadSingleChapterJob(string chapterId, string? parentJobId = null, ICollection<string>? dependsOnJobsIds = null)
: Job(TokenGen.CreateToken(typeof(DownloadSingleChapterJob), 64), JobType.DownloadSingleChapterJob, 0, parentJobId, dependsOnJobsIds)
{
[MaxLength(64)]
public string ChapterId { get; init; } = chapterId;
public Chapter? Chapter { get; init; }
protected override IEnumerable<Job> RunInternal(PgsqlContext context)
{
Chapter c = Chapter ?? context.Chapters.Find(ChapterId)!;
Manga m = c.ParentManga ?? context.Manga.Find(c.ParentMangaId)!;
MangaConnector connector = m.MangaConnector ?? context.MangaConnectors.Find(m.MangaConnectorId)!;
DownloadChapterImages(c, connector, m);
return [];
}
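//Downloads every page image to a temporary folder, writes ComicInfo.xml, zips the result into the chapter archive, and copies the series cover into the publication folder.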
private bool DownloadChapterImages(Chapter chapter, MangaConnector connector, Manga manga)
{
string[] imageUrls = connector.GetChapterImageUrls(chapter);
string saveArchiveFilePath = chapter.GetArchiveFilePath();
//Check if Publication Directory already exists
string directoryPath = Path.GetDirectoryName(saveArchiveFilePath)!;
if (!Directory.Exists(directoryPath))
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
Directory.CreateDirectory(directoryPath,
UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute );
else
Directory.CreateDirectory(directoryPath);
if (File.Exists(saveArchiveFilePath)) //Archive already exists: delete it and redownload the chapter
File.Delete(saveArchiveFilePath);
//Create a temporary folder to store images
string tempFolder = Directory.CreateTempSubdirectory("trangatemp").FullName;
int chapterNum = 0;
//Download all Images to temporary Folder
if (imageUrls.Length == 0)
{
Directory.Delete(tempFolder, true);
return false;
}
foreach (string imageUrl in imageUrls)
{
string extension = imageUrl.Split('.')[^1].Split('?')[0];
string imagePath = Path.Join(tempFolder, $"{chapterNum++}.{extension}");
bool status = DownloadImage(imageUrl, imagePath);
if (status is false)
return false;
}
CopyCoverFromCacheToDownloadLocation(manga);
File.WriteAllText(Path.Join(tempFolder, "ComicInfo.xml"), chapter.GetComicInfoXmlString());
//ZIP-it and ship-it
ZipFile.CreateFromDirectory(tempFolder, saveArchiveFilePath);
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(saveArchiveFilePath, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute | OtherRead | OtherExecute);
Directory.Delete(tempFolder, true); //Cleanup
return true;
}
private void ProcessImage(string imagePath)
{
if (!TrangaSettings.bwImages && TrangaSettings.compression == 100)
return;
using Image image = Image.Load(imagePath);
File.Delete(imagePath);
if(TrangaSettings.bwImages)
image.Mutate(i => i.ApplyProcessor(new AdaptiveThresholdProcessor()));
image.SaveAsJpeg(imagePath, new JpegEncoder()
{
Quality = TrangaSettings.compression
});
}
private void CopyCoverFromCacheToDownloadLocation(Manga manga, int? retries = 1)
{
//Check if Publication already has a Folder and cover
string publicationFolder = manga.CreatePublicationFolder();
DirectoryInfo dirInfo = new (publicationFolder);
if (dirInfo.EnumerateFiles().Any(info => info.Name.Contains("cover", StringComparison.InvariantCultureIgnoreCase)))
{
return;
}
string? fileInCache = manga.CoverFileNameInCache;
if (fileInCache is null || !File.Exists(fileInCache))
{
if (retries > 0)
{
manga.SaveCoverImageToCache();
CopyCoverFromCacheToDownloadLocation(manga, --retries);
}
return;
}
string newFilePath = Path.Join(publicationFolder, $"cover.{Path.GetFileName(fileInCache).Split('.')[^1]}" );
File.Copy(fileInCache, newFilePath, true);
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(newFilePath, GroupRead | GroupWrite | UserRead | UserWrite);
}
private bool DownloadImage(string imageUrl, string savePath)
{
HttpDownloadClient downloadClient = new();
RequestResult requestResult = downloadClient.MakeRequest(imageUrl, RequestType.MangaImage);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return false;
if (requestResult.result == Stream.Null)
return false;
FileStream fs = new (savePath, FileMode.Create);
requestResult.result.CopyTo(fs);
fs.Close();
ProcessImage(savePath);
return true;
}
}


@ -1,55 +0,0 @@
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using Microsoft.EntityFrameworkCore;
using Newtonsoft.Json;
namespace API.Schema.Jobs;
[PrimaryKey("JobId")]
public abstract class Job
{
[MaxLength(64)]
public string JobId { get; init; }
[MaxLength(64)]
public string? ParentJobId { get; init; }
public Job? ParentJob { get; init; }
[MaxLength(64)]
public ICollection<string>? DependsOnJobsIds { get; init; }
public ICollection<Job>? DependsOnJobs { get; init; }
public JobType JobType { get; init; }
public ulong RecurrenceMs { get; set; }
public DateTime LastExecution { get; internal set; } = DateTime.UnixEpoch;
[NotMapped]
public DateTime NextExecution => LastExecution.AddMilliseconds(RecurrenceMs);
public JobState state { get; internal set; } = JobState.Waiting;
public Job(string jobId, JobType jobType, ulong recurrenceMs, Job? parentJob = null, ICollection<Job>? dependsOnJobs = null)
: this(jobId, jobType, recurrenceMs, parentJob?.JobId, dependsOnJobs?.Select(j => j.JobId).ToList())
{
this.ParentJob = parentJob;
this.DependsOnJobs = dependsOnJobs;
}
public Job(string jobId, JobType jobType, ulong recurrenceMs, string? parentJobId = null, ICollection<string>? dependsOnJobsIds = null)
{
JobId = jobId;
ParentJobId = parentJobId;
DependsOnJobsIds = dependsOnJobsIds;
JobType = jobType;
RecurrenceMs = recurrenceMs;
}
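//Marks the job as Running, executes the concrete job logic, then marks it Completed and returns any follow-up jobs that should be queued.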
public IEnumerable<Job> Run(PgsqlContext context)
{
this.state = JobState.Running;
IEnumerable<Job> newJobs = RunInternal(context);
this.state = JobState.Completed;
return newJobs;
}
protected abstract IEnumerable<Job> RunInternal(PgsqlContext context);
}


@ -1,29 +0,0 @@
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using JsonSerializer = Newtonsoft.Json.JsonSerializer;
namespace API.Schema.Jobs;
public class JobJsonDeserializer : JsonConverter<Job>
{
public override bool CanWrite { get; } = false;
public override void WriteJson(JsonWriter writer, Job? value, JsonSerializer serializer)
{
throw new NotImplementedException();
}
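//Polymorphic deserialization: the concrete Job subclass is chosen from the "jobType" discriminator in the JSON payload.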
public override Job? ReadJson(JsonReader reader, Type objectType, Job? existingValue, bool hasExistingValue, JsonSerializer serializer)
{
JObject j = JObject.Load(reader);
JobType? type = Enum.Parse<JobType>(j.GetValue("jobType")!.Value<string>()!);
return type switch
{
JobType.DownloadSingleChapterJob => j.ToObject<DownloadSingleChapterJob>(),
JobType.DownloadNewChaptersJob => j.ToObject<DownloadNewChaptersJob>(),
JobType.UpdateMetaDataJob => j.ToObject<UpdateMetadataJob>(),
JobType.MoveFileOrFolderJob => j.ToObject<MoveFileOrFolderJob>(),
JobType.DownloadMangaCoverJob => j.ToObject<DownloadMangaCoverJob>(),
_ => null
};
}
}


@ -1,8 +0,0 @@
namespace API.Schema.Jobs;
public enum JobState
{
Waiting,
Running,
Completed
}


@ -1,11 +0,0 @@
namespace API.Schema.Jobs;
public enum JobType : byte
{
DownloadSingleChapterJob = 0,
DownloadNewChaptersJob = 1,
UpdateMetaDataJob = 2,
MoveFileOrFolderJob = 3,
DownloadMangaCoverJob = 4
}


@ -1,13 +0,0 @@
namespace API.Schema.Jobs;
public class MoveFileOrFolderJob(string fromLocation, string toLocation, string? parentJobId = null, ICollection<string>? dependsOnJobsIds = null)
: Job(TokenGen.CreateToken(typeof(MoveFileOrFolderJob), 64), JobType.MoveFileOrFolderJob, 0, parentJobId, dependsOnJobsIds)
{
public string FromLocation { get; init; } = fromLocation;
public string ToLocation { get; init; } = toLocation;
protected override IEnumerable<Job> RunInternal(PgsqlContext context)
{
throw new NotImplementedException();
}
}


@ -1,16 +0,0 @@
using System.ComponentModel.DataAnnotations;
namespace API.Schema.Jobs;
public class UpdateMetadataJob(ulong recurrenceMs, string mangaId, string? parentJobId = null, ICollection<string>? dependsOnJobsIds = null)
: Job(TokenGen.CreateToken(typeof(UpdateMetadataJob), 64), JobType.UpdateMetaDataJob, recurrenceMs, parentJobId, dependsOnJobsIds)
{
[MaxLength(64)]
public string MangaId { get; init; } = mangaId;
public virtual Manga? Manga { get; init; }
protected override IEnumerable<Job> RunInternal(PgsqlContext context)
{
throw new NotImplementedException();
}
}


@ -1,18 +0,0 @@
using System.ComponentModel.DataAnnotations;
using Microsoft.EntityFrameworkCore;
namespace API.Schema.LibraryConnectors;
[PrimaryKey("LibraryConnectorId")]
public abstract class LibraryConnector(string libraryConnectorId, LibraryType libraryType, string baseUrl, string auth)
{
[MaxLength(64)]
public string LibraryConnectorId { get; } = libraryConnectorId;
public LibraryType LibraryType { get; init; } = libraryType;
public string BaseUrl { get; init; } = baseUrl;
public string Auth { get; init; } = auth;
protected abstract void UpdateLibraryInternal();
internal abstract bool Test();
}


@ -1,7 +0,0 @@
namespace API.Schema.LibraryConnectors;
public enum LibraryType : byte
{
Komga = 0,
Kavita = 1
}


@ -1,69 +0,0 @@
using System.Net;
using System.Net.Http.Headers;
namespace API.Schema.LibraryConnectors;
public class NetClient
{
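//Authenticated GET helper: if a redirect ends in 401, the request is retried against the redirected URL with the same credentials.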
public static Stream MakeRequest(string url, string authScheme, string auth)
{
HttpClient client = new();
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(authScheme, auth);
HttpRequestMessage requestMessage = new ()
{
Method = HttpMethod.Get,
RequestUri = new Uri(url)
};
try
{
HttpResponseMessage response = client.Send(requestMessage);
if (response.StatusCode is HttpStatusCode.Unauthorized &&
response.RequestMessage!.RequestUri!.AbsoluteUri != url)
return MakeRequest(response.RequestMessage!.RequestUri!.AbsoluteUri, authScheme, auth);
else if (response.IsSuccessStatusCode)
return response.Content.ReadAsStream();
else
return Stream.Null;
}
catch (Exception e)
{
switch (e)
{
case HttpRequestException:
break;
default:
throw;
}
return Stream.Null;
}
}
public static bool MakePost(string url, string authScheme, string auth)
{
HttpClient client = new()
{
DefaultRequestHeaders =
{
{ "Accept", "application/json" },
{ "Authorization", new AuthenticationHeaderValue(authScheme, auth).ToString() }
}
};
HttpRequestMessage requestMessage = new ()
{
Method = HttpMethod.Post,
RequestUri = new Uri(url)
};
HttpResponseMessage response = client.Send(requestMessage);
if(response.StatusCode is HttpStatusCode.Unauthorized && response.RequestMessage!.RequestUri!.AbsoluteUri != url)
return MakePost(response.RequestMessage!.RequestUri!.AbsoluteUri, authScheme, auth);
else if (response.IsSuccessStatusCode)
return true;
else
return false;
}
}


@ -1,20 +0,0 @@
using System.ComponentModel.DataAnnotations;
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
[PrimaryKey("LinkId")]
public class Link(string linkProvider, string linkUrl)
{
[MaxLength(64)]
public string LinkId { get; init; } = TokenGen.CreateToken(typeof(Link), 64);
public string LinkProvider { get; init; } = linkProvider;
public string LinkUrl { get; init; } = linkUrl;
public override bool Equals(object? obj)
{
if (obj is not Link other)
return false;
return other.LinkProvider == LinkProvider && other.LinkUrl == LinkUrl;
}
}


@ -1,130 +0,0 @@
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using API.Schema.Jobs;
using API.Schema.MangaConnectors;
using Microsoft.EntityFrameworkCore;
using static System.IO.UnixFileMode;
namespace API.Schema;
[PrimaryKey("MangaId")]
public class Manga
{
[MaxLength(64)]
public string MangaId { get; init; } = TokenGen.CreateToken(typeof(Manga), 64);
[MaxLength(64)]
public string ConnectorId { get; init; }
public string Name { get; internal set; }
public string Description { get; internal set; }
public string WebsiteUrl { get; internal set; }
public string CoverUrl { get; internal set; }
public string? CoverFileNameInCache { get; internal set; }
public uint Year { get; internal set; }
public string? OriginalLanguage { get; internal set; }
public MangaReleaseStatus ReleaseStatus { get; internal set; }
public string FolderName { get; private set; }
public float IgnoreChapterBefore { get; internal set; }
public string MangaConnectorId { get; private set; }
public MangaConnector? MangaConnector { get; private set; }
public ICollection<Author>? Authors { get; internal set; }
public ICollection<MangaTag>? Tags { get; internal set; }
public ICollection<Link>? Links { get; internal set; }
public ICollection<MangaAltTitle>? AltTitles { get; internal set; }
public Manga(string connectorId, string name, string description, string websiteUrl, string coverUrl,
string? coverFileNameInCache, uint year, string? originalLanguage, MangaReleaseStatus releaseStatus,
float ignoreChapterBefore, MangaConnector mangaConnector, ICollection<Author> authors,
ICollection<MangaTag> tags, ICollection<Link> links, ICollection<MangaAltTitle> altTitles)
: this(connectorId, name, description, websiteUrl, coverUrl, coverFileNameInCache, year, originalLanguage,
releaseStatus, ignoreChapterBefore, mangaConnector.Name)
{
this.Authors = authors;
this.Tags = tags;
this.Links = links;
this.AltTitles = altTitles;
}
public Manga(string connectorId, string name, string description, string websiteUrl, string coverUrl,
string? coverFileNameInCache, uint year, string? originalLanguage, MangaReleaseStatus releaseStatus,
float ignoreChapterBefore, string mangaConnectorId)
{
ConnectorId = connectorId;
Name = name;
Description = description;
WebsiteUrl = websiteUrl;
CoverUrl = coverUrl;
CoverFileNameInCache = coverFileNameInCache;
Year = year;
OriginalLanguage = originalLanguage;
ReleaseStatus = releaseStatus;
IgnoreChapterBefore = ignoreChapterBefore;
MangaConnectorId = mangaConnectorId;
FolderName = BuildFolderName(name);
}
public MoveFileOrFolderJob UpdateFolderName(string downloadLocation, string newName)
{
string oldName = this.FolderName;
this.FolderName = newName;
return new MoveFileOrFolderJob(Path.Join(downloadLocation, oldName), Path.Join(downloadLocation, this.FolderName));
}
internal void UpdateWithInfo(Manga other)
{
this.Name = other.Name;
this.Year = other.Year;
this.Description = other.Description;
this.CoverUrl = other.CoverUrl;
this.OriginalLanguage = other.OriginalLanguage;
this.Authors = other.Authors;
this.Links = other.Links;
this.Tags = other.Tags;
this.AltTitles = other.AltTitles;
this.ReleaseStatus = other.ReleaseStatus;
}
private static string BuildFolderName(string mangaName)
{
return mangaName;
}
internal string SaveCoverImageToCache()
{
Regex urlRex = new (@"https?:\/\/((?:[a-zA-Z0-9-]+\.)+[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+))");
//https?:\/\/[a-zA-Z0-9-]+\.([a-zA-Z0-9-]+\.[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+)) for only second level domains
Match match = urlRex.Match(CoverUrl);
string filename = $"{match.Groups[1].Value}-{MangaId}.{match.Groups[3].Value}";
string saveImagePath = Path.Join(TrangaSettings.coverImageCache, filename);
if (File.Exists(saveImagePath))
return saveImagePath;
RequestResult coverResult = new HttpDownloadClient().MakeRequest(CoverUrl, RequestType.MangaCover);
using MemoryStream ms = new();
coverResult.result.CopyTo(ms);
Directory.CreateDirectory(TrangaSettings.coverImageCache);
File.WriteAllBytes(saveImagePath, ms.ToArray());
return saveImagePath;
}
public string CreatePublicationFolder()
{
string publicationFolder = Path.Join(TrangaSettings.downloadLocation, this.FolderName);
if(!Directory.Exists(publicationFolder))
Directory.CreateDirectory(publicationFolder);
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(publicationFolder, GroupRead | GroupWrite | GroupExecute | OtherRead | OtherWrite | OtherExecute | UserRead | UserWrite | UserExecute);
return publicationFolder;
}
//TODO onchanges create job to update metadata files in archives, etc.
}


@ -1,15 +0,0 @@
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
[PrimaryKey("AltTitleId")]
public class MangaAltTitle(string language, string title)
{
[MaxLength(64)]
public string AltTitleId { get; init; } = TokenGen.CreateToken("AltTitle", 64);
[MaxLength(8)]
public string Language { get; init; } = language;
public string Title { get; set; } = title;
}


@ -1,39 +0,0 @@
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using API.MangaDownloadClients;
using Microsoft.EntityFrameworkCore;
using Newtonsoft.Json;
namespace API.Schema.MangaConnectors;
[PrimaryKey("Name")]
public abstract class MangaConnector(string name, string[] supportedLanguages, string[] baseUris)
{
[MaxLength(32)]
public string Name { get; init; } = name;
public string[] SupportedLanguages { get; init; } = supportedLanguages;
public string[] BaseUris { get; init; } = baseUris;
public abstract (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "");
public abstract (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url);
public abstract (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId);
public abstract Chapter[] GetChapters(Manga manga, string language="en");
[JsonIgnore]
[NotMapped]
internal DownloadClient downloadClient { get; init; } = null!;
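//New chapters are those the connector lists that are not yet marked as downloaded locally.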
public Chapter[] GetNewChapters(Manga manga)
{
Chapter[] allChapters = GetChapters(manga);
if (allChapters.Length < 1)
return [];
return allChapters.Where(chapter => !chapter.IsDownloaded()).ToArray();
}
internal abstract string[] GetChapterImageUrls(Chapter chapter);
}


@ -1,188 +0,0 @@
using System.Net;
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using HtmlAgilityPack;
namespace API.Schema.MangaConnectors;
public class MangaLife : MangaConnector
{
public MangaLife() : base("Manga4Life", ["en"], ["manga4life.com"])
{
this.downloadClient = new ChromiumDownloadClient();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = WebUtility.UrlEncode(publicationTitle);
string requestUrl = $"https://manga4life.com/search/?name={sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return [];
if (requestResult.htmlDocument is null)
return [];
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://manga4life.com/manga/{publicationId}");
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
Regex publicationIdRex = new(@"https:\/\/(www\.)?manga4life.com\/manga\/(.*)(\/.*)*");
string publicationId = publicationIdRex.Match(url).Groups[2].Value;
RequestResult requestResult = this.downloadClient.MakeRequest(url, RequestType.MangaInfo);
if(requestResult.htmlDocument is not null)
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
return null;
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
{
HtmlNode resultsNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']/div[last()]/div[1]/div");
if (resultsNode.Descendants("div").Count() == 1 && resultsNode.Descendants("div").First().HasClass("NoResults"))
{
return [];
}
List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
foreach (HtmlNode resultNode in resultsNode.SelectNodes("div"))
{
string url = resultNode.Descendants().First(d => d.HasClass("SeriesName")).GetAttributeValue("href", "");
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl($"https://manga4life.com{url}");
if (manga is { } x)
ret.Add(x);
}
return ret.ToArray();
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string originalLanguage = "", status = "";
Dictionary<string, string> altTitles = new(), links = new();
HashSet<string> tags = new();
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//img");
string coverUrl = posterNode.GetAttributeValue("src", "");
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//h1");
string sortName = titleNode.InnerText;
HtmlNode[] authorsNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Author(s):']/..").Descendants("a")
.ToArray();
List<string> authorNames = new();
foreach (HtmlNode authorNode in authorsNodes)
authorNames.Add(authorNode.InnerText);
List<Author> authors = authorNames.Select(a => new Author(a)).ToList();
HtmlNode[] genreNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Genre(s):']/..").Descendants("a")
.ToArray();
foreach (HtmlNode genreNode in genreNodes)
tags.Add(genreNode.InnerText);
List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
HtmlNode yearNode = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Released:']/..").Descendants("a")
.First();
uint year = uint.Parse(yearNode.InnerText);
HtmlNode[] statusNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Status:']/..").Descendants("a")
.ToArray();
foreach (HtmlNode statusNode in statusNodes)
if (statusNode.InnerText.Contains("publish", StringComparison.CurrentCultureIgnoreCase))
status = statusNode.InnerText.Split(' ')[0];
switch (status.ToLower())
{
case "cancelled": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "hiatus": releaseStatus = MangaReleaseStatus.OnHiatus; break;
case "discontinued": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "complete": releaseStatus = MangaReleaseStatus.Completed; break;
case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
}
HtmlNode descriptionNode = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Description:']/..")
.Descendants("div").First();
string description = descriptionNode.InnerText;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
[],
[]);
return (manga, authors, mangaTags, [], []);
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
RequestResult result = downloadClient.MakeRequest($"https://manga4life.com/manga/{manga.MangaId}", RequestType.Default, clickButton:"[class*='ShowAllChapters']");
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
{
return Array.Empty<Chapter>();
}
HtmlNodeCollection chapterNodes = result.htmlDocument.DocumentNode.SelectNodes(
"//a[contains(concat(' ',normalize-space(@class),' '),' ChapterLink ')]");
string[] urls = chapterNodes.Select(node => node.GetAttributeValue("href", "")).ToArray();
Regex urlRex = new (@"-chapter-([0-9\\.]+)(-index-([0-9\\.]+))?");
List<Chapter> chapters = new();
foreach (string url in urls)
{
Match rexMatch = urlRex.Match(url);
int? volumeNumber = rexMatch.Groups[3].Success && rexMatch.Groups[3].Value.Length > 0
? int.Parse(rexMatch.Groups[3].Value)
: null;
if(!ChapterNumber.CanParse(rexMatch.Groups[1].Value))
continue;
ChapterNumber chapterNumber = new(rexMatch.Groups[1].Value);
string fullUrl = $"https://manga4life.com{url}";
fullUrl = fullUrl.Replace(Regex.Match(url,"(-page-[0-9])").Value,"");
try
{
chapters.Add(new Chapter(manga, fullUrl, chapterNumber, volumeNumber, null));
}
catch (Exception)
{
//Skip chapters that cannot be constructed from the parsed values
}
}
//Return Chapters ordered by Chapter-Number
return chapters.Order().ToArray();
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
RequestResult requestResult = this.downloadClient.MakeRequest(chapter.Url, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
{
return [];
}
HtmlDocument document = requestResult.htmlDocument;
HtmlNode gallery = document.DocumentNode.Descendants("div").First(div => div.HasClass("ImageGallery"));
HtmlNode[] images = gallery.Descendants("img").Where(img => img.HasClass("img-fluid")).ToArray();
List<string> urls = new();
foreach(HtmlNode galleryImage in images)
urls.Add(galleryImage.GetAttributeValue("src", ""));
return urls.ToArray();
}
}


@ -1,226 +0,0 @@
using System.Globalization;
using System.Net;
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using HtmlAgilityPack;
namespace API.Schema.MangaConnectors;
public class Manganato : MangaConnector
{
public Manganato() : base("Manganato", ["en"], ["manganato.com"])
{
this.downloadClient = new HttpDownloadClient();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join('_', Regex.Matches(publicationTitle, "[a-zA-Z]*").Where(str => str.Length > 0)).ToLower();
string requestUrl = $"https://manganato.com/search/story/{sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 ||requestResult.htmlDocument is null)
return [];
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
{
List<HtmlNode> searchResults = document.DocumentNode.Descendants("div").Where(n => n.HasClass("search-story-item")).ToList();
List<string> urls = new();
foreach (HtmlNode mangaResult in searchResults)
{
urls.Add(mangaResult.Descendants("a").First(n => n.HasClass("item-title")).GetAttributes()
.First(a => a.Name == "href").Value);
}
List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
foreach (string url in urls)
{
(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}
return ret.ToArray();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://chapmanganato.com/{publicationId}");
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
RequestResult requestResult =
downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return null;
if (requestResult.htmlDocument is null)
return null;
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
Dictionary<string, string> altTitlesDict = new();
Dictionary<string, string>? links = null;
HashSet<string> tags = new();
string[] authorNames = [];
string originalLanguage = "";
MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
HtmlNode infoNode = document.DocumentNode.Descendants("div").First(d => d.HasClass("story-info-right"));
string sortName = infoNode.Descendants("h1").First().InnerText;
HtmlNode infoTable = infoNode.Descendants().First(d => d.Name == "table");
foreach (HtmlNode row in infoTable.Descendants("tr"))
{
string key = row.SelectNodes("td").First().InnerText.ToLower();
string value = row.SelectNodes("td").Last().InnerText;
string keySanitized = string.Concat(Regex.Matches(key, "[a-z]"));
switch (keySanitized)
{
case "alternative":
string[] alts = value.Split(" ; ");
for(int i = 0; i < alts.Length; i++)
altTitlesDict.Add(i.ToString(), alts[i]);
break;
case "authors":
authorNames = value.Split('-');
for (int i = 0; i < authorNames.Length; i++)
authorNames[i] = authorNames[i].Replace("\r\n", "");
break;
case "status":
switch (value.ToLower())
{
case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
case "completed": releaseStatus = MangaReleaseStatus.Completed; break;
}
break;
case "genres":
string[] genres = value.Split(" - ");
for (int i = 0; i < genres.Length; i++)
genres[i] = genres[i].Replace("\r\n", "");
tags = genres.ToHashSet();
break;
}
}
List<Author> authors = authorNames.Select(n => new Author(n)).ToList();
List<MangaTag> mangaTags = tags.Select(n => new MangaTag(n)).ToList();
List<MangaAltTitle> mangaAltTitles = altTitlesDict.Select(a => new MangaAltTitle(a.Key, a.Value)).ToList();
string coverUrl = document.DocumentNode.Descendants("span").First(s => s.HasClass("info-image")).Descendants("img").First()
.GetAttributes().First(a => a.Name == "src").Value;
string description = document.DocumentNode.Descendants("div").First(d => d.HasClass("panel-story-info-description"))
.InnerText.Replace("Description :", "");
while (description.StartsWith('\n'))
description = description.Substring(1);
string pattern = "MMM dd,yyyy HH:mm";
HtmlNode? oldestChapter = document.DocumentNode
.SelectNodes("//span[contains(concat(' ',normalize-space(@class),' '),' chapter-time ')]").MinBy(
node => DateTime.ParseExact(node.GetAttributeValue("title", "Dec 31,2400 23:59"), pattern,
CultureInfo.InvariantCulture));
uint year = (uint)DateTime.ParseExact(oldestChapter?.GetAttributeValue("title", "Dec 31,2400 23:59")??"Dec 31,2400 23:59", pattern,
CultureInfo.InvariantCulture).Year;
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
[],
mangaAltTitles);
return (manga, authors, mangaTags, [], mangaAltTitles);
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
string requestUrl = $"https://chapmanganato.com/{manga.MangaId}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return [];
//Return Chapters ordered by Chapter-Number
if (requestResult.htmlDocument is null)
return [];
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
return chapters.Order().ToArray();
}
private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
{
List<Chapter> ret = new();
HtmlNode chapterList = document.DocumentNode.Descendants("ul").First(l => l.HasClass("row-content-chapter"));
Regex volRex = new(@"Vol\.([0-9]+).*");
Regex chapterRex = new(@"https:\/\/chapmanganato\.[a-zA-Z]+\/manga-[a-zA-Z0-9]+\/chapter-([0-9\.]+)");
Regex nameRex = new(@"Chapter ([0-9]+(\.[0-9]+)*){1}:? (.*)");
foreach (HtmlNode chapterInfo in chapterList.Descendants("li"))
{
string fullString = chapterInfo.Descendants("a").First(d => d.HasClass("chapter-name")).InnerText;
string url = chapterInfo.Descendants("a").First(d => d.HasClass("chapter-name"))
.GetAttributeValue("href", "");
int? volumeNumber = volRex.IsMatch(fullString)
? int.Parse(volRex.Match(fullString).Groups[1].Value)
: null;
if(!ChapterNumber.CanParse(chapterRex.Match(url).Groups[1].Value))
continue;
ChapterNumber chapterNumber = new(chapterRex.Match(url).Groups[1].Value);
string chapterName = nameRex.Match(fullString).Groups[3].Value;
try
{
ret.Add(new Chapter(manga, url, chapterNumber, volumeNumber, chapterName));
}
catch (Exception)
{
//Skip chapters that cannot be constructed from the parsed values
}
}
ret.Reverse();
return ret;
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
string requestUrl = chapter.Url;
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 ||
requestResult.htmlDocument is null)
{
return [];
}
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
return imageUrls;
}
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
{
List<string> ret = new();
HtmlNode imageContainer =
document.DocumentNode.Descendants("div").First(i => i.HasClass("container-chapter-reader"));
foreach(HtmlNode imageNode in imageContainer.Descendants("img"))
ret.Add(imageNode.GetAttributeValue("src", ""));
return ret.ToArray();
}
}


@ -1,228 +0,0 @@
using System.Text.RegularExpressions;
using API.MangaDownloadClients;
using HtmlAgilityPack;
using Soenneker.Utils.String.NeedlemanWunsch;
namespace API.Schema.MangaConnectors;
public class Weebcentral : MangaConnector
{
private readonly string _baseUrl = "https://weebcentral.com";
private readonly string[] _filterWords =
{ "a", "the", "of", "as", "to", "no", "for", "on", "with", "be", "and", "in", "wa", "at", "be", "ni" };
public Weebcentral() : base("Weebcentral", ["en"], ["https://weebcentral.com"])
{
downloadClient = new ChromiumDownloadClient();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
{
const int limit = 32; //How many values we want returned at once
var offset = 0; //"Page"
var requestUrl =
$"{_baseUrl}/search/data?limit={limit}&offset={offset}&text={publicationTitle}&sort=Best+Match&order=Ascending&official=Any&display_mode=Minimal%20Display";
var requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 ||
requestResult.htmlDocument == null)
{
return [];
}
var publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
return publications;
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
{
if (document.DocumentNode.SelectNodes("//article") == null)
return [];
var urls = document.DocumentNode.SelectNodes("/html/body/article/a[@class='link link-hover']")
.Select(elem => elem.GetAttributeValue("href", "")).ToList();
List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
foreach (var url in urls)
{
var manga = GetMangaFromUrl(url);
if (manga is { } x)
ret.Add(x);
}
return ret.ToArray();
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
{
Regex publicationIdRex = new(@"https:\/\/weebcentral\.com\/series\/(\w*)\/(.*)");
var publicationId = publicationIdRex.Match(url).Groups[1].Value;
var requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 300 && (int)requestResult.statusCode >= 200 &&
requestResult.htmlDocument is not null)
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
return null;
}
private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
var posterNode =
document.DocumentNode.SelectSingleNode("//section[@class='flex items-center justify-center']/picture/img");
var coverUrl = posterNode?.GetAttributeValue("src", "") ?? "";
var titleNode = document.DocumentNode.SelectSingleNode("//section/h1");
var sortName = titleNode?.InnerText ?? "Undefined";
HtmlNode[] authorsNodes =
document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Author(s): ']/span")?.ToArray() ?? [];
var authorNames = authorsNodes.Select(n => n.InnerText).ToList();
List<Author> authors = authorNames.Select(n => new Author(n)).ToList();
HtmlNode[] genreNodes =
document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Tags(s): ']/span")?.ToArray() ?? [];
HashSet<string> tags = genreNodes.Select(n => n.InnerText).ToHashSet();
List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
var statusNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Status: ']/a");
var status = statusNode?.InnerText ?? "";
var releaseStatus = MangaReleaseStatus.Unreleased;
switch (status.ToLower())
{
case "cancelled": releaseStatus = MangaReleaseStatus.Cancelled; break;
case "hiatus": releaseStatus = MangaReleaseStatus.OnHiatus; break;
case "complete": releaseStatus = MangaReleaseStatus.Completed; break;
case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
}
var yearNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Released: ']/span");
var year = uint.Parse(yearNode?.InnerText ?? "0");
var descriptionNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Description']/p");
var description = descriptionNode?.InnerText ?? "Undefined";
HtmlNode[] altTitleNodes = document.DocumentNode
.SelectNodes("//ul/li[strong/text() = 'Associated Name(s)']/ul/li")?.ToArray() ?? [];
Dictionary<string, string> altTitlesDict = new(), links = new();
for (var i = 0; i < altTitleNodes.Length; i++)
altTitlesDict.Add(i.ToString(), altTitleNodes[i].InnerText);
List<MangaAltTitle> altTitles = altTitlesDict.Select(a => new MangaAltTitle(a.Key, a.Value)).ToList();
var originalLanguage = "";
Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
originalLanguage, releaseStatus, -1,
this,
authors,
mangaTags,
[],
altTitles);
return (manga, authors, mangaTags, [], altTitles);
}
public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://weebcentral.com/series/{publicationId}");
}
private string ToFilteredString(string input)
{
return string.Join(' ', input.ToLower().Split(' ').Where(word => _filterWords.Contains(word) == false));
}
private SearchResult[] FilteredResults(string publicationTitle, SearchResult[] unfilteredSearchResults)
{
Dictionary<SearchResult, int> similarity = new();
foreach (var sr in unfilteredSearchResults)
{
List<int> scores = new();
var filteredPublicationString = ToFilteredString(publicationTitle);
var filteredSString = ToFilteredString(sr.s);
scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(filteredSString, filteredPublicationString));
foreach (var srA in sr.a)
{
var filteredAString = ToFilteredString(srA);
scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(filteredAString, filteredPublicationString));
}
similarity.Add(sr, scores.Sum() / scores.Count);
}
var ret = similarity.OrderBy(s => s.Value).Take(10).Select(s => s.Key).ToList();
return ret.ToArray();
}
public override Chapter[] GetChapters(Manga manga, string language = "en")
{
var requestUrl = $"{_baseUrl}/series/{manga.MangaId}/full-chapter-list";
var requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Chapter>();
//Return Chapters ordered by Chapter-Number
if (requestResult.htmlDocument is null)
return Array.Empty<Chapter>();
var chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
return chapters.Order().ToArray();
}
private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
{
var chaptersWrapper = document.DocumentNode.SelectSingleNode("/html/body");
Regex chapterRex = new(@".* (\d+)");
Regex idRex = new(@"https:\/\/weebcentral\.com\/chapters\/(\w*)");
var ret = chaptersWrapper.Descendants("a").Select(elem =>
{
var url = elem.GetAttributeValue("href", "") ?? "Undefined";
if (!url.StartsWith("https://") && !url.StartsWith("http://"))
return new Chapter(manga, "undefined", new ChapterNumber(-1), null, null);
var idMatch = idRex.Match(url);
var id = idMatch.Success ? idMatch.Groups[1].Value : null;
var chapterNode = elem.SelectSingleNode("span[@class='grow flex items-center gap-2']/span")?.InnerText ??
"Undefined";
var chapterNumberMatch = chapterRex.Match(chapterNode);
if(!chapterNumberMatch.Success || !ChapterNumber.CanParse(chapterNumberMatch.Groups[1].Value))
return new Chapter(manga, "undefined", new ChapterNumber(-1), null, null);
ChapterNumber chapterNumber = new(chapterNumberMatch.Groups[1].Value);
return new Chapter(manga, url, chapterNumber, null, null);
}).Where(elem => elem.ChapterNumber >= ChapterNumber.Zero && elem.Url != "undefined").ToList();
ret.Reverse();
return ret;
}
internal override string[] GetChapterImageUrls(Chapter chapter)
{
var requestResult = downloadClient.MakeRequest(chapter.Url, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 ||requestResult.htmlDocument is null)
{
return [];
}
var document = requestResult.htmlDocument;
var imageNodes =
document.DocumentNode.SelectNodes($"//section[@hx-get='{chapter.Url}/images']/img")?.ToArray() ?? [];
var urls = imageNodes.Select(imgNode => imgNode.GetAttributeValue("src", "")).ToArray();
return urls;
}
private struct SearchResult
{
public string i { get; set; }
public string s { get; set; }
public string[] a { get; set; }
}
}


@ -1,10 +0,0 @@
namespace API.Schema;
public enum MangaReleaseStatus : byte
{
Continuing = 0,
Completed = 1,
OnHiatus = 2,
Cancelled = 3,
Unreleased = 4
}


@ -1,9 +0,0 @@
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
[PrimaryKey("Tag")]
public class MangaTag(string tag)
{
public string Tag { get; init; } = tag;
}


@ -1,21 +0,0 @@
using System.ComponentModel.DataAnnotations;
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
[PrimaryKey("NotificationId")]
public class Notification(string title, string message = "", NotificationUrgency urgency = NotificationUrgency.Normal, DateTime? date = null)
{
[MaxLength(64)]
public string NotificationId { get; init; } = TokenGen.CreateToken("Notification", 64);
public NotificationUrgency Urgency { get; init; } = urgency;
public string Title { get; init; } = title;
public string Message { get; init; } = message;
public DateTime Date { get; init; } = date ?? DateTime.UtcNow;
public Notification() : this("") { }
}


@ -1,20 +0,0 @@
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using Microsoft.EntityFrameworkCore;
using Newtonsoft.Json;
namespace API.Schema.NotificationConnectors;
[PrimaryKey("NotificationConnectorId")]
public abstract class NotificationConnector(string notificationConnectorId, NotificationConnectorType notificationConnectorType)
{
[MaxLength(64)]
public string NotificationConnectorId { get; } = notificationConnectorId;
public NotificationConnectorType NotificationConnectorType { get; init; } = notificationConnectorType;
[JsonIgnore]
[NotMapped]
protected readonly HttpClient _client = new();
public abstract void SendNotification(string title, string notificationText);
}


@ -1,9 +0,0 @@
namespace API.Schema.NotificationConnectors;
public enum NotificationConnectorType : byte
{
Gotify = 0,
LunaSea = 1,
Ntfy = 2
}


@ -1,8 +0,0 @@
namespace API.Schema;
public enum NotificationUrgency : byte
{
Low = 1,
Normal = 3,
High = 5
}


@ -1,108 +0,0 @@
using API.Schema.Jobs;
using API.Schema.LibraryConnectors;
using API.Schema.MangaConnectors;
using API.Schema.NotificationConnectors;
using Microsoft.EntityFrameworkCore;
namespace API.Schema;
public class PgsqlContext(DbContextOptions<PgsqlContext> options) : DbContext(options)
{
public DbSet<Job> Jobs { get; set; }
public DbSet<MangaConnector> MangaConnectors { get; set; }
public DbSet<Manga> Manga { get; set; }
public DbSet<Chapter> Chapters { get; set; }
public DbSet<Author> Authors { get; set; }
public DbSet<Link> Link { get; set; }
public DbSet<MangaTag> Tags { get; set; }
public DbSet<MangaAltTitle> AltTitles { get; set; }
public DbSet<LibraryConnector> LibraryConnectors { get; set; }
public DbSet<NotificationConnector> NotificationConnectors { get; set; }
public DbSet<Notification> Notifications { get; set; }
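//Maps connector, notification and job subclasses to single tables via discriminators, and auto-includes the navigations that jobs, manga and chapters need at runtime.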
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.Entity<MangaConnector>()
.HasDiscriminator(c => c.Name)
.HasValue<AsuraToon>("AsuraToon")
.HasValue<Bato>("Bato")
.HasValue<MangaHere>("MangaHere")
.HasValue<MangaKatana>("MangaKatana")
.HasValue<MangaLife>("Manga4Life")
.HasValue<Manganato>("Manganato")
.HasValue<Mangaworld>("Mangaworld")
.HasValue<ManhuaPlus>("ManhuaPlus")
.HasValue<Weebcentral>("Weebcentral")
.HasValue<MangaDex>("MangaDex");
modelBuilder.Entity<LibraryConnector>()
.HasDiscriminator<LibraryType>(l => l.LibraryType)
.HasValue<Komga>(LibraryType.Komga)
.HasValue<Kavita>(LibraryType.Kavita);
modelBuilder.Entity<NotificationConnector>()
.HasDiscriminator<NotificationConnectorType>(n => n.NotificationConnectorType)
.HasValue<Gotify>(NotificationConnectorType.Gotify)
.HasValue<Ntfy>(NotificationConnectorType.Ntfy)
.HasValue<Lunasea>(NotificationConnectorType.LunaSea);
modelBuilder.Entity<Job>()
.HasDiscriminator<JobType>(j => j.JobType)
.HasValue<MoveFileOrFolderJob>(JobType.MoveFileOrFolderJob)
.HasValue<DownloadNewChaptersJob>(JobType.DownloadNewChaptersJob)
.HasValue<DownloadSingleChapterJob>(JobType.DownloadSingleChapterJob)
.HasValue<UpdateMetadataJob>(JobType.UpdateMetaDataJob);
modelBuilder.Entity<Job>()
.HasOne<Job>(j => j.ParentJob)
.WithMany()
.HasForeignKey(j => j.ParentJobId);
modelBuilder.Entity<Job>()
.HasMany<Job>(j => j.DependsOnJobs);
modelBuilder.Entity<DownloadNewChaptersJob>()
.Navigation(dncj => dncj.Manga)
.AutoInclude();
modelBuilder.Entity<DownloadSingleChapterJob>()
.Navigation(dscj => dscj.Chapter)
.AutoInclude();
modelBuilder.Entity<UpdateMetadataJob>()
.Navigation(umj => umj.Manga)
.AutoInclude();
modelBuilder.Entity<Manga>()
.HasOne<MangaConnector>(m => m.MangaConnector)
.WithMany()
.HasForeignKey(m => m.MangaConnectorId);
modelBuilder.Entity<Manga>()
.Navigation(m => m.MangaConnector)
.AutoInclude();
modelBuilder.Entity<Manga>()
.HasMany<Author>(m => m.Authors)
.WithMany();
modelBuilder.Entity<Manga>()
.Navigation(m => m.Authors)
.AutoInclude();
modelBuilder.Entity<Manga>()
.HasMany<MangaTag>(m => m.Tags)
.WithMany();
modelBuilder.Entity<Manga>()
.Navigation(m => m.Tags)
.AutoInclude();
modelBuilder.Entity<Manga>()
.HasMany<Link>(m => m.Links)
.WithOne();
modelBuilder.Entity<Manga>()
.Navigation(m => m.Links)
.AutoInclude();
modelBuilder.Entity<Manga>()
.HasMany<MangaAltTitle>(m => m.AltTitles)
.WithOne();
modelBuilder.Entity<Manga>()
.Navigation(m => m.AltTitles)
.AutoInclude();
modelBuilder.Entity<Chapter>()
.HasOne<Manga>(c => c.ParentManga)
.WithMany()
.HasForeignKey(c => c.ParentMangaId);
modelBuilder.Entity<Chapter>()
.Navigation(c => c.ParentManga)
.AutoInclude();
}
}


@ -1,40 +0,0 @@
using System.Security.Cryptography;
using System.Text;
namespace API;
public static class TokenGen
{
private const uint MinimumLength = 8;
private const string Chars = "abcdefghijklmnopqrstuvwxyz0123456789";
public static string CreateToken(Type t, uint fullLength) => CreateToken(t.Name, fullLength);
public static string CreateToken(string prefix, uint fullLength)
{
if (prefix.Length + 1 >= fullLength - MinimumLength)
throw new ArgumentException("Prefix to long to create Token of meaningful length.");
long l = fullLength - prefix.Length - 1;
byte[] rng = new byte[l];
RandomNumberGenerator.Create().GetBytes(rng);
string key = new (rng.Select(b => Chars[b % Chars.Length]).ToArray());
key = string.Join('-', prefix, key);
return key;
}
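//Deterministic variant: derives the token suffix from the XOR of the MD5 hashes of the supplied keys, so the same keys always produce the same token.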
public static string CreateTokenHash(string prefix, uint fullLength, string[] keys)
{
if (prefix.Length + 1 >= fullLength - MinimumLength)
throw new ArgumentException("Prefix to long to create Token of meaningful length.");
int l = (int)(fullLength - prefix.Length - 1);
MD5 md5 = MD5.Create();
byte[][] hashes = keys.Select(key => md5.ComputeHash(Encoding.UTF8.GetBytes(key))).ToArray();
byte[] xOrHash = new byte[l];
foreach (byte[] hash in hashes)
for(int i = 0; i < l; i++)
xOrHash[i] = (byte)(xOrHash[i] ^ (i >= hash.Length ? 0 : hash[i]));
string key = new (xOrHash.Select(b => Chars[b % Chars.Length]).ToArray());
key = string.Join('-', prefix, key);
return key;
}
}


@ -1,104 +0,0 @@
using API.Schema;
using API.Schema.Jobs;
using API.Schema.NotificationConnectors;
using log4net;
using log4net.Config;
namespace API;
public static class Tranga
{
public static Thread NotificationSenderThread { get; } = new (NotificationSender);
public static Thread JobStarterThread { get; } = new (JobStarter);
private static readonly Dictionary<Thread, Job> RunningJobs = new();
private static readonly ILog Log = LogManager.GetLogger(typeof(Tranga));
internal static void StartLogger()
{
BasicConfigurator.Configure();
}
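//Background loop: removes stale low-urgency notifications on startup, then periodically flushes pending notifications to every configured notification connector.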
private static void NotificationSender(object? pgsqlContext)
{
if(pgsqlContext is null) return;
PgsqlContext context = (PgsqlContext)pgsqlContext;
IQueryable<Notification> staleNotifications = context.Notifications.Where(n => n.Urgency < NotificationUrgency.Normal);
context.Notifications.RemoveRange(staleNotifications);
context.SaveChanges();
while (true)
{
SendNotifications(context, NotificationUrgency.High);
SendNotifications(context, NotificationUrgency.Normal);
SendNotifications(context, NotificationUrgency.Low);
context.SaveChanges();
Thread.Sleep(2000);
}
}
private static void SendNotifications(PgsqlContext context, NotificationUrgency urgency)
{
List<Notification> notifications = context.Notifications.Where(n => n.Urgency == urgency).ToList();
if (notifications.Any())
{
DateTime max = notifications.MaxBy(n => n.Date)!.Date;
if (DateTime.Now.Subtract(max) > TrangaSettings.NotificationUrgencyDelay(urgency))
{
foreach (NotificationConnector notificationConnector in context.NotificationConnectors)
{
foreach (Notification notification in notifications)
notificationConnector.SendNotification(notification.Title, notification.Message);
}
context.Notifications.RemoveRange(notifications);
}
}
context.SaveChanges();
}
private static void JobStarter(object? pgsqlContext)
{
if(pgsqlContext is null) return;
PgsqlContext context = (PgsqlContext)pgsqlContext;
string TRANGA = "\n\n _______ \n|_ _|.----..---.-..-----..-----..---.-.\n | | | _|| _ || || _ || _ |\n |___| |__| |___._||__|__||___ ||___._|\n |_____| \n\n";
Log.Info(TRANGA);
while (true)
{
List<Job> completedJobs = context.Jobs.Where(j => j.state == JobState.Completed).ToList();
foreach (Job job in completedJobs)
if(job.RecurrenceMs <= 0)
context.Jobs.Remove(job);
else
{
job.LastExecution = DateTime.UtcNow;
job.state = JobState.Waiting;
context.Jobs.Update(job);
}
List<Job> runJobs = context.Jobs.Where(j => j.state <= JobState.Running).ToList().Where(j => j.NextExecution < DateTime.UtcNow).ToList();
foreach (Job job in runJobs)
{
Thread t = new (() =>
{
IEnumerable<Job> newJobs = job.Run(context);
context.Jobs.AddRange(newJobs);
});
RunningJobs.Add(t, job);
t.Start();
context.Jobs.Update(job);
}
(Thread, Job)[] removeFromThreadsList = RunningJobs.Where(t => !t.Key.IsAlive)
.Select(t => (t.Key, t.Value)).ToArray();
foreach ((Thread thread, Job job) thread in removeFromThreadsList)
{
RunningJobs.Remove(thread.thread);
context.Jobs.Update(thread.job);
}
context.SaveChanges();
Thread.Sleep(2000);
}
}
}
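For context, both worker threads above are ParameterizedThreadStart delegates that expect the database context as their start argument; a minimal hedged sketch of how this class would be driven (PgsqlContext construction is not shown in this file and is assumed here):

// Illustrative only; `context` is an already-configured PgsqlContext instance (assumed).
Tranga.StartLogger();
Tranga.NotificationSenderThread.Start(context);
Tranga.JobStarterThread.Start(context);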


@ -1,191 +0,0 @@
using System.Runtime.InteropServices;
using API.MangaDownloadClients;
using API.Schema;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using static System.IO.UnixFileMode;
namespace API;
public static class TrangaSettings
{
public static string downloadLocation { get; private set; } = (RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? "/Manga" : Path.Join(Directory.GetCurrentDirectory(), "Downloads"));
public static string workingDirectory { get; private set; } = Path.Join(RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? "/usr/share" : Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "tranga-api");
public static int apiPortNumber { get; private set; } = 6531;
[JsonIgnore]
internal static readonly string DefaultUserAgent = $"Tranga ({Enum.GetName(Environment.OSVersion.Platform)}; {(Environment.Is64BitOperatingSystem ? "x64" : "")}) / 1.0";
public static string userAgent { get; private set; } = DefaultUserAgent;
public static int compression{ get; private set; } = 40;
public static bool bwImages { get; private set; } = false;
[JsonIgnore]
public static string settingsFilePath => Path.Join(workingDirectory, "settings.json");
[JsonIgnore]
public static string coverImageCache => Path.Join(workingDirectory, "imageCache");
public static bool aprilFoolsMode { get; private set; } = true;
[JsonIgnore]
internal static readonly Dictionary<RequestType, int> DefaultRequestLimits = new ()
{
{RequestType.MangaInfo, 250},
{RequestType.MangaDexFeed, 250},
{RequestType.MangaDexImage, 40},
{RequestType.MangaImage, 60},
{RequestType.MangaCover, 250},
{RequestType.Default, 60}
};
public static Dictionary<RequestType, int> requestLimits { get; set; } = DefaultRequestLimits;
public static TimeSpan NotificationUrgencyDelay(NotificationUrgency urgency) => urgency switch
{
NotificationUrgency.High => TimeSpan.Zero,
NotificationUrgency.Normal => TimeSpan.FromMinutes(5),
NotificationUrgency.Low => TimeSpan.FromMinutes(10),
_ => TimeSpan.FromHours(1)
};
public static void Load()
{
if(File.Exists(settingsFilePath))
Deserialize(File.ReadAllText(settingsFilePath));
else return;
Directory.CreateDirectory(downloadLocation);
ExportSettings();
}
public static void UpdateAprilFoolsMode(bool enabled)
{
aprilFoolsMode = enabled;
ExportSettings();
}
public static void UpdateCompressImages(int value)
{
compression = int.Clamp(value, 1, 100);
ExportSettings();
}
public static void UpdateBwImages(bool enabled)
{
bwImages = enabled;
ExportSettings();
}
public static void UpdateDownloadLocation(string newPath, bool moveFiles = true)
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
Directory.CreateDirectory(newPath, GroupRead | GroupWrite | None | OtherRead | OtherWrite | UserRead | UserWrite);
else
Directory.CreateDirectory(newPath);
if (moveFiles)
MoveContentsOfDirectoryTo(TrangaSettings.downloadLocation, newPath);
TrangaSettings.downloadLocation = newPath;
ExportSettings();
}
private static void MoveContentsOfDirectoryTo(string oldDir, string newDir)
{
string[] directoryPaths = Directory.GetDirectories(oldDir);
string[] filePaths = Directory.GetFiles(oldDir);
foreach (string file in filePaths)
{
string newPath = Path.Join(newDir, Path.GetFileName(file));
File.Move(file, newPath, true);
}
foreach(string directory in directoryPaths)
{
string dirName = Path.GetFileName(directory); //GetDirectoryName would yield the parent path; we need the subdirectory's own name
if(dirName.Length == 0)
continue;
string newPath = Path.Join(newDir, dirName);
if(Directory.Exists(newPath))
MoveContentsOfDirectoryTo(directory, newPath);
else
Directory.Move(directory, newPath);
}
}
public static void UpdateUserAgent(string? customUserAgent)
{
userAgent = customUserAgent ?? DefaultUserAgent;
ExportSettings();
}
public static void UpdateRateLimit(RequestType requestType, int newLimit)
{
requestLimits[requestType] = newLimit;
ExportSettings();
}
public static void ResetRateLimits()
{
requestLimits = DefaultRequestLimits;
ExportSettings();
}
public static void ExportSettings()
{
if (File.Exists(settingsFilePath))
{
while(IsFileInUse(settingsFilePath))
Thread.Sleep(100);
}
else
Directory.CreateDirectory(new FileInfo(settingsFilePath).DirectoryName!);
File.WriteAllText(settingsFilePath, Serialize());
}
internal static bool IsFileInUse(string filePath)
{
if (!File.Exists(filePath))
return false;
try
{
using FileStream stream = new (filePath, FileMode.Open, FileAccess.Read, FileShare.None);
stream.Close();
return false;
}
catch (IOException)
{
return true;
}
}
public static JObject AsJObject()
{
JObject jobj = new JObject();
jobj.Add("downloadLocation", JToken.FromObject(downloadLocation));
jobj.Add("workingDirectory", JToken.FromObject(workingDirectory));
jobj.Add("apiPortNumber", JToken.FromObject(apiPortNumber));
jobj.Add("userAgent", JToken.FromObject(userAgent));
jobj.Add("aprilFoolsMode", JToken.FromObject(aprilFoolsMode));
jobj.Add("requestLimits", JToken.FromObject(requestLimits));
jobj.Add("compression", JToken.FromObject(compression));
jobj.Add("bwImages", JToken.FromObject(bwImages));
return jobj;
}
public static string Serialize() => AsJObject().ToString();
public static void Deserialize(string serialized)
{
JObject jobj = JObject.Parse(serialized);
if (jobj.TryGetValue("downloadLocation", out JToken? dl))
downloadLocation = dl.Value<string>()!;
if (jobj.TryGetValue("workingDirectory", out JToken? wd))
workingDirectory = wd.Value<string>()!;
if (jobj.TryGetValue("apiPortNumber", out JToken? apn))
apiPortNumber = apn.Value<int>();
if (jobj.TryGetValue("userAgent", out JToken? ua))
userAgent = ua.Value<string>()!;
if (jobj.TryGetValue("aprilFoolsMode", out JToken? afm))
aprilFoolsMode = afm.Value<bool>()!;
if (jobj.TryGetValue("requestLimits", out JToken? rl))
requestLimits = rl.ToObject<Dictionary<RequestType, int>>()!;
if (jobj.TryGetValue("compression", out JToken? ci))
compression = ci.Value<int>()!;
if (jobj.TryGetValue("bwImages", out JToken? bwi))
bwImages = bwi.Value<bool>()!;
}
}
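As a quick illustration of the Serialize/Deserialize round-trip above, this is roughly the JSON written to settings.json with the Linux defaults (an illustrative sketch; the requestLimits keys follow Newtonsoft's enum-name handling for dictionary keys):

// Illustrative only; values are the defaults declared above, user agent abbreviated.
// {
//   "downloadLocation": "/Manga",
//   "workingDirectory": "/usr/share/tranga-api",
//   "apiPortNumber": 6531,
//   "userAgent": "Tranga (...) / 1.0",
//   "aprilFoolsMode": true,
//   "requestLimits": { "MangaInfo": 250, "MangaDexFeed": 250, "MangaDexImage": 40, "MangaImage": 60, "MangaCover": 250, "Default": 60 },
//   "compression": 40,
//   "bwImages": false
// }
Console.WriteLine(TrangaSettings.Serialize());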


@ -1,8 +0,0 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
}
}


@ -1,9 +0,0 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*"
}

CLI/CLI.csproj Normal file

@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>12</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Spectre.Console.Cli" Version="0.49.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Tranga\Tranga.csproj" />
</ItemGroup>
</Project>

CLI/Program.cs Normal file

@ -0,0 +1,157 @@
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using Logging;
using Spectre.Console;
using Spectre.Console.Cli;
using Tranga;
var app = new CommandApp<TrangaCli>();
return app.Run(args);
internal sealed class TrangaCli : Command<TrangaCli.Settings>
{
public sealed class Settings : CommandSettings
{
[Description("Directory to which downloaded Manga are saved")]
[CommandOption("-d|--downloadLocation")]
[DefaultValue(null)]
public string? downloadLocation { get; init; }
[Description("Directory in which application-data is saved")]
[CommandOption("-w|--workingDirectory")]
[DefaultValue(null)]
public string? workingDirectory { get; init; }
[Description("Enables the file-logger")]
[CommandOption("-f")]
[DefaultValue(null)]
public bool? fileLogger { get; init; }
[Description("Path to save logfile to")]
[CommandOption("-l|--fPath")]
[DefaultValue(null)]
public string? fileLoggerPath { get; init; }
[Description("Port on which to run API on")]
[CommandOption("-p|--port")]
[DefaultValue(null)]
public int? apiPort { get; init; }
}
public override int Execute([NotNull] CommandContext context, [NotNull] Settings settings)
{
List<Logger.LoggerType> enabledLoggers = new();
if(settings.fileLogger is true)
enabledLoggers.Add(Logger.LoggerType.FileLogger);
string? logFolderPath = settings.fileLoggerPath ?? "";
Logger logger = new(enabledLoggers.ToArray(), Console.Out, Console.OutputEncoding, logFolderPath);
if(settings.workingDirectory is not null)
TrangaSettings.LoadFromWorkingDirectory(settings.workingDirectory);
else
TrangaSettings.CreateOrUpdate();
if(settings.downloadLocation is not null)
TrangaSettings.CreateOrUpdate(downloadDirectory: settings.downloadLocation);
Tranga.Tranga? api = null;
Thread trangaApi = new Thread(() =>
{
api = new(logger);
});
trangaApi.Start();
HttpClient client = new();
bool exit = false;
while (!exit)
{
string menuSelect = AnsiConsole.Prompt(
new SelectionPrompt<string>()
.Title("Menu")
.PageSize(10)
.MoreChoicesText("Up/Down")
.AddChoices(new[]
{
"CustomRequest",
"Log",
"Exit"
}));
switch (menuSelect)
{
case "CustomRequest":
HttpMethod requestMethod = AnsiConsole.Prompt(
new SelectionPrompt<HttpMethod>()
.Title("Request Type")
.AddChoices(new[]
{
HttpMethod.Get,
HttpMethod.Delete,
HttpMethod.Post
}));
string requestPath = AnsiConsole.Prompt(
new TextPrompt<string>("Request Path:"));
List<ValueTuple<string, string>> parameters = new();
while (AnsiConsole.Confirm("Add Parameter?"))
{
string name = AnsiConsole.Ask<string>("Parameter Name:");
string value = AnsiConsole.Ask<string>("Parameter Value:");
parameters.Add(new ValueTuple<string, string>(name, value));
}
string requestString = $"http://localhost:{TrangaSettings.apiPortNumber}/{requestPath}";
if (parameters.Any())
{
requestString += "?";
foreach (ValueTuple<string, string> parameter in parameters)
requestString += $"{parameter.Item1}={parameter.Item2}&";
}
HttpRequestMessage request = new (requestMethod, requestString);
AnsiConsole.WriteLine($"Request: {request.Method} {request.RequestUri}");
HttpResponseMessage response;
if (AnsiConsole.Confirm("Send Request?"))
response = client.Send(request);
else break;
AnsiConsole.WriteLine($"Response: {(int)response.StatusCode} {response.StatusCode}");
AnsiConsole.WriteLine(response.Content.ReadAsStringAsync().Result);
break;
case "Log":
List<string> lines = logger.Tail(10).ToList();
Rows rows = new Rows(lines.Select(line => new Text(line)));
AnsiConsole.Live(rows).Start(context =>
{
bool running = true;
while (running)
{
string[] newLines = logger.GetNewLines();
if (newLines.Length > 0)
{
lines.AddRange(newLines);
rows = new Rows(lines.Select(line => new Text(line)));
context.UpdateTarget(rows);
}
Thread.Sleep(100);
if (AnsiConsole.Console.Input.IsKeyAvailable())
{
AnsiConsole.Console.Input.ReadKey(true); //Do not process input
running = false;
}
}
});
break;
case "Exit":
exit = true;
break;
}
}
if (api is not null)
api.keepRunning = false;
return 0;
}
}
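For orientation, an illustrative invocation using only the options declared above (paths and port are examples, not defaults from the repository):

dotnet CLI.dll -f -l /var/log/tranga-api -d /Manga -w /usr/share/tranga-api -p 6531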


@ -1,7 +1,7 @@
# syntax=docker/dockerfile:1
-ARG DOTNET=9.0
+ARG DOTNET=8.0
-FROM --platform=$TARGETPLATFORM mcr.microsoft.com/dotnet/aspnet:$DOTNET AS base
+FROM --platform=$TARGETPLATFORM mcr.microsoft.com/dotnet/runtime:$DOTNET AS base
WORKDIR /publish
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium
@ -16,7 +16,9 @@ FROM --platform=$BUILDPLATFORM mcr.microsoft.com/dotnet/sdk:$DOTNET AS build-env
WORKDIR /src
COPY Tranga.sln /src
-COPY API/API.csproj /src/API/API.csproj
+COPY CLI/CLI.csproj /src/CLI/CLI.csproj
+COPY Logging/Logging.csproj /src/Logging/Logging.csproj
+COPY Tranga/Tranga.csproj /src/Tranga/Tranga.csproj
RUN dotnet restore /src/Tranga.sln
COPY . /src/
@ -38,5 +40,5 @@ USER $UNAME
WORKDIR /publish
COPY --chown=1000:1000 --from=build-env /publish .
USER 0
-ENTRYPOINT ["dotnet", "/publish/API.dll"]
+ENTRYPOINT ["dotnet", "/publish/Tranga.dll"]
CMD ["-f", "-c", "-l", "/usr/share/tranga-api/logs"]

Logging/FileLogger.cs Normal file

@ -0,0 +1,32 @@
using System.Text;
namespace Logging;
public class FileLogger : LoggerBase
{
internal string logFilePath { get; }
private const int MaxNumberOfLogFiles = 5;
public FileLogger(string logFilePath, Encoding? encoding = null) : base (encoding)
{
this.logFilePath = logFilePath;
DirectoryInfo dir = Directory.CreateDirectory(new FileInfo(logFilePath).DirectoryName!);
//Remove oldest logfile if more than MaxNumberOfLogFiles
for (int fileCount = dir.EnumerateFiles().Count(); fileCount > MaxNumberOfLogFiles - 1; fileCount--) //-1 because we create our own logfile later
File.Delete(dir.EnumerateFiles().MinBy(file => file.LastWriteTime)!.FullName);
}
protected override void Write(LogMessage logMessage)
{
try
{
File.AppendAllText(logFilePath, logMessage.formattedMessage);
}
catch (Exception)
{
// ignored
}
}
}


@ -0,0 +1,17 @@
using System.Text;
namespace Logging;
public class FormattedConsoleLogger : LoggerBase
{
private readonly TextWriter _stdOut;
public FormattedConsoleLogger(TextWriter stdOut, Encoding? encoding = null) : base(encoding)
{
this._stdOut = stdOut;
}
protected override void Write(LogMessage message)
{
this._stdOut.Write(message.formattedMessage);
}
}

Logging/LogMessage.cs Normal file

@ -0,0 +1,23 @@
namespace Logging;
public readonly struct LogMessage
{
public DateTime logTime { get; }
public string caller { get; }
public string value { get; }
public string formattedMessage => ToString();
public LogMessage(DateTime messageTime, string caller, string value)
{
this.logTime = messageTime;
this.caller = caller;
this.value = value;
}
public override string ToString()
{
string dateTimeString = $"{logTime.ToShortDateString()} {logTime.ToLongTimeString()}.{logTime.Millisecond,-3}";
string name = caller.Split(new char[] { '.', '+' }).Last();
return $"[{dateTimeString}] {name.Substring(0, name.Length >= 13 ? 13 : name.Length),13} | {value}";
}
}
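To make the ToString format above concrete, a small illustrative sketch (not from the repository): the caller is reduced to its last '.'/'+'-separated segment, truncated to at most 13 characters and right-aligned in a 13-character column.

// Illustrative only; assumes the Logging namespace above. The date portion depends on the current culture.
LogMessage msg = new(DateTime.Now, "Tranga.Jobs.JobBoss", "Adding Job to Queue.");
Console.WriteLine(msg.formattedMessage);
// e.g. [01/06/2025 12:34:56.789]       JobBoss | Adding Job to Queue.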

Logging/Logger.cs Normal file

@ -0,0 +1,80 @@
using System.Runtime.InteropServices;
using System.Text;
namespace Logging;
public class Logger : TextWriter
{
private static readonly string LogDirectoryPath = RuntimeInformation.IsOSPlatform(OSPlatform.Linux)
? "/var/log/tranga-api"
: Path.Join(Directory.GetCurrentDirectory(), "logs");
public string? logFilePath => _fileLogger?.logFilePath;
public override Encoding Encoding { get; }
public enum LoggerType
{
FileLogger,
ConsoleLogger
}
private readonly FileLogger? _fileLogger;
private readonly FormattedConsoleLogger? _formattedConsoleLogger;
private readonly MemoryLogger _memoryLogger;
public Logger(LoggerType[] enabledLoggers, TextWriter? stdOut, Encoding? encoding, string? logFolderPath)
{
this.Encoding = encoding ?? Encoding.UTF8;
DateTime now = DateTime.Now;
if(enabledLoggers.Contains(LoggerType.FileLogger) && (logFolderPath is null || logFolderPath == ""))
{
string filePath = Path.Join(LogDirectoryPath,
$"{now.ToShortDateString()}_{now.Hour}-{now.Minute}-{now.Second}.log");
_fileLogger = new FileLogger(filePath, encoding);
}else if (enabledLoggers.Contains(LoggerType.FileLogger) && logFolderPath is not null)
_fileLogger = new FileLogger(Path.Join(logFolderPath, $"{now.ToShortDateString()}_{now.Hour}-{now.Minute}-{now.Second}.log") , encoding);
if (enabledLoggers.Contains(LoggerType.ConsoleLogger) && stdOut is not null)
{
_formattedConsoleLogger = new FormattedConsoleLogger(stdOut, encoding);
}
else if (enabledLoggers.Contains(LoggerType.ConsoleLogger) && stdOut is null)
{
_formattedConsoleLogger = null;
throw new ArgumentException($"stdOut can not be null for LoggerType {LoggerType.ConsoleLogger}");
}
_memoryLogger = new MemoryLogger(encoding);
WriteLine(GetType().ToString(), $"Logfile: {logFilePath}");
}
public void WriteLine(string caller, string? value)
{
value = value is null ? Environment.NewLine : string.Concat(value, Environment.NewLine);
Write(caller, value);
}
public void Write(string caller, string? value)
{
if (value is null)
return;
_fileLogger?.Write(caller, value);
_formattedConsoleLogger?.Write(caller, value);
_memoryLogger.Write(caller, value);
}
public string[] Tail(uint? lines)
{
return _memoryLogger.Tail(lines);
}
public string[] GetNewLines()
{
return _memoryLogger.GetNewLines();
}
public string[] GetLog()
{
return _memoryLogger.GetLogMessages();
}
}

Logging/LoggerBase.cs Normal file

@ -0,0 +1,25 @@
using System.Text;
namespace Logging;
public abstract class LoggerBase : TextWriter
{
public override Encoding Encoding { get; }
public LoggerBase(Encoding? encoding = null)
{
this.Encoding = encoding ?? Encoding.ASCII;
}
public void Write(string caller, string? value)
{
if (value is null)
return;
LogMessage message = new (DateTime.Now, caller, value);
Write(message);
}
protected abstract void Write(LogMessage message);
}

Logging/Logging.csproj Normal file

@ -0,0 +1,10 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>12</LangVersion>
</PropertyGroup>
</Project>

Logging/MemoryLogger.cs Normal file

@ -0,0 +1,74 @@
using System.Text;
namespace Logging;
public class MemoryLogger : LoggerBase
{
private readonly SortedList<DateTime, LogMessage> _logMessages = new();
private int _lastLogMessageIndex = 0;
public MemoryLogger(Encoding? encoding = null) : base(encoding)
{
}
protected override void Write(LogMessage value)
{
lock (_logMessages)
{
_logMessages.Add(DateTime.Now, value);
}
}
public string[] GetLogMessages()
{
return Tail(Convert.ToUInt32(_logMessages.Count));
}
public string[] Tail(uint? length)
{
int retLength;
if (length is null || length > _logMessages.Count)
retLength = _logMessages.Count;
else
retLength = (int)length;
string[] ret = new string[retLength];
for (int retIndex = 0; retIndex < ret.Length; retIndex++)
{
lock (_logMessages)
{
ret[retIndex] = _logMessages.GetValueAtIndex(_logMessages.Count - retLength + retIndex).ToString();
}
}
_lastLogMessageIndex = _logMessages.Count - 1;
return ret;
}
public string[] GetNewLines()
{
int logMessageCount = _logMessages.Count;
List<string> ret = new();
int retIndex = 0;
for (; retIndex < logMessageCount - _lastLogMessageIndex; retIndex++)
{
try
{
lock(_logMessages)
{
ret.Add(_logMessages.GetValueAtIndex(_lastLogMessageIndex + retIndex).ToString());
}
}
catch (NullReferenceException)//Called when LogMessage has not finished writing
{
break;
}
}
_lastLogMessageIndex = _lastLogMessageIndex + retIndex;
return ret.ToArray();
}
}


@ -1,8 +1,12 @@
+# Testers for V2 wanted!
+[Details](https://github.com/C9Glax/tranga/pull/355#issuecomment-2764217944)
<!-- PROJECT LOGO -->
<br />
<div align="center">
-<h3 align="center">Tranga v2</h3>
+<h3 align="center">Tranga</h3>
<p align="center">
Automatic Manga and Metadata downloader
@ -45,14 +49,13 @@ Tranga can download Chapters and Metadata from "Scanlation" sites such as
- [MangaDex.org](https://mangadex.org/) (Multilingual)
- [Manganato.com](https://manganato.com/) (en)
-- [Mangasee.com](https://mangasee123.com/) (en)
- [MangaKatana.com](https://mangakatana.com) (en)
- [Mangaworld.bz](https://www.mangaworld.bz/) (it)
- [Bato.to](https://bato.to/v3x) (en)
-- [Manga4Life](https://manga4life.com) (en)
- [ManhuaPlus](https://manhuaplus.org/) (en)
- [MangaHere](https://www.mangahere.cc/) (en) (Their covers aren't scrapeable.)
- [Weebcentral](https://weebcentral.com) (en)
+- [Webtoons](https://www.webtoons.com/en/)
- ❓ Open an [issue](https://github.com/C9Glax/tranga/issues/new?assignees=&labels=New+Connector&projects=&template=new_connector.yml&title=%5BNew+Connector%5D%3A+)
and trigger a library-scan with [Komga](https://komga.org/) and [Kavita](https://www.kavitareader.com/).
@ -62,8 +65,7 @@ Notifications can be sent to your devices using [Gotify](https://gotify.net/), [
### What this does and doesn't do
Tranga (this git-repo) will open a port (standard 6531) and listen for requests to add Jobs to Monitor and/or download specific Manga.
-The configuration is all done through HTTP-Requests. [Documentation](docs/API_Calls_v2.md)
+The configuration is all done through HTTP-Requests.
_**For a web-frontend use [tranga-website](https://github.com/C9Glax/tranga-website).**_
This project downloads the images for a Manga from the specified Scanlation-Website and packages them with some metadata - from that same website - in a .cbz-archive (per chapter).
@ -91,8 +93,6 @@ That is why I wanted to create my own project, in a language I understand, and t
- [PuppeteerSharp](https://www.puppeteersharp.com/)
- [Html Agility Pack (HAP)](https://html-agility-pack.net/)
- [Soenneker.Utils.String.NeedlemanWunsch](https://github.com/soenneker/soenneker.utils.string.needlemanwunsch)
-- [Sixlabors.ImageSharp](https://docs-v2.sixlabors.com/articles/imagesharp/index.html#license)
-- [zstd-wrapper](https://github.com/oleg-st/ZstdSharp) [zstd](https://github.com/facebook/zstd)
- 💙 Blåhaj 🦈
<p align="right">(<a href="#readme-top">back to top</a>)</p>


@ -1,6 +1,10 @@
 
Microsoft Visual Studio Solution File, Format Version 12.00
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "API", "API\API.csproj", "{EDB07E7B-351F-4FCC-9AEF-777838E5551E}"
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Tranga", ".\Tranga\Tranga.csproj", "{545E81B9-D96B-4C8F-A97F-2C02414DE566}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Logging", "Logging\Logging.csproj", "{415BE889-BB7D-426F-976F-8D977876A462}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CLI", "CLI\CLI.csproj", "{4324C816-F9D2-468F-8ED6-397FE2F0DCB3}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@ -8,9 +12,17 @@ Global
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
-{EDB07E7B-351F-4FCC-9AEF-777838E5551E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-{EDB07E7B-351F-4FCC-9AEF-777838E5551E}.Debug|Any CPU.Build.0 = Debug|Any CPU
-{EDB07E7B-351F-4FCC-9AEF-777838E5551E}.Release|Any CPU.ActiveCfg = Release|Any CPU
-{EDB07E7B-351F-4FCC-9AEF-777838E5551E}.Release|Any CPU.Build.0 = Release|Any CPU
+{545E81B9-D96B-4C8F-A97F-2C02414DE566}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+{545E81B9-D96B-4C8F-A97F-2C02414DE566}.Debug|Any CPU.Build.0 = Debug|Any CPU
+{545E81B9-D96B-4C8F-A97F-2C02414DE566}.Release|Any CPU.ActiveCfg = Release|Any CPU
+{545E81B9-D96B-4C8F-A97F-2C02414DE566}.Release|Any CPU.Build.0 = Release|Any CPU
+{415BE889-BB7D-426F-976F-8D977876A462}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+{415BE889-BB7D-426F-976F-8D977876A462}.Debug|Any CPU.Build.0 = Debug|Any CPU
+{415BE889-BB7D-426F-976F-8D977876A462}.Release|Any CPU.ActiveCfg = Release|Any CPU
+{415BE889-BB7D-426F-976F-8D977876A462}.Release|Any CPU.Build.0 = Release|Any CPU
+{4324C816-F9D2-468F-8ED6-397FE2F0DCB3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+{4324C816-F9D2-468F-8ED6-397FE2F0DCB3}.Debug|Any CPU.Build.0 = Debug|Any CPU
+{4324C816-F9D2-468F-8ED6-397FE2F0DCB3}.Release|Any CPU.ActiveCfg = Release|Any CPU
+{4324C816-F9D2-468F-8ED6-397FE2F0DCB3}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal

Tranga/Chapter.cs Normal file

@ -0,0 +1,159 @@
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using System.Xml.Linq;
using static System.IO.UnixFileMode;
namespace Tranga;
/// <summary>
/// Has to be Part of a publication
/// Includes the Chapter-Name, -VolumeNumber, -ChapterNumber, the location of the chapter on the internet and the saveName of the local file.
/// </summary>
public readonly struct Chapter : IComparable
{
// ReSharper disable once MemberCanBePrivate.Global
public Manga parentManga { get; }
public string? name { get; }
public float volumeNumber { get; }
public float chapterNumber { get; }
public string url { get; }
// ReSharper disable once MemberCanBePrivate.Global
public string fileName { get; }
public string? id { get; }
private static readonly Regex LegalCharacters = new (@"([A-z]*[0-9]* *\.*-*,*\]*\[*'*\'*\)*\(*~*!*)*");
private static readonly Regex IllegalStrings = new(@"(Vol(ume)?|Ch(apter)?)\.?", RegexOptions.IgnoreCase);
public Chapter(Manga parentManga, string? name, string? volumeNumber, string chapterNumber, string url, string? id = null)
: this(parentManga, name, float.Parse(volumeNumber??"0", GlobalBase.numberFormatDecimalPoint),
float.Parse(chapterNumber, GlobalBase.numberFormatDecimalPoint), url, id)
{
}
public Chapter(Manga parentManga, string? name, float? volumeNumber, float chapterNumber, string url, string? id = null)
{
this.parentManga = parentManga;
this.name = name;
this.volumeNumber = volumeNumber??0;
this.chapterNumber = chapterNumber;
this.url = url;
this.id = id;
string chapterVolNumStr = $"Vol.{this.volumeNumber} Ch.{chapterNumber}";
if (name is not null && name.Length > 0)
{
string chapterName = IllegalStrings.Replace(string.Concat(LegalCharacters.Matches(name)), "");
this.fileName = chapterName.Length > 0 ? $"{chapterVolNumStr} - {chapterName}" : chapterVolNumStr;
}
else
this.fileName = chapterVolNumStr;
}
public override string ToString()
{
return $"Chapter {parentManga.sortName} {parentManga.internalId} {chapterNumber} {name}";
}
public override bool Equals(object? obj)
{
if (obj is not Chapter)
return false;
return CompareTo(obj) == 0;
}
public int CompareTo(object? obj)
{
if(obj is not Chapter otherChapter)
throw new ArgumentException($"{obj} can not be compared to {this}");
return volumeNumber.CompareTo(otherChapter.volumeNumber) switch
{
<0 => -1,
>0 => 1,
_ => chapterNumber.CompareTo(otherChapter.chapterNumber)
};
}
/// <summary>
/// Checks if a chapter-archive is already present
/// </summary>
/// <returns>true if chapter is present</returns>
internal bool CheckChapterIsDownloaded()
{
string mangaDirectory = Path.Join(TrangaSettings.downloadLocation, parentManga.folderName);
if (!Directory.Exists(mangaDirectory))
return false;
FileInfo? mangaArchive = null;
string markerPath = Path.Join(mangaDirectory, $".{id}");
if (this.id is not null && File.Exists(markerPath))
{
if(File.Exists(File.ReadAllText(markerPath)))
mangaArchive = new FileInfo(File.ReadAllText(markerPath));
else
File.Delete(markerPath);
}
if(mangaArchive is null)
{
FileInfo[] archives = new DirectoryInfo(mangaDirectory).GetFiles("*.cbz");
Regex volChRex = new(@"(?:Vol(?:ume)?\.([0-9]+)\D*)?Ch(?:apter)?\.([0-9]+(?:\.[0-9]+)*)(?: - (.*))?.cbz");
Chapter t = this;
mangaArchive = archives.FirstOrDefault(archive =>
{
Match m = volChRex.Match(archive.Name);
/*
* 1. If the volumeNumber is not present in the filename, it is not checked.
* 2. Check the chapterNumber in the chapter against the one in the filename.
* 3. The chapterName has to either be absent in both the chapter and the filename, or match.
*/
return (!m.Groups[1].Success || m.Groups[1].Value == t.volumeNumber.ToString(GlobalBase.numberFormatDecimalPoint)) &&
m.Groups[2].Value == t.chapterNumber.ToString(GlobalBase.numberFormatDecimalPoint) &&
((!m.Groups[3].Success && string.IsNullOrEmpty(t.name)) || m.Groups[3].Value == t.name);
});
}
string correctPath = GetArchiveFilePath();
if(mangaArchive is not null && mangaArchive.FullName != correctPath)
mangaArchive.MoveTo(correctPath, true);
return (mangaArchive is not null);
}
public void CreateChapterMarker()
{
if (this.id is null)
return;
string path = Path.Join(TrangaSettings.downloadLocation, parentManga.folderName, $".{id}");
File.WriteAllText(path, GetArchiveFilePath());
File.SetAttributes(path, FileAttributes.Hidden);
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(path, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute | OtherRead | OtherExecute);
}
/// <summary>
/// Creates full file path of chapter-archive
/// </summary>
/// <returns>Filepath</returns>
internal string GetArchiveFilePath()
{
return Path.Join(TrangaSettings.downloadLocation, parentManga.folderName, $"{parentManga.folderName} - {this.fileName}.cbz");
}
/// <summary>
/// Creates a string containing XML of publication and chapter.
/// See ComicInfo.xml
/// </summary>
/// <returns>XML-string</returns>
internal string GetComicInfoXmlString()
{
XElement comicInfo = new XElement("ComicInfo",
new XElement("Tags", string.Join(',', parentManga.tags)),
new XElement("LanguageISO", parentManga.originalLanguage),
new XElement("Title", this.name),
new XElement("Writer", string.Join(',', parentManga.authors)),
new XElement("Volume", this.volumeNumber),
new XElement("Number", this.chapterNumber)
);
return comicInfo.ToString();
}
}

Tranga/GlobalBase.cs Normal file

@ -0,0 +1,143 @@
using System.Globalization;
using System.Text.RegularExpressions;
using Logging;
using Newtonsoft.Json;
using Tranga.LibraryConnectors;
using Tranga.NotificationConnectors;
namespace Tranga;
public abstract class GlobalBase
{
[JsonIgnore]
public Logger? logger { get; init; }
protected HashSet<NotificationConnector> notificationConnectors { get; init; }
protected HashSet<LibraryConnector> libraryConnectors { get; init; }
private Dictionary<string, Manga> cachedPublications { get; init; }
public static readonly NumberFormatInfo numberFormatDecimalPoint = new (){ NumberDecimalSeparator = "." };
protected static readonly Regex baseUrlRex = new(@"https?:\/\/[0-9A-z\.-]+(:[0-9]+)?");
protected GlobalBase(GlobalBase clone)
{
this.logger = clone.logger;
this.notificationConnectors = clone.notificationConnectors;
this.libraryConnectors = clone.libraryConnectors;
this.cachedPublications = clone.cachedPublications;
}
protected GlobalBase(Logger? logger)
{
this.logger = logger;
this.notificationConnectors = TrangaSettings.LoadNotificationConnectors(this);
this.libraryConnectors = TrangaSettings.LoadLibraryConnectors(this);
this.cachedPublications = new();
}
protected void AddMangaToCache(Manga manga)
{
if (!this.cachedPublications.TryAdd(manga.internalId, manga))
{
Log($"Overwriting Manga {manga.internalId}");
this.cachedPublications[manga.internalId] = manga;
}
}
protected Manga? GetCachedManga(string internalId)
{
return cachedPublications.TryGetValue(internalId, out Manga manga) switch
{
true => manga,
_ => null
};
}
protected IEnumerable<Manga> GetAllCachedManga()
{
return cachedPublications.Values;
}
protected void Log(string message)
{
logger?.WriteLine(this.GetType().Name, message);
}
protected void Log(string fStr, params object?[] replace)
{
Log(string.Format(fStr, replace));
}
protected void SendNotifications(string title, string text, bool buffer = false)
{
foreach (NotificationConnector nc in notificationConnectors)
nc.SendNotification(title, text, buffer);
}
protected void AddNotificationConnector(NotificationConnector notificationConnector)
{
Log($"Adding {notificationConnector}");
notificationConnectors.RemoveWhere(nc => nc.notificationConnectorType == notificationConnector.notificationConnectorType);
notificationConnectors.Add(notificationConnector);
while(IsFileInUse(TrangaSettings.notificationConnectorsFilePath))
Thread.Sleep(100);
Log("Exporting notificationConnectors");
File.WriteAllText(TrangaSettings.notificationConnectorsFilePath, JsonConvert.SerializeObject(notificationConnectors));
}
protected void DeleteNotificationConnector(NotificationConnector.NotificationConnectorType notificationConnectorType)
{
Log($"Removing {notificationConnectorType}");
notificationConnectors.RemoveWhere(nc => nc.notificationConnectorType == notificationConnectorType);
while(IsFileInUse(TrangaSettings.notificationConnectorsFilePath))
Thread.Sleep(100);
Log("Exporting notificationConnectors");
File.WriteAllText(TrangaSettings.notificationConnectorsFilePath, JsonConvert.SerializeObject(notificationConnectors));
}
protected void UpdateLibraries()
{
foreach(LibraryConnector lc in libraryConnectors)
lc.UpdateLibrary();
}
protected void AddLibraryConnector(LibraryConnector libraryConnector)
{
Log($"Adding {libraryConnector}");
libraryConnectors.RemoveWhere(lc => lc.libraryType == libraryConnector.libraryType);
libraryConnectors.Add(libraryConnector);
while(IsFileInUse(TrangaSettings.libraryConnectorsFilePath))
Thread.Sleep(100);
Log("Exporting libraryConnectors");
File.WriteAllText(TrangaSettings.libraryConnectorsFilePath, JsonConvert.SerializeObject(libraryConnectors, Formatting.Indented));
}
protected void DeleteLibraryConnector(LibraryConnector.LibraryType libraryType)
{
Log($"Removing {libraryType}");
libraryConnectors.RemoveWhere(lc => lc.libraryType == libraryType);
while(IsFileInUse(TrangaSettings.libraryConnectorsFilePath))
Thread.Sleep(100);
Log("Exporting libraryConnectors");
File.WriteAllText(TrangaSettings.libraryConnectorsFilePath, JsonConvert.SerializeObject(libraryConnectors, Formatting.Indented));
}
protected bool IsFileInUse(string filePath) => IsFileInUse(filePath, this.logger);
public static bool IsFileInUse(string filePath, Logger? logger)
{
if (!File.Exists(filePath))
return false;
try
{
using FileStream stream = new (filePath, FileMode.Open, FileAccess.Read, FileShare.None);
stream.Close();
return false;
}
catch (IOException)
{
logger?.WriteLine($"File is in use {filePath}");
return true;
}
}
}


@ -0,0 +1,54 @@
using System.Net;
using Tranga.MangaConnectors;
namespace Tranga.Jobs;
public class DownloadChapter : Job
{
public Chapter chapter { get; init; }
public DownloadChapter(GlobalBase clone, MangaConnector connector, Chapter chapter, DateTime lastExecution, string? parentJobId = null) : base(clone, JobType.DownloadChapterJob, connector, lastExecution, parentJobId: parentJobId)
{
this.chapter = chapter;
}
public DownloadChapter(GlobalBase clone, MangaConnector connector, Chapter chapter, string? parentJobId = null) : base(clone, JobType.DownloadChapterJob, connector, parentJobId: parentJobId)
{
this.chapter = chapter;
}
protected override string GetId()
{
return $"{GetType()}-{chapter.parentManga.internalId}-{chapter.chapterNumber}";
}
public override string ToString()
{
return $"{id} Chapter: {chapter}";
}
protected override IEnumerable<Job> ExecuteReturnSubTasksInternal(JobBoss jobBoss)
{
Task downloadTask = new(delegate
{
mangaConnector.CopyCoverFromCacheToDownloadLocation(chapter.parentManga);
HttpStatusCode success = mangaConnector.DownloadChapter(chapter, this.progressToken);
chapter.parentManga.UpdateLatestDownloadedChapter(chapter);
if (success == HttpStatusCode.OK)
{
UpdateLibraries();
SendNotifications("Chapter downloaded", $"{chapter.parentManga.sortName} - {chapter.chapterNumber}", true);
}
});
downloadTask.Start();
return Array.Empty<Job>();
}
public override bool Equals(object? obj)
{
if (obj is not DownloadChapter otherJob)
return false;
return otherJob.mangaConnector == this.mangaConnector &&
otherJob.chapter.Equals(this.chapter);
}
}


@ -0,0 +1,59 @@
using Tranga.MangaConnectors;
namespace Tranga.Jobs;
public class DownloadNewChapters : Job
{
public Manga manga { get; set; }
public string translatedLanguage { get; init; }
public DownloadNewChapters(GlobalBase clone, MangaConnector connector, Manga manga, DateTime lastExecution,
bool recurring = false, TimeSpan? recurrence = null, string? parentJobId = null, string translatedLanguage = "en") : base(clone, JobType.DownloadNewChaptersJob, connector, lastExecution, recurring,
recurrence, parentJobId)
{
this.manga = manga;
this.translatedLanguage = translatedLanguage;
}
public DownloadNewChapters(GlobalBase clone, MangaConnector connector, Manga manga, bool recurring = false, TimeSpan? recurrence = null, string? parentJobId = null, string translatedLanguage = "en") : base (clone, JobType.DownloadNewChaptersJob, connector, recurring, recurrence, parentJobId)
{
this.manga = manga;
this.translatedLanguage = translatedLanguage;
}
protected override string GetId()
{
return $"{GetType()}-{manga.internalId}";
}
public override string ToString()
{
return $"{id} Manga: {manga}";
}
protected override IEnumerable<Job> ExecuteReturnSubTasksInternal(JobBoss jobBoss)
{
manga.SaveSeriesInfoJson();
Chapter[] chapters = mangaConnector.GetNewChapters(manga, this.translatedLanguage);
this.progressToken.increments = chapters.Length;
List<Job> jobs = new();
mangaConnector.CopyCoverFromCacheToDownloadLocation(manga);
foreach (Chapter chapter in chapters)
{
DownloadChapter downloadChapterJob = new(this, this.mangaConnector, chapter, parentJobId: this.id);
jobs.Add(downloadChapterJob);
}
UpdateMetadata updateMetadataJob = new(this, this.mangaConnector, this.manga, parentJobId: this.id);
jobs.Add(updateMetadataJob);
progressToken.Complete();
return jobs;
}
public override bool Equals(object? obj)
{
if (obj is not DownloadNewChapters otherJob)
return false;
return otherJob.mangaConnector == this.mangaConnector &&
otherJob.manga.publicationId == this.manga.publicationId;
}
}

Tranga/Jobs/Job.cs Normal file

@ -0,0 +1,98 @@
using Tranga.MangaConnectors;
namespace Tranga.Jobs;
public abstract class Job : GlobalBase
{
public MangaConnector mangaConnector { get; init; }
public ProgressToken progressToken { get; private set; }
public bool recurring { get; init; }
public TimeSpan? recurrenceTime { get; set; }
public DateTime? lastExecution { get; private set; }
public DateTime nextExecution => NextExecution();
public string id => GetId();
internal IEnumerable<Job>? subJobs { get; private set; }
public string? parentJobId { get; init; }
public enum JobType : byte { DownloadChapterJob, DownloadNewChaptersJob, UpdateMetaDataJob }
public JobType jobType;
internal Job(GlobalBase clone, JobType jobType, MangaConnector connector, bool recurring = false, TimeSpan? recurrenceTime = null, string? parentJobId = null) : base(clone)
{
this.jobType = jobType;
this.mangaConnector = connector;
this.progressToken = new ProgressToken(0);
this.recurring = recurring;
if (recurring && recurrenceTime is null)
throw new ArgumentException("If recurrence is set to true, a recurrence time has to be provided.");
else if(recurring && recurrenceTime is not null)
this.lastExecution = DateTime.Now.Subtract((TimeSpan)recurrenceTime);
this.recurrenceTime = recurrenceTime ?? TimeSpan.Zero;
this.parentJobId = parentJobId;
}
internal Job(GlobalBase clone, JobType jobType, MangaConnector connector, DateTime lastExecution, bool recurring = false,
TimeSpan? recurrenceTime = null, string? parentJobId = null) : base(clone)
{
this.jobType = jobType;
this.mangaConnector = connector;
this.progressToken = new ProgressToken(0);
this.recurring = recurring;
if (recurring && recurrenceTime is null)
throw new ArgumentException("If recurrence is set to true, a recurrence time has to be provided.");
this.lastExecution = lastExecution;
this.recurrenceTime = recurrenceTime ?? TimeSpan.Zero;
this.parentJobId = parentJobId;
}
protected abstract string GetId();
public void AddSubJob(Job job)
{
subJobs ??= new List<Job>();
subJobs = subJobs.Append(job);
}
private DateTime NextExecution()
{
if(recurrenceTime.HasValue && lastExecution.HasValue)
return lastExecution.Value.Add(recurrenceTime.Value);
if(recurrenceTime.HasValue && !lastExecution.HasValue)
return DateTime.Now;
return DateTime.MaxValue;
}
public void ResetProgress()
{
this.progressToken.increments -= progressToken.incrementsCompleted;
this.lastExecution = DateTime.Now;
this.progressToken.Waiting();
}
public void ExecutionEnqueue()
{
this.progressToken.increments -= progressToken.incrementsCompleted;
this.progressToken.Standby();
}
public void Cancel()
{
Log($"Cancelling {this}");
this.progressToken.cancellationRequested = true;
this.progressToken.Cancel();
this.lastExecution = DateTime.Now;
if(subJobs is not null)
foreach(Job subJob in subJobs)
subJob.Cancel();
}
public IEnumerable<Job> ExecuteReturnSubTasks(JobBoss jobBoss)
{
progressToken.Start();
subJobs = ExecuteReturnSubTasksInternal(jobBoss);
lastExecution = DateTime.Now;
return subJobs;
}
protected abstract IEnumerable<Job> ExecuteReturnSubTasksInternal(JobBoss jobBoss);
}

Tranga/Jobs/JobBoss.cs Normal file

@ -0,0 +1,301 @@
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using Newtonsoft.Json;
using Tranga.MangaConnectors;
using static System.IO.UnixFileMode;
namespace Tranga.Jobs;
public class JobBoss : GlobalBase
{
public HashSet<Job> jobs { get; init; }
private Dictionary<MangaConnector, Queue<Job>> mangaConnectorJobQueue { get; init; }
public JobBoss(GlobalBase clone, HashSet<MangaConnector> connectors) : base(clone)
{
this.jobs = new();
LoadJobsList(connectors);
this.mangaConnectorJobQueue = new();
Log($"Next job in {jobs.MinBy(job => job.nextExecution)?.nextExecution.Subtract(DateTime.Now)} {jobs.MinBy(job => job.nextExecution)?.id}");
}
public bool AddJob(Job job, string? jobFile = null)
{
if (ContainsJobLike(job))
{
Log($"Already Contains Job {job}");
return false;
}
else
{
if (!this.jobs.Add(job))
return false;
Log($"Added {job}");
UpdateJobFile(job, jobFile);
}
return true;
}
public void AddJobs(IEnumerable<Job> jobsToAdd)
{
foreach (Job job in jobsToAdd)
AddJob(job);
}
/// <summary>
/// Compares contents of the provided job and all current jobs
/// Does not check if objects are the same
/// </summary>
public bool ContainsJobLike(Job job)
{
return this.jobs.Any(existingJob => existingJob.Equals(job));
}
public void RemoveJob(Job job)
{
Log($"Removing {job}");
job.Cancel();
this.jobs.Remove(job);
if(job.subJobs is not null && job.subJobs.Any())
RemoveJobs(job.subJobs);
UpdateJobFile(job);
}
public void RemoveJobs(IEnumerable<Job?> jobsToRemove)
{
List<Job?> toRemove = jobsToRemove.ToList(); //Prevent multiple enumeration
Log($"Removing {toRemove.Count()} jobs.");
foreach (Job? job in toRemove)
if(job is not null)
RemoveJob(job);
}
public IEnumerable<Job> GetJobsLike(string? connectorName = null, string? internalId = null, float? chapterNumber = null)
{
IEnumerable<Job> ret = this.jobs;
if (connectorName is not null)
ret = ret.Where(job => job.mangaConnector.name == connectorName);
if (internalId is not null && chapterNumber is not null)
ret = ret.Where(jjob =>
{
if (jjob is not DownloadChapter job)
return false;
return job.chapter.parentManga.internalId == internalId &&
job.chapter.chapterNumber.Equals(chapterNumber);
});
else if (internalId is not null)
ret = ret.Where(jjob =>
{
if (jjob is not DownloadNewChapters job)
return false;
return job.manga.internalId == internalId;
});
return ret;
}
public IEnumerable<Job> GetJobsLike(MangaConnector? mangaConnector = null, Manga? publication = null,
Chapter? chapter = null)
{
if (chapter is not null)
return GetJobsLike(mangaConnector?.name, chapter.Value.parentManga.internalId, chapter.Value.chapterNumber);
else
return GetJobsLike(mangaConnector?.name, publication?.internalId);
}
public Job? GetJobById(string jobId)
{
if (this.jobs.FirstOrDefault(jjob => jjob.id == jobId) is { } job)
return job;
return null;
}
public bool TryGetJobById(string jobId, out Job? job)
{
if (this.jobs.FirstOrDefault(jjob => jjob.id == jobId) is { } ret)
{
job = ret;
return true;
}
job = null;
return false;
}
private bool QueueContainsJob(Job job)
{
if (mangaConnectorJobQueue.TryAdd(job.mangaConnector, new Queue<Job>()))//If we can add the queue, there is certainly no job in it
return true;
return mangaConnectorJobQueue[job.mangaConnector].Contains(job);
}
public void AddJobToQueue(Job job)
{
Log($"Adding Job to Queue. {job}");
if(!QueueContainsJob(job))
mangaConnectorJobQueue[job.mangaConnector].Enqueue(job);
job.ExecutionEnqueue();
}
private void AddJobsToQueue(IEnumerable<Job> newJobs)
{
foreach(Job job in newJobs)
AddJobToQueue(job);
}
private void LoadJobsList(HashSet<MangaConnector> connectors)
{
if (!Directory.Exists(TrangaSettings.jobsFolderPath)) //No jobs to load
{
Directory.CreateDirectory(TrangaSettings.jobsFolderPath);
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(TrangaSettings.jobsFolderPath, UserRead | UserWrite | UserExecute | GroupRead | OtherRead);
return;
}
//Load json-job-files
foreach (FileInfo file in Directory.GetFiles(TrangaSettings.jobsFolderPath, "*.json").Select(f => new FileInfo(f)))
{
Log($"Adding {file.Name}");
try
{
Job? job = JsonConvert.DeserializeObject<Job>(File.ReadAllText(file.FullName),
new JobJsonConverter(this, new MangaConnectorJsonConverter(this, connectors)));
if (job is null) throw new NullReferenceException();
Log($"Adding Job {job}");
if (!AddJob(job, file.FullName)) //If we detect a duplicate, delete the file.
{
//string path = string.Concat(file.FullName, ".duplicate");
//file.MoveTo(path);
//Log($"Duplicate detected or otherwise not able to add job to list.\nMoved job {job} to {path}");
Log($"Duplicate detected or otherwise not able to add job to list. Removed the file {file.FullName} {job}");
}
}
catch (Exception e)
{
if (e is not (UnreachableException or NullReferenceException)) //parentheses needed: "is not A or B" parses as "(is not A) or B" and would rethrow nearly every exception
throw;
Log(e.Message);
string newName = file.FullName + ".failed";
Log($"Failed loading file {file.Name}.\nMoving to {newName}.\n" +
$"If you think this is a bug, upload contents of the file to the Bugreport!");
File.Move(file.FullName, newName);
continue;
}
}
//Connect jobs to parent-jobs and add Publications to cache
foreach (Job job in this.jobs)
{
Log($"Loading Job {job}");
Job? parentJob = this.jobs.FirstOrDefault(jjob => jjob.id == job.parentJobId);
if (parentJob is not null)
{
parentJob.AddSubJob(job);
Log($"Parent Job {parentJob}");
}
if (job is DownloadNewChapters dncJob)
AddMangaToCache(dncJob.manga);
}
string[] coverFiles = Directory.GetFiles(TrangaSettings.coverImageCache);
foreach(string fileName in coverFiles.Where(fileName => !GetAllCachedManga().Any(manga => manga.coverFileNameInCache == fileName)))
File.Delete(fileName);
}
internal void UpdateJobFile(Job job, string? oldFile = null)
{
string newJobFilePath = Path.Join(TrangaSettings.jobsFolderPath, $"{job.id}.json");
string oldFilePath = oldFile??Path.Join(TrangaSettings.jobsFolderPath, $"{job.id}.json");
//Delete old file
if (File.Exists(oldFilePath))
{
Log($"Deleting Job-file {oldFilePath}");
try
{
while(IsFileInUse(oldFilePath))
Thread.Sleep(10);
File.Delete(oldFilePath);
}
catch (Exception e)
{
Log($"Error deleting {oldFilePath} job {job.id}\n{e}");
return; //Don't export a new file when we haven't actually deleted the old one
}
}
//Export job (in new file) if it is still in our jobs list
if (GetJobById(job.id) is not null)
{
Log($"Exporting Job {newJobFilePath}");
string jobStr = JsonConvert.SerializeObject(job, Formatting.Indented);
while(IsFileInUse(newJobFilePath))
Thread.Sleep(10);
File.WriteAllText(newJobFilePath, jobStr);
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(newJobFilePath, UserRead | UserWrite | GroupRead | OtherRead);
}
}
private void UpdateAllJobFiles()
{
Log("Exporting Jobs");
foreach (Job job in this.jobs)
UpdateJobFile(job);
//Remove files with jobs not in this.jobs-list
Regex idRex = new (@"(.*)\.json");
foreach (FileInfo file in new DirectoryInfo(TrangaSettings.jobsFolderPath).EnumerateFiles())
{
if (idRex.IsMatch(file.Name))
{
string id = idRex.Match(file.Name).Groups[1].Value;
if (!this.jobs.Any(job => job.id == id))
{
try
{
file.Delete();
}
catch (Exception e)
{
Log(e.ToString());
}
}
}
}
}
public void CheckJobs()
{
AddJobsToQueue(jobs.Where(job => job.progressToken.state == ProgressToken.State.Waiting && job.nextExecution < DateTime.Now && !QueueContainsJob(job)).OrderBy(job => job.nextExecution));
foreach (Queue<Job> jobQueue in mangaConnectorJobQueue.Values)
{
if(jobQueue.Count < 1)
continue;
Job queueHead = jobQueue.Peek();
if (queueHead.progressToken.state is ProgressToken.State.Complete or ProgressToken.State.Cancelled)
{
if(!queueHead.recurring)
RemoveJob(queueHead);
else
queueHead.ResetProgress();
jobQueue.Dequeue();
Log($"Next job in {jobs.MinBy(job => job.nextExecution)?.nextExecution.Subtract(DateTime.Now)} {jobs.MinBy(job => job.nextExecution)?.id}");
}else if (queueHead.progressToken.state is ProgressToken.State.Standby)
{
Job eJob = jobQueue.Peek();
Job[] subJobs = eJob.ExecuteReturnSubTasks(this).ToArray();
UpdateJobFile(eJob);
AddJobs(subJobs);
AddJobsToQueue(subJobs);
}else if (queueHead.progressToken.state is ProgressToken.State.Running && DateTime.Now.Subtract(queueHead.progressToken.lastUpdate) > TimeSpan.FromMinutes(5))
{
Log($"{queueHead} inactive for more than 5 minutes. Cancelling.");
queueHead.Cancel();
}
}
}
}


@ -0,0 +1,84 @@
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Tranga.MangaConnectors;
namespace Tranga.Jobs;
public class JobJsonConverter : JsonConverter
{
private GlobalBase _clone;
private MangaConnectorJsonConverter _mangaConnectorJsonConverter;
internal JobJsonConverter(GlobalBase clone, MangaConnectorJsonConverter mangaConnectorJsonConverter)
{
this._clone = clone;
this._mangaConnectorJsonConverter = mangaConnectorJsonConverter;
}
public override bool CanConvert(Type objectType)
{
return (objectType == typeof(Job));
}
public override object ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer)
{
JObject jo = JObject.Load(reader);
if (jo.ContainsKey("jobType") && jo["jobType"]!.Value<byte>() == (byte)Job.JobType.UpdateMetaDataJob)
{
return new UpdateMetadata(this._clone,
jo.GetValue("mangaConnector")!.ToObject<MangaConnector>(JsonSerializer.Create(new JsonSerializerSettings()
{
Converters =
{
this._mangaConnectorJsonConverter
}
}))!,
jo.GetValue("manga")!.ToObject<Manga>(),
jo.GetValue("parentJobId")!.Value<string?>());
}else if ((jo.ContainsKey("jobType") && jo["jobType"]!.Value<byte>() == (byte)Job.JobType.DownloadNewChaptersJob) || jo.ContainsKey("translatedLanguage"))//TODO change to jobType
{
DateTime lastExecution = jo.GetValue("lastExecution") is {} le
? le.ToObject<DateTime>()
: DateTime.UnixEpoch; //TODO do null checks on all variables
return new DownloadNewChapters(this._clone,
jo.GetValue("mangaConnector")!.ToObject<MangaConnector>(JsonSerializer.Create(new JsonSerializerSettings()
{
Converters =
{
this._mangaConnectorJsonConverter
}
}))!,
jo.GetValue("manga")!.ToObject<Manga>(),
lastExecution,
jo.GetValue("recurring")!.Value<bool>(),
jo.GetValue("recurrenceTime")!.ToObject<TimeSpan?>(),
jo.GetValue("parentJobId")!.Value<string?>());
}else if ((jo.ContainsKey("jobType") && jo["jobType"]!.Value<byte>() == (byte)Job.JobType.DownloadChapterJob) || jo.ContainsKey("chapter"))//TODO change to jobType
{
return new DownloadChapter(this._clone,
jo.GetValue("mangaConnector")!.ToObject<MangaConnector>(JsonSerializer.Create(new JsonSerializerSettings()
{
Converters =
{
this._mangaConnectorJsonConverter
}
}))!,
jo.GetValue("chapter")!.ToObject<Chapter>(),
DateTime.UnixEpoch,
jo.GetValue("parentJobId")!.Value<string?>());
}
throw new Exception();
}
public override bool CanWrite => false;
/// <summary>
/// Don't call this
/// </summary>
public override void WriteJson(JsonWriter writer, object? value, JsonSerializer serializer)
{
throw new Exception("Dont call this");
}
}


@ -0,0 +1,78 @@
namespace Tranga.Jobs;
public class ProgressToken
{
public bool cancellationRequested { get; set; }
public int increments { get; set; }
public int incrementsCompleted { get; set; }
public float progress => GetProgress();
public DateTime lastUpdate { get; private set; }
public DateTime executionStarted { get; private set; }
public TimeSpan timeRemaining => GetTimeRemaining();
public enum State { Running, Complete, Standby, Cancelled, Waiting }
public State state { get; private set; }
public ProgressToken(int increments)
{
this.cancellationRequested = false;
this.increments = increments;
this.incrementsCompleted = 0;
this.state = State.Waiting;
this.executionStarted = DateTime.UnixEpoch;
this.lastUpdate = DateTime.UnixEpoch;
}
private float GetProgress()
{
if(increments > 0 && incrementsCompleted > 0)
return incrementsCompleted / (float)increments;
return 0;
}
private TimeSpan GetTimeRemaining()
{
if (increments > 0 && incrementsCompleted > 0)
return DateTime.Now.Subtract(this.executionStarted).Divide(incrementsCompleted).Multiply(increments - incrementsCompleted);
return TimeSpan.MaxValue;
}
public void Increment()
{
this.lastUpdate = DateTime.Now;
this.incrementsCompleted++;
if (incrementsCompleted > increments)
state = State.Complete;
}
public void Standby()
{
this.lastUpdate = DateTime.Now;
state = State.Standby;
}
public void Start()
{
this.lastUpdate = DateTime.Now;
state = State.Running;
this.executionStarted = DateTime.Now;
}
public void Complete()
{
this.lastUpdate = DateTime.Now;
state = State.Complete;
}
public void Cancel()
{
this.lastUpdate = DateTime.Now;
state = State.Cancelled;
}
public void Waiting()
{
this.lastUpdate = DateTime.Now;
state = State.Waiting;
}
}
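A small worked example of the progress and timeRemaining arithmetic above (illustrative only):

// With 10 increments and 4 of them completed 60 seconds after Start():
// progress      = 4 / 10 = 0.4
// timeRemaining = (60 s / 4) * (10 - 4) = 90 s
ProgressToken token = new(10);
token.Start();
for (int i = 0; i < 4; i++)
    token.Increment();
Console.WriteLine($"{token.progress:P0}, {token.state}"); // e.g. "40%, Running"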


@ -0,0 +1,76 @@
using Tranga.MangaConnectors;
namespace Tranga.Jobs;
public class UpdateMetadata : Job
{
public Manga manga { get; set; }
public UpdateMetadata(GlobalBase clone, MangaConnector connector, Manga manga, string? parentJobId = null) : base(clone, JobType.UpdateMetaDataJob, connector, parentJobId: parentJobId)
{
this.manga = manga;
}
protected override string GetId()
{
return $"{GetType()}-{manga.internalId}";
}
public override string ToString()
{
return $"{id} Manga: {manga}";
}
protected override IEnumerable<Job> ExecuteReturnSubTasksInternal(JobBoss jobBoss)
{
//Retrieve new Metadata
Manga? possibleUpdatedManga = mangaConnector.GetMangaFromId(manga.publicationId);
if (possibleUpdatedManga is { } updatedManga)
{
if (updatedManga.Equals(this.manga)) //Check if anything changed
{
this.progressToken.Complete();
return Array.Empty<Job>();
}
this.manga = manga.WithMetadata(updatedManga);
this.manga.SaveSeriesInfoJson(true);
this.mangaConnector.CopyCoverFromCacheToDownloadLocation(manga);
foreach (Job job in jobBoss.GetJobsLike(publication: this.manga))
{
string oldFile;
if (job is DownloadNewChapters dc)
{
oldFile = dc.id;
dc.manga = this.manga;
}
else if (job is UpdateMetadata um)
{
oldFile = um.id;
um.manga = this.manga;
}
else
continue;
jobBoss.UpdateJobFile(job, oldFile);
}
this.progressToken.Complete();
}
else
{
Log($"Could not find Manga {manga}");
this.progressToken.Cancel();
return Array.Empty<Job>();
}
this.progressToken.Cancel();
return Array.Empty<Job>();
}
public override bool Equals(object? obj)
{
if (obj is not UpdateMetadata otherJob)
return false;
return otherJob.mangaConnector == this.mangaConnector &&
otherJob.manga.publicationId == this.manga.publicationId;
}
}


@ -1,22 +1,29 @@
-using System.Text.Json;
-using System.Text.Json.Nodes;
+using System.Text.Json.Nodes;
+using Logging;
+using Newtonsoft.Json;
+using JsonSerializer = System.Text.Json.JsonSerializer;
-namespace API.Schema.LibraryConnectors;
+namespace Tranga.LibraryConnectors;
public class Kavita : LibraryConnector
{
-public Kavita(string baseUrl, string auth) : base(TokenGen.CreateToken(typeof(Kavita), 64), LibraryType.Kavita, baseUrl, auth)
+public Kavita(GlobalBase clone, string baseUrl, string username, string password) :
+base(clone, baseUrl, GetToken(baseUrl, username, password, clone.logger), LibraryType.Kavita)
{
}
-public Kavita(string baseUrl, string username, string password) :
-this(baseUrl, GetToken(baseUrl, username, password))
+[JsonConstructor]
+public Kavita(GlobalBase clone, string baseUrl, string auth) : base(clone, baseUrl, auth, LibraryType.Kavita)
{
}
-private static string GetToken(string baseUrl, string username, string password)
+public override string ToString()
+{
+return $"Kavita {baseUrl}";
+}
+private static string GetToken(string baseUrl, string username, string password, Logger? logger = null)
{
HttpClient client = new()
{
@ -34,6 +41,7 @@ public class Kavita : LibraryConnector
try
{
HttpResponseMessage response = client.Send(requestMessage);
+logger?.WriteLine($"Kavita | GetToken {requestMessage.RequestUri} -> {response.StatusCode}");
if (response.IsSuccessStatusCode)
{
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(response.Content.ReadAsStream());
@ -42,24 +50,28 @@
}
else
{
+logger?.WriteLine($"Kavita | {response.Content}");
}
}
catch (HttpRequestException e)
{
+logger?.WriteLine($"Kavita | Unable to retrieve token:\n\r{e}");
}
+logger?.WriteLine("Kavita | Did not receive token.");
return "";
}
protected override void UpdateLibraryInternal()
{
+Log("Updating libraries.");
foreach (KavitaLibrary lib in GetLibraries())
-NetClient.MakePost($"{BaseUrl}/api/Library/scan?libraryId={lib.id}", "Bearer", Auth);
+NetClient.MakePost($"{baseUrl}/api/Library/scan?libraryId={lib.id}", "Bearer", auth, logger);
}
internal override bool Test()
{
foreach (KavitaLibrary lib in GetLibraries())
-if (NetClient.MakePost($"{BaseUrl}/api/Library/scan?libraryId={lib.id}", "Bearer", Auth))
+if (NetClient.MakePost($"{baseUrl}/api/Library/scan?libraryId={lib.id}", "Bearer", auth, logger))
return true;
return false;
}
@ -70,14 +82,17 @@ public class Kavita : LibraryConnector
/// <returns>Array of KavitaLibrary</returns>
private IEnumerable<KavitaLibrary> GetLibraries()
{
-Stream data = NetClient.MakeRequest($"{BaseUrl}/api/Library/libraries", "Bearer", Auth);
+Log("Getting libraries.");
+Stream data = NetClient.MakeRequest($"{baseUrl}/api/Library/libraries", "Bearer", auth, logger);
if (data == Stream.Null)
{
+Log("No libraries returned");
return Array.Empty<KavitaLibrary>();
}
JsonArray? result = JsonSerializer.Deserialize<JsonArray>(data);
if (result is null)
{
+Log("No libraries returned");
return Array.Empty<KavitaLibrary>();
}

View File

@ -1,30 +1,41 @@
-using System.Text.Json;
-using System.Text.Json.Nodes;
+using System.Text.Json.Nodes;
+using Newtonsoft.Json;
+using JsonSerializer = System.Text.Json.JsonSerializer;
-namespace API.Schema.LibraryConnectors;
+namespace Tranga.LibraryConnectors;
+/// <summary>
+/// Provides connectivity to Komga-API
+/// Can fetch and update libraries
+/// </summary>
public class Komga : LibraryConnector
{
-public Komga(string baseUrl, string auth) : base(TokenGen.CreateToken(typeof(Komga), 64), LibraryType.Komga,
-baseUrl, auth)
+public Komga(GlobalBase clone, string baseUrl, string username, string password)
+: base(clone, baseUrl, Convert.ToBase64String(System.Text.Encoding.ASCII.GetBytes($"{username}:{password}")), LibraryType.Komga)
{
}
-public Komga(string baseUrl, string username, string password)
-: this(baseUrl, Convert.ToBase64String(System.Text.Encoding.ASCII.GetBytes($"{username}:{password}")))
+[JsonConstructor]
+public Komga(GlobalBase clone, string baseUrl, string auth) : base(clone, baseUrl, auth, LibraryType.Komga)
{
}
+public override string ToString()
+{
+return $"Komga {baseUrl}";
+}
protected override void UpdateLibraryInternal()
{
+Log("Updating libraries.");
foreach (KomgaLibrary lib in GetLibraries())
-NetClient.MakePost($"{BaseUrl}/api/v1/libraries/{lib.id}/scan", "Basic", Auth);
+NetClient.MakePost($"{baseUrl}/api/v1/libraries/{lib.id}/scan", "Basic", auth, logger);
}
internal override bool Test()
{
foreach (KomgaLibrary lib in GetLibraries())
-if (NetClient.MakePost($"{BaseUrl}/api/v1/libraries/{lib.id}/scan", "Basic", Auth))
+if (NetClient.MakePost($"{baseUrl}/api/v1/libraries/{lib.id}/scan", "Basic", auth, logger))
return true;
return false;
}
@ -35,14 +46,17 @@ public class Komga : LibraryConnector
/// <returns>Array of KomgaLibraries</returns>
private IEnumerable<KomgaLibrary> GetLibraries()
{
-Stream data = NetClient.MakeRequest($"{BaseUrl}/api/v1/libraries", "Basic", Auth);
+Log("Getting Libraries");
+Stream data = NetClient.MakeRequest($"{baseUrl}/api/v1/libraries", "Basic", auth, logger);
if (data == Stream.Null)
{
+Log("No libraries returned");
return Array.Empty<KomgaLibrary>();
}
JsonArray? result = JsonSerializer.Deserialize<JsonArray>(data);
if (result is null)
{
+Log("No libraries returned");
return Array.Empty<KomgaLibrary>();
}

View File

@ -0,0 +1,144 @@
using System.Net;
using System.Net.Http.Headers;
using Logging;
namespace Tranga.LibraryConnectors;
public abstract class LibraryConnector : GlobalBase
{
public enum LibraryType : byte
{
Komga = 0,
Kavita = 1
}
// ReSharper disable once UnusedAutoPropertyAccessor.Global
public LibraryType libraryType { get; }
public string baseUrl { get; }
// ReSharper disable once MemberCanBeProtected.Global
public string auth { get; } //Base64 encoded, if you use your password everywhere, you have problems
private DateTime? _updateLibraryRequested = null;
private readonly Thread? _libraryBufferThread = null;
private const int NoChangeTimeout = 2, BiggestInterval = 20;
protected LibraryConnector(GlobalBase clone, string baseUrl, string auth, LibraryType libraryType) : base(clone)
{
Log($"Creating libraryConnector {Enum.GetName(libraryType)}");
if (!baseUrlRex.IsMatch(baseUrl))
throw new ArgumentException("Base url does not match pattern");
if(auth == "")
throw new ArgumentNullException(nameof(auth), "Auth can not be empty");
this.baseUrl = baseUrlRex.Match(baseUrl).Value;
this.auth = auth;
this.libraryType = libraryType;
if (TrangaSettings.bufferLibraryUpdates)
{
_libraryBufferThread = new(CheckLibraryBuffer);
_libraryBufferThread.Start();
}
}
private void CheckLibraryBuffer()
{
while (true)
{
if (_updateLibraryRequested is not null && DateTime.Now.Subtract((DateTime)_updateLibraryRequested) > TimeSpan.FromMinutes(NoChangeTimeout)) //If no updates have been requested for NoChangeTimeout minutes, update library
{
UpdateLibraryInternal();
_updateLibraryRequested = null;
}
Thread.Sleep(100);
}
}
public void UpdateLibrary()
{
_updateLibraryRequested ??= DateTime.Now;
if (!TrangaSettings.bufferLibraryUpdates)
{
UpdateLibraryInternal();
return;
}else if (_updateLibraryRequested is not null &&
DateTime.Now.Subtract((DateTime)_updateLibraryRequested) > TimeSpan.FromMinutes(BiggestInterval)) //If the last update has been more than BiggestInterval minutes ago, update library
{
UpdateLibraryInternal();
_updateLibraryRequested = null;
}
else if(_updateLibraryRequested is not null)
{
Log($"Buffering Library Updates (Updates in latest {((DateTime)_updateLibraryRequested).Add(TimeSpan.FromMinutes(BiggestInterval)).Subtract(DateTime.Now)} or {((DateTime)_updateLibraryRequested).Add(TimeSpan.FromMinutes(NoChangeTimeout)).Subtract(DateTime.Now)})");
}
}
protected abstract void UpdateLibraryInternal();
internal abstract bool Test();
protected static class NetClient
{
public static Stream MakeRequest(string url, string authScheme, string auth, Logger? logger)
{
HttpClient client = new();
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(authScheme, auth);
HttpRequestMessage requestMessage = new ()
{
Method = HttpMethod.Get,
RequestUri = new Uri(url)
};
try
{
HttpResponseMessage response = client.Send(requestMessage);
logger?.WriteLine("LibraryManager.NetClient",
$"GET {url} -> {(int)response.StatusCode}: {response.ReasonPhrase}");
if (response.StatusCode is HttpStatusCode.Unauthorized &&
response.RequestMessage!.RequestUri!.AbsoluteUri != url)
return MakeRequest(response.RequestMessage!.RequestUri!.AbsoluteUri, authScheme, auth, logger);
else if (response.IsSuccessStatusCode)
return response.Content.ReadAsStream();
else
return Stream.Null;
}
catch (Exception e)
{
switch (e)
{
case HttpRequestException:
logger?.WriteLine("LibraryManager.NetClient", $"Failed to make Request:\n\r{e}\n\rContinuing.");
break;
default:
throw;
}
return Stream.Null;
}
}
public static bool MakePost(string url, string authScheme, string auth, Logger? logger)
{
HttpClient client = new()
{
DefaultRequestHeaders =
{
{ "Accept", "application/json" },
{ "Authorization", new AuthenticationHeaderValue(authScheme, auth).ToString() }
}
};
HttpRequestMessage requestMessage = new ()
{
Method = HttpMethod.Post,
RequestUri = new Uri(url)
};
HttpResponseMessage response = client.Send(requestMessage);
logger?.WriteLine("LibraryManager.NetClient", $"POST {url} -> {(int)response.StatusCode}: {response.ReasonPhrase}");
if(response.StatusCode is HttpStatusCode.Unauthorized && response.RequestMessage!.RequestUri!.AbsoluteUri != url)
return MakePost(response.RequestMessage!.RequestUri!.AbsoluteUri, authScheme, auth, logger);
else if (response.IsSuccessStatusCode)
return true;
else
return false;
}
}
}
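Taken together with the Kavita and Komga constructors above, wiring up a connector and requesting a scan comes down to a few lines. A hypothetical sketch, not repository code: the GlobalBase instance, host, and credentials are placeholders, and whether the scan fires immediately or is buffered depends on TrangaSettings.bufferLibraryUpdates.

using Tranga;
using Tranga.LibraryConnectors;

LibraryConnector CreateKomga(GlobalBase clone)
{
    // 25600 is Komga's default port; the credentials are placeholders.
    return new Komga(clone, "http://localhost:25600", "admin@example.org", "password");
}

void RequestScan(LibraryConnector connector)
{
    // Buffered mode: the background thread flushes NoChangeTimeout minutes after the
    // pending request, or UpdateLibrary() flushes once BiggestInterval minutes have passed.
    // Unbuffered mode: UpdateLibraryInternal() runs right away.
    connector.UpdateLibrary();
}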

View File

@ -0,0 +1,45 @@
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace Tranga.LibraryConnectors;
public class LibraryManagerJsonConverter : JsonConverter
{
private readonly GlobalBase _clone;
internal LibraryManagerJsonConverter(GlobalBase clone)
{
this._clone = clone;
}
public override bool CanConvert(Type objectType)
{
return (objectType == typeof(LibraryConnector));
}
public override object ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer)
{
JObject jo = JObject.Load(reader);
if (jo["libraryType"]!.Value<byte>() == (byte)LibraryConnector.LibraryType.Komga)
return new Komga(this._clone,
jo.GetValue("baseUrl")!.Value<string>()!,
jo.GetValue("auth")!.Value<string>()!);
if (jo["libraryType"]!.Value<byte>() == (byte)LibraryConnector.LibraryType.Kavita)
return new Kavita(this._clone,
jo.GetValue("baseUrl")!.Value<string>()!,
jo.GetValue("auth")!.Value<string>()!);
throw new Exception();
}
public override bool CanWrite => false;
/// <summary>
/// Don't call this
/// </summary>
public override void WriteJson(JsonWriter writer, object? value, JsonSerializer serializer)
{
throw new Exception("Dont call this");
}
}
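Since the converter's constructor is internal, registration happens inside the Tranga project itself. A hypothetical sketch (the helper and file path are assumptions, not repository code) of how persisted connectors could be read back with it:

using Newtonsoft.Json;
using Tranga;
using Tranga.LibraryConnectors;

static List<LibraryConnector> LoadConnectors(GlobalBase clone, string settingsFilePath)
{
    string json = File.ReadAllText(settingsFilePath);
    // ReadJson() picks Komga or Kavita based on the persisted "libraryType" byte.
    return JsonConvert.DeserializeObject<List<LibraryConnector>>(json,
        new LibraryManagerJsonConverter(clone)) ?? new List<LibraryConnector>();
}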

222
Tranga/Manga.cs Normal file
View File

@ -0,0 +1,222 @@
using System.Runtime.InteropServices;
using System.Text;
using System.Text.RegularExpressions;
using System.Web;
using Newtonsoft.Json;
using static System.IO.UnixFileMode;
namespace Tranga;
/// <summary>
/// Contains information on a Publication (Manga)
/// </summary>
public struct Manga
{
public string sortName { get; private set; }
public List<string> authors { get; private set; }
// ReSharper disable once UnusedAutoPropertyAccessor.Global
public Dictionary<string,string> altTitles { get; private set; }
// ReSharper disable once MemberCanBePrivate.Global
public string? description { get; private set; }
public string[] tags { get; private set; }
// ReSharper disable once UnusedAutoPropertyAccessor.Global
public string? coverUrl { get; private set; }
public string? coverFileNameInCache { get; private set; }
// ReSharper disable once UnusedAutoPropertyAccessor.Global
public Dictionary<string,string> links { get; }
// ReSharper disable once MemberCanBePrivate.Global
public int? year { get; private set; }
public string? originalLanguage { get; }
// ReSharper disable twice MemberCanBePrivate.Global
public string status { get; private set; }
public ReleaseStatusByte releaseStatus { get; private set; }
public enum ReleaseStatusByte : byte
{
Continuing = 0,
Completed = 1,
OnHiatus = 2,
Cancelled = 3,
Unreleased = 4
};
public string folderName { get; private set; }
public string publicationId { get; }
public string internalId { get; }
public float ignoreChaptersBelow { get; set; }
public float latestChapterDownloaded { get; set; }
public float latestChapterAvailable { get; set; }
public string? websiteUrl { get; private set; }
private static readonly Regex LegalCharacters = new (@"[A-Za-zÀ-ÖØ-öø-ÿ0-9 \.\-,'\'\)\(~!\+]*");
[JsonConstructor]
public Manga(string sortName, List<string> authors, string? description, Dictionary<string,string> altTitles, string[] tags, string? coverUrl, string? coverFileNameInCache, Dictionary<string,string>? links, int? year, string? originalLanguage, string publicationId, ReleaseStatusByte releaseStatus, string? websiteUrl = null, string? folderName = null, float? ignoreChaptersBelow = 0)
{
this.sortName = HttpUtility.HtmlDecode(sortName);
this.authors = authors.Select(HttpUtility.HtmlDecode).ToList()!;
this.description = HttpUtility.HtmlDecode(description);
this.altTitles = altTitles.ToDictionary(a => HttpUtility.HtmlDecode(a.Key), a => HttpUtility.HtmlDecode(a.Value));
this.tags = tags.Select(HttpUtility.HtmlDecode).ToArray()!;
this.coverFileNameInCache = coverFileNameInCache;
this.coverUrl = coverUrl;
this.links = links ?? new Dictionary<string, string>();
this.year = year;
this.originalLanguage = originalLanguage;
this.publicationId = publicationId;
this.folderName = folderName ?? string.Concat(LegalCharacters.Matches(HttpUtility.HtmlDecode(sortName)));
while (this.folderName.EndsWith('.'))
this.folderName = this.folderName.Substring(0, this.folderName.Length - 1);
string onlyLowerLetters = string.Concat(this.sortName.ToLower().Where(Char.IsLetter));
this.internalId = DateTime.Now.Ticks.ToString();
this.ignoreChaptersBelow = ignoreChaptersBelow ?? 0f;
this.latestChapterDownloaded = 0;
this.latestChapterAvailable = 0;
this.releaseStatus = releaseStatus;
this.status = Enum.GetName(releaseStatus) ?? "";
this.websiteUrl = websiteUrl;
}
public Manga WithMetadata(Manga newManga)
{
return this with
{
sortName = newManga.sortName,
description = newManga.description,
coverUrl = newManga.coverUrl,
authors = authors.Union(newManga.authors).ToList(),
altTitles = altTitles.UnionBy(newManga.altTitles, kv => kv.Key).ToDictionary(x => x.Key, x => x.Value),
tags = tags.Union(newManga.tags).ToArray(),
status = newManga.status,
releaseStatus = newManga.releaseStatus,
websiteUrl = newManga.websiteUrl,
year = newManga.year,
coverFileNameInCache = newManga.coverFileNameInCache
};
}
public override bool Equals(object? obj)
{
if (obj is not Manga compareManga)
return false;
return this.description == compareManga.description &&
this.year == compareManga.year &&
this.status == compareManga.status &&
this.releaseStatus == compareManga.releaseStatus &&
this.sortName == compareManga.sortName &&
this.latestChapterAvailable.Equals(compareManga.latestChapterAvailable) &&
this.authors.All(a => compareManga.authors.Contains(a)) &&
(this.coverFileNameInCache??"").Equals(compareManga.coverFileNameInCache) &&
(this.websiteUrl??"").Equals(compareManga.websiteUrl) &&
this.tags.All(t => compareManga.tags.Contains(t));
}
public override string ToString()
{
return $"Publication {sortName} {internalId}";
}
public string CreatePublicationFolder(string downloadDirectory)
{
string publicationFolder = Path.Join(downloadDirectory, this.folderName);
if(!Directory.Exists(publicationFolder))
Directory.CreateDirectory(publicationFolder);
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(publicationFolder, GroupRead | GroupWrite | GroupExecute | OtherRead | OtherWrite | OtherExecute | UserRead | UserWrite | UserExecute);
return publicationFolder;
}
public void MovePublicationFolder(string downloadDirectory, string newFolderName)
{
string oldPath = Path.Join(downloadDirectory, this.folderName);
this.folderName = newFolderName;//Create new Path with the new folderName
string newPath = CreatePublicationFolder(downloadDirectory);
if (Directory.Exists(oldPath))
{
if (Directory.Exists(newPath)) //Move/Overwrite old Files, Delete old Directory
{
IEnumerable<string> newPathFileNames = new DirectoryInfo(newPath).GetFiles().Select(fi => fi.Name);
foreach(FileInfo fileInfo in new DirectoryInfo(oldPath).GetFiles().Where(fi => newPathFileNames.Contains(fi.Name) == false))
File.Move(fileInfo.FullName, Path.Join(newPath, fileInfo.Name), true);
Directory.Delete(oldPath);
}else
Directory.Move(oldPath, newPath);
}
}
public void UpdateLatestDownloadedChapter(Chapter chapter)//TODO check files if chapters are all downloaded
{
float chapterNumber = Convert.ToSingle(chapter.chapterNumber, GlobalBase.numberFormatDecimalPoint);
latestChapterDownloaded = latestChapterDownloaded < chapterNumber ? chapterNumber : latestChapterDownloaded;
}
public void SaveSeriesInfoJson(bool overwrite = false)
{
string publicationFolder = CreatePublicationFolder(TrangaSettings.downloadLocation);
string seriesInfoPath = Path.Join(publicationFolder, "series.json");
if(overwrite || (!overwrite && !File.Exists(seriesInfoPath)))
File.WriteAllText(seriesInfoPath,this.GetSeriesInfoJson());
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(seriesInfoPath, GroupRead | GroupWrite | OtherRead | OtherWrite | UserRead | UserWrite);
}
/// <returns>Serialized JSON String for series.json</returns>
private string GetSeriesInfoJson()
{
SeriesInfo si = new (new Metadata(this));
return System.Text.Json.JsonSerializer.Serialize(si);
}
//Only for series.json
private struct SeriesInfo
{
// ReSharper disable once UnusedAutoPropertyAccessor.Local we need it, trust
[JsonRequired]public Metadata metadata { get; }
public SeriesInfo(Metadata metadata) => this.metadata = metadata;
}
//Only for series.json what an abomination, why are all the fields not-null????
private struct Metadata
{
// ReSharper disable UnusedAutoPropertyAccessor.Local we need them all, trust me
[JsonRequired] public string type { get; }
[JsonRequired] public string publisher { get; }
// ReSharper disable twice IdentifierTypo
[JsonRequired] public int comicid { get; }
[JsonRequired] public string booktype { get; }
// ReSharper disable InconsistentNaming This one property is capitalized. Why?
[JsonRequired] public string ComicImage { get; }
[JsonRequired] public int total_issues { get; }
[JsonRequired] public string publication_run { get; }
[JsonRequired]public string name { get; }
[JsonRequired]public string year { get; }
[JsonRequired]public string status { get; }
[JsonRequired]public string description_text { get; }
public Metadata(Manga manga) : this(manga.sortName, manga.year.ToString() ?? string.Empty, manga.releaseStatus, manga.description ?? "")
{
}
public Metadata(string name, string year, ReleaseStatusByte status, string description_text)
{
this.name = name;
this.year = year;
this.status = status switch
{
ReleaseStatusByte.Continuing => "Continuing",
ReleaseStatusByte.Completed => "Ended",
_ => Enum.GetName(status) ?? "Ended"
};
this.description_text = description_text;
//kill it with fire, but otherwise Komga will not parse
type = "Manga";
publisher = "";
comicid = 0;
booktype = "";
ComicImage = "";
total_issues = 0;
publication_run = "";
}
}
}
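For reference, the series.json emitted by SaveSeriesInfoJson() above has roughly this shape; name, year, status and description_text come from the Manga, while the remaining fields are the hard-coded stubs from the Metadata constructor that keep Komga's parser happy (the concrete values below are illustrative placeholders):

{
  "metadata": {
    "type": "Manga",
    "publisher": "",
    "comicid": 0,
    "booktype": "",
    "ComicImage": "",
    "total_issues": 0,
    "publication_run": "",
    "name": "Example Title",
    "year": "2020",
    "status": "Continuing",
    "description_text": "Example description"
  }
}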

View File

@ -1,53 +1,58 @@
-using System.Text.RegularExpressions;
-using API.MangaDownloadClients;
+using System.Net;
+using System.Text.RegularExpressions;
using HtmlAgilityPack;
+using Tranga.Jobs;
-namespace API.Schema.MangaConnectors;
+namespace Tranga.MangaConnectors;
public class AsuraToon : MangaConnector
{
-public AsuraToon() : base("AsuraToon", ["en"], ["https://asuracomic.net"])
+public AsuraToon(GlobalBase clone) : base(clone, "AsuraToon", ["en"])
{
-this.downloadClient = new ChromiumDownloadClient();
+this.downloadClient = new ChromiumDownloadClient(clone);
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
+public override Manga[] GetManga(string publicationTitle = "")
{
+Log($"Searching Publications. Term=\"{publicationTitle}\"");
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
string requestUrl = $"https://asuracomic.net/series?name={sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
-return [];
+return Array.Empty<Manga>();
if (requestResult.htmlDocument is null)
{
-return [];
+Log($"Failed to retrieve site");
+return Array.Empty<Manga>();
}
-(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
+Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
+Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
return publications;
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
+public override Manga? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://asuracomic.net/series/{publicationId}");
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
+public override Manga? GetMangaFromUrl(string url)
{
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return null;
if (requestResult.htmlDocument is null)
{
+Log($"Failed to retrieve site");
return null;
}
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
}
-private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
+private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
{
HtmlNodeCollection mangaList = document.DocumentNode.SelectNodes("//a[starts-with(@href,'series')]");
if (mangaList is null || mangaList.Count < 1)
@ -55,41 +60,41 @@ public class AsuraToon : MangaConnector
IEnumerable<string> urls = mangaList.Select(a => $"https://asuracomic.net/{a.GetAttributeValue("href", "")}");
-List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
+List<Manga> ret = new();
foreach (string url in urls)
{
-(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
-if (manga is { } x)
-ret.Add(x);
+Manga? manga = GetMangaFromUrl(url);
+if (manga is not null)
+ret.Add((Manga)manga);
}
return ret.ToArray();
}
-private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
+private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string? originalLanguage = null;
Dictionary<string, string> altTitles = new(), links = new();
HtmlNodeCollection genreNodes = document.DocumentNode.SelectNodes("//h3[text()='Genres']/../div/button");
string[] tags = genreNodes.Select(b => b.InnerText).ToArray();
-List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
HtmlNode statusNode = document.DocumentNode.SelectSingleNode("//h3[text()='Status']/../h3[2]");
-MangaReleaseStatus releaseStatus = statusNode.InnerText.ToLower() switch
+Manga.ReleaseStatusByte releaseStatus = statusNode.InnerText.ToLower() switch
{
-"ongoing" => MangaReleaseStatus.Continuing,
-"hiatus" => MangaReleaseStatus.OnHiatus,
-"completed" => MangaReleaseStatus.Completed,
-"dropped" => MangaReleaseStatus.Cancelled,
-"season end" => MangaReleaseStatus.Continuing,
-"coming soon" => MangaReleaseStatus.Unreleased,
-_ => MangaReleaseStatus.Unreleased
+"ongoing" => Manga.ReleaseStatusByte.Continuing,
+"hiatus" => Manga.ReleaseStatusByte.OnHiatus,
+"completed" => Manga.ReleaseStatusByte.Completed,
+"dropped" => Manga.ReleaseStatusByte.Cancelled,
+"season end" => Manga.ReleaseStatusByte.Continuing,
+"coming soon" => Manga.ReleaseStatusByte.Unreleased,
+_ => Manga.ReleaseStatusByte.Unreleased
};
HtmlNode coverNode =
document.DocumentNode.SelectSingleNode("//img[@alt='poster']");
string coverUrl = coverNode.GetAttributeValue("src", "");
+string coverFileNameInCache = SaveCoverImageToCache(coverUrl, publicationId, RequestType.MangaCover);
HtmlNode titleNode =
document.DocumentNode.SelectSingleNode("//title");
@ -103,34 +108,30 @@ public class AsuraToon : MangaConnector
HtmlNodeCollection artistNodes = document.DocumentNode.SelectNodes("//h3[text()='Artist']/../h3[not(text()='Artist' or text()='_')]");
IEnumerable<string> authorNames = authorNodes is null ? [] : authorNodes.Select(a => a.InnerText);
IEnumerable<string> artistNames = artistNodes is null ? [] : artistNodes.Select(a => a.InnerText);
-List<string> authorStrings = authorNames.Concat(artistNames).ToList();
-List<Author> authors = authorStrings.Select(author => new Author(author)).ToList();
+List<string> authors = authorNames.Concat(artistNames).ToList();
HtmlNode? firstChapterNode = document.DocumentNode.SelectSingleNode("//a[contains(@href, 'chapter/1')]/../following-sibling::h3");
-uint year = uint.Parse(firstChapterNode?.InnerText.Split(' ')[^1] ?? "2000");
+int? year = int.Parse(firstChapterNode?.InnerText.Split(' ')[^1] ?? "2000");
-Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
-originalLanguage, releaseStatus, -1,
-this,
-authors,
-mangaTags,
-[],
-[]);
-return (manga, authors, mangaTags, [], []);
+Manga manga = new (sortName, authors, description, altTitles, tags, coverUrl, coverFileNameInCache, links,
+year, originalLanguage, publicationId, releaseStatus, websiteUrl);
+AddMangaToCache(manga);
+return manga;
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
-string requestUrl = $"https://asuracomic.net/series/{manga.MangaId}";
+Log($"Getting chapters {manga}");
+string requestUrl = $"https://asuracomic.net/series/{manga.publicationId}";
// Leaving this in for verification if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
-return [];
+return Array.Empty<Chapter>();
//Return Chapters ordered by Chapter-Number
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestUrl);
+Log($"Got {chapters.Count} chapters. {manga}");
return chapters.Order().ToArray();
}
@ -139,6 +140,7 @@ public class AsuraToon : MangaConnector
RequestResult result = downloadClient.MakeRequest(mangaUrl, RequestType.Default);
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
{
+Log("Failed to load site");
return new List<Chapter>();
}
@ -152,40 +154,63 @@ public class AsuraToon : MangaConnector
string chapterUrl = chapterInfo.GetAttributeValue("href", "");
Match match = infoRex.Match(chapterInfo.InnerText);
-if(!ChapterNumber.CanParse(match.Groups[1].Value))
-continue;
-ChapterNumber chapterNumber = new(match.Groups[1].Value);
+string chapterNumber = match.Groups[1].Value;
string? chapterName = match.Groups[2].Success && match.Groups[2].Length > 1 ? match.Groups[2].Value : null;
string url = $"https://asuracomic.net/series/{chapterUrl}";
try
{
-ret.Add(new Chapter(manga, url, chapterNumber, null, chapterName));
+ret.Add(new Chapter(manga, chapterName, null, chapterNumber, url));
}
catch (Exception e)
{
+Log($"Failed to load chapter {chapterNumber}: {e.Message}");
}
}
return ret;
}
-internal override string[] GetChapterImageUrls(Chapter chapter)
+public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
{
-string requestUrl = chapter.Url;
+if (progressToken?.cancellationRequested ?? false)
+{
+progressToken.Cancel();
+return HttpStatusCode.RequestTimeout;
+}
+Manga chapterParentManga = chapter.parentManga;
+Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
+string requestUrl = chapter.url;
// Leaving this in to check if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
-if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
+if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
-return [];
+progressToken?.Cancel();
+return requestResult.statusCode;
}
-string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
-return imageUrls;
+string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
+return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken);
}
-private string[] ParseImageUrlsFromHtml(HtmlDocument document)
+private string[] ParseImageUrlsFromHtml(string mangaUrl)
{
-HtmlNodeCollection images = document.DocumentNode.SelectNodes("//img[contains(@alt, 'chapter page')]");
+RequestResult requestResult =
+downloadClient.MakeRequest(mangaUrl, RequestType.Default);
+if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
+{
+return Array.Empty<string>();
+}
+if (requestResult.htmlDocument is null)
+{
+Log($"Failed to retrieve site");
+return Array.Empty<string>();
+}
+HtmlNodeCollection images =
+requestResult.htmlDocument.DocumentNode.SelectNodes("//img[contains(@alt, 'chapter page')]");
return images.Select(i => i.GetAttributeValue("src", "")).ToArray();
}

View File

@ -1,74 +1,78 @@
using System.Net;
using System.Text.RegularExpressions;
-using API.MangaDownloadClients;
using HtmlAgilityPack;
+using Tranga.Jobs;
-namespace API.Schema.MangaConnectors;
+namespace Tranga.MangaConnectors;
public class Bato : MangaConnector
{
-public Bato() : base("Bato", ["en"], ["bato.to"])
+public Bato(GlobalBase clone) : base(clone, "Bato", ["en"])
{
-this.downloadClient = new HttpDownloadClient();
+this.downloadClient = new HttpDownloadClient(clone);
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
+public override Manga[] GetManga(string publicationTitle = "")
{
+Log($"Searching Publications. Term=\"{publicationTitle}\"");
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
string requestUrl = $"https://bato.to/v3x-search?word={sanitizedTitle}&lang=en";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
-return [];
+return Array.Empty<Manga>();
if (requestResult.htmlDocument is null)
{
-return [];
+Log($"Failed to retrieve site");
+return Array.Empty<Manga>();
}
-(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
+Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
+Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
return publications;
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
+public override Manga? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://bato.to/title/{publicationId}");
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
+public override Manga? GetMangaFromUrl(string url)
{
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return null;
if (requestResult.htmlDocument is null)
{
+Log($"Failed to retrieve site");
return null;
}
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
}
-private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
+private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
{
HtmlNode mangaList = document.DocumentNode.SelectSingleNode("//div[@data-hk='0-0-2']");
if (!mangaList.ChildNodes.Any(node => node.Name == "div"))
-return [];
+return Array.Empty<Manga>();
List<string> urls = mangaList.ChildNodes
.Select(node => $"https://bato.to{node.Descendants("div").First().FirstChild.GetAttributeValue("href", "")}").ToList();
-HashSet<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
+HashSet<Manga> ret = new();
foreach (string url in urls)
{
-(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
-if (manga is { } x)
-ret.Add(x);
+Manga? manga = GetMangaFromUrl(url);
+if (manga is not null)
+ret.Add((Manga)manga);
}
return ret.ToArray();
}
-private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
+private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
HtmlNode infoNode = document.DocumentNode.SelectSingleNode("/html/body/div/main/div[1]/div[2]");
@ -78,61 +82,57 @@ public class Bato : MangaConnector
string[] altTitlesList = infoNode.ChildNodes[1].ChildNodes[2].InnerText.Split('/');
int i = 0;
-List<MangaAltTitle> altTitles = altTitlesList.Select(a => new MangaAltTitle(i++.ToString(), a)).ToList();
+Dictionary<string, string> altTitles = altTitlesList.ToDictionary(s => i++.ToString(), s => s);
-string coverUrl = document.DocumentNode.SelectNodes("//img")
+string posterUrl = document.DocumentNode.SelectNodes("//img")
.First(child => child.GetAttributeValue("data-hk", "") == "0-1-0").GetAttributeValue("src", "").Replace("&amp;", "&");
+string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
List<HtmlNode> genreNodes = document.DocumentNode.SelectSingleNode("//b[text()='Genres:']/..").SelectNodes("span").ToList();
string[] tags = genreNodes.Select(node => node.FirstChild.InnerText).ToArray();
-List<MangaTag> mangaTags = tags.Select(s => new MangaTag(s)).ToList();
List<HtmlNode> authorsNodes = infoNode.ChildNodes[1].ChildNodes[3].Descendants("a").ToList();
-List<string> authorNames = authorsNodes.Select(node => node.InnerText.Replace("amp;", "")).ToList();
-List<Author> authors = authorNames.Select(n => new Author(n)).ToList();
+List<string> authors = authorsNodes.Select(node => node.InnerText.Replace("amp;", "")).ToList();
HtmlNode? originalLanguageNode = document.DocumentNode.SelectSingleNode("//span[text()='Tr From']/..");
string originalLanguage = originalLanguageNode is not null ? originalLanguageNode.LastChild.InnerText : "";
-if (!uint.TryParse(
+if (!int.TryParse(
document.DocumentNode.SelectSingleNode("//span[text()='Original Publication:']/..").LastChild.InnerText.Split('-')[0],
-out uint year))
-year = (uint)DateTime.Now.Year;
+out int year))
+year = DateTime.Now.Year;
string status = document.DocumentNode.SelectSingleNode("//span[text()='Original Publication:']/..")
.ChildNodes[2].InnerText;
-MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
+Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
switch (status.ToLower())
{
-case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
-case "completed": releaseStatus = MangaReleaseStatus.Completed; break;
-case "hiatus": releaseStatus = MangaReleaseStatus.OnHiatus; break;
-case "cancelled": releaseStatus = MangaReleaseStatus.Cancelled; break;
-case "pending": releaseStatus = MangaReleaseStatus.Unreleased; break;
+case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
+case "completed": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
+case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
+case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
+case "pending": releaseStatus = Manga.ReleaseStatusByte.Unreleased; break;
}
-Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
-originalLanguage, releaseStatus, -1,
-this,
-authors,
-mangaTags,
-[],
-altTitles);
-return (manga, authors, mangaTags, [], altTitles);
+Manga manga = new (sortName, authors, description, altTitles, tags, posterUrl, coverFileNameInCache, new Dictionary<string, string>(),
+year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
+AddMangaToCache(manga);
+return manga;
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
-string requestUrl = $"https://bato.to/title/{manga.MangaId}";
+Log($"Getting chapters {manga}");
+string requestUrl = $"https://bato.to/title/{manga.publicationId}";
// Leaving this in for verification if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
-return [];
+return Array.Empty<Chapter>();
//Return Chapters ordered by Chapter-Number
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestUrl);
+Log($"Got {chapters.Count} chapters. {manga}");
return chapters.Order().ToArray();
}
@ -141,6 +141,7 @@ public class Bato : MangaConnector
RequestResult result = downloadClient.MakeRequest(mangaUrl, RequestType.Default);
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
{
+Log("Failed to load site");
return new List<Chapter>();
}
@ -158,40 +159,64 @@ public class Bato : MangaConnector
Match match = numberRex.Match(chapterUrl);
string id = match.Groups[1].Value;
-int? volumeNumber = match.Groups[2].Success ? int.Parse(match.Groups[2].Value) : null;
+string? volumeNumber = match.Groups[2].Success ? match.Groups[2].Value : null;
-if(ChapterNumber.CanParse(match.Groups[3].Value))
-continue;
-ChapterNumber chapterNumber = new(match.Groups[3].Value);
+string chapterNumber = match.Groups[3].Value;
+string chapterName = chapterNumber;
string url = $"https://bato.to{chapterUrl}?load=2";
try
{
-ret.Add(new Chapter(manga, url, chapterNumber, volumeNumber, null));
+ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
}
catch (Exception e)
{
+Log($"Failed to load chapter {chapterNumber}: {e.Message}");
}
}
return ret;
}
-internal override string[] GetChapterImageUrls(Chapter chapter)
+public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
{
-string requestUrl = chapter.Url;
+if (progressToken?.cancellationRequested ?? false)
+{
+progressToken.Cancel();
+return HttpStatusCode.RequestTimeout;
+}
+Manga chapterParentManga = chapter.parentManga;
+Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
+string requestUrl = chapter.url;
// Leaving this in to check if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
-if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
+if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
-return [];
+progressToken?.Cancel();
+return requestResult.statusCode;
}
-string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
-return imageUrls;
+string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
+return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken);
}
-private string[] ParseImageUrlsFromHtml(HtmlDocument document)
+private string[] ParseImageUrlsFromHtml(string mangaUrl)
{
+RequestResult requestResult =
+downloadClient.MakeRequest(mangaUrl, RequestType.Default);
+if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
+{
+return Array.Empty<string>();
+}
+if (requestResult.htmlDocument is null)
+{
+Log($"Failed to retrieve site");
+return Array.Empty<string>();
+}
+HtmlDocument document = requestResult.htmlDocument;
HtmlNode images = document.DocumentNode.SelectNodes("//astro-island").First(node =>
node.GetAttributeValue("component-url", "").Contains("/_astro/ImageList."));

View File

@ -2,17 +2,19 @@
using System.Text;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
+using Microsoft.Extensions.Logging;
using PuppeteerSharp;
-namespace API.MangaDownloadClients;
+namespace Tranga.MangaConnectors;
internal class ChromiumDownloadClient : DownloadClient
{
private static IBrowser? _browser;
private readonly HttpDownloadClient _httpDownloadClient;
-private static async Task<IBrowser> StartBrowser()
+private static async Task<IBrowser> StartBrowser(Logging.Logger? logger = null)
{
+logger?.WriteLine("Starting ChromiumDownloadClient Puppeteer");
return await Puppeteer.LaunchAsync(new LaunchOptions
{
Headless = true,
@ -21,27 +23,28 @@ internal class ChromiumDownloadClient : DownloadClient
"--disable-dev-shm-usage",
"--disable-setuid-sandbox",
"--no-sandbox"},
-Timeout = 30000
-}, new LoggerFactory([new LogProvider()])); //TODO
+Timeout = TrangaSettings.ChromiumStartupTimeoutMs
+}, new LoggerFactory([new LogProvider(logger)]));
}
-private class LogProvider : ILoggerProvider
+private class LogProvider : GlobalBase, ILoggerProvider
{
-//TODO
+public LogProvider(Logging.Logger? logger) : base(logger) { }
public void Dispose() { }
-public ILogger CreateLogger(string categoryName) => new Logger();
+public ILogger CreateLogger(string categoryName) => new Logger(logger);
}
-private class Logger : ILogger
+private class Logger : GlobalBase, ILogger
{
-public Logger() : base() { }
+public Logger(Logging.Logger? logger) : base(logger) { }
public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)
{
if (logLevel <= LogLevel.Information)
return;
-//TODO
+logger?.WriteLine("Puppeteer", formatter.Invoke(state, exception));
}
public bool IsEnabled(LogLevel logLevel) => true;
@ -49,11 +52,11 @@ internal class ChromiumDownloadClient : DownloadClient
public IDisposable? BeginScope<TState>(TState state) where TState : notnull => null;
}
-public ChromiumDownloadClient()
+public ChromiumDownloadClient(GlobalBase clone) : base(clone)
{
-_httpDownloadClient = new();
+_httpDownloadClient = new(this);
if(_browser is null)
-_browser = StartBrowser().Result;
+_browser = StartBrowser(this.logger).Result;
}
private readonly Regex _imageUrlRex = new(@"https?:\/\/.*\.(?:p?jpe?g|gif|a?png|bmp|avif|webp)(\?.*)?");
@ -69,16 +72,17 @@ internal class ChromiumDownloadClient : DownloadClient
if (_browser is null)
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
IPage page = _browser.NewPageAsync().Result;
-page.DefaultTimeout = 10000;
+page.DefaultTimeout = TrangaSettings.ChromiumPageTimeoutMs;
+page.SetExtraHttpHeadersAsync(new() { { "Referer", referrer } });
IResponse response;
try
{
response = page.GoToAsync(url, WaitUntilNavigation.Networkidle0).Result;
-//Log($"Page loaded. {url}");
+Log($"Page loaded. {url}");
}
catch (Exception e)
{
-//Log($"Could not load Page {url}\n{e.Message}");
+Log($"Could not load Page {url}\n{e.Message}");
page.CloseAsync();
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
}

View File

@ -1,13 +1,13 @@
using System.Net;
-using API.Schema;
+using HtmlAgilityPack;
-namespace API.MangaDownloadClients;
+namespace Tranga.MangaConnectors;
-internal abstract class DownloadClient
+internal abstract class DownloadClient : GlobalBase
{
private readonly Dictionary<RequestType, DateTime> _lastExecutedRateLimit;
-protected DownloadClient()
+protected DownloadClient(GlobalBase clone) : base(clone)
{
this._lastExecutedRateLimit = new();
}
@ -16,6 +16,7 @@ internal abstract class DownloadClient
{
if (!TrangaSettings.requestLimits.ContainsKey(requestType))
{
+Log("RequestType not configured for rate-limit.");
return new RequestResult(HttpStatusCode.NotAcceptable, null, Stream.Null);
}
@ -30,6 +31,7 @@ internal abstract class DownloadClient
if (rateLimitTimeout > TimeSpan.Zero)
{
+Log($"Waiting {rateLimitTimeout.TotalSeconds} seconds");
Thread.Sleep(rateLimitTimeout);
}

View File

@ -1,8 +1,8 @@
using System.Net;
-using API.Schema;
+using System.Net.Http.Headers;
using HtmlAgilityPack;
-namespace API.MangaDownloadClients;
+namespace Tranga.MangaConnectors;
internal class HttpDownloadClient : DownloadClient
{
@ -11,16 +11,15 @@ internal class HttpDownloadClient : DownloadClient
Timeout = TimeSpan.FromSeconds(10)
};
-public HttpDownloadClient()
+public HttpDownloadClient(GlobalBase clone) : base(clone)
{
Client.DefaultRequestHeaders.TryAddWithoutValidation("User-Agent", TrangaSettings.userAgent);
}
internal override RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null)
{
-//TODO
-//if (clickButton is not null)
-//Log("Can not click button on static site.");
+if(clickButton is not null)
+Log("Can not click button on static site.");
HttpResponseMessage? response = null;
while (response is null)
{
@ -37,8 +36,10 @@ internal class HttpDownloadClient : DownloadClient
switch (e)
{
case TaskCanceledException:
+Log($"Request timed out {url}.\n\r{e}");
return new RequestResult(HttpStatusCode.RequestTimeout, null, Stream.Null);
case HttpRequestException:
+Log($"Request failed {url}\n\r{e}");
return new RequestResult(HttpStatusCode.BadRequest, null, Stream.Null);
}
}
@ -46,6 +47,7 @@ internal class HttpDownloadClient : DownloadClient
if (!response.IsSuccessStatusCode)
{
+Log($"Request-Error {response.StatusCode}: {url}");
return new RequestResult(response.StatusCode, null, Stream.Null);
}

View File

@ -0,0 +1,235 @@
using System.IO.Compression;
using System.Net;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using Tranga.Jobs;
using static System.IO.UnixFileMode;
namespace Tranga.MangaConnectors;
/// <summary>
/// Base-Class for all Connectors
/// Provides some methods to be used by all Connectors, as well as a DownloadClient
/// </summary>
public abstract class MangaConnector : GlobalBase
{
internal DownloadClient downloadClient { get; init; } = null!;
public string[] SupportedLanguages;
protected MangaConnector(GlobalBase clone, string name, string[] supportedLanguages) : base(clone)
{
this.name = name;
this.SupportedLanguages = supportedLanguages;
Directory.CreateDirectory(TrangaSettings.coverImageCache);
}
public string name { get; } //Name of the Connector (e.g. Website)
/// <summary>
/// Returns all Publications with the given string.
/// If the string is empty or null, returns all Publication of the Connector
/// </summary>
/// <param name="publicationTitle">Search-Query</param>
/// <returns>Publications matching the query</returns>
public abstract Manga[] GetManga(string publicationTitle = "");
public abstract Manga? GetMangaFromUrl(string url);
public abstract Manga? GetMangaFromId(string publicationId);
/// <summary>
/// Returns all Chapters of the publication in the provided language.
/// If the language is empty or null, returns all Chapters in all Languages.
/// </summary>
/// <param name="manga">Publication to get Chapters for</param>
/// <param name="language">Language of the Chapters</param>
/// <returns>Array of Chapters matching Publication and Language</returns>
public abstract Chapter[] GetChapters(Manga manga, string language="en");
/// <summary>
/// Updates the available Chapters of a Publication
/// </summary>
/// <param name="manga">Publication to check</param>
/// <param name="language">Language to receive chapters for</param>
/// <returns>List of Chapters that were previously not in collection</returns>
public Chapter[] GetNewChapters(Manga manga, string language = "en")
{
Log($"Getting new Chapters for {manga}");
Chapter[] allChapters = this.GetChapters(manga, language);
if (allChapters.Length < 1)
return Array.Empty<Chapter>();
Log($"Checking for duplicates {manga}");
List<Chapter> newChaptersList = allChapters.Where(nChapter => nChapter.chapterNumber >= manga.ignoreChaptersBelow
&& !nChapter.CheckChapterIsDownloaded()).ToList();
Log($"{newChaptersList.Count} new chapters. {manga}");
try
{
Chapter latestChapterAvailable =
allChapters.Max();
manga.latestChapterAvailable =
Convert.ToSingle(latestChapterAvailable.chapterNumber, numberFormatDecimalPoint);
}
catch (Exception e)
{
Log(e.ToString());
Log($"Failed getting new Chapters for {manga}");
}
return newChaptersList.ToArray();
}
public abstract HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null);
/// <summary>
/// Copies the already downloaded cover from cache to downloadLocation
/// </summary>
/// <param name="manga">Publication to retrieve Cover for</param>
/// <param name="retries">Number of times to retry to copy the cover (or download it first)</param>
public void CopyCoverFromCacheToDownloadLocation(Manga manga, int? retries = 1)
{
Log($"Copy cover {manga}");
//Check if Publication already has a Folder and cover
string publicationFolder = manga.CreatePublicationFolder(TrangaSettings.downloadLocation);
DirectoryInfo dirInfo = new (publicationFolder);
if (dirInfo.EnumerateFiles().Any(info => info.Name.Contains("cover", StringComparison.InvariantCultureIgnoreCase)))
{
Log($"Cover exists {manga}");
return;
}
string? fileInCache = manga.coverFileNameInCache;
if (fileInCache is null || !File.Exists(fileInCache))
{
Log($"Cloning cover failed: File missing {fileInCache}.");
if (retries > 0 && manga.coverUrl is not null)
{
Log($"Trying {retries} more times");
SaveCoverImageToCache(manga.coverUrl, manga.internalId, 0);
CopyCoverFromCacheToDownloadLocation(manga, --retries);
}
return;
}
string newFilePath = Path.Join(publicationFolder, $"cover.{Path.GetFileName(fileInCache).Split('.')[^1]}" );
Log($"Cloning cover {fileInCache} -> {newFilePath}");
File.Copy(fileInCache, newFilePath, true);
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(newFilePath, GroupRead | GroupWrite | UserRead | UserWrite);
}
/// <summary>
/// Downloads Image from URL and saves it to the given path(incl. fileName)
/// </summary>
/// <param name="imageUrl"></param>
/// <param name="fullPath"></param>
/// <param name="requestType">RequestType for Rate-Limit</param>
/// <param name="referrer">referrer used in html request header</param>
private HttpStatusCode DownloadImage(string imageUrl, string fullPath, RequestType requestType, string? referrer = null)
{
RequestResult requestResult = downloadClient.MakeRequest(imageUrl, requestType, referrer);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return requestResult.statusCode;
if (requestResult.result == Stream.Null)
return HttpStatusCode.NotFound;
FileStream fs = new (fullPath, FileMode.Create);
requestResult.result.CopyTo(fs);
fs.Close();
return requestResult.statusCode;
}
protected HttpStatusCode DownloadChapterImages(string[] imageUrls, Chapter chapter, RequestType requestType, string? referrer = null, ProgressToken? progressToken = null)
{
string saveArchiveFilePath = chapter.GetArchiveFilePath();
if (progressToken?.cancellationRequested ?? false)
return HttpStatusCode.RequestTimeout;
Log($"Downloading Images for {saveArchiveFilePath}");
if (progressToken is not null)
progressToken.increments += imageUrls.Length;
//Check if Publication Directory already exists
string directoryPath = Path.GetDirectoryName(saveArchiveFilePath)!;
if (!Directory.Exists(directoryPath))
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
Directory.CreateDirectory(directoryPath,
UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute );
else
Directory.CreateDirectory(directoryPath);
if (File.Exists(saveArchiveFilePath)) //Don't download twice.
{
progressToken?.Complete();
return HttpStatusCode.Created;
}
//Create a temporary folder to store images
string tempFolder = Directory.CreateTempSubdirectory("trangatemp").FullName;
int chapterNum = 0;
//Download all Images to temporary Folder
if (imageUrls.Length == 0)
{
Log("No images found");
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(saveArchiveFilePath, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute);
Directory.Delete(tempFolder, true);
progressToken?.Complete();
return HttpStatusCode.NoContent;
}
foreach (string imageUrl in imageUrls)
{
string extension = imageUrl.Split('.')[^1].Split('?')[0];
Log($"Downloading image {chapterNum + 1:000}/{imageUrls.Length:000}"); //TODO
HttpStatusCode status = DownloadImage(imageUrl, Path.Join(tempFolder, $"{chapterNum++}.{extension}"), requestType, referrer);
Log($"{saveArchiveFilePath} {chapterNum + 1:000}/{imageUrls.Length:000} {status}");
if ((int)status < 200 || (int)status >= 300)
{
progressToken?.Complete();
return status;
}
if (progressToken?.cancellationRequested ?? false)
{
progressToken.Complete();
return HttpStatusCode.RequestTimeout;
}
progressToken?.Increment();
}
File.WriteAllText(Path.Join(tempFolder, "ComicInfo.xml"), chapter.GetComicInfoXmlString());
Log($"Creating archive {saveArchiveFilePath}");
//ZIP-it and ship-it
ZipFile.CreateFromDirectory(tempFolder, saveArchiveFilePath);
chapter.CreateChapterMarker();
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
File.SetUnixFileMode(saveArchiveFilePath, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute | OtherRead | OtherExecute);
Directory.Delete(tempFolder, true); //Cleanup
Log("Created archive.");
progressToken?.Complete();
Log("Download complete.");
return HttpStatusCode.OK;
}
protected string SaveCoverImageToCache(string url, string mangaInternalId, RequestType requestType, string? referrer = null)
{
Regex urlRex = new (@"https?:\/\/((?:[a-zA-Z0-9-]+\.)+[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+))");
//https?:\/\/[a-zA-Z0-9-]+\.([a-zA-Z0-9-]+\.[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+)) for only second level domains
Match match = urlRex.Match(url);
string filename = $"{match.Groups[1].Value}-{mangaInternalId}.{match.Groups[3].Value}";
string saveImagePath = Path.Join(TrangaSettings.coverImageCache, filename);
if (File.Exists(saveImagePath))
return saveImagePath;
RequestResult coverResult = downloadClient.MakeRequest(url, requestType, referrer);
using MemoryStream ms = new();
coverResult.result.CopyTo(ms);
Directory.CreateDirectory(TrangaSettings.coverImageCache);
File.WriteAllBytes(saveImagePath, ms.ToArray());
Log($"Saving cover to {saveImagePath}");
return saveImagePath;
}
}
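Note (not part of the diff): taken together, the doc-comments above describe the connector workflow: search for a publication, collect the chapters that are not yet downloaded, then download them and copy the cached cover next to the archives. A hedged sketch of a caller driving that flow; the method is illustrative and assumes any concrete MangaConnector instance:

using System;
using System.Net;

static void DownloadAllNewChapters(MangaConnector connector, string title, string language = "en")
{
    Manga[] searchResults = connector.GetManga(title);                     // search by title
    if (searchResults.Length == 0)
        return;
    Manga manga = searchResults[0];
    connector.CopyCoverFromCacheToDownloadLocation(manga);                 // cover was cached while parsing
    foreach (Chapter chapter in connector.GetNewChapters(manga, language)) // skips chapters already on disk
    {
        HttpStatusCode status = connector.DownloadChapter(chapter);        // images -> ComicInfo.xml -> archive
        Console.WriteLine($"{chapter}: {status}");
    }
}

In the repository this orchestration is presumably handled by the job types in Tranga.Jobs (where ProgressToken lives) rather than by hand-written calls like this.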

View File

@@ -0,0 +1,55 @@
using System.Data;
using System.Diagnostics;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace Tranga.MangaConnectors;
public class MangaConnectorJsonConverter : JsonConverter
{
private GlobalBase _clone;
private readonly HashSet<MangaConnector> _connectors;
internal MangaConnectorJsonConverter(GlobalBase clone, HashSet<MangaConnector> connectors)
{
this._clone = clone;
this._connectors = connectors;
}
public override bool CanConvert(Type objectType)
{
return (objectType == typeof(MangaConnector));
}
public override object ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer)
{
JObject jo = JObject.Load(reader);
string? connectorName = jo.Value<string>("name");
if (connectorName is null)
throw new ConstraintException("Name can not be null.");
return connectorName switch
{
"MangaDex" => this._connectors.First(c => c is MangaDex),
"Manganato" => this._connectors.First(c => c is Manganato),
"MangaKatana" => this._connectors.First(c => c is MangaKatana),
"Mangaworld" => this._connectors.First(c => c is Mangaworld),
"Bato" => this._connectors.First(c => c is Bato),
"ManhuaPlus" => this._connectors.First(c => c is ManhuaPlus),
"MangaHere" => this._connectors.First(c => c is MangaHere),
"AsuraToon" => this._connectors.First(c => c is AsuraToon),
"Weebcentral" => this._connectors.First(c => c is Weebcentral),
"Webtoons" => this._connectors.First(c => c is Webtoons),
_ => throw new UnreachableException($"Could not find Connector with name {connectorName}")
};
}
public override bool CanWrite => false;
/// <summary>
/// Don't call this
/// </summary>
public override void WriteJson(JsonWriter writer, object? value, JsonSerializer serializer)
{
throw new Exception("Dont call this");
}
}
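Note (not part of the diff): the converter above only resolves an already-constructed connector by its "name" property and refuses to write (CanWrite is false), so serialization still goes through the default path. A hedged sketch of how it would be registered with Newtonsoft.Json; the globalBase variable stands in for an existing GlobalBase instance:

using Newtonsoft.Json;
using Tranga.MangaConnectors;

HashSet<MangaConnector> connectors = new() { new MangaDex(globalBase), new Manganato(globalBase) };
JsonSerializerSettings settings = new();
settings.Converters.Add(new MangaConnectorJsonConverter(globalBase, connectors));

// On read, a MangaConnector-typed member is mapped back to the matching instance
// from `connectors` via its "name" field; an unknown name throws.
MangaConnector? connector = JsonConvert.DeserializeObject<MangaConnector>("{\"name\":\"MangaDex\"}", settings);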

View File

@@ -1,27 +1,27 @@
using System.Net;
using System.Text.Json.Nodes;
using System.Text.RegularExpressions;
-using API.MangaDownloadClients;
+using Tranga.Jobs;
using JsonSerializer = System.Text.Json.JsonSerializer;
-namespace API.Schema.MangaConnectors;
+namespace Tranga.MangaConnectors;
public class MangaDex : MangaConnector
{
//https://api.mangadex.org/docs/3-enumerations/#language-codes--localization
//https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes
//https://gist.github.com/Josantonius/b455e315bc7f790d14b136d61d9ae469
-public MangaDex() : base("MangaDex", ["en","pt","pt-br","it","de","ru","aa","ab","ae","af","ak","am","an","ar-ae","ar-bh","ar-dz","ar-eg","ar-iq","ar-jo","ar-kw","ar-lb","ar-ly","ar-ma","ar-om","ar-qa","ar-sa","ar-sy","ar-tn","ar-ye","ar","as","av","ay","az","ba","be","bg","bh","bi","bm","bn","bo","br","bs","ca","ce","ch","co","cr","cs","cu","cv","cy","da","de-at","de-ch","de-de","de-li","de-lu","div","dv","dz","ee","el","en-au","en-bz","en-ca","en-cb","en-gb","en-ie","en-jm","en-nz","en-ph","en-tt","en-us","en-za","en-zw","eo","es-ar","es-bo","es-cl","es-co","es-cr","es-do","es-ec","es-es","es-gt","es-hn","es-la","es-mx","es-ni","es-pa","es-pe","es-pr","es-py","es-sv","es-us","es-uy","es-ve","es","et","eu","fa","ff","fi","fj","fo","fr-be","fr-ca","fr-ch","fr-fr","fr-lu","fr-mc","fr","fy","ga","gd","gl","gn","gu","gv","ha","he","hi","ho","hr-ba","hr-hr","hr","ht","hu","hy","hz","ia","id","ie","ig","ii","ik","in","io","is","it-ch","it-it","iu","iw","ja","ja-ro","ji","jv","jw","ka","kg","ki","kj","kk","kl","km","kn","ko","ko-ro","kr","ks","ku","kv","kw","ky","kz","la","lb","lg","li","ln","lo","ls","lt","lu","lv","mg","mh","mi","mk","ml","mn","mo","mr","ms-bn","ms-my","ms","mt","my","na","nb","nd","ne","ng","nl-be","nl-nl","nl","nn","no","nr","ns","nv","ny","oc","oj","om","or","os","pa","pi","pl","ps","pt-pt","qu-bo","qu-ec","qu-pe","qu","rm","rn","ro","rw","sa","sb","sc","sd","se-fi","se-no","se-se","se","sg","sh","si","sk","sl","sm","sn","so","sq","sr-ba","sr-sp","sr","ss","st","su","sv-fi","sv-se","sv","sw","sx","syr","ta","te","tg","th","ti","tk","tl","tn","to","tr","ts","tt","tw","ty","ug","uk","ur","us","uz","ve","vi","vo","wa","wo","xh","yi","yo","za","zh-cn","zh-hk","zh-mo","zh-ro","zh-sg","zh-tw","zh","zu"], ["mangadex.org"])
+public MangaDex(GlobalBase clone) : base(clone, "MangaDex", ["en","pt","pt-br","it","de","ru","aa","ab","ae","af","ak","am","an","ar-ae","ar-bh","ar-dz","ar-eg","ar-iq","ar-jo","ar-kw","ar-lb","ar-ly","ar-ma","ar-om","ar-qa","ar-sa","ar-sy","ar-tn","ar-ye","ar","as","av","ay","az","ba","be","bg","bh","bi","bm","bn","bo","br","bs","ca","ce","ch","co","cr","cs","cu","cv","cy","da","de-at","de-ch","de-de","de-li","de-lu","div","dv","dz","ee","el","en-au","en-bz","en-ca","en-cb","en-gb","en-ie","en-jm","en-nz","en-ph","en-tt","en-us","en-za","en-zw","eo","es-ar","es-bo","es-cl","es-co","es-cr","es-do","es-ec","es-es","es-gt","es-hn","es-la","es-mx","es-ni","es-pa","es-pe","es-pr","es-py","es-sv","es-us","es-uy","es-ve","es","et","eu","fa","ff","fi","fj","fo","fr-be","fr-ca","fr-ch","fr-fr","fr-lu","fr-mc","fr","fy","ga","gd","gl","gn","gu","gv","ha","he","hi","ho","hr-ba","hr-hr","hr","ht","hu","hy","hz","ia","id","ie","ig","ii","ik","in","io","is","it-ch","it-it","iu","iw","ja","ja-ro","ji","jv","jw","ka","kg","ki","kj","kk","kl","km","kn","ko","ko-ro","kr","ks","ku","kv","kw","ky","kz","la","lb","lg","li","ln","lo","ls","lt","lu","lv","mg","mh","mi","mk","ml","mn","mo","mr","ms-bn","ms-my","ms","mt","my","na","nb","nd","ne","ng","nl-be","nl-nl","nl","nn","no","nr","ns","nv","ny","oc","oj","om","or","os","pa","pi","pl","ps","pt-pt","qu-bo","qu-ec","qu-pe","qu","rm","rn","ro","rw","sa","sb","sc","sd","se-fi","se-no","se-se","se","sg","sh","si","sk","sl","sm","sn","so","sq","sr-ba","sr-sp","sr","ss","st","su","sv-fi","sv-se","sv","sw","sx","syr","ta","te","tg","th","ti","tk","tl","tn","to","tr","ts","tt","tw","ty","ug","uk","ur","us","uz","ve","vi","vo","wa","wo","xh","yi","yo","za","zh-cn","zh-hk","zh-mo","zh-ro","zh-sg","zh-tw","zh","zu"])
{
-this.downloadClient = new HttpDownloadClient();
+this.downloadClient = new HttpDownloadClient(clone);
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
+public override Manga[] GetManga(string publicationTitle = "")
{
+Log($"Searching Publications. Term={publicationTitle}");
const int limit = 100; //How many values we want returned at once
int offset = 0; //"Page"
int total = int.MaxValue; //How many total results are there, is updated on first request
-HashSet<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> retManga = new();
+HashSet<Manga> retManga = new();
int loadedPublicationData = 0;
List<JsonNode> results = new();
@@ -53,13 +53,15 @@ public class MangaDex : MangaConnector
foreach (JsonNode mangaNode in results)
{
+Log($"Getting publication data. {++loadedPublicationData}/{total}");
if(MangaFromJsonObject(mangaNode.AsObject()) is { } manga)
retManga.Add(manga); //Add Publication (Manga) to result
}
+Log($"Retrieved {retManga.Count} publications. Term={publicationTitle}");
return retManga.ToArray();
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
+public override Manga? GetMangaFromId(string publicationId)
{
RequestResult requestResult =
downloadClient.MakeRequest($"https://api.mangadex.org/manga/{publicationId}?includes%5B%5D=manga&includes%5B%5D=cover_art&includes%5B%5D=author&includes%5B%5D=artist&includes%5B%5D=tag", RequestType.MangaInfo);
@@ -71,14 +73,15 @@ public class MangaDex : MangaConnector
return null;
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
+public override Manga? GetMangaFromUrl(string url)
{
Regex idRex = new (@"https:\/\/mangadex.org\/title\/([A-z0-9-]*)\/.*");
string id = idRex.Match(url).Groups[1].Value;
+Log($"Got id {id} from {url}");
return GetMangaFromId(id);
}
-private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? MangaFromJsonObject(JsonObject manga)
+private Manga? MangaFromJsonObject(JsonObject manga)
{
if (!manga.TryGetPropertyValue("id", out JsonNode? idNode))
return null;
@@ -90,7 +93,7 @@ public class MangaDex : MangaConnector
if (!attributes.TryGetPropertyValue("title", out JsonNode? titleNode))
return null;
-string sortName = titleNode!.AsObject().ContainsKey("en") switch
+string title = titleNode!.AsObject().ContainsKey("en") switch
{
true => titleNode.AsObject()["en"]!.GetValue<string>(),
false => titleNode.AsObject().First().Value!.GetValue<string>()
@@ -105,7 +108,6 @@ public class MangaDex : MangaConnector
altTitlesDict.TryAdd(altTitleNodeObject.First().Key, altTitleNodeObject.First().Value!.GetValue<string>());
}
}
-List<MangaAltTitle> altTitles = altTitlesDict.Select(t => new MangaAltTitle(t.Key, t.Value)).ToList();
if (!attributes.TryGetPropertyValue("description", out JsonNode? descriptionNode))
return null;
@@ -119,7 +121,6 @@ public class MangaDex : MangaConnector
if (attributes.TryGetPropertyValue("links", out JsonNode? linksNode) && linksNode is not null)
foreach (KeyValuePair<string, JsonNode?> linkKv in linksNode!.AsObject())
linksDict.TryAdd(linkKv.Key, linkKv.Value.GetValue<string>());
-List<Link> links = linksDict.Select(x => new Link(x.Key, x.Value)).ToList();
string? originalLanguage =
attributes.TryGetPropertyValue("originalLanguage", out JsonNode? originalLanguageNode) switch
@@ -128,30 +129,30 @@ public class MangaDex : MangaConnector
false => null
};
-MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
+Manga.ReleaseStatusByte status = Manga.ReleaseStatusByte.Unreleased;
if (attributes.TryGetPropertyValue("status", out JsonNode? statusNode))
{
-releaseStatus = statusNode?.GetValue<string>().ToLower() switch
+status = statusNode?.GetValue<string>().ToLower() switch
{
-"ongoing" => MangaReleaseStatus.Continuing,
+"ongoing" => Manga.ReleaseStatusByte.Continuing,
-"completed" => MangaReleaseStatus.Completed,
+"completed" => Manga.ReleaseStatusByte.Completed,
-"hiatus" => MangaReleaseStatus.OnHiatus,
+"hiatus" => Manga.ReleaseStatusByte.OnHiatus,
-"cancelled" => MangaReleaseStatus.Cancelled,
+"cancelled" => Manga.ReleaseStatusByte.Cancelled,
-_ => MangaReleaseStatus.Unreleased
+_ => Manga.ReleaseStatusByte.Unreleased
};
}
-uint year = attributes.TryGetPropertyValue("year", out JsonNode? yearNode) switch
+int? year = attributes.TryGetPropertyValue("year", out JsonNode? yearNode) switch
{
-true => yearNode?.GetValue<uint>()??0,
+true => yearNode?.GetValue<int>(),
-false => 0
+false => null
};
HashSet<string> tags = new(128);
if (attributes.TryGetPropertyValue("tags", out JsonNode? tagsNode))
foreach (JsonNode? tagNode in tagsNode!.AsArray())
tags.Add(tagNode!["attributes"]!["name"]!["en"]!.GetValue<string>());
-List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
if (!manga.TryGetPropertyValue("relationships", out JsonNode? relationshipsNode))
return null;
@@ -162,31 +163,40 @@ public class MangaDex : MangaConnector
return null;
string fileName = coverNode["attributes"]!["fileName"]!.GetValue<string>();
string coverUrl = $"https://uploads.mangadex.org/covers/{publicationId}/{fileName}";
+string coverCacheName = SaveCoverImageToCache(coverUrl, publicationId, RequestType.MangaCover);
-List<string> authorNames = new();
+List<string> authors = new();
JsonNode?[] authorNodes = relationshipsNode.AsArray()
.Where(rel => rel!["type"]!.GetValue<string>().Equals("author") || rel!["type"]!.GetValue<string>().Equals("artist")).ToArray();
foreach (JsonNode? authorNode in authorNodes)
{
string authorName = authorNode!["attributes"]!["name"]!.GetValue<string>();
-if(!authorNames.Contains(authorName))
+if(!authors.Contains(authorName))
-authorNames.Add(authorName);
+authors.Add(authorName);
}
-List<Author> authors = authorNames.Select(a => new Author(a)).ToList();
-Manga pub = new (publicationId, sortName, description, $"https://mangadex.org/title/{publicationId}", coverUrl, null, year,
-originalLanguage, releaseStatus, -1,
-this,
-authors,
-mangaTags,
-links,
-altTitles);
-return (pub, authors, mangaTags, links, altTitles);
+Manga pub = new(
+title,
+authors,
+description,
+altTitlesDict,
+tags.ToArray(),
+coverUrl,
+coverCacheName,
+linksDict,
+year,
+originalLanguage,
+publicationId,
+status,
+websiteUrl: $"https://mangadex.org/title/{publicationId}"
+);
+AddMangaToCache(pub);
+return pub;
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
+Log($"Getting chapters {manga}");
const int limit = 100; //How many values we want returned at once
int offset = 0; //"Page"
int total = int.MaxValue; //How many total results are there, is updated on first request
@@ -197,7 +207,7 @@ public class MangaDex : MangaConnector
//Request next "Page"
RequestResult requestResult =
downloadClient.MakeRequest(
-$"https://api.mangadex.org/manga/{manga.ConnectorId}/feed?limit={limit}&offset={offset}&translatedLanguage%5B%5D={language}&contentRating%5B%5D=safe&contentRating%5B%5D=suggestive&contentRating%5B%5D=erotica&contentRating%5B%5D=pornographic", RequestType.MangaDexFeed);
+$"https://api.mangadex.org/manga/{manga.publicationId}/feed?limit={limit}&offset={offset}&translatedLanguage%5B%5D={language}&contentRating%5B%5D=safe&contentRating%5B%5D=suggestive&contentRating%5B%5D=erotica&contentRating%5B%5D=pornographic", RequestType.MangaDexFeed);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
break;
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
@@ -215,67 +225,80 @@ public class MangaDex : MangaConnector
JsonObject attributes = chapter["attributes"]!.AsObject();
string chapterId = chapter["id"]!.GetValue<string>();
-string url = $"https://mangadex.org/chapter/{chapterId}";
string? title = attributes.ContainsKey("title") && attributes["title"] is not null
? attributes["title"]!.GetValue<string>()
: null;
-int? volume = attributes.ContainsKey("volume") && attributes["volume"] is not null
+string? volume = attributes.ContainsKey("volume") && attributes["volume"] is not null
-? int.Parse(attributes["volume"]!.GetValue<string>())
+? attributes["volume"]!.GetValue<string>()
: null;
-string? chapterNumStr = attributes.ContainsKey("chapter") && attributes["chapter"] is not null
+string chapterNum = attributes.ContainsKey("chapter") && attributes["chapter"] is not null
? attributes["chapter"]!.GetValue<string>()
-: null;
+: "null";
-if(chapterNumStr is null || ChapterNumber.CanParse(chapterNumStr))
-continue;
-ChapterNumber chapterNumber = new(chapterNumStr);
if (attributes.ContainsKey("pages") && attributes["pages"] is not null &&
attributes["pages"]!.GetValue<int>() < 1)
{
+Log($"Skipping {chapterId} Vol.{volume} Ch.{chapterNum} {title} because it has no pages or is externally linked.");
continue;
}
try
{
-Chapter newChapter = new Chapter(manga, url, chapterNumber, volume, title);
-if(!chapters.Contains(newChapter))
-chapters.Add(newChapter);
+if(!chapters.Any(chp =>
+chp.volumeNumber.Equals(float.Parse(volume??"0", numberFormatDecimalPoint)) &&
+chp.chapterNumber.Equals(float.Parse(chapterNum, numberFormatDecimalPoint))))
+chapters.Add(new Chapter(manga, title, volume, chapterNum, chapterId, chapterId));
}
catch (Exception e)
{
+Log($"Failed to load chapter {chapterNum}: {e.Message}");
}
}
}
//Return Chapters ordered by Chapter-Number
+Log($"Got {chapters.Count} chapters. {manga}");
return chapters.Order().ToArray();
}
-internal override string[] GetChapterImageUrls(Chapter chapter)
+public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
-{//Request URLs for Chapter-Images
+{
+if (progressToken?.cancellationRequested ?? false)
+{
+progressToken.Cancel();
+return HttpStatusCode.RequestTimeout;
+}
+Manga chapterParentManga = chapter.parentManga;
+Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
+//Request URLs for Chapter-Images
RequestResult requestResult =
-downloadClient.MakeRequest($"https://api.mangadex.org/at-home/server/{chapter.ChapterId}?forcePort443=false", RequestType.MangaDexImage);
+downloadClient.MakeRequest($"https://api.mangadex.org/at-home/server/{chapter.url}?forcePort443=false", RequestType.MangaDexImage);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
-return [];
+progressToken?.Cancel();
+return requestResult.statusCode;
}
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
if (result is null)
{
-return [];
+progressToken?.Cancel();
+return HttpStatusCode.NoContent;
}
string baseUrl = result["baseUrl"]!.GetValue<string>();
string hash = result["chapter"]!["hash"]!.GetValue<string>();
JsonArray imageFileNames = result["chapter"]!["data"]!.AsArray();
//Loop through all imageNames and construct urls (imageUrl)
-List<string> imageUrls = new();
+HashSet<string> imageUrls = new();
foreach (JsonNode? image in imageFileNames)
imageUrls.Add($"{baseUrl}/data/{hash}/{image!.GetValue<string>()}");
-return imageUrls.ToArray();
+//Download Chapter-Images
+return DownloadChapterImages(imageUrls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
}
}
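Note (not part of the diff): in both versions above, the chapter download starts from the MangaDex at-home response and builds every image URL as {baseUrl}/data/{hash}/{fileName}. A hedged standalone sketch of just that step, using the same System.Text.Json.Nodes APIs as the connector; the JSON shape is the one the code above reads:

using System.Linq;
using System.Text.Json.Nodes;

static string[] BuildImageUrls(string atHomeJson)
{
    // expected shape: { "baseUrl": "...", "chapter": { "hash": "...", "data": [ "1.png", ... ] } }
    JsonNode root = JsonNode.Parse(atHomeJson)!;
    string baseUrl = root["baseUrl"]!.GetValue<string>();
    string hash = root["chapter"]!["hash"]!.GetValue<string>();
    return root["chapter"]!["data"]!.AsArray()
        .Select(fileName => $"{baseUrl}/data/{hash}/{fileName!.GetValue<string>()}")
        .ToArray();
}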

View File

@@ -1,55 +1,58 @@
-using System.Text.RegularExpressions;
+using System.Net;
-using API.MangaDownloadClients;
+using System.Text.RegularExpressions;
using HtmlAgilityPack;
+using Tranga.Jobs;
-namespace API.Schema.MangaConnectors;
+namespace Tranga.MangaConnectors;
public class MangaHere : MangaConnector
{
-public MangaHere() : base("MangaHere", ["en"], ["www.mangahere.cc"])
+public MangaHere(GlobalBase clone) : base(clone, "MangaHere", ["en"])
{
-this.downloadClient = new ChromiumDownloadClient();
+this.downloadClient = new ChromiumDownloadClient(clone);
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
+public override Manga[] GetManga(string publicationTitle = "")
{
+Log($"Searching Publications. Term=\"{publicationTitle}\"");
string sanitizedTitle = string.Join('+', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
string requestUrl = $"https://www.mangahere.cc/search?title={sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
-return [];
+return Array.Empty<Manga>();
-(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
+Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
+Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
return publications;
}
-private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
+private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
{
if (document.DocumentNode.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),' container ')]").Any(node => node.ChildNodes.Any(cNode => cNode.HasClass("search-keywords"))))
-return [];
+return Array.Empty<Manga>();
List<string> urls = document.DocumentNode
.SelectNodes("//a[contains(@href, '/manga/') and not(contains(@href, '.html'))]")
.Select(thumb => $"https://www.mangahere.cc{thumb.GetAttributeValue("href", "")}").Distinct().ToList();
-HashSet<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
+HashSet<Manga> ret = new();
foreach (string url in urls)
{
-(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
+Manga? manga = GetMangaFromUrl(url);
-if (manga is { } x)
+if (manga is not null)
-ret.Add(x);
+ret.Add((Manga)manga);
}
return ret.ToArray();
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
+public override Manga? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://www.mangahere.cc/manga/{publicationId}");
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
+public override Manga? GetMangaFromUrl(string url)
{
RequestResult requestResult =
downloadClient.MakeRequest(url, RequestType.MangaInfo);
@@ -61,58 +64,54 @@ public class MangaHere : MangaConnector
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, id, url);
}
-private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
+private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string originalLanguage = "", status = "";
Dictionary<string, string> altTitles = new(), links = new();
-MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
+Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
//We dont get posters, because same origin bs HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//img[contains(concat(' ',normalize-space(@class),' '),' detail-info-cover-img ')]");
-string coverUrl = "http://static.mangahere.cc/v20230914/mangahere/images/nopicture.jpg";
+string posterUrl = "http://static.mangahere.cc/v20230914/mangahere/images/nopicture.jpg";
+string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//span[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-title-font ')]");
string sortName = titleNode.InnerText;
-List<string> authorNames = document.DocumentNode
+List<string> authors = document.DocumentNode
.SelectNodes("//p[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-say ')]/a")
.Select(node => node.InnerText)
.ToList();
-List<Author> authors = authorNames.Select(n => new Author(n)).ToList();
HashSet<string> tags = document.DocumentNode
.SelectNodes("//p[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-tag-list ')]/a")
.Select(node => node.InnerText)
.ToHashSet();
-List<MangaTag> mangaTags = tags.Select(n => new MangaTag(n)).ToList();
status = document.DocumentNode.SelectSingleNode("//span[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-title-tip ')]").InnerText;
switch (status.ToLower())
{
-case "cancelled": releaseStatus = MangaReleaseStatus.Cancelled; break;
+case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
-case "hiatus": releaseStatus = MangaReleaseStatus.OnHiatus; break;
+case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
-case "discontinued": releaseStatus = MangaReleaseStatus.Cancelled; break;
+case "discontinued": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
-case "complete": releaseStatus = MangaReleaseStatus.Completed; break;
+case "complete": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
-case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
+case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
}
HtmlNode descriptionNode = document.DocumentNode
.SelectSingleNode("//p[contains(concat(' ',normalize-space(@class),' '),' fullcontent ')]");
string description = descriptionNode.InnerText;
-Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, 0,
-originalLanguage, releaseStatus, -1,
-this,
-authors,
-mangaTags,
-[],
-[]);
-return (manga, authors, mangaTags, [], []);
+Manga manga = new(sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
+coverFileNameInCache, links,
+null, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
+AddMangaToCache(manga);
+return manga;
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
-string requestUrl = $"https://www.mangahere.cc/manga/{manga.MangaId}";
+Log($"Getting chapters {manga}");
+string requestUrl = $"https://www.mangahere.cc/manga/{manga.publicationId}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
@@ -127,38 +126,54 @@ public class MangaHere : MangaConnector
{
Match rexMatch = chapterRex.Match(url);
-int? volumeNumber = rexMatch.Groups[1].Value == "TBD" ? null : int.Parse(rexMatch.Groups[1].Value);
+string volumeNumber = rexMatch.Groups[1].Value == "TBD" ? "0" : rexMatch.Groups[1].Value;
-if(!ChapterNumber.CanParse(rexMatch.Groups[2].Value))
+string chapterNumber = rexMatch.Groups[2].Value;
-continue;
-ChapterNumber chapterNumber = new(rexMatch.Groups[2].Value);
string fullUrl = $"https://www.mangahere.cc{url}";
try
{
-chapters.Add(new Chapter(manga, fullUrl, chapterNumber, volumeNumber, null));
+chapters.Add(new Chapter(manga, "", volumeNumber, chapterNumber, fullUrl));
}
catch (Exception e)
{
+Log($"Failed to load chapter {chapterNumber}: {e.Message}");
}
}
//Return Chapters ordered by Chapter-Number
+Log($"Got {chapters.Count} chapters. {manga}");
return chapters.Order().ToArray();
}
-internal override string[] GetChapterImageUrls(Chapter chapter)
+public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
{
+if (progressToken?.cancellationRequested ?? false)
+{
+progressToken.Cancel();
+return HttpStatusCode.RequestTimeout;
+}
+Manga chapterParentManga = chapter.parentManga;
+Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
List<string> imageUrls = new();
int downloaded = 1;
int images = 1;
-string url = string.Join('/', chapter.Url.Split('/')[..^1]);
+string url = string.Join('/', chapter.url.Split('/')[..^1]);
do
{
RequestResult requestResult =
downloadClient.MakeRequest($"{url}/{downloaded}.html", RequestType.Default);
-if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
+if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
-return [];
+progressToken?.Cancel();
+return requestResult.statusCode;
+}
+if (requestResult.htmlDocument is null)
+{
+progressToken?.Cancel();
+return HttpStatusCode.InternalServerError;
}
imageUrls.AddRange(ParseImageUrlsFromHtml(requestResult.htmlDocument));
@@ -166,9 +181,17 @@ public class MangaHere : MangaConnector
images = requestResult.htmlDocument.DocumentNode
.SelectNodes("//a[contains(@href, '/manga/')]")
.MaxBy(node => node.GetAttributeValue("data-page", 0))!.GetAttributeValue("data-page", 0);
+logger?.WriteLine($"MangaHere speciality: Get Image-url {downloaded}/{images}");
+if (progressToken is not null)
+{
+progressToken.increments = images * 2;//we also have to download the images later
+progressToken.Increment();
+}
} while (downloaded++ <= images);
-return imageUrls.ToArray();
+if (progressToken is not null)
+progressToken.increments = images;//we blip to normal length, in downloadchapterimages it is increasaed by the amount of urls again
+return DownloadChapterImages(imageUrls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
}
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
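Note (not part of the diff): MangaHere serves one image per page, so DownloadChapter above walks {chapterUrl}/{n}.html and reads the highest data-page attribute to learn how many pages exist. A hedged standalone sketch of that paging pattern with HtmlAgilityPack; fetching through HtmlWeb instead of the ChromiumDownloadClient is purely illustrative, and the loop bounds are simplified:

using System.Collections.Generic;
using System.Linq;
using HtmlAgilityPack;

static List<string> CrawlChapterPageUrls(string chapterBaseUrl)
{
    HtmlWeb web = new();
    List<string> pageUrls = new();
    int page = 1, pageCount = 1;
    while (page <= pageCount)
    {
        string pageUrl = $"{chapterBaseUrl}/{page}.html";
        pageUrls.Add(pageUrl);
        HtmlDocument document = web.Load(pageUrl);
        // the largest data-page value on the page is the chapter's page count
        pageCount = document.DocumentNode
            .SelectNodes("//a[contains(@href, '/manga/')]")
            .MaxBy(node => node.GetAttributeValue("data-page", 0))!
            .GetAttributeValue("data-page", 0);
        page++;
    }
    return pageUrls;
}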

View File

@@ -1,24 +1,26 @@
-using System.Text.RegularExpressions;
+using System.Net;
-using API.MangaDownloadClients;
+using System.Text.RegularExpressions;
using HtmlAgilityPack;
+using Tranga.Jobs;
-namespace API.Schema.MangaConnectors;
+namespace Tranga.MangaConnectors;
public class MangaKatana : MangaConnector
{
-public MangaKatana() : base("MangaKatana", ["en"], ["mangakatana.com"])
+public MangaKatana(GlobalBase clone) : base(clone, "MangaKatana", ["en"])
{
-this.downloadClient = new HttpDownloadClient();
+this.downloadClient = new HttpDownloadClient(clone);
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
+public override Manga[] GetManga(string publicationTitle = "")
{
+Log($"Searching Publications. Term=\"{publicationTitle}\"");
string sanitizedTitle = string.Join("%20", Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
string requestUrl = $"https://mangakatana.com/?search={sanitizedTitle}&search_by=book_name";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
-return [];
+return Array.Empty<Manga>();
// ReSharper disable once MergeIntoPattern
// If a single result is found, the user will be redirected to the results directly instead of a result page
@@ -29,16 +31,17 @@ public class MangaKatana : MangaConnector
return new [] { ParseSinglePublicationFromHtml(requestResult.result, requestResult.redirectedToUrl.Split('/')[^1], requestResult.redirectedToUrl) };
}
-(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.result);
+Manga[] publications = ParsePublicationsFromHtml(requestResult.result);
+Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
return publications;
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
+public override Manga? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://mangakatana.com/manga/{publicationId}");
}
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
+public override Manga? GetMangaFromUrl(string url)
{
RequestResult requestResult =
downloadClient.MakeRequest(url, RequestType.MangaInfo);
@@ -47,7 +50,7 @@ public class MangaKatana : MangaConnector
return ParseSinglePublicationFromHtml(requestResult.result, url.Split('/')[^1], url);
}
-private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(Stream html)
+private Manga[] ParsePublicationsFromHtml(Stream html)
{
StreamReader reader = new(html);
string htmlString = reader.ReadToEnd();
@@ -55,7 +58,7 @@ public class MangaKatana : MangaConnector
document.LoadHtml(htmlString);
IEnumerable<HtmlNode> searchResults = document.DocumentNode.SelectNodes("//*[@id='book_list']/div");
if (searchResults is null || !searchResults.Any())
-return [];
+return Array.Empty<Manga>();
List<string> urls = new();
foreach (HtmlNode mangaResult in searchResults)
{
@@ -63,29 +66,29 @@ public class MangaKatana : MangaConnector
.First(a => a.Name == "href").Value);
}
-HashSet<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
+HashSet<Manga> ret = new();
foreach (string url in urls)
{
-(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
+Manga? manga = GetMangaFromUrl(url);
-if (manga is { } x)
+if (manga is not null)
-ret.Add(x);
+ret.Add((Manga)manga);
}
return ret.ToArray();
}
-private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(Stream html, string publicationId, string websiteUrl)
+private Manga ParseSinglePublicationFromHtml(Stream html, string publicationId, string websiteUrl)
{
StreamReader reader = new(html);
string htmlString = reader.ReadToEnd();
HtmlDocument document = new();
document.LoadHtml(htmlString);
-Dictionary<string, string> altTitlesDict = new();
+Dictionary<string, string> altTitles = new();
Dictionary<string, string>? links = null;
HashSet<string> tags = new();
-string[] authorNames = [];
+string[] authors = Array.Empty<string>();
string originalLanguage = "";
-MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
+Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
HtmlNode infoNode = document.DocumentNode.SelectSingleNode("//*[@id='single_book']");
string sortName = infoNode.Descendants("h1").First(n => n.HasClass("heading")).InnerText;
@@ -102,16 +105,16 @@ public class MangaKatana : MangaConnector
case "altnames":
string[] alts = value.Split(" ; ");
for (int i = 0; i < alts.Length; i++)
-altTitlesDict.Add(i.ToString(), alts[i]);
+altTitles.Add(i.ToString(), alts[i]);
break;
case "authorsartists":
-authorNames = value.Split(',');
+authors = value.Split(',');
break;
case "status":
switch (value.ToLower())
{
-case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
+case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
-case "completed": releaseStatus = MangaReleaseStatus.Completed; break;
+case "completed": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
}
break;
case "genres":
@@ -120,39 +123,34 @@ public class MangaKatana : MangaConnector
}
}
-string coverUrl = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[1]/div").Descendants("img").First()
+string posterUrl = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[1]/div").Descendants("img").First()
.GetAttributes().First(a => a.Name == "src").Value;
+string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
string description = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[3]/p").InnerText;
while (description.StartsWith('\n'))
description = description.Substring(1);
-uint year = (uint)DateTime.Now.Year;
+int year = DateTime.Now.Year;
string yearString = infoTable.Descendants("div").First(d => d.HasClass("updateAt"))
.InnerText.Split('-')[^1];
if(yearString.Contains("ago") == false)
{
-year = uint.Parse(yearString);
+year = Convert.ToInt32(yearString);
}
-List<Author> authors = authorNames.Select(n => new Author(n)).ToList();
-List<MangaTag> mangaTags = tags.Select(n => new MangaTag(n)).ToList();
-List<MangaAltTitle> altTitles = altTitlesDict.Select(x => new MangaAltTitle(x.Key, x.Value)).ToList();
-Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
-originalLanguage, releaseStatus, -1,
-this,
-authors,
-mangaTags,
-[],
-altTitles);
-return (manga, authors, mangaTags, [], altTitles);
+Manga manga = new (sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
+year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
+AddMangaToCache(manga);
+return manga;
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
-string requestUrl = $"https://mangakatana.com/manga/{manga.MangaId}";
+Log($"Getting chapters {manga}");
+string requestUrl = $"https://mangakatana.com/manga/{manga.publicationId}";
// Leaving this in for verification if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
@@ -161,6 +159,7 @@ public class MangaKatana : MangaConnector
//Return Chapters ordered by Chapter-Number
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestUrl);
+Log($"Got {chapters.Count} chapters. {manga}");
return chapters.Order().ToArray();
}
@@ -184,40 +183,52 @@ public class MangaKatana : MangaConnector
string url = chapterInfo.Descendants("a").First()
.GetAttributeValue("href", "");
-int? volumeNumber = volumeRex.IsMatch(url) ? int.Parse(volumeRex.Match(url).Groups[1].Value) : null;
+string? volumeNumber = volumeRex.IsMatch(url) ? volumeRex.Match(url).Groups[1].Value : null;
-if(!ChapterNumber.CanParse(chapterNumRex.Match(url).Groups[1].Value))
+string chapterNumber = chapterNumRex.Match(url).Groups[1].Value;
-continue;
-ChapterNumber chapterNumber = new(chapterNumRex.Match(url).Groups[1].Value);
string chapterName = chapterNameRex.Match(fullString).Groups[1].Value;
try
{
-ret.Add(new Chapter(manga, url, chapterNumber, volumeNumber, chapterName));
+ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
}
catch (Exception e)
{
+Log($"Failed to load chapter {chapterNumber}: {e.Message}");
}
}
return ret;
}
-internal override string[] GetChapterImageUrls(Chapter chapter)
+public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
{
-string requestUrl = chapter.Url;
+if (progressToken?.cancellationRequested ?? false)
+{
+progressToken.Cancel();
+return HttpStatusCode.RequestTimeout;
+}
+Manga chapterParentManga = chapter.parentManga;
+Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
+string requestUrl = chapter.url;
// Leaving this in to check if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
-if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
+if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
-return [];
+progressToken?.Cancel();
+return requestResult.statusCode;
}
-string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
+string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
-return imageUrls;
+return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken);
}
-private string[] ParseImageUrlsFromHtml(HtmlDocument document)
+private string[] ParseImageUrlsFromHtml(string mangaUrl)
{
+HtmlWeb web = new();
+HtmlDocument document = web.Load(mangaUrl);
// Images are loaded dynamically, but the urls are present in a piece of js code on the page
string js = document.DocumentNode.SelectSingleNode("//script[contains(., 'data-src')]").InnerText
.Replace("\r", "")

View File

@@ -0,0 +1,232 @@
using System.Globalization;
using System.Net;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using Tranga.Jobs;
namespace Tranga.MangaConnectors;
public class Manganato : MangaConnector
{
public Manganato(GlobalBase clone) : base(clone, "Manganato", ["en"])
{
this.downloadClient = new HttpDownloadClient(clone);
}
public override Manga[] GetManga(string publicationTitle = "")
{
Log($"Searching Publications. Term=\"{publicationTitle}\"");
string sanitizedTitle = string.Join('_', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
string requestUrl = $"https://manganato.gg/search/story/{sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Manga>();
if (requestResult.htmlDocument is null)
return Array.Empty<Manga>();
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
return publications;
}
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
{
List<HtmlNode> searchResults = document.DocumentNode.Descendants("div").Where(n => n.HasClass("story_item")).ToList();
Log($"{searchResults.Count} items.");
List<string> urls = new();
foreach (HtmlNode mangaResult in searchResults)
{
try
{
urls.Add(mangaResult.Descendants("h3").First(n => n.HasClass("story_name"))
.Descendants("a").First().GetAttributeValue("href", ""));
} catch
{
//failed to get a url, send it to the void
}
}
HashSet<Manga> ret = new();
foreach (string url in urls)
{
Manga? manga = GetMangaFromUrl(url);
if (manga is not null)
ret.Add((Manga)manga);
}
return ret.ToArray();
}
public override Manga? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://chapmanganato.com/{publicationId}");
}
public override Manga? GetMangaFromUrl(string url)
{
RequestResult requestResult =
downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return null;
if (requestResult.htmlDocument is null)
return null;
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
}
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
Dictionary<string, string> altTitles = new();
Dictionary<string, string>? links = null;
HashSet<string> tags = new();
string[] authors = Array.Empty<string>();
string originalLanguage = "";
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
HtmlNode infoNode = document.DocumentNode.Descendants("ul").First(d => d.HasClass("manga-info-text"));
string sortName = infoNode.Descendants("h1").First().InnerText;
foreach (HtmlNode li in infoNode.Descendants("li"))
{
string text = li.InnerText.Trim().ToLower();
if (text.StartsWith("author(s) :"))
{
authors = li.Descendants("a").Select(a => a.InnerText.Trim()).ToArray();
}
else if (text.StartsWith("status :"))
{
string status = text.Replace("status :", "").Trim().ToLower();
if (string.IsNullOrWhiteSpace(status))
releaseStatus = Manga.ReleaseStatusByte.Continuing;
else if (status == "ongoing")
releaseStatus = Manga.ReleaseStatusByte.Continuing;
else
releaseStatus = Enum.Parse<Manga.ReleaseStatusByte>(status, true);
}
else if (li.HasClass("genres"))
{
tags = li.Descendants("a").Select(a => a.InnerText.Trim()).ToHashSet();
}
}
string posterUrl = document.DocumentNode.Descendants("div").First(s => s.HasClass("manga-info-pic")).Descendants("img").First()
.GetAttributes().First(a => a.Name == "src").Value;
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover, "https://www.manganato.gg/");
string description = document.DocumentNode.SelectSingleNode("//div[@id='contentBox']")
.InnerText.Replace("Description :", "");
while (description.StartsWith('\n'))
description = description.Substring(1);
string pattern = "MMM-dd-yyyy HH:mm";
HtmlNode? oldestChapter = document.DocumentNode
.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),' row ')]/span[@title]").MaxBy(
node => DateTime.ParseExact(node.GetAttributeValue("title", "Dec-31-2400 23:59"), pattern,
CultureInfo.InvariantCulture).Millisecond);
int year = DateTime.ParseExact(oldestChapter?.GetAttributeValue("title", "Dec 31 2400, 23:59")??"Dec 31 2400, 23:59", pattern,
CultureInfo.InvariantCulture).Year;
Manga manga = new (sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
AddMangaToCache(manga);
return manga;
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
Log($"Getting chapters {manga}");
string requestUrl = manga.websiteUrl;
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Chapter>();
//Return Chapters ordered by Chapter-Number
if (requestResult.htmlDocument is null)
return Array.Empty<Chapter>();
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
Log($"Got {chapters.Count} chapters. {manga}");
return chapters.Order().ToArray();
}
private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
{
List<Chapter> ret = new();
HtmlNode chapterList = document.DocumentNode.Descendants("div").First(l => l.HasClass("chapter-list"));
Regex volRex = new(@"Vol\.([0-9]+).*");
Regex chapterRex = new(@"https:\/\/chapmanganato.[A-z]+\/manga-[A-z0-9]+\/chapter-([0-9\.]+)");
Regex nameRex = new(@"Chapter ([0-9]+(\.[0-9]+)*){1}:? (.*)");
foreach (HtmlNode chapterInfo in chapterList.Descendants("div").Where(x => x.HasClass("row")))
{
string url = chapterInfo.Descendants("a").First().GetAttributeValue("href", "");
var name = chapterInfo.Descendants("a").First().InnerText.Trim();
string chapterName = nameRex.Match(name).Groups[3].Value;
string chapterNumber = Regex.Match(name, @"Chapter ([0-9]+(\.[0-9]+)*)").Groups[1].Value;
string? volumeNumber = Regex.Match(chapterName, @"Vol\.([0-9]+)").Groups[1].Value;
if (string.IsNullOrWhiteSpace(volumeNumber))
volumeNumber = "0";
try
{
ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
}
catch (Exception e)
{
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
}
}
ret.Reverse();
return ret;
}
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
{
if (progressToken?.cancellationRequested ?? false)
{
progressToken.Cancel();
return HttpStatusCode.RequestTimeout;
}
Manga chapterParentManga = chapter.parentManga;
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
string requestUrl = chapter.url;
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
progressToken?.Cancel();
return requestResult.statusCode;
}
if (requestResult.htmlDocument is null)
{
progressToken?.Cancel();
return HttpStatusCode.InternalServerError;
}
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, "https://www.manganato.gg", progressToken:progressToken);
}
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
{
List<string> ret = new();
HtmlNode imageContainer =
document.DocumentNode.Descendants("div").First(i => i.HasClass("container-chapter-reader"));
foreach(HtmlNode imageNode in imageContainer.Descendants("img"))
ret.Add(imageNode.GetAttributeValue("src", ""));
return ret.ToArray();
}
}
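For reference, the two regexes that ParseChaptersFromHtml applies to a Manganato chapter link text can be exercised in isolation. A small sketch with a made-up link text: group 1 of the number pattern is the chapter number, group 3 of nameRex is the display name.

using System.Text.RegularExpressions;

class ManganatoChapterTitle
{
    // Same patterns as ParseChaptersFromHtml above.
    private static readonly Regex NameRex = new(@"Chapter ([0-9]+(\.[0-9]+)*){1}:? (.*)");
    private static readonly Regex NumberRex = new(@"Chapter ([0-9]+(\.[0-9]+)*)");

    public static (string chapterNumber, string chapterName) Parse(string linkText)
    {
        string chapterNumber = NumberRex.Match(linkText).Groups[1].Value;
        string chapterName = NameRex.Match(linkText).Groups[3].Value;
        return (chapterNumber, chapterName);
    }
}

// ManganatoChapterTitle.Parse("Chapter 12.5: Sample Title") -> ("12.5", "Sample Title")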


@@ -1,59 +1,62 @@
-using System.Text.RegularExpressions;
+using System.Net;
-using API.MangaDownloadClients;
+using System.Text.RegularExpressions;
 using HtmlAgilityPack;
+using Tranga.Jobs;
-namespace API.Schema.MangaConnectors;
+namespace Tranga.MangaConnectors;
-public class Mangaworld : MangaConnector
+public class Mangaworld: MangaConnector
 {
-public Mangaworld() : base("Mangaworld", ["it"], ["www.mangaworld.ac"])
+public Mangaworld(GlobalBase clone) : base(clone, "Mangaworld", ["it"])
 {
-this.downloadClient = new ChromiumDownloadClient();
+this.downloadClient = new ChromiumDownloadClient(clone);
 }
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
+public override Manga[] GetManga(string publicationTitle = "")
 {
+Log($"Searching Publications. Term=\"{publicationTitle}\"");
 string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
 string requestUrl = $"https://www.mangaworld.ac/archive?keyword={sanitizedTitle}";
 RequestResult requestResult =
 downloadClient.MakeRequest(requestUrl, RequestType.Default);
 if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
-return [];
+return Array.Empty<Manga>();
 if (requestResult.htmlDocument is null)
-return [];
+return Array.Empty<Manga>();
-(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
+Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
+Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
 return publications;
 }
-private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
+private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
 {
 if (!document.DocumentNode.SelectSingleNode("//div[@class='comics-grid']").ChildNodes
 .Any(node => node.HasClass("entry")))
-return [];
+return Array.Empty<Manga>();
 List<string> urls = document.DocumentNode
 .SelectNodes(
 "//div[@class='comics-grid']//div[@class='entry']//a[contains(concat(' ',normalize-space(@class),' '),'thumb')]")
 .Select(thumb => thumb.GetAttributeValue("href", "")).ToList();
-List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
+HashSet<Manga> ret = new();
 foreach (string url in urls)
 {
-(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
+Manga? manga = GetMangaFromUrl(url);
-if (manga is { } x)
+if (manga is not null)
-ret.Add(x);
+ret.Add((Manga)manga);
 }
 return ret.ToArray();
 }
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
+public override Manga? GetMangaFromId(string publicationId)
 {
 return GetMangaFromUrl($"https://www.mangaworld.ac/manga/{publicationId}");
 }
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
+public override Manga? GetMangaFromUrl(string url)
 {
 RequestResult requestResult =
 downloadClient.MakeRequest(url, RequestType.MangaInfo);
@@ -68,11 +71,12 @@ public class Mangaworld : MangaConnector
 return ParseSinglePublicationFromHtml(requestResult.htmlDocument, id, url);
 }
-private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
+private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
 {
-Dictionary<string, string> altTitlesDict = new();
+Dictionary<string, string> altTitles = new();
+Dictionary<string, string>? links = null;
 string originalLanguage = "";
-MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
+Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
 HtmlNode infoNode = document.DocumentNode.Descendants("div").First(d => d.HasClass("info"));
@@ -81,59 +85,59 @@ public class Mangaworld : MangaConnector
 HtmlNode metadata = infoNode.Descendants().First(d => d.HasClass("meta-data"));
 HtmlNode altTitlesNode = metadata.SelectSingleNode("//span[text()='Titoli alternativi: ' or text()='Titolo alternativo: ']/..").ChildNodes[1];
 string[] alts = altTitlesNode.InnerText.Split(", ");
 for(int i = 0; i < alts.Length; i++)
-altTitlesDict.Add(i.ToString(), alts[i]);
+altTitles.Add(i.ToString(), alts[i]);
-List<MangaAltTitle> altTitles = altTitlesDict.Select(a => new MangaAltTitle(a.Key, a.Value)).ToList();
 HtmlNode genresNode =
 metadata.SelectSingleNode("//span[text()='Generi: ' or text()='Genero: ']/..");
 HashSet<string> tags = genresNode.SelectNodes("a").Select(node => node.InnerText).ToHashSet();
-List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
 HtmlNode authorsNode =
 metadata.SelectSingleNode("//span[text()='Autore: ' or text()='Autori: ']/..");
-string[] authorNames = authorsNode.SelectNodes("a").Select(node => node.InnerText).ToArray();
+string[] authors = authorsNode.SelectNodes("a").Select(node => node.InnerText).ToArray();
-List<Author> authors = authorNames.Select(n => new Author(n)).ToList();
 string status = metadata.SelectSingleNode("//span[text()='Stato: ']/..").SelectNodes("a").First().InnerText;
 // ReSharper disable 5 times StringLiteralTypo
 switch (status.ToLower())
 {
-case "cancellato": releaseStatus = MangaReleaseStatus.Cancelled; break;
+case "cancellato": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
-case "in pausa": releaseStatus = MangaReleaseStatus.OnHiatus; break;
+case "in pausa": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
-case "droppato": releaseStatus = MangaReleaseStatus.Cancelled; break;
+case "droppato": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
-case "finito": releaseStatus = MangaReleaseStatus.Completed; break;
+case "finito": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
-case "in corso": releaseStatus = MangaReleaseStatus.Continuing; break;
+case "in corso": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
 }
-string coverUrl = document.DocumentNode.SelectSingleNode("//img[@class='rounded']").GetAttributeValue("src", "");
+string posterUrl = document.DocumentNode.SelectSingleNode("//img[@class='rounded']").GetAttributeValue("src", "");
+string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId.Replace('/', '-'), RequestType.MangaCover);
 string description = document.DocumentNode.SelectSingleNode("//div[@id='noidungm']").InnerText;
 string yearString = metadata.SelectSingleNode("//span[text()='Anno di uscita: ']/..").SelectNodes("a").First().InnerText;
-uint year = uint.Parse(yearString);
+int year = Convert.ToInt32(yearString);
-Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
-originalLanguage, releaseStatus, -1,
-this,
-authors,
-mangaTags,
-[],
-altTitles);
-return (manga, authors, mangaTags, [], altTitles);
+Manga manga = new (sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
+year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
+AddMangaToCache(manga);
+return manga;
 }
 public override Chapter[] GetChapters(Manga manga, string language="en")
 {
-string requestUrl = $"https://www.mangaworld.ac/manga/{manga.MangaId}";
+Log($"Getting chapters {manga}");
+string requestUrl = $"https://www.mangaworld.ac/manga/{manga.publicationId}";
 RequestResult requestResult =
 downloadClient.MakeRequest(requestUrl, RequestType.Default);
-if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
+if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
-return [];
+return Array.Empty<Chapter>();
+//Return Chapters ordered by Chapter-Number
+if (requestResult.htmlDocument is null)
+return Array.Empty<Chapter>();
 List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
+Log($"Got {chapters.Count} chapters. {manga}");
 return chapters.Order().ToArray();
 }
@@ -152,23 +156,20 @@ public class Mangaworld : MangaConnector
 {
 foreach (HtmlNode volNode in document.DocumentNode.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),'volume-element')]"))
 {
-string volumeStr = volumeRex.Match(volNode.SelectNodes("div").First(node => node.HasClass("volume")).SelectSingleNode("p").InnerText).Groups[1].Value;
+string volume = volumeRex.Match(volNode.SelectNodes("div").First(node => node.HasClass("volume")).SelectSingleNode("p").InnerText).Groups[1].Value;
-int volume = int.Parse(volumeStr);
 foreach (HtmlNode chNode in volNode.SelectNodes("div").First(node => node.HasClass("volume-chapters")).SelectNodes("div"))
 {
-string numberStr = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
+string number = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
-if(!ChapterNumber.CanParse(numberStr))
-continue;
-ChapterNumber chapterNumber = new(numberStr);
 string url = chNode.SelectSingleNode("a").GetAttributeValue("href", "");
 string id = idRex.Match(chNode.SelectSingleNode("a").GetAttributeValue("href", "")).Groups[1].Value;
 try
 {
-ret.Add(new Chapter(manga, url, chapterNumber, volume, null));
+ret.Add(new Chapter(manga, null, volume, number, url, id));
 }
 catch (Exception e)
 {
+Log($"Failed to load chapter {number}: {e.Message}");
 }
 }
 }
@@ -177,18 +178,16 @@ public class Mangaworld : MangaConnector
 {
 foreach (HtmlNode chNode in chaptersWrapper.SelectNodes("div").Where(node => node.HasClass("chapter")))
 {
-string numberStr = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
+string number = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
-if(!ChapterNumber.CanParse(numberStr))
-continue;
-ChapterNumber chapterNumber = new(numberStr);
 string url = chNode.SelectSingleNode("a").GetAttributeValue("href", "");
 string id = idRex.Match(chNode.SelectSingleNode("a").GetAttributeValue("href", "")).Groups[1].Value;
 try
 {
-ret.Add(new Chapter(manga, url, chapterNumber, null, null));
+ret.Add(new Chapter(manga, null, null, number, url, id));
 }
 catch (Exception e)
 {
+Log($"Failed to load chapter {number}: {e.Message}");
 }
 }
 }
@@ -197,18 +196,34 @@ public class Mangaworld : MangaConnector
 return ret;
 }
-internal override string[] GetChapterImageUrls(Chapter chapter)
+public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
 {
-string requestUrl = $"{chapter.Url}?style=list";
+if (progressToken?.cancellationRequested ?? false)
+{
+progressToken.Cancel();
+return HttpStatusCode.RequestTimeout;
+}
+Manga chapterParentManga = chapter.parentManga;
+Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
+string requestUrl = $"{chapter.url}?style=list";
 RequestResult requestResult =
 downloadClient.MakeRequest(requestUrl, RequestType.Default);
-if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
+if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
 {
-return [];
+progressToken?.Cancel();
+return requestResult.statusCode;
+}
+if (requestResult.htmlDocument is null)
+{
+progressToken?.Cancel();
+return HttpStatusCode.InternalServerError;
 }
 string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
-return imageUrls;
+return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage,"https://www.mangaworld.bz/", progressToken:progressToken);
 }
 private string[] ParseImageUrlsFromHtml(HtmlDocument document)

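The Mangaworld hunk above maps Italian status strings onto the release-status enum with a switch. The same mapping can be read as a lookup table; a sketch only, relying on the repository's Manga.ReleaseStatusByte enum and defaulting to Unreleased for anything unrecognized:

using System.Collections.Generic;

static class MangaworldStatus
{
    // Italian status labels as they appear on the site, mapped to the repo's enum.
    private static readonly Dictionary<string, Manga.ReleaseStatusByte> Map = new()
    {
        ["cancellato"] = Manga.ReleaseStatusByte.Cancelled,
        ["droppato"] = Manga.ReleaseStatusByte.Cancelled,
        ["in pausa"] = Manga.ReleaseStatusByte.OnHiatus,
        ["finito"] = Manga.ReleaseStatusByte.Completed,
        ["in corso"] = Manga.ReleaseStatusByte.Continuing
    };

    public static Manga.ReleaseStatusByte Parse(string status) =>
        Map.TryGetValue(status.Trim().ToLower(), out Manga.ReleaseStatusByte parsed)
            ? parsed
            : Manga.ReleaseStatusByte.Unreleased;
}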

@@ -1,56 +1,62 @@
-using System.Text.RegularExpressions;
+using System.Net;
-using API.MangaDownloadClients;
+using System.Text.RegularExpressions;
 using HtmlAgilityPack;
+using Tranga.Jobs;
-namespace API.Schema.MangaConnectors;
+namespace Tranga.MangaConnectors;
 public class ManhuaPlus : MangaConnector
 {
-public ManhuaPlus() : base("ManhuaPlus", ["en"], ["manhuaplus.org"])
+public ManhuaPlus(GlobalBase clone) : base(clone, "ManhuaPlus", ["en"])
 {
-this.downloadClient = new ChromiumDownloadClient();
+this.downloadClient = new ChromiumDownloadClient(clone);
 }
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] GetManga(string publicationTitle = "")
+public override Manga[] GetManga(string publicationTitle = "")
 {
+Log($"Searching Publications. Term=\"{publicationTitle}\"");
 string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
 string requestUrl = $"https://manhuaplus.org/search?keyword={sanitizedTitle}";
 RequestResult requestResult =
 downloadClient.MakeRequest(requestUrl, RequestType.Default);
-if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
+if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
-return [];
+return Array.Empty<Manga>();
-(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
+if (requestResult.htmlDocument is null)
+return Array.Empty<Manga>();
+Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
+Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
 return publications;
 }
-private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)[] ParsePublicationsFromHtml(HtmlDocument document)
+private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
 {
 if (document.DocumentNode.SelectSingleNode("//h1/../..").ChildNodes//I already want to not.
 .Any(node => node.InnerText.Contains("No manga found")))
-return [];
+return Array.Empty<Manga>();
 List<string> urls = document.DocumentNode
 .SelectNodes("//h1/../..//a[contains(@href, 'https://manhuaplus.org/manga/') and contains(concat(' ',normalize-space(@class),' '),' clamp ') and not(contains(@href, '/chapter'))]")
 .Select(mangaNode => mangaNode.GetAttributeValue("href", "")).ToList();
+logger?.WriteLine($"Got {urls.Count} urls.");
-List<(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)> ret = new();
+HashSet<Manga> ret = new();
 foreach (string url in urls)
 {
-(Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? manga = GetMangaFromUrl(url);
+Manga? manga = GetMangaFromUrl(url);
-if (manga is { } x)
+if (manga is not null)
-ret.Add(x);
+ret.Add((Manga)manga);
 }
 return ret.ToArray();
 }
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromId(string publicationId)
+public override Manga? GetMangaFromId(string publicationId)
 {
 return GetMangaFromUrl($"https://manhuaplus.org/manga/{publicationId}");
 }
-public override (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?)? GetMangaFromUrl(string url)
+public override Manga? GetMangaFromUrl(string url)
 {
 Regex publicationIdRex = new(@"https:\/\/manhuaplus.org\/manga\/(.*)(\/.*)*");
 string publicationId = publicationIdRex.Match(url).Groups[1].Value;
@@ -61,33 +67,34 @@ public class ManhuaPlus : MangaConnector
 return null;
 }
-private (Manga, List<Author>?, List<MangaTag>?, List<Link>?, List<MangaAltTitle>?) ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
+private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
 {
 string originalLanguage = "", status = "";
 Dictionary<string, string> altTitles = new(), links = new();
 HashSet<string> tags = new();
-MangaReleaseStatus releaseStatus = MangaReleaseStatus.Unreleased;
+Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
 HtmlNode posterNode = document.DocumentNode.SelectSingleNode("/html/body/main/div/div/div[2]/div[1]/figure/a/img");//BRUH
 Regex posterRex = new(@".*(\/uploads/covers/[a-zA-Z0-9\-\._\~\!\$\&\'\(\)\*\+\,\;\=\:\@]+).*");
-string coverUrl = $"https://manhuaplus.org/{posterRex.Match(posterNode.GetAttributeValue("src", "")).Groups[1].Value}";
+string posterUrl = $"https://manhuaplus.org/{posterRex.Match(posterNode.GetAttributeValue("src", "")).Groups[1].Value}";
+string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
 HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//h1");
 string sortName = titleNode.InnerText.Replace("\n", "");
-List<string> authorNames = new();
+List<string> authors = new();
 try
 {
 HtmlNode[] authorsNodes = document.DocumentNode
 .SelectNodes("//a[contains(@href, 'https://manhuaplus.org/authors/')]")
 .ToArray();
 foreach (HtmlNode authorNode in authorsNodes)
-authorNames.Add(authorNode.InnerText);
+authors.Add(authorNode.InnerText);
 }
 catch (ArgumentNullException e)
 {
+Log("No authors found.");
 }
-List<Author> authors = authorNames.Select(a => new Author(a)).ToList();
 try
 {
@@ -98,42 +105,39 @@ public class ManhuaPlus : MangaConnector
 }
 catch (ArgumentNullException e)
 {
+Log("No genres found");
 }
-List<MangaTag> mangaTags = tags.Select(t => new MangaTag(t)).ToList();
 Regex yearRex = new(@"(?:[0-9]{1,2}\/){2}([0-9]{2,4}) [0-9]{1,2}:[0-9]{1,2}");
 HtmlNode yearNode = document.DocumentNode.SelectSingleNode("//aside//i[contains(concat(' ',normalize-space(@class),' '),' fa-clock ')]/../span");
 Match match = yearRex.Match(yearNode.InnerText);
-uint year = match.Success && match.Groups[1].Success ? uint.Parse(match.Groups[1].Value) : 0;
+int year = match.Success && match.Groups[1].Success ? int.Parse(match.Groups[1].Value) : 1960;
 status = document.DocumentNode.SelectSingleNode("//aside//i[contains(concat(' ',normalize-space(@class),' '),' fa-rss ')]/../span").InnerText.Replace("\n", "");
 switch (status.ToLower())
 {
-case "cancelled": releaseStatus = MangaReleaseStatus.Cancelled; break;
+case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
-case "hiatus": releaseStatus = MangaReleaseStatus.OnHiatus; break;
+case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
-case "discontinued": releaseStatus = MangaReleaseStatus.Cancelled; break;
+case "discontinued": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
-case "complete": releaseStatus = MangaReleaseStatus.Completed; break;
+case "complete": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
-case "ongoing": releaseStatus = MangaReleaseStatus.Continuing; break;
+case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
 }
 HtmlNode descriptionNode = document.DocumentNode
 .SelectSingleNode("//div[@id='syn-target']");
 string description = descriptionNode.InnerText;
-Manga manga = new (publicationId, sortName, description, websiteUrl, coverUrl, null, year,
-originalLanguage, releaseStatus, -1,
-this,
-authors,
-mangaTags,
-[],
-[]);
-return (manga, authors, mangaTags, [], []);
+Manga manga = new(sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
+coverFileNameInCache, links,
+year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
+AddMangaToCache(manga);
+return manga;
 }
 public override Chapter[] GetChapters(Manga manga, string language="en")
 {
-RequestResult result = downloadClient.MakeRequest($"https://manhuaplus.org/manga/{manga.MangaId}", RequestType.Default);
+Log($"Getting chapters {manga}");
+RequestResult result = downloadClient.MakeRequest($"https://manhuaplus.org/manga/{manga.publicationId}", RequestType.Default);
 if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
 {
 return Array.Empty<Chapter>();
@@ -148,34 +152,52 @@ public class ManhuaPlus : MangaConnector
 {
 Match rexMatch = urlRex.Match(url);
-if(!ChapterNumber.CanParse(rexMatch.Groups[1].Value))
+string volumeNumber = "1";
-continue;
+string chapterNumber = rexMatch.Groups[1].Value;
-ChapterNumber chapterNumber = new(rexMatch.Groups[1].Value);
 string fullUrl = url;
 try
 {
-chapters.Add(new Chapter(manga, fullUrl, chapterNumber, null, null));
+chapters.Add(new Chapter(manga, "", volumeNumber, chapterNumber, fullUrl));
 }
 catch (Exception e)
 {
+Log($"Failed to load chapter {chapterNumber}: {e.Message}");
 }
 }
 //Return Chapters ordered by Chapter-Number
+Log($"Got {chapters.Count} chapters. {manga}");
 return chapters.Order().ToArray();
 }
-internal override string[] GetChapterImageUrls(Chapter chapter)
+public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
 {
-RequestResult requestResult = this.downloadClient.MakeRequest(chapter.Url, RequestType.Default);
+if (progressToken?.cancellationRequested ?? false)
-if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
 {
-return [];
+progressToken.Cancel();
+return HttpStatusCode.RequestTimeout;
+}
+Manga chapterParentManga = chapter.parentManga;
+if (progressToken?.cancellationRequested ?? false)
+{
+progressToken.Cancel();
+return HttpStatusCode.RequestTimeout;
+}
+Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
+RequestResult requestResult = this.downloadClient.MakeRequest(chapter.url, RequestType.Default);
+if (requestResult.htmlDocument is null)
+{
+progressToken?.Cancel();
+return HttpStatusCode.RequestTimeout;
 }
 HtmlDocument document = requestResult.htmlDocument;
 HtmlNode[] images = document.DocumentNode.SelectNodes("//a[contains(concat(' ',normalize-space(@class),' '),' readImg ')]/img").ToArray();
 List<string> urls = images.Select(node => node.GetAttributeValue("src", "")).ToList();
-return urls.ToArray();
+return DownloadChapterImages(urls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
 }
 }
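Every connector in this diff now implements DownloadChapter with the same guard structure: bail out (and cancel the token) if cancellation was requested, fail the job with the upstream status code if the page request fails, then hand the parsed image URLs to DownloadChapterImages. A condensed sketch of that shared shape, using the repository's ProgressToken, RequestResult and DownloadChapterImages members rather than any one connector's exact body:

public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
{
    // Cancelled before any work started: mark the token and report a timeout.
    if (progressToken?.cancellationRequested ?? false)
    {
        progressToken.Cancel();
        return HttpStatusCode.RequestTimeout;
    }
    RequestResult requestResult = downloadClient.MakeRequest(chapter.url, RequestType.Default);
    if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
    {
        progressToken?.Cancel();
        return requestResult.statusCode;          // surface the upstream failure
    }
    if (requestResult.htmlDocument is null)
    {
        progressToken?.Cancel();
        return HttpStatusCode.InternalServerError;
    }
    string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
    return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken: progressToken);
}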


@@ -1,7 +1,7 @@
 using System.Net;
 using HtmlAgilityPack;
-namespace API.MangaDownloadClients;
+namespace Tranga.MangaConnectors;
 public struct RequestResult
 {


@@ -1,4 +1,4 @@
-namespace API.MangaDownloadClients;
+namespace Tranga.MangaConnectors;
 public enum RequestType : byte
 {


@@ -0,0 +1,273 @@
using System.Net;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using Tranga.Jobs;
namespace Tranga.MangaConnectors;
public class Webtoons : MangaConnector
{
public Webtoons(GlobalBase clone) : base(clone, "Webtoons", ["en"])
{
this.downloadClient = new HttpDownloadClient(clone);
}
// Done
public override Manga[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
Log($"Searching Publications. Term=\"{publicationTitle}\"");
string requestUrl = $"https://www.webtoons.com/en/search?keyword={sanitizedTitle}&searchType=WEBTOON";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300) {
Log($"Failed to retrieve site");
return Array.Empty<Manga>();
}
if (requestResult.htmlDocument is null)
{
Log($"Failed to retrieve site");
return Array.Empty<Manga>();
}
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
return publications;
}
// Done
public override Manga? GetMangaFromId(string publicationId)
{
PublicationManager pb = new PublicationManager(publicationId);
return GetMangaFromUrl($"https://www.webtoons.com/en/{pb.Category}/{pb.Title}/list?title_no={pb.Id}");
}
// Done
public override Manga? GetMangaFromUrl(string url)
{
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300) {
return null;
}
if (requestResult.htmlDocument is null)
{
Log($"Failed to retrieve site");
return null;
}
Regex regex = new Regex(@".*webtoons\.com\/en\/(?<category>[^\/]+)\/(?<title>[^\/]+)\/list\?title_no=(?<id>\d+).*");
Match match = regex.Match(url);
if(match.Success) {
PublicationManager pm = new PublicationManager(match.Groups["title"].Value, match.Groups["category"].Value, match.Groups["id"].Value);
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, pm.getPublicationId(), url);
}
Log($"Failed match Regex ID");
return null;
}
// Done
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
{
HtmlNode mangaList = document.DocumentNode.SelectSingleNode("//ul[contains(@class, 'card_lst')]");
if (!mangaList.ChildNodes.Any(node => node.Name == "li")) {
Log($"Failed to parse publication");
return Array.Empty<Manga>();
}
List<string> urls = document.DocumentNode
.SelectNodes("//ul[contains(@class, 'card_lst')]/li/a")
.Select(node => node.GetAttributeValue("href", "https://www.webtoons.com"))
.ToList();
HashSet<Manga> ret = new();
foreach (string url in urls)
{
Manga? manga = GetMangaFromUrl(url);
if (manga is not null)
ret.Add((Manga)manga);
}
return ret.ToArray();
}
private string capitalizeString(string str = "") {
if(str.Length == 0) return "";
if(str.Length == 1) return str.ToUpper();
return char.ToUpper(str[0]) + str.Substring(1).ToLower();
}
// Done
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
HtmlNode infoNode1 = document.DocumentNode.SelectSingleNode("//*[@id='content']/div[2]/div[1]/div[1]");
HtmlNode infoNode2 = document.DocumentNode.SelectSingleNode("//*[@id='content']/div[2]/div[2]/div[2]");
string sortName = infoNode1.SelectSingleNode(".//h1[contains(@class, 'subj')]").InnerText;
string description = infoNode2.SelectSingleNode(".//p[contains(@class, 'summary')]")
.InnerText.Trim();
HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[contains(@class, 'detail_body') and contains(@class, 'banner')]");
Regex regex = new Regex(@"url\('(?<url>.*?)'\)");
Match match = regex.Match(posterNode.GetAttributeValue("style", ""));
string posterUrl = match.Groups["url"].Value;
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover, websiteUrl);
string genre = infoNode1.SelectSingleNode(".//h2[contains(@class, 'genre')]")
.InnerText.Trim();
string[] tags = [ genre ];
List<HtmlNode> authorsNodes = infoNode1.SelectSingleNode(".//div[contains(@class, 'author_area')]").Descendants("a").ToList();
List<string> authors = authorsNodes.Select(node => node.InnerText.Trim()).ToList();
string originalLanguage = "";
int year = DateTime.Now.Year;
string status1 = infoNode2.SelectSingleNode(".//p").InnerText;
string status2 = infoNode2.SelectSingleNode(".//p/span").InnerText;
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
if(status2.Length == 0 || status1.ToLower() == "completed") {
releaseStatus = Manga.ReleaseStatusByte.Completed;
} else if(status2.ToLower() == "up") {
releaseStatus = Manga.ReleaseStatusByte.Continuing;
}
Manga manga = new(sortName, authors, description, new Dictionary<string, string>(), tags, posterUrl, coverFileNameInCache, new Dictionary<string, string>(),
year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
AddMangaToCache(manga);
return manga;
}
// Done
public override Chapter[] GetChapters(Manga manga, string language = "en")
{
PublicationManager pm = new PublicationManager(manga.publicationId);
string requestUrl = $"https://www.webtoons.com/en/{pm.Category}/{pm.Title}/list?title_no={pm.Id}";
// Leaving this in for verification if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Chapter>();
// Get number of pages
int pages = requestResult.htmlDocument.DocumentNode
.SelectNodes("//div[contains(@class, 'paginate')]/a")
.ToList()
.Count;
List<Chapter> chapters = new List<Chapter>();
for(int page = 1; page <= pages; page++) {
string pageRequestUrl = $"{requestUrl}&page={page}";
chapters.AddRange(ParseChaptersFromHtml(manga, pageRequestUrl));
}
Log($"Got {chapters.Count} chapters. {manga}");
return chapters.Order().ToArray();
}
// Done
private List<Chapter> ParseChaptersFromHtml(Manga manga, string mangaUrl)
{
RequestResult result = downloadClient.MakeRequest(mangaUrl, RequestType.Default);
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
{
Log("Failed to load site");
return new List<Chapter>();
}
List<Chapter> ret = new();
foreach (HtmlNode chapterInfo in result.htmlDocument.DocumentNode.SelectNodes("//ul/li[contains(@class, '_episodeItem')]"))
{
HtmlNode infoNode = chapterInfo.SelectSingleNode(".//a");
string url = infoNode.GetAttributeValue("href", "");
string id = chapterInfo.GetAttributeValue("id", "");
if(id == "") continue;
string? volumeNumber = null;
string chapterNumber = chapterInfo.GetAttributeValue("data-episode-no", "");
if(chapterNumber == "") continue;
string chapterName = infoNode.SelectSingleNode(".//span[contains(@class, 'subj')]/span").InnerText.Trim();
ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
}
return ret;
}
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
{
if (progressToken?.cancellationRequested ?? false)
{
progressToken.Cancel();
return HttpStatusCode.RequestTimeout;
}
Manga chapterParentManga = chapter.parentManga;
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
string requestUrl = chapter.url;
// Leaving this in to check if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
progressToken?.Cancel();
return requestResult.statusCode;
}
string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken, referrer: requestUrl);
}
private string[] ParseImageUrlsFromHtml(string mangaUrl)
{
RequestResult requestResult =
downloadClient.MakeRequest(mangaUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
return Array.Empty<string>();
}
if (requestResult.htmlDocument is null)
{
Log($"Failed to retrieve site");
return Array.Empty<string>();
}
return requestResult.htmlDocument.DocumentNode
.SelectNodes("//*[@id='_imageList']/img")
.Select(node =>
node.GetAttributeValue("data-url", ""))
.ToArray();
}
}
internal class PublicationManager {
public PublicationManager(string title = "", string category = "", string id = "") {
this.Title = title;
this.Category = category;
this.Id = id;
}
public PublicationManager(string publicationId) {
string[] parts = publicationId.Split("|");
if(parts.Length == 3) {
this.Title = parts[0];
this.Category = parts[1];
this.Id = parts[2];
} else {
this.Title = "";
this.Category = "";
this.Id = "";
}
}
public string getPublicationId() {
return $"{this.Title}|{this.Category}|{this.Id}";
}
public string Title { get; set; }
public string Category { get; set; }
public string Id { get; set; }
}
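PublicationManager above is simply a serializer for the composite publication id the Webtoons connector stores ("title|category|id"). A short usage sketch with made-up sample values, showing the round trip and how GetMangaFromId rebuilds the list URL from it:

// Sample values for illustration only.
PublicationManager pm = new PublicationManager("some-title", "fantasy", "95");
string publicationId = pm.getPublicationId();            // "some-title|fantasy|95"

PublicationManager parsed = new PublicationManager(publicationId);
string listUrl = $"https://www.webtoons.com/en/{parsed.Category}/{parsed.Title}/list?title_no={parsed.Id}";
// -> "https://www.webtoons.com/en/fantasy/some-title/list?title_no=95"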


@@ -0,0 +1,215 @@
using System.Net;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using Tranga.Jobs;
namespace Tranga.MangaConnectors;
public class Weebcentral : MangaConnector
{
private readonly string _baseUrl = "https://weebcentral.com";
private readonly string[] _filterWords =
{ "a", "the", "of", "as", "to", "no", "for", "on", "with", "be", "and", "in", "wa", "at", "be", "ni" };
public Weebcentral(GlobalBase clone) : base(clone, "Weebcentral", ["en"])
{
downloadClient = new ChromiumDownloadClient(clone);
}
public override Manga[] GetManga(string publicationTitle = "")
{
Log($"Searching Publications. Term=\"{publicationTitle}\"");
const int limit = 32; //How many values we want returned at once
int offset = 0; //"Page"
string requestUrl =
$"{_baseUrl}/search/data?limit={limit}&offset={offset}&text={publicationTitle}&sort=Best+Match&order=Ascending&official=Any&display_mode=Minimal%20Display";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 ||
requestResult.htmlDocument == null)
{
Log($"Failed to retrieve search: {requestResult.statusCode}");
return [];
}
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
return publications;
}
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
{
if (document.DocumentNode.SelectNodes("//article") == null)
return [];
List<string> urls = document.DocumentNode.SelectNodes("/html/body/article/a[@class='link link-hover tooltip tooltip-bottom']")
.Select(elem => elem.GetAttributeValue("href", "")).ToList();
HashSet<Manga> ret = new();
foreach (string url in urls)
{
Manga? manga = GetMangaFromUrl(url);
if (manga is not null)
ret.Add((Manga)manga);
}
return ret.ToArray();
}
public override Manga? GetMangaFromUrl(string url)
{
Regex publicationIdRex = new(@"https:\/\/weebcentral\.com\/series\/(\w*)\/(.*)");
string publicationId = publicationIdRex.Match(url).Groups[1].Value;
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 300 && (int)requestResult.statusCode >= 200 &&
requestResult.htmlDocument is not null)
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
return null;
}
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
HtmlNode? posterNode =
document.DocumentNode.SelectSingleNode("//section[@class='flex items-center justify-center']/picture/img");
string posterUrl = posterNode?.GetAttributeValue("src", "") ?? "";
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
HtmlNode? titleNode = document.DocumentNode.SelectSingleNode("//section/h1");
string sortName = titleNode?.InnerText ?? "Undefined";
HtmlNode[] authorsNodes =
document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Author(s): ']/span")?.ToArray() ?? [];
List<string> authors = authorsNodes.Select(n => n.InnerText).ToList();
HtmlNode[] genreNodes =
document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Tags(s): ']/span")?.ToArray() ?? [];
HashSet<string> tags = genreNodes.Select(n => n.InnerText).ToHashSet();
HtmlNode? statusNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Status: ']/a");
string status = statusNode?.InnerText ?? "";
Log("unable to parse status");
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
switch (status.ToLower())
{
case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
case "complete": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
}
HtmlNode? yearNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Released: ']/span");
int year = Convert.ToInt32(yearNode?.InnerText ?? "0");
HtmlNode? descriptionNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Description']/p");
string description = descriptionNode?.InnerText ?? "Undefined";
HtmlNode[] altTitleNodes = document.DocumentNode
.SelectNodes("//ul/li[strong/text() = 'Associated Name(s)']/ul/li")?.ToArray() ?? [];
Dictionary<string, string> altTitles = new(), links = new();
for (int i = 0; i < altTitleNodes.Length; i++)
altTitles.Add(i.ToString(), altTitleNodes[i].InnerText);
string originalLanguage = "";
Manga manga = new(sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
coverFileNameInCache, links,
year, originalLanguage, publicationId, releaseStatus, websiteUrl);
AddMangaToCache(manga);
return manga;
}
public override Manga? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://weebcentral.com/series/{publicationId}");
}
public override Chapter[] GetChapters(Manga manga, string language = "en")
{
Log($"Getting chapters {manga}");
string requestUrl = $"{_baseUrl}/series/{manga.publicationId}/full-chapter-list";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return [];
//Return Chapters ordered by Chapter-Number
if (requestResult.htmlDocument is null)
return [];
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
Log($"Got {chapters.Count} chapters. {manga}");
return chapters.OrderByDescending(c => c.name).ThenBy(c => c.volumeNumber).ThenBy(c => c.chapterNumber).ToArray();
}
private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
{
HtmlNode? chaptersWrapper = document.DocumentNode.SelectSingleNode("/html/body");
Regex chapterRex = new(@"(\d+(?:\.\d+)*)");
Regex chapterNameRex = new(@"(\w* )+");
Regex idRex = new(@"https:\/\/weebcentral\.com\/chapters\/(\w*)");
List<Chapter> ret = chaptersWrapper.Descendants("a").Select(elem =>
{
string url = elem.GetAttributeValue("href", "") ?? "Undefined";
if (!url.StartsWith("https://") && !url.StartsWith("http://"))
return new Chapter(manga, null, null, "-1", "undefined");
Match idMatch = idRex.Match(url);
string? id = idMatch.Success ? idMatch.Groups[1].Value : null;
string chapterNode = elem.SelectSingleNode("span[@class='grow flex items-center gap-2']/span")?.InnerText ??
"Undefined";
MatchCollection chapterNumberMatch = chapterRex.Matches(chapterNode);
string chapterNumber = chapterNumberMatch.Count > 0 ? chapterNumberMatch[^1].Groups[1].Value : "-1";
MatchCollection chapterNameMatch = chapterNameRex.Matches(chapterNode);
string chapterName = chapterNameMatch.Count > 0
? string.Join(" - ",
chapterNameMatch.Select(m => m.Groups[1].Value.Trim())
.Where(name => name.Length > 0 && !name.Equals("Chapter", StringComparison.OrdinalIgnoreCase)).ToArray()).Trim()
: "";
return new Chapter(manga, chapterName != "" ? chapterName : null, null, chapterNumber, url, id);
}).Where(elem => elem.chapterNumber != -1 && elem.url != "undefined").ToList();
ret.Reverse();
return ret;
}
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
{
if (progressToken?.cancellationRequested ?? false)
{
progressToken.Cancel();
return HttpStatusCode.RequestTimeout;
}
Manga chapterParentManga = chapter.parentManga;
if (progressToken?.cancellationRequested ?? false)
{
progressToken.Cancel();
return HttpStatusCode.RequestTimeout;
}
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
RequestResult requestResult = downloadClient.MakeRequest(chapter.url, RequestType.Default);
if (requestResult.htmlDocument is null)
{
progressToken?.Cancel();
return HttpStatusCode.RequestTimeout;
}
HtmlDocument? document = requestResult.htmlDocument;
HtmlNode[] imageNodes =
document.DocumentNode.SelectNodes($"//section[@hx-get='{chapter.url}/images']/img")?.ToArray() ?? [];
string[] urls = imageNodes.Select(imgNode => imgNode.GetAttributeValue("src", "")).ToArray();
return DownloadChapterImages(urls, chapter, RequestType.MangaImage, progressToken: progressToken, referrer: "https://weebcentral.com/");
}
}
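ParseChaptersFromHtml above deliberately takes the last numeric match in a chapter label as the chapter number, so a leading token such as a season marker is not mistaken for it. A standalone sketch of that selection with a made-up label:

using System.Text.RegularExpressions;

// Same pattern as chapterRex in ParseChaptersFromHtml.
Regex chapterRex = new(@"(\d+(?:\.\d+)*)");
string label = "S2 Chapter 47.5";                         // sample label, not real site data
MatchCollection matches = chapterRex.Matches(label);
string chapterNumber = matches.Count > 0 ? matches[^1].Groups[1].Value : "-1";   // "47.5"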

Some files were not shown because too many files have changed in this diff.