89 Commits

Author SHA1 Message Date
f0de0a29da Merge pull request #400 from C9Glax/dependabot/github_actions/docker/build-push-action-6.18.0
Bump docker/build-push-action from 6.17.0 to 6.18.0
2025-05-28 15:50:47 +02:00
d4227f2b8f Bump docker/build-push-action from 6.17.0 to 6.18.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.17.0 to 6.18.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.17.0...v6.18.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-version: 6.18.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-05-28 05:59:24 +00:00
cd00d35f22 Merge pull request #395 from C9Glax/dependabot/github_actions/docker/build-push-action-6.17.0
Bump docker/build-push-action from 6.16.0 to 6.17.0
2025-05-16 16:15:40 +02:00
4ef3e877ce Bump docker/build-push-action from 6.16.0 to 6.17.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.16.0 to 6.17.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.16.0...v6.17.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-version: 6.17.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-05-16 05:31:47 +00:00
7dba2518f9 Merge pull request #388 from C9Glax/dependabot/github_actions/docker/build-push-action-6.16.0
Bump docker/build-push-action from 6.15.0 to 6.16.0
2025-04-25 09:01:00 +02:00
7506a0201e Bump docker/build-push-action from 6.15.0 to 6.16.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.15.0 to 6.16.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.15.0...v6.16.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-version: 6.16.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-04-25 05:51:29 +00:00
91fb815153 Update README.md
2025-03-29 21:17:22 +01:00
6faf8bc733 Merge pull request #376 from C9Glax/cuttingedge
Weebcentral fixes
2025-03-18 18:13:47 +01:00
bdff5b7aec Merge pull request #375 from TheyCallMeTravis/webtoons-search_regex_fix
webtoons - fix search regex
2025-03-18 18:12:35 +01:00
5af8060d7b Merge pull request #374 from TheyCallMeTravis/weebcentral-fixsearch
Weebcentral - Fix Search Results Parse
2025-03-18 18:12:23 +01:00
6ed8ff1d52 webtoons - fix search regex parsing 2025-03-18 10:12:42 -05:00
3324ed6e4a Weebcentral - Fix Search Results Parse 2025-03-17 14:29:09 -05:00
67fd9d284b Merge pull request #369 from TheyCallMeTravis/WeebCentral-add_referrer
WeebCentral - add referer to DownloadChapterImages
2025-03-15 10:24:28 +01:00
08f26dd21d add referer to DownloadChapterImages 2025-03-14 21:18:51 -05:00
89ed500751 Update actions for Server-V2
2025-03-08 19:02:14 +01:00
b00b0ee030 Merge branch 'master' into cuttingedge-merge-ServerV2
2025-03-08 19:00:42 +01:00
e47c52ad48 Merge pull request #367 from C9Glax/cuttingedge
Cuttingedge merge
2025-03-08 18:58:40 +01:00
293f0af8e3 Merge pull request #366 from merlinmarijn/manganato-domain-switch
Manganato connector search fix
2025-03-08 07:40:11 +01:00
ebfa34e386 Update Manganato.cs 2025-03-07 22:33:24 +01:00
14524407f9 Update Manganato.cs 2025-03-07 22:29:40 +01:00
d56f0b383a Merge pull request #365 from merlinmarijn/manganato-domain-switch
Manganato fix chapter naming format in CBZ files (i am sorry)
2025-03-07 21:54:06 +01:00
70391c83c1 Update Manganato.cs
i found out, i am stupid
2025-03-07 21:39:17 +01:00
dc7696ee26 Merge pull request #364 from merlinmarijn/manganato-domain-switch
Enforce correct referrer check for access to Manganato
2025-03-07 20:19:52 +01:00
49dab9a670 Referrer policy changed
- Updated: the image hosting platform seems to have changed its policy and now requires the referrer to be the actual site, instead of allowing any connection regardless of the referrer address
2025-03-07 19:57:27 +01:00
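For illustration, a minimal sketch of sending a page-specific Referer header with HttpClient. This shows the general technique only, not the connector's actual implementation; the image URL is a placeholder, and the referrer value mirrors the one used later in this changeset.

using System;
using System.Net.Http;

// Sketch: request an image while presenting the page that embeds it as the Referer,
// since the host no longer accepts arbitrary or missing referrers.
HttpClient client = new();
HttpRequestMessage request = new(HttpMethod.Get, "https://img.example.com/cover.jpg"); // placeholder URL
request.Headers.Referrer = new Uri("https://www.manganato.gg/");                       // page the image belongs to
HttpResponseMessage response = await client.SendAsync(request);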
c9bc79fbd5 Update new_connector.yml
2025-03-07 10:19:08 +01:00
83ce315f87 Merge pull request #357 from merlinmarijn/manganato-domain-switch
Update Manganato connector: Migrate from .com to .gg & Adjust HTML Parsing
#358 @merlinmarijn
2025-03-07 10:06:44 +01:00
59511056d0 added try around getting urls 2025-03-03 23:43:35 +01:00
ed3ca5dba8 removed leftover comment 2025-03-03 23:04:43 +01:00
8df05d7e8a fixed image referrer 2025-03-03 22:59:25 +01:00
95d1e37b47 Update Manganato.cs 2025-03-03 22:27:37 +01:00
b6494ab7f9 Merge pull request #354 from C9Glax/dependabot/github_actions/docker/setup-qemu-action-3.6.0
Bump docker/setup-qemu-action from 3.5.0 to 3.6.0
2025-03-03 13:54:59 +01:00
1d1d01b6e5 Bump docker/setup-qemu-action from 3.5.0 to 3.6.0
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 3.5.0 to 3.6.0.
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
- [Commits](https://github.com/docker/setup-qemu-action/compare/v3.5.0...v3.6.0)

---
updated-dependencies:
- dependency-name: docker/setup-qemu-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-03-03 05:09:43 +00:00
5bb4977876 Merge pull request #353 from ale-ben/cuttingedge
Weebcentral: File name also depends on original chapter name
2025-03-02 16:20:07 +01:00
c6bb1c9180 [cuttingedge] fix(Chapter): Minor logic change to account for all chapterName cases 2025-03-02 16:06:21 +01:00
9a066e7ac7 [cuttingedge] fix(Weebcentral): Updated CheckChapterIsDownloaded logic to also consider chapter name if present 2025-03-02 15:57:09 +01:00
4bafffded4 [cuttingedge] feat(Weebcentral): When ordering chapters, order by name descending to put special chapters first 2025-03-02 15:56:12 +01:00
942b43da67 Merge branch 'refs/heads/cuttingedge' into cuttingedge-merge-ServerV2 2025-03-02 10:06:48 +01:00
ce5538b352 Merge pull request #341 from Makhuta/cuttingedge
Added support for connector languages to HandleGet
2025-03-02 09:51:11 +01:00
0cfdf17bd4 Merge pull request #350 from C9Glax/dependabot/github_actions/docker/setup-buildx-action-3.10.0
Bump docker/setup-buildx-action from 3.9.0 to 3.10.0
2025-03-02 09:50:21 +01:00
0c48c1e020 Merge pull request #351 from C9Glax/dependabot/github_actions/docker/build-push-action-6.15.0
Bump docker/build-push-action from 6.14.0 to 6.15.0
2025-03-02 09:50:17 +01:00
0638e75ed6 Merge pull request #349 from C9Glax/dependabot/github_actions/docker/setup-qemu-action-3.5.0
Bump docker/setup-qemu-action from 3.4.0 to 3.5.0
2025-03-02 09:49:16 +01:00
5a4bc1c6de [cuttingedge] fix(Weebcentral): Handle case of chapter name with multiple number parts 2025-03-01 12:06:51 +01:00
71f663ca2f [cuttingedge] fix(Weebcentral): File name also depends on original chapter name 2025-03-01 11:39:12 +01:00
1b61a16061 Bump docker/build-push-action from 6.14.0 to 6.15.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.14.0 to 6.15.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.14.0...v6.15.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-27 05:25:23 +00:00
db81fdce39 Bump docker/setup-buildx-action from 3.9.0 to 3.10.0
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3.9.0 to 3.10.0.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3.9.0...v3.10.0)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-27 05:25:21 +00:00
fdb5451162 Bump docker/setup-qemu-action from 3.4.0 to 3.5.0
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 3.4.0 to 3.5.0.
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
- [Commits](https://github.com/docker/setup-qemu-action/compare/v3.4.0...v3.5.0)

---
updated-dependencies:
- dependency-name: docker/setup-qemu-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-27 05:25:19 +00:00
6b7632b071 Merge pull request #344 from C9Glax/dependabot/github_actions/docker/build-push-action-6.14.0
Bump docker/build-push-action from 6.13.0 to 6.14.0
2025-02-20 16:49:19 +01:00
06c080dfce Bump docker/build-push-action from 6.13.0 to 6.14.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.13.0 to 6.14.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.13.0...v6.14.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-20 05:50:25 +00:00
8130e11a9c Merge pull request #342 from C9Glax/cuttingedge
Cuttingedge master merge
2025-02-14 15:51:42 +01:00
659a42d370 Add
- ability to get a connector's supported languages for use in the monitoring language selector
2025-02-12 14:07:13 +01:00
9cef068785 Merge pull request #340 from Makhuta/cuttingedge
Fix the Webtoons connector fetching some chapters multiple times
2025-02-11 21:24:14 +01:00
4ad3149523 Fix
- fixed chapter parsing: the page count was read incorrectly, so chapters from the last page were added multiple times (downloads still completed, but those chapters were queued for download repeatedly)
2025-02-11 20:16:30 +01:00
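The replacement logic, visible in the Webtoons connector added later in this changeset, derives the page count from the paginator and requests each page exactly once. A condensed sketch of that loop, with names taken from the connector code shown below:

// Count the pagination links once, then parse each page a single time.
int pages = requestResult.htmlDocument.DocumentNode
    .SelectNodes("//div[contains(@class, 'paginate')]/a")
    .Count;
List<Chapter> chapters = new();
for (int page = 1; page <= pages; page++)
    chapters.AddRange(ParseChaptersFromHtml(manga, $"{requestUrl}&page={page}"));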
e6d40a7b36 Remove unused code from Weebcentral
2025-02-09 18:37:55 +01:00
a95cb90561 Update Nuget packages 2025-02-09 18:37:41 +01:00
603e1b41d9 Add Chromium referer header 2025-02-09 18:37:33 +01:00
bb8a514830 Do not create .duplicate files anymore.
Just warn in the log and delete (or attempt to delete) the file
2025-02-09 17:57:08 +01:00
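A minimal sketch of the described behaviour. The JobBoss diff further below only shows the changed log message, so the delete call here is an assumption based on this commit description, not the exact code:

if (!AddJob(job, file.FullName)) // duplicate detected, or the job could not be added
{
    Log($"Duplicate detected or otherwise not able to add job to list. Removing file {file.FullName}");
    try { file.Delete(); }                                  // attempt to delete instead of renaming to .duplicate
    catch (Exception e) { Log($"Could not delete {file.FullName}: {e.Message}"); }
}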
edacaaba8a Update Readme 2025-02-09 17:38:54 +01:00
d97da26994 spelling error 2025-02-09 17:37:53 +01:00
8b923d73c4 Merge pull request #337 from Makhuta/cuttingedge
Add Manga Connector
2025-02-09 17:34:08 +01:00
814efd3528 Merge remote-tracking branch 'origin/cuttingedge' into cuttingedge 2025-02-09 17:28:03 +01:00
2cd5d8bc4f Merge branch 'cuttingedge-merge-ServerV2' into cuttingedge 2025-02-09 17:27:42 +01:00
5a864ab9b7 Remove Manga4Life 2025-02-09 17:27:35 +01:00
c700974693 Merge pull request #339 from C9Glax/dependabot/github_actions/docker/setup-qemu-action-3.4.0
Bump docker/setup-qemu-action from 3.3.0 to 3.4.0
2025-02-09 17:18:42 +01:00
553b5558d3 Merge pull request #338 from C9Glax/dependabot/github_actions/docker/setup-buildx-action-3.9.0
Bump docker/setup-buildx-action from 3.8.0 to 3.9.0
2025-02-09 17:18:25 +01:00
c9bbfee26b Merge pull request #331 from C9Glax/dependabot/github_actions/docker/build-push-action-6.13.0
Bump docker/build-push-action from 6.12.0 to 6.13.0
2025-02-09 17:18:10 +01:00
6e869eeb0d Bump docker/setup-qemu-action from 3.3.0 to 3.4.0
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 3.3.0 to 3.4.0.
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
- [Commits](https://github.com/docker/setup-qemu-action/compare/v3.3.0...v3.4.0)

---
updated-dependencies:
- dependency-name: docker/setup-qemu-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-07 05:13:18 +00:00
be7da69dbd Bump docker/setup-buildx-action from 3.8.0 to 3.9.0
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3.8.0 to 3.9.0.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3.8.0...v3.9.0)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-07 05:13:16 +00:00
7f13d9b1e6 Fix
- forgotten comma
2025-02-06 15:39:06 +01:00
0c9e3205c2 Add Manga Connector
- added [Webtoon](https://www.webtoons.com) manga connector
- modified/added support for saving covers with a referrer
2025-02-06 15:37:30 +01:00
8c3b70b32e Bump docker/build-push-action from 6.12.0 to 6.13.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.12.0 to 6.13.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.12.0...v6.13.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-01-27 06:02:55 +00:00
4f7031ecfc Merge pull request #328 from ale-ben/cuttingedge
fix: Add escape to Weebcentral regex
2025-01-25 23:26:48 +01:00
f7a285aabd [cuttingedge] fix: Add escape to Weebcentral regex 2025-01-25 11:40:00 +01:00
786482398c Merge pull request #327 from ale-ben/cuttingedge
Fix bug that prevented download of chapter 0
2025-01-24 22:09:53 +01:00
7921dcb1cb [cuttingedge] fix: Change condition for newChapters. Should solve #323 2025-01-24 21:52:52 +01:00
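The underlying change (see the MangaConnector diff further below) makes the ignoreChaptersBelow comparison inclusive, so chapter 0 survives the filter when ignoreChaptersBelow is 0. A condensed sketch of the resulting filter:

// Before: strict '>' dropped chapters whose number equals ignoreChaptersBelow (e.g. chapter 0 when the threshold is 0).
// After: '>=' keeps them; only already-downloaded chapters are filtered out.
List<Chapter> newChaptersList = allChapters
    .Where(nChapter => nChapter.chapterNumber >= manga.ignoreChaptersBelow
                       && !nChapter.CheckChapterIsDownloaded())
    .ToList();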
d0c9313279 Merge pull request #322 from C9Glax/dependabot/github_actions/docker/build-push-action-6.12.0
Bump docker/build-push-action from 6.11.0 to 6.12.0
2025-01-16 17:00:36 +01:00
58cf4cf4e0 Bump docker/build-push-action from 6.11.0 to 6.12.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.11.0 to 6.12.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.11.0...v6.12.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-01-16 05:52:33 +00:00
280d715a7c Merge branch 'cuttingedge-merge-ServerV2' into cuttingedge
2025-01-15 23:14:20 +01:00
b4edcccafe Merge branch 'cuttingedge-merge-ServerV2' into cuttingedge 2025-01-15 23:02:55 +01:00
1701881f4b Merge branch 'cuttingedge-merge-ServerV2' into cuttingedge 2025-01-15 22:53:47 +01:00
e5be5703f8 Merge branch 'cuttingedge-merge-ServerV2' into cuttingedge 2025-01-15 22:25:16 +01:00
ce217aae4f Merge remote-tracking branch 'origin/cuttingedge' into cuttingedge 2025-01-15 22:18:11 +01:00
3abf7224d0 Merge pull request #316 from ale-ben/cuttingedge
Fixed regex to capture chapters with decimal (1.5, ..)
2025-01-11 00:41:20 +01:00
b39dbd5671 [cuttingedge] fix(weebcentral): Fixed regex to capture chapters with decimal (1.5, ..) 2025-01-10 22:10:34 +01:00
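The exact Weebcentral pattern is not part of this changeset; as a hedged illustration only, a pattern of this shape captures whole and decimal chapter numbers alike:

using System.Text.RegularExpressions;

// Hypothetical illustration - not the repository's actual Weebcentral regex.
Regex chapterRex = new(@"Chapter (\d+(?:\.\d+)?)");
// "Chapter 1"   -> group 1 = "1"
// "Chapter 1.5" -> group 1 = "1.5"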
375fad0c21 Merge pull request #314 from C9Glax/dependabot/github_actions/docker/setup-qemu-action-3.3.0
Bump docker/setup-qemu-action from 3.2.0 to 3.3.0
2025-01-10 11:02:46 +01:00
ee0d17c24f Merge pull request #315 from C9Glax/dependabot/github_actions/docker/build-push-action-6.11.0
Bump docker/build-push-action from 6.9.0 to 6.11.0
2025-01-10 11:02:26 +01:00
36ab3c3fdb Bump docker/build-push-action from 6.9.0 to 6.11.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 6.9.0 to 6.11.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v6.9.0...v6.11.0)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-01-10 05:46:13 +00:00
c3d60c6586 Bump docker/setup-qemu-action from 3.2.0 to 3.3.0
Bumps [docker/setup-qemu-action](https://github.com/docker/setup-qemu-action) from 3.2.0 to 3.3.0.
- [Release notes](https://github.com/docker/setup-qemu-action/releases)
- [Commits](https://github.com/docker/setup-qemu-action/compare/v3.2.0...v3.3.0)

---
updated-dependencies:
- dependency-name: docker/setup-qemu-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-01-10 05:46:10 +00:00
b96ae4a2d2 Merge pull request #304 from C9Glax/dependabot/github_actions/docker/setup-buildx-action-3.8.0
Bump docker/setup-buildx-action from 3.7.1 to 3.8.0
2024-12-17 17:38:07 +01:00
3a25c0b221 Bump docker/setup-buildx-action from 3.7.1 to 3.8.0
Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3.7.1 to 3.8.0.
- [Release notes](https://github.com/docker/setup-buildx-action/releases)
- [Commits](https://github.com/docker/setup-buildx-action/compare/v3.7.1...v3.8.0)

---
updated-dependencies:
- dependency-name: docker/setup-buildx-action
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-12-17 05:59:19 +00:00
18 changed files with 425 additions and 420 deletions

View File

@@ -12,7 +12,7 @@ body:
- type: checkboxes
attributes:
label: Is the Website free to access?
description: We can't support pay-to-use sites.
description: We can't support pay-to-use sites, or captcha-proxied sites such as Cloudflare.
options:
- label: The Website is freely accessible.
required: true
@@ -20,4 +20,4 @@ body:
attributes:
label: Anything else?
validations:
required: false
required: false

View File

@@ -17,12 +17,12 @@ jobs:
# https://github.com/docker/setup-qemu-action#usage
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
uses: docker/setup-qemu-action@v3.6.0
# https://github.com/marketplace/actions/docker-setup-buildx
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3.7.1
uses: docker/setup-buildx-action@v3.10.0
# https://github.com/docker/login-action#docker-hub
- name: Login to Docker Hub
@@ -33,7 +33,7 @@ jobs:
# https://github.com/docker/build-push-action#multi-platform-image
- name: Build and push API
uses: docker/build-push-action@v6.9.0
uses: docker/build-push-action@v6.18.0
with:
context: ./
file: ./Dockerfile

View File

@@ -1,45 +0,0 @@
name: Docker Image CI
on:
push:
branches: [ "dev" ]
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
# https://github.com/docker/setup-qemu-action#usage
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
# https://github.com/marketplace/actions/docker-setup-buildx
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3.7.1
# https://github.com/docker/login-action#docker-hub
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# https://github.com/docker/build-push-action#multi-platform-image
- name: Build and push API
uses: docker/build-push-action@v6.9.0
with:
context: ./
file: ./Dockerfile
#platforms: linux/amd64,linux/arm64,linux/riscv64,linux/ppc64le,linux/s390x,linux/386,linux/mips64le,linux/mips64,linux/arm/v7,linux/arm/v6
platforms: linux/amd64,linux/arm64
pull: true
push: true
tags: |
glax/tranga-api:dev

View File

@@ -17,12 +17,12 @@ jobs:
# https://github.com/docker/setup-qemu-action#usage
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
uses: docker/setup-qemu-action@v3.6.0
# https://github.com/marketplace/actions/docker-setup-buildx
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3.7.1
uses: docker/setup-buildx-action@v3.10.0
# https://github.com/docker/login-action#docker-hub
- name: Login to Docker Hub
@@ -33,7 +33,7 @@ jobs:
# https://github.com/docker/build-push-action#multi-platform-image
- name: Build and push API
uses: docker/build-push-action@v6.9.0
uses: docker/build-push-action@v6.18.0
with:
context: ./
file: ./Dockerfile

View File

@@ -2,7 +2,7 @@ name: Docker Image CI
on:
push:
branches: [ "Server-V2" ]
branches: [ "postgres-Server-V2" ]
workflow_dispatch:
jobs:
@@ -17,12 +17,12 @@ jobs:
# https://github.com/docker/setup-qemu-action#usage
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
uses: docker/setup-qemu-action@v3.6.0
# https://github.com/marketplace/actions/docker-setup-buildx
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3.7.1
uses: docker/setup-buildx-action@v3.10.0
# https://github.com/docker/login-action#docker-hub
- name: Login to Docker Hub
@@ -33,7 +33,7 @@ jobs:
# https://github.com/docker/build-push-action#multi-platform-image
- name: Build and push API
uses: docker/build-push-action@v6.9.0
uses: docker/build-push-action@v6.18.0
with:
context: ./
file: ./Dockerfile

View File

@@ -1,3 +1,7 @@
# Testers for V2 wanted!
[Details](https://github.com/C9Glax/tranga/pull/355#issuecomment-2764217944)
<!-- PROJECT LOGO -->
<br />
<div align="center">
@@ -45,14 +49,13 @@ Tranga can download Chapters and Metadata from "Scanlation" sites such as
- [MangaDex.org](https://mangadex.org/) (Multilingual)
- [Manganato.com](https://manganato.com/) (en)
- [Mangasee.com](https://mangasee123.com/) (en)
- [MangaKatana.com](https://mangakatana.com) (en)
- [Mangaworld.bz](https://www.mangaworld.bz/) (it)
- [Bato.to](https://bato.to/v3x) (en)
- [Manga4Life](https://manga4life.com) (en)
- [ManhuaPlus](https://manhuaplus.org/) (en)
- [MangaHere](https://www.mangahere.cc/) (en) (Their covers aren't scrapeable.)
- [Weebcentral](https://weebcentral.com) (en)
- [Webtoons](https://www.webtoons.com/en/)
- ❓ Open an [issue](https://github.com/C9Glax/tranga/issues/new?assignees=&labels=New+Connector&projects=&template=new_connector.yml&title=%5BNew+Connector%5D%3A+)
and trigger a library-scan with [Komga](https://komga.org/) and [Kavita](https://www.kavitareader.com/).

View File

@@ -44,7 +44,7 @@ public readonly struct Chapter : IComparable
if (name is not null && name.Length > 0)
{
string chapterName = IllegalStrings.Replace(string.Concat(LegalCharacters.Matches(name)), "");
this.fileName = $"{chapterVolNumStr} - {chapterName}";
this.fileName = chapterName.Length > 0 ? $"{chapterVolNumStr} - {chapterName}" : chapterVolNumStr;
}
else
this.fileName = chapterVolNumStr;
@@ -96,17 +96,20 @@ public readonly struct Chapter : IComparable
if(mangaArchive is null)
{
FileInfo[] archives = new DirectoryInfo(mangaDirectory).GetFiles("*.cbz");
Regex volChRex = new(@"(?:Vol(?:ume)?\.([0-9]+)\D*)?Ch(?:apter)?\.([0-9]+(?:\.[0-9]+)*)");
Regex volChRex = new(@"(?:Vol(?:ume)?\.([0-9]+)\D*)?Ch(?:apter)?\.([0-9]+(?:\.[0-9]+)*)(?: - (.*))?.cbz");
Chapter t = this;
mangaArchive = archives.FirstOrDefault(archive =>
{
Match m = volChRex.Match(archive.Name);
if (m.Groups[1].Success)
return m.Groups[1].Value == t.volumeNumber.ToString(GlobalBase.numberFormatDecimalPoint) &&
m.Groups[2].Value == t.chapterNumber.ToString(GlobalBase.numberFormatDecimalPoint);
else
return m.Groups[2].Value == t.chapterNumber.ToString(GlobalBase.numberFormatDecimalPoint);
/*
* 1. If the volumeNumber is not present in the filename, it is not checked.
* 2. Check the chapterNumber in the chapter against the one in the filename.
* 3. The chapterName has to either be absent in both the chapter and the filename, or match.
*/
return (!m.Groups[1].Success || m.Groups[1].Value == t.volumeNumber.ToString(GlobalBase.numberFormatDecimalPoint)) &&
m.Groups[2].Value == t.chapterNumber.ToString(GlobalBase.numberFormatDecimalPoint) &&
((!m.Groups[3].Success && string.IsNullOrEmpty(t.name)) || m.Groups[3].Value == t.name);
});
}
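For illustration, hypothetical archive names and how the updated pattern above would group them (these filenames are examples, not taken from the repository):

// "Vol.2 Ch.10.5 - Special.cbz" -> volume "2", chapter "10.5", name "Special"
// "Ch.3.cbz"                    -> no volume group, chapter "3", no name group
// Per the rules above, a chapter whose name is empty only matches an archive without a " - <name>" part.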

View File

@@ -167,9 +167,10 @@ public class JobBoss : GlobalBase
Log($"Adding Job {job}");
if (!AddJob(job, file.FullName)) //If we detect a duplicate, delete the file.
{
string path = string.Concat(file.FullName, ".duplicate");
file.MoveTo(path);
Log($"Duplicate detected or otherwise not able to add job to list.\nMoved job {job} to {path}");
//string path = string.Concat(file.FullName, ".duplicate");
//file.MoveTo(path);
//Log($"Duplicate detected or otherwise not able to add job to list.\nMoved job {job} to {path}");
Log($"Duplicate detected or otherwise not able to add job to list. Removed the file {file.FullName} {job}");
}
}
catch (Exception e)

View File

@@ -73,6 +73,7 @@ internal class ChromiumDownloadClient : DownloadClient
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
IPage page = _browser.NewPageAsync().Result;
page.DefaultTimeout = TrangaSettings.ChromiumPageTimeoutMs;
page.SetExtraHttpHeadersAsync(new() { { "Referer", referrer } });
IResponse response;
try
{

View File

@@ -60,7 +60,7 @@ public abstract class MangaConnector : GlobalBase
return Array.Empty<Chapter>();
Log($"Checking for duplicates {manga}");
List<Chapter> newChaptersList = allChapters.Where(nChapter => nChapter.chapterNumber > manga.ignoreChaptersBelow
List<Chapter> newChaptersList = allChapters.Where(nChapter => nChapter.chapterNumber >= manga.ignoreChaptersBelow
&& !nChapter.CheckChapterIsDownloaded()).ToList();
Log($"{newChaptersList.Count} new chapters. {manga}");
try
@@ -213,7 +213,7 @@ public abstract class MangaConnector : GlobalBase
return HttpStatusCode.OK;
}
protected string SaveCoverImageToCache(string url, string mangaInternalId, RequestType requestType)
protected string SaveCoverImageToCache(string url, string mangaInternalId, RequestType requestType, string? referrer = null)
{
Regex urlRex = new (@"https?:\/\/((?:[a-zA-Z0-9-]+\.)+[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+))");
//https?:\/\/[a-zA-Z0-9-]+\.([a-zA-Z0-9-]+\.[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+)) for only second level domains
@@ -224,7 +224,7 @@ public abstract class MangaConnector : GlobalBase
if (File.Exists(saveImagePath))
return saveImagePath;
RequestResult coverResult = downloadClient.MakeRequest(url, requestType);
RequestResult coverResult = downloadClient.MakeRequest(url, requestType, referrer);
using MemoryStream ms = new();
coverResult.result.CopyTo(ms);
Directory.CreateDirectory(TrangaSettings.coverImageCache);

View File

@@ -34,11 +34,11 @@ public class MangaConnectorJsonConverter : JsonConverter
"MangaKatana" => this._connectors.First(c => c is MangaKatana),
"Mangaworld" => this._connectors.First(c => c is Mangaworld),
"Bato" => this._connectors.First(c => c is Bato),
"Manga4Life" => this._connectors.First(c => c is MangaLife),
"ManhuaPlus" => this._connectors.First(c => c is ManhuaPlus),
"MangaHere" => this._connectors.First(c => c is MangaHere),
"AsuraToon" => this._connectors.First(c => c is AsuraToon),
"Weebcentral" => this._connectors.First(c => c is Weebcentral),
"Webtoons" => this._connectors.First(c => c is Webtoons),
_ => throw new UnreachableException($"Could not find Connector with name {connectorName}")
};
}

View File

@@ -1,206 +0,0 @@
using System.Net;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using Tranga.Jobs;
namespace Tranga.MangaConnectors;
public class MangaLife : MangaConnector
{
public MangaLife(GlobalBase clone) : base(clone, "Manga4Life", ["en"])
{
this.downloadClient = new ChromiumDownloadClient(clone);
}
public override Manga[] GetManga(string publicationTitle = "")
{
Log($"Searching Publications. Term=\"{publicationTitle}\"");
string sanitizedTitle = WebUtility.UrlEncode(publicationTitle);
string requestUrl = $"https://manga4life.com/search/?name={sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Manga>();
if (requestResult.htmlDocument is null)
return Array.Empty<Manga>();
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
return publications;
}
public override Manga? GetMangaFromId(string publicationId)
{
return GetMangaFromUrl($"https://manga4life.com/manga/{publicationId}");
}
public override Manga? GetMangaFromUrl(string url)
{
Regex publicationIdRex = new(@"https:\/\/(www\.)?manga4life.com\/manga\/(.*)(\/.*)*");
string publicationId = publicationIdRex.Match(url).Groups[2].Value;
RequestResult requestResult = this.downloadClient.MakeRequest(url, RequestType.MangaInfo);
if(requestResult.htmlDocument is not null)
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
return null;
}
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
{
HtmlNode resultsNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']/div[last()]/div[1]/div");
if (resultsNode.Descendants("div").Count() == 1 && resultsNode.Descendants("div").First().HasClass("NoResults"))
{
Log("No results.");
return Array.Empty<Manga>();
}
Log($"{resultsNode.SelectNodes("div").Count} items.");
HashSet<Manga> ret = new();
foreach (HtmlNode resultNode in resultsNode.SelectNodes("div"))
{
string url = resultNode.Descendants().First(d => d.HasClass("SeriesName")).GetAttributeValue("href", "");
Manga? manga = GetMangaFromUrl($"https://manga4life.com{url}");
if (manga is not null)
ret.Add((Manga)manga);
}
return ret.ToArray();
}
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
string originalLanguage = "", status = "";
Dictionary<string, string> altTitles = new(), links = new();
HashSet<string> tags = new();
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//img");
string posterUrl = posterNode.GetAttributeValue("src", "");
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//h1");
string sortName = titleNode.InnerText;
HtmlNode[] authorsNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Author(s):']/..").Descendants("a")
.ToArray();
List<string> authors = new();
foreach (HtmlNode authorNode in authorsNodes)
authors.Add(authorNode.InnerText);
HtmlNode[] genreNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Genre(s):']/..").Descendants("a")
.ToArray();
foreach (HtmlNode genreNode in genreNodes)
tags.Add(genreNode.InnerText);
HtmlNode yearNode = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Released:']/..").Descendants("a")
.First();
int year = Convert.ToInt32(yearNode.InnerText);
HtmlNode[] statusNodes = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Status:']/..").Descendants("a")
.ToArray();
foreach (HtmlNode statusNode in statusNodes)
if (statusNode.InnerText.Contains("publish", StringComparison.CurrentCultureIgnoreCase))
status = statusNode.InnerText.Split(' ')[0];
switch (status.ToLower())
{
case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
case "discontinued": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
case "complete": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
}
HtmlNode descriptionNode = document.DocumentNode
.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Description:']/..")
.Descendants("div").First();
string description = descriptionNode.InnerText;
Manga manga = new(sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
coverFileNameInCache, links, year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
AddMangaToCache(manga);
return manga;
}
public override Chapter[] GetChapters(Manga manga, string language="en")
{
Log($"Getting chapters {manga}");
RequestResult result = downloadClient.MakeRequest($"https://manga4life.com/manga/{manga.publicationId}", RequestType.Default, clickButton:"[class*='ShowAllChapters']");
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
{
return Array.Empty<Chapter>();
}
HtmlNodeCollection chapterNodes = result.htmlDocument.DocumentNode.SelectNodes(
"//a[contains(concat(' ',normalize-space(@class),' '),' ChapterLink ')]");
string[] urls = chapterNodes.Select(node => node.GetAttributeValue("href", "")).ToArray();
Regex urlRex = new (@"-chapter-([0-9\\.]+)(-index-([0-9\\.]+))?");
List<Chapter> chapters = new();
foreach (string url in urls)
{
Match rexMatch = urlRex.Match(url);
string volumeNumber = "1";
if (rexMatch.Groups[3].Value.Length > 0)
volumeNumber = rexMatch.Groups[3].Value;
string chapterNumber = rexMatch.Groups[1].Value;
string fullUrl = $"https://manga4life.com{url}";
fullUrl = fullUrl.Replace(Regex.Match(url,"(-page-[0-9])").Value,"");
try
{
chapters.Add(new Chapter(manga, "", volumeNumber, chapterNumber, fullUrl));
}
catch (Exception e)
{
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
}
}
//Return Chapters ordered by Chapter-Number
Log($"Got {chapters.Count} chapters. {manga}");
return chapters.Order().ToArray();
}
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
{
if (progressToken?.cancellationRequested ?? false)
{
progressToken.Cancel();
return HttpStatusCode.RequestTimeout;
}
Manga chapterParentManga = chapter.parentManga;
if (progressToken?.cancellationRequested ?? false)
{
progressToken.Cancel();
return HttpStatusCode.RequestTimeout;
}
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
RequestResult requestResult = this.downloadClient.MakeRequest(chapter.url, RequestType.Default);
if (requestResult.htmlDocument is null)
{
progressToken?.Cancel();
return HttpStatusCode.RequestTimeout;
}
HtmlDocument document = requestResult.htmlDocument;
HtmlNode gallery = document.DocumentNode.Descendants("div").First(div => div.HasClass("ImageGallery"));
HtmlNode[] images = gallery.Descendants("img").Where(img => img.HasClass("img-fluid")).ToArray();
List<string> urls = new();
foreach(HtmlNode galleryImage in images)
urls.Add(galleryImage.GetAttributeValue("src", ""));
string comicInfoPath = Path.GetTempFileName();
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
return DownloadChapterImages(urls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
}
}

View File

@@ -17,7 +17,7 @@ public class Manganato : MangaConnector
{
Log($"Searching Publications. Term=\"{publicationTitle}\"");
string sanitizedTitle = string.Join('_', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
string requestUrl = $"https://manganato.com/search/story/{sanitizedTitle}";
string requestUrl = $"https://manganato.gg/search/story/{sanitizedTitle}";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
@@ -32,13 +32,19 @@ public class Manganato : MangaConnector
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
{
List<HtmlNode> searchResults = document.DocumentNode.Descendants("div").Where(n => n.HasClass("search-story-item")).ToList();
List<HtmlNode> searchResults = document.DocumentNode.Descendants("div").Where(n => n.HasClass("story_item")).ToList();
Log($"{searchResults.Count} items.");
List<string> urls = new();
foreach (HtmlNode mangaResult in searchResults)
{
urls.Add(mangaResult.Descendants("a").First(n => n.HasClass("item-title")).GetAttributes()
.First(a => a.Name == "href").Value);
try
{
urls.Add(mangaResult.Descendants("h3").First(n => n.HasClass("story_name"))
.Descendants("a").First().GetAttributeValue("href", ""));
} catch
{
//failed to get a url, send it to the void
}
}
HashSet<Manga> ret = new();
@@ -78,61 +84,49 @@ public class Manganato : MangaConnector
string originalLanguage = "";
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
HtmlNode infoNode = document.DocumentNode.Descendants("div").First(d => d.HasClass("story-info-right"));
HtmlNode infoNode = document.DocumentNode.Descendants("ul").First(d => d.HasClass("manga-info-text"));
string sortName = infoNode.Descendants("h1").First().InnerText;
HtmlNode infoTable = infoNode.Descendants().First(d => d.Name == "table");
foreach (HtmlNode row in infoTable.Descendants("tr"))
foreach (HtmlNode li in infoNode.Descendants("li"))
{
string key = row.SelectNodes("td").First().InnerText.ToLower();
string value = row.SelectNodes("td").Last().InnerText;
string keySanitized = string.Concat(Regex.Matches(key, "[a-z]"));
switch (keySanitized)
string text = li.InnerText.Trim().ToLower();
if (text.StartsWith("author(s) :"))
{
case "alternative":
string[] alts = value.Split(" ; ");
for(int i = 0; i < alts.Length; i++)
altTitles.Add(i.ToString(), alts[i]);
break;
case "authors":
authors = value.Split('-');
for (int i = 0; i < authors.Length; i++)
authors[i] = authors[i].Replace("\r\n", "");
break;
case "status":
switch (value.ToLower())
{
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
case "completed": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
}
break;
case "genres":
string[] genres = value.Split(" - ");
for (int i = 0; i < genres.Length; i++)
genres[i] = genres[i].Replace("\r\n", "");
tags = genres.ToHashSet();
break;
authors = li.Descendants("a").Select(a => a.InnerText.Trim()).ToArray();
}
else if (text.StartsWith("status :"))
{
string status = text.Replace("status :", "").Trim().ToLower();
if (string.IsNullOrWhiteSpace(status))
releaseStatus = Manga.ReleaseStatusByte.Continuing;
else if (status == "ongoing")
releaseStatus = Manga.ReleaseStatusByte.Continuing;
else
releaseStatus = Enum.Parse<Manga.ReleaseStatusByte>(status, true);
}
else if (li.HasClass("genres"))
{
tags = li.Descendants("a").Select(a => a.InnerText.Trim()).ToHashSet();
}
}
string posterUrl = document.DocumentNode.Descendants("span").First(s => s.HasClass("info-image")).Descendants("img").First()
string posterUrl = document.DocumentNode.Descendants("div").First(s => s.HasClass("manga-info-pic")).Descendants("img").First()
.GetAttributes().First(a => a.Name == "src").Value;
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover, "https://www.manganato.gg/");
string description = document.DocumentNode.Descendants("div").First(d => d.HasClass("panel-story-info-description"))
string description = document.DocumentNode.SelectSingleNode("//div[@id='contentBox']")
.InnerText.Replace("Description :", "");
while (description.StartsWith('\n'))
description = description.Substring(1);
string pattern = "MMM dd,yyyy HH:mm";
string pattern = "MMM-dd-yyyy HH:mm";
HtmlNode? oldestChapter = document.DocumentNode
.SelectNodes("//span[contains(concat(' ',normalize-space(@class),' '),' chapter-time ')]").MaxBy(
node => DateTime.ParseExact(node.GetAttributeValue("title", "Dec 31 2400, 23:59"), pattern,
.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),' row ')]/span[@title]").MaxBy(
node => DateTime.ParseExact(node.GetAttributeValue("title", "Dec-31-2400 23:59"), pattern,
CultureInfo.InvariantCulture).Millisecond);
@@ -148,7 +142,7 @@ public class Manganato : MangaConnector
public override Chapter[] GetChapters(Manga manga, string language="en")
{
Log($"Getting chapters {manga}");
string requestUrl = $"https://chapmanganato.com/{manga.publicationId}";
string requestUrl = manga.websiteUrl;
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
@@ -166,21 +160,21 @@ public class Manganato : MangaConnector
{
List<Chapter> ret = new();
HtmlNode chapterList = document.DocumentNode.Descendants("ul").First(l => l.HasClass("row-content-chapter"));
HtmlNode chapterList = document.DocumentNode.Descendants("div").First(l => l.HasClass("chapter-list"));
Regex volRex = new(@"Vol\.([0-9]+).*");
Regex chapterRex = new(@"https:\/\/chapmanganato.[A-z]+\/manga-[A-z0-9]+\/chapter-([0-9\.]+)");
Regex nameRex = new(@"Chapter ([0-9]+(\.[0-9]+)*){1}:? (.*)");
foreach (HtmlNode chapterInfo in chapterList.Descendants("li"))
foreach (HtmlNode chapterInfo in chapterList.Descendants("div").Where(x => x.HasClass("row")))
{
string fullString = chapterInfo.Descendants("a").First(d => d.HasClass("chapter-name")).InnerText;
string url = chapterInfo.Descendants("a").First(d => d.HasClass("chapter-name"))
.GetAttributeValue("href", "");
string? volumeNumber = volRex.IsMatch(fullString) ? volRex.Match(fullString).Groups[1].Value : null;
string chapterNumber = chapterRex.Match(url).Groups[1].Value;
string chapterName = nameRex.Match(fullString).Groups[3].Value;
string url = chapterInfo.Descendants("a").First().GetAttributeValue("href", "");
var name = chapterInfo.Descendants("a").First().InnerText.Trim();
string chapterName = nameRex.Match(name).Groups[3].Value;
string chapterNumber = Regex.Match(name, @"Chapter ([0-9]+(\.[0-9]+)*)").Groups[1].Value;
string? volumeNumber = Regex.Match(chapterName, @"Vol\.([0-9]+)").Groups[1].Value;
if (string.IsNullOrWhiteSpace(volumeNumber))
volumeNumber = "0";
try
{
ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
@@ -221,7 +215,7 @@ public class Manganato : MangaConnector
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, "https://chapmanganato.com/", progressToken:progressToken);
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, "https://www.manganato.gg", progressToken:progressToken);
}
private string[] ParseImageUrlsFromHtml(HtmlDocument document)

View File

@@ -0,0 +1,273 @@
using System.Net;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using Tranga.Jobs;
namespace Tranga.MangaConnectors;
public class Webtoons : MangaConnector
{
public Webtoons(GlobalBase clone) : base(clone, "Webtoons", ["en"])
{
this.downloadClient = new HttpDownloadClient(clone);
}
// Done
public override Manga[] GetManga(string publicationTitle = "")
{
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
Log($"Searching Publications. Term=\"{publicationTitle}\"");
string requestUrl = $"https://www.webtoons.com/en/search?keyword={sanitizedTitle}&searchType=WEBTOON";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300) {
Log($"Failed to retrieve site");
return Array.Empty<Manga>();
}
if (requestResult.htmlDocument is null)
{
Log($"Failed to retrieve site");
return Array.Empty<Manga>();
}
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
return publications;
}
// Done
public override Manga? GetMangaFromId(string publicationId)
{
PublicationManager pb = new PublicationManager(publicationId);
return GetMangaFromUrl($"https://www.webtoons.com/en/{pb.Category}/{pb.Title}/list?title_no={pb.Id}");
}
// Done
public override Manga? GetMangaFromUrl(string url)
{
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300) {
return null;
}
if (requestResult.htmlDocument is null)
{
Log($"Failed to retrieve site");
return null;
}
Regex regex = new Regex(@".*webtoons\.com\/en\/(?<category>[^\/]+)\/(?<title>[^\/]+)\/list\?title_no=(?<id>\d+).*");
Match match = regex.Match(url);
if(match.Success) {
PublicationManager pm = new PublicationManager(match.Groups["title"].Value, match.Groups["category"].Value, match.Groups["id"].Value);
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, pm.getPublicationId(), url);
}
Log($"Failed match Regex ID");
return null;
}
// Done
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
{
HtmlNode mangaList = document.DocumentNode.SelectSingleNode("//ul[contains(@class, 'card_lst')]");
if (!mangaList.ChildNodes.Any(node => node.Name == "li")) {
Log($"Failed to parse publication");
return Array.Empty<Manga>();
}
List<string> urls = document.DocumentNode
.SelectNodes("//ul[contains(@class, 'card_lst')]/li/a")
.Select(node => node.GetAttributeValue("href", "https://www.webtoons.com"))
.ToList();
HashSet<Manga> ret = new();
foreach (string url in urls)
{
Manga? manga = GetMangaFromUrl(url);
if (manga is not null)
ret.Add((Manga)manga);
}
return ret.ToArray();
}
private string capitalizeString(string str = "") {
if(str.Length == 0) return "";
if(str.Length == 1) return str.ToUpper();
return char.ToUpper(str[0]) + str.Substring(1).ToLower();
}
// Done
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
HtmlNode infoNode1 = document.DocumentNode.SelectSingleNode("//*[@id='content']/div[2]/div[1]/div[1]");
HtmlNode infoNode2 = document.DocumentNode.SelectSingleNode("//*[@id='content']/div[2]/div[2]/div[2]");
string sortName = infoNode1.SelectSingleNode(".//h1[contains(@class, 'subj')]").InnerText;
string description = infoNode2.SelectSingleNode(".//p[contains(@class, 'summary')]")
.InnerText.Trim();
HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[contains(@class, 'detail_body') and contains(@class, 'banner')]");
Regex regex = new Regex(@"url\('(?<url>.*?)'\)");
Match match = regex.Match(posterNode.GetAttributeValue("style", ""));
string posterUrl = match.Groups["url"].Value;
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover, websiteUrl);
string genre = infoNode1.SelectSingleNode(".//h2[contains(@class, 'genre')]")
.InnerText.Trim();
string[] tags = [ genre ];
List<HtmlNode> authorsNodes = infoNode1.SelectSingleNode(".//div[contains(@class, 'author_area')]").Descendants("a").ToList();
List<string> authors = authorsNodes.Select(node => node.InnerText.Trim()).ToList();
string originalLanguage = "";
int year = DateTime.Now.Year;
string status1 = infoNode2.SelectSingleNode(".//p").InnerText;
string status2 = infoNode2.SelectSingleNode(".//p/span").InnerText;
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
if(status2.Length == 0 || status1.ToLower() == "completed") {
releaseStatus = Manga.ReleaseStatusByte.Completed;
} else if(status2.ToLower() == "up") {
releaseStatus = Manga.ReleaseStatusByte.Continuing;
}
Manga manga = new(sortName, authors, description, new Dictionary<string, string>(), tags, posterUrl, coverFileNameInCache, new Dictionary<string, string>(),
year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
AddMangaToCache(manga);
return manga;
}
// Done
public override Chapter[] GetChapters(Manga manga, string language = "en")
{
PublicationManager pm = new PublicationManager(manga.publicationId);
string requestUrl = $"https://www.webtoons.com/en/{pm.Category}/{pm.Title}/list?title_no={pm.Id}";
// Leaving this in for verification if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Chapter>();
// Get number of pages
int pages = requestResult.htmlDocument.DocumentNode
.SelectNodes("//div[contains(@class, 'paginate')]/a")
.ToList()
.Count;
List<Chapter> chapters = new List<Chapter>();
for(int page = 1; page <= pages; page++) {
string pageRequestUrl = $"{requestUrl}&page={page}";
chapters.AddRange(ParseChaptersFromHtml(manga, pageRequestUrl));
}
Log($"Got {chapters.Count} chapters. {manga}");
return chapters.Order().ToArray();
}
// Done
private List<Chapter> ParseChaptersFromHtml(Manga manga, string mangaUrl)
{
RequestResult result = downloadClient.MakeRequest(mangaUrl, RequestType.Default);
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
{
Log("Failed to load site");
return new List<Chapter>();
}
List<Chapter> ret = new();
foreach (HtmlNode chapterInfo in result.htmlDocument.DocumentNode.SelectNodes("//ul/li[contains(@class, '_episodeItem')]"))
{
HtmlNode infoNode = chapterInfo.SelectSingleNode(".//a");
string url = infoNode.GetAttributeValue("href", "");
string id = chapterInfo.GetAttributeValue("id", "");
if(id == "") continue;
string? volumeNumber = null;
string chapterNumber = chapterInfo.GetAttributeValue("data-episode-no", "");
if(chapterNumber == "") continue;
string chapterName = infoNode.SelectSingleNode(".//span[contains(@class, 'subj')]/span").InnerText.Trim();
ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
}
return ret;
}
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
{
if (progressToken?.cancellationRequested ?? false)
{
progressToken.Cancel();
return HttpStatusCode.RequestTimeout;
}
Manga chapterParentManga = chapter.parentManga;
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
string requestUrl = chapter.url;
// Leaving this in to check if the page exists
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
progressToken?.Cancel();
return requestResult.statusCode;
}
string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken, referrer: requestUrl);
}
private string[] ParseImageUrlsFromHtml(string mangaUrl)
{
RequestResult requestResult =
downloadClient.MakeRequest(mangaUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
{
return Array.Empty<string>();
}
if (requestResult.htmlDocument is null)
{
Log($"Failed to retrieve site");
return Array.Empty<string>();
}
return requestResult.htmlDocument.DocumentNode
.SelectNodes("//*[@id='_imageList']/img")
.Select(node =>
node.GetAttributeValue("data-url", ""))
.ToArray();
}
}
internal class PublicationManager {
public PublicationManager(string title = "", string category = "", string id = "") {
this.Title = title;
this.Category = category;
this.Id = id;
}
public PublicationManager(string publicationId) {
string[] parts = publicationId.Split("|");
if(parts.Length == 3) {
this.Title = parts[0];
this.Category = parts[1];
this.Id = parts[2];
} else {
this.Title = "";
this.Category = "";
this.Id = "";
}
}
public string getPublicationId() {
return $"{this.Title}|{this.Category}|{this.Id}";
}
public string Title { get; set; }
public string Category { get; set; }
public string Id { get; set; }
}
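A short usage sketch of the composite "Title|Category|Id" publication id handled by PublicationManager above; the concrete values are illustrative, not taken from the connector:

// Compose the id the Webtoons connector stores for a publication, then split it again.
PublicationManager pm = new("tower-of-god", "fantasy", "95");   // illustrative title, category and title_no
string publicationId = pm.getPublicationId();                   // "tower-of-god|fantasy|95"
PublicationManager roundTrip = new(publicationId);              // Title, Category and Id restored from the string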

View File

@@ -1,7 +1,6 @@
using System.Net;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using Soenneker.Utils.String.NeedlemanWunsch;
using Tranga.Jobs;
namespace Tranga.MangaConnectors;
@@ -22,10 +21,10 @@ public class Weebcentral : MangaConnector
{
Log($"Searching Publications. Term=\"{publicationTitle}\"");
const int limit = 32; //How many values we want returned at once
var offset = 0; //"Page"
var requestUrl =
int offset = 0; //"Page"
string requestUrl =
$"{_baseUrl}/search/data?limit={limit}&offset={offset}&text={publicationTitle}&sort=Best+Match&order=Ascending&official=Any&display_mode=Minimal%20Display";
var requestResult =
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 ||
requestResult.htmlDocument == null)
@@ -34,7 +33,7 @@ public class Weebcentral : MangaConnector
return [];
}
var publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
return publications;
@@ -43,15 +42,15 @@ public class Weebcentral : MangaConnector
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
{
if (document.DocumentNode.SelectNodes("//article") == null)
return Array.Empty<Manga>();
return [];
var urls = document.DocumentNode.SelectNodes("/html/body/article/a[@class='link link-hover']")
List<string> urls = document.DocumentNode.SelectNodes("/html/body/article/a[@class='link link-hover tooltip tooltip-bottom']")
.Select(elem => elem.GetAttributeValue("href", "")).ToList();
HashSet<Manga> ret = new();
foreach (var url in urls)
foreach (string url in urls)
{
var manga = GetMangaFromUrl(url);
Manga? manga = GetMangaFromUrl(url);
if (manga is not null)
ret.Add((Manga)manga);
}
@@ -62,9 +61,9 @@ public class Weebcentral : MangaConnector
public override Manga? GetMangaFromUrl(string url)
{
Regex publicationIdRex = new(@"https:\/\/weebcentral\.com\/series\/(\w*)\/(.*)");
var publicationId = publicationIdRex.Match(url).Groups[1].Value;
string publicationId = publicationIdRex.Match(url).Groups[1].Value;
var requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 300 && (int)requestResult.statusCode >= 200 &&
requestResult.htmlDocument is not null)
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
@@ -73,26 +72,26 @@ public class Weebcentral : MangaConnector
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
{
var posterNode =
HtmlNode? posterNode =
document.DocumentNode.SelectSingleNode("//section[@class='flex items-center justify-center']/picture/img");
var posterUrl = posterNode?.GetAttributeValue("src", "") ?? "";
var coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
string posterUrl = posterNode?.GetAttributeValue("src", "") ?? "";
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
var titleNode = document.DocumentNode.SelectSingleNode("//section/h1");
var sortName = titleNode?.InnerText ?? "Undefined";
HtmlNode? titleNode = document.DocumentNode.SelectSingleNode("//section/h1");
string sortName = titleNode?.InnerText ?? "Undefined";
HtmlNode[] authorsNodes =
document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Author(s): ']/span")?.ToArray() ?? [];
var authors = authorsNodes.Select(n => n.InnerText).ToList();
List<string> authors = authorsNodes.Select(n => n.InnerText).ToList();
HtmlNode[] genreNodes =
document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Tags(s): ']/span")?.ToArray() ?? [];
HashSet<string> tags = genreNodes.Select(n => n.InnerText).ToHashSet();
var statusNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Status: ']/a");
var status = statusNode?.InnerText ?? "";
HtmlNode? statusNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Status: ']/a");
string status = statusNode?.InnerText ?? "";
Log("unable to parse status");
var releaseStatus = Manga.ReleaseStatusByte.Unreleased;
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
switch (status.ToLower())
{
case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
@@ -101,19 +100,19 @@ public class Weebcentral : MangaConnector
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
}
var yearNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Released: ']/span");
var year = Convert.ToInt32(yearNode?.InnerText ?? "0");
HtmlNode? yearNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Released: ']/span");
int year = Convert.ToInt32(yearNode?.InnerText ?? "0");
var descriptionNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Description']/p");
var description = descriptionNode?.InnerText ?? "Undefined";
HtmlNode? descriptionNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Description']/p");
string description = descriptionNode?.InnerText ?? "Undefined";
HtmlNode[] altTitleNodes = document.DocumentNode
.SelectNodes("//ul/li[strong/text() = 'Associated Name(s)']/ul/li")?.ToArray() ?? [];
Dictionary<string, string> altTitles = new(), links = new();
for (var i = 0; i < altTitleNodes.Length; i++)
for (int i = 0; i < altTitleNodes.Length; i++)
altTitles.Add(i.ToString(), altTitleNodes[i].InnerText);
var originalLanguage = "";
string originalLanguage = "";
Manga manga = new(sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
coverFileNameInCache, links,
@@ -127,74 +126,54 @@ public class Weebcentral : MangaConnector
return GetMangaFromUrl($"https://weebcentral.com/series/{publicationId}");
}
private string ToFilteredString(string input)
{
return string.Join(' ', input.ToLower().Split(' ').Where(word => _filterWords.Contains(word) == false));
}
private SearchResult[] FilteredResults(string publicationTitle, SearchResult[] unfilteredSearchResults)
{
Dictionary<SearchResult, int> similarity = new();
foreach (var sr in unfilteredSearchResults)
{
List<int> scores = new();
var filteredPublicationString = ToFilteredString(publicationTitle);
var filteredSString = ToFilteredString(sr.s);
scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(filteredSString, filteredPublicationString));
foreach (var srA in sr.a)
{
var filteredAString = ToFilteredString(srA);
scores.Add(NeedlemanWunschStringUtil.CalculateSimilarity(filteredAString, filteredPublicationString));
}
similarity.Add(sr, scores.Sum() / scores.Count);
}
var ret = similarity.OrderBy(s => s.Value).Take(10).Select(s => s.Key).ToList();
return ret.ToArray();
}
public override Chapter[] GetChapters(Manga manga, string language = "en")
{
Log($"Getting chapters {manga}");
var requestUrl = $"{_baseUrl}/series/{manga.publicationId}/full-chapter-list";
var requestResult =
string requestUrl = $"{_baseUrl}/series/{manga.publicationId}/full-chapter-list";
RequestResult requestResult =
downloadClient.MakeRequest(requestUrl, RequestType.Default);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
return Array.Empty<Chapter>();
return [];
//Return Chapters ordered by Chapter-Number
if (requestResult.htmlDocument is null)
return Array.Empty<Chapter>();
var chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
return [];
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
Log($"Got {chapters.Count} chapters. {manga}");
return chapters.Order().ToArray();
return chapters.OrderByDescending(c => c.name).ThenBy(c => c.volumeNumber).ThenBy(c => c.chapterNumber).ToArray();
}
private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
{
var chaptersWrapper = document.DocumentNode.SelectSingleNode("/html/body");
HtmlNode? chaptersWrapper = document.DocumentNode.SelectSingleNode("/html/body");
Regex chapterRex = new(@".* (\d+)");
Regex chapterRex = new(@"(\d+(?:\.\d+)*)");
Regex chapterNameRex = new(@"(\w* )+");
Regex idRex = new(@"https:\/\/weebcentral\.com\/chapters\/(\w*)");
var ret = chaptersWrapper.Descendants("a").Select(elem =>
List<Chapter> ret = chaptersWrapper.Descendants("a").Select(elem =>
{
var url = elem.GetAttributeValue("href", "") ?? "Undefined";
string url = elem.GetAttributeValue("href", "") ?? "Undefined";
if (!url.StartsWith("https://") && !url.StartsWith("http://"))
return new Chapter(manga, null, null, "-1", "undefined");
var idMatch = idRex.Match(url);
var id = idMatch.Success ? idMatch.Groups[1].Value : null;
Match idMatch = idRex.Match(url);
string? id = idMatch.Success ? idMatch.Groups[1].Value : null;
var chapterNode = elem.SelectSingleNode("span[@class='grow flex items-center gap-2']/span")?.InnerText ??
"Undefined";
string chapterNode = elem.SelectSingleNode("span[@class='grow flex items-center gap-2']/span")?.InnerText ??
"Undefined";
var chapterNumberMatch = chapterRex.Match(chapterNode);
var chapterNumber = chapterNumberMatch.Success ? chapterNumberMatch.Groups[1].Value : "-1";
MatchCollection chapterNumberMatch = chapterRex.Matches(chapterNode);
string chapterNumber = chapterNumberMatch.Count > 0 ? chapterNumberMatch[^1].Groups[1].Value : "-1";
MatchCollection chapterNameMatch = chapterNameRex.Matches(chapterNode);
string chapterName = chapterNameMatch.Count > 0
? string.Join(" - ",
chapterNameMatch.Select(m => m.Groups[1].Value.Trim())
.Where(name => name.Length > 0 && !name.Equals("Chapter", StringComparison.OrdinalIgnoreCase)).ToArray()).Trim()
: "";
return new Chapter(manga, null, null, chapterNumber, url, id);
return new Chapter(manga, chapterName != "" ? chapterName : null, null, chapterNumber, url, id);
}).Where(elem => elem.chapterNumber != -1 && elem.url != "undefined").ToList();
ret.Reverse();
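As a quick sanity check of the number-parsing step above, a standalone sketch; the chapter labels are invented examples.

using System;
using System.Text.RegularExpressions;

class ChapterNumberSketch
{
    static void Main()
    {
        // Same pattern as in the hunk above: digits with optional ".digits" parts.
        Regex chapterRex = new(@"(\d+(?:\.\d+)*)");

        foreach (string label in new[] { "Chapter 10.5", "Season 2 Chapter 7", "Epilogue" })
        {
            MatchCollection matches = chapterRex.Matches(label);
            // Mirror the code above: take the last numeric token, or fall back to "-1".
            string chapterNumber = matches.Count > 0 ? matches[^1].Groups[1].Value : "-1";
            Console.WriteLine($"{label} -> {chapterNumber}");
        }
        // Output:
        // Chapter 10.5 -> 10.5
        // Season 2 Chapter 7 -> 7
        // Epilogue -> -1
    }
}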
@@ -209,7 +188,7 @@ public class Weebcentral : MangaConnector
return HttpStatusCode.RequestTimeout;
}
var chapterParentManga = chapter.parentManga;
Manga chapterParentManga = chapter.parentManga;
if (progressToken?.cancellationRequested ?? false)
{
progressToken.Cancel();
@@ -218,26 +197,19 @@ public class Weebcentral : MangaConnector
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
var requestResult = downloadClient.MakeRequest(chapter.url, RequestType.Default);
RequestResult requestResult = downloadClient.MakeRequest(chapter.url, RequestType.Default);
if (requestResult.htmlDocument is null)
{
progressToken?.Cancel();
return HttpStatusCode.RequestTimeout;
}
var document = requestResult.htmlDocument;
HtmlDocument? document = requestResult.htmlDocument;
var imageNodes =
HtmlNode[] imageNodes =
document.DocumentNode.SelectNodes($"//section[@hx-get='{chapter.url}/images']/img")?.ToArray() ?? [];
var urls = imageNodes.Select(imgNode => imgNode.GetAttributeValue("src", "")).ToArray();
string[] urls = imageNodes.Select(imgNode => imgNode.GetAttributeValue("src", "")).ToArray();
return DownloadChapterImages(urls, chapter, RequestType.MangaImage, progressToken: progressToken);
return DownloadChapterImages(urls, chapter, RequestType.MangaImage, progressToken: progressToken, referrer: "https://weebcentral.com/");
}
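DownloadChapterImages is defined elsewhere in the connector base class, so its handling of the new referrer argument is not visible in this diff. As a rough illustration only, a minimal HttpClient sketch of what sending a Referer header with an image request typically looks like; GetImageAsync is a hypothetical helper, not Tranga's implementation.

using System;
using System.Net.Http;
using System.Threading.Tasks;

class ReferrerSketch
{
    // Hypothetical helper: fetch one image while sending a Referer header,
    // which some hosts require before serving chapter images.
    static async Task<byte[]> GetImageAsync(HttpClient client, string url, string referrer)
    {
        using HttpRequestMessage request = new(HttpMethod.Get, url);
        request.Headers.Referrer = new Uri(referrer);
        using HttpResponseMessage response = await client.SendAsync(request);
        response.EnsureSuccessStatusCode();
        return await response.Content.ReadAsByteArrayAsync();
    }

    static async Task Main()
    {
        using HttpClient client = new();
        // Placeholder URL; a real call would use the src values collected above.
        byte[] image = await GetImageAsync(client, "https://example.com/page-001.png", "https://weebcentral.com/");
        Console.WriteLine($"Downloaded {image.Length} bytes");
    }
}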
private struct SearchResult
{
public string i { get; set; }
public string s { get; set; }
public string[] a { get; set; }
}
}
}

View File

@@ -118,6 +118,15 @@ public class Server : GlobalBase
case "Connectors":
SendResponse(HttpStatusCode.OK, response, _parent.GetConnectors().Select(con => con.name).ToArray());
break;
case "Languages":
if (!requestVariables.TryGetValue("connector", out connectorName) ||
!_parent.TryGetConnector(connectorName, out connector))
{
SendResponse(HttpStatusCode.BadRequest, response);
break;
}
SendResponse(HttpStatusCode.OK, response, connector);
break;
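A hedged sketch of how a client might exercise the new "Languages" case; the host, port, and path layout below are placeholders (assumptions, not taken from this diff), while the "connector" query variable is the one checked above.

using System;
using System.Net.Http;
using System.Threading.Tasks;

class LanguagesQuerySketch
{
    static async Task Main()
    {
        using HttpClient client = new();
        // Placeholder base address; substitute whatever host/port the Tranga API listens on.
        string url = "http://localhost:6531/Languages?connector=Weebcentral";

        HttpResponseMessage response = await client.GetAsync(url);
        string body = await response.Content.ReadAsStringAsync();

        // Per the case above: a missing or unknown "connector" value yields 400 Bad Request,
        // otherwise the matched connector is serialized into a 200 response.
        Console.WriteLine($"{(int)response.StatusCode}: {body}");
    }
}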
case "Manga/Cover":
if (!requestVariables.TryGetValue("internalId", out internalId) ||
!_parent.TryGetPublicationById(internalId, out manga))

View File

@@ -22,11 +22,11 @@ public partial class Tranga : GlobalBase
new MangaKatana(this),
new Mangaworld(this),
new Bato(this),
new MangaLife(this),
new ManhuaPlus(this),
new MangaHere(this),
new AsuraToon(this),
new Weebcentral(this)
new Weebcentral(this),
new Webtoons(this),
};
foreach(DirectoryInfo dir in new DirectoryInfo(Path.GetTempPath()).GetDirectories("trangatemp"))//Cleanup old temp folders
dir.Delete();

View File

@@ -10,9 +10,9 @@
<ItemGroup>
<PackageReference Include="GlaxArguments" Version="1.1.0" />
<PackageReference Include="HtmlAgilityPack" Version="1.11.71" />
<PackageReference Include="HtmlAgilityPack" Version="1.11.72" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="PuppeteerSharp" Version="20.0.5" />
<PackageReference Include="PuppeteerSharp" Version="20.1.0" />
<PackageReference Include="Soenneker.Utils.String.NeedlemanWunsch" Version="2.1.301" />
</ItemGroup>