Mirror of https://github.com/C9Glax/tranga.git (synced 2025-07-01 08:24:17 +02:00)

Compare commits: 017701867d ... cuttingedg (624 commits)
@@ -23,3 +23,5 @@
 **/values.dev.yaml
 LICENSE
 README.md
+Manga
+settings
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
+name: Bug Report
+description: File a bug report
+title: "[It broke]: "
+labels: ["bug"]
+body:
+  - type: textarea
+    attributes:
+      label: What is broken?
+      description: What happened? How did we get here?
+      placeholder: The place where you tell me what you expected to happen, and what happened instead.
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Log-output
+      description: The output of `docker logs tranga-api`
+      render: C#
+  - type: textarea
+    attributes:
+      label: Additional stuff
+      description: Screenshots, anything you think might help
.github/ISSUE_TEMPLATE/new_connector.yml (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
+name: New Connector Request
+description: Request a new site to be added
+title: "[New Connector]: "
+labels: ["New Connector"]
+body:
+  - type: input
+    attributes:
+      label: Website-Link
+      placeholder: https://
+    validations:
+      required: true
+  - type: checkboxes
+    attributes:
+      label: Is the Website free to access?
+      description: We can't support pay-to-use sites, or captcha-proxied sites such as Cloudflare.
+      options:
+        - label: The Website is freely accessible.
+          required: true
+  - type: textarea
+    attributes:
+      label: Anything else?
+    validations:
+      required: false
.github/workflows/docker-image-cuttingedge.yml (vendored, 10 changes)
@@ -13,16 +13,16 @@ jobs:
 
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
      # https://github.com/docker/setup-qemu-action#usage
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2.2.0
+        uses: docker/setup-qemu-action@v3.6.0
 
      # https://github.com/marketplace/actions/docker-setup-buildx
      - name: Set up Docker Buildx
        id: buildx
-        uses: docker/setup-buildx-action@v2.9.1
+        uses: docker/setup-buildx-action@v3.11.0
 
      # https://github.com/docker/login-action#docker-hub
      - name: Login to Docker Hub
@@ -33,12 +33,12 @@ jobs:
 
      # https://github.com/docker/build-push-action#multi-platform-image
      - name: Build and push API
-        uses: docker/build-push-action@v4.1.1
+        uses: docker/build-push-action@v6.18.0
        with:
          context: ./
          file: ./Dockerfile
          #platforms: linux/amd64,linux/arm64,linux/riscv64,linux/ppc64le,linux/s390x,linux/386,linux/mips64le,linux/mips64,linux/arm/v7,linux/arm/v6
-          platforms: linux/amd64
+          platforms: linux/amd64,linux/arm64
          pull: true
          push: true
          tags: |
.github/workflows/docker-image-master.yml (vendored, 12 changes)
@@ -3,8 +3,6 @@ name: Docker Image CI
 on:
   push:
     branches: [ "master" ]
-  pull_request:
-    branches: [ "master" ]
   workflow_dispatch:
 
 jobs:
@@ -15,16 +13,16 @@ jobs:
 
     steps:
      - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
      # https://github.com/docker/setup-qemu-action#usage
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2.2.0
+        uses: docker/setup-qemu-action@v3.6.0
 
      # https://github.com/marketplace/actions/docker-setup-buildx
      - name: Set up Docker Buildx
        id: buildx
-        uses: docker/setup-buildx-action@v2.9.1
+        uses: docker/setup-buildx-action@v3.11.0
 
      # https://github.com/docker/login-action#docker-hub
      - name: Login to Docker Hub
@@ -35,12 +33,12 @@ jobs:
 
      # https://github.com/docker/build-push-action#multi-platform-image
      - name: Build and push API
-        uses: docker/build-push-action@v4.1.1
+        uses: docker/build-push-action@v6.18.0
        with:
          context: ./
          file: ./Dockerfile
          #platforms: linux/amd64,linux/arm64,linux/riscv64,linux/ppc64le,linux/s390x,linux/386,linux/mips64le,linux/mips64,linux/arm/v7,linux/arm/v6
-          platforms: linux/amd64
+          platforms: linux/amd64,linux/arm64
          pull: true
          push: true
          tags: |
@@ -1,6 +1,8 @@
-name: Docker Image CI
+name: Docker Image CI
 
 on:
   push:
     branches: [ "postgres-Server-V2" ]
   workflow_dispatch:
 
 jobs:
@@ -11,16 +13,16 @@ jobs:
 
     steps:
      - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
      # https://github.com/docker/setup-qemu-action#usage
      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2.2.0
+        uses: docker/setup-qemu-action@v3.6.0
 
      # https://github.com/marketplace/actions/docker-setup-buildx
      - name: Set up Docker Buildx
        id: buildx
-        uses: docker/setup-buildx-action@v2.9.1
+        uses: docker/setup-buildx-action@v3.11.0
 
      # https://github.com/docker/login-action#docker-hub
      - name: Login to Docker Hub
@@ -30,14 +32,14 @@ jobs:
       password: ${{ secrets.DOCKERHUB_TOKEN }}
 
      # https://github.com/docker/build-push-action#multi-platform-image
-      - name: Build and push base
-        uses: docker/build-push-action@v4.1.1
+      - name: Build and push API
+        uses: docker/build-push-action@v6.18.0
        with:
          context: ./
-          file: ./Dockerfile-base
+          file: ./Dockerfile
          #platforms: linux/amd64,linux/arm64,linux/riscv64,linux/ppc64le,linux/s390x,linux/386,linux/mips64le,linux/mips64,linux/arm/v7,linux/arm/v6
-          platforms: linux/amd64
+          platforms: linux/amd64,linux/arm64
          pull: true
          push: true
          tags: |
-            glax/tranga-base:latest
+            glax/tranga-api:Server-V2
.gitignore (vendored, 4 additions)
@@ -19,3 +19,7 @@ riderModule.iml
 /.idea
 cover.jpg
 cover.png
+/.vscode
+/Manga
+/settings
+*.DotSettings.user
@@ -2,13 +2,14 @@
 
   <PropertyGroup>
     <OutputType>Exe</OutputType>
-    <TargetFramework>net7.0</TargetFramework>
+    <TargetFramework>net8.0</TargetFramework>
     <ImplicitUsings>enable</ImplicitUsings>
     <Nullable>enable</Nullable>
+    <LangVersion>12</LangVersion>
   </PropertyGroup>
 
   <ItemGroup>
-    <PackageReference Include="Spectre.Console.Cli" Version="0.47.1-preview.0.11" />
+    <PackageReference Include="Spectre.Console.Cli" Version="0.49.1" />
   </ItemGroup>
 
   <ItemGroup>
@@ -44,19 +44,21 @@ internal sealed class TrangaCli : Command<TrangaCli.Settings>
         if(settings.fileLogger is true)
             enabledLoggers.Add(Logger.LoggerType.FileLogger);
 
-        string? logFilePath = settings.fileLoggerPath ?? "";
-        Logger logger = new(enabledLoggers.ToArray(), Console.Out, Console.OutputEncoding, logFilePath);
+        string? logFolderPath = settings.fileLoggerPath ?? "";
+        Logger logger = new(enabledLoggers.ToArray(), Console.Out, Console.OutputEncoding, logFolderPath);
 
-        TrangaSettings trangaSettings = new (settings.downloadLocation, settings.workingDirectory, settings.apiPort);
-
-        Directory.CreateDirectory(trangaSettings.downloadLocation);
-        Directory.CreateDirectory(trangaSettings.workingDirectory);
+        if(settings.workingDirectory is not null)
+            TrangaSettings.LoadFromWorkingDirectory(settings.workingDirectory);
+        else
+            TrangaSettings.CreateOrUpdate();
+        if(settings.downloadLocation is not null)
+            TrangaSettings.CreateOrUpdate(downloadDirectory: settings.downloadLocation);
 
         Tranga.Tranga? api = null;
 
         Thread trangaApi = new Thread(() =>
         {
-            api = new(logger, trangaSettings);
+            api = new(logger);
         });
         trangaApi.Start();
 
@@ -99,7 +101,7 @@ internal sealed class TrangaCli : Command<TrangaCli.Settings>
             parameters.Add(new ValueTuple<string, string>(name, value));
         }
 
-        string requestString = $"http://localhost:{trangaSettings.apiPortNumber}/{requestPath}";
+        string requestString = $"http://localhost:{TrangaSettings.apiPortNumber}/{requestPath}";
         if (parameters.Any())
         {
             requestString += "?";
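For reference, a standalone sketch of the URL the CLI assembles above. The port value mirrors the static TrangaSettings.apiPortNumber (6531 by default, per the Dockerfile and README further down); the request path and parameters here are made-up placeholders, not real Tranga endpoints.

```csharp
using System;
using System.Linq;

// Standalone sketch (not Tranga code): rebuild the request string the CLI constructs above.
// apiPortNumber, requestPath and the parameters are illustrative placeholders.
int apiPortNumber = 6531;                                 // default port, see EXPOSE 6531 below
string requestPath = "some/endpoint";                     // hypothetical path
(string name, string value)[] parameters = { ("key", "value") };

string requestString = $"http://localhost:{apiPortNumber}/{requestPath}";
if (parameters.Any())
    requestString += "?" + string.Join("&", parameters.Select(p => $"{p.name}={p.value}"));

Console.WriteLine(requestString);                         // http://localhost:6531/some/endpoint?key=value

// Sending it only works against a running Tranga API, e.g.:
// using System.Net.Http;
// using HttpClient client = new();
// HttpResponseMessage response = await client.GetAsync(requestString);
```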
Dockerfile (50 changes)
@@ -1,16 +1,44 @@
 # syntax=docker/dockerfile:1
+ARG DOTNET=8.0
 
-FROM mcr.microsoft.com/dotnet/sdk:7.0 as build-env
-WORKDIR /src
-COPY CLI /src/CLI
-COPY Tranga /src/Tranga
-COPY Logging /src/Logging
-COPY Tranga.sln /src
-RUN dotnet restore /src/Tranga/Tranga.csproj
-RUN dotnet publish -c Release -o /publish
-
-FROM glax/tranga-base:latest as runtime
-WORKDIR /publish
-COPY --from=build-env /publish .
+FROM --platform=$TARGETPLATFORM mcr.microsoft.com/dotnet/runtime:$DOTNET AS base
+WORKDIR /publish
+ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
+ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium
+ENV XDG_CONFIG_HOME=/tmp/.chromium
+ENV XDG_CACHE_HOME=/tmp/.chromium
+RUN apt-get update \
+    && apt-get install -y libx11-6 libx11-xcb1 libatk1.0-0 libgtk-3-0 libcups2 libdrm2 libxkbcommon0 libxcomposite1 libxdamage1 libxrandr2 libgbm1 libpango-1.0-0 libcairo2 libasound2 libxshmfence1 libnss3 chromium \
+    && apt-get autopurge -y \
+    && apt-get autoclean -y
+
+FROM --platform=$BUILDPLATFORM mcr.microsoft.com/dotnet/sdk:$DOTNET AS build-env
+WORKDIR /src
+
+COPY Tranga.sln /src
+COPY CLI/CLI.csproj /src/CLI/CLI.csproj
+COPY Logging/Logging.csproj /src/Logging/Logging.csproj
+COPY Tranga/Tranga.csproj /src/Tranga/Tranga.csproj
+RUN dotnet restore /src/Tranga.sln
+
+COPY . /src/
+RUN dotnet publish -c Release --property:OutputPath=/publish -maxcpucount:1
+
+FROM --platform=$TARGETPLATFORM base AS runtime
+EXPOSE 6531
+ARG UNAME=tranga
+ARG UID=1000
+ARG GID=1000
+RUN groupadd -g $GID -o $UNAME \
+    && useradd -m -u $UID -g $GID -o -s /bin/bash $UNAME \
+    && mkdir /usr/share/tranga-api \
+    && mkdir /Manga \
+    && chown 1000:1000 /usr/share/tranga-api \
+    && chown 1000:1000 /Manga
+USER $UNAME
+
+WORKDIR /publish
+COPY --chown=1000:1000 --from=build-env /publish .
+USER 0
 ENTRYPOINT ["dotnet", "/publish/Tranga.dll"]
+CMD ["-f", "-c", "-l", "/usr/share/tranga-api/logs"]
@@ -1,8 +0,0 @@ (file deleted)
-# syntax=docker/dockerfile:1
-#FROM mcr.microsoft.com/dotnet/aspnet:7.0 as runtime
-FROM mcr.microsoft.com/dotnet/runtime:7.0 as runtime
-WORKDIR /publish
-RUN apt-get update
-RUN apt-get install -y libx11-6 libx11-xcb1 libatk1.0-0 libgtk-3-0 libcups2 libdrm2 libxkbcommon0 libxcomposite1 libxdamage1 libxrandr2 libgbm1 libpango-1.0-0 libcairo2 libasound2 libxshmfence1 libnss3
-RUN apt-get autopurge -y
-RUN apt-get autoclean -y
@@ -1,6 +1,6 @@
 namespace Logging;
 
-public class LogMessage
+public readonly struct LogMessage
 {
     public DateTime logTime { get; }
     public string caller { get; }
@@ -20,17 +20,17 @@ public class Logger : TextWriter
     private readonly FormattedConsoleLogger? _formattedConsoleLogger;
     private readonly MemoryLogger _memoryLogger;
 
-    public Logger(LoggerType[] enabledLoggers, TextWriter? stdOut, Encoding? encoding, string? logFilePath)
+    public Logger(LoggerType[] enabledLoggers, TextWriter? stdOut, Encoding? encoding, string? logFolderPath)
     {
         this.Encoding = encoding ?? Encoding.UTF8;
-        if(enabledLoggers.Contains(LoggerType.FileLogger) && (logFilePath is null || logFilePath == ""))
+        DateTime now = DateTime.Now;
+        if(enabledLoggers.Contains(LoggerType.FileLogger) && (logFolderPath is null || logFolderPath == ""))
         {
-            DateTime now = DateTime.Now;
-            logFilePath = Path.Join(LogDirectoryPath,
+            string filePath = Path.Join(LogDirectoryPath,
                 $"{now.ToShortDateString()}_{now.Hour}-{now.Minute}-{now.Second}.log");
-            _fileLogger = new FileLogger(logFilePath, encoding);
-        }else if (enabledLoggers.Contains(LoggerType.FileLogger) && logFilePath is not null)
-            _fileLogger = new FileLogger(logFilePath, encoding);
+            _fileLogger = new FileLogger(filePath, encoding);
+        }else if (enabledLoggers.Contains(LoggerType.FileLogger) && logFolderPath is not null)
+            _fileLogger = new FileLogger(Path.Join(logFolderPath, $"{now.ToShortDateString()}_{now.Hour}-{now.Minute}-{now.Second}.log") , encoding);
 
 
         if (enabledLoggers.Contains(LoggerType.ConsoleLogger) && stdOut is not null)
@@ -43,6 +43,7 @@ public class Logger : TextWriter
             throw new ArgumentException($"stdOut can not be null for LoggerType {LoggerType.ConsoleLogger}");
         }
         _memoryLogger = new MemoryLogger(encoding);
+        WriteLine(GetType().ToString(), $"Logfile: {logFilePath}");
     }
 
     public void WriteLine(string caller, string? value)
@@ -1,9 +1,10 @@
 <Project Sdk="Microsoft.NET.Sdk">
 
     <PropertyGroup>
-        <TargetFramework>net7.0</TargetFramework>
+        <TargetFramework>net8.0</TargetFramework>
         <ImplicitUsings>enable</ImplicitUsings>
         <Nullable>enable</Nullable>
+        <LangVersion>12</LangVersion>
     </PropertyGroup>
 
 </Project>
@@ -6,7 +6,6 @@ public class MemoryLogger : LoggerBase
 {
     private readonly SortedList<DateTime, LogMessage> _logMessages = new();
     private int _lastLogMessageIndex = 0;
-    private bool _lockLogMessages = false;
 
     public MemoryLogger(Encoding? encoding = null) : base(encoding)
     {
@@ -15,12 +14,9 @@ public class MemoryLogger : LoggerBase
 
     protected override void Write(LogMessage value)
     {
-        if (!_lockLogMessages)
+        lock (_logMessages)
         {
-            _lockLogMessages = true;
-            while(!_logMessages.TryAdd(DateTime.Now, value))
-                Thread.Sleep(10);
-            _lockLogMessages = false;
+            _logMessages.Add(DateTime.Now, value);
         }
     }
 
@@ -41,11 +37,9 @@ public class MemoryLogger : LoggerBase
 
         for (int retIndex = 0; retIndex < ret.Length; retIndex++)
         {
-            if (!_lockLogMessages)
+            lock (_logMessages)
             {
-                _lockLogMessages = true;
                 ret[retIndex] = _logMessages.GetValueAtIndex(_logMessages.Count - retLength + retIndex).ToString();
-                _lockLogMessages = false;
             }
         }
 
@@ -63,14 +57,12 @@ public class MemoryLogger : LoggerBase
         {
             try
             {
-                if (!_lockLogMessages)
+                lock(_logMessages)
                 {
-                    _lockLogMessages = true;
                     ret.Add(_logMessages.GetValueAtIndex(_lastLogMessageIndex + retIndex).ToString());
-                    _lockLogMessages = false;
                 }
             }
-            catch (NullReferenceException e)//Called when LogMessage has not finished writing
+            catch (NullReferenceException)//Called when LogMessage has not finished writing
             {
                 break;
             }
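The change above replaces a hand-rolled _lockLogMessages flag with lock blocks. A minimal standalone sketch of why that matters (the names below are stand-ins, not Tranga types): the check-then-set on a bool is not atomic, so concurrent writers could interleave, while lock serialises them.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

// Standalone sketch: four concurrent writers guarded by lock(), mirroring the new
// MemoryLogger.Write. With the old bool-flag pattern, two threads could pass the
// "if (!_lockLogMessages)" check at the same time and corrupt the SortedList,
// or a write could be skipped while the flag happened to be set.
var messages = new SortedList<long, string>();
var gate = new object();
long key = 0;

await Task.WhenAll(Enumerable.Range(0, 4).Select(writer => Task.Run(() =>
{
    for (int n = 0; n < 1000; n++)
        lock (gate)                                     // serialise access to the list
            messages.Add(key++, $"writer {writer}, message {n}");
})));

Console.WriteLine(messages.Count);                      // 4000: every write lands exactly once
```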
README.md (81 changes)
@@ -1,11 +1,6 @@
-<!-- PROJECT SHIELDS -->
-<!--
-*** I'm using markdown "reference style" links for readability.
-*** Reference links are enclosed in brackets [ ] instead of parentheses ( ).
-*** See the bottom of this document for the declaration of the reference variables
-*** for contributors-url, forks-url, etc. This is an optional, concise syntax you may use.
-*** https://www.markdownguide.org/basic-syntax/#reference-style-links
--->
+# Testers for V2 wanted!
+
+[Details](https://github.com/C9Glax/tranga/pull/355#issuecomment-2764217944)
 
 <!-- PROJECT LOGO -->
 <br />
@@ -21,8 +16,6 @@
   </p>
 </div>
 
-
-
 <!-- TABLE OF CONTENTS -->
 <details>
   <summary>Table of Contents</summary>
@@ -54,35 +47,40 @@
 
 Tranga can download Chapters and Metadata from "Scanlation" sites such as
 
-- [MangaDex.org](https://mangadex.org/)
-- [Manganato.com](https://manganato.com/)
-- [Mangasee](https://mangasee123.com/)
-- [MangaKatana](https://mangakatana.com)
-- ❓ Open an [issue](https://github.com/C9Glax/tranga/issues)
+- [MangaDex.org](https://mangadex.org/) (Multilingual)
+- [Manganato.com](https://manganato.com/) (en)
+- [MangaKatana.com](https://mangakatana.com) (en)
+- [Mangaworld.bz](https://www.mangaworld.bz/) (it)
+- [Bato.to](https://bato.to/v3x) (en)
+- [ManhuaPlus](https://manhuaplus.org/) (en)
+- [MangaHere](https://www.mangahere.cc/) (en) (Their covers aren't scrapeable.)
+- [Weebcentral](https://weebcentral.com) (en)
+- [Webtoons](https://www.webtoons.com/en/)
+- ❓ Open an [issue](https://github.com/C9Glax/tranga/issues/new?assignees=&labels=New+Connector&projects=&template=new_connector.yml&title=%5BNew+Connector%5D%3A+)
 
-and trigger an scan with [Komga](https://komga.org/) and [Kavita](https://www.kavitareader.com/).
-Notifications will can sent to your devices using [Gotify](https://gotify.net/) and [LunaSea](https://www.lunasea.app/).
+and trigger a library-scan with [Komga](https://komga.org/) and [Kavita](https://www.kavitareader.com/).
+Notifications can be sent to your devices using [Gotify](https://gotify.net/), [LunaSea](https://www.lunasea.app/) or [Ntfy](https://ntfy.sh/).
 
 ### What this does and doesn't do
 
 Tranga (this git-repo) will open a port (standard 6531) and listen for requests to add Jobs to Monitor and/or download specific Manga.
 The configuration is all done through HTTP-Requests.
 The frontend in this repo is **CLI**-based.
 _**For a web-frontend use [tranga-website](https://github.com/C9Glax/tranga-website).**_
 
 This project downloads the images for a Manga from the specified Scanlation-Website and packages them with some metadata - from that same website - in a .cbz-archive (per chapter).
 It does this on an interval, and checks for any Chapters (.cbz-Archive) not already existing in your specified Download-Location. (If you rename or move files, it will download those again.)
 Tranga can (if configured) trigger a scan in Komga or Kavita, however the directory in which the Manga reside has to be available to both Tranga and Komga/Kavita.
 
-The project doesn't manage metadata, doesn't curate, change or enhance any information that isn't available on the selected Scanlation-Site.
+The project doesn't manage metadata, and doesn't curate, change or enhance any information that isn't available on the selected Scanlation-Site.
 It will blindly use whatever it scrapes (yes, this is a glorified Web-scraper).
 
 ### Inspiration:
 
 Because [Kaizoku](https://github.com/oae/kaizoku) was relying on [mangal](https://github.com/metafates/mangal) and mangal
-hasn't received bugfixes for it's issues with Titles not showing up, or throwing errors because of illegal characters,
-there were no alternatives for automatic downloads. However [Kaizoku](https://github.com/oae/kaizoku) certainly had a great Web-UI.
+hasn't received bugfixes for its issues with Titles not showing up, or throwing errors because of illegal characters,
+there were no alternatives for automatic downloads. However, [Kaizoku](https://github.com/oae/kaizoku) certainly had a great Web-UI.
 
 That is why I wanted to create my own project, in a language I understand, and that I am able to maintain myself.
 
@@ -92,46 +90,41 @@ That is why I wanted to create my own project, in a language I understand, and t
 
 - .NET-Core
 - Newtonsoft.JSON
-- [PuppeteerSharp](https://www.puppeteersharp.com/) for Mangasee
+- [PuppeteerSharp](https://www.puppeteersharp.com/)
 - [Html Agility Pack (HAP)](https://html-agility-pack.net/)
+- [Soenneker.Utils.String.NeedlemanWunsch](https://github.com/soenneker/soenneker.utils.string.needlemanwunsch)
 - 💙 Blåhaj 🦈
 
 <p align="right">(<a href="#readme-top">back to top</a>)</p>
 
+## Star History
+
+<a href="https://star-history.com/#c9glax/tranga&Date">
+ <picture>
+   <source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=c9glax/tranga&type=Date&theme=dark" />
+   <source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=c9glax/tranga&type=Date" />
+   <img alt="Star History Chart" src="https://api.star-history.com/svg?repos=c9glax/tranga&type=Date" />
+ </picture>
+</a>
+
 <!-- GETTING STARTED -->
 ## Getting Started
 
 There are two release types:
 
 - CLI
 - Docker
 
 ### CLI
 
 Head over to [releases](https://git.bernloehr.eu/glax/Tranga/releases) and download.
 
 ~~The CLI will guide you through setup.~~ Not in the current version.
 Right now it is barebones, with options to view logs and make HTTP-Requests.
 
 ### Docker
 
 Download [docker-compose.yaml](https://git.bernloehr.eu/glax/Tranga/src/branch/master/docker-compose.yaml) and configure to your needs.
-Mount `/Manga` to wherever you want your chapters (`.cbz`-Archives) downloaded (for example where Komga/Kavita can access them).
+Mount `/Manga` to wherever you want your chapters (`.cbz`-Archives) downloaded (where Komga/Kavita can access them).
+The `docker-compose` also includes [tranga-website](https://github.com/C9Glax/tranga-website) as frontend. For its configuration refer to the repo README.
 
 For compatibility, do not execute the compose as root (which you should not do anyway...) but as a user that can
 access the folder.
 
 ### Prerequisites
 
 #### To Build
-[.NET-Core 7.0 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/7.0)
+[.NET-Core 8.0 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/8.0)
 #### To Run
-[.NET-Core 7.0 Runtime](https://dotnet.microsoft.com/en-us/download/dotnet/7.0) (scroll down a bit; it should be the second item on the right.)
-
-<!-- ROADMAP -->
-## Roadmap
-
-- [ ] Docker ARM support
-- [ ] ❓
+[.NET-Core 8.0 Runtime](https://dotnet.microsoft.com/en-us/download/dotnet/8.0) (scroll down a bit; it should be the second item on the right.)
 
 See the [open issues](https://github.com/C9Glax/tranga/issues) for a full list of proposed features (and known issues).
@@ -8,5 +8,7 @@
 	<s:Boolean x:Key="/Default/UserDictionary/Words/=mangakatana/@EntryIndexedValue">True</s:Boolean>
 	<s:Boolean x:Key="/Default/UserDictionary/Words/=Manganato/@EntryIndexedValue">True</s:Boolean>
 	<s:Boolean x:Key="/Default/UserDictionary/Words/=Mangasee/@EntryIndexedValue">True</s:Boolean>
+	<s:Boolean x:Key="/Default/UserDictionary/Words/=Mangaworld/@EntryIndexedValue">True</s:Boolean>
+	<s:Boolean x:Key="/Default/UserDictionary/Words/=Ntfy/@EntryIndexedValue">True</s:Boolean>
 	<s:Boolean x:Key="/Default/UserDictionary/Words/=Taskmanager/@EntryIndexedValue">True</s:Boolean>
 	<s:Boolean x:Key="/Default/UserDictionary/Words/=Tranga/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>
@@ -1,5 +1,7 @@
-using System.Text.RegularExpressions;
+using System.Runtime.InteropServices;
+using System.Text.RegularExpressions;
 using System.Xml.Linq;
+using static System.IO.UnixFileMode;
 
 namespace Tranga;
 
@@ -7,33 +9,45 @@ namespace Tranga;
 /// Has to be Part of a publication
 /// Includes the Chapter-Name, -VolumeNumber, -ChapterNumber, the location of the chapter on the internet and the saveName of the local file.
 /// </summary>
-public readonly struct Chapter
+public readonly struct Chapter : IComparable
 {
     // ReSharper disable once MemberCanBePrivate.Global
     public Manga parentManga { get; }
     public string? name { get; }
-    public string? volumeNumber { get; }
-    public string chapterNumber { get; }
+    public float volumeNumber { get; }
+    public float chapterNumber { get; }
     public string url { get; }
     // ReSharper disable once MemberCanBePrivate.Global
     public string fileName { get; }
+    public string? id { get; }
 
     private static readonly Regex LegalCharacters = new (@"([A-z]*[0-9]* *\.*-*,*\]*\[*'*\'*\)*\(*~*!*)*");
-    private static readonly Regex IllegalStrings = new(@"Vol(ume)?.?", RegexOptions.IgnoreCase);
-    public Chapter(Manga parentManga, string? name, string? volumeNumber, string chapterNumber, string url)
+    private static readonly Regex IllegalStrings = new(@"(Vol(ume)?|Ch(apter)?)\.?", RegexOptions.IgnoreCase);
+
+    public Chapter(Manga parentManga, string? name, string? volumeNumber, string chapterNumber, string url, string? id = null)
+        : this(parentManga, name, float.Parse(volumeNumber??"0", GlobalBase.numberFormatDecimalPoint),
+            float.Parse(chapterNumber, GlobalBase.numberFormatDecimalPoint), url, id)
+    {
+    }
+
+    public Chapter(Manga parentManga, string? name, float? volumeNumber, float chapterNumber, string url, string? id = null)
     {
         this.parentManga = parentManga;
         this.name = name;
-        this.volumeNumber = volumeNumber;
+        this.volumeNumber = volumeNumber??0;
         this.chapterNumber = chapterNumber;
         this.url = url;
+        this.id = id;
 
-        string chapterName = string.Concat(LegalCharacters.Matches(name ?? ""));
-        string volStr = this.volumeNumber is not null ? $"Vol.{this.volumeNumber} " : "";
-        string chNumberStr = $"Ch.{chapterNumber} ";
-        string chNameStr = chapterName.Length > 0 ? $"- {chapterName}" : "";
-        chNameStr = IllegalStrings.Replace(chNameStr, "");
-        this.fileName = $"{volStr}{chNumberStr}{chNameStr}";
+        string chapterVolNumStr = $"Vol.{this.volumeNumber} Ch.{chapterNumber}";
+
+        if (name is not null && name.Length > 0)
+        {
+            string chapterName = IllegalStrings.Replace(string.Concat(LegalCharacters.Matches(name)), "");
+            this.fileName = chapterName.Length > 0 ? $"{chapterVolNumStr} - {chapterName}" : chapterVolNumStr;
+        }
+        else
+            this.fileName = chapterVolNumStr;
     }
 
     public override string ToString()
@@ -41,37 +55,88 @@ public readonly struct Chapter
         return $"Chapter {parentManga.sortName} {parentManga.internalId} {chapterNumber} {name}";
     }
 
+    public override bool Equals(object? obj)
+    {
+        if (obj is not Chapter)
+            return false;
+        return CompareTo(obj) == 0;
+    }
+
+    public int CompareTo(object? obj)
+    {
+        if(obj is not Chapter otherChapter)
+            throw new ArgumentException($"{obj} can not be compared to {this}");
+        return volumeNumber.CompareTo(otherChapter.volumeNumber) switch
+        {
+            <0 => -1,
+            >0 => 1,
+            _ => chapterNumber.CompareTo(otherChapter.chapterNumber)
+        };
+    }
+
     /// <summary>
     /// Checks if a chapter-archive is already present
     /// </summary>
     /// <returns>true if chapter is present</returns>
-    internal bool CheckChapterIsDownloaded(string downloadLocation)
+    internal bool CheckChapterIsDownloaded()
     {
-        string newFilePath = GetArchiveFilePath(downloadLocation);
-        if (!Directory.Exists(Path.Join(downloadLocation, parentManga.folderName)))
+        string mangaDirectory = Path.Join(TrangaSettings.downloadLocation, parentManga.folderName);
+        if (!Directory.Exists(mangaDirectory))
             return false;
-        FileInfo[] archives = new DirectoryInfo(Path.Join(downloadLocation, parentManga.folderName)).GetFiles();
-        Regex chapterInfoRex = new(@"Ch\.[0-9.]+");
-        Regex chapterRex = new(@"[0-9]+(\.[0-9]+)?");
-
-        if (File.Exists(newFilePath))
-            return true;
-
-        string cn = this.chapterNumber;
-        if (archives.FirstOrDefault(archive => chapterRex.Match(chapterInfoRex.Match(archive.Name).Value).Value == cn) is { } path)
+        FileInfo? mangaArchive = null;
+        string markerPath = Path.Join(mangaDirectory, $".{id}");
+        if (this.id is not null && File.Exists(markerPath))
         {
-            File.Move(path.FullName, newFilePath);
-            return true;
+            if(File.Exists(File.ReadAllText(markerPath)))
+                mangaArchive = new FileInfo(File.ReadAllText(markerPath));
+            else
+                File.Delete(markerPath);
         }
-        return false;
+
+        if(mangaArchive is null)
+        {
+            FileInfo[] archives = new DirectoryInfo(mangaDirectory).GetFiles("*.cbz");
+            Regex volChRex = new(@"(?:Vol(?:ume)?\.([0-9]+)\D*)?Ch(?:apter)?\.([0-9]+(?:\.[0-9]+)*)(?: - (.*))?.cbz");
+
+            Chapter t = this;
+            mangaArchive = archives.FirstOrDefault(archive =>
+            {
+                Match m = volChRex.Match(archive.Name);
+                /*
+                 * 1. If the volumeNumber is not present in the filename, it is not checked.
+                 * 2. Check the chapterNumber in the chapter against the one in the filename.
+                 * 3. The chapterName has to either be absent both in the chapter and the filename or match.
+                 */
+                return (!m.Groups[1].Success || m.Groups[1].Value == t.volumeNumber.ToString(GlobalBase.numberFormatDecimalPoint)) &&
+                       m.Groups[2].Value == t.chapterNumber.ToString(GlobalBase.numberFormatDecimalPoint) &&
+                       ((!m.Groups[3].Success && string.IsNullOrEmpty(t.name)) || m.Groups[3].Value == t.name);
+            });
+        }
+
+        string correctPath = GetArchiveFilePath();
+        if(mangaArchive is not null && mangaArchive.FullName != correctPath)
+            mangaArchive.MoveTo(correctPath, true);
+        return (mangaArchive is not null);
     }
 
+    public void CreateChapterMarker()
+    {
+        if (this.id is null)
+            return;
+        string path = Path.Join(TrangaSettings.downloadLocation, parentManga.folderName, $".{id}");
+        File.WriteAllText(path, GetArchiveFilePath());
+        File.SetAttributes(path, FileAttributes.Hidden);
+        if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
+            File.SetUnixFileMode(path, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute | OtherRead | OtherExecute);
+    }
+
     /// <summary>
     /// Creates full file path of chapter-archive
     /// </summary>
     /// <returns>Filepath</returns>
-    internal string GetArchiveFilePath(string downloadLocation)
+    internal string GetArchiveFilePath()
     {
-        return Path.Join(downloadLocation, parentManga.folderName, $"{parentManga.folderName} - {this.fileName}.cbz");
+        return Path.Join(TrangaSettings.downloadLocation, parentManga.folderName, $"{parentManga.folderName} - {this.fileName}.cbz");
     }
 
     /// <summary>
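Chapter now stores volume and chapter numbers as float and implements IComparable, so chapters order by volume first and by chapter number within a volume. A standalone sketch of that ordering (plain tuples stand in for Chapter; the values are made up):

```csharp
using System;
using System.Collections.Generic;

// Standalone sketch of the CompareTo ordering above, using tuples instead of Chapter.
// Because the numbers are floats now, "Ch. 10.5" sorts between 10 and 11 instead of
// wherever a plain string comparison would put it.
var chapters = new List<(float vol, float ch)> { (2f, 1f), (1f, 10.5f), (1f, 2f), (1f, 11f) };

chapters.Sort((a, b) => a.vol.CompareTo(b.vol) switch
{
    < 0 => -1,
    > 0 => 1,
    _   => a.ch.CompareTo(b.ch),
});

foreach ((float vol, float ch) in chapters)
    Console.WriteLine($"Vol.{vol} Ch.{ch}");   // Vol.1 Ch.2, Vol.1 Ch.10.5, Vol.1 Ch.11, Vol.2 Ch.1
```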
@@ -1,4 +1,5 @@
+using System.Globalization;
 using System.Text.RegularExpressions;
 using Logging;
 using Newtonsoft.Json;
 using Tranga.LibraryConnectors;
@@ -8,31 +9,53 @@ namespace Tranga;
 
 public abstract class GlobalBase
 {
-    protected Logger? logger { get; init; }
-    protected TrangaSettings settings { get; init; }
+    [JsonIgnore]
+    public Logger? logger { get; init; }
     protected HashSet<NotificationConnector> notificationConnectors { get; init; }
     protected HashSet<LibraryConnector> libraryConnectors { get; init; }
-    protected List<Manga> cachedPublications { get; init; }
-    protected static readonly NumberFormatInfo numberFormatDecimalPoint = new (){ NumberDecimalSeparator = "." };
+    private Dictionary<string, Manga> cachedPublications { get; init; }
+    public static readonly NumberFormatInfo numberFormatDecimalPoint = new (){ NumberDecimalSeparator = "." };
+    protected static readonly Regex baseUrlRex = new(@"https?:\/\/[0-9A-z\.-]+(:[0-9]+)?");
 
     protected GlobalBase(GlobalBase clone)
     {
         this.logger = clone.logger;
-        this.settings = clone.settings;
         this.notificationConnectors = clone.notificationConnectors;
         this.libraryConnectors = clone.libraryConnectors;
         this.cachedPublications = clone.cachedPublications;
     }
 
-    protected GlobalBase(Logger? logger, TrangaSettings settings)
+    protected GlobalBase(Logger? logger)
     {
         this.logger = logger;
-        this.settings = settings;
-        this.notificationConnectors = settings.LoadNotificationConnectors(this);
-        this.libraryConnectors = settings.LoadLibraryConnectors(this);
+        this.notificationConnectors = TrangaSettings.LoadNotificationConnectors(this);
+        this.libraryConnectors = TrangaSettings.LoadLibraryConnectors(this);
        this.cachedPublications = new();
     }
 
+    protected void AddMangaToCache(Manga manga)
+    {
+        if (!this.cachedPublications.TryAdd(manga.internalId, manga))
+        {
+            Log($"Overwriting Manga {manga.internalId}");
+            this.cachedPublications[manga.internalId] = manga;
+        }
+    }
+
+    protected Manga? GetCachedManga(string internalId)
+    {
+        return cachedPublications.TryGetValue(internalId, out Manga manga) switch
+        {
+            true => manga,
+            _ => null
+        };
+    }
+
+    protected IEnumerable<Manga> GetAllCachedManga()
+    {
+        return cachedPublications.Values;
+    }
+
     protected void Log(string message)
     {
         logger?.WriteLine(this.GetType().Name, message);
@@ -43,27 +66,32 @@ public abstract class GlobalBase
         Log(string.Format(fStr, replace));
     }
 
-    protected void SendNotifications(string title, string text)
+    protected void SendNotifications(string title, string text, bool buffer = false)
     {
         foreach (NotificationConnector nc in notificationConnectors)
-            nc.SendNotification(title, text);
+            nc.SendNotification(title, text, buffer);
     }
 
     protected void AddNotificationConnector(NotificationConnector notificationConnector)
     {
         Log($"Adding {notificationConnector}");
-        notificationConnectors.RemoveWhere(nc => nc.GetType() == notificationConnector.GetType());
+        notificationConnectors.RemoveWhere(nc => nc.notificationConnectorType == notificationConnector.notificationConnectorType);
         notificationConnectors.Add(notificationConnector);
 
-        while(IsFileInUse(settings.notificationConnectorsFilePath))
+        while(IsFileInUse(TrangaSettings.notificationConnectorsFilePath))
             Thread.Sleep(100);
-        File.WriteAllText(settings.notificationConnectorsFilePath, JsonConvert.SerializeObject(notificationConnectors));
+        Log("Exporting notificationConnectors");
+        File.WriteAllText(TrangaSettings.notificationConnectorsFilePath, JsonConvert.SerializeObject(notificationConnectors));
     }
 
     protected void DeleteNotificationConnector(NotificationConnector.NotificationConnectorType notificationConnectorType)
     {
         Log($"Removing {notificationConnectorType}");
         notificationConnectors.RemoveWhere(nc => nc.notificationConnectorType == notificationConnectorType);
+        while(IsFileInUse(TrangaSettings.notificationConnectorsFilePath))
+            Thread.Sleep(100);
+        Log("Exporting notificationConnectors");
+        File.WriteAllText(TrangaSettings.notificationConnectorsFilePath, JsonConvert.SerializeObject(notificationConnectors));
     }
 
     protected void UpdateLibraries()
@@ -75,21 +103,28 @@ public abstract class GlobalBase
     protected void AddLibraryConnector(LibraryConnector libraryConnector)
     {
         Log($"Adding {libraryConnector}");
-        libraryConnectors.RemoveWhere(lc => lc.GetType() == libraryConnector.GetType());
+        libraryConnectors.RemoveWhere(lc => lc.libraryType == libraryConnector.libraryType);
         libraryConnectors.Add(libraryConnector);
 
-        while(IsFileInUse(settings.libraryConnectorsFilePath))
+        while(IsFileInUse(TrangaSettings.libraryConnectorsFilePath))
            Thread.Sleep(100);
-        File.WriteAllText(settings.libraryConnectorsFilePath, JsonConvert.SerializeObject(libraryConnectors));
+        Log("Exporting libraryConnectors");
+        File.WriteAllText(TrangaSettings.libraryConnectorsFilePath, JsonConvert.SerializeObject(libraryConnectors, Formatting.Indented));
     }
 
     protected void DeleteLibraryConnector(LibraryConnector.LibraryType libraryType)
     {
         Log($"Removing {libraryType}");
         libraryConnectors.RemoveWhere(lc => lc.libraryType == libraryType);
+        while(IsFileInUse(TrangaSettings.libraryConnectorsFilePath))
+            Thread.Sleep(100);
+        Log("Exporting libraryConnectors");
+        File.WriteAllText(TrangaSettings.libraryConnectorsFilePath, JsonConvert.SerializeObject(libraryConnectors, Formatting.Indented));
     }
 
-    protected bool IsFileInUse(string filePath)
+    protected bool IsFileInUse(string filePath) => IsFileInUse(filePath, this.logger);
+
+    public static bool IsFileInUse(string filePath, Logger? logger)
     {
         if (!File.Exists(filePath))
             return false;
@@ -101,7 +136,7 @@ public abstract class GlobalBase
         }
         catch (IOException)
         {
-            Log($"File is in use {filePath}");
+            logger?.WriteLine($"File is in use {filePath}");
             return true;
         }
     }
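cachedPublications changed above from a List<Manga> to a Dictionary keyed by internalId, so AddMangaToCache/GetCachedManga get O(1) lookups and overwrite-on-re-add instead of duplicate entries. A standalone sketch of that behaviour (strings stand in for Manga; the id is a placeholder):

```csharp
using System;
using System.Collections.Generic;

// Standalone sketch of the new cache semantics; string values stand in for Manga.
var cachedPublications = new Dictionary<string, string>();

void AddMangaToCache(string internalId, string manga)
{
    if (!cachedPublications.TryAdd(internalId, manga))
    {
        Console.WriteLine($"Overwriting Manga {internalId}");   // same log message as above
        cachedPublications[internalId] = manga;
    }
}

AddMangaToCache("some-id", "Some Title (first fetch)");
AddMangaToCache("some-id", "Some Title (refreshed)");           // overwrites, no duplicate entry
Console.WriteLine(cachedPublications.TryGetValue("some-id", out string? cached) ? cached : "not cached");
```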
@@ -1,4 +1,4 @@
-using System.Text;
+using System.Net;
 using Tranga.MangaConnectors;
 
 namespace Tranga.Jobs;
@@ -7,35 +7,48 @@ public class DownloadChapter : Job
 {
     public Chapter chapter { get; init; }
 
-    public DownloadChapter(GlobalBase clone, MangaConnector connector, Chapter chapter, DateTime lastExecution, string? parentJobId = null) : base(clone, connector, lastExecution, parentJobId: parentJobId)
+    public DownloadChapter(GlobalBase clone, MangaConnector connector, Chapter chapter, DateTime lastExecution, string? parentJobId = null) : base(clone, JobType.DownloadChapterJob, connector, lastExecution, parentJobId: parentJobId)
     {
         this.chapter = chapter;
     }
 
-    public DownloadChapter(GlobalBase clone, MangaConnector connector, Chapter chapter, string? parentJobId = null) : base(clone, connector, parentJobId: parentJobId)
+    public DownloadChapter(GlobalBase clone, MangaConnector connector, Chapter chapter, string? parentJobId = null) : base(clone, JobType.DownloadChapterJob, connector, parentJobId: parentJobId)
     {
         this.chapter = chapter;
     }
 
     protected override string GetId()
     {
-        return Convert.ToBase64String(Encoding.ASCII.GetBytes(string.Concat(this.GetType().ToString(), chapter.parentManga.internalId, chapter.chapterNumber)));
+        return $"{GetType()}-{chapter.parentManga.internalId}-{chapter.chapterNumber}";
     }
 
     public override string ToString()
     {
-        return $"DownloadChapter {id} {chapter}";
+        return $"{id} Chapter: {chapter}";
     }
 
-    protected override IEnumerable<Job> ExecuteReturnSubTasksInternal()
+    protected override IEnumerable<Job> ExecuteReturnSubTasksInternal(JobBoss jobBoss)
     {
         Task downloadTask = new(delegate
         {
-            mangaConnector.DownloadChapter(chapter, this.progressToken);
-            UpdateLibraries();
-            SendNotifications("Chapter downloaded", $"{chapter.parentManga.sortName} - {chapter.chapterNumber}");
+            mangaConnector.CopyCoverFromCacheToDownloadLocation(chapter.parentManga);
+            HttpStatusCode success = mangaConnector.DownloadChapter(chapter, this.progressToken);
+            chapter.parentManga.UpdateLatestDownloadedChapter(chapter);
+            if (success == HttpStatusCode.OK)
+            {
+                UpdateLibraries();
+                SendNotifications("Chapter downloaded", $"{chapter.parentManga.sortName} - {chapter.chapterNumber}", true);
+            }
         });
         downloadTask.Start();
         return Array.Empty<Job>();
     }
+
+    public override bool Equals(object? obj)
+    {
+        if (obj is not DownloadChapter otherJob)
+            return false;
+        return otherJob.mangaConnector == this.mangaConnector &&
+               otherJob.chapter.Equals(this.chapter);
+    }
 }
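The GetId change above swaps a Base64-encoded concatenation for a readable interpolated string. A standalone sketch comparing the two formats (the inputs are made-up examples, not real ids):

```csharp
using System;
using System.Text;

// Standalone sketch contrasting the old and new job-id formats; inputs are illustrative.
string type = "Tranga.Jobs.DownloadChapter";
string internalId = "example-manga-id";
string chapterNumber = "12.5";

// Old: opaque Base64 of the concatenated strings.
string oldId = Convert.ToBase64String(Encoding.ASCII.GetBytes(string.Concat(type, internalId, chapterNumber)));

// New: readable, and still unique per manga + chapter.
string newId = $"{type}-{internalId}-{chapterNumber}";

Console.WriteLine(oldId); // VHJhbmdhLkpvYnMuRG93bmxvYWRDaGFwdGVyZXhhbXBsZS1tYW5nYS1pZDEyLjU=
Console.WriteLine(newId); // Tranga.Jobs.DownloadChapter-example-manga-id-12.5
```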
@@ -1,45 +1,59 @@
-using System.Text;
-using Tranga.MangaConnectors;
+using Tranga.MangaConnectors;
 
 namespace Tranga.Jobs;
 
 public class DownloadNewChapters : Job
 {
-    public Manga manga { get; init; }
+    public Manga manga { get; set; }
+    public string translatedLanguage { get; init; }
 
     public DownloadNewChapters(GlobalBase clone, MangaConnector connector, Manga manga, DateTime lastExecution,
-        bool recurring = false, TimeSpan? recurrence = null, string? parentJobId = null) : base(clone, connector, lastExecution, recurring,
+        bool recurring = false, TimeSpan? recurrence = null, string? parentJobId = null, string translatedLanguage = "en") : base(clone, JobType.DownloadNewChaptersJob, connector, lastExecution, recurring,
         recurrence, parentJobId)
     {
         this.manga = manga;
+        this.translatedLanguage = translatedLanguage;
     }
 
-    public DownloadNewChapters(GlobalBase clone, MangaConnector connector, Manga manga, bool recurring = false, TimeSpan? recurrence = null, string? parentJobId = null) : base (clone, connector, recurring, recurrence, parentJobId)
+    public DownloadNewChapters(GlobalBase clone, MangaConnector connector, Manga manga, bool recurring = false, TimeSpan? recurrence = null, string? parentJobId = null, string translatedLanguage = "en") : base (clone, JobType.DownloadNewChaptersJob, connector, recurring, recurrence, parentJobId)
     {
         this.manga = manga;
+        this.translatedLanguage = translatedLanguage;
     }
 
     protected override string GetId()
     {
-        return Convert.ToBase64String(Encoding.ASCII.GetBytes(string.Concat(this.GetType().ToString(), manga.internalId)));
+        return $"{GetType()}-{manga.internalId}";
     }
 
     public override string ToString()
     {
-        return $"DownloadChapter {id} {manga}";
+        return $"{id} Manga: {manga}";
     }
 
-    protected override IEnumerable<Job> ExecuteReturnSubTasksInternal()
+    protected override IEnumerable<Job> ExecuteReturnSubTasksInternal(JobBoss jobBoss)
    {
-        Chapter[] chapters = mangaConnector.GetNewChapters(manga);
-        manga.SaveSeriesInfoJson();
+        Chapter[] chapters = mangaConnector.GetNewChapters(manga, this.translatedLanguage);
         this.progressToken.increments = chapters.Length;
         List<Job> jobs = new();
+        mangaConnector.CopyCoverFromCacheToDownloadLocation(manga);
         foreach (Chapter chapter in chapters)
         {
             DownloadChapter downloadChapterJob = new(this, this.mangaConnector, chapter, parentJobId: this.id);
             jobs.Add(downloadChapterJob);
         }
+        UpdateMetadata updateMetadataJob = new(this, this.mangaConnector, this.manga, parentJobId: this.id);
+        jobs.Add(updateMetadataJob);
         progressToken.Complete();
         return jobs;
     }
+
+    public override bool Equals(object? obj)
+    {
+        if (obj is not DownloadNewChapters otherJob)
+            return false;
+        return otherJob.mangaConnector == this.mangaConnector &&
+               otherJob.manga.publicationId == this.manga.publicationId;
+    }
 }
@@ -13,9 +13,13 @@ public abstract class Job : GlobalBase
     public string id => GetId();
     internal IEnumerable<Job>? subJobs { get; private set; }
     public string? parentJobId { get; init; }
+    public enum JobType : byte { DownloadChapterJob, DownloadNewChaptersJob, UpdateMetaDataJob }
 
-    internal Job(GlobalBase clone, MangaConnector connector, bool recurring = false, TimeSpan? recurrenceTime = null, string? parentJobId = null) : base(clone)
+    public JobType jobType;
+
+    internal Job(GlobalBase clone, JobType jobType, MangaConnector connector, bool recurring = false, TimeSpan? recurrenceTime = null, string? parentJobId = null) : base(clone)
     {
+        this.jobType = jobType;
         this.mangaConnector = connector;
         this.progressToken = new ProgressToken(0);
         this.recurring = recurring;
@@ -27,9 +31,10 @@ public abstract class Job : GlobalBase
         this.parentJobId = parentJobId;
     }
 
-    internal Job(GlobalBase clone, MangaConnector connector, DateTime lastExecution, bool recurring = false,
+    internal Job(GlobalBase clone, JobType jobType, MangaConnector connector, DateTime lastExecution, bool recurring = false,
         TimeSpan? recurrenceTime = null, string? parentJobId = null) : base(clone)
     {
+        this.jobType = jobType;
         this.mangaConnector = connector;
         this.progressToken = new ProgressToken(0);
         this.recurring = recurring;
@@ -59,14 +64,14 @@ public abstract class Job : GlobalBase
 
     public void ResetProgress()
     {
-        this.progressToken.increments = this.progressToken.increments - this.progressToken.incrementsCompleted;
+        this.progressToken.increments -= progressToken.incrementsCompleted;
         this.lastExecution = DateTime.Now;
         this.progressToken.Waiting();
     }
 
     public void ExecutionEnqueue()
     {
-        this.progressToken.increments = this.progressToken.increments - this.progressToken.incrementsCompleted;
         this.lastExecution = recurrenceTime is not null ? DateTime.Now.Subtract((TimeSpan)recurrenceTime) : DateTime.UnixEpoch;
+        this.progressToken.increments -= progressToken.incrementsCompleted;
         this.progressToken.Standby();
     }
 
@@ -81,13 +86,13 @@ public abstract class Job : GlobalBase
             subJob.Cancel();
     }
 
-    public IEnumerable<Job> ExecuteReturnSubTasks()
+    public IEnumerable<Job> ExecuteReturnSubTasks(JobBoss jobBoss)
     {
         progressToken.Start();
-        subJobs = ExecuteReturnSubTasksInternal();
+        subJobs = ExecuteReturnSubTasksInternal(jobBoss);
         lastExecution = DateTime.Now;
         return subJobs;
     }
 
-    protected abstract IEnumerable<Job> ExecuteReturnSubTasksInternal();
+    protected abstract IEnumerable<Job> ExecuteReturnSubTasksInternal(JobBoss jobBoss);
 }
@ -1,5 +1,9 @@
|
||||
using Newtonsoft.Json;
|
||||
using System.Diagnostics;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Text.RegularExpressions;
|
||||
using Newtonsoft.Json;
|
||||
using Tranga.MangaConnectors;
|
||||
using static System.IO.UnixFileMode;
|
||||
|
||||
namespace Tranga.Jobs;
|
||||
|
||||
@ -10,31 +14,27 @@ public class JobBoss : GlobalBase
|
||||
|
||||
public JobBoss(GlobalBase clone, HashSet<MangaConnector> connectors) : base(clone)
|
||||
{
|
||||
if (File.Exists(settings.jobsFilePath))
|
||||
{
|
||||
this.jobs = JsonConvert.DeserializeObject<HashSet<Job>>(File.ReadAllText(settings.jobsFilePath), new JobJsonConverter(this, new MangaConnectorJsonConverter(this, connectors)))!;
|
||||
foreach (Job job in this.jobs)
|
||||
this.jobs.FirstOrDefault(jjob => jjob.id == job.parentJobId)?.AddSubJob(job);
|
||||
}
|
||||
else
|
||||
this.jobs = new();
|
||||
foreach (DownloadNewChapters ncJob in this.jobs.Where(job => job is DownloadNewChapters))
|
||||
cachedPublications.Add(ncJob.manga);
|
||||
this.jobs = new();
|
||||
LoadJobsList(connectors);
|
||||
this.mangaConnectorJobQueue = new();
|
||||
Log($"Next job in {jobs.MinBy(job => job.nextExecution)?.nextExecution.Subtract(DateTime.Now)} {jobs.MinBy(job => job.nextExecution)?.id}");
|
||||
}
|
||||
|
||||
public void AddJob(Job job)
|
||||
public bool AddJob(Job job, string? jobFile = null)
|
||||
{
|
||||
if (ContainsJobLike(job))
|
||||
{
|
||||
Log($"Already Contains Job {job}");
|
||||
return false;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (!this.jobs.Add(job))
|
||||
return false;
|
||||
Log($"Added {job}");
|
||||
this.jobs.Add(job);
|
||||
ExportJobsList();
|
||||
UpdateJobFile(job, jobFile);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public void AddJobs(IEnumerable<Job> jobsToAdd)
|
||||
@ -43,17 +43,13 @@ public class JobBoss : GlobalBase
|
||||
AddJob(job);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compares contents of the provided job and all current jobs
|
||||
/// Does not check if objects are the same
|
||||
/// </summary>
|
||||
public bool ContainsJobLike(Job job)
|
||||
{
|
||||
if (job is DownloadChapter dcJob)
|
||||
{
|
||||
return this.GetJobsLike(dcJob.mangaConnector, chapter: dcJob.chapter).Any();
|
||||
}else if (job is DownloadNewChapters ncJob)
|
||||
{
|
||||
return this.GetJobsLike(ncJob.mangaConnector, ncJob.manga).Any();
|
||||
}
|
||||
|
||||
return false;
|
||||
return this.jobs.Any(existingJob => existingJob.Equals(job));
|
||||
}
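
For context on the rewritten ContainsJobLike above: duplicate detection now relies entirely on each job's Equals override (a content comparison of connector and publication) instead of per-type GetJobsLike lookups. The following is a minimal, self-contained sketch of that pattern using hypothetical stand-in types, not the repository's classes.

using System;
using System.Collections.Generic;
using System.Linq;

// Hypothetical stand-in for a job: a record gets value-based Equals/GetHashCode,
// mirroring the hand-written Equals overrides on DownloadNewChapters/UpdateMetadata
// (mangaConnector + manga.publicationId).
record FakeJob(string ConnectorName, string PublicationId);

class FakeJobBoss
{
    private readonly HashSet<FakeJob> jobs = new();

    // Same idea as ContainsJobLike: compare contents, not references.
    public bool ContainsJobLike(FakeJob job) => jobs.Any(existing => existing.Equals(job));

    public bool AddJob(FakeJob job) => !ContainsJobLike(job) && jobs.Add(job);

    static void Main()
    {
        FakeJobBoss boss = new();
        Console.WriteLine(boss.AddJob(new FakeJob("SomeConnector", "some-id"))); // True
        Console.WriteLine(boss.AddJob(new FakeJob("SomeConnector", "some-id"))); // False: same content, different instance
    }
}
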
|
||||
|
||||
public void RemoveJob(Job job)
|
||||
@ -61,20 +57,21 @@ public class JobBoss : GlobalBase
|
||||
Log($"Removing {job}");
|
||||
job.Cancel();
|
||||
this.jobs.Remove(job);
|
||||
if(job.subJobs is not null)
|
||||
if(job.subJobs is not null && job.subJobs.Any())
|
||||
RemoveJobs(job.subJobs);
|
||||
ExportJobsList();
|
||||
UpdateJobFile(job);
|
||||
}
|
||||
|
||||
public void RemoveJobs(IEnumerable<Job?> jobsToRemove)
|
||||
{
|
||||
Log($"Removing {jobsToRemove.Count()} jobs.");
|
||||
foreach (Job? job in jobsToRemove)
|
||||
List<Job?> toRemove = jobsToRemove.ToList(); //Prevent multiple enumeration
|
||||
Log($"Removing {toRemove.Count()} jobs.");
|
||||
foreach (Job? job in toRemove)
|
||||
if(job is not null)
|
||||
RemoveJob(job);
|
||||
}
|
||||
|
||||
public IEnumerable<Job> GetJobsLike(string? connectorName = null, string? internalId = null, string? chapterNumber = null)
|
||||
public IEnumerable<Job> GetJobsLike(string? connectorName = null, string? internalId = null, float? chapterNumber = null)
|
||||
{
|
||||
IEnumerable<Job> ret = this.jobs;
|
||||
if (connectorName is not null)
|
||||
@ -86,7 +83,7 @@ public class JobBoss : GlobalBase
|
||||
if (jjob is not DownloadChapter job)
|
||||
return false;
|
||||
return job.chapter.parentManga.internalId == internalId &&
|
||||
job.chapter.chapterNumber == chapterNumber;
|
||||
job.chapter.chapterNumber.Equals(chapterNumber);
|
||||
});
|
||||
else if (internalId is not null)
|
||||
ret = ret.Where(jjob =>
|
||||
@ -102,7 +99,7 @@ public class JobBoss : GlobalBase
|
||||
Chapter? chapter = null)
|
||||
{
|
||||
if (chapter is not null)
|
||||
return GetJobsLike(mangaConnector?.name, chapter.Value.parentManga.internalId, chapter?.chapterNumber);
|
||||
return GetJobsLike(mangaConnector?.name, chapter.Value.parentManga.internalId, chapter.Value.chapterNumber);
|
||||
else
|
||||
return GetJobsLike(mangaConnector?.name, publication?.internalId);
|
||||
}
|
||||
@ -128,39 +125,152 @@ public class JobBoss : GlobalBase
|
||||
|
||||
private bool QueueContainsJob(Job job)
|
||||
{
|
||||
mangaConnectorJobQueue.TryAdd(job.mangaConnector, new Queue<Job>());
|
||||
if (mangaConnectorJobQueue.TryAdd(job.mangaConnector, new Queue<Job>()))//If we can add the queue, there is certainly no job in it
|
||||
return true;
|
||||
return mangaConnectorJobQueue[job.mangaConnector].Contains(job);
|
||||
}
|
||||
|
||||
public void AddJobToQueue(Job job)
|
||||
{
|
||||
Log($"Adding Job to Queue. {job}");
|
||||
mangaConnectorJobQueue.TryAdd(job.mangaConnector, new Queue<Job>());
|
||||
Queue<Job> connectorJobQueue = mangaConnectorJobQueue[job.mangaConnector];
|
||||
if(!connectorJobQueue.Contains(job))
|
||||
connectorJobQueue.Enqueue(job);
|
||||
if(!QueueContainsJob(job))
|
||||
mangaConnectorJobQueue[job.mangaConnector].Enqueue(job);
|
||||
job.ExecutionEnqueue();
|
||||
}
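
The queue handling above (QueueContainsJob / AddJobToQueue) keeps one FIFO queue per MangaConnector, so only the head job of each connector's queue runs at a time while different connectors progress independently. A simplified, self-contained sketch of that idea with generic stand-in types (not the actual classes) follows.

using System.Collections.Generic;

// Simplified sketch: one queue per connector key; work against the same site is
// serialized, while different sites can be worked on independently.
class PerConnectorQueues<TConnector, TJob> where TConnector : notnull
{
    private readonly Dictionary<TConnector, Queue<TJob>> queues = new();

    public void Enqueue(TConnector connector, TJob job)
    {
        // If TryAdd succeeds, the queue was just created and cannot contain the job yet.
        queues.TryAdd(connector, new Queue<TJob>());
        Queue<TJob> queue = queues[connector];
        if (!queue.Contains(job))
            queue.Enqueue(job);
    }

    public bool TryPeekHead(TConnector connector, out TJob? head)
    {
        head = default;
        if (!queues.TryGetValue(connector, out Queue<TJob>? queue) || queue.Count == 0)
            return false;
        head = queue.Peek();
        return true;
    }
}
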
|
||||
|
||||
public void AddJobsToQueue(IEnumerable<Job> jobs)
|
||||
private void AddJobsToQueue(IEnumerable<Job> newJobs)
|
||||
{
|
||||
foreach(Job job in jobs)
|
||||
foreach(Job job in newJobs)
|
||||
AddJobToQueue(job);
|
||||
}
|
||||
|
||||
public void ExportJobsList()
|
||||
private void LoadJobsList(HashSet<MangaConnector> connectors)
|
||||
{
|
||||
Log($"Exporting {settings.jobsFilePath}");
|
||||
string content = JsonConvert.SerializeObject(this.jobs);
|
||||
while(IsFileInUse(settings.jobsFilePath))
|
||||
Thread.Sleep(10);
|
||||
File.WriteAllText(settings.jobsFilePath, content);
|
||||
if (!Directory.Exists(TrangaSettings.jobsFolderPath)) //No jobs to load
|
||||
{
|
||||
Directory.CreateDirectory(TrangaSettings.jobsFolderPath);
|
||||
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
File.SetUnixFileMode(TrangaSettings.jobsFolderPath, UserRead | UserWrite | UserExecute | GroupRead | OtherRead);
|
||||
return;
|
||||
}
|
||||
|
||||
//Load json-job-files
|
||||
foreach (FileInfo file in Directory.GetFiles(TrangaSettings.jobsFolderPath, "*.json").Select(f => new FileInfo(f)))
|
||||
{
|
||||
Log($"Adding {file.Name}");
|
||||
try
|
||||
{
|
||||
Job? job = JsonConvert.DeserializeObject<Job>(File.ReadAllText(file.FullName),
|
||||
new JobJsonConverter(this, new MangaConnectorJsonConverter(this, connectors)));
|
||||
if (job is null) throw new NullReferenceException();
|
||||
|
||||
Log($"Adding Job {job}");
|
||||
if (!AddJob(job, file.FullName)) //If we detect a duplicate, delete the file.
|
||||
{
|
||||
//string path = string.Concat(file.FullName, ".duplicate");
|
||||
//file.MoveTo(path);
|
||||
//Log($"Duplicate detected or otherwise not able to add job to list.\nMoved job {job} to {path}");
|
||||
Log($"Duplicate detected or otherwise not able to add job to list. Removed the file {file.FullName} {job}");
|
||||
}
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
if (e is not UnreachableException or NullReferenceException)
|
||||
throw;
|
||||
Log(e.Message);
|
||||
string newName = file.FullName + ".failed";
|
||||
Log($"Failed loading file {file.Name}.\nMoving to {newName}.\n" +
|
||||
$"If you think this is a bug, upload contents of the file to the Bugreport!");
|
||||
File.Move(file.FullName, newName);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
//Connect jobs to parent-jobs and add Publications to cache
|
||||
foreach (Job job in this.jobs)
|
||||
{
|
||||
Log($"Loading Job {job}");
|
||||
Job? parentJob = this.jobs.FirstOrDefault(jjob => jjob.id == job.parentJobId);
|
||||
if (parentJob is not null)
|
||||
{
|
||||
parentJob.AddSubJob(job);
|
||||
Log($"Parent Job {parentJob}");
|
||||
}
|
||||
if (job is DownloadNewChapters dncJob)
|
||||
AddMangaToCache(dncJob.manga);
|
||||
}
|
||||
|
||||
string[] coverFiles = Directory.GetFiles(TrangaSettings.coverImageCache);
|
||||
foreach(string fileName in coverFiles.Where(fileName => !GetAllCachedManga().Any(manga => manga.coverFileNameInCache == fileName)))
|
||||
File.Delete(fileName);
|
||||
}
|
||||
|
||||
internal void UpdateJobFile(Job job, string? oldFile = null)
|
||||
{
|
||||
string newJobFilePath = Path.Join(TrangaSettings.jobsFolderPath, $"{job.id}.json");
|
||||
string oldFilePath = oldFile??Path.Join(TrangaSettings.jobsFolderPath, $"{job.id}.json");
|
||||
|
||||
//Delete old file
|
||||
if (File.Exists(oldFilePath))
|
||||
{
|
||||
Log($"Deleting Job-file {oldFilePath}");
|
||||
try
|
||||
{
|
||||
while(IsFileInUse(oldFilePath))
|
||||
Thread.Sleep(10);
|
||||
File.Delete(oldFilePath);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Error deleting {oldFilePath} job {job.id}\n{e}");
|
||||
return; //Don't export a new file when we haven't actually deleted the old one
|
||||
}
|
||||
}
|
||||
|
||||
//Export job (in new file) if it is still in our jobs list
|
||||
if (GetJobById(job.id) is not null)
|
||||
{
|
||||
Log($"Exporting Job {newJobFilePath}");
|
||||
string jobStr = JsonConvert.SerializeObject(job, Formatting.Indented);
|
||||
while(IsFileInUse(newJobFilePath))
|
||||
Thread.Sleep(10);
|
||||
File.WriteAllText(newJobFilePath, jobStr);
|
||||
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
File.SetUnixFileMode(newJobFilePath, UserRead | UserWrite | GroupRead | OtherRead);
|
||||
}
|
||||
}
|
||||
|
||||
private void UpdateAllJobFiles()
|
||||
{
|
||||
Log("Exporting Jobs");
|
||||
foreach (Job job in this.jobs)
|
||||
UpdateJobFile(job);
|
||||
|
||||
//Remove files with jobs not in this.jobs-list
|
||||
Regex idRex = new (@"(.*)\.json");
|
||||
foreach (FileInfo file in new DirectoryInfo(TrangaSettings.jobsFolderPath).EnumerateFiles())
|
||||
{
|
||||
if (idRex.IsMatch(file.Name))
|
||||
{
|
||||
string id = idRex.Match(file.Name).Groups[1].Value;
|
||||
if (!this.jobs.Any(job => job.id == id))
|
||||
{
|
||||
try
|
||||
{
|
||||
file.Delete();
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log(e.ToString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void CheckJobs()
|
||||
{
|
||||
foreach (Job job in jobs.Where(job => job.nextExecution < DateTime.Now && !QueueContainsJob(job)).OrderBy(job => job.nextExecution))
|
||||
AddJobToQueue(job);
|
||||
AddJobsToQueue(jobs.Where(job => job.progressToken.state == ProgressToken.State.Waiting && job.nextExecution < DateTime.Now && !QueueContainsJob(job)).OrderBy(job => job.nextExecution));
|
||||
foreach (Queue<Job> jobQueue in mangaConnectorJobQueue.Values)
|
||||
{
|
||||
if(jobQueue.Count < 1)
|
||||
@ -168,22 +278,23 @@ public class JobBoss : GlobalBase
|
||||
Job queueHead = jobQueue.Peek();
|
||||
if (queueHead.progressToken.state is ProgressToken.State.Complete or ProgressToken.State.Cancelled)
|
||||
{
|
||||
switch (queueHead)
|
||||
{
|
||||
case DownloadChapter:
|
||||
RemoveJob(queueHead);
|
||||
break;
|
||||
case DownloadNewChapters:
|
||||
if(queueHead.recurring)
|
||||
queueHead.progressToken.Complete();
|
||||
break;
|
||||
}
|
||||
if(!queueHead.recurring)
|
||||
RemoveJob(queueHead);
|
||||
else
|
||||
queueHead.ResetProgress();
|
||||
jobQueue.Dequeue();
|
||||
Log($"Next job in {jobs.MinBy(job => job.nextExecution)?.nextExecution.Subtract(DateTime.Now)} {jobs.MinBy(job => job.nextExecution)?.id}");
|
||||
}else if (queueHead.progressToken.state is ProgressToken.State.Standby)
|
||||
{
|
||||
Job[] subJobs = jobQueue.Peek().ExecuteReturnSubTasks().ToArray();
|
||||
Job eJob = jobQueue.Peek();
|
||||
Job[] subJobs = eJob.ExecuteReturnSubTasks(this).ToArray();
|
||||
UpdateJobFile(eJob);
|
||||
AddJobs(subJobs);
|
||||
AddJobsToQueue(subJobs);
|
||||
}else if (queueHead.progressToken.state is ProgressToken.State.Running && DateTime.Now.Subtract(queueHead.progressToken.lastUpdate) > TimeSpan.FromMinutes(5))
|
||||
{
|
||||
Log($"{queueHead} inactive for more than 5 minutes. Cancelling.");
|
||||
queueHead.Cancel();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -23,8 +23,24 @@ public class JobJsonConverter : JsonConverter
|
||||
public override object ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer)
|
||||
{
|
||||
JObject jo = JObject.Load(reader);
|
||||
if (jo.ContainsKey("manga"))//DownloadNewChapters
|
||||
|
||||
if (jo.ContainsKey("jobType") && jo["jobType"]!.Value<byte>() == (byte)Job.JobType.UpdateMetaDataJob)
|
||||
{
|
||||
return new UpdateMetadata(this._clone,
|
||||
jo.GetValue("mangaConnector")!.ToObject<MangaConnector>(JsonSerializer.Create(new JsonSerializerSettings()
|
||||
{
|
||||
Converters =
|
||||
{
|
||||
this._mangaConnectorJsonConverter
|
||||
}
|
||||
}))!,
|
||||
jo.GetValue("manga")!.ToObject<Manga>(),
|
||||
jo.GetValue("parentJobId")!.Value<string?>());
|
||||
}else if ((jo.ContainsKey("jobType") && jo["jobType"]!.Value<byte>() == (byte)Job.JobType.DownloadNewChaptersJob) || jo.ContainsKey("translatedLanguage"))//TODO change to jobType
|
||||
{
|
||||
DateTime lastExecution = jo.GetValue("lastExecution") is {} le
|
||||
? le.ToObject<DateTime>()
|
||||
: DateTime.UnixEpoch; //TODO do null checks on all variables
|
||||
return new DownloadNewChapters(this._clone,
|
||||
jo.GetValue("mangaConnector")!.ToObject<MangaConnector>(JsonSerializer.Create(new JsonSerializerSettings()
|
||||
{
|
||||
@ -34,13 +50,11 @@ public class JobJsonConverter : JsonConverter
|
||||
}
|
||||
}))!,
|
||||
jo.GetValue("manga")!.ToObject<Manga>(),
|
||||
jo.GetValue("lastExecution")!.ToObject<DateTime>(),
|
||||
lastExecution,
|
||||
jo.GetValue("recurring")!.Value<bool>(),
|
||||
jo.GetValue("recurrenceTime")!.ToObject<TimeSpan?>(),
|
||||
jo.GetValue("parentJobId")!.Value<string?>());
|
||||
}
|
||||
|
||||
if (jo.ContainsKey("chapter"))//DownloadChapter
|
||||
}else if ((jo.ContainsKey("jobType") && jo["jobType"]!.Value<byte>() == (byte)Job.JobType.DownloadChapterJob) || jo.ContainsKey("chapter"))//TODO change to jobType
|
||||
{
|
||||
return new DownloadChapter(this._clone,
|
||||
jo.GetValue("mangaConnector")!.ToObject<MangaConnector>(JsonSerializer.Create(new JsonSerializerSettings()
|
||||
|
@ -6,11 +6,11 @@ public class ProgressToken
    public int increments { get; set; }
    public int incrementsCompleted { get; set; }
    public float progress => GetProgress();

    public DateTime lastUpdate { get; private set; }
    public DateTime executionStarted { get; private set; }
    public TimeSpan timeRemaining => GetTimeRemaining();

    public enum State { Running, Complete, Standby, Cancelled }
    public enum State { Running, Complete, Standby, Cancelled, Waiting }
    public State state { get; private set; }

    public ProgressToken(int increments)
@ -18,14 +18,15 @@ public class ProgressToken
        this.cancellationRequested = false;
        this.increments = increments;
        this.incrementsCompleted = 0;
        this.state = State.Complete;
        this.state = State.Waiting;
        this.executionStarted = DateTime.UnixEpoch;
        this.lastUpdate = DateTime.UnixEpoch;
    }

    private float GetProgress()
    {
        if(increments > 0 && incrementsCompleted > 0)
            return (float)incrementsCompleted / (float)increments;
            return incrementsCompleted / (float)increments;
        return 0;
    }

@ -38,6 +39,7 @@ public class ProgressToken

    public void Increment()
    {
        this.lastUpdate = DateTime.Now;
        this.incrementsCompleted++;
        if (incrementsCompleted > increments)
            state = State.Complete;
@ -45,22 +47,32 @@ public class ProgressToken

    public void Standby()
    {
        this.lastUpdate = DateTime.Now;
        state = State.Standby;
    }

    public void Start()
    {
        this.lastUpdate = DateTime.Now;
        state = State.Running;
        this.executionStarted = DateTime.Now;
    }

    public void Complete()
    {
        this.lastUpdate = DateTime.Now;
        state = State.Complete;
    }

    public void Cancel()
    {
        this.lastUpdate = DateTime.Now;
        state = State.Cancelled;
    }

    public void Waiting()
    {
        this.lastUpdate = DateTime.Now;
        state = State.Waiting;
    }
}
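
A short usage sketch of the reworked token lifecycle above (illustrative only; it assumes the ProgressToken class as shown is in scope): new tokens start in Waiting, move to Standby when enqueued, to Running on Start(), and are completed explicitly by the job.

// Illustrative fragment, assuming the ProgressToken class above is in scope.
ProgressToken token = new(3);   // state == Waiting, progress == 0

token.Standby();                // job was placed on a connector queue
token.Start();                  // executionStarted = now, state == Running
token.Increment();              // progress == 1/3
token.Increment();              // progress == 2/3
token.Increment();              // progress == 3/3; state stays Running...
token.Complete();               // ...until the job marks the token Complete explicitly
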
76 Tranga/Jobs/UpdateMetadata.cs (Normal file)
@ -0,0 +1,76 @@
using Tranga.MangaConnectors;

namespace Tranga.Jobs;

public class UpdateMetadata : Job
{
    public Manga manga { get; set; }

    public UpdateMetadata(GlobalBase clone, MangaConnector connector, Manga manga, string? parentJobId = null) : base(clone, JobType.UpdateMetaDataJob, connector, parentJobId: parentJobId)
    {
        this.manga = manga;
    }

    protected override string GetId()
    {
        return $"{GetType()}-{manga.internalId}";
    }

    public override string ToString()
    {
        return $"{id} Manga: {manga}";
    }

    protected override IEnumerable<Job> ExecuteReturnSubTasksInternal(JobBoss jobBoss)
    {
        //Retrieve new Metadata
        Manga? possibleUpdatedManga = mangaConnector.GetMangaFromId(manga.publicationId);
        if (possibleUpdatedManga is { } updatedManga)
        {
            if (updatedManga.Equals(this.manga)) //Check if anything changed
            {
                this.progressToken.Complete();
                return Array.Empty<Job>();
            }

            this.manga = manga.WithMetadata(updatedManga);
            this.manga.SaveSeriesInfoJson(true);
            this.mangaConnector.CopyCoverFromCacheToDownloadLocation(manga);
            foreach (Job job in jobBoss.GetJobsLike(publication: this.manga))
            {
                string oldFile;
                if (job is DownloadNewChapters dc)
                {
                    oldFile = dc.id;
                    dc.manga = this.manga;
                }
                else if (job is UpdateMetadata um)
                {
                    oldFile = um.id;
                    um.manga = this.manga;
                }
                else
                    continue;
                jobBoss.UpdateJobFile(job, oldFile);
            }
            this.progressToken.Complete();
        }
        else
        {
            Log($"Could not find Manga {manga}");
            this.progressToken.Cancel();
            return Array.Empty<Job>();
        }
        this.progressToken.Cancel();
        return Array.Empty<Job>();
    }

    public override bool Equals(object? obj)
    {
        if (obj is not UpdateMetadata otherJob)
            return false;
        return otherJob.mangaConnector == this.mangaConnector &&
               otherJob.manga.publicationId == this.manga.publicationId;
    }
}
@ -1,4 +1,5 @@
|
||||
using System.Text.Json.Nodes;
|
||||
using Logging;
|
||||
using Newtonsoft.Json;
|
||||
using JsonSerializer = System.Text.Json.JsonSerializer;
|
||||
|
||||
@ -8,7 +9,7 @@ public class Kavita : LibraryConnector
|
||||
{
|
||||
|
||||
public Kavita(GlobalBase clone, string baseUrl, string username, string password) :
|
||||
base(clone, baseUrl, GetToken(baseUrl, username, password), LibraryType.Kavita)
|
||||
base(clone, baseUrl, GetToken(baseUrl, username, password, clone.logger), LibraryType.Kavita)
|
||||
{
|
||||
}
|
||||
|
||||
@ -22,7 +23,7 @@ public class Kavita : LibraryConnector
|
||||
return $"Kavita {baseUrl}";
|
||||
}
|
||||
|
||||
private static string GetToken(string baseUrl, string username, string password)
|
||||
private static string GetToken(string baseUrl, string username, string password, Logger? logger = null)
|
||||
{
|
||||
HttpClient client = new()
|
||||
{
|
||||
@ -37,21 +38,44 @@ public class Kavita : LibraryConnector
|
||||
RequestUri = new Uri($"{baseUrl}/api/Account/login"),
|
||||
Content = new StringContent($"{{\"username\":\"{username}\",\"password\":\"{password}\"}}", System.Text.Encoding.UTF8, "application/json")
|
||||
};
|
||||
|
||||
HttpResponseMessage response = client.Send(requestMessage);
|
||||
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(response.Content.ReadAsStream());
|
||||
if (result is not null)
|
||||
return result["token"]!.GetValue<string>();
|
||||
else throw new Exception("Did not receive token.");
|
||||
try
|
||||
{
|
||||
HttpResponseMessage response = client.Send(requestMessage);
|
||||
logger?.WriteLine($"Kavita | GetToken {requestMessage.RequestUri} -> {response.StatusCode}");
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(response.Content.ReadAsStream());
|
||||
if (result is not null)
|
||||
return result["token"]!.GetValue<string>();
|
||||
}
|
||||
else
|
||||
{
|
||||
logger?.WriteLine($"Kavita | {response.Content}");
|
||||
}
|
||||
}
|
||||
catch (HttpRequestException e)
|
||||
{
|
||||
logger?.WriteLine($"Kavita | Unable to retrieve token:\n\r{e}");
|
||||
}
|
||||
logger?.WriteLine("Kavita | Did not receive token.");
|
||||
return "";
|
||||
}
|
||||
|
||||
public override void UpdateLibrary()
|
||||
protected override void UpdateLibraryInternal()
|
||||
{
|
||||
Log("Updating libraries.");
|
||||
foreach (KavitaLibrary lib in GetLibraries())
|
||||
NetClient.MakePost($"{baseUrl}/api/Library/scan?libraryId={lib.id}", "Bearer", auth, logger);
|
||||
}
|
||||
|
||||
internal override bool Test()
|
||||
{
|
||||
foreach (KavitaLibrary lib in GetLibraries())
|
||||
if (NetClient.MakePost($"{baseUrl}/api/Library/scan?libraryId={lib.id}", "Bearer", auth, logger))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Fetches all libraries available to the user
|
||||
/// </summary>
|
||||
@ -59,7 +83,7 @@ public class Kavita : LibraryConnector
|
||||
private IEnumerable<KavitaLibrary> GetLibraries()
|
||||
{
|
||||
Log("Getting libraries.");
|
||||
Stream data = NetClient.MakeRequest($"{baseUrl}/api/Library", "Bearer", auth, logger);
|
||||
Stream data = NetClient.MakeRequest($"{baseUrl}/api/Library/libraries", "Bearer", auth, logger);
|
||||
if (data == Stream.Null)
|
||||
{
|
||||
Log("No libraries returned");
|
||||
@ -72,11 +96,13 @@ public class Kavita : LibraryConnector
|
||||
return Array.Empty<KavitaLibrary>();
|
||||
}
|
||||
|
||||
HashSet<KavitaLibrary> ret = new();
|
||||
List<KavitaLibrary> ret = new();
|
||||
|
||||
foreach (JsonNode? jsonNode in result)
|
||||
{
|
||||
var jObject = (JsonObject?)jsonNode;
|
||||
JsonObject? jObject = (JsonObject?)jsonNode;
|
||||
if(jObject is null)
|
||||
continue;
|
||||
int libraryId = jObject!["id"]!.GetValue<int>();
|
||||
string libraryName = jObject["name"]!.GetValue<string>();
|
||||
ret.Add(new KavitaLibrary(libraryId, libraryName));
|
||||
|
@ -25,13 +25,21 @@ public class Komga : LibraryConnector
|
||||
return $"Komga {baseUrl}";
|
||||
}
|
||||
|
||||
public override void UpdateLibrary()
|
||||
protected override void UpdateLibraryInternal()
|
||||
{
|
||||
Log("Updating libraries.");
|
||||
foreach (KomgaLibrary lib in GetLibraries())
|
||||
NetClient.MakePost($"{baseUrl}/api/v1/libraries/{lib.id}/scan", "Basic", auth, logger);
|
||||
}
|
||||
|
||||
internal override bool Test()
|
||||
{
|
||||
foreach (KomgaLibrary lib in GetLibraries())
|
||||
if (NetClient.MakePost($"{baseUrl}/api/v1/libraries/{lib.id}/scan", "Basic", auth, logger))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Fetches all libraries available to the user
|
||||
/// </summary>
|
||||
|
@ -17,14 +17,62 @@ public abstract class LibraryConnector : GlobalBase
|
||||
public string baseUrl { get; }
|
||||
// ReSharper disable once MemberCanBeProtected.Global
|
||||
public string auth { get; } //Base64 encoded, if you use your password everywhere, you have problems
|
||||
private DateTime? _updateLibraryRequested = null;
|
||||
private readonly Thread? _libraryBufferThread = null;
|
||||
private const int NoChangeTimeout = 2, BiggestInterval = 20;
|
||||
|
||||
protected LibraryConnector(GlobalBase clone, string baseUrl, string auth, LibraryType libraryType) : base(clone)
|
||||
{
|
||||
this.baseUrl = baseUrl;
|
||||
Log($"Creating libraryConnector {Enum.GetName(libraryType)}");
|
||||
if (!baseUrlRex.IsMatch(baseUrl))
|
||||
throw new ArgumentException("Base url does not match pattern");
|
||||
if(auth == "")
|
||||
throw new ArgumentNullException(nameof(auth), "Auth can not be empty");
|
||||
this.baseUrl = baseUrlRex.Match(baseUrl).Value;
|
||||
this.auth = auth;
|
||||
this.libraryType = libraryType;
|
||||
|
||||
if (TrangaSettings.bufferLibraryUpdates)
|
||||
{
|
||||
_libraryBufferThread = new(CheckLibraryBuffer);
|
||||
_libraryBufferThread.Start();
|
||||
}
|
||||
}
|
||||
public abstract void UpdateLibrary();
|
||||
|
||||
private void CheckLibraryBuffer()
|
||||
{
|
||||
while (true)
|
||||
{
|
||||
if (_updateLibraryRequested is not null && DateTime.Now.Subtract((DateTime)_updateLibraryRequested) > TimeSpan.FromMinutes(NoChangeTimeout)) //If no updates have been requested for NoChangeTimeout minutes, update library
|
||||
{
|
||||
UpdateLibraryInternal();
|
||||
_updateLibraryRequested = null;
|
||||
}
|
||||
Thread.Sleep(100);
|
||||
}
|
||||
}
|
||||
|
||||
public void UpdateLibrary()
|
||||
{
|
||||
_updateLibraryRequested ??= DateTime.Now;
|
||||
if (!TrangaSettings.bufferLibraryUpdates)
|
||||
{
|
||||
UpdateLibraryInternal();
|
||||
return;
|
||||
}else if (_updateLibraryRequested is not null &&
|
||||
DateTime.Now.Subtract((DateTime)_updateLibraryRequested) > TimeSpan.FromMinutes(BiggestInterval)) //If the last update has been more than BiggestInterval minutes ago, update library
|
||||
{
|
||||
UpdateLibraryInternal();
|
||||
_updateLibraryRequested = null;
|
||||
}
|
||||
else if(_updateLibraryRequested is not null)
|
||||
{
|
||||
Log($"Buffering Library Updates (Updates in latest {((DateTime)_updateLibraryRequested).Add(TimeSpan.FromMinutes(BiggestInterval)).Subtract(DateTime.Now)} or {((DateTime)_updateLibraryRequested).Add(TimeSpan.FromMinutes(NoChangeTimeout)).Subtract(DateTime.Now)})");
|
||||
}
|
||||
}
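
The buffering logic above debounces library-scan requests: as the comments describe, a buffered request is flushed once no update has gone out for NoChangeTimeout minutes, and never waits longer than BiggestInterval minutes. The following is a simplified, self-contained sketch of that intent, not the repository's exact implementation; the class and member names are hypothetical, and the timeout values are taken from the constants shown above (2 and 20 minutes).

using System;

// Simplified sketch of the buffering intent described above.
class LibraryUpdateBuffer
{
    private static readonly TimeSpan NoChangeTimeout = TimeSpan.FromMinutes(2);
    private static readonly TimeSpan BiggestInterval = TimeSpan.FromMinutes(20);

    private DateTime? firstRequest; // when the current buffered batch started
    private DateTime lastRequest;   // most recent request in the batch

    public void RequestUpdate()
    {
        firstRequest ??= DateTime.Now;
        lastRequest = DateTime.Now;
    }

    // Poll periodically (the real connector runs a background thread for this).
    public bool ShouldFlush()
    {
        if (firstRequest is null)
            return false;
        bool quiet   = DateTime.Now - lastRequest > NoChangeTimeout;
        bool overdue = DateTime.Now - firstRequest.Value > BiggestInterval;
        if (!quiet && !overdue)
            return false;
        firstRequest = null; // reset the batch after flushing
        return true;
    }
}
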
|
||||
|
||||
protected abstract void UpdateLibraryInternal();
|
||||
internal abstract bool Test();
|
||||
|
||||
protected static class NetClient
|
||||
{
|
||||
@ -38,15 +86,33 @@ public abstract class LibraryConnector : GlobalBase
|
||||
Method = HttpMethod.Get,
|
||||
RequestUri = new Uri(url)
|
||||
};
|
||||
HttpResponseMessage response = client.Send(requestMessage);
|
||||
logger?.WriteLine("LibraryManager.NetClient", $"GET {url} -> {(int)response.StatusCode}: {response.ReasonPhrase}");
|
||||
try
|
||||
{
|
||||
|
||||
if(response.StatusCode is HttpStatusCode.Unauthorized && response.RequestMessage!.RequestUri!.AbsoluteUri != url)
|
||||
return MakeRequest(response.RequestMessage!.RequestUri!.AbsoluteUri, authScheme, auth, logger);
|
||||
else if (response.IsSuccessStatusCode)
|
||||
return response.Content.ReadAsStream();
|
||||
else
|
||||
HttpResponseMessage response = client.Send(requestMessage);
|
||||
logger?.WriteLine("LibraryManager.NetClient",
|
||||
$"GET {url} -> {(int)response.StatusCode}: {response.ReasonPhrase}");
|
||||
|
||||
if (response.StatusCode is HttpStatusCode.Unauthorized &&
|
||||
response.RequestMessage!.RequestUri!.AbsoluteUri != url)
|
||||
return MakeRequest(response.RequestMessage!.RequestUri!.AbsoluteUri, authScheme, auth, logger);
|
||||
else if (response.IsSuccessStatusCode)
|
||||
return response.Content.ReadAsStream();
|
||||
else
|
||||
return Stream.Null;
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
switch (e)
|
||||
{
|
||||
case HttpRequestException:
|
||||
logger?.WriteLine("LibraryManager.NetClient", $"Failed to make Request:\n\r{e}\n\rContinuing.");
|
||||
break;
|
||||
default:
|
||||
throw;
|
||||
}
|
||||
return Stream.Null;
|
||||
}
|
||||
}
|
||||
|
||||
public static bool MakePost(string url, string authScheme, string auth, Logger? logger)
|
||||
|
@ -5,7 +5,7 @@ namespace Tranga.LibraryConnectors;

public class LibraryManagerJsonConverter : JsonConverter
{
    private GlobalBase _clone;
    private readonly GlobalBase _clone;

    internal LibraryManagerJsonConverter(GlobalBase clone)
    {

140 Tranga/Manga.cs
@ -1,6 +1,7 @@
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Web;
|
||||
using Newtonsoft.Json;
|
||||
using static System.IO.UnixFileMode;
|
||||
|
||||
@ -11,51 +12,102 @@ namespace Tranga;
|
||||
/// </summary>
|
||||
public struct Manga
|
||||
{
|
||||
public string sortName { get; }
|
||||
public List<string> authors { get; }
|
||||
public string sortName { get; private set; }
|
||||
public List<string> authors { get; private set; }
|
||||
// ReSharper disable once UnusedAutoPropertyAccessor.Global
|
||||
public Dictionary<string,string> altTitles { get; }
|
||||
public Dictionary<string,string> altTitles { get; private set; }
|
||||
// ReSharper disable once MemberCanBePrivate.Global
|
||||
public string? description { get; }
|
||||
public string[] tags { get; }
|
||||
public string? description { get; private set; }
|
||||
public string[] tags { get; private set; }
|
||||
// ReSharper disable once UnusedAutoPropertyAccessor.Global
|
||||
public string? coverUrl { get; }
|
||||
public string? coverFileNameInCache { get; set; }
|
||||
public string? coverUrl { get; private set; }
|
||||
public string? coverFileNameInCache { get; private set; }
|
||||
// ReSharper disable once UnusedAutoPropertyAccessor.Global
|
||||
public Dictionary<string,string> links { get; }
|
||||
// ReSharper disable once MemberCanBePrivate.Global
|
||||
public int? year { get; }
|
||||
public int? year { get; private set; }
|
||||
public string? originalLanguage { get; }
|
||||
// ReSharper disable once MemberCanBePrivate.Global
|
||||
public string status { get; }
|
||||
// ReSharper disable twice MemberCanBePrivate.Global
|
||||
public string status { get; private set; }
|
||||
public ReleaseStatusByte releaseStatus { get; private set; }
|
||||
public enum ReleaseStatusByte : byte
|
||||
{
|
||||
Continuing = 0,
|
||||
Completed = 1,
|
||||
OnHiatus = 2,
|
||||
Cancelled = 3,
|
||||
Unreleased = 4
|
||||
};
|
||||
public string folderName { get; private set; }
|
||||
public string publicationId { get; }
|
||||
public string internalId { get; }
|
||||
public float ignoreChaptersBelow { get; set; }
|
||||
public float latestChapterDownloaded { get; set; }
|
||||
public float latestChapterAvailable { get; set; }
|
||||
|
||||
private static readonly Regex LegalCharacters = new (@"[A-Z]*[a-z]*[0-9]* *\.*-*,*'*\'*\)*\(*~*!*");
|
||||
public string? websiteUrl { get; private set; }
|
||||
|
||||
private static readonly Regex LegalCharacters = new (@"[A-Za-zÀ-ÖØ-öø-ÿ0-9 \.\-,'\'\)\(~!\+]*");
|
||||
|
||||
[JsonConstructor]
|
||||
public Manga(string sortName, List<string> authors, string? description, Dictionary<string,string> altTitles, string[] tags, string? coverUrl, string? coverFileNameInCache, Dictionary<string,string>? links, int? year, string? originalLanguage, string status, string publicationId, string? folderName = null, float? ignoreChaptersBelow = 0)
|
||||
public Manga(string sortName, List<string> authors, string? description, Dictionary<string,string> altTitles, string[] tags, string? coverUrl, string? coverFileNameInCache, Dictionary<string,string>? links, int? year, string? originalLanguage, string publicationId, ReleaseStatusByte releaseStatus, string? websiteUrl = null, string? folderName = null, float? ignoreChaptersBelow = 0)
|
||||
{
|
||||
this.sortName = sortName;
|
||||
this.authors = authors;
|
||||
this.description = description;
|
||||
this.altTitles = altTitles;
|
||||
this.tags = tags;
|
||||
this.sortName = HttpUtility.HtmlDecode(sortName);
|
||||
this.authors = authors.Select(HttpUtility.HtmlDecode).ToList()!;
|
||||
this.description = HttpUtility.HtmlDecode(description);
|
||||
this.altTitles = altTitles.ToDictionary(a => HttpUtility.HtmlDecode(a.Key), a => HttpUtility.HtmlDecode(a.Value));
|
||||
this.tags = tags.Select(HttpUtility.HtmlDecode).ToArray()!;
|
||||
this.coverFileNameInCache = coverFileNameInCache;
|
||||
this.coverUrl = coverUrl;
|
||||
this.links = links ?? new Dictionary<string, string>();
|
||||
this.year = year;
|
||||
this.originalLanguage = originalLanguage;
|
||||
this.status = status;
|
||||
this.publicationId = publicationId;
|
||||
this.folderName = folderName ?? string.Concat(LegalCharacters.Matches(sortName));
|
||||
this.folderName = folderName ?? string.Concat(LegalCharacters.Matches(HttpUtility.HtmlDecode(sortName)));
|
||||
while (this.folderName.EndsWith('.'))
|
||||
this.folderName = this.folderName.Substring(0, this.folderName.Length - 1);
|
||||
string onlyLowerLetters = string.Concat(this.sortName.ToLower().Where(Char.IsLetter));
|
||||
this.internalId = Convert.ToBase64String(Encoding.ASCII.GetBytes($"{onlyLowerLetters}{this.year}"));
|
||||
this.internalId = DateTime.Now.Ticks.ToString();
|
||||
this.ignoreChaptersBelow = ignoreChaptersBelow ?? 0f;
|
||||
this.latestChapterDownloaded = 0;
|
||||
this.latestChapterAvailable = 0;
|
||||
this.releaseStatus = releaseStatus;
|
||||
this.status = Enum.GetName(releaseStatus) ?? "";
|
||||
this.websiteUrl = websiteUrl;
|
||||
}
|
||||
|
||||
public Manga WithMetadata(Manga newManga)
|
||||
{
|
||||
return this with
|
||||
{
|
||||
sortName = newManga.sortName,
|
||||
description = newManga.description,
|
||||
coverUrl = newManga.coverUrl,
|
||||
authors = authors.Union(newManga.authors).ToList(),
|
||||
altTitles = altTitles.UnionBy(newManga.altTitles, kv => kv.Key).ToDictionary(x => x.Key, x => x.Value),
|
||||
tags = tags.Union(newManga.tags).ToArray(),
|
||||
status = newManga.status,
|
||||
releaseStatus = newManga.releaseStatus,
|
||||
websiteUrl = newManga.websiteUrl,
|
||||
year = newManga.year,
|
||||
coverFileNameInCache = newManga.coverFileNameInCache
|
||||
};
|
||||
}
|
||||
|
||||
public override bool Equals(object? obj)
|
||||
{
|
||||
if (obj is not Manga compareManga)
|
||||
return false;
|
||||
return this.description == compareManga.description &&
|
||||
this.year == compareManga.year &&
|
||||
this.status == compareManga.status &&
|
||||
this.releaseStatus == compareManga.releaseStatus &&
|
||||
this.sortName == compareManga.sortName &&
|
||||
this.latestChapterAvailable.Equals(compareManga.latestChapterAvailable) &&
|
||||
this.authors.All(a => compareManga.authors.Contains(a)) &&
|
||||
(this.coverFileNameInCache??"").Equals(compareManga.coverFileNameInCache) &&
|
||||
(this.websiteUrl??"").Equals(compareManga.websiteUrl) &&
|
||||
this.tags.All(t => compareManga.tags.Contains(t));
|
||||
}
|
||||
|
||||
public override string ToString()
|
||||
@ -76,17 +128,32 @@ public struct Manga
|
||||
public void MovePublicationFolder(string downloadDirectory, string newFolderName)
|
||||
{
|
||||
string oldPath = Path.Join(downloadDirectory, this.folderName);
|
||||
this.folderName = newFolderName;
|
||||
this.folderName = newFolderName;//Create new Path with the new folderName
|
||||
string newPath = CreatePublicationFolder(downloadDirectory);
|
||||
if(Directory.Exists(oldPath))
|
||||
Directory.Move(oldPath, newPath);
|
||||
if (Directory.Exists(oldPath))
|
||||
{
|
||||
if (Directory.Exists(newPath)) //Move/Overwrite old Files, Delete old Directory
|
||||
{
|
||||
IEnumerable<string> newPathFileNames = new DirectoryInfo(newPath).GetFiles().Select(fi => fi.Name);
|
||||
foreach(FileInfo fileInfo in new DirectoryInfo(oldPath).GetFiles().Where(fi => newPathFileNames.Contains(fi.Name) == false))
|
||||
File.Move(fileInfo.FullName, Path.Join(newPath, fileInfo.Name), true);
|
||||
Directory.Delete(oldPath);
|
||||
}else
|
||||
Directory.Move(oldPath, newPath);
|
||||
}
|
||||
}
|
||||
|
||||
public void SaveSeriesInfoJson(string downloadDirectory)
|
||||
public void UpdateLatestDownloadedChapter(Chapter chapter)//TODO check files if chapters are all downloaded
|
||||
{
|
||||
string publicationFolder = CreatePublicationFolder(downloadDirectory);
|
||||
float chapterNumber = Convert.ToSingle(chapter.chapterNumber, GlobalBase.numberFormatDecimalPoint);
|
||||
latestChapterDownloaded = latestChapterDownloaded < chapterNumber ? chapterNumber : latestChapterDownloaded;
|
||||
}
|
||||
|
||||
public void SaveSeriesInfoJson(bool overwrite = false)
|
||||
{
|
||||
string publicationFolder = CreatePublicationFolder(TrangaSettings.downloadLocation);
|
||||
string seriesInfoPath = Path.Join(publicationFolder, "series.json");
|
||||
if(!File.Exists(seriesInfoPath))
|
||||
if(overwrite || (!overwrite && !File.Exists(seriesInfoPath)))
|
||||
File.WriteAllText(seriesInfoPath,this.GetSeriesInfoJson());
|
||||
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
File.SetUnixFileMode(seriesInfoPath, GroupRead | GroupWrite | OtherRead | OtherWrite | UserRead | UserWrite);
|
||||
@ -95,7 +162,7 @@ public struct Manga
|
||||
/// <returns>Serialized JSON String for series.json</returns>
|
||||
private string GetSeriesInfoJson()
|
||||
{
|
||||
SeriesInfo si = new (new Metadata(this.sortName, this.year.ToString() ?? string.Empty, this.status, this.description ?? ""));
|
||||
SeriesInfo si = new (new Metadata(this));
|
||||
return System.Text.Json.JsonSerializer.Serialize(si);
|
||||
}
|
||||
|
||||
@ -125,16 +192,21 @@ public struct Manga
|
||||
[JsonRequired]public string status { get; }
|
||||
[JsonRequired]public string description_text { get; }
|
||||
|
||||
public Metadata(string name, string year, string status, string description_text)
|
||||
public Metadata(Manga manga) : this(manga.sortName, manga.year.ToString() ?? string.Empty, manga.releaseStatus, manga.description ?? "")
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
public Metadata(string name, string year, ReleaseStatusByte status, string description_text)
|
||||
{
|
||||
this.name = name;
|
||||
this.year = year;
|
||||
if(status.ToLower() == "ongoing" || status.ToLower() == "hiatus")
|
||||
this.status = "Continuing";
|
||||
else if (status.ToLower() == "completed" || status.ToLower() == "cancelled" || status.ToLower() == "discontinued")
|
||||
this.status = "Ended";
|
||||
else
|
||||
this.status = status;
|
||||
this.status = status switch
|
||||
{
|
||||
ReleaseStatusByte.Continuing => "Continuing",
|
||||
ReleaseStatusByte.Completed => "Ended",
|
||||
_ => Enum.GetName(status) ?? "Ended"
|
||||
};
|
||||
this.description_text = description_text;
|
||||
|
||||
//kill it with fire, but otherwise Komga will not parse
|
||||
|
217 Tranga/MangaConnectors/AsuraToon.cs (Normal file)
@ -0,0 +1,217 @@
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class AsuraToon : MangaConnector
|
||||
{
|
||||
|
||||
public AsuraToon(GlobalBase clone) : base(clone, "AsuraToon", ["en"])
|
||||
{
|
||||
this.downloadClient = new ChromiumDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
|
||||
string requestUrl = $"https://asuracomic.net/series?name={sanitizedTitle}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
Log($"Failed to retrieve site");
|
||||
return Array.Empty<Manga>();
|
||||
}
|
||||
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
|
||||
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
|
||||
return publications;
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
return GetMangaFromUrl($"https://asuracomic.net/series/{publicationId}");
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return null;
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
Log($"Failed to retrieve site");
|
||||
return null;
|
||||
}
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
{
|
||||
HtmlNodeCollection mangaList = document.DocumentNode.SelectNodes("//a[starts-with(@href,'series')]");
|
||||
if (mangaList is null || mangaList.Count < 1)
|
||||
return [];
|
||||
|
||||
IEnumerable<string> urls = mangaList.Select(a => $"https://asuracomic.net/{a.GetAttributeValue("href", "")}");
|
||||
|
||||
List<Manga> ret = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Manga? manga = GetMangaFromUrl(url);
|
||||
if (manga is not null)
|
||||
ret.Add((Manga)manga);
|
||||
}
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
string? originalLanguage = null;
|
||||
Dictionary<string, string> altTitles = new(), links = new();
|
||||
|
||||
HtmlNodeCollection genreNodes = document.DocumentNode.SelectNodes("//h3[text()='Genres']/../div/button");
|
||||
string[] tags = genreNodes.Select(b => b.InnerText).ToArray();
|
||||
|
||||
HtmlNode statusNode = document.DocumentNode.SelectSingleNode("//h3[text()='Status']/../h3[2]");
|
||||
Manga.ReleaseStatusByte releaseStatus = statusNode.InnerText.ToLower() switch
|
||||
{
|
||||
"ongoing" => Manga.ReleaseStatusByte.Continuing,
|
||||
"hiatus" => Manga.ReleaseStatusByte.OnHiatus,
|
||||
"completed" => Manga.ReleaseStatusByte.Completed,
|
||||
"dropped" => Manga.ReleaseStatusByte.Cancelled,
|
||||
"season end" => Manga.ReleaseStatusByte.Continuing,
|
||||
"coming soon" => Manga.ReleaseStatusByte.Unreleased,
|
||||
_ => Manga.ReleaseStatusByte.Unreleased
|
||||
};
|
||||
|
||||
HtmlNode coverNode =
|
||||
document.DocumentNode.SelectSingleNode("//img[@alt='poster']");
|
||||
string coverUrl = coverNode.GetAttributeValue("src", "");
|
||||
string coverFileNameInCache = SaveCoverImageToCache(coverUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
HtmlNode titleNode =
|
||||
document.DocumentNode.SelectSingleNode("//title");
|
||||
string sortName = Regex.Match(titleNode.InnerText, @"(.*) - Asura Scans").Groups[1].Value;
|
||||
|
||||
HtmlNode descriptionNode =
|
||||
document.DocumentNode.SelectSingleNode("//h3[starts-with(text(),'Synopsis')]/../span");
|
||||
string description = descriptionNode?.InnerText??"";
|
||||
|
||||
HtmlNodeCollection authorNodes = document.DocumentNode.SelectNodes("//h3[text()='Author']/../h3[not(text()='Author' or text()='_')]");
|
||||
HtmlNodeCollection artistNodes = document.DocumentNode.SelectNodes("//h3[text()='Artist']/../h3[not(text()='Artist' or text()='_')]");
|
||||
IEnumerable<string> authorNames = authorNodes is null ? [] : authorNodes.Select(a => a.InnerText);
|
||||
IEnumerable<string> artistNames = artistNodes is null ? [] : artistNodes.Select(a => a.InnerText);
|
||||
List<string> authors = authorNames.Concat(artistNames).ToList();
|
||||
|
||||
HtmlNode? firstChapterNode = document.DocumentNode.SelectSingleNode("//a[contains(@href, 'chapter/1')]/../following-sibling::h3");
|
||||
int? year = int.Parse(firstChapterNode?.InnerText.Split(' ')[^1] ?? "2000");
|
||||
|
||||
Manga manga = new (sortName, authors, description, altTitles, tags, coverUrl, coverFileNameInCache, links,
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
string requestUrl = $"https://asuracomic.net/series/{manga.publicationId}";
|
||||
// Leaving this in for verification if the page exists
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Chapter>();
|
||||
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestUrl);
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
private List<Chapter> ParseChaptersFromHtml(Manga manga, string mangaUrl)
|
||||
{
|
||||
RequestResult result = downloadClient.MakeRequest(mangaUrl, RequestType.Default);
|
||||
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
|
||||
{
|
||||
Log("Failed to load site");
|
||||
return new List<Chapter>();
|
||||
}
|
||||
|
||||
List<Chapter> ret = new();
|
||||
|
||||
HtmlNodeCollection chapterURLNodes = result.htmlDocument.DocumentNode.SelectNodes("//a[contains(@href, '/chapter/')]");
|
||||
Regex infoRex = new(@"Chapter ([0-9]+)(.*)?");
|
||||
|
||||
foreach (HtmlNode chapterInfo in chapterURLNodes)
|
||||
{
|
||||
string chapterUrl = chapterInfo.GetAttributeValue("href", "");
|
||||
|
||||
Match match = infoRex.Match(chapterInfo.InnerText);
|
||||
string chapterNumber = match.Groups[1].Value;
|
||||
string? chapterName = match.Groups[2].Success && match.Groups[2].Length > 1 ? match.Groups[2].Value : null;
|
||||
string url = $"https://asuracomic.net/series/{chapterUrl}";
|
||||
try
|
||||
{
|
||||
ret.Add(new Chapter(manga, chapterName, null, chapterNumber, url));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
string requestUrl = chapter.url;
|
||||
// Leaving this in to check if the page exists
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
|
||||
string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
|
||||
|
||||
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken);
|
||||
}
|
||||
|
||||
private string[] ParseImageUrlsFromHtml(string mangaUrl)
|
||||
{
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(mangaUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
return Array.Empty<string>();
|
||||
}
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
Log($"Failed to retrieve site");
|
||||
return Array.Empty<string>();
|
||||
}
|
||||
|
||||
HtmlNodeCollection images =
|
||||
requestResult.htmlDocument.DocumentNode.SelectNodes("//img[contains(@alt, 'chapter page')]");
|
||||
|
||||
return images.Select(i => i.GetAttributeValue("src", "")).ToArray();
|
||||
}
|
||||
}
|
230 Tranga/MangaConnectors/Bato.cs (Normal file)
@ -0,0 +1,230 @@
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class Bato : MangaConnector
|
||||
{
|
||||
|
||||
public Bato(GlobalBase clone) : base(clone, "Bato", ["en"])
|
||||
{
|
||||
this.downloadClient = new HttpDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
|
||||
string requestUrl = $"https://bato.to/v3x-search?word={sanitizedTitle}&lang=en";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
Log($"Failed to retrieve site");
|
||||
return Array.Empty<Manga>();
|
||||
}
|
||||
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
|
||||
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
|
||||
return publications;
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
return GetMangaFromUrl($"https://bato.to/title/{publicationId}");
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return null;
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
Log($"Failed to retrieve site");
|
||||
return null;
|
||||
}
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
{
|
||||
HtmlNode mangaList = document.DocumentNode.SelectSingleNode("//div[@data-hk='0-0-2']");
|
||||
if (!mangaList.ChildNodes.Any(node => node.Name == "div"))
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
List<string> urls = mangaList.ChildNodes
|
||||
.Select(node => $"https://bato.to{node.Descendants("div").First().FirstChild.GetAttributeValue("href", "")}").ToList();
|
||||
|
||||
HashSet<Manga> ret = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Manga? manga = GetMangaFromUrl(url);
|
||||
if (manga is not null)
|
||||
ret.Add((Manga)manga);
|
||||
}
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
HtmlNode infoNode = document.DocumentNode.SelectSingleNode("/html/body/div/main/div[1]/div[2]");
|
||||
|
||||
string sortName = infoNode.Descendants("h3").First().InnerText;
|
||||
string description = document.DocumentNode
|
||||
.SelectSingleNode("//div[contains(concat(' ',normalize-space(@class),' '),'prose')]").InnerText;
|
||||
|
||||
string[] altTitlesList = infoNode.ChildNodes[1].ChildNodes[2].InnerText.Split('/');
|
||||
int i = 0;
|
||||
Dictionary<string, string> altTitles = altTitlesList.ToDictionary(s => i++.ToString(), s => s);
|
||||
|
||||
string posterUrl = document.DocumentNode.SelectNodes("//img")
|
||||
.First(child => child.GetAttributeValue("data-hk", "") == "0-1-0").GetAttributeValue("src", "").Replace("&", "&");
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
List<HtmlNode> genreNodes = document.DocumentNode.SelectSingleNode("//b[text()='Genres:']/..").SelectNodes("span").ToList();
|
||||
string[] tags = genreNodes.Select(node => node.FirstChild.InnerText).ToArray();
|
||||
|
||||
List<HtmlNode> authorsNodes = infoNode.ChildNodes[1].ChildNodes[3].Descendants("a").ToList();
|
||||
List<string> authors = authorsNodes.Select(node => node.InnerText.Replace("amp;", "")).ToList();
|
||||
|
||||
HtmlNode? originalLanguageNode = document.DocumentNode.SelectSingleNode("//span[text()='Tr From']/..");
|
||||
string originalLanguage = originalLanguageNode is not null ? originalLanguageNode.LastChild.InnerText : "";
|
||||
|
||||
if (!int.TryParse(
|
||||
document.DocumentNode.SelectSingleNode("//span[text()='Original Publication:']/..").LastChild.InnerText.Split('-')[0],
|
||||
out int year))
|
||||
year = DateTime.Now.Year;
|
||||
|
||||
string status = document.DocumentNode.SelectSingleNode("//span[text()='Original Publication:']/..")
|
||||
.ChildNodes[2].InnerText;
|
||||
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
|
||||
switch (status.ToLower())
|
||||
{
|
||||
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
|
||||
case "completed": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
|
||||
case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
|
||||
case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
|
||||
case "pending": releaseStatus = Manga.ReleaseStatusByte.Unreleased; break;
|
||||
}
|
||||
|
||||
Manga manga = new (sortName, authors, description, altTitles, tags, posterUrl, coverFileNameInCache, new Dictionary<string, string>(),
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
string requestUrl = $"https://bato.to/title/{manga.publicationId}";
|
||||
// Leaving this in for verification if the page exists
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Chapter>();
|
||||
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestUrl);
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
private List<Chapter> ParseChaptersFromHtml(Manga manga, string mangaUrl)
|
||||
{
|
||||
RequestResult result = downloadClient.MakeRequest(mangaUrl, RequestType.Default);
|
||||
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
|
||||
{
|
||||
Log("Failed to load site");
|
||||
return new List<Chapter>();
|
||||
}
|
||||
|
||||
List<Chapter> ret = new();
|
||||
|
||||
HtmlNode chapterList =
|
||||
result.htmlDocument.DocumentNode.SelectSingleNode("/html/body/div/main/div[3]/astro-island/div/div[2]/div/div/astro-slot");
|
||||
|
||||
Regex numberRex = new(@"\/title\/.+\/([0-9])+(?:-vol_([0-9]+))?-ch_([0-9\.]+)");
|
||||
|
||||
foreach (HtmlNode chapterInfo in chapterList.SelectNodes("div"))
|
||||
{
|
||||
HtmlNode infoNode = chapterInfo.FirstChild.FirstChild;
|
||||
string chapterUrl = infoNode.GetAttributeValue("href", "");
|
||||
|
||||
Match match = numberRex.Match(chapterUrl);
|
||||
string id = match.Groups[1].Value;
|
||||
string? volumeNumber = match.Groups[2].Success ? match.Groups[2].Value : null;
|
||||
string chapterNumber = match.Groups[3].Value;
|
||||
string chapterName = chapterNumber;
|
||||
string url = $"https://bato.to{chapterUrl}?load=2";
|
||||
try
|
||||
{
|
||||
ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
string requestUrl = chapter.url;
|
||||
// Leaving this in to check if the page exists
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
|
||||
string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
|
||||
|
||||
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken);
|
||||
}
|
||||
|
||||
private string[] ParseImageUrlsFromHtml(string mangaUrl)
|
||||
{
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(mangaUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
return Array.Empty<string>();
|
||||
}
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
Log($"Failed to retrieve site");
|
||||
return Array.Empty<string>();
|
||||
}
|
||||
|
||||
HtmlDocument document = requestResult.htmlDocument;
|
||||
|
||||
HtmlNode images = document.DocumentNode.SelectNodes("//astro-island").First(node =>
|
||||
node.GetAttributeValue("component-url", "").Contains("/_astro/ImageList."));
|
||||
|
||||
string weirdString = images.OuterHtml;
|
||||
string weirdString2 = Regex.Match(weirdString, @"props=\""(.*)}\""").Groups[1].Value;
|
||||
string[] urls = Regex.Matches(weirdString2, @"(https:\/\/[A-z\-0-9\.\?\&\;\=\/]+)\\")
|
||||
.Select(match => match.Groups[1].Value.Replace("&", "&")).ToArray();
|
||||
|
||||
return urls;
|
||||
}
|
||||
}
|
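A minimal usage sketch for a connector like the one above; `connector` and the URL are assumed stand-ins, not code from this commit.
Manga? manga = connector.GetMangaFromUrl("https://bato.to/title/12345-some-title"); // hypothetical URL
if (manga is not null)
{
    Chapter[] chapters = connector.GetChapters((Manga)manga, "en");
    foreach (Chapter chapter in chapters)
        connector.DownloadChapter(chapter); // returns an HttpStatusCode per chapter
}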
117
Tranga/MangaConnectors/ChromiumDownloadClient.cs
Normal file
@@ -0,0 +1,117 @@
|
||||
using System.Net;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using PuppeteerSharp;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
internal class ChromiumDownloadClient : DownloadClient
|
||||
{
|
||||
private static IBrowser? _browser;
|
||||
private readonly HttpDownloadClient _httpDownloadClient;
|
||||
|
||||
private static async Task<IBrowser> StartBrowser(Logging.Logger? logger = null)
|
||||
{
|
||||
logger?.WriteLine("Starting ChromiumDownloadClient Puppeteer");
|
||||
return await Puppeteer.LaunchAsync(new LaunchOptions
|
||||
{
|
||||
Headless = true,
|
||||
Args = new [] {
|
||||
"--disable-gpu",
|
||||
"--disable-dev-shm-usage",
|
||||
"--disable-setuid-sandbox",
|
||||
"--no-sandbox"},
|
||||
Timeout = TrangaSettings.ChromiumStartupTimeoutMs
|
||||
}, new LoggerFactory([new LogProvider(logger)]));
|
||||
}
|
||||
|
||||
private class LogProvider : GlobalBase, ILoggerProvider
|
||||
{
|
||||
public LogProvider(Logging.Logger? logger) : base(logger) { }
|
||||
|
||||
public void Dispose() { }
|
||||
|
||||
public ILogger CreateLogger(string categoryName) => new Logger(logger);
|
||||
}
|
||||
|
||||
private class Logger : GlobalBase, ILogger
|
||||
{
|
||||
public Logger(Logging.Logger? logger) : base(logger) { }
|
||||
|
||||
public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)
|
||||
{
|
||||
if (logLevel <= LogLevel.Information)
|
||||
return;
|
||||
logger?.WriteLine("Puppeteer", formatter.Invoke(state, exception));
|
||||
}
|
||||
|
||||
public bool IsEnabled(LogLevel logLevel) => true;
|
||||
|
||||
public IDisposable? BeginScope<TState>(TState state) where TState : notnull => null;
|
||||
}
|
||||
|
||||
public ChromiumDownloadClient(GlobalBase clone) : base(clone)
|
||||
{
|
||||
_httpDownloadClient = new(this);
|
||||
if(_browser is null)
|
||||
_browser = StartBrowser(this.logger).Result;
|
||||
}
|
||||
|
||||
private readonly Regex _imageUrlRex = new(@"https?:\/\/.*\.(?:p?jpe?g|gif|a?png|bmp|avif|webp)(\?.*)?");
|
||||
internal override RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null)
|
||||
{
|
||||
return _imageUrlRex.IsMatch(url)
|
||||
? _httpDownloadClient.MakeRequestInternal(url, referrer)
|
||||
: MakeRequestBrowser(url, referrer, clickButton);
|
||||
}
|
||||
|
||||
private RequestResult MakeRequestBrowser(string url, string? referrer = null, string? clickButton = null)
|
||||
{
|
||||
if (_browser is null)
|
||||
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
|
||||
IPage page = _browser.NewPageAsync().Result;
|
||||
page.DefaultTimeout = TrangaSettings.ChromiumPageTimeoutMs;
|
||||
page.SetExtraHttpHeadersAsync(new() { { "Referer", referrer } });
|
||||
IResponse response;
|
||||
try
|
||||
{
|
||||
response = page.GoToAsync(url, WaitUntilNavigation.Networkidle0).Result;
|
||||
Log($"Page loaded. {url}");
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Could not load Page {url}\n{e.Message}");
|
||||
page.CloseAsync();
|
||||
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
|
||||
}
|
||||
|
||||
Stream stream = Stream.Null;
|
||||
HtmlDocument? document = null;
|
||||
|
||||
if (response.Headers.TryGetValue("Content-Type", out string? content))
|
||||
{
|
||||
if (content.Contains("text/html"))
|
||||
{
|
||||
if (clickButton is not null && page.QuerySelectorAsync(clickButton).Result is not null)
|
||||
page.ClickAsync(clickButton).Wait();
|
||||
string htmlString = page.GetContentAsync().Result;
|
||||
stream = new MemoryStream(Encoding.Default.GetBytes(htmlString));
|
||||
document = new ();
|
||||
document.LoadHtml(htmlString);
|
||||
}else if (content.Contains("image"))
|
||||
{
|
||||
stream = new MemoryStream(response.BufferAsync().Result);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
page.CloseAsync();
|
||||
return new RequestResult(HttpStatusCode.InternalServerError, null, Stream.Null);
|
||||
}
|
||||
|
||||
page.CloseAsync();
|
||||
return new RequestResult(response.Status, document, stream, false, "");
|
||||
}
|
||||
}
|
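A small illustrative check of the routing decision in MakeRequestInternal above; the URLs are made up and the pattern is copied from _imageUrlRex.
using System.Text.RegularExpressions;

Regex imageUrlRex = new(@"https?:\/\/.*\.(?:p?jpe?g|gif|a?png|bmp|avif|webp)(\?.*)?");
Console.WriteLine(imageUrlRex.IsMatch("https://example.org/page/01.jpg?v=2")); // True  -> plain HttpDownloadClient
Console.WriteLine(imageUrlRex.IsMatch("https://bato.to/title/12345-ch_1"));    // False -> headless Chromium page load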
@@ -1,107 +1,44 @@
|
||||
using System.Net;
|
||||
using System.Net.Http.Headers;
|
||||
using HtmlAgilityPack;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
internal class DownloadClient : GlobalBase
|
||||
internal abstract class DownloadClient : GlobalBase
|
||||
{
|
||||
private readonly Dictionary<RequestType, DateTime> _lastExecutedRateLimit;
|
||||
|
||||
protected DownloadClient(GlobalBase clone) : base(clone)
|
||||
{
|
||||
private static readonly HttpClient Client = new()
|
||||
{
|
||||
Timeout = TimeSpan.FromSeconds(60),
|
||||
DefaultRequestHeaders =
|
||||
{
|
||||
UserAgent =
|
||||
{
|
||||
new ProductInfoHeaderValue("Tranga", "0.1")
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private readonly Dictionary<byte, DateTime> _lastExecutedRateLimit;
|
||||
private readonly Dictionary<byte, TimeSpan> _rateLimit;
|
||||
|
||||
public DownloadClient(GlobalBase clone, Dictionary<byte, int> rateLimitRequestsPerMinute) : base(clone)
|
||||
{
|
||||
_lastExecutedRateLimit = new();
|
||||
_rateLimit = new();
|
||||
foreach(KeyValuePair<byte, int> limit in rateLimitRequestsPerMinute)
|
||||
_rateLimit.Add(limit.Key, TimeSpan.FromMinutes(1).Divide(limit.Value));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request Webpage
|
||||
/// </summary>
|
||||
/// <param name="url"></param>
|
||||
/// <param name="requestType">For RateLimits: Same Endpoints use same type</param>
|
||||
/// <param name="referrer">Used in http request header</param>
|
||||
/// <returns>RequestResult with StatusCode and Stream of received data</returns>
|
||||
public RequestResult MakeRequest(string url, byte requestType, string? referrer = null)
|
||||
{
|
||||
if (_rateLimit.TryGetValue(requestType, out TimeSpan value))
|
||||
_lastExecutedRateLimit.TryAdd(requestType, DateTime.Now.Subtract(value));
|
||||
else
|
||||
{
|
||||
Log("RequestType not configured for rate-limit.");
|
||||
return new RequestResult(HttpStatusCode.NotAcceptable, Stream.Null);
|
||||
}
|
||||
|
||||
TimeSpan rateLimitTimeout = _rateLimit[requestType]
|
||||
.Subtract(DateTime.Now.Subtract(_lastExecutedRateLimit[requestType]));
|
||||
|
||||
if(rateLimitTimeout > TimeSpan.Zero)
|
||||
Thread.Sleep(rateLimitTimeout);
|
||||
|
||||
HttpResponseMessage? response = null;
|
||||
while (response is null)
|
||||
{
|
||||
try
|
||||
{
|
||||
HttpRequestMessage requestMessage = new(HttpMethod.Get, url);
|
||||
if(referrer is not null)
|
||||
requestMessage.Headers.Referrer = new Uri(referrer);
|
||||
_lastExecutedRateLimit[requestType] = DateTime.Now;
|
||||
//Log($"Requesting {requestType} {url}");
|
||||
response = Client.Send(requestMessage);
|
||||
}
|
||||
catch (HttpRequestException e)
|
||||
{
|
||||
Log("Exception:\n\t{0}\n\tWaiting {1} before retrying.", e.Message, _rateLimit[requestType] * 2);
|
||||
Thread.Sleep(_rateLimit[requestType] * 2);
|
||||
}
|
||||
}
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
Log($"Request-Error {response.StatusCode}: {response.ReasonPhrase}");
|
||||
return new RequestResult(response.StatusCode, Stream.Null);
|
||||
}
|
||||
|
||||
// Request has been redirected to another page. For example, it redirects directly to the results when there is only 1 result
|
||||
if(response.RequestMessage is not null && response.RequestMessage.RequestUri is not null)
|
||||
{
|
||||
return new RequestResult(response.StatusCode, response.Content.ReadAsStream(), true, response.RequestMessage.RequestUri.AbsoluteUri);
|
||||
}
|
||||
|
||||
return new RequestResult(response.StatusCode, response.Content.ReadAsStream());
|
||||
}
|
||||
|
||||
public struct RequestResult
|
||||
{
|
||||
public HttpStatusCode statusCode { get; }
|
||||
public Stream result { get; }
|
||||
public bool hasBeenRedirected { get; }
|
||||
public string? redirectedToUrl { get; }
|
||||
|
||||
public RequestResult(HttpStatusCode statusCode, Stream result)
|
||||
{
|
||||
this.statusCode = statusCode;
|
||||
this.result = result;
|
||||
}
|
||||
|
||||
public RequestResult(HttpStatusCode statusCode, Stream result, bool hasBeenRedirected, string redirectedTo)
|
||||
: this(statusCode, result)
|
||||
{
|
||||
this.hasBeenRedirected = hasBeenRedirected;
|
||||
redirectedToUrl = redirectedTo;
|
||||
}
|
||||
}
|
||||
this._lastExecutedRateLimit = new();
|
||||
}
|
||||
|
||||
public RequestResult MakeRequest(string url, RequestType requestType, string? referrer = null, string? clickButton = null)
|
||||
{
|
||||
if (!TrangaSettings.requestLimits.ContainsKey(requestType))
|
||||
{
|
||||
Log("RequestType not configured for rate-limit.");
|
||||
return new RequestResult(HttpStatusCode.NotAcceptable, null, Stream.Null);
|
||||
}
|
||||
|
||||
int rateLimit = TrangaSettings.userAgent == TrangaSettings.DefaultUserAgent
|
||||
? TrangaSettings.DefaultRequestLimits[requestType]
|
||||
: TrangaSettings.requestLimits[requestType];
|
||||
|
||||
TimeSpan timeBetweenRequests = TimeSpan.FromMinutes(1).Divide(rateLimit);
|
||||
_lastExecutedRateLimit.TryAdd(requestType, DateTime.Now.Subtract(timeBetweenRequests));
|
||||
|
||||
TimeSpan rateLimitTimeout = timeBetweenRequests.Subtract(DateTime.Now.Subtract(_lastExecutedRateLimit[requestType]));
|
||||
|
||||
if (rateLimitTimeout > TimeSpan.Zero)
|
||||
{
|
||||
Log($"Waiting {rateLimitTimeout.TotalSeconds} seconds");
|
||||
Thread.Sleep(rateLimitTimeout);
|
||||
}
|
||||
|
||||
RequestResult result = MakeRequestInternal(url, referrer, clickButton);
|
||||
_lastExecutedRateLimit[requestType] = DateTime.Now;
|
||||
return result;
|
||||
}
|
||||
|
||||
internal abstract RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null);
|
||||
}
|
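A worked example of the rate-limit arithmetic in MakeRequest above; the numbers are illustrative, the real limits come from TrangaSettings.requestLimits.
int rateLimit = 250;                                                        // requests per minute (assumed)
TimeSpan timeBetweenRequests = TimeSpan.FromMinutes(1).Divide(rateLimit);   // 240 ms between requests
TimeSpan sinceLastRequest = TimeSpan.FromMilliseconds(100);                 // assumed time since the previous request
TimeSpan rateLimitTimeout = timeBetweenRequests.Subtract(sinceLastRequest); // ~140 ms -> Thread.Sleep before sending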
75
Tranga/MangaConnectors/HttpDownloadClient.cs
Normal file
@@ -0,0 +1,75 @@
|
||||
using System.Net;
|
||||
using System.Net.Http.Headers;
|
||||
using HtmlAgilityPack;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
internal class HttpDownloadClient : DownloadClient
|
||||
{
|
||||
private static readonly HttpClient Client = new()
|
||||
{
|
||||
Timeout = TimeSpan.FromSeconds(10)
|
||||
};
|
||||
|
||||
public HttpDownloadClient(GlobalBase clone) : base(clone)
|
||||
{
|
||||
Client.DefaultRequestHeaders.TryAddWithoutValidation("User-Agent", TrangaSettings.userAgent);
|
||||
}
|
||||
|
||||
internal override RequestResult MakeRequestInternal(string url, string? referrer = null, string? clickButton = null)
|
||||
{
|
||||
if(clickButton is not null)
|
||||
Log("Can not click button on static site.");
|
||||
HttpResponseMessage? response = null;
|
||||
while (response is null)
|
||||
{
|
||||
HttpRequestMessage requestMessage = new(HttpMethod.Get, url);
|
||||
if (referrer is not null)
|
||||
requestMessage.Headers.Referrer = new Uri(referrer);
|
||||
//Log($"Requesting {requestType} {url}");
|
||||
try
|
||||
{
|
||||
response = Client.Send(requestMessage);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
switch (e)
|
||||
{
|
||||
case TaskCanceledException:
|
||||
Log($"Request timed out {url}.\n\r{e}");
|
||||
return new RequestResult(HttpStatusCode.RequestTimeout, null, Stream.Null);
|
||||
case HttpRequestException:
|
||||
Log($"Request failed {url}\n\r{e}");
|
||||
return new RequestResult(HttpStatusCode.BadRequest, null, Stream.Null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
Log($"Request-Error {response.StatusCode}: {url}");
|
||||
return new RequestResult(response.StatusCode, null, Stream.Null);
|
||||
}
|
||||
|
||||
Stream stream = response.Content.ReadAsStream();
|
||||
|
||||
HtmlDocument? document = null;
|
||||
|
||||
if (response.Content.Headers.ContentType?.MediaType == "text/html")
|
||||
{
|
||||
StreamReader reader = new (stream);
|
||||
document = new ();
|
||||
document.LoadHtml(reader.ReadToEnd());
|
||||
stream.Position = 0;
|
||||
}
|
||||
|
||||
// Request has been redirected to another page. For example, it redirects directly to the results when there is only 1 result
|
||||
if (response.RequestMessage is not null && response.RequestMessage.RequestUri is not null)
|
||||
{
|
||||
return new RequestResult(response.StatusCode, document, stream, true,
|
||||
response.RequestMessage.RequestUri.AbsoluteUri);
|
||||
}
|
||||
|
||||
return new RequestResult(response.StatusCode, document, stream);
|
||||
}
|
||||
}
|
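A minimal usage sketch of the client above through the rate-limited MakeRequest wrapper; `globalBase` and the URL are assumptions.
HttpDownloadClient client = new(globalBase); // `globalBase` is an assumed, already-constructed GlobalBase
RequestResult result = client.MakeRequest("https://example.org/", RequestType.Default);
if ((int)result.statusCode >= 200 && (int)result.statusCode < 300 && result.htmlDocument is not null)
    Console.WriteLine(result.htmlDocument.DocumentNode.SelectSingleNode("//title")?.InnerText);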
@@ -1,5 +1,4 @@
|
||||
using System.Globalization;
|
||||
using System.IO.Compression;
|
||||
using System.IO.Compression;
|
||||
using System.Net;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Text.RegularExpressions;
|
||||
@@ -15,13 +14,16 @@ namespace Tranga.MangaConnectors;
|
||||
public abstract class MangaConnector : GlobalBase
|
||||
{
|
||||
internal DownloadClient downloadClient { get; init; } = null!;
|
||||
public string[] SupportedLanguages;
|
||||
|
||||
protected MangaConnector(GlobalBase clone) : base(clone)
|
||||
protected MangaConnector(GlobalBase clone, string name, string[] supportedLanguages) : base(clone)
|
||||
{
|
||||
Directory.CreateDirectory(settings.coverImageCache);
|
||||
this.name = name;
|
||||
this.SupportedLanguages = supportedLanguages;
|
||||
Directory.CreateDirectory(TrangaSettings.coverImageCache);
|
||||
}
|
||||
|
||||
public abstract string name { get; } //Name of the Connector (e.g. Website)
|
||||
public string name { get; } //Name of the Connector (e.g. Website)
|
||||
|
||||
/// <summary>
|
||||
/// Returns all Publications with the given string.
|
||||
@@ -33,6 +35,8 @@ public abstract class MangaConnector : GlobalBase
|
||||
|
||||
public abstract Manga? GetMangaFromUrl(string url);
|
||||
|
||||
public abstract Manga? GetMangaFromId(string publicationId);
|
||||
|
||||
/// <summary>
|
||||
/// Returns all Chapters of the publication in the provided language.
|
||||
/// If the language is empty or null, returns all Chapters in all Languages.
|
||||
@@ -51,100 +55,42 @@ public abstract class MangaConnector : GlobalBase
|
||||
public Chapter[] GetNewChapters(Manga manga, string language = "en")
|
||||
{
|
||||
Log($"Getting new Chapters for {manga}");
|
||||
Chapter[] newChapters = this.GetChapters(manga, language);
|
||||
Chapter[] allChapters = this.GetChapters(manga, language);
|
||||
if (allChapters.Length < 1)
|
||||
return Array.Empty<Chapter>();
|
||||
|
||||
Log($"Checking for duplicates {manga}");
|
||||
List<Chapter> newChaptersList = newChapters.Where(nChapter =>
|
||||
float.Parse(nChapter.chapterNumber, numberFormatDecimalPoint) > manga.ignoreChaptersBelow &&
|
||||
!nChapter.CheckChapterIsDownloaded(settings.downloadLocation)).ToList();
|
||||
List<Chapter> newChaptersList = allChapters.Where(nChapter => nChapter.chapterNumber >= manga.ignoreChaptersBelow
|
||||
&& !nChapter.CheckChapterIsDownloaded()).ToList();
|
||||
Log($"{newChaptersList.Count} new chapters. {manga}");
|
||||
try
|
||||
{
|
||||
Chapter latestChapterAvailable =
|
||||
allChapters.Max();
|
||||
manga.latestChapterAvailable =
|
||||
Convert.ToSingle(latestChapterAvailable.chapterNumber, numberFormatDecimalPoint);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log(e.ToString());
|
||||
Log($"Failed getting new Chapters for {manga}");
|
||||
}
|
||||
|
||||
return newChaptersList.ToArray();
|
||||
}
|
||||
|
||||
public Chapter[] SelectChapters(Manga manga, string searchTerm, string? language = null)
|
||||
{
|
||||
Chapter[] availableChapters = this.GetChapters(manga, language??"en");
|
||||
Regex volumeRegex = new ("((v(ol)*(olume)*){1} *([0-9]+(-[0-9]+)?){1})", RegexOptions.IgnoreCase);
|
||||
Regex chapterRegex = new ("((c(h)*(hapter)*){1} *([0-9]+(-[0-9]+)?){1})", RegexOptions.IgnoreCase);
|
||||
Regex singleResultRegex = new("([0-9]+)", RegexOptions.IgnoreCase);
|
||||
Regex rangeResultRegex = new("([0-9]+(-[0-9]+))", RegexOptions.IgnoreCase);
|
||||
Regex allRegex = new("a(ll)?", RegexOptions.IgnoreCase);
|
||||
if (volumeRegex.IsMatch(searchTerm) && chapterRegex.IsMatch(searchTerm))
|
||||
{
|
||||
string volume = singleResultRegex.Match(volumeRegex.Match(searchTerm).Value).Value;
|
||||
string chapter = singleResultRegex.Match(chapterRegex.Match(searchTerm).Value).Value;
|
||||
return availableChapters.Where(aCh => aCh.volumeNumber is not null &&
|
||||
aCh.volumeNumber.Equals(volume, StringComparison.InvariantCultureIgnoreCase) &&
|
||||
aCh.chapterNumber.Equals(chapter, StringComparison.InvariantCultureIgnoreCase))
|
||||
.ToArray();
|
||||
}
|
||||
else if (volumeRegex.IsMatch(searchTerm))
|
||||
{
|
||||
string volume = volumeRegex.Match(searchTerm).Value;
|
||||
if (rangeResultRegex.IsMatch(volume))
|
||||
{
|
||||
string range = rangeResultRegex.Match(volume).Value;
|
||||
int start = Convert.ToInt32(range.Split('-')[0]);
|
||||
int end = Convert.ToInt32(range.Split('-')[1]);
|
||||
return availableChapters.Where(aCh => aCh.volumeNumber is not null &&
|
||||
Convert.ToInt32(aCh.volumeNumber) >= start &&
|
||||
Convert.ToInt32(aCh.volumeNumber) <= end).ToArray();
|
||||
}
|
||||
else if (singleResultRegex.IsMatch(volume))
|
||||
{
|
||||
string volumeNumber = singleResultRegex.Match(volume).Value;
|
||||
return availableChapters.Where(aCh =>
|
||||
aCh.volumeNumber is not null &&
|
||||
aCh.volumeNumber.Equals(volumeNumber, StringComparison.InvariantCultureIgnoreCase)).ToArray();
|
||||
}
|
||||
|
||||
}
|
||||
else if (chapterRegex.IsMatch(searchTerm))
|
||||
{
|
||||
string chapter = chapterRegex.Match(searchTerm).Value;
|
||||
if (rangeResultRegex.IsMatch(chapter))
|
||||
{
|
||||
string range = rangeResultRegex.Match(chapter).Value;
|
||||
int start = Convert.ToInt32(range.Split('-')[0]);
|
||||
int end = Convert.ToInt32(range.Split('-')[1]);
|
||||
return availableChapters.Where(aCh => Convert.ToInt32(aCh.chapterNumber) >= start &&
|
||||
Convert.ToInt32(aCh.chapterNumber) <= end).ToArray();
|
||||
}
|
||||
else if (singleResultRegex.IsMatch(chapter))
|
||||
{
|
||||
string chapterNumber = singleResultRegex.Match(chapter).Value;
|
||||
return availableChapters.Where(aCh =>
|
||||
aCh.chapterNumber.Equals(chapterNumber, StringComparison.InvariantCultureIgnoreCase)).ToArray();
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if (rangeResultRegex.IsMatch(searchTerm))
|
||||
{
|
||||
int start = Convert.ToInt32(searchTerm.Split('-')[0]);
|
||||
int end = Convert.ToInt32(searchTerm.Split('-')[1]);
|
||||
return availableChapters[start..(end + 1)];
|
||||
}
|
||||
else if(singleResultRegex.IsMatch(searchTerm))
|
||||
return new [] { availableChapters[Convert.ToInt32(searchTerm)] };
|
||||
else if (allRegex.IsMatch(searchTerm))
|
||||
return availableChapters;
|
||||
}
|
||||
|
||||
return Array.Empty<Chapter>();
|
||||
}
|
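// Illustrative only: search terms SelectChapters above understands (numbers are made up).
//   "vol 2 ch 5" -> the single chapter 5 of volume 2
//   "vol 1-3"    -> every chapter whose volume number lies in 1..3
//   "ch 10-20"   -> every chapter whose chapter number lies in 10..20
//   "1-5"        -> the available chapters at indices 1..5
//   "7"          -> the available chapter at index 7
//   "all"        -> every available chapter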
||||
|
||||
public abstract HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null);
|
||||
|
||||
/// <summary>
|
||||
/// Copies the already downloaded cover from cache to downloadLocation
|
||||
/// </summary>
|
||||
/// <param name="manga">Publication to retrieve Cover for</param>
|
||||
public void CopyCoverFromCacheToDownloadLocation(Manga manga)
|
||||
/// <param name="retries">Number of times to retry to copy the cover (or download it first)</param>
|
||||
public void CopyCoverFromCacheToDownloadLocation(Manga manga, int? retries = 1)
|
||||
{
|
||||
Log($"Copy cover {manga}");
|
||||
//Check if Publication already has a Folder and cover
|
||||
string publicationFolder = manga.CreatePublicationFolder(settings.downloadLocation);
|
||||
string publicationFolder = manga.CreatePublicationFolder(TrangaSettings.downloadLocation);
|
||||
DirectoryInfo dirInfo = new (publicationFolder);
|
||||
if (dirInfo.EnumerateFiles().Any(info => info.Name.Contains("cover", StringComparison.InvariantCultureIgnoreCase)))
|
||||
{
|
||||
@@ -152,12 +98,24 @@ public abstract class MangaConnector : GlobalBase
|
||||
return;
|
||||
}
|
||||
|
||||
string fileInCache = Path.Join(settings.coverImageCache, manga.coverFileNameInCache);
|
||||
string? fileInCache = manga.coverFileNameInCache;
|
||||
if (fileInCache is null || !File.Exists(fileInCache))
|
||||
{
|
||||
Log($"Cloning cover failed: File missing {fileInCache}.");
|
||||
if (retries > 0 && manga.coverUrl is not null)
|
||||
{
|
||||
Log($"Trying {retries} more times");
|
||||
SaveCoverImageToCache(manga.coverUrl, manga.internalId, 0);
|
||||
CopyCoverFromCacheToDownloadLocation(manga, --retries);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
string newFilePath = Path.Join(publicationFolder, $"cover.{Path.GetFileName(fileInCache).Split('.')[^1]}" );
|
||||
Log($"Cloning cover {fileInCache} -> {newFilePath}");
|
||||
File.Copy(fileInCache, newFilePath, true);
|
||||
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
File.SetUnixFileMode(newFilePath, GroupRead | GroupWrite | OtherRead | OtherWrite | UserRead | UserWrite);
|
||||
File.SetUnixFileMode(newFilePath, GroupRead | GroupWrite | UserRead | UserWrite);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -167,43 +125,65 @@ public abstract class MangaConnector : GlobalBase
|
||||
/// <param name="fullPath"></param>
|
||||
/// <param name="requestType">RequestType for Rate-Limit</param>
|
||||
/// <param name="referrer">referrer used in html request header</param>
|
||||
private HttpStatusCode DownloadImage(string imageUrl, string fullPath, byte requestType, string? referrer = null)
|
||||
private HttpStatusCode DownloadImage(string imageUrl, string fullPath, RequestType requestType, string? referrer = null)
|
||||
{
|
||||
DownloadClient.RequestResult requestResult = downloadClient.MakeRequest(imageUrl, requestType, referrer);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.result == Stream.Null)
|
||||
RequestResult requestResult = downloadClient.MakeRequest(imageUrl, requestType, referrer);
|
||||
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return requestResult.statusCode;
|
||||
byte[] buffer = new byte[requestResult.result.Length];
|
||||
requestResult.result.ReadExactly(buffer, 0, buffer.Length);
|
||||
File.WriteAllBytes(fullPath, buffer);
|
||||
if (requestResult.result == Stream.Null)
|
||||
return HttpStatusCode.NotFound;
|
||||
|
||||
FileStream fs = new (fullPath, FileMode.Create);
|
||||
requestResult.result.CopyTo(fs);
|
||||
fs.Close();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
|
||||
protected HttpStatusCode DownloadChapterImages(string[] imageUrls, string saveArchiveFilePath, byte requestType, string? comicInfoPath = null, string? referrer = null, ProgressToken? progressToken = null)
|
||||
protected HttpStatusCode DownloadChapterImages(string[] imageUrls, Chapter chapter, RequestType requestType, string? referrer = null, ProgressToken? progressToken = null)
|
||||
{
|
||||
string saveArchiveFilePath = chapter.GetArchiveFilePath();
|
||||
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
Log($"Downloading Images for {saveArchiveFilePath}");
|
||||
if(progressToken is not null)
|
||||
progressToken.increments = imageUrls.Length;
|
||||
if (progressToken is not null)
|
||||
progressToken.increments += imageUrls.Length;
|
||||
//Check if Publication Directory already exists
|
||||
string directoryPath = Path.GetDirectoryName(saveArchiveFilePath)!;
|
||||
if (!Directory.Exists(directoryPath))
|
||||
Directory.CreateDirectory(directoryPath);
|
||||
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
Directory.CreateDirectory(directoryPath,
|
||||
UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute );
|
||||
else
|
||||
Directory.CreateDirectory(directoryPath);
|
||||
|
||||
if (File.Exists(saveArchiveFilePath)) //Don't download twice.
|
||||
return HttpStatusCode.OK;
|
||||
{
|
||||
progressToken?.Complete();
|
||||
return HttpStatusCode.Created;
|
||||
}
|
||||
|
||||
//Create a temporary folder to store images
|
||||
string tempFolder = Directory.CreateTempSubdirectory().FullName;
|
||||
string tempFolder = Directory.CreateTempSubdirectory("trangatemp").FullName;
|
||||
|
||||
int chapter = 0;
|
||||
int chapterNum = 0;
|
||||
//Download all Images to temporary Folder
|
||||
if (imageUrls.Length == 0)
|
||||
{
|
||||
Log("No images found");
|
||||
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
File.SetUnixFileMode(saveArchiveFilePath, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute);
|
||||
Directory.Delete(tempFolder, true);
|
||||
progressToken?.Complete();
|
||||
return HttpStatusCode.NoContent;
|
||||
}
|
||||
foreach (string imageUrl in imageUrls)
|
||||
{
|
||||
string[] split = imageUrl.Split('.');
|
||||
string extension = split[^1];
|
||||
Log($"Downloading image {chapter + 1:000}/{imageUrls.Length:000}"); //TODO
|
||||
HttpStatusCode status = DownloadImage(imageUrl, Path.Join(tempFolder, $"{chapter++}.{extension}"), requestType, referrer);
|
||||
string extension = imageUrl.Split('.')[^1].Split('?')[0];
|
||||
Log($"Downloading image {chapterNum + 1:000}/{imageUrls.Length:000}"); //TODO
|
||||
HttpStatusCode status = DownloadImage(imageUrl, Path.Join(tempFolder, $"{chapterNum++}.{extension}"), requestType, referrer);
|
||||
Log($"{saveArchiveFilePath} {chapterNum + 1:000}/{imageUrls.Length:000} {status}");
|
||||
if ((int)status < 200 || (int)status >= 300)
|
||||
{
|
||||
progressToken?.Complete();
|
||||
@@ -211,40 +191,45 @@ public abstract class MangaConnector : GlobalBase
|
||||
}
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken?.Complete();
|
||||
progressToken.Complete();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
progressToken?.Increment();
|
||||
}
|
||||
|
||||
if(comicInfoPath is not null)
|
||||
File.Copy(comicInfoPath, Path.Join(tempFolder, "ComicInfo.xml"));
|
||||
File.WriteAllText(Path.Join(tempFolder, "ComicInfo.xml"), chapter.GetComicInfoXmlString());
|
||||
|
||||
Log($"Creating archive {saveArchiveFilePath}");
|
||||
//ZIP-it and ship-it
|
||||
ZipFile.CreateFromDirectory(tempFolder, saveArchiveFilePath);
|
||||
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
File.SetUnixFileMode(saveArchiveFilePath, GroupRead | GroupWrite | OtherRead | OtherWrite | UserRead | UserWrite);
|
||||
chapter.CreateChapterMarker();
|
||||
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
File.SetUnixFileMode(saveArchiveFilePath, UserRead | UserWrite | UserExecute | GroupRead | GroupWrite | GroupExecute | OtherRead | OtherExecute);
|
||||
Directory.Delete(tempFolder, true); //Cleanup
|
||||
|
||||
Log("Created archive.");
|
||||
progressToken?.Complete();
|
||||
Log("Download complete.");
|
||||
return HttpStatusCode.OK;
|
||||
}
|
||||
|
||||
protected string SaveCoverImageToCache(string url, byte requestType)
|
||||
protected string SaveCoverImageToCache(string url, string mangaInternalId, RequestType requestType, string? referrer = null)
|
||||
{
|
||||
string[] split = url.Split('/');
|
||||
string filename = split[^1];
|
||||
string saveImagePath = Path.Join(settings.coverImageCache, filename);
|
||||
Regex urlRex = new (@"https?:\/\/((?:[a-zA-Z0-9-]+\.)+[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+))");
|
||||
//https?:\/\/[a-zA-Z0-9-]+\.([a-zA-Z0-9-]+\.[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+)) for only second level domains
|
||||
Match match = urlRex.Match(url);
|
||||
string filename = $"{match.Groups[1].Value}-{mangaInternalId}.{match.Groups[3].Value}";
|
||||
string saveImagePath = Path.Join(TrangaSettings.coverImageCache, filename);
|
||||
|
||||
if (File.Exists(saveImagePath))
|
||||
return filename;
|
||||
return saveImagePath;
|
||||
|
||||
DownloadClient.RequestResult coverResult = downloadClient.MakeRequest(url, requestType);
|
||||
RequestResult coverResult = downloadClient.MakeRequest(url, requestType, referrer);
|
||||
using MemoryStream ms = new();
|
||||
coverResult.result.CopyTo(ms);
|
||||
Directory.CreateDirectory(TrangaSettings.coverImageCache);
|
||||
File.WriteAllBytes(saveImagePath, ms.ToArray());
|
||||
Log($"Saving cover to {saveImagePath}");
|
||||
return filename;
|
||||
return saveImagePath;
|
||||
}
|
||||
}
|
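An illustrative run of the cover-cache filename logic in SaveCoverImageToCache above; the URL and the internal id are made up.
using System.Text.RegularExpressions;

Regex urlRex = new(@"https?:\/\/((?:[a-zA-Z0-9-]+\.)+[a-zA-Z0-9]+)\/(?:.+\/)*(.+\.([a-zA-Z]+))");
Match match = urlRex.Match("https://uploads.mangadex.org/covers/some-id/cover.jpg"); // assumed URL
string filename = $"{match.Groups[1].Value}-internalId123.{match.Groups[3].Value}";
Console.WriteLine(filename); // uploads.mangadex.org-internalId123.jpg, stored under TrangaSettings.coverImageCache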
@@ -1,4 +1,6 @@
|
||||
using Newtonsoft.Json;
|
||||
using System.Data;
|
||||
using System.Diagnostics;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
@@ -6,12 +8,12 @@ namespace Tranga.MangaConnectors;
|
||||
public class MangaConnectorJsonConverter : JsonConverter
|
||||
{
|
||||
private GlobalBase _clone;
|
||||
private HashSet<MangaConnector> connectors;
|
||||
private readonly HashSet<MangaConnector> _connectors;
|
||||
|
||||
internal MangaConnectorJsonConverter(GlobalBase clone, HashSet<MangaConnector> connectors)
|
||||
{
|
||||
this._clone = clone;
|
||||
this.connectors = connectors;
|
||||
this._connectors = connectors;
|
||||
}
|
||||
|
||||
public override bool CanConvert(Type objectType)
|
||||
@@ -22,19 +24,23 @@ public class MangaConnectorJsonConverter : JsonConverter
|
||||
public override object ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer)
|
||||
{
|
||||
JObject jo = JObject.Load(reader);
|
||||
switch (jo.GetValue("name")!.Value<string>()!)
|
||||
string? connectorName = jo.Value<string>("name");
|
||||
if (connectorName is null)
|
||||
throw new ConstraintException("Name can not be null.");
|
||||
return connectorName switch
|
||||
{
|
||||
case "MangaDex":
|
||||
return this.connectors.First(c => c is MangaDex);
|
||||
case "Manganato":
|
||||
return this.connectors.First(c => c is Manganato);
|
||||
case "MangaKatana":
|
||||
return this.connectors.First(c => c is MangaKatana);
|
||||
case "Mangasee":
|
||||
return this.connectors.First(c => c is Mangasee);
|
||||
}
|
||||
|
||||
throw new Exception();
|
||||
"MangaDex" => this._connectors.First(c => c is MangaDex),
|
||||
"Manganato" => this._connectors.First(c => c is Manganato),
|
||||
"MangaKatana" => this._connectors.First(c => c is MangaKatana),
|
||||
"Mangaworld" => this._connectors.First(c => c is Mangaworld),
|
||||
"Bato" => this._connectors.First(c => c is Bato),
|
||||
"ManhuaPlus" => this._connectors.First(c => c is ManhuaPlus),
|
||||
"MangaHere" => this._connectors.First(c => c is MangaHere),
|
||||
"AsuraToon" => this._connectors.First(c => c is AsuraToon),
|
||||
"Weebcentral" => this._connectors.First(c => c is Weebcentral),
|
||||
"Webtoons" => this._connectors.First(c => c is Webtoons),
|
||||
_ => throw new UnreachableException($"Could not find Connector with name {connectorName}")
|
||||
};
|
||||
}
|
||||
|
||||
public override bool CanWrite => false;
|
||||
|
@@ -1,5 +1,4 @@
|
||||
using System.Globalization;
|
||||
using System.Net;
|
||||
using System.Net;
|
||||
using System.Text.Json.Nodes;
|
||||
using System.Text.RegularExpressions;
|
||||
using Tranga.Jobs;
|
||||
@@ -8,44 +7,34 @@ using JsonSerializer = System.Text.Json.JsonSerializer;
|
||||
namespace Tranga.MangaConnectors;
|
||||
public class MangaDex : MangaConnector
|
||||
{
|
||||
public override string name { get; }
|
||||
|
||||
private enum RequestType : byte
|
||||
//https://api.mangadex.org/docs/3-enumerations/#language-codes--localization
|
||||
//https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes
|
||||
//https://gist.github.com/Josantonius/b455e315bc7f790d14b136d61d9ae469
|
||||
public MangaDex(GlobalBase clone) : base(clone, "MangaDex", ["en","pt","pt-br","it","de","ru","aa","ab","ae","af","ak","am","an","ar-ae","ar-bh","ar-dz","ar-eg","ar-iq","ar-jo","ar-kw","ar-lb","ar-ly","ar-ma","ar-om","ar-qa","ar-sa","ar-sy","ar-tn","ar-ye","ar","as","av","ay","az","ba","be","bg","bh","bi","bm","bn","bo","br","bs","ca","ce","ch","co","cr","cs","cu","cv","cy","da","de-at","de-ch","de-de","de-li","de-lu","div","dv","dz","ee","el","en-au","en-bz","en-ca","en-cb","en-gb","en-ie","en-jm","en-nz","en-ph","en-tt","en-us","en-za","en-zw","eo","es-ar","es-bo","es-cl","es-co","es-cr","es-do","es-ec","es-es","es-gt","es-hn","es-la","es-mx","es-ni","es-pa","es-pe","es-pr","es-py","es-sv","es-us","es-uy","es-ve","es","et","eu","fa","ff","fi","fj","fo","fr-be","fr-ca","fr-ch","fr-fr","fr-lu","fr-mc","fr","fy","ga","gd","gl","gn","gu","gv","ha","he","hi","ho","hr-ba","hr-hr","hr","ht","hu","hy","hz","ia","id","ie","ig","ii","ik","in","io","is","it-ch","it-it","iu","iw","ja","ja-ro","ji","jv","jw","ka","kg","ki","kj","kk","kl","km","kn","ko","ko-ro","kr","ks","ku","kv","kw","ky","kz","la","lb","lg","li","ln","lo","ls","lt","lu","lv","mg","mh","mi","mk","ml","mn","mo","mr","ms-bn","ms-my","ms","mt","my","na","nb","nd","ne","ng","nl-be","nl-nl","nl","nn","no","nr","ns","nv","ny","oc","oj","om","or","os","pa","pi","pl","ps","pt-pt","qu-bo","qu-ec","qu-pe","qu","rm","rn","ro","rw","sa","sb","sc","sd","se-fi","se-no","se-se","se","sg","sh","si","sk","sl","sm","sn","so","sq","sr-ba","sr-sp","sr","ss","st","su","sv-fi","sv-se","sv","sw","sx","syr","ta","te","tg","th","ti","tk","tl","tn","to","tr","ts","tt","tw","ty","ug","uk","ur","us","uz","ve","vi","vo","wa","wo","xh","yi","yo","za","zh-cn","zh-hk","zh-mo","zh-ro","zh-sg","zh-tw","zh","zu"])
|
||||
{
|
||||
Manga,
|
||||
Feed,
|
||||
AtHomeServer,
|
||||
CoverUrl,
|
||||
Author,
|
||||
}
|
||||
|
||||
public MangaDex(GlobalBase clone) : base(clone)
|
||||
{
|
||||
name = "MangaDex";
|
||||
this.downloadClient = new DownloadClient(clone, new Dictionary<byte, int>()
|
||||
{
|
||||
{(byte)RequestType.Manga, 250},
|
||||
{(byte)RequestType.Feed, 250},
|
||||
{(byte)RequestType.AtHomeServer, 40},
|
||||
{(byte)RequestType.CoverUrl, 250},
|
||||
{(byte)RequestType.Author, 250}
|
||||
});
|
||||
this.downloadClient = new HttpDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
Log($"Searching Publications. Term={publicationTitle}");
|
||||
const int limit = 100; //How many values we want returned at once
|
||||
int offset = 0; //"Page"
|
||||
int total = int.MaxValue; //How many total results are there, is updated on first request
|
||||
HashSet<Manga> retManga = new();
|
||||
int loadedPublicationData = 0;
|
||||
List<JsonNode> results = new();
|
||||
|
||||
//Request all search-results
|
||||
while (offset < total) //As long as we haven't requested all "Pages"
|
||||
{
|
||||
//Request next Page
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest(
|
||||
$"https://api.mangadex.org/manga?limit={limit}&title={publicationTitle}&offset={offset}", (byte)RequestType.Manga);
|
||||
RequestResult requestResult = downloadClient.MakeRequest(
|
||||
$"https://api.mangadex.org/manga?limit={limit}&title={publicationTitle}&offset={offset}" +
|
||||
$"&contentRating%5B%5D=safe&contentRating%5B%5D=suggestive&contentRating%5B%5D=erotica" +
|
||||
$"&contentRating%5B%5D=pornographic" +
|
||||
$"&includes%5B%5D=manga&includes%5B%5D=cover_art&includes%5B%5D=author" +
|
||||
$"&includes%5B%5D=artist&includes%5B%5D=tag", RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
break;
|
||||
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
|
||||
@@ -54,28 +43,28 @@ public class MangaDex : MangaConnector
|
||||
if (result is null)
|
||||
break;
|
||||
|
||||
total = result["total"]!.GetValue<int>(); //Update the total number of Publications
|
||||
if(result.ContainsKey("total"))
|
||||
total = result["total"]!.GetValue<int>(); //Update the total number of Publications
|
||||
else continue;
|
||||
|
||||
JsonArray mangaInResult = result["data"]!.AsArray(); //Manga-data-Array
|
||||
//Loop each Manga and extract information from JSON
|
||||
foreach (JsonNode? mangaNode in mangaInResult)
|
||||
{
|
||||
Log($"Getting publication data. {++loadedPublicationData}/{total}");
|
||||
Manga manga = MangaFromJsonObject((JsonObject)mangaNode);
|
||||
retManga.Add(manga); //Add Publication (Manga) to result
|
||||
}
|
||||
if (result.ContainsKey("data"))
|
||||
results.AddRange(result["data"]!.AsArray()!);//Manga-data-Array
|
||||
}
|
||||
Log($"Retrieved {retManga.Count} publications. Term=\"{publicationTitle}\"");
|
||||
|
||||
foreach (JsonNode mangaNode in results)
|
||||
{
|
||||
Log($"Getting publication data. {++loadedPublicationData}/{total}");
|
||||
if(MangaFromJsonObject(mangaNode.AsObject()) is { } manga)
|
||||
retManga.Add(manga); //Add Publication (Manga) to result
|
||||
}
|
||||
Log($"Retrieved {retManga.Count} publications. Term={publicationTitle}");
|
||||
return retManga.ToArray();
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
Regex idRex = new (@"https:\/\/mangadex.org\/title\/([A-z0-9-]*)\/.*");
|
||||
string id = idRex.Match(url).Groups[1].Value;
|
||||
Log($"Got id {id} from {url}");
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest($"https://api.mangadex.org/manga/{id}", (byte)RequestType.Manga);
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest($"https://api.mangadex.org/manga/{publicationId}?includes%5B%5D=manga&includes%5B%5D=cover_art&includes%5B%5D=author&includes%5B%5D=artist&includes%5B%5D=tag", RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return null;
|
||||
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
|
||||
@@ -84,75 +73,107 @@ public class MangaDex : MangaConnector
|
||||
return null;
|
||||
}
|
||||
|
||||
private Manga MangaFromJsonObject(JsonObject manga)
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
JsonObject attributes = manga["attributes"]!.AsObject();
|
||||
Regex idRex = new (@"https:\/\/mangadex.org\/title\/([A-z0-9-]*)\/.*");
|
||||
string id = idRex.Match(url).Groups[1].Value;
|
||||
Log($"Got id {id} from {url}");
|
||||
return GetMangaFromId(id);
|
||||
}
|
||||
|
||||
string publicationId = manga["id"]!.GetValue<string>();
|
||||
private Manga? MangaFromJsonObject(JsonObject manga)
|
||||
{
|
||||
if (!manga.TryGetPropertyValue("id", out JsonNode? idNode))
|
||||
return null;
|
||||
string publicationId = idNode!.GetValue<string>();
|
||||
|
||||
string title = attributes["title"]!.AsObject().ContainsKey("en") && attributes["title"]!["en"] is not null
|
||||
? attributes["title"]!["en"]!.GetValue<string>()
|
||||
: attributes["title"]![((IDictionary<string, JsonNode?>)attributes["title"]!.AsObject()).Keys.First()]!.GetValue<string>();
|
||||
if (!manga.TryGetPropertyValue("attributes", out JsonNode? attributesNode))
|
||||
return null;
|
||||
JsonObject attributes = attributesNode!.AsObject();
|
||||
|
||||
string? description = attributes["description"]!.AsObject().ContainsKey("en") && attributes["description"]!["en"] is not null
|
||||
? attributes["description"]!["en"]!.GetValue<string?>()
|
||||
: null;
|
||||
if (!attributes.TryGetPropertyValue("title", out JsonNode? titleNode))
|
||||
return null;
|
||||
string title = titleNode!.AsObject().ContainsKey("en") switch
|
||||
{
|
||||
true => titleNode.AsObject()["en"]!.GetValue<string>(),
|
||||
false => titleNode.AsObject().First().Value!.GetValue<string>()
|
||||
};
|
||||
|
||||
JsonArray altTitlesObject = attributes["altTitles"]!.AsArray();
|
||||
Dictionary<string, string> altTitlesDict = new();
|
||||
foreach (JsonNode? altTitleNode in altTitlesObject)
|
||||
if (attributes.TryGetPropertyValue("altTitles", out JsonNode? altTitlesNode))
|
||||
{
|
||||
JsonObject altTitleObject = (JsonObject)altTitleNode!;
|
||||
string key = ((IDictionary<string, JsonNode?>)altTitleObject).Keys.ToArray()[0];
|
||||
altTitlesDict.TryAdd(key, altTitleObject[key]!.GetValue<string>());
|
||||
}
|
||||
|
||||
JsonArray tagsObject = attributes["tags"]!.AsArray();
|
||||
HashSet<string> tags = new();
|
||||
foreach (JsonNode? tagNode in tagsObject)
|
||||
{
|
||||
JsonObject tagObject = (JsonObject)tagNode!;
|
||||
if(tagObject["attributes"]!["name"]!.AsObject().ContainsKey("en"))
|
||||
tags.Add(tagObject["attributes"]!["name"]!["en"]!.GetValue<string>());
|
||||
}
|
||||
|
||||
string? posterId = null;
|
||||
HashSet<string> authorIds = new();
|
||||
if (manga.ContainsKey("relationships") && manga["relationships"] is not null)
|
||||
{
|
||||
JsonArray relationships = manga["relationships"]!.AsArray();
|
||||
posterId = relationships.FirstOrDefault(relationship => relationship!["type"]!.GetValue<string>() == "cover_art")!["id"]!.GetValue<string>();
|
||||
foreach (JsonNode? node in relationships.Where(relationship =>
|
||||
relationship!["type"]!.GetValue<string>() == "author"))
|
||||
authorIds.Add(node!["id"]!.GetValue<string>());
|
||||
}
|
||||
string? coverUrl = GetCoverUrl(publicationId, posterId);
|
||||
string? coverCacheName = null;
|
||||
if (coverUrl is not null)
|
||||
coverCacheName = SaveCoverImageToCache(coverUrl, (byte)RequestType.AtHomeServer);
|
||||
|
||||
List<string> authors = GetAuthors(authorIds);
|
||||
|
||||
Dictionary<string, string> linksDict = new();
|
||||
if (attributes.ContainsKey("links") && attributes["links"] is not null)
|
||||
{
|
||||
JsonObject linksObject = attributes["links"]!.AsObject();
|
||||
foreach (string key in ((IDictionary<string, JsonNode?>)linksObject).Keys)
|
||||
foreach (JsonNode? altTitleNode in altTitlesNode!.AsArray())
|
||||
{
|
||||
linksDict.Add(key, linksObject[key]!.GetValue<string>());
|
||||
JsonObject altTitleNodeObject = altTitleNode!.AsObject();
|
||||
altTitlesDict.TryAdd(altTitleNodeObject.First().Key, altTitleNodeObject.First().Value!.GetValue<string>());
|
||||
}
|
||||
}
|
||||
|
||||
int? year = attributes.ContainsKey("year") && attributes["year"] is not null
|
||||
? attributes["year"]!.GetValue<int?>()
|
||||
: null;
|
||||
if (!attributes.TryGetPropertyValue("description", out JsonNode? descriptionNode))
|
||||
return null;
|
||||
string description = descriptionNode!.AsObject().ContainsKey("en") switch
|
||||
{
|
||||
true => descriptionNode.AsObject()["en"]!.GetValue<string>(),
|
||||
false => descriptionNode.AsObject().FirstOrDefault().Value?.GetValue<string>() ?? ""
|
||||
};
|
||||
|
||||
Dictionary<string, string> linksDict = new();
|
||||
if (attributes.TryGetPropertyValue("links", out JsonNode? linksNode) && linksNode is not null)
|
||||
foreach (KeyValuePair<string, JsonNode?> linkKv in linksNode!.AsObject())
|
||||
linksDict.TryAdd(linkKv.Key, linkKv.Value.GetValue<string>());
|
||||
|
||||
string? originalLanguage =
|
||||
attributes.ContainsKey("originalLanguage") && attributes["originalLanguage"] is not null
|
||||
? attributes["originalLanguage"]!.GetValue<string?>()
|
||||
: null;
|
||||
attributes.TryGetPropertyValue("originalLanguage", out JsonNode? originalLanguageNode) switch
|
||||
{
|
||||
true => originalLanguageNode?.GetValue<string>(),
|
||||
false => null
|
||||
};
|
||||
|
||||
string status = attributes["status"]!.GetValue<string>();
|
||||
Manga.ReleaseStatusByte status = Manga.ReleaseStatusByte.Unreleased;
|
||||
if (attributes.TryGetPropertyValue("status", out JsonNode? statusNode))
|
||||
{
|
||||
status = statusNode?.GetValue<string>().ToLower() switch
|
||||
{
|
||||
"ongoing" => Manga.ReleaseStatusByte.Continuing,
|
||||
"completed" => Manga.ReleaseStatusByte.Completed,
|
||||
"hiatus" => Manga.ReleaseStatusByte.OnHiatus,
|
||||
"cancelled" => Manga.ReleaseStatusByte.Cancelled,
|
||||
_ => Manga.ReleaseStatusByte.Unreleased
|
||||
};
|
||||
}
|
||||
|
||||
int? year = attributes.TryGetPropertyValue("year", out JsonNode? yearNode) switch
|
||||
{
|
||||
true => yearNode?.GetValue<int>(),
|
||||
false => null
|
||||
};
|
||||
|
||||
HashSet<string> tags = new(128);
|
||||
if (attributes.TryGetPropertyValue("tags", out JsonNode? tagsNode))
|
||||
foreach (JsonNode? tagNode in tagsNode!.AsArray())
|
||||
tags.Add(tagNode!["attributes"]!["name"]!["en"]!.GetValue<string>());
|
||||
|
||||
|
||||
if (!manga.TryGetPropertyValue("relationships", out JsonNode? relationshipsNode))
|
||||
return null;
|
||||
|
||||
JsonNode? coverNode = relationshipsNode!.AsArray()
|
||||
.FirstOrDefault(rel => rel!["type"]!.GetValue<string>().Equals("cover_art"));
|
||||
if (coverNode is null)
|
||||
return null;
|
||||
string fileName = coverNode["attributes"]!["fileName"]!.GetValue<string>();
|
||||
string coverUrl = $"https://uploads.mangadex.org/covers/{publicationId}/{fileName}";
|
||||
string coverCacheName = SaveCoverImageToCache(coverUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
List<string> authors = new();
|
||||
JsonNode?[] authorNodes = relationshipsNode.AsArray()
|
||||
.Where(rel => rel!["type"]!.GetValue<string>().Equals("author") || rel!["type"]!.GetValue<string>().Equals("artist")).ToArray();
|
||||
foreach (JsonNode? authorNode in authorNodes)
|
||||
{
|
||||
string authorName = authorNode!["attributes"]!["name"]!.GetValue<string>();
|
||||
if(!authors.Contains(authorName))
|
||||
authors.Add(authorName);
|
||||
}
|
||||
|
||||
Manga pub = new(
|
||||
title,
|
||||
@@ -165,10 +186,11 @@ public class MangaDex : MangaConnector
|
||||
linksDict,
|
||||
year,
|
||||
originalLanguage,
|
||||
publicationId,
|
||||
status,
|
||||
publicationId
|
||||
websiteUrl: $"https://mangadex.org/title/{publicationId}"
|
||||
);
|
||||
cachedPublications.Add(pub);
|
||||
AddMangaToCache(pub);
|
||||
return pub;
|
||||
}
|
||||
|
||||
@@ -183,9 +205,9 @@ public class MangaDex : MangaConnector
|
||||
while (offset < total)
|
||||
{
|
||||
//Request next "Page"
|
||||
DownloadClient.RequestResult requestResult =
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(
|
||||
$"https://api.mangadex.org/manga/{manga.publicationId}/feed?limit={limit}&offset={offset}&translatedLanguage%5B%5D={language}", (byte)RequestType.Feed);
|
||||
$"https://api.mangadex.org/manga/{manga.publicationId}/feed?limit={limit}&offset={offset}&translatedLanguage%5B%5D={language}&contentRating%5B%5D=safe&contentRating%5B%5D=suggestive&contentRating%5B%5D=erotica&contentRating%5B%5D=pornographic", RequestType.MangaDexFeed);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
break;
|
||||
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
|
||||
@@ -201,6 +223,7 @@ public class MangaDex : MangaConnector
|
||||
{
|
||||
JsonObject chapter = (JsonObject)jsonNode!;
|
||||
JsonObject attributes = chapter["attributes"]!.AsObject();
|
||||
|
||||
string chapterId = chapter["id"]!.GetValue<string>();
|
||||
|
||||
string? title = attributes.ContainsKey("title") && attributes["title"] is not null
|
||||
@@ -215,30 +238,57 @@ public class MangaDex : MangaConnector
|
||||
? attributes["chapter"]!.GetValue<string>()
|
||||
: "null";
|
||||
|
||||
if(chapterNum is not "null")
|
||||
chapters.Add(new Chapter(manga, title, volume, chapterNum, chapterId));
|
||||
|
||||
if (attributes.ContainsKey("pages") && attributes["pages"] is not null &&
|
||||
attributes["pages"]!.GetValue<int>() < 1)
|
||||
{
|
||||
Log($"Skipping {chapterId} Vol.{volume} Ch.{chapterNum} {title} because it has no pages or is externally linked.");
|
||||
continue;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
if(!chapters.Any(chp =>
|
||||
chp.volumeNumber.Equals(float.Parse(volume??"0", numberFormatDecimalPoint)) &&
|
||||
chp.chapterNumber.Equals(float.Parse(chapterNum, numberFormatDecimalPoint))))
|
||||
chapters.Add(new Chapter(manga, title, volume, chapterNum, chapterId, chapterId));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNum}: {e.Message}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.OrderBy(chapter => Convert.ToSingle(chapter.chapterNumber, numberFormatDecimalPoint)).ToArray();
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
//Request URLs for Chapter-Images
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest($"https://api.mangadex.org/at-home/server/{chapter.url}?forcePort443=false'", (byte)RequestType.AtHomeServer);
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest($"https://api.mangadex.org/at-home/server/{chapter.url}?forcePort443=false", RequestType.MangaDexImage);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
|
||||
if (result is null)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return HttpStatusCode.NoContent;
|
||||
}
|
||||
|
||||
string baseUrl = result["baseUrl"]!.GetValue<string>();
|
||||
string hash = result["chapter"]!["hash"]!.GetValue<string>();
|
||||
@@ -248,56 +298,7 @@ public class MangaDex : MangaConnector
|
||||
foreach (JsonNode? image in imageFileNames)
|
||||
imageUrls.Add($"{baseUrl}/data/{hash}/{image!.GetValue<string>()}");
|
||||
|
||||
string comicInfoPath = Path.GetTempFileName();
|
||||
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
|
||||
|
||||
//Download Chapter-Images
|
||||
return DownloadChapterImages(imageUrls.ToArray(), chapter.GetArchiveFilePath(settings.downloadLocation), (byte)RequestType.AtHomeServer, comicInfoPath, progressToken:progressToken);
|
||||
}
|
||||
|
||||
private string? GetCoverUrl(string publicationId, string? posterId)
|
||||
{
|
||||
Log($"Getting CoverUrl for Publication {publicationId}");
|
||||
if (posterId is null)
|
||||
{
|
||||
Log("No cover.");
|
||||
return null;
|
||||
}
|
||||
|
||||
//Request information where to download Cover
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest($"https://api.mangadex.org/cover/{posterId}", (byte)RequestType.CoverUrl);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return null;
|
||||
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
|
||||
if (result is null)
|
||||
return null;
|
||||
|
||||
string fileName = result["data"]!["attributes"]!["fileName"]!.GetValue<string>();
|
||||
|
||||
string coverUrl = $"https://uploads.mangadex.org/covers/{publicationId}/{fileName}";
|
||||
Log($"Cover-Url {publicationId} -> {coverUrl}");
|
||||
return coverUrl;
|
||||
}
|
||||
|
||||
private List<string> GetAuthors(IEnumerable<string> authorIds)
|
||||
{
|
||||
Log("Retrieving authors.");
|
||||
List<string> ret = new();
|
||||
foreach (string authorId in authorIds)
|
||||
{
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest($"https://api.mangadex.org/author/{authorId}", (byte)RequestType.Author);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return ret;
|
||||
JsonObject? result = JsonSerializer.Deserialize<JsonObject>(requestResult.result);
|
||||
if (result is null)
|
||||
return ret;
|
||||
|
||||
string authorName = result["data"]!["attributes"]!["name"]!.GetValue<string>();
|
||||
ret.Add(authorName);
|
||||
Log($"Got author {authorId} -> {authorName}");
|
||||
}
|
||||
return ret;
|
||||
return DownloadChapterImages(imageUrls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
|
||||
}
|
||||
}
|
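A sketch of how the at-home server response above is turned into page URLs; the values are made up, the URL shape comes from the loop over imageFileNames.
string baseUrl = "https://uploads.mangadex.org";    // result["baseUrl"] (assumed value)
string hash = "abc123";                             // result["chapter"]["hash"] (assumed value)
string page = "1-x0y1z2.png";                       // one entry of result["chapter"]["data"] (assumed value)
string imageUrl = $"{baseUrl}/data/{hash}/{page}";  // https://uploads.mangadex.org/data/abc123/1-x0y1z2.png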
208
Tranga/MangaConnectors/MangaHere.cs
Normal file
@@ -0,0 +1,208 @@
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class MangaHere : MangaConnector
|
||||
{
|
||||
public MangaHere(GlobalBase clone) : base(clone, "MangaHere", ["en"])
|
||||
{
|
||||
this.downloadClient = new ChromiumDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string sanitizedTitle = string.Join('+', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
|
||||
string requestUrl = $"https://www.mangahere.cc/search?title={sanitizedTitle}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
|
||||
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
|
||||
return publications;
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
{
|
||||
if (document.DocumentNode.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),' container ')]").Any(node => node.ChildNodes.Any(cNode => cNode.HasClass("search-keywords"))))
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
List<string> urls = document.DocumentNode
|
||||
.SelectNodes("//a[contains(@href, '/manga/') and not(contains(@href, '.html'))]")
|
||||
.Select(thumb => $"https://www.mangahere.cc{thumb.GetAttributeValue("href", "")}").Distinct().ToList();
|
||||
|
||||
HashSet<Manga> ret = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Manga? manga = GetMangaFromUrl(url);
|
||||
if (manga is not null)
|
||||
ret.Add((Manga)manga);
|
||||
}
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
return GetMangaFromUrl($"https://www.mangahere.cc/manga/{publicationId}");
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
|
||||
return null;
|
||||
|
||||
Regex idRex = new (@"https:\/\/www\.mangahere\.[a-z]{0,63}\/manga\/([0-9A-z\-]+).*");
|
||||
string id = idRex.Match(url).Groups[1].Value;
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, id, url);
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
string originalLanguage = "", status = "";
|
||||
Dictionary<string, string> altTitles = new(), links = new();
|
||||
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
|
||||
|
||||
//We don't fetch the poster because of same-origin restrictions: HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//img[contains(concat(' ',normalize-space(@class),' '),' detail-info-cover-img ')]");
|
||||
string posterUrl = "http://static.mangahere.cc/v20230914/mangahere/images/nopicture.jpg";
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//span[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-title-font ')]");
|
||||
string sortName = titleNode.InnerText;
|
||||
|
||||
List<string> authors = document.DocumentNode
|
||||
.SelectNodes("//p[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-say ')]/a")
|
||||
.Select(node => node.InnerText)
|
||||
.ToList();
|
||||
|
||||
HashSet<string> tags = document.DocumentNode
|
||||
.SelectNodes("//p[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-tag-list ')]/a")
|
||||
.Select(node => node.InnerText)
|
||||
.ToHashSet();
|
||||
|
||||
status = document.DocumentNode.SelectSingleNode("//span[contains(concat(' ',normalize-space(@class),' '),' detail-info-right-title-tip ')]").InnerText;
|
||||
switch (status.ToLower())
|
||||
{
|
||||
case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
|
||||
case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
|
||||
case "discontinued": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
|
||||
case "complete": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
|
||||
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
|
||||
}
|
||||
|
||||
HtmlNode descriptionNode = document.DocumentNode
|
||||
.SelectSingleNode("//p[contains(concat(' ',normalize-space(@class),' '),' fullcontent ')]");
|
||||
string description = descriptionNode.InnerText;
|
||||
|
||||
Manga manga = new(sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
|
||||
coverFileNameInCache, links,
|
||||
null, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
string requestUrl = $"https://www.mangahere.cc/manga/{manga.publicationId}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 || requestResult.htmlDocument is null)
|
||||
return Array.Empty<Chapter>();
|
||||
|
||||
List<string> urls = requestResult.htmlDocument.DocumentNode.SelectNodes("//div[@id='list-1']/ul//li//a[contains(@href, '/manga/')]")
|
||||
.Select(node => node.GetAttributeValue("href", "")).ToList();
|
||||
Regex chapterRex = new(@".*\/manga\/[a-zA-Z0-9\-\._\~\!\$\&\'\(\)\*\+\,\;\=\:\@]+\/v([0-9(TBD)]+)\/c([0-9\.]+)\/.*");
|
||||
|
||||
List<Chapter> chapters = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Match rexMatch = chapterRex.Match(url);
|
||||
|
||||
string volumeNumber = rexMatch.Groups[1].Value == "TBD" ? "0" : rexMatch.Groups[1].Value;
|
||||
string chapterNumber = rexMatch.Groups[2].Value;
|
||||
string fullUrl = $"https://www.mangahere.cc{url}";
|
||||
|
||||
try
|
||||
{
|
||||
chapters.Add(new Chapter(manga, "", volumeNumber, chapterNumber, fullUrl));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
|
||||
}
|
||||
}
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
|
||||
List<string> imageUrls = new();
|
||||
|
||||
int downloaded = 1;
|
||||
int images = 1;
|
||||
string url = string.Join('/', chapter.url.Split('/')[..^1]);
|
||||
do
|
||||
{
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest($"{url}/{downloaded}.html", RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return HttpStatusCode.InternalServerError;
|
||||
}
|
||||
|
||||
imageUrls.AddRange(ParseImageUrlsFromHtml(requestResult.htmlDocument));
|
||||
|
||||
images = requestResult.htmlDocument.DocumentNode
|
||||
.SelectNodes("//a[contains(@href, '/manga/')]")
|
||||
.MaxBy(node => node.GetAttributeValue("data-page", 0))!.GetAttributeValue("data-page", 0);
|
||||
logger?.WriteLine($"MangaHere speciality: Get Image-url {downloaded}/{images}");
|
||||
if (progressToken is not null)
|
||||
{
|
||||
progressToken.increments = images * 2;//we also have to download the images later
|
||||
progressToken.Increment();
|
||||
}
|
||||
} while (downloaded++ <= images);
|
||||
|
||||
if (progressToken is not null)
|
||||
progressToken.increments = images;//reset to the normal length; DownloadChapterImages increases it again by the number of URLs
|
||||
return DownloadChapterImages(imageUrls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
|
||||
}
|
||||
|
||||
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
|
||||
{
|
||||
return document.DocumentNode
|
||||
.SelectNodes("//img[contains(concat(' ',normalize-space(@class),' '),' reader-main-img ')]")
|
||||
.Select(node =>
|
||||
{
|
||||
string url = node.GetAttributeValue("src", "");
|
||||
return url.StartsWith("//") ? $"https:{url}" : url;
|
||||
})
|
||||
.ToArray();
|
||||
}
|
||||
}
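The DownloadChapter loop above discovers the page count from the largest data-page attribute and fetches one reader page per image; a compact hedged sketch of that pattern with HtmlAgilityPack (the class name, synchronous HttpClient use, and the //a[@data-page] selector are illustrative assumptions):

// Illustrative sketch, not the connector: walk MangaHere reader pages 1..N,
// where N is discovered from the largest data-page attribute on each page.
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using HtmlAgilityPack;

public static class ChapterPageSketch
{
    public static List<string> CollectImageUrls(HttpClient client, string chapterBaseUrl)
    {
        List<string> imageUrls = new();
        int page = 1;
        int totalPages = 1;

        do
        {
            string html = client.GetStringAsync($"{chapterBaseUrl}/{page}.html").Result;
            HtmlDocument document = new();
            document.LoadHtml(html);

            // Each reader page exposes one image with the reader-main-img class.
            HtmlNode? img = document.DocumentNode
                .SelectSingleNode("//img[contains(concat(' ',normalize-space(@class),' '),' reader-main-img ')]");
            if (img is not null)
            {
                string src = img.GetAttributeValue("src", "");
                imageUrls.Add(src.StartsWith("//") ? $"https:{src}" : src);
            }

            // The pager links carry data-page attributes; their maximum is the page count.
            totalPages = document.DocumentNode
                .SelectNodes("//a[@data-page]")
                ?.Max(node => node.GetAttributeValue("data-page", 0)) ?? totalPages;
        } while (page++ < totalPages);

        return imageUrls;
    }
}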
|
@ -1,5 +1,4 @@
|
||||
using System.Globalization;
|
||||
using System.Net;
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Tranga.Jobs;
|
||||
@ -8,24 +7,18 @@ namespace Tranga.MangaConnectors;
|
||||
|
||||
public class MangaKatana : MangaConnector
|
||||
{
|
||||
public override string name { get; }
|
||||
|
||||
public MangaKatana(GlobalBase clone) : base(clone)
|
||||
public MangaKatana(GlobalBase clone) : base(clone, "MangaKatana", ["en"])
|
||||
{
|
||||
this.name = "MangaKatana";
|
||||
this.downloadClient = new DownloadClient(clone, new Dictionary<byte, int>()
|
||||
{
|
||||
{1, 60}
|
||||
});
|
||||
this.downloadClient = new HttpDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string sanitizedTitle = string.Join('_', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
|
||||
string sanitizedTitle = string.Join("%20", Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
|
||||
string requestUrl = $"https://mangakatana.com/?search={sanitizedTitle}&search_by=book_name";
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, 1);
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
@ -35,7 +28,7 @@ public class MangaKatana : MangaConnector
|
||||
&& requestResult.redirectedToUrl is not null
|
||||
&& requestResult.redirectedToUrl.Contains("mangakatana.com/manga"))
|
||||
{
|
||||
return new [] { ParseSinglePublicationFromHtml(requestResult.result, requestResult.redirectedToUrl.Split('/')[^1]) };
|
||||
return new [] { ParseSinglePublicationFromHtml(requestResult.result, requestResult.redirectedToUrl.Split('/')[^1], requestResult.redirectedToUrl) };
|
||||
}
|
||||
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.result);
|
||||
@ -43,13 +36,18 @@ public class MangaKatana : MangaConnector
|
||||
return publications;
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
return GetMangaFromUrl($"https://mangakatana.com/manga/{publicationId}");
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest(url, 1);
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return null;
|
||||
return ParseSinglePublicationFromHtml(requestResult.result, url.Split('/')[^1]);
|
||||
return ParseSinglePublicationFromHtml(requestResult.result, url.Split('/')[^1], url);
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(Stream html)
|
||||
@ -79,18 +77,18 @@ public class MangaKatana : MangaConnector
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(Stream html, string publicationId)
|
||||
private Manga ParseSinglePublicationFromHtml(Stream html, string publicationId, string websiteUrl)
|
||||
{
|
||||
StreamReader reader = new(html);
|
||||
string htmlString = reader.ReadToEnd();
|
||||
HtmlDocument document = new();
|
||||
document.LoadHtml(htmlString);
|
||||
string status = "";
|
||||
Dictionary<string, string> altTitles = new();
|
||||
Dictionary<string, string>? links = null;
|
||||
HashSet<string> tags = new();
|
||||
string[] authors = Array.Empty<string>();
|
||||
string originalLanguage = "";
|
||||
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
|
||||
|
||||
HtmlNode infoNode = document.DocumentNode.SelectSingleNode("//*[@id='single_book']");
|
||||
string sortName = infoNode.Descendants("h1").First(n => n.HasClass("heading")).InnerText;
|
||||
@ -113,7 +111,11 @@ public class MangaKatana : MangaConnector
|
||||
authors = value.Split(',');
|
||||
break;
|
||||
case "status":
|
||||
status = value;
|
||||
switch (value.ToLower())
|
||||
{
|
||||
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
|
||||
case "completed": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
|
||||
}
|
||||
break;
|
||||
case "genres":
|
||||
tags = row.SelectNodes("div").Last().Descendants("a").Select(a => a.InnerText).ToHashSet();
|
||||
@ -124,7 +126,7 @@ public class MangaKatana : MangaConnector
|
||||
string posterUrl = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[1]/div").Descendants("img").First()
|
||||
.GetAttributes().First(a => a.Name == "src").Value;
|
||||
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, 1);
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
string description = document.DocumentNode.SelectSingleNode("//*[@id='single_book']/div[3]/p").InnerText;
|
||||
while (description.StartsWith('\n'))
|
||||
@ -140,8 +142,8 @@ public class MangaKatana : MangaConnector
|
||||
}
|
||||
|
||||
Manga manga = new (sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
|
||||
year, originalLanguage, status, publicationId);
|
||||
cachedPublications.Add(manga);
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
@ -150,15 +152,15 @@ public class MangaKatana : MangaConnector
|
||||
Log($"Getting chapters {manga}");
|
||||
string requestUrl = $"https://mangakatana.com/manga/{manga.publicationId}";
|
||||
// Leaving this in to verify that the page exists
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, 1);
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Chapter>();
|
||||
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestUrl);
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.OrderBy(chapter => Convert.ToSingle(chapter.chapterNumber, numberFormatDecimalPoint)).ToArray();
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
private List<Chapter> ParseChaptersFromHtml(Manga manga, string mangaUrl)
|
||||
@ -171,16 +173,27 @@ public class MangaKatana : MangaConnector
|
||||
|
||||
HtmlNode chapterList = document.DocumentNode.SelectSingleNode("//div[contains(@class, 'chapters')]/table/tbody");
|
||||
|
||||
Regex volumeRex = new(@"[0-9a-z\-\.]+\/[0-9a-z\-]*v([0-9\.]+)");
|
||||
Regex chapterNumRex = new(@"[0-9a-z\-\.]+\/[0-9a-z\-]*c([0-9\.]+)");
|
||||
Regex chapterNameRex = new(@"Chapter [0-9\.]+:? (.*)");
|
||||
|
||||
foreach (HtmlNode chapterInfo in chapterList.Descendants("tr"))
|
||||
{
|
||||
string fullString = chapterInfo.Descendants("a").First().InnerText;
|
||||
|
||||
string? volumeNumber = fullString.Contains("Vol.") ? fullString.Replace("Vol.", "").Split(' ')[0] : null;
|
||||
string chapterNumber = fullString.Split(':')[0].Split("Chapter ")[1].Split(" ")[0].Replace('-', '.');
|
||||
string chapterName = string.Concat(fullString.Split(':')[1..]);
|
||||
string url = chapterInfo.Descendants("a").First()
|
||||
.GetAttributeValue("href", "");
|
||||
ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
|
||||
|
||||
string? volumeNumber = volumeRex.IsMatch(url) ? volumeRex.Match(url).Groups[1].Value : null;
|
||||
string chapterNumber = chapterNumRex.Match(url).Groups[1].Value;
|
||||
string chapterName = chapterNameRex.Match(fullString).Groups[1].Value;
|
||||
try
|
||||
{
|
||||
ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
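The three regexes above pull volume, chapter number, and chapter title out of MangaKatana chapter URLs and link text; a small sketch of that extraction on an invented URL (the sample values and class name are hypothetical):

// Sketch of the regex-based extraction used above; the sample inputs are invented.
using System;
using System.Text.RegularExpressions;

public static class KatanaChapterParseSketch
{
    private static readonly Regex VolumeRex = new(@"[0-9a-z\-\.]+\/[0-9a-z\-]*v([0-9\.]+)");
    private static readonly Regex ChapterNumRex = new(@"[0-9a-z\-\.]+\/[0-9a-z\-]*c([0-9\.]+)");
    private static readonly Regex ChapterNameRex = new(@"Chapter [0-9\.]+:? (.*)");

    public static void Main()
    {
        // Hypothetical link as it might appear in the chapter table.
        string url = "https://mangakatana.com/manga/some-title.12345/v2/c15.5";
        string linkText = "Chapter 15.5: A Hypothetical Title";

        string? volume = VolumeRex.IsMatch(url) ? VolumeRex.Match(url).Groups[1].Value : null;
        string chapterNumber = ChapterNumRex.Match(url).Groups[1].Value;
        string chapterName = ChapterNameRex.Match(linkText).Groups[1].Value;

        Console.WriteLine($"vol={volume ?? "none"} ch={chapterNumber} name={chapterName}");
        // Prints: vol=2 ch=15.5 name=A Hypothetical Title
    }
}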
@ -189,22 +202,26 @@ public class MangaKatana : MangaConnector
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
string requestUrl = chapter.url;
|
||||
// Leaving this in to check if the page exists
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, 1);
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
|
||||
string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
|
||||
|
||||
string comicInfoPath = Path.GetTempFileName();
|
||||
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
|
||||
|
||||
return DownloadChapterImages(imageUrls, chapter.GetArchiveFilePath(settings.downloadLocation), 1, comicInfoPath, "https://mangakatana.com/", progressToken:progressToken);
|
||||
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken);
|
||||
}
|
||||
|
||||
private string[] ParseImageUrlsFromHtml(string mangaUrl)
|
||||
|
@ -8,44 +8,43 @@ namespace Tranga.MangaConnectors;
|
||||
|
||||
public class Manganato : MangaConnector
|
||||
{
|
||||
public override string name { get; }
|
||||
|
||||
public Manganato(GlobalBase clone) : base(clone)
|
||||
public Manganato(GlobalBase clone) : base(clone, "Manganato", ["en"])
|
||||
{
|
||||
this.name = "Manganato";
|
||||
this.downloadClient = new DownloadClient(clone, new Dictionary<byte, int>()
|
||||
{
|
||||
{1, 60}
|
||||
});
|
||||
this.downloadClient = new HttpDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string sanitizedTitle = string.Join('_', Regex.Matches(publicationTitle, "[A-z]*")).ToLower();
|
||||
string requestUrl = $"https://manganato.com/search/story/{sanitizedTitle}";
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, 1);
|
||||
string sanitizedTitle = string.Join('_', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
|
||||
string requestUrl = $"https://manganato.gg/search/story/{sanitizedTitle}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.result);
|
||||
if (requestResult.htmlDocument is null)
|
||||
return Array.Empty<Manga>();
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
|
||||
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
|
||||
return publications;
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(Stream html)
|
||||
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
{
|
||||
StreamReader reader = new (html);
|
||||
string htmlString = reader.ReadToEnd();
|
||||
HtmlDocument document = new ();
|
||||
document.LoadHtml(htmlString);
|
||||
IEnumerable<HtmlNode> searchResults = document.DocumentNode.Descendants("div").Where(n => n.HasClass("search-story-item"));
|
||||
List<HtmlNode> searchResults = document.DocumentNode.Descendants("div").Where(n => n.HasClass("story_item")).ToList();
|
||||
Log($"{searchResults.Count} items.");
|
||||
List<string> urls = new();
|
||||
foreach (HtmlNode mangaResult in searchResults)
|
||||
{
|
||||
urls.Add(mangaResult.Descendants("a").First(n => n.HasClass("item-title")).GetAttributes()
|
||||
.First(a => a.Name == "href").Value);
|
||||
try
|
||||
{
|
||||
urls.Add(mangaResult.Descendants("h3").First(n => n.HasClass("story_name"))
|
||||
.Descendants("a").First().GetAttributeValue("href", ""));
|
||||
} catch
|
||||
{
|
||||
//failed to get a url; skip this result
|
||||
}
|
||||
}
|
||||
|
||||
HashSet<Manga> ret = new();
|
||||
@ -59,116 +58,131 @@ public class Manganato : MangaConnector
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
return GetMangaFromUrl($"https://chapmanganato.com/{publicationId}");
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest(url, 1);
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return null;
|
||||
|
||||
return ParseSinglePublicationFromHtml(requestResult.result, url.Split('/')[^1]);
|
||||
if (requestResult.htmlDocument is null)
|
||||
return null;
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, url.Split('/')[^1], url);
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(Stream html, string publicationId)
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
StreamReader reader = new (html);
|
||||
string htmlString = reader.ReadToEnd();
|
||||
HtmlDocument document = new ();
|
||||
document.LoadHtml(htmlString);
|
||||
string status = "";
|
||||
Dictionary<string, string> altTitles = new();
|
||||
Dictionary<string, string>? links = null;
|
||||
HashSet<string> tags = new();
|
||||
string[] authors = Array.Empty<string>();
|
||||
string originalLanguage = "";
|
||||
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
|
||||
|
||||
HtmlNode infoNode = document.DocumentNode.Descendants("div").First(d => d.HasClass("story-info-right"));
|
||||
HtmlNode infoNode = document.DocumentNode.Descendants("ul").First(d => d.HasClass("manga-info-text"));
|
||||
|
||||
string sortName = infoNode.Descendants("h1").First().InnerText;
|
||||
|
||||
HtmlNode infoTable = infoNode.Descendants().First(d => d.Name == "table");
|
||||
|
||||
foreach (HtmlNode row in infoTable.Descendants("tr"))
|
||||
foreach (HtmlNode li in infoNode.Descendants("li"))
|
||||
{
|
||||
string key = row.SelectNodes("td").First().InnerText.ToLower();
|
||||
string value = row.SelectNodes("td").Last().InnerText;
|
||||
string keySanitized = string.Concat(Regex.Matches(key, "[a-z]"));
|
||||
string text = li.InnerText.Trim().ToLower();
|
||||
|
||||
switch (keySanitized)
|
||||
if (text.StartsWith("author(s) :"))
|
||||
{
|
||||
case "alternative":
|
||||
string[] alts = value.Split(" ; ");
|
||||
for(int i = 0; i < alts.Length; i++)
|
||||
altTitles.Add(i.ToString(), alts[i]);
|
||||
break;
|
||||
case "authors":
|
||||
authors = value.Split('-');
|
||||
break;
|
||||
case "status":
|
||||
status = value;
|
||||
break;
|
||||
case "genres":
|
||||
string[] genres = value.Split(" - ");
|
||||
tags = genres.ToHashSet();
|
||||
break;
|
||||
authors = li.Descendants("a").Select(a => a.InnerText.Trim()).ToArray();
|
||||
}
|
||||
else if (text.StartsWith("status :"))
|
||||
{
|
||||
string status = text.Replace("status :", "").Trim().ToLower();
|
||||
if (string.IsNullOrWhiteSpace(status))
|
||||
releaseStatus = Manga.ReleaseStatusByte.Continuing;
|
||||
else if (status == "ongoing")
|
||||
releaseStatus = Manga.ReleaseStatusByte.Continuing;
|
||||
else
|
||||
releaseStatus = Enum.Parse<Manga.ReleaseStatusByte>(status, true);
|
||||
}
|
||||
else if (li.HasClass("genres"))
|
||||
{
|
||||
tags = li.Descendants("a").Select(a => a.InnerText.Trim()).ToHashSet();
|
||||
}
|
||||
}
|
||||
|
||||
string posterUrl = document.DocumentNode.Descendants("span").First(s => s.HasClass("info-image")).Descendants("img").First()
|
||||
string posterUrl = document.DocumentNode.Descendants("div").First(s => s.HasClass("manga-info-pic")).Descendants("img").First()
|
||||
.GetAttributes().First(a => a.Name == "src").Value;
|
||||
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, 1);
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover, "https://www.manganato.gg/");
|
||||
|
||||
string description = document.DocumentNode.Descendants("div").First(d => d.HasClass("panel-story-info-description"))
|
||||
string description = document.DocumentNode.SelectSingleNode("//div[@id='contentBox']")
|
||||
.InnerText.Replace("Description :", "");
|
||||
while (description.StartsWith('\n'))
|
||||
description = description.Substring(1);
|
||||
|
||||
string yearString = document.DocumentNode.Descendants("li").Last(li => li.HasClass("a-h")).Descendants("span")
|
||||
.First(s => s.HasClass("chapter-time")).InnerText;
|
||||
int year = Convert.ToInt32(yearString.Split(',')[^1]) + 2000;
|
||||
string pattern = "MMM-dd-yyyy HH:mm";
|
||||
|
||||
HtmlNode? oldestChapter = document.DocumentNode
|
||||
.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),' row ')]/span[@title]").MaxBy(
|
||||
node => DateTime.ParseExact(node.GetAttributeValue("title", "Dec-31-2400 23:59"), pattern,
|
||||
CultureInfo.InvariantCulture).Millisecond);
|
||||
|
||||
|
||||
int year = DateTime.ParseExact(oldestChapter?.GetAttributeValue("title", "Dec 31 2400, 23:59")??"Dec 31 2400, 23:59", pattern,
|
||||
CultureInfo.InvariantCulture).Year;
|
||||
|
||||
Manga manga = new (sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
|
||||
year, originalLanguage, status, publicationId);
|
||||
cachedPublications.Add(manga);
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
string requestUrl = $"https://chapmanganato.com/{manga.publicationId}";
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, 1);
|
||||
string requestUrl = manga.websiteUrl;
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Chapter>();
|
||||
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.result);
|
||||
if (requestResult.htmlDocument is null)
|
||||
return Array.Empty<Chapter>();
|
||||
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.OrderBy(chapter => Convert.ToSingle(chapter.chapterNumber, numberFormatDecimalPoint)).ToArray();
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
private List<Chapter> ParseChaptersFromHtml(Manga manga, Stream html)
|
||||
private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
|
||||
{
|
||||
StreamReader reader = new (html);
|
||||
string htmlString = reader.ReadToEnd();
|
||||
HtmlDocument document = new ();
|
||||
document.LoadHtml(htmlString);
|
||||
List<Chapter> ret = new();
|
||||
|
||||
HtmlNode chapterList = document.DocumentNode.Descendants("ul").First(l => l.HasClass("row-content-chapter"));
|
||||
HtmlNode chapterList = document.DocumentNode.Descendants("div").First(l => l.HasClass("chapter-list"));
|
||||
|
||||
foreach (HtmlNode chapterInfo in chapterList.Descendants("li"))
|
||||
Regex volRex = new(@"Vol\.([0-9]+).*");
|
||||
Regex chapterRex = new(@"https:\/\/chapmanganato.[A-z]+\/manga-[A-z0-9]+\/chapter-([0-9\.]+)");
|
||||
Regex nameRex = new(@"Chapter ([0-9]+(\.[0-9]+)*){1}:? (.*)");
|
||||
|
||||
foreach (HtmlNode chapterInfo in chapterList.Descendants("div").Where(x => x.HasClass("row")))
|
||||
{
|
||||
string fullString = chapterInfo.Descendants("a").First(d => d.HasClass("chapter-name")).InnerText;
|
||||
|
||||
string? volumeNumber = fullString.Contains("Vol.") ? fullString.Replace("Vol.", "").Split(' ')[0] : null;
|
||||
string chapterNumber = fullString.Split(':')[0].Split("Chapter ")[1].Replace('-','.');
|
||||
string chapterName = string.Concat(fullString.Split(':')[1..]);
|
||||
string url = chapterInfo.Descendants("a").First(d => d.HasClass("chapter-name"))
|
||||
.GetAttributeValue("href", "");
|
||||
ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
|
||||
string url = chapterInfo.Descendants("a").First().GetAttributeValue("href", "");
|
||||
var name = chapterInfo.Descendants("a").First().InnerText.Trim();
|
||||
string chapterName = nameRex.Match(name).Groups[3].Value;
|
||||
string chapterNumber = Regex.Match(name, @"Chapter ([0-9]+(\.[0-9]+)*)").Groups[1].Value;
|
||||
string? volumeNumber = Regex.Match(chapterName, @"Vol\.([0-9]+)").Groups[1].Value;
|
||||
if (string.IsNullOrWhiteSpace(volumeNumber))
|
||||
volumeNumber = "0";
|
||||
try
|
||||
{
|
||||
ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
|
||||
}
|
||||
}
|
||||
ret.Reverse();
|
||||
return ret;
|
||||
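A short sketch of the title parsing in the Manganato loop above, run on an invented chapter title; the regexes mirror the hunk and everything else (class name, sample string) is hypothetical:

// Sketch only; the sample chapter title is invented.
using System;
using System.Text.RegularExpressions;

public static class ManganatoTitleParseSketch
{
    public static void Main()
    {
        string name = "Chapter 102.5: A Hypothetical Arc Begins";

        // Group 1 is the chapter number (with optional decimal part), group 3 the title text.
        Regex nameRex = new(@"Chapter ([0-9]+(\.[0-9]+)*){1}:? (.*)");
        string chapterName = nameRex.Match(name).Groups[3].Value;
        string chapterNumber = Regex.Match(name, @"Chapter ([0-9]+(\.[0-9]+)*)").Groups[1].Value;

        string volumeNumber = Regex.Match(chapterName, @"Vol\.([0-9]+)").Groups[1].Value;
        if (string.IsNullOrWhiteSpace(volumeNumber))
            volumeNumber = "0"; // same fallback the connector uses

        Console.WriteLine($"vol={volumeNumber} ch={chapterNumber} name={chapterName}");
        // Prints: vol=0 ch=102.5 name=A Hypothetical Arc Begins
    }
}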
@ -177,29 +191,35 @@ public class Manganato : MangaConnector
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
string requestUrl = chapter.url;
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, 1);
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
|
||||
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.result);
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return HttpStatusCode.InternalServerError;
|
||||
}
|
||||
|
||||
string comicInfoPath = Path.GetTempFileName();
|
||||
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
|
||||
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
|
||||
|
||||
return DownloadChapterImages(imageUrls, chapter.GetArchiveFilePath(settings.downloadLocation), 1, comicInfoPath, "https://chapmanganato.com/", progressToken:progressToken);
|
||||
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, "https://www.manganato.gg", progressToken:progressToken);
|
||||
}
|
||||
|
||||
private string[] ParseImageUrlsFromHtml(Stream html)
|
||||
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
|
||||
{
|
||||
StreamReader reader = new (html);
|
||||
string htmlString = reader.ReadToEnd();
|
||||
HtmlDocument document = new ();
|
||||
document.LoadHtml(htmlString);
|
||||
List<string> ret = new();
|
||||
|
||||
HtmlNode imageContainer =
|
||||
|
@ -1,279 +0,0 @@
|
||||
using System.Globalization;
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Xml.Linq;
|
||||
using HtmlAgilityPack;
|
||||
using Newtonsoft.Json;
|
||||
using PuppeteerSharp;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class Mangasee : MangaConnector
|
||||
{
|
||||
public override string name { get; }
|
||||
private IBrowser? _browser;
|
||||
private const string ChromiumVersion = "1154303";
|
||||
|
||||
public Mangasee(GlobalBase clone) : base(clone)
|
||||
{
|
||||
this.name = "Mangasee";
|
||||
this.downloadClient = new DownloadClient(clone, new Dictionary<byte, int>()
|
||||
{
|
||||
{ 1, 60 }
|
||||
});
|
||||
|
||||
Task d = new Task(DownloadBrowser);
|
||||
d.Start();
|
||||
}
|
||||
|
||||
private async void DownloadBrowser()
|
||||
{
|
||||
BrowserFetcher browserFetcher = new BrowserFetcher();
|
||||
foreach(string rev in browserFetcher.LocalRevisions().Where(rev => rev != ChromiumVersion))
|
||||
browserFetcher.Remove(rev);
|
||||
if (!browserFetcher.LocalRevisions().Contains(ChromiumVersion))
|
||||
{
|
||||
Log("Downloading headless browser");
|
||||
DateTime last = DateTime.Now.Subtract(TimeSpan.FromSeconds(5));
|
||||
browserFetcher.DownloadProgressChanged += (_, args) =>
|
||||
{
|
||||
double currentBytes = Convert.ToDouble(args.BytesReceived) / Convert.ToDouble(args.TotalBytesToReceive);
|
||||
if (args.TotalBytesToReceive == args.BytesReceived)
|
||||
Log("Browser downloaded.");
|
||||
else if (DateTime.Now > last.AddSeconds(1))
|
||||
{
|
||||
Log($"Browser download progress: {currentBytes:P2}");
|
||||
last = DateTime.Now;
|
||||
}
|
||||
|
||||
};
|
||||
if (!browserFetcher.CanDownloadAsync(ChromiumVersion).Result)
|
||||
{
|
||||
Log($"Can't download browser version {ChromiumVersion}");
|
||||
throw new Exception();
|
||||
}
|
||||
await browserFetcher.DownloadAsync(ChromiumVersion);
|
||||
}
|
||||
|
||||
Log("Starting Browser.");
|
||||
this._browser = await Puppeteer.LaunchAsync(new LaunchOptions
|
||||
{
|
||||
Headless = true,
|
||||
ExecutablePath = browserFetcher.GetExecutablePath(ChromiumVersion),
|
||||
Args = new [] {
|
||||
"--disable-gpu",
|
||||
"--disable-dev-shm-usage",
|
||||
"--disable-setuid-sandbox",
|
||||
"--no-sandbox"}
|
||||
});
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string requestUrl = $"https://mangasee123.com/_search.php";
|
||||
DownloadClient.RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, 1);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.result, publicationTitle);
|
||||
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
|
||||
return publications;
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
while (this._browser is null)
|
||||
{
|
||||
Log("Waiting for headless browser to download...");
|
||||
Thread.Sleep(1000);
|
||||
}
|
||||
|
||||
Regex publicationIdRex = new(@"https:\/\/mangasee123.com\/manga\/(.*)(\/.*)*");
|
||||
string publicationId = publicationIdRex.Match(url).Groups[1].Value;
|
||||
IPage page = _browser!.NewPageAsync().Result;
|
||||
IResponse response = page.GoToAsync(url, WaitUntilNavigation.DOMContentLoaded).Result;
|
||||
if (response.Ok)
|
||||
{
|
||||
HtmlDocument document = new();
|
||||
document.LoadHtml(page.GetContentAsync().Result);
|
||||
page.CloseAsync();
|
||||
return ParseSinglePublicationFromHtml(document, publicationId);
|
||||
}
|
||||
|
||||
page.CloseAsync();
|
||||
return null;
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(Stream html, string publicationTitle)
|
||||
{
|
||||
string jsonString = new StreamReader(html).ReadToEnd();
|
||||
List<SearchResultItem> result = JsonConvert.DeserializeObject<List<SearchResultItem>>(jsonString)!;
|
||||
Dictionary<SearchResultItem, int> queryFiltered = new();
|
||||
foreach (SearchResultItem resultItem in result)
|
||||
{
|
||||
int matches = resultItem.GetMatches(publicationTitle);
|
||||
if (matches > 0)
|
||||
queryFiltered.TryAdd(resultItem, matches);
|
||||
}
|
||||
|
||||
queryFiltered = queryFiltered.Where(item => item.Value >= publicationTitle.Split(' ').Length - 1)
|
||||
.ToDictionary(item => item.Key, item => item.Value);
|
||||
|
||||
Log($"Retrieved {queryFiltered.Count} publications.");
|
||||
|
||||
HashSet<Manga> ret = new();
|
||||
List<SearchResultItem> orderedFiltered =
|
||||
queryFiltered.OrderBy(item => item.Value).ToDictionary(item => item.Key, item => item.Value).Keys.ToList();
|
||||
|
||||
foreach (SearchResultItem orderedItem in orderedFiltered)
|
||||
{
|
||||
Manga? manga = GetMangaFromUrl($"https://mangasee123.com/manga/{orderedItem.i}");
|
||||
if (manga is not null)
|
||||
ret.Add((Manga)manga);
|
||||
}
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId)
|
||||
{
|
||||
string originalLanguage = "", status = "";
|
||||
Dictionary<string, string> altTitles = new(), links = new();
|
||||
HashSet<string> tags = new();
|
||||
|
||||
HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//img");
|
||||
string posterUrl = posterNode.GetAttributeValue("src", "");
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, 1);
|
||||
|
||||
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//div[@class='BoxBody']//div[@class='row']//h1");
|
||||
string sortName = titleNode.InnerText;
|
||||
|
||||
HtmlNode[] authorsNodes = document.DocumentNode.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Author(s):']/..").Descendants("a").ToArray();
|
||||
List<string> authors = new();
|
||||
foreach(HtmlNode authorNode in authorsNodes)
|
||||
authors.Add(authorNode.InnerText);
|
||||
|
||||
HtmlNode[] genreNodes = document.DocumentNode.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Genre(s):']/..").Descendants("a").ToArray();
|
||||
foreach (HtmlNode genreNode in genreNodes)
|
||||
tags.Add(genreNode.InnerText);
|
||||
|
||||
HtmlNode yearNode = document.DocumentNode.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Released:']/..").Descendants("a").First();
|
||||
int year = Convert.ToInt32(yearNode.InnerText);
|
||||
|
||||
HtmlNode[] statusNodes = document.DocumentNode.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Status:']/..").Descendants("a").ToArray();
|
||||
foreach(HtmlNode statusNode in statusNodes)
|
||||
if (statusNode.InnerText.Contains("publish", StringComparison.CurrentCultureIgnoreCase))
|
||||
status = statusNode.InnerText.Split(' ')[0];
|
||||
|
||||
HtmlNode descriptionNode = document.DocumentNode.SelectNodes("//div[@class='BoxBody']//div[@class='row']//span[text()='Description:']/..").Descendants("div").First();
|
||||
string description = descriptionNode.InnerText;
|
||||
|
||||
Manga manga = new (sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
|
||||
year, originalLanguage, status, publicationId);
|
||||
cachedPublications.Add(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
// ReSharper disable once ClassNeverInstantiated.Local Will be instantiated during deserialization
|
||||
private class SearchResultItem
|
||||
{
|
||||
public string i { get; init; }
|
||||
public string s { get; init; }
|
||||
public string[] a { get; init; }
|
||||
|
||||
[JsonConstructor]
|
||||
public SearchResultItem(string i, string s, string[] a)
|
||||
{
|
||||
this.i = i;
|
||||
this.s = s;
|
||||
this.a = a;
|
||||
}
|
||||
|
||||
public int GetMatches(string title)
|
||||
{
|
||||
int ret = 0;
|
||||
Regex cleanRex = new("[A-z0-9]*");
|
||||
string[] badWords = { "a", "an", "no", "ni", "so", "as", "and", "the", "of", "that", "in", "is", "for" };
|
||||
|
||||
string[] titleTerms = title.Split(new[] { ' ', '-' }).Where(str => !badWords.Contains(str)).ToArray();
|
||||
|
||||
foreach (Match matchTerm in cleanRex.Matches(this.i))
|
||||
ret += titleTerms.Count(titleTerm =>
|
||||
titleTerm.Equals(matchTerm.Value, StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
foreach (Match matchTerm in cleanRex.Matches(this.s))
|
||||
ret += titleTerms.Count(titleTerm =>
|
||||
titleTerm.Equals(matchTerm.Value, StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
foreach(string alt in this.a)
|
||||
foreach (Match matchTerm in cleanRex.Matches(alt))
|
||||
ret += titleTerms.Count(titleTerm =>
|
||||
titleTerm.Equals(matchTerm.Value, StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
return ret;
|
||||
}
|
||||
}
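The removed GetMatches method scores a search result by counting how many query terms reappear in its id, title, and alt titles; a toy sketch of the same term-overlap idea (method name and sample data are invented):

// Toy illustration of the removed term-overlap scoring; all sample data is invented.
using System;
using System.Linq;
using System.Text.RegularExpressions;

public static class TermOverlapSketch
{
    private static readonly string[] BadWords = { "a", "an", "no", "ni", "so", "as", "and", "the", "of", "that", "in", "is", "for" };

    public static int Score(string query, params string[] candidateFields)
    {
        Regex cleanRex = new("[A-z0-9]*");
        string[] queryTerms = query.Split(' ', '-').Where(t => t.Length > 0 && !BadWords.Contains(t)).ToArray();

        int score = 0;
        foreach (string field in candidateFields)
            foreach (Match term in cleanRex.Matches(field))
                score += queryTerms.Count(q => q.Equals(term.Value, StringComparison.OrdinalIgnoreCase));
        return score;
    }

    public static void Main()
    {
        // Hypothetical search result fields: id, display title, one alt title.
        int score = Score("one punch man", "One-Punch-Man", "One Punch Man", "Onepunch Man");
        Console.WriteLine(score); // counts every field term that also appears in the query
    }
}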
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
XDocument doc = XDocument.Load($"https://mangasee123.com/rss/{manga.publicationId}.xml");
|
||||
XElement[] chapterItems = doc.Descendants("item").ToArray();
|
||||
List<Chapter> chapters = new();
|
||||
foreach (XElement chapter in chapterItems)
|
||||
{
|
||||
string volumeNumber = "1";
|
||||
string chapterName = chapter.Descendants("title").First().Value;
|
||||
string chapterNumber = Regex.Matches(chapterName, "[0-9]+")[^1].ToString();
|
||||
|
||||
string url = chapter.Descendants("link").First().Value;
|
||||
url = url.Replace(Regex.Matches(url,"(-page-[0-9])")[0].ToString(),"");
|
||||
chapters.Add(new Chapter(manga, "", volumeNumber, chapterNumber, url));
|
||||
}
|
||||
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.OrderBy(chapter => Convert.ToSingle(chapter.chapterNumber, numberFormatDecimalPoint)).ToArray();
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
while (this._browser is null && !(progressToken?.cancellationRequested??false))
|
||||
{
|
||||
Log("Waiting for headless browser to download...");
|
||||
Thread.Sleep(1000);
|
||||
}
|
||||
if (progressToken?.cancellationRequested??false)
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
IPage page = _browser!.NewPageAsync().Result;
|
||||
IResponse response = page.GoToAsync(chapter.url).Result;
|
||||
if (response.Ok)
|
||||
{
|
||||
HtmlDocument document = new ();
|
||||
document.LoadHtml(page.GetContentAsync().Result);
|
||||
page.CloseAsync();
|
||||
|
||||
HtmlNode gallery = document.DocumentNode.Descendants("div").First(div => div.HasClass("ImageGallery"));
|
||||
HtmlNode[] images = gallery.Descendants("img").Where(img => img.HasClass("img-fluid")).ToArray();
|
||||
List<string> urls = new();
|
||||
foreach(HtmlNode galleryImage in images)
|
||||
urls.Add(galleryImage.GetAttributeValue("src", ""));
|
||||
|
||||
string comicInfoPath = Path.GetTempFileName();
|
||||
File.WriteAllText(comicInfoPath, chapter.GetComicInfoXmlString());
|
||||
|
||||
return DownloadChapterImages(urls.ToArray(), chapter.GetArchiveFilePath(settings.downloadLocation), 1, comicInfoPath, progressToken:progressToken);
|
||||
}
|
||||
|
||||
page.CloseAsync();
|
||||
return response.Status;
|
||||
}
|
||||
}
|
240
Tranga/MangaConnectors/Mangaworld.cs
Normal file
@ -0,0 +1,240 @@
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class Mangaworld: MangaConnector
|
||||
{
|
||||
public Mangaworld(GlobalBase clone) : base(clone, "Mangaworld", ["it"])
|
||||
{
|
||||
this.downloadClient = new ChromiumDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
|
||||
string requestUrl = $"https://www.mangaworld.ac/archive?keyword={sanitizedTitle}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
return Array.Empty<Manga>();
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
|
||||
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
|
||||
return publications;
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
{
|
||||
if (!document.DocumentNode.SelectSingleNode("//div[@class='comics-grid']").ChildNodes
|
||||
.Any(node => node.HasClass("entry")))
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
List<string> urls = document.DocumentNode
|
||||
.SelectNodes(
|
||||
"//div[@class='comics-grid']//div[@class='entry']//a[contains(concat(' ',normalize-space(@class),' '),'thumb')]")
|
||||
.Select(thumb => thumb.GetAttributeValue("href", "")).ToList();
|
||||
|
||||
HashSet<Manga> ret = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Manga? manga = GetMangaFromUrl(url);
|
||||
if (manga is not null)
|
||||
ret.Add((Manga)manga);
|
||||
}
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
return GetMangaFromUrl($"https://www.mangaworld.ac/manga/{publicationId}");
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return null;
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
return null;
|
||||
|
||||
Regex idRex = new (@"https:\/\/www\.mangaworld\.[a-z]{0,63}\/manga\/([0-9]+\/[0-9A-z\-]+).*");
|
||||
string id = idRex.Match(url).Groups[1].Value;
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, id, url);
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
Dictionary<string, string> altTitles = new();
|
||||
Dictionary<string, string>? links = null;
|
||||
string originalLanguage = "";
|
||||
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
|
||||
|
||||
HtmlNode infoNode = document.DocumentNode.Descendants("div").First(d => d.HasClass("info"));
|
||||
|
||||
string sortName = infoNode.Descendants("h1").First().InnerText;
|
||||
|
||||
HtmlNode metadata = infoNode.Descendants().First(d => d.HasClass("meta-data"));
|
||||
|
||||
HtmlNode altTitlesNode = metadata.SelectSingleNode("//span[text()='Titoli alternativi: ' or text()='Titolo alternativo: ']/..").ChildNodes[1];
|
||||
|
||||
string[] alts = altTitlesNode.InnerText.Split(", ");
|
||||
for(int i = 0; i < alts.Length; i++)
|
||||
altTitles.Add(i.ToString(), alts[i]);
|
||||
|
||||
HtmlNode genresNode =
|
||||
metadata.SelectSingleNode("//span[text()='Generi: ' or text()='Genero: ']/..");
|
||||
HashSet<string> tags = genresNode.SelectNodes("a").Select(node => node.InnerText).ToHashSet();
|
||||
|
||||
HtmlNode authorsNode =
|
||||
metadata.SelectSingleNode("//span[text()='Autore: ' or text()='Autori: ']/..");
|
||||
string[] authors = authorsNode.SelectNodes("a").Select(node => node.InnerText).ToArray();
|
||||
|
||||
string status = metadata.SelectSingleNode("//span[text()='Stato: ']/..").SelectNodes("a").First().InnerText;
|
||||
// ReSharper disable 5 times StringLiteralTypo
|
||||
switch (status.ToLower())
|
||||
{
|
||||
case "cancellato": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
|
||||
case "in pausa": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
|
||||
case "droppato": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
|
||||
case "finito": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
|
||||
case "in corso": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
|
||||
}
|
||||
|
||||
string posterUrl = document.DocumentNode.SelectSingleNode("//img[@class='rounded']").GetAttributeValue("src", "");
|
||||
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId.Replace('/', '-'), RequestType.MangaCover);
|
||||
|
||||
string description = document.DocumentNode.SelectSingleNode("//div[@id='noidungm']").InnerText;
|
||||
|
||||
string yearString = metadata.SelectSingleNode("//span[text()='Anno di uscita: ']/..").SelectNodes("a").First().InnerText;
|
||||
int year = Convert.ToInt32(yearString);
|
||||
|
||||
Manga manga = new (sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl, coverFileNameInCache, links,
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
string requestUrl = $"https://www.mangaworld.ac/manga/{manga.publicationId}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Chapter>();
|
||||
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
if (requestResult.htmlDocument is null)
|
||||
return Array.Empty<Chapter>();
|
||||
List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
|
||||
{
|
||||
List<Chapter> ret = new();
|
||||
|
||||
HtmlNode chaptersWrapper =
|
||||
document.DocumentNode.SelectSingleNode(
|
||||
"//div[contains(concat(' ',normalize-space(@class),' '),'chapters-wrapper')]");
|
||||
|
||||
Regex volumeRex = new(@"[Vv]olume ([0-9]+).*");
|
||||
Regex chapterRex = new(@"[Cc]apitolo ([0-9]+(?:\.[0-9]+)?).*");
|
||||
Regex idRex = new(@".*\/read\/([a-z0-9]+)(?:[?\/].*)?");
|
||||
if (chaptersWrapper.Descendants("div").Any(descendant => descendant.HasClass("volume-element")))
|
||||
{
|
||||
foreach (HtmlNode volNode in document.DocumentNode.SelectNodes("//div[contains(concat(' ',normalize-space(@class),' '),'volume-element')]"))
|
||||
{
|
||||
string volume = volumeRex.Match(volNode.SelectNodes("div").First(node => node.HasClass("volume")).SelectSingleNode("p").InnerText).Groups[1].Value;
|
||||
foreach (HtmlNode chNode in volNode.SelectNodes("div").First(node => node.HasClass("volume-chapters")).SelectNodes("div"))
|
||||
{
|
||||
|
||||
string number = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
|
||||
string url = chNode.SelectSingleNode("a").GetAttributeValue("href", "");
|
||||
string id = idRex.Match(chNode.SelectSingleNode("a").GetAttributeValue("href", "")).Groups[1].Value;
|
||||
try
|
||||
{
|
||||
ret.Add(new Chapter(manga, null, volume, number, url, id));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {number}: {e.Message}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
foreach (HtmlNode chNode in chaptersWrapper.SelectNodes("div").Where(node => node.HasClass("chapter")))
|
||||
{
|
||||
string number = chapterRex.Match(chNode.SelectSingleNode("a").SelectSingleNode("span").InnerText).Groups[1].Value;
|
||||
string url = chNode.SelectSingleNode("a").GetAttributeValue("href", "");
|
||||
string id = idRex.Match(chNode.SelectSingleNode("a").GetAttributeValue("href", "")).Groups[1].Value;
|
||||
try
|
||||
{
|
||||
ret.Add(new Chapter(manga, null, null, number, url, id));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {number}: {e.Message}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ret.Reverse();
|
||||
return ret;
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
string requestUrl = $"{chapter.url}?style=list";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return requestResult.statusCode;
|
||||
}
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return HttpStatusCode.InternalServerError;
|
||||
}
|
||||
|
||||
string[] imageUrls = ParseImageUrlsFromHtml(requestResult.htmlDocument);
|
||||
|
||||
return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage,"https://www.mangaworld.bz/", progressToken:progressToken);
|
||||
}
|
||||
|
||||
private string[] ParseImageUrlsFromHtml(HtmlDocument document)
|
||||
{
|
||||
List<string> ret = new();
|
||||
|
||||
HtmlNode imageContainer =
|
||||
document.DocumentNode.SelectSingleNode("//div[@id='page']");
|
||||
foreach(HtmlNode imageNode in imageContainer.Descendants("img"))
|
||||
ret.Add(imageNode.GetAttributeValue("src", ""));
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
}
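Mangaworld labels its chapter lists in Italian ("Volume", "Capitolo") and encodes the chapter id in the /read/ URL segment; a hedged sketch of the three regexes above applied to invented strings (class name and samples are hypothetical):

// Sketch of the Italian-label regexes used above; the sample strings are invented.
using System;
using System.Text.RegularExpressions;

public static class MangaworldLabelSketch
{
    public static void Main()
    {
        Regex volumeRex = new(@"[Vv]olume ([0-9]+).*");
        Regex chapterRex = new(@"[Cc]apitolo ([0-9]+(?:\.[0-9]+)?).*");
        Regex idRex = new(@".*\/read\/([a-z0-9]+)(?:[?\/].*)?");

        string volumeLabel = "Volume 03";
        string chapterLabel = "Capitolo 12.5";
        string chapterHref = "https://www.mangaworld.ac/manga/1234/some-title/read/abc123def?style=list";

        Console.WriteLine(volumeRex.Match(volumeLabel).Groups[1].Value);   // 03
        Console.WriteLine(chapterRex.Match(chapterLabel).Groups[1].Value); // 12.5
        Console.WriteLine(idRex.Match(chapterHref).Groups[1].Value);       // abc123def
    }
}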
|
203
Tranga/MangaConnectors/ManhuaPlus.cs
Normal file
@ -0,0 +1,203 @@
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using HtmlAgilityPack;
|
||||
using Tranga.Jobs;
|
||||
|
||||
namespace Tranga.MangaConnectors;
|
||||
|
||||
public class ManhuaPlus : MangaConnector
|
||||
{
|
||||
public ManhuaPlus(GlobalBase clone) : base(clone, "ManhuaPlus", ["en"])
|
||||
{
|
||||
this.downloadClient = new ChromiumDownloadClient(clone);
|
||||
}
|
||||
|
||||
public override Manga[] GetManga(string publicationTitle = "")
|
||||
{
|
||||
Log($"Searching Publications. Term=\"{publicationTitle}\"");
|
||||
string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(str => str.Length > 0)).ToLower();
|
||||
string requestUrl = $"https://manhuaplus.org/search?keyword={sanitizedTitle}";
|
||||
RequestResult requestResult =
|
||||
downloadClient.MakeRequest(requestUrl, RequestType.Default);
|
||||
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
if (requestResult.htmlDocument is null)
|
||||
return Array.Empty<Manga>();
|
||||
Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
|
||||
Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
|
||||
return publications;
|
||||
}
|
||||
|
||||
private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
|
||||
{
|
||||
if (document.DocumentNode.SelectSingleNode("//h1/../..").ChildNodes//I already want to not.
|
||||
.Any(node => node.InnerText.Contains("No manga found")))
|
||||
return Array.Empty<Manga>();
|
||||
|
||||
List<string> urls = document.DocumentNode
|
||||
.SelectNodes("//h1/../..//a[contains(@href, 'https://manhuaplus.org/manga/') and contains(concat(' ',normalize-space(@class),' '),' clamp ') and not(contains(@href, '/chapter'))]")
|
||||
.Select(mangaNode => mangaNode.GetAttributeValue("href", "")).ToList();
|
||||
logger?.WriteLine($"Got {urls.Count} urls.");
|
||||
|
||||
HashSet<Manga> ret = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Manga? manga = GetMangaFromUrl(url);
|
||||
if (manga is not null)
|
||||
ret.Add((Manga)manga);
|
||||
}
|
||||
|
||||
return ret.ToArray();
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromId(string publicationId)
|
||||
{
|
||||
return GetMangaFromUrl($"https://manhuaplus.org/manga/{publicationId}");
|
||||
}
|
||||
|
||||
public override Manga? GetMangaFromUrl(string url)
|
||||
{
|
||||
Regex publicationIdRex = new(@"https:\/\/manhuaplus.org\/manga\/(.*)(\/.*)*");
|
||||
string publicationId = publicationIdRex.Match(url).Groups[1].Value;
|
||||
|
||||
RequestResult requestResult = this.downloadClient.MakeRequest(url, RequestType.MangaInfo);
|
||||
if((int)requestResult.statusCode < 300 && (int)requestResult.statusCode >= 200 && requestResult.htmlDocument is not null && requestResult.redirectedToUrl != "https://manhuaplus.org/home") //When the manga doesn't exist, the site redirects to home
|
||||
return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
|
||||
return null;
|
||||
}
|
||||
|
||||
private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
|
||||
{
|
||||
string originalLanguage = "", status = "";
|
||||
Dictionary<string, string> altTitles = new(), links = new();
|
||||
HashSet<string> tags = new();
|
||||
Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
|
||||
|
||||
HtmlNode posterNode = document.DocumentNode.SelectSingleNode("/html/body/main/div/div/div[2]/div[1]/figure/a/img");//BRUH
|
||||
Regex posterRex = new(@".*(\/uploads/covers/[a-zA-Z0-9\-\._\~\!\$\&\'\(\)\*\+\,\;\=\:\@]+).*");
|
||||
string posterUrl = $"https://manhuaplus.org/{posterRex.Match(posterNode.GetAttributeValue("src", "")).Groups[1].Value}";
|
||||
string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);
|
||||
|
||||
HtmlNode titleNode = document.DocumentNode.SelectSingleNode("//h1");
|
||||
string sortName = titleNode.InnerText.Replace("\n", "");
|
||||
|
||||
List<string> authors = new();
|
||||
try
|
||||
{
|
||||
HtmlNode[] authorsNodes = document.DocumentNode
|
||||
.SelectNodes("//a[contains(@href, 'https://manhuaplus.org/authors/')]")
|
||||
.ToArray();
|
||||
foreach (HtmlNode authorNode in authorsNodes)
|
||||
authors.Add(authorNode.InnerText);
|
||||
}
|
||||
catch (ArgumentNullException e)
|
||||
{
|
||||
Log("No authors found.");
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
HtmlNode[] genreNodes = document.DocumentNode
|
||||
.SelectNodes("//a[contains(@href, 'https://manhuaplus.org/genres/')]").ToArray();
|
||||
foreach (HtmlNode genreNode in genreNodes)
|
||||
tags.Add(genreNode.InnerText.Replace("\n", ""));
|
||||
}
|
||||
catch (ArgumentNullException e)
|
||||
{
|
||||
Log("No genres found");
|
||||
}
|
||||
|
||||
Regex yearRex = new(@"(?:[0-9]{1,2}\/){2}([0-9]{2,4}) [0-9]{1,2}:[0-9]{1,2}");
|
||||
HtmlNode yearNode = document.DocumentNode.SelectSingleNode("//aside//i[contains(concat(' ',normalize-space(@class),' '),' fa-clock ')]/../span");
|
||||
Match match = yearRex.Match(yearNode.InnerText);
|
||||
int year = match.Success && match.Groups[1].Success ? int.Parse(match.Groups[1].Value) : 1960;
|
||||
|
||||
status = document.DocumentNode.SelectSingleNode("//aside//i[contains(concat(' ',normalize-space(@class),' '),' fa-rss ')]/../span").InnerText.Replace("\n", "");
|
||||
switch (status.ToLower())
|
||||
{
|
||||
case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
|
||||
case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
|
||||
case "discontinued": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
|
||||
case "complete": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
|
||||
case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
|
||||
}
|
||||
|
||||
HtmlNode descriptionNode = document.DocumentNode
|
||||
.SelectSingleNode("//div[@id='syn-target']");
|
||||
string description = descriptionNode.InnerText;
|
||||
|
||||
Manga manga = new(sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
|
||||
coverFileNameInCache, links,
|
||||
year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
|
||||
AddMangaToCache(manga);
|
||||
return manga;
|
||||
}
|
||||
|
||||
public override Chapter[] GetChapters(Manga manga, string language="en")
|
||||
{
|
||||
Log($"Getting chapters {manga}");
|
||||
RequestResult result = downloadClient.MakeRequest($"https://manhuaplus.org/manga/{manga.publicationId}", RequestType.Default);
|
||||
if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
|
||||
{
|
||||
return Array.Empty<Chapter>();
|
||||
}
|
||||
|
||||
HtmlNodeCollection chapterNodes = result.htmlDocument.DocumentNode.SelectNodes("//li[contains(concat(' ',normalize-space(@class),' '),' chapter ')]//a");
|
||||
string[] urls = chapterNodes.Select(node => node.GetAttributeValue("href", "")).ToArray();
|
||||
Regex urlRex = new (@".*\/chapter-([0-9\-]+).*");
|
||||
|
||||
List<Chapter> chapters = new();
|
||||
foreach (string url in urls)
|
||||
{
|
||||
Match rexMatch = urlRex.Match(url);
|
||||
|
||||
string volumeNumber = "1";
|
||||
string chapterNumber = rexMatch.Groups[1].Value;
|
||||
string fullUrl = url;
|
||||
try
|
||||
{
|
||||
chapters.Add(new Chapter(manga, "", volumeNumber, chapterNumber, fullUrl));
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Log($"Failed to load chapter {chapterNumber}: {e.Message}");
|
||||
}
|
||||
}
|
||||
//Return Chapters ordered by Chapter-Number
|
||||
Log($"Got {chapters.Count} chapters. {manga}");
|
||||
return chapters.Order().ToArray();
|
||||
}
|
||||
|
||||
public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
|
||||
{
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Manga chapterParentManga = chapter.parentManga;
|
||||
if (progressToken?.cancellationRequested ?? false)
|
||||
{
|
||||
progressToken.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
|
||||
|
||||
RequestResult requestResult = this.downloadClient.MakeRequest(chapter.url, RequestType.Default);
|
||||
if (requestResult.htmlDocument is null)
|
||||
{
|
||||
progressToken?.Cancel();
|
||||
return HttpStatusCode.RequestTimeout;
|
||||
}
|
||||
|
||||
HtmlDocument document = requestResult.htmlDocument;
|
||||
|
||||
HtmlNode[] images = document.DocumentNode.SelectNodes("//a[contains(concat(' ',normalize-space(@class),' '),' readImg ')]/img").ToArray();
|
||||
List<string> urls = images.Select(node => node.GetAttributeValue("src", "")).ToList();
|
||||
|
||||
return DownloadChapterImages(urls.ToArray(), chapter, RequestType.MangaImage, progressToken:progressToken);
|
||||
}
|
||||
}
|
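The chapter regex in GetChapters above is the part most likely to need attention if the site changes its URL scheme. A short hedged check of its behaviour; the sample URLs are illustrative, not taken from the commit:

using System;
using System.Text.RegularExpressions;

Regex urlRex = new(@".*\/chapter-([0-9\-]+).*");
// ".../chapter-12"   -> group 1 captures "12"
// ".../chapter-12-5" -> group 1 captures "12-5" (the dash marks a sub-chapter)
foreach (string url in new[] { "https://manhuaplus.org/manga/some-title/chapter-12", "https://manhuaplus.org/manga/some-title/chapter-12-5" })
    Console.WriteLine(urlRex.Match(url).Groups[1].Value);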
Tranga/MangaConnectors/RequestResult.cs (Normal file, 27 lines)
@@ -0,0 +1,27 @@
using System.Net;
using HtmlAgilityPack;

namespace Tranga.MangaConnectors;

public struct RequestResult
{
    public HttpStatusCode statusCode { get; }
    public Stream result { get; }
    public bool hasBeenRedirected { get; }
    public string? redirectedToUrl { get; }
    public HtmlDocument? htmlDocument { get; }

    public RequestResult(HttpStatusCode statusCode, HtmlDocument? htmlDocument, Stream result)
    {
        this.statusCode = statusCode;
        this.htmlDocument = htmlDocument;
        this.result = result;
    }

    public RequestResult(HttpStatusCode statusCode, HtmlDocument? htmlDocument, Stream result, bool hasBeenRedirected, string redirectedTo)
        : this(statusCode, htmlDocument, result)
    {
        this.hasBeenRedirected = hasBeenRedirected;
        redirectedToUrl = redirectedTo;
    }
}
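Every connector in this change consumes RequestResult the same way: check that the status code is in the 2xx range, then check the nullable htmlDocument before touching it. A hedged sketch of that calling pattern; downloadClient, url and the return value are stand-ins from the surrounding connector classes, not new API:

// Sketch of the common consumption pattern inside a MangaConnector method.
RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
    return null;                                        // non-2xx response: bail out early
if (requestResult.htmlDocument is null)
    return null;                                        // request succeeded but nothing was parsed
HtmlDocument document = requestResult.htmlDocument;     // safe to dereference from here on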
Tranga/MangaConnectors/RequestType.cs (Normal file, 11 lines)
@@ -0,0 +1,11 @@
namespace Tranga.MangaConnectors;

public enum RequestType : byte
{
    Default = 0,
    MangaDexFeed = 1,
    MangaImage = 2,
    MangaCover = 3,
    MangaDexImage = 5,
    MangaInfo = 6
}
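These enum values are used as keys for per-category rate limits (see the Settings/customRequestLimit endpoints in Server.cs further down). A hedged sketch of how such a lookup could be shaped; the dictionary and the numbers here are purely illustrative and not the actual TrangaSettings implementation:

// Illustrative only: requests-per-minute per RequestType, keyed by the enum above.
Dictionary<RequestType, int> requestLimits = new()
{
    { RequestType.Default, 60 },
    { RequestType.MangaDexFeed, 60 },
    { RequestType.MangaImage, 240 },
    { RequestType.MangaCover, 240 },
    { RequestType.MangaDexImage, 240 },
    { RequestType.MangaInfo, 60 }
};
int allowedPerMinute = requestLimits[RequestType.MangaImage];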
Tranga/MangaConnectors/Webtoons.cs (Normal file, 273 lines)
@@ -0,0 +1,273 @@
using System.Net;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using Tranga.Jobs;

namespace Tranga.MangaConnectors;

public class Webtoons : MangaConnector
{

    public Webtoons(GlobalBase clone) : base(clone, "Webtoons", ["en"])
    {
        this.downloadClient = new HttpDownloadClient(clone);
    }

    // Done
    public override Manga[] GetManga(string publicationTitle = "")
    {
        string sanitizedTitle = string.Join(' ', Regex.Matches(publicationTitle, "[A-z]*").Where(m => m.Value.Length > 0)).ToLower();
        Log($"Searching Publications. Term=\"{publicationTitle}\"");
        string requestUrl = $"https://www.webtoons.com/en/search?keyword={sanitizedTitle}&searchType=WEBTOON";
        RequestResult requestResult =
            downloadClient.MakeRequest(requestUrl, RequestType.Default);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300) {
            Log($"Failed to retrieve site");
            return Array.Empty<Manga>();
        }

        if (requestResult.htmlDocument is null)
        {
            Log($"Failed to retrieve site");
            return Array.Empty<Manga>();
        }

        Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
        Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");
        return publications;
    }

    // Done
    public override Manga? GetMangaFromId(string publicationId)
    {
        PublicationManager pb = new PublicationManager(publicationId);
        return GetMangaFromUrl($"https://www.webtoons.com/en/{pb.Category}/{pb.Title}/list?title_no={pb.Id}");
    }

    // Done
    public override Manga? GetMangaFromUrl(string url)
    {
        RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300) {
            return null;
        }
        if (requestResult.htmlDocument is null)
        {
            Log($"Failed to retrieve site");
            return null;
        }
        Regex regex = new Regex(@".*webtoons\.com\/en\/(?<category>[^\/]+)\/(?<title>[^\/]+)\/list\?title_no=(?<id>\d+).*");
        Match match = regex.Match(url);

        if(match.Success) {
            PublicationManager pm = new PublicationManager(match.Groups["title"].Value, match.Groups["category"].Value, match.Groups["id"].Value);
            return ParseSinglePublicationFromHtml(requestResult.htmlDocument, pm.getPublicationId(), url);
        }
        Log($"Failed match Regex ID");
        return null;
    }

    // Done
    private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
    {
        HtmlNode mangaList = document.DocumentNode.SelectSingleNode("//ul[contains(@class, 'card_lst')]");
        if (!mangaList.ChildNodes.Any(node => node.Name == "li")) {
            Log($"Failed to parse publication");
            return Array.Empty<Manga>();
        }

        List<string> urls = document.DocumentNode
            .SelectNodes("//ul[contains(@class, 'card_lst')]/li/a")
            .Select(node => node.GetAttributeValue("href", "https://www.webtoons.com"))
            .ToList();

        HashSet<Manga> ret = new();
        foreach (string url in urls)
        {
            Manga? manga = GetMangaFromUrl(url);
            if (manga is not null)
                ret.Add((Manga)manga);
        }

        return ret.ToArray();
    }

    private string capitalizeString(string str = "") {
        if(str.Length == 0) return "";
        if(str.Length == 1) return str.ToUpper();
        return char.ToUpper(str[0]) + str.Substring(1).ToLower();
    }

    // Done
    private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
    {
        HtmlNode infoNode1 = document.DocumentNode.SelectSingleNode("//*[@id='content']/div[2]/div[1]/div[1]");
        HtmlNode infoNode2 = document.DocumentNode.SelectSingleNode("//*[@id='content']/div[2]/div[2]/div[2]");

        string sortName = infoNode1.SelectSingleNode(".//h1[contains(@class, 'subj')]").InnerText;
        string description = infoNode2.SelectSingleNode(".//p[contains(@class, 'summary')]")
            .InnerText.Trim();

        HtmlNode posterNode = document.DocumentNode.SelectSingleNode("//div[contains(@class, 'detail_body') and contains(@class, 'banner')]");

        Regex regex = new Regex(@"url\('(?<url>.*?)'\)");
        Match match = regex.Match(posterNode.GetAttributeValue("style", ""));

        string posterUrl = match.Groups["url"].Value;
        string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover, websiteUrl);

        string genre = infoNode1.SelectSingleNode(".//h2[contains(@class, 'genre')]")
            .InnerText.Trim();
        string[] tags = [ genre ];

        List<HtmlNode> authorsNodes = infoNode1.SelectSingleNode(".//div[contains(@class, 'author_area')]").Descendants("a").ToList();
        List<string> authors = authorsNodes.Select(node => node.InnerText.Trim()).ToList();

        string originalLanguage = "";

        int year = DateTime.Now.Year;

        string status1 = infoNode2.SelectSingleNode(".//p").InnerText;
        string status2 = infoNode2.SelectSingleNode(".//p/span").InnerText;
        Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
        if(status2.Length == 0 || status1.ToLower() == "completed") {
            releaseStatus = Manga.ReleaseStatusByte.Completed;
        } else if(status2.ToLower() == "up") {
            releaseStatus = Manga.ReleaseStatusByte.Continuing;
        }

        Manga manga = new(sortName, authors, description, new Dictionary<string, string>(), tags, posterUrl, coverFileNameInCache, new Dictionary<string, string>(),
            year, originalLanguage, publicationId, releaseStatus, websiteUrl: websiteUrl);
        AddMangaToCache(manga);
        return manga;
    }

    // Done
    public override Chapter[] GetChapters(Manga manga, string language = "en")
    {
        PublicationManager pm = new PublicationManager(manga.publicationId);
        string requestUrl = $"https://www.webtoons.com/en/{pm.Category}/{pm.Title}/list?title_no={pm.Id}";
        // Leaving this in for verification if the page exists
        RequestResult requestResult =
            downloadClient.MakeRequest(requestUrl, RequestType.Default);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
            return Array.Empty<Chapter>();

        // Get number of pages
        int pages = requestResult.htmlDocument.DocumentNode
            .SelectNodes("//div[contains(@class, 'paginate')]/a")
            .ToList()
            .Count;
        List<Chapter> chapters = new List<Chapter>();

        for(int page = 1; page <= pages; page++) {
            string pageRequestUrl = $"{requestUrl}&page={page}";
            chapters.AddRange(ParseChaptersFromHtml(manga, pageRequestUrl));
        }
        Log($"Got {chapters.Count} chapters. {manga}");
        return chapters.Order().ToArray();
    }

    // Done
    private List<Chapter> ParseChaptersFromHtml(Manga manga, string mangaUrl)
    {
        RequestResult result = downloadClient.MakeRequest(mangaUrl, RequestType.Default);
        if ((int)result.statusCode < 200 || (int)result.statusCode >= 300 || result.htmlDocument is null)
        {
            Log("Failed to load site");
            return new List<Chapter>();
        }

        List<Chapter> ret = new();

        foreach (HtmlNode chapterInfo in result.htmlDocument.DocumentNode.SelectNodes("//ul/li[contains(@class, '_episodeItem')]"))
        {
            HtmlNode infoNode = chapterInfo.SelectSingleNode(".//a");
            string url = infoNode.GetAttributeValue("href", "");

            string id = chapterInfo.GetAttributeValue("id", "");
            if(id == "") continue;
            string? volumeNumber = null;
            string chapterNumber = chapterInfo.GetAttributeValue("data-episode-no", "");
            if(chapterNumber == "") continue;
            string chapterName = infoNode.SelectSingleNode(".//span[contains(@class, 'subj')]/span").InnerText.Trim();
            ret.Add(new Chapter(manga, chapterName, volumeNumber, chapterNumber, url));
        }

        return ret;
    }

    public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
    {
        if (progressToken?.cancellationRequested ?? false)
        {
            progressToken.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        Manga chapterParentManga = chapter.parentManga;
        Log($"Retrieving chapter-info {chapter} {chapterParentManga}");
        string requestUrl = chapter.url;
        // Leaving this in to check if the page exists
        RequestResult requestResult =
            downloadClient.MakeRequest(requestUrl, RequestType.Default);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
        {
            progressToken?.Cancel();
            return requestResult.statusCode;
        }

        string[] imageUrls = ParseImageUrlsFromHtml(requestUrl);
        return DownloadChapterImages(imageUrls, chapter, RequestType.MangaImage, progressToken:progressToken, referrer: requestUrl);
    }

    private string[] ParseImageUrlsFromHtml(string mangaUrl)
    {
        RequestResult requestResult =
            downloadClient.MakeRequest(mangaUrl, RequestType.Default);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
        {
            return Array.Empty<string>();
        }
        if (requestResult.htmlDocument is null)
        {
            Log($"Failed to retrieve site");
            return Array.Empty<string>();
        }

        return requestResult.htmlDocument.DocumentNode
            .SelectNodes("//*[@id='_imageList']/img")
            .Select(node =>
                node.GetAttributeValue("data-url", ""))
            .ToArray();
    }
}

internal class PublicationManager {
    public PublicationManager(string title = "", string category = "", string id = "") {
        this.Title = title;
        this.Category = category;
        this.Id = id;
    }

    public PublicationManager(string publicationId) {
        string[] parts = publicationId.Split("|");
        if(parts.Length == 3) {
            this.Title = parts[0];
            this.Category = parts[1];
            this.Id = parts[2];
        } else {
            this.Title = "";
            this.Category = "";
            this.Id = "";
        }
    }

    public string getPublicationId() {
        return $"{this.Title}|{this.Category}|{this.Id}";
    }

    public string Title { get; set; }
    public string Category { get; set; }
    public string Id { get; set; }
}
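PublicationManager round-trips the three webtoons URL components through a single pipe-separated publicationId. A short hedged usage sketch; the title, category and id values are placeholders:

// "tower-of-god|fantasy|95" <-> (Title, Category, Id); placeholder values throughout.
PublicationManager pm = new("tower-of-god", "fantasy", "95");
string publicationId = pm.getPublicationId();           // "tower-of-god|fantasy|95"
PublicationManager roundTripped = new(publicationId);   // splits on '|' back into the three parts
string listUrl = $"https://www.webtoons.com/en/{roundTripped.Category}/{roundTripped.Title}/list?title_no={roundTripped.Id}";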
Tranga/MangaConnectors/WeebCentral.cs (Normal file, 215 lines)
@@ -0,0 +1,215 @@
using System.Net;
using System.Text.RegularExpressions;
using HtmlAgilityPack;
using Tranga.Jobs;

namespace Tranga.MangaConnectors;

public class Weebcentral : MangaConnector
{
    private readonly string _baseUrl = "https://weebcentral.com";

    private readonly string[] _filterWords =
        { "a", "the", "of", "as", "to", "no", "for", "on", "with", "be", "and", "in", "wa", "at", "be", "ni" };

    public Weebcentral(GlobalBase clone) : base(clone, "Weebcentral", ["en"])
    {
        downloadClient = new ChromiumDownloadClient(clone);
    }

    public override Manga[] GetManga(string publicationTitle = "")
    {
        Log($"Searching Publications. Term=\"{publicationTitle}\"");
        const int limit = 32; //How many values we want returned at once
        int offset = 0; //"Page"
        string requestUrl =
            $"{_baseUrl}/search/data?limit={limit}&offset={offset}&text={publicationTitle}&sort=Best+Match&order=Ascending&official=Any&display_mode=Minimal%20Display";
        RequestResult requestResult =
            downloadClient.MakeRequest(requestUrl, RequestType.Default);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300 ||
            requestResult.htmlDocument == null)
        {
            Log($"Failed to retrieve search: {requestResult.statusCode}");
            return [];
        }

        Manga[] publications = ParsePublicationsFromHtml(requestResult.htmlDocument);
        Log($"Retrieved {publications.Length} publications. Term=\"{publicationTitle}\"");

        return publications;
    }

    private Manga[] ParsePublicationsFromHtml(HtmlDocument document)
    {
        if (document.DocumentNode.SelectNodes("//article") == null)
            return [];

        List<string> urls = document.DocumentNode.SelectNodes("/html/body/article/a[@class='link link-hover tooltip tooltip-bottom']")
            .Select(elem => elem.GetAttributeValue("href", "")).ToList();

        HashSet<Manga> ret = new();
        foreach (string url in urls)
        {
            Manga? manga = GetMangaFromUrl(url);
            if (manga is not null)
                ret.Add((Manga)manga);
        }

        return ret.ToArray();
    }

    public override Manga? GetMangaFromUrl(string url)
    {
        Regex publicationIdRex = new(@"https:\/\/weebcentral\.com\/series\/(\w*)\/(.*)");
        string publicationId = publicationIdRex.Match(url).Groups[1].Value;

        RequestResult requestResult = downloadClient.MakeRequest(url, RequestType.MangaInfo);
        if ((int)requestResult.statusCode < 300 && (int)requestResult.statusCode >= 200 &&
            requestResult.htmlDocument is not null)
            return ParseSinglePublicationFromHtml(requestResult.htmlDocument, publicationId, url);
        return null;
    }

    private Manga ParseSinglePublicationFromHtml(HtmlDocument document, string publicationId, string websiteUrl)
    {
        HtmlNode? posterNode =
            document.DocumentNode.SelectSingleNode("//section[@class='flex items-center justify-center']/picture/img");
        string posterUrl = posterNode?.GetAttributeValue("src", "") ?? "";
        string coverFileNameInCache = SaveCoverImageToCache(posterUrl, publicationId, RequestType.MangaCover);

        HtmlNode? titleNode = document.DocumentNode.SelectSingleNode("//section/h1");
        string sortName = titleNode?.InnerText ?? "Undefined";

        HtmlNode[] authorsNodes =
            document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Author(s): ']/span")?.ToArray() ?? [];
        List<string> authors = authorsNodes.Select(n => n.InnerText).ToList();

        HtmlNode[] genreNodes =
            document.DocumentNode.SelectNodes("//ul/li[strong/text() = 'Tags(s): ']/span")?.ToArray() ?? [];
        HashSet<string> tags = genreNodes.Select(n => n.InnerText).ToHashSet();

        HtmlNode? statusNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Status: ']/a");
        string status = statusNode?.InnerText ?? "";
        Log("unable to parse status");
        Manga.ReleaseStatusByte releaseStatus = Manga.ReleaseStatusByte.Unreleased;
        switch (status.ToLower())
        {
            case "cancelled": releaseStatus = Manga.ReleaseStatusByte.Cancelled; break;
            case "hiatus": releaseStatus = Manga.ReleaseStatusByte.OnHiatus; break;
            case "complete": releaseStatus = Manga.ReleaseStatusByte.Completed; break;
            case "ongoing": releaseStatus = Manga.ReleaseStatusByte.Continuing; break;
        }

        HtmlNode? yearNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Released: ']/span");
        int year = Convert.ToInt32(yearNode?.InnerText ?? "0");

        HtmlNode? descriptionNode = document.DocumentNode.SelectSingleNode("//ul/li[strong/text() = 'Description']/p");
        string description = descriptionNode?.InnerText ?? "Undefined";

        HtmlNode[] altTitleNodes = document.DocumentNode
            .SelectNodes("//ul/li[strong/text() = 'Associated Name(s)']/ul/li")?.ToArray() ?? [];
        Dictionary<string, string> altTitles = new(), links = new();
        for (int i = 0; i < altTitleNodes.Length; i++)
            altTitles.Add(i.ToString(), altTitleNodes[i].InnerText);

        string originalLanguage = "";

        Manga manga = new(sortName, authors.ToList(), description, altTitles, tags.ToArray(), posterUrl,
            coverFileNameInCache, links,
            year, originalLanguage, publicationId, releaseStatus, websiteUrl);
        AddMangaToCache(manga);
        return manga;
    }

    public override Manga? GetMangaFromId(string publicationId)
    {
        return GetMangaFromUrl($"https://weebcentral.com/series/{publicationId}");
    }

    public override Chapter[] GetChapters(Manga manga, string language = "en")
    {
        Log($"Getting chapters {manga}");
        string requestUrl = $"{_baseUrl}/series/{manga.publicationId}/full-chapter-list";
        RequestResult requestResult =
            downloadClient.MakeRequest(requestUrl, RequestType.Default);
        if ((int)requestResult.statusCode < 200 || (int)requestResult.statusCode >= 300)
            return [];

        //Return Chapters ordered by Chapter-Number
        if (requestResult.htmlDocument is null)
            return [];
        List<Chapter> chapters = ParseChaptersFromHtml(manga, requestResult.htmlDocument);
        Log($"Got {chapters.Count} chapters. {manga}");
        return chapters.OrderByDescending(c => c.name).ThenBy(c => c.volumeNumber).ThenBy(c => c.chapterNumber).ToArray();
    }

    private List<Chapter> ParseChaptersFromHtml(Manga manga, HtmlDocument document)
    {
        HtmlNode? chaptersWrapper = document.DocumentNode.SelectSingleNode("/html/body");

        Regex chapterRex = new(@"(\d+(?:\.\d+)*)");
        Regex chapterNameRex = new(@"(\w* )+");
        Regex idRex = new(@"https:\/\/weebcentral\.com\/chapters\/(\w*)");

        List<Chapter> ret = chaptersWrapper.Descendants("a").Select(elem =>
        {
            string url = elem.GetAttributeValue("href", "") ?? "Undefined";

            if (!url.StartsWith("https://") && !url.StartsWith("http://"))
                return new Chapter(manga, null, null, "-1", "undefined");

            Match idMatch = idRex.Match(url);
            string? id = idMatch.Success ? idMatch.Groups[1].Value : null;

            string chapterNode = elem.SelectSingleNode("span[@class='grow flex items-center gap-2']/span")?.InnerText ??
                                 "Undefined";

            MatchCollection chapterNumberMatch = chapterRex.Matches(chapterNode);
            string chapterNumber = chapterNumberMatch.Count > 0 ? chapterNumberMatch[^1].Groups[1].Value : "-1";
            MatchCollection chapterNameMatch = chapterNameRex.Matches(chapterNode);
            string chapterName = chapterNameMatch.Count > 0
                ? string.Join(" - ",
                    chapterNameMatch.Select(m => m.Groups[1].Value.Trim())
                        .Where(name => name.Length > 0 && !name.Equals("Chapter", StringComparison.OrdinalIgnoreCase)).ToArray()).Trim()
                : "";

            return new Chapter(manga, chapterName != "" ? chapterName : null, null, chapterNumber, url, id);
        }).Where(elem => elem.chapterNumber != -1 && elem.url != "undefined").ToList();

        ret.Reverse();
        return ret;
    }

    public override HttpStatusCode DownloadChapter(Chapter chapter, ProgressToken? progressToken = null)
    {
        if (progressToken?.cancellationRequested ?? false)
        {
            progressToken.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        Manga chapterParentManga = chapter.parentManga;
        if (progressToken?.cancellationRequested ?? false)
        {
            progressToken.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        Log($"Retrieving chapter-info {chapter} {chapterParentManga}");

        RequestResult requestResult = downloadClient.MakeRequest(chapter.url, RequestType.Default);
        if (requestResult.htmlDocument is null)
        {
            progressToken?.Cancel();
            return HttpStatusCode.RequestTimeout;
        }

        HtmlDocument? document = requestResult.htmlDocument;

        HtmlNode[] imageNodes =
            document.DocumentNode.SelectNodes($"//section[@hx-get='{chapter.url}/images']/img")?.ToArray() ?? [];
        string[] urls = imageNodes.Select(imgNode => imgNode.GetAttributeValue("src", "")).ToArray();

        return DownloadChapterImages(urls, chapter, RequestType.MangaImage, progressToken: progressToken, referrer: "https://weebcentral.com/");
    }
}
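The chapter parser above relies on chapterRex picking the last numeric group out of the chapter label. A hedged illustration of that behaviour; the sample label is made up:

using System.Text.RegularExpressions;

Regex chapterRex = new(@"(\d+(?:\.\d+)*)");
// "Season 2 Chapter 10.5" matches both "2" and "10.5"; the index [^1] takes the last match.
MatchCollection matches = chapterRex.Matches("Season 2 Chapter 10.5");
string chapterNumber = matches.Count > 0 ? matches[^1].Groups[1].Value : "-1"; // "10.5"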
@@ -13,7 +13,9 @@ public class Gotify : NotificationConnector
    [JsonConstructor]
    public Gotify(GlobalBase clone, string endpoint, string appToken) : base(clone, NotificationConnectorType.Gotify)
    {
        this.endpoint = endpoint;
        if (!baseUrlRex.IsMatch(endpoint))
            throw new ArgumentException("endpoint does not match pattern");
        this.endpoint = baseUrlRex.Match(endpoint).Value;;
        this.appToken = appToken;
    }

@@ -22,7 +24,7 @@ public class Gotify : NotificationConnector
        return $"Gotify {endpoint}";
    }

    public override void SendNotification(string title, string notificationText)
    protected override void SendNotificationInternal(string title, string notificationText)
    {
        Log($"Sending notification: {title} - {notificationText}");
        MessageData message = new(title, notificationText);
@@ -20,7 +20,7 @@ public class LunaSea : NotificationConnector
        return $"LunaSea {id}";
    }

    public override void SendNotification(string title, string notificationText)
    protected override void SendNotificationInternal(string title, string notificationText)
    {
        Log($"Sending notification: {title} - {notificationText}");
        MessageData message = new(title, notificationText);
@@ -3,13 +3,72 @@
public abstract class NotificationConnector : GlobalBase
{
    public readonly NotificationConnectorType notificationConnectorType;
    private DateTime? _notificationRequested = null;
    private readonly Thread? _notificationBufferThread = null;
    private const int NoChangeTimeout = 3, BiggestInterval = 30;
    private List<KeyValuePair<string, string>> _notifications = new();

    protected NotificationConnector(GlobalBase clone, NotificationConnectorType notificationConnectorType) : base(clone)
    {
        Log($"Creating notificationConnector {Enum.GetName(notificationConnectorType)}");
        this.notificationConnectorType = notificationConnectorType;


        if (TrangaSettings.bufferLibraryUpdates)
        {
            _notificationBufferThread = new(CheckNotificationBuffer);
            _notificationBufferThread.Start();
        }
    }

    public enum NotificationConnectorType : byte { Gotify = 0, LunaSea = 1 }
    private void CheckNotificationBuffer()
    {
        while (true)
        {
            if (_notificationRequested is not null && DateTime.Now.Subtract((DateTime)_notificationRequested) > TimeSpan.FromMinutes(NoChangeTimeout)) //If no updates have been requested for NoChangeTimeout minutes, update library
            {
                string[] uniqueTitles = _notifications.DistinctBy(n => n.Key).Select(n => n.Key).ToArray();
                Log($"Notification Buffer sending! Notifications: {string.Join(", ", uniqueTitles)}");
                foreach (string ut in uniqueTitles)
                {
                    string[] texts = _notifications.Where(n => n.Key == ut).Select(n => n.Value).ToArray();
                    SendNotificationInternal($"{ut} ({texts.Length})", string.Join('\n', texts));
                }
                _notificationRequested = null;
                _notifications.Clear();
            }
            Thread.Sleep(100);
        }
    }

    public abstract void SendNotification(string title, string notificationText);
    public enum NotificationConnectorType : byte { Gotify = 0, LunaSea = 1, Ntfy = 2 }

    public void SendNotification(string title, string notificationText, bool buffer = false)
    {
        _notificationRequested ??= DateTime.Now;
        if (!TrangaSettings.bufferNotifications || !buffer)
        {
            SendNotificationInternal(title, notificationText);
            return;
        }
        _notifications.Add(new(title, notificationText));
        if (_notificationRequested is not null &&
            DateTime.Now.Subtract((DateTime)_notificationRequested) > TimeSpan.FromMinutes(BiggestInterval)) //If the last update has been more than BiggestInterval minutes ago, update library
        {
            string[] uniqueTitles = _notifications.DistinctBy(n => n.Key).Select(n => n.Key).ToArray();
            foreach (string ut in uniqueTitles)
            {
                string[] texts = _notifications.Where(n => n.Key == ut).Select(n => n.Value).ToArray();
                SendNotificationInternal(ut, string.Join('\n', texts));
            }
            _notificationRequested = null;
            _notifications.Clear();
        }
        else if(_notificationRequested is not null)
        {
            Log($"Buffering Notifications (Updates in latest {((DateTime)_notificationRequested).Add(TimeSpan.FromMinutes(BiggestInterval)).Subtract(DateTime.Now)} or {((DateTime)_notificationRequested).Add(TimeSpan.FromMinutes(NoChangeTimeout)).Subtract(DateTime.Now)})");
        }
    }

    protected abstract void SendNotificationInternal(string title, string notificationText);
}
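With this change, callers decide per call whether a notification may be held back: unbuffered calls go out immediately, buffered ones are collected and flushed later as a single grouped notification. A hedged sketch from the caller's side; the connector instance and message texts are stand-ins:

// connector is any NotificationConnector (Gotify, LunaSea, Ntfy).
connector.SendNotification("Tranga", "Download finished");                    // sent immediately
connector.SendNotification("Tranga", "Chapter 12 downloaded", buffer: true);  // queued
connector.SendNotification("Tranga", "Chapter 13 downloaded", buffer: true);  // queued
// When buffering is enabled in TrangaSettings, the two queued lines are later flushed
// as one notification titled "Tranga (2)" with both texts joined by newlines.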
@@ -21,11 +21,15 @@ public class NotificationManagerJsonConverter : JsonConverter
        JsonSerializer serializer)
    {
        JObject jo = JObject.Load(reader);
        if (jo["notificationConnectorType"]!.Value<byte>() == (byte)NotificationConnector.NotificationConnectorType.Gotify)
            return new Gotify(this._clone, jo.GetValue("endpoint")!.Value<string>()!, jo.GetValue("appToken")!.Value<string>()!);
        else if (jo["notificationConnectorType"]!.Value<byte>() ==
                 (byte)NotificationConnector.NotificationConnectorType.LunaSea)
            return new LunaSea(this._clone, jo.GetValue("id")!.Value<string>()!);
        switch (jo["notificationConnectorType"]!.Value<byte>())
        {
            case (byte)NotificationConnector.NotificationConnectorType.Gotify:
                return new Gotify(this._clone, jo.GetValue("endpoint")!.Value<string>()!, jo.GetValue("appToken")!.Value<string>()!);
            case (byte)NotificationConnector.NotificationConnectorType.LunaSea:
                return new LunaSea(this._clone, jo.GetValue("id")!.Value<string>()!);
            case (byte)NotificationConnector.NotificationConnectorType.Ntfy:
                return new Ntfy(this._clone, jo.GetValue("endpoint")!.Value<string>()!, jo.GetValue("topic")!.Value<string>()!, jo.GetValue("auth")!.Value<string>()!);
        }

        throw new Exception();
    }

Tranga/NotificationConnectors/Ntfy.cs (Normal file, 87 lines)
@@ -0,0 +1,87 @@
using System.Text;
using System.Text.RegularExpressions;
using Newtonsoft.Json;

namespace Tranga.NotificationConnectors;

public class Ntfy : NotificationConnector
{
    // ReSharper disable twice MemberCanBePrivate.Global
    public string endpoint { get; init; }
    public string auth { get; init; }
    public string topic { get; init; }
    private readonly HttpClient _client = new();

    [JsonConstructor]
    public Ntfy(GlobalBase clone, string endpoint, string topic, string auth) : base(clone, NotificationConnectorType.Ntfy)
    {
        this.endpoint = endpoint;
        this.topic = topic;
        this.auth = auth;
    }

    public Ntfy(GlobalBase clone, string endpoint, string username, string password, string? topic = null) :
        this(clone, EndpointAndTopicFromUrl(endpoint)[0], topic??EndpointAndTopicFromUrl(endpoint)[1], AuthFromUsernamePassword(username, password))
    {

    }

    private static string AuthFromUsernamePassword(string username, string password)
    {
        string authHeader = "Basic " + Convert.ToBase64String(Encoding.UTF8.GetBytes($"{username}:{password}"));
        string authParam = Convert.ToBase64String(Encoding.UTF8.GetBytes(authHeader)).Replace("=","");
        return authParam;
    }

    private static string[] EndpointAndTopicFromUrl(string url)
    {
        string[] ret = new string[2];
        if (!baseUrlRex.IsMatch(url))
            throw new ArgumentException("url does not match pattern");
        Regex rootUriRex = new(@"(https?:\/\/[a-zA-Z0-9-\.]+\.[a-zA-Z0-9]+)(?:\/([a-zA-Z0-9-\.]+))?.*");
        Match match = rootUriRex.Match(url);
        if(!match.Success)
            throw new ArgumentException($"Error getting URI from provided endpoint-URI: {url}");

        ret[0] = match.Groups[1].Value;
        ret[1] = match.Groups[2].Success && match.Groups[2].Value.Length > 0 ? match.Groups[2].Value : "tranga";

        return ret;
    }

    public override string ToString()
    {
        return $"Ntfy {endpoint} {topic}";
    }

    protected override void SendNotificationInternal(string title, string notificationText)
    {
        Log($"Sending notification: {title} - {notificationText}");
        MessageData message = new(title, topic, notificationText);
        HttpRequestMessage request = new(HttpMethod.Post, $"{this.endpoint}?auth={this.auth}");
        request.Content = new StringContent(JsonConvert.SerializeObject(message, Formatting.None), Encoding.UTF8, "application/json");
        HttpResponseMessage response = _client.Send(request);
        if (!response.IsSuccessStatusCode)
        {
            StreamReader sr = new (response.Content.ReadAsStream());
            Log($"{response.StatusCode}: {sr.ReadToEnd()}");
        }
    }

    private class MessageData
    {
        // ReSharper disable UnusedAutoPropertyAccessor.Local
        public string topic { get; }
        public string title { get; }
        public string message { get; }
        public int priority { get; }

        public MessageData(string title, string topic, string message)
        {
            this.topic = topic;
            this.title = title;
            this.message = message;
            this.priority = 3;
        }
    }
}
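The convenience constructor splits a single ntfy URL into endpoint and topic and derives the auth query parameter from username and password. A hedged sketch of what the helpers produce; the host, topic and credentials below are placeholders, and clone stands in for a GlobalBase supplied by the host application:

// "https://ntfy.example.com/tranga-updates" is a placeholder URL.
// EndpointAndTopicFromUrl splits it into "https://ntfy.example.com" and "tranga-updates";
// without a path segment the topic falls back to "tranga".
// AuthFromUsernamePassword base64-encodes the "Basic ..." header value a second time and
// strips '=' padding, matching the ?auth= query-parameter scheme used in SendNotificationInternal.
Ntfy ntfy = new(clone, "https://ntfy.example.com/tranga-updates", "user", "pass", null);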
Tranga/Server.cs (359 changed lines)
@ -19,9 +19,9 @@ public class Server : GlobalBase
|
||||
{
|
||||
this._parent = parent;
|
||||
if(RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
this._listener.Prefixes.Add($"http://*:{settings.apiPortNumber}/");
|
||||
this._listener.Prefixes.Add($"http://*:{TrangaSettings.apiPortNumber}/");
|
||||
else
|
||||
this._listener.Prefixes.Add($"http://localhost:{settings.apiPortNumber}/");
|
||||
this._listener.Prefixes.Add($"http://localhost:{TrangaSettings.apiPortNumber}/");
|
||||
Thread listenThread = new (Listen);
|
||||
listenThread.Start();
|
||||
Thread watchThread = new(WatchRunning);
|
||||
@ -52,7 +52,7 @@ public class Server : GlobalBase
|
||||
});
|
||||
t.Start();
|
||||
}
|
||||
catch (HttpListenerException e)
|
||||
catch (HttpListenerException)
|
||||
{
|
||||
|
||||
}
|
||||
@ -63,10 +63,11 @@ public class Server : GlobalBase
|
||||
{
|
||||
HttpListenerRequest request = context.Request;
|
||||
HttpListenerResponse response = context.Response;
|
||||
if(request.HttpMethod == "OPTIONS")
|
||||
SendResponse(HttpStatusCode.OK, context.Response);
|
||||
if(request.Url!.LocalPath.Contains("favicon"))
|
||||
if (request.Url!.LocalPath.Contains("favicon"))
|
||||
{
|
||||
SendResponse(HttpStatusCode.NoContent, response);
|
||||
return;
|
||||
}
|
||||
|
||||
switch (request.HttpMethod)
|
||||
{
|
||||
@ -79,6 +80,9 @@ public class Server : GlobalBase
|
||||
case "DELETE":
|
||||
HandleDelete(request, response);
|
||||
break;
|
||||
case "OPTIONS":
|
||||
SendResponse(HttpStatusCode.OK, context.Response);
|
||||
break;
|
||||
default:
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
@ -114,6 +118,15 @@ public class Server : GlobalBase
|
||||
case "Connectors":
|
||||
SendResponse(HttpStatusCode.OK, response, _parent.GetConnectors().Select(con => con.name).ToArray());
|
||||
break;
|
||||
case "Languages":
|
||||
if (!requestVariables.TryGetValue("connector", out connectorName) ||
|
||||
!_parent.TryGetConnector(connectorName, out connector))
|
||||
{
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
}
|
||||
SendResponse(HttpStatusCode.OK, response, connector);
|
||||
break;
|
||||
case "Manga/Cover":
|
||||
if (!requestVariables.TryGetValue("internalId", out internalId) ||
|
||||
!_parent.TryGetPublicationById(internalId, out manga))
|
||||
@ -122,7 +135,7 @@ public class Server : GlobalBase
|
||||
break;
|
||||
}
|
||||
|
||||
string filePath = settings.GetFullCoverPath((Manga)manga!);
|
||||
string filePath = manga?.coverFileNameInCache ?? "";
|
||||
if (File.Exists(filePath))
|
||||
{
|
||||
FileStream coverStream = new(filePath, FileMode.Open);
|
||||
@ -163,7 +176,8 @@ public class Server : GlobalBase
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
}
|
||||
SendResponse(HttpStatusCode.OK, response, connector!.GetChapters((Manga)manga!));
|
||||
requestVariables.TryGetValue("translatedLanguage", out string? translatedLanguage);
|
||||
SendResponse(HttpStatusCode.OK, response, connector!.GetChapters((Manga)manga!, translatedLanguage??"en"));
|
||||
break;
|
||||
case "Jobs":
|
||||
if (!requestVariables.TryGetValue("jobId", out jobId))
|
||||
@ -191,13 +205,22 @@ public class Server : GlobalBase
|
||||
SendResponse(HttpStatusCode.OK, response, _parent.jobBoss.jobs.Where(jjob => jjob.progressToken.state is ProgressToken.State.Running));
|
||||
break;
|
||||
case "Jobs/Waiting":
|
||||
SendResponse(HttpStatusCode.OK, response, _parent.jobBoss.jobs.Where(jjob => jjob.progressToken.state is ProgressToken.State.Standby));
|
||||
SendResponse(HttpStatusCode.OK, response, _parent.jobBoss.jobs.Where(jjob => jjob.progressToken.state is ProgressToken.State.Standby).OrderBy(jjob => jjob.nextExecution));
|
||||
break;
|
||||
case "Jobs/MonitorJobs":
|
||||
SendResponse(HttpStatusCode.OK, response, _parent.jobBoss.jobs.Where(jjob => jjob is DownloadNewChapters));
|
||||
SendResponse(HttpStatusCode.OK, response, _parent.jobBoss.jobs.Where(jjob => jjob is DownloadNewChapters).OrderBy(jjob => ((DownloadNewChapters)jjob).manga.sortName));
|
||||
break;
|
||||
case "Settings":
|
||||
SendResponse(HttpStatusCode.OK, response, settings);
|
||||
SendResponse(HttpStatusCode.OK, response, TrangaSettings.AsJObject());
|
||||
break;
|
||||
case "Settings/userAgent":
|
||||
SendResponse(HttpStatusCode.OK, response, TrangaSettings.userAgent);
|
||||
break;
|
||||
case "Settings/customRequestLimit":
|
||||
SendResponse(HttpStatusCode.OK, response, TrangaSettings.requestLimits);
|
||||
break;
|
||||
case "Settings/AprilFoolsMode":
|
||||
SendResponse(HttpStatusCode.OK, response, TrangaSettings.aprilFoolsMode);
|
||||
break;
|
||||
case "NotificationConnectors":
|
||||
SendResponse(HttpStatusCode.OK, response, notificationConnectors);
|
||||
@ -216,6 +239,40 @@ public class Server : GlobalBase
|
||||
case "Ping":
|
||||
SendResponse(HttpStatusCode.OK, response, "Pong");
|
||||
break;
|
||||
case "LogMessages":
|
||||
if (logger is null || !File.Exists(logger?.logFilePath))
|
||||
{
|
||||
SendResponse(HttpStatusCode.NotFound, response);
|
||||
break;
|
||||
}
|
||||
|
||||
if (requestVariables.TryGetValue("count", out string? count))
|
||||
{
|
||||
try
|
||||
{
|
||||
uint messageCount = uint.Parse(count);
|
||||
SendResponse(HttpStatusCode.OK, response, logger.Tail(messageCount));
|
||||
}
|
||||
catch (FormatException f)
|
||||
{
|
||||
SendResponse(HttpStatusCode.InternalServerError, response, f);
|
||||
}
|
||||
}else
|
||||
SendResponse(HttpStatusCode.OK, response, logger.GetLog());
|
||||
break;
|
||||
case "LogFile":
|
||||
if (logger is null || !File.Exists(logger?.logFilePath))
|
||||
{
|
||||
SendResponse(HttpStatusCode.NotFound, response);
|
||||
break;
|
||||
}
|
||||
|
||||
string logDir = new FileInfo(logger.logFilePath).DirectoryName!;
|
||||
string tmpFilePath = Path.Join(logDir, "Tranga.log");
|
||||
File.Copy(logger.logFilePath, tmpFilePath);
|
||||
SendResponse(HttpStatusCode.OK, response, new FileStream(tmpFilePath, FileMode.Open));
|
||||
File.Delete(tmpFilePath);
|
||||
break;
|
||||
default:
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
@ -225,11 +282,13 @@ public class Server : GlobalBase
|
||||
private void HandlePost(HttpListenerRequest request, HttpListenerResponse response)
|
||||
{
|
||||
Dictionary<string, string> requestVariables = GetRequestVariables(request.Url!.Query);
|
||||
string? connectorName, internalId, jobId, chapterNumStr, customFolderName;
|
||||
string? connectorName, internalId, jobId, chapterNumStr, customFolderName, translatedLanguage, notificationConnectorStr, libraryConnectorStr;
|
||||
MangaConnector? connector;
|
||||
Manga? tmpManga;
|
||||
Manga manga;
|
||||
Job? job;
|
||||
NotificationConnector.NotificationConnectorType notificationConnectorType;
|
||||
LibraryConnector.LibraryType libraryConnectorType;
|
||||
string path = Regex.Match(request.Url!.LocalPath, @"[A-z0-9]+(\/[A-z0-9]+)*").Value;
|
||||
switch (path)
|
||||
{
|
||||
@ -268,9 +327,10 @@ public class Server : GlobalBase
|
||||
}
|
||||
|
||||
if (requestVariables.TryGetValue("customFolderName", out customFolderName))
|
||||
manga.MovePublicationFolder(settings.downloadLocation, customFolderName);
|
||||
manga.MovePublicationFolder(TrangaSettings.downloadLocation, customFolderName);
|
||||
requestVariables.TryGetValue("translatedLanguage", out translatedLanguage);
|
||||
|
||||
_parent.jobBoss.AddJob(new DownloadNewChapters(this, connector!, manga, true, interval));
|
||||
_parent.jobBoss.AddJob(new DownloadNewChapters(this, connector!, manga, true, interval, translatedLanguage: translatedLanguage??"en"));
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
break;
|
||||
case "Jobs/DownloadNewChapters":
|
||||
@ -296,11 +356,40 @@ public class Server : GlobalBase
|
||||
}
|
||||
|
||||
if (requestVariables.TryGetValue("customFolderName", out customFolderName))
|
||||
manga.MovePublicationFolder(settings.downloadLocation, customFolderName);
|
||||
manga.MovePublicationFolder(TrangaSettings.downloadLocation, customFolderName);
|
||||
requestVariables.TryGetValue("translatedLanguage", out translatedLanguage);
|
||||
|
||||
_parent.jobBoss.AddJob(new DownloadNewChapters(this, connector!, manga, false));
|
||||
_parent.jobBoss.AddJob(new DownloadNewChapters(this, connector!, manga, false, translatedLanguage: translatedLanguage??"en"));
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
break;
|
||||
case "Jobs/UpdateMetadata":
|
||||
if (!requestVariables.TryGetValue("internalId", out internalId))
|
||||
{
|
||||
foreach (Job pJob in _parent.jobBoss.jobs.Where(possibleDncJob =>
|
||||
possibleDncJob.jobType is Job.JobType.DownloadNewChaptersJob).ToArray())//ToArray to avoid modyifying while adding new jobs
|
||||
{
|
||||
DownloadNewChapters dncJob = pJob as DownloadNewChapters ??
|
||||
throw new Exception("Has to be DownloadNewChapters Job");
|
||||
_parent.jobBoss.AddJob(new UpdateMetadata(this, dncJob.mangaConnector, dncJob.manga));
|
||||
}
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
}
|
||||
else
|
||||
{
|
||||
Job[] possibleDncJobs = _parent.jobBoss.GetJobsLike(internalId: internalId).ToArray();
|
||||
switch (possibleDncJobs.Length)
|
||||
{
|
||||
case <1: SendResponse(HttpStatusCode.BadRequest, response, "Could not find matching release"); break;
|
||||
case >1: SendResponse(HttpStatusCode.BadRequest, response, "Multiple releases??"); break;
|
||||
default:
|
||||
DownloadNewChapters dncJob = possibleDncJobs[0] as DownloadNewChapters ??
|
||||
throw new Exception("Has to be DownloadNewChapters Job");
|
||||
_parent.jobBoss.AddJob(new UpdateMetadata(this, dncJob.mangaConnector, dncJob.manga));
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
break;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case "Jobs/StartNow":
|
||||
if (!requestVariables.TryGetValue("jobId", out jobId) ||
|
||||
!_parent.jobBoss.TryGetJobById(jobId, out job))
|
||||
@ -324,12 +413,22 @@ public class Server : GlobalBase
|
||||
case "Settings/UpdateDownloadLocation":
|
||||
if (!requestVariables.TryGetValue("downloadLocation", out string? downloadLocation) ||
|
||||
!requestVariables.TryGetValue("moveFiles", out string? moveFilesStr) ||
|
||||
!Boolean.TryParse(moveFilesStr, out bool moveFiles))
|
||||
!bool.TryParse(moveFilesStr, out bool moveFiles))
|
||||
{
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
}
|
||||
settings.UpdateDownloadLocation(downloadLocation, moveFiles);
|
||||
TrangaSettings.UpdateDownloadLocation(downloadLocation, moveFiles);
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
break;
|
||||
case "Settings/AprilFoolsMode":
|
||||
if (!requestVariables.TryGetValue("enabled", out string? aprilFoolsModeEnabledStr) ||
|
||||
!bool.TryParse(aprilFoolsModeEnabledStr, out bool aprilFoolsModeEnabled))
|
||||
{
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
}
|
||||
TrangaSettings.UpdateAprilFoolsMode(aprilFoolsModeEnabled);
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
break;
|
||||
/*case "Settings/UpdateWorkingDirectory":
|
||||
@ -341,9 +440,38 @@ public class Server : GlobalBase
|
||||
settings.UpdateWorkingDirectory(workingDirectory);
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
break;*/
|
||||
case "Settings/userAgent":
|
||||
if(!requestVariables.TryGetValue("userAgent", out string? customUserAgent))
|
||||
{
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
}
|
||||
TrangaSettings.UpdateUserAgent(customUserAgent);
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
break;
|
||||
case "Settings/userAgent/Reset":
|
||||
TrangaSettings.UpdateUserAgent(null);
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
break;
|
||||
case "Settings/customRequestLimit":
|
||||
if (!requestVariables.TryGetValue("requestType", out string? requestTypeStr) ||
|
||||
!requestVariables.TryGetValue("requestsPerMinute", out string? requestsPerMinuteStr) ||
|
||||
!Enum.TryParse(requestTypeStr, out RequestType requestType) ||
|
||||
!int.TryParse(requestsPerMinuteStr, out int requestsPerMinute))
|
||||
{
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
}
|
||||
|
||||
TrangaSettings.UpdateRateLimit(requestType, requestsPerMinute);
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
break;
|
||||
case "Settings/customRequestLimit/Reset":
|
||||
TrangaSettings.ResetRateLimits();
|
||||
break;
|
||||
case "NotificationConnectors/Update":
|
||||
if (!requestVariables.TryGetValue("notificationConnector", out string? notificationConnectorStr) ||
|
||||
!Enum.TryParse(notificationConnectorStr, out NotificationConnector.NotificationConnectorType notificationConnectorType))
|
||||
if (!requestVariables.TryGetValue("notificationConnector", out notificationConnectorStr) ||
|
||||
!Enum.TryParse(notificationConnectorStr, out notificationConnectorType))
|
||||
{
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
@ -359,10 +487,7 @@ public class Server : GlobalBase
|
||||
}
|
||||
AddNotificationConnector(new Gotify(this, gotifyUrl, gotifyAppToken));
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
break;
|
||||
}
|
||||
|
||||
if (notificationConnectorType is NotificationConnector.NotificationConnectorType.LunaSea)
|
||||
}else if (notificationConnectorType is NotificationConnector.NotificationConnectorType.LunaSea)
|
||||
{
|
||||
if (!requestVariables.TryGetValue("lunaseaWebhook", out string? lunaseaWebhook))
|
||||
{
|
||||
@ -371,13 +496,82 @@ public class Server : GlobalBase
|
||||
}
|
||||
AddNotificationConnector(new LunaSea(this, lunaseaWebhook));
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
break;
|
||||
}else if (notificationConnectorType is NotificationConnector.NotificationConnectorType.Ntfy)
|
||||
{
|
||||
if (!requestVariables.TryGetValue("ntfyUrl", out string? ntfyUrl) ||
|
||||
!requestVariables.TryGetValue("ntfyUser", out string? ntfyUser)||
|
||||
!requestVariables.TryGetValue("ntfyPass", out string? ntfyPass))
|
||||
{
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
}
|
||||
AddNotificationConnector(new Ntfy(this, ntfyUrl, ntfyUser, ntfyPass, null));
|
||||
SendResponse(HttpStatusCode.Accepted, response);
|
||||
}
|
||||
else
|
||||
{
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
}
|
||||
break;
|
||||
case "NotificationConnectors/Test":
|
||||
NotificationConnector notificationConnector;
|
||||
if (!requestVariables.TryGetValue("notificationConnector", out notificationConnectorStr) ||
|
||||
!Enum.TryParse(notificationConnectorStr, out notificationConnectorType))
|
||||
{
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
}
|
||||
|
||||
if (notificationConnectorType is NotificationConnector.NotificationConnectorType.Gotify)
|
||||
{
|
||||
if (!requestVariables.TryGetValue("gotifyUrl", out string? gotifyUrl) ||
|
||||
!requestVariables.TryGetValue("gotifyAppToken", out string? gotifyAppToken))
|
||||
{
|
||||
SendResponse(HttpStatusCode.BadRequest, response);
|
||||
break;
|
||||
}
|
||||
notificationConnector = new Gotify(this, gotifyUrl, gotifyAppToken);
}else if (notificationConnectorType is NotificationConnector.NotificationConnectorType.LunaSea)
{
if (!requestVariables.TryGetValue("lunaseaWebhook", out string? lunaseaWebhook))
{
SendResponse(HttpStatusCode.BadRequest, response);
break;
}
notificationConnector = new LunaSea(this, lunaseaWebhook);
}else if (notificationConnectorType is NotificationConnector.NotificationConnectorType.Ntfy)
{
if (!requestVariables.TryGetValue("ntfyUrl", out string? ntfyUrl) ||
!requestVariables.TryGetValue("ntfyUser", out string? ntfyUser)||
!requestVariables.TryGetValue("ntfyPass", out string? ntfyPass))
{
SendResponse(HttpStatusCode.BadRequest, response);
break;
}
notificationConnector = new Ntfy(this, ntfyUrl, ntfyUser, ntfyPass, null);
}
else
{
SendResponse(HttpStatusCode.BadRequest, response);
break;
}

notificationConnector.SendNotification("Tranga Test", "This is Test-Notification.");
SendResponse(HttpStatusCode.Accepted, response);
break;
case "NotificationConnectors/Reset":
if (!requestVariables.TryGetValue("notificationConnector", out notificationConnectorStr) ||
!Enum.TryParse(notificationConnectorStr, out notificationConnectorType))
{
SendResponse(HttpStatusCode.BadRequest, response);
break;
}
DeleteNotificationConnector(notificationConnectorType);
SendResponse(HttpStatusCode.Accepted, response);
break;
case "LibraryConnectors/Update":
if (!requestVariables.TryGetValue("libraryConnector", out string? libraryConnectorStr) ||
!Enum.TryParse(libraryConnectorStr,
out LibraryConnector.LibraryType libraryConnectorType))
if (!requestVariables.TryGetValue("libraryConnector", out libraryConnectorStr) ||
!Enum.TryParse(libraryConnectorStr, out libraryConnectorType))
{
SendResponse(HttpStatusCode.BadRequest, response);
break;
@@ -394,10 +588,7 @@ public class Server : GlobalBase
}
AddLibraryConnector(new Kavita(this, kavitaUrl, kavitaUsername, kavitaPassword));
SendResponse(HttpStatusCode.Accepted, response);
break;
}

if (libraryConnectorType is LibraryConnector.LibraryType.Komga)
}else if (libraryConnectorType is LibraryConnector.LibraryType.Komga)
{
if (!requestVariables.TryGetValue("komgaUrl", out string? komgaUrl) ||
!requestVariables.TryGetValue("komgaAuth", out string? komgaAuth))
@@ -407,29 +598,58 @@ public class Server : GlobalBase
}
AddLibraryConnector(new Komga(this, komgaUrl, komgaAuth));
SendResponse(HttpStatusCode.Accepted, response);
break;
}
else
{
SendResponse(HttpStatusCode.BadRequest, response);
}
break;
case "LogMessages":
if (logger is null || !File.Exists(logger?.logFilePath))
case "LibraryConnectors/Test":
LibraryConnector libraryConnector;
if (!requestVariables.TryGetValue("libraryConnector", out libraryConnectorStr) ||
!Enum.TryParse(libraryConnectorStr, out libraryConnectorType))
{
SendResponse(HttpStatusCode.NotFound, response);
break;
}
SendResponse(HttpStatusCode.OK, response, logger.GetLog());
break;
case "LogFile":
if (logger is null || !File.Exists(logger?.logFilePath))
{
SendResponse(HttpStatusCode.NotFound, response);
SendResponse(HttpStatusCode.BadRequest, response);
break;
}

string logDir = new FileInfo(logger.logFilePath).DirectoryName!;
string tmpFilePath = Path.Join(logDir, "Tranga.log");
File.Copy(logger.logFilePath, tmpFilePath);
SendResponse(HttpStatusCode.OK, response, new FileStream(tmpFilePath, FileMode.Open));
File.Delete(tmpFilePath);
if (libraryConnectorType is LibraryConnector.LibraryType.Kavita)
{
if (!requestVariables.TryGetValue("kavitaUrl", out string? kavitaUrl) ||
!requestVariables.TryGetValue("kavitaUsername", out string? kavitaUsername) ||
!requestVariables.TryGetValue("kavitaPassword", out string? kavitaPassword))
{
SendResponse(HttpStatusCode.BadRequest, response);
break;
}
libraryConnector = new Kavita(this, kavitaUrl, kavitaUsername, kavitaPassword);
}else if (libraryConnectorType is LibraryConnector.LibraryType.Komga)
{
if (!requestVariables.TryGetValue("komgaUrl", out string? komgaUrl) ||
!requestVariables.TryGetValue("komgaAuth", out string? komgaAuth))
{
SendResponse(HttpStatusCode.BadRequest, response);
break;
}
libraryConnector = new Komga(this, komgaUrl, komgaAuth);
}
else
{
SendResponse(HttpStatusCode.BadRequest, response);
break;
}
libraryConnector.UpdateLibrary();
SendResponse(HttpStatusCode.Accepted, response);
break;
case "LibraryConnectors/Reset":
if (!requestVariables.TryGetValue("libraryConnector", out libraryConnectorStr) ||
!Enum.TryParse(libraryConnectorStr, out libraryConnectorType))
{
SendResponse(HttpStatusCode.BadRequest, response);
break;
}
DeleteLibraryConnector(libraryConnectorType);
SendResponse(HttpStatusCode.Accepted, response);
break;
default:
SendResponse(HttpStatusCode.BadRequest, response);
@@ -500,32 +720,53 @@ public class Server : GlobalBase
private void SendResponse(HttpStatusCode statusCode, HttpListenerResponse response, object? content = null)
{
//Log($"Response: {statusCode} {content}");

response.StatusCode = (int)statusCode;
response.AddHeader("Access-Control-Allow-Headers", "Content-Type, Accept, X-Requested-With");
response.AddHeader("Access-Control-Allow-Methods", "GET, POST, DELETE");
response.AddHeader("Access-Control-Max-Age", "1728000");
response.AppendHeader("Access-Control-Allow-Origin", "*");

if (content is not FileStream stream)
try
{
response.ContentType = "application/json";
try

if (content is not Stream)
{
response.ContentType = "application/json";
response.AddHeader("Cache-Control", "no-store");
response.OutputStream.Write(content is not null
? Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(content))
: Array.Empty<byte>());
response.OutputStream.Close();
}
catch (HttpListenerException e)
else if (content is FileStream stream)
{
Log(e.ToString());
string contentType = stream.Name.Split('.')[^1];
response.AddHeader("Cache-Control", "max-age=600");
switch (contentType.ToLower())
{
case "gif":
response.ContentType = "image/gif";
break;
case "png":
response.ContentType = "image/png";
break;
case "jpg":
case "jpeg":
response.ContentType = "image/jpeg";
break;
case "log":
response.ContentType = "text/plain";
break;
}

stream.CopyTo(response.OutputStream);
response.OutputStream.Close();
stream.Close();
}
}
else
catch (Exception e)
{
stream.CopyTo(response.OutputStream);
response.OutputStream.Close();
stream.Close();
Log(e.ToString());
}
}
}
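Note: a minimal sketch of how the connector endpoints above could be exercised from a client, assuming the API listens on the default port 6531 and that the request variables are read from the query string; the host, Gotify URL, and token below are placeholders, not values from this commit.

```csharp
// Hypothetical client call against the NotificationConnectors/Test endpoint shown above.
// Assumes: API reachable at http://localhost:6531 and request variables passed as query parameters.
using System.Net.Http;

using HttpClient client = new() { BaseAddress = new Uri("http://localhost:6531/") };
HttpResponseMessage result = await client.PostAsync(
    "NotificationConnectors/Test" +
    "?notificationConnector=Gotify" +
    "&gotifyUrl=https://gotify.example.com" +
    "&gotifyAppToken=REPLACE_ME",
    null);
// Expect Accepted on success; BadRequest when a required variable is missing.
Console.WriteLine(result.StatusCode);
```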
@@ -11,19 +11,31 @@ public partial class Tranga : GlobalBase
private Server _server;
private HashSet<MangaConnector> _connectors;

public Tranga(Logger? logger, TrangaSettings settings) : base(logger, settings)
public Tranga(Logger? logger) : base(logger)
{
Log("\n\n _______ \n|_ _|.----..---.-..-----..-----..---.-.\n | | | _|| _ || || _ || _ |\n |___| |__| |___._||__|__||___ ||___._|\n |_____| \n\n");
keepRunning = true;
_connectors = new HashSet<MangaConnector>()
{
new Manganato(this),
new Mangasee(this),
new MangaDex(this),
new MangaKatana(this)
new MangaKatana(this),
new Mangaworld(this),
new Bato(this),
new ManhuaPlus(this),
new MangaHere(this),
new AsuraToon(this),
new Weebcentral(this),
new Webtoons(this),
};
foreach(DirectoryInfo dir in new DirectoryInfo(Path.GetTempPath()).GetDirectories("trangatemp"))//Cleanup old temp folders
dir.Delete();
jobBoss = new(this, this._connectors);
StartJobBoss();
this._server = new Server(this);
string[] emojis = { "(•‿•)", "(づ \u25d5‿\u25d5 )づ", "( \u02d8\u25bd\u02d8)っ\u2668", "=\uff3e\u25cf \u22cf \u25cf\uff3e=", "(ΦωΦ)", "(\u272a\u3268\u272a)", "( ノ・o・ )ノ", "(〜^\u2207^ )〜", "~(\u2267ω\u2266)~","૮ \u00b4• ﻌ \u00b4• ა", "(\u02c3ᆺ\u02c2)", "(=\ud83d\udf66 \u0f1d \ud83d\udf66=)"};
SendNotifications("Tranga Started", emojis[Random.Shared.Next(0,emojis.Length-1)]);
Log(TrangaSettings.AsJObject().ToString());
}

public MangaConnector? GetConnector(string name)
@@ -45,12 +57,7 @@ public partial class Tranga : GlobalBase
return _connectors;
}

public Manga? GetPublicationById(string internalId)
{
if (cachedPublications.Exists(publication => publication.internalId == internalId))
return cachedPublications.First(publication => publication.internalId == internalId);
return null;
}
public Manga? GetPublicationById(string internalId) => GetCachedManga(internalId);

public bool TryGetPublicationById(string internalId, out Manga? manga)
{
@@ -64,10 +71,23 @@ public partial class Tranga : GlobalBase
{
while (keepRunning)
{
jobBoss.CheckJobs();
if(!TrangaSettings.aprilFoolsMode || !IsAprilFirst())
jobBoss.CheckJobs();
else
Log("April Fools Mode in Effect");
Thread.Sleep(100);
}
});
t.Start();
}

private bool IsAprilFirst()
{
//UTC 01 Apr +-12hrs
DateTime start = new DateTime(DateTime.Now.Year, 03, 31, 12, 0, 0, DateTimeKind.Utc);
DateTime end = new DateTime(DateTime.Now.Year, 04, 02, 12, 0, 0, DateTimeKind.Utc);
if (DateTime.UtcNow > start && DateTime.UtcNow < end)
return true;
return false;
}
}
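Note: the April-Fools window above is plain UTC date arithmetic; the following self-contained sketch mirrors IsAprilFirst() for an arbitrary timestamp (the dates used are examples only, not part of the diff).

```csharp
// Mirrors the IsAprilFirst() window above: UTC April 1st, plus/minus 12 hours.
static bool InAprilFirstWindow(DateTime utcNow)
{
    DateTime start = new(utcNow.Year, 3, 31, 12, 0, 0, DateTimeKind.Utc);
    DateTime end = new(utcNow.Year, 4, 2, 12, 0, 0, DateTimeKind.Utc);
    return utcNow > start && utcNow < end;
}

Console.WriteLine(InAprilFirstWindow(new DateTime(2025, 4, 1, 0, 0, 0, DateTimeKind.Utc)));  // True
Console.WriteLine(InAprilFirstWindow(new DateTime(2025, 4, 3, 0, 0, 0, DateTimeKind.Utc)));  // False
```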
@@ -1,16 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>net7.0</TargetFramework>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<OutputType>Exe</OutputType>
<LangVersion>12</LangVersion>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="HtmlAgilityPack" Version="1.11.46" />
<PackageReference Include="GlaxArguments" Version="1.1.0" />
<PackageReference Include="HtmlAgilityPack" Version="1.11.72" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="PuppeteerSharp" Version="10.0.0" />
<PackageReference Include="PuppeteerSharp" Version="20.1.0" />
<PackageReference Include="Soenneker.Utils.String.NeedlemanWunsch" Version="2.1.301" />
</ItemGroup>

<ItemGroup>
@@ -1,4 +1,5 @@
using Logging;
using GlaxArguments;

namespace Tranga;

@@ -7,130 +8,44 @@ public partial class Tranga : GlobalBase

public static void Main(string[] args)
{
string[]? help = GetArg(args, ArgEnum.Help);
if (help is not null)
{
PrintHelp();
return;
}
Argument downloadLocation = new (new[] { "-d", "--downloadLocation" }, 1, "Directory to which downloaded Manga are saved");
Argument workingDirectory = new (new[] { "-w", "--workingDirectory" }, 1, "Directory in which application-data is saved");
Argument consoleLogger = new (new []{"-c", "--consoleLogger"}, 0, "Enables the consoleLogger");
Argument fileLogger = new (new []{"-f", "--fileLogger"}, 0, "Enables the fileLogger");
Argument fPath = new (new []{"-l", "--fPath"}, 1, "Log Folder Path");

string[]? consoleLogger = GetArg(args, ArgEnum.ConsoleLogger);
string[]? fileLogger = GetArg(args, ArgEnum.FileLogger);
string? filePath = fileLogger?[0];//TODO validate path
Argument[] arguments = new[]
{
downloadLocation,
workingDirectory,
consoleLogger,
fileLogger,
fPath
};
ArgumentFetcher fetcher = new (arguments);
Dictionary<Argument, string[]> fetched = fetcher.Fetch(args);

string? directoryPath = fetched.TryGetValue(fPath, out string[]? path) ? path[0] : null;
if (directoryPath is not null && !Directory.Exists(directoryPath))
Directory.CreateDirectory(directoryPath);

List<Logger.LoggerType> enabledLoggers = new();
if(consoleLogger is not null)
if(fetched.ContainsKey(consoleLogger))
enabledLoggers.Add(Logger.LoggerType.ConsoleLogger);
if (fileLogger is not null)
if (fetched.ContainsKey(fileLogger))
enabledLoggers.Add(Logger.LoggerType.FileLogger);
Logger logger = new(enabledLoggers.ToArray(), Console.Out, Console.OutputEncoding, filePath);
Logger logger = new(enabledLoggers.ToArray(), Console.Out, Console.OutputEncoding, directoryPath);

TrangaSettings? settings = null;
string[]? downloadLocationPath = GetArg(args, ArgEnum.DownloadLocation);
string[]? workingDirectory = GetArg(args, ArgEnum.WorkingDirectory);
bool dlp = fetched.TryGetValue(downloadLocation, out string[]? downloadLocationPath);
bool wdp = fetched.TryGetValue(workingDirectory, out string[]? workingDirectoryPath);

if (downloadLocationPath is not null && workingDirectory is not null)
{
settings = new TrangaSettings(downloadLocationPath[0], workingDirectory[0]);
}else if (downloadLocationPath is not null)
{
if (settings is null)
settings = new TrangaSettings(downloadLocation: downloadLocationPath[0]);
else
settings = new TrangaSettings(downloadLocation: downloadLocationPath[0], settings.workingDirectory);
}else if (workingDirectory is not null)
{
if (settings is null)
settings = new TrangaSettings(downloadLocation: workingDirectory[0]);
else
settings = new TrangaSettings(settings.downloadLocation, workingDirectory[0]);
}
if (wdp)
TrangaSettings.LoadFromWorkingDirectory(workingDirectoryPath![0]);
else
{
settings = new TrangaSettings();
}
TrangaSettings.CreateOrUpdate();
if(dlp)
TrangaSettings.CreateOrUpdate(downloadDirectory: downloadLocationPath![0]);

Directory.CreateDirectory(settings.downloadLocation);//TODO validate path
Directory.CreateDirectory(settings.workingDirectory);//TODO validate path

Tranga _ = new (logger, settings);
}

private static void PrintHelp()
{
Console.WriteLine("Tranga-Help:");
foreach (Argument argument in arguments.Values)
{
foreach(string name in argument.names)
Console.Write("{0} ", name);
if(argument.parameterCount > 0)
Console.Write($"<{argument.parameterCount}>");
Console.Write("\r\n {0}\r\n", argument.helpText);
}
}

/// <summary>
/// Returns an array containing the parameters for the argument.
/// </summary>
/// <param name="args">List of argument-strings</param>
/// <param name="arg">Requested parameter</param>
/// <returns>
/// If there are no parameters for an argument, returns an empty array.
/// If the argument is not found returns null.
/// </returns>
private static string[]? GetArg(string[] args, ArgEnum arg)
{
List<string> argsList = args.ToList();
List<string> ret = new();
foreach (string name in arguments[arg].names)
{
int argIndex = argsList.IndexOf(name);
if (argIndex != -1)
{
if (arguments[arg].parameterCount == 0)
return ret.ToArray();
for (int parameterIndex = 1; parameterIndex <= arguments[arg].parameterCount; parameterIndex++)
{
if(argIndex + parameterIndex >= argsList.Count || args[argIndex + parameterIndex].Contains('-'))//End of arguments, or no parameter provided, when one is required
Console.WriteLine($"No parameter provided for argument {name}. -h for help.");
ret.Add(args[argIndex + parameterIndex]);
}
}
}
return ret.Any() ? ret.ToArray() : null;
}

private static Dictionary<ArgEnum, Argument> arguments = new()
{
{ ArgEnum.DownloadLocation, new(new []{"-d", "--downloadLocation"}, 1, "Directory to which downloaded Manga are saved") },
{ ArgEnum.WorkingDirectory, new(new []{"-w", "--workingDirectory"}, 1, "Directory in which application-data is saved") },
{ ArgEnum.ConsoleLogger, new(new []{"-c", "--consoleLogger"}, 0, "Enables the consoleLogger") },
{ ArgEnum.FileLogger, new(new []{"-f", "--fileLogger"}, 1, "Enables the fileLogger, Directory where logfiles are saved") },
{ ArgEnum.Help, new(new []{"-h", "--help"}, 0, "Print this") }
//{ ArgEnum., new(new []{""}, 1, "") }
};

internal enum ArgEnum
{
TrangaSettings,
DownloadLocation,
WorkingDirectory,
ConsoleLogger,
FileLogger,
Help
}

private struct Argument
{
public string[] names { get; }
public byte parameterCount { get; }
public string helpText { get; }

public Argument(string[] names, byte parameterCount, string helpText)
{
this.names = names;
this.parameterCount = parameterCount;
this.helpText = helpText;
}
Tranga _ = new (logger);
}
}
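Note: with the switch to GlaxArguments the flags keep their short names; per the Argument definitions above, an invocation along the lines of `dotnet Tranga.dll -d /Manga -w /usr/share/tranga-api -c -f -l /var/log/tranga` (paths purely illustrative) would enable both loggers and override the download and working directories.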
@@ -1,55 +1,72 @@
using System.Runtime.InteropServices;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Tranga.LibraryConnectors;
using Tranga.MangaConnectors;
using Tranga.NotificationConnectors;
using static System.IO.UnixFileMode;

namespace Tranga;

public class TrangaSettings
public static class TrangaSettings
{
public string downloadLocation { get; private set; }
public string workingDirectory { get; private set; }
public int apiPortNumber { get; init; }
[JsonIgnore] public string settingsFilePath => Path.Join(workingDirectory, "settings.json");
[JsonIgnore] public string libraryConnectorsFilePath => Path.Join(workingDirectory, "libraryConnectors.json");
[JsonIgnore] public string notificationConnectorsFilePath => Path.Join(workingDirectory, "notificationConnectors.json");
[JsonIgnore] public string jobsFilePath => Path.Join(workingDirectory, "jobs.json");
[JsonIgnore] public string coverImageCache => Path.Join(workingDirectory, "imageCache");
public ushort? version { get; set; }

public TrangaSettings(string? downloadLocation = null, string? workingDirectory = null, int? apiPortNumber = null)
[JsonIgnore] internal static readonly string DefaultUserAgent = $"Tranga ({Enum.GetName(Environment.OSVersion.Platform)}; {(Environment.Is64BitOperatingSystem ? "x64" : "")}) / 1.0";
public static string downloadLocation { get; private set; } = (RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? "/Manga" : Path.Join(Directory.GetCurrentDirectory(), "Downloads"));
public static string workingDirectory { get; private set; } = Path.Join(RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? "/usr/share" : Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "tranga-api");
public static int apiPortNumber { get; private set; } = 6531;
public static string userAgent { get; private set; } = DefaultUserAgent;
public static bool bufferLibraryUpdates { get; private set; } = false;
public static bool bufferNotifications { get; private set; } = false;
[JsonIgnore] public static string settingsFilePath => Path.Join(workingDirectory, "settings.json");
[JsonIgnore] public static string libraryConnectorsFilePath => Path.Join(workingDirectory, "libraryConnectors.json");
[JsonIgnore] public static string notificationConnectorsFilePath => Path.Join(workingDirectory, "notificationConnectors.json");
[JsonIgnore] public static string jobsFolderPath => Path.Join(workingDirectory, "jobs");
[JsonIgnore] public static string coverImageCache => Path.Join(workingDirectory, "imageCache");
public static ushort? version { get; } = 2;
public static bool aprilFoolsMode { get; private set; } = true;
[JsonIgnore]internal static readonly Dictionary<RequestType, int> DefaultRequestLimits = new ()
{
string lockFilePath = $"{settingsFilePath}.lock";
if (File.Exists(settingsFilePath) && !File.Exists(lockFilePath))
{//Load from settings file
FileStream lockFile = File.Create(lockFilePath,0, FileOptions.DeleteOnClose);
string settingsStr = File.ReadAllText(settingsFilePath);
TrangaSettings settings = JsonConvert.DeserializeObject<TrangaSettings>(settingsStr)!;
this.downloadLocation = downloadLocation ?? settings.downloadLocation;
this.workingDirectory = workingDirectory ?? settings.workingDirectory;
this.apiPortNumber = apiPortNumber ?? settings.apiPortNumber;
lockFile.Close();
}
else if(!File.Exists(settingsFilePath))
{//No settings file exists
if (downloadLocation?.Length < 1 || workingDirectory?.Length < 1)
throw new ArgumentException("Download-location and working-directory paths can not be empty!");
this.apiPortNumber = apiPortNumber ?? 6531;
this.downloadLocation = downloadLocation ?? (RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? "/Manga" : Path.Join(Directory.GetCurrentDirectory(), "Downloads"));
this.workingDirectory = workingDirectory ?? Path.Join(RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? "/var/lib" : Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "tranga-api");
ExportSettings();
}
else
{//Settingsfile is locked
this.apiPortNumber = apiPortNumber!.Value;
this.downloadLocation = downloadLocation!;
this.workingDirectory = workingDirectory!;
}
UpdateDownloadLocation(this.downloadLocation!, false);
{RequestType.MangaInfo, 250},
{RequestType.MangaDexFeed, 250},
{RequestType.MangaDexImage, 40},
{RequestType.MangaImage, 60},
{RequestType.MangaCover, 250},
{RequestType.Default, 60}
};

public static Dictionary<RequestType, int> requestLimits { get; set; } = DefaultRequestLimits;
public static int ChromiumStartupTimeoutMs { get; set; } = 30000;
public static int ChromiumPageTimeoutMs { get; set; } = 30000;

public static void LoadFromWorkingDirectory(string directory)
{
TrangaSettings.workingDirectory = directory;
if(File.Exists(settingsFilePath))
Deserialize(File.ReadAllText(settingsFilePath));
else return;

Directory.CreateDirectory(downloadLocation);
Directory.CreateDirectory(workingDirectory);
ExportSettings();
}

public HashSet<LibraryConnector> LoadLibraryConnectors(GlobalBase clone)
public static void CreateOrUpdate(string? downloadDirectory = null, string? pWorkingDirectory = null, int? pApiPortNumber = null, string? pUserAgent = null, bool? pAprilFoolsMode = null, bool? pBufferLibraryUpdates = null, bool? pBufferNotifications = null)
{
if(pWorkingDirectory is null && File.Exists(settingsFilePath))
LoadFromWorkingDirectory(workingDirectory);
downloadLocation = downloadDirectory ?? downloadLocation;
workingDirectory = pWorkingDirectory ?? workingDirectory;
apiPortNumber = pApiPortNumber ?? apiPortNumber;
userAgent = pUserAgent ?? userAgent;
aprilFoolsMode = pAprilFoolsMode ?? aprilFoolsMode;
bufferLibraryUpdates = pBufferLibraryUpdates ?? bufferLibraryUpdates;
bufferNotifications = pBufferNotifications ?? bufferNotifications;
Directory.CreateDirectory(downloadLocation);
Directory.CreateDirectory(workingDirectory);
ExportSettings();
}

public static HashSet<LibraryConnector> LoadLibraryConnectors(GlobalBase clone)
{
if (!File.Exists(libraryConnectorsFilePath))
return new HashSet<LibraryConnector>();
@@ -63,7 +80,7 @@ public class TrangaSettings
})!;
}

public HashSet<NotificationConnector> LoadNotificationConnectors(GlobalBase clone)
public static HashSet<NotificationConnector> LoadNotificationConnectors(GlobalBase clone)
{
if (!File.Exists(notificationConnectorsFilePath))
return new HashSet<NotificationConnector>();
@@ -77,7 +94,13 @@ public class TrangaSettings
})!;
}

public void UpdateDownloadLocation(string newPath, bool moveFiles = true)
public static void UpdateAprilFoolsMode(bool enabled)
{
aprilFoolsMode = enabled;
ExportSettings();
}

public static void UpdateDownloadLocation(string newPath, bool moveFiles = true)
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
Directory.CreateDirectory(newPath,
@@ -85,51 +108,96 @@ public class TrangaSettings
else
Directory.CreateDirectory(newPath);

if (moveFiles && Directory.Exists(this.downloadLocation))
Directory.Move(this.downloadLocation, newPath);
if (moveFiles && Directory.Exists(downloadLocation))
Directory.Move(downloadLocation, newPath);

this.downloadLocation = newPath;
downloadLocation = newPath;
ExportSettings();
}

public void UpdateWorkingDirectory(string newPath)
public static void UpdateWorkingDirectory(string newPath)
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
Directory.CreateDirectory(newPath,
GroupRead | GroupWrite | None | OtherRead | OtherWrite | UserRead | UserWrite);
else
Directory.CreateDirectory(newPath);
Directory.Move(this.workingDirectory, newPath);
this.workingDirectory = newPath;
Directory.Move(workingDirectory, newPath);
workingDirectory = newPath;
ExportSettings();
}

public void ExportSettings()
public static void UpdateUserAgent(string? customUserAgent)
{
userAgent = customUserAgent ?? DefaultUserAgent;
ExportSettings();
}

public static void UpdateRateLimit(RequestType requestType, int newLimit)
{
requestLimits[requestType] = newLimit;
ExportSettings();
}

public static void ResetRateLimits()
{
requestLimits = DefaultRequestLimits;
ExportSettings();
}

public static void ExportSettings()
{
if (File.Exists(settingsFilePath))
{
bool inUse = true;
while (inUse)
{
try
{
using FileStream stream = new(settingsFilePath, FileMode.Open, FileAccess.Read, FileShare.None);
stream.Close();
inUse = false;
}
catch (IOException)
{
Thread.Sleep(100);
}
}
while(GlobalBase.IsFileInUse(settingsFilePath, null))
Thread.Sleep(100);
}
else
Directory.CreateDirectory(new FileInfo(settingsFilePath).DirectoryName!);
File.WriteAllText(settingsFilePath, JsonConvert.SerializeObject(this));
File.WriteAllText(settingsFilePath, Serialize());
}

public string GetFullCoverPath(Manga manga)
public static JObject AsJObject()
{
return Path.Join(this.coverImageCache, manga.coverFileNameInCache);
JObject jobj = new JObject();
jobj.Add("downloadLocation", JToken.FromObject(downloadLocation));
jobj.Add("workingDirectory", JToken.FromObject(workingDirectory));
jobj.Add("apiPortNumber", JToken.FromObject(apiPortNumber));
jobj.Add("userAgent", JToken.FromObject(userAgent));
jobj.Add("aprilFoolsMode", JToken.FromObject(aprilFoolsMode));
jobj.Add("version", JToken.FromObject(version));
jobj.Add("requestLimits", JToken.FromObject(requestLimits));
jobj.Add("bufferLibraryUpdates", JToken.FromObject(bufferLibraryUpdates));
jobj.Add("bufferNotifications", JToken.FromObject(bufferNotifications));
jobj.Add("chromiumStartTimeout", JToken.FromObject(ChromiumStartupTimeoutMs));
jobj.Add("chromiumPageTimeout", JToken.FromObject(ChromiumPageTimeoutMs));
return jobj;
}

public static string Serialize() => AsJObject().ToString();

public static void Deserialize(string serialized)
{
JObject jobj = JObject.Parse(serialized);
if (jobj.TryGetValue("downloadLocation", out JToken? dl))
downloadLocation = dl.Value<string>()!;
if (jobj.TryGetValue("workingDirectory", out JToken? wd))
workingDirectory = wd.Value<string>()!;
if (jobj.TryGetValue("apiPortNumber", out JToken? apn))
apiPortNumber = apn.Value<int>();
if (jobj.TryGetValue("userAgent", out JToken? ua))
userAgent = ua.Value<string>()!;
if (jobj.TryGetValue("aprilFoolsMode", out JToken? afm))
aprilFoolsMode = afm.Value<bool>()!;
if (jobj.TryGetValue("requestLimits", out JToken? rl))
requestLimits = rl.ToObject<Dictionary<RequestType, int>>()!;
if (jobj.TryGetValue("bufferLibraryUpdates", out JToken? blu))
bufferLibraryUpdates = blu.Value<bool>()!;
if (jobj.TryGetValue("bufferNotifications", out JToken? bn))
bufferNotifications = bn.Value<bool>()!;
if (jobj.TryGetValue("chromiumStartTimeout", out JToken? cst))
ChromiumStartupTimeoutMs = cst.Value<int>();
if (jobj.TryGetValue("chromiumPageTimeout", out JToken? cpt))
ChromiumPageTimeoutMs = cpt.Value<int>();
}
}
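Note: a sketch of the JSON shape that AsJObject()/Serialize() above produce, rebuilt by hand; only the key names are taken from the code, the values here are placeholders (the Linux defaults from the property initializers and an example rendering of DefaultUserAgent).

```csharp
// Hand-built JObject with the same keys AsJObject() emits; values are illustrative placeholders.
using Newtonsoft.Json.Linq;

JObject settings = new()
{
    ["downloadLocation"] = "/Manga",                      // Linux default from the initializer
    ["workingDirectory"] = "/usr/share/tranga-api",       // Linux default from the initializer
    ["apiPortNumber"] = 6531,
    ["userAgent"] = "Tranga (Unix; x64) / 1.0",           // example rendering of DefaultUserAgent
    ["aprilFoolsMode"] = true,
    ["version"] = 2,
    ["requestLimits"] = new JObject { ["MangaImage"] = 60, ["Default"] = 60 },  // subset, for illustration
    ["bufferLibraryUpdates"] = false,
    ["bufferNotifications"] = false,
    ["chromiumStartTimeout"] = 30000,
    ["chromiumPageTimeout"] = 30000
};
Console.WriteLine(settings.ToString());  // roughly what ends up in settings.json
```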
docker-compose.local.yaml (new file, 21 lines)
@@ -0,0 +1,21 @@
version: '3'
services:
tranga-api:
build:
dockerfile: Dockerfile
context: .
container_name: tranga-api
volumes:
- ./Manga:/Manga
- ./settings:/usr/share/tranga-api
ports:
- "6531:6531"
restart: unless-stopped
tranga-website:
image: glax/tranga-website:latest
container_name: tranga-website
ports:
- "9555:80"
depends_on:
- tranga-api
restart: unless-stopped
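Note: the new docker-compose.local.yaml builds the API image from the local Dockerfile instead of pulling a published tag; running something like `docker compose -f docker-compose.local.yaml up -d --build` from the repository root (standard Docker Compose usage, not part of this diff) brings up both containers with the same volume and port mappings as the published compose file.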
@@ -1,15 +1,16 @@
version: '3'
services:
tranga-api:
image: glax/tranga-api:cuttingedge
image: glax/tranga-api:latest
container_name: tranga-api
volumes:
- ./Manga:/Manga
- ./settings:/usr/share/tranga-api
ports:
- "6531:6531"
restart: unless-stopped
tranga-website:
image: glax/tranga-website:cuttingedge
image: glax/tranga-website:latest
container_name: tranga-website
ports:
- "9555:80"