Compare commits

..

180 commits

Author SHA1 Message Date
Simon
4332b1beef
not add start time when syncing (#442)
* not add start time when syncing

* use itsoffset for all syncing related time shifts
2024-07-01 18:43:16 +02:00
bytedream
2cf9125de3 Update README.md 2024-07-01 16:38:29 +02:00
bytedream
756022b955 Fix panic when in anonymously 2024-06-20 00:12:33 +02:00
bytedream
509683d23a Update dependencies and version 2024-06-19 23:38:57 +02:00
bytedream
8047680799 Add drm check 2024-06-19 23:18:35 +02:00
bytedream
287df84382 Rework episode filtering 2024-06-14 00:21:07 +02:00
bytedream
e7ac6d8874 Deprecate search stream.is_drm option 2024-05-24 22:17:25 +02:00
bytedream
fb8e535644 Fix subtitle title not being human-readable 2024-05-24 22:09:23 +02:00
bytedream
67c267be20 Remove unused variable 2024-05-24 22:05:04 +02:00
bytedream
a1c7b2069d Update dependencies and version 2024-05-23 00:01:42 +02:00
bytedream
74e5e05b0f Invalidate stream when using search command (#428) 2024-05-22 23:59:12 +02:00
bytedream
7d2ae719c8 Remove internal jwt error retry 2024-05-22 16:54:58 +02:00
bytedream
5593046aae Update dependencies and version 2024-05-22 16:52:43 +02:00
bytedream
f8bd092987 Add custom error message if too many streams are active 2024-05-21 21:51:18 +02:00
bytedream
cbe57e2b6e Update dependencies and version 2024-05-21 21:34:05 +02:00
bytedream
f7ce888329 Bypass stream limits 2024-05-21 21:33:08 +02:00
bytedream
301dac478f Update dependencies and version 2024-05-20 15:57:28 +02:00
bytedream
9819b62259 Fix typo in additional subtitle field (#421) 2024-05-17 23:45:41 +02:00
bytedream
5279a9b759 Update dependencies and version 2024-05-14 23:59:01 +02:00
bytedream
a98e31f959 Only include one CC subtitle 2024-05-14 22:36:59 +02:00
bytedream
590242712b Add warning message the --skip-existing-method has no effect without --skip-existing (#418) 2024-05-14 21:36:12 +02:00
bytedream
817963af4f Fix video containing hardsub if not requested (#415) 2024-05-14 21:22:23 +02:00
bytedream
48bb7a5ef6 Fix crashes when converting subtitles (#408) 2024-05-14 16:11:55 +02:00
Simon
53a710a373
Fix audio syncing using wrong internal index (#407) 2024-05-07 16:13:10 +02:00
bytedream
ab63dcd2e0 Update dependencies and version 2024-05-06 20:31:12 +02:00
bytedream
4d1df83342 Fix build badge 2024-05-05 00:05:10 +02:00
bytedream
89b9c5db39 Update dependencies and version 2024-05-04 23:40:02 +02:00
bytedream
96d3de48cf Add missing code examples 2024-05-04 23:40:02 +02:00
bytedream
dad91dba91 Rename --sync-tolerance to --merge-sync-tolerance and --merge-sync-precision to --merge-sync-precision 2024-05-04 23:39:55 +02:00
bytedream
757d3094ea Rename directory for workflow resources 2024-05-03 21:14:51 +02:00
bytedream
fca1b74cac Separate build and lint pipelines 2024-05-03 21:11:46 +02:00
bytedream
0f7d7d928c Add format check and linting action pipelines 2024-05-03 21:08:34 +02:00
bytedream
f77804fcb5 Apply lints 2024-05-03 20:58:54 +02:00
bytedream
4066b8511c Build binaries locked 2024-05-03 20:51:31 +02:00
bytedream
dcbe433a9c Manually git hash when publishing source AUR package 2024-05-03 20:38:11 +02:00
bytedream
55f1e1d32d Add option to overwrite git hash on build 2024-05-03 20:33:19 +02:00
bytedream
442173c08c Fix empty subtitles if multiple subtitle formats are used (#398) 2024-05-03 13:46:43 +02:00
bytedream
173292ff32 Prettify negated subtitle cc boolean 2024-05-02 17:10:09 +02:00
Simon
72c574c883
Switch to audio fingerprinting based syncing (#393)
* rename merge-auto-tolerance -> merge-time-tolerance

* move format_time_delta to own file

* switch to audio fingerprinting based syncing

* move format_time_delta to own file

* simpler approach to determine negative time deltas

* add missing readme part for --sync-precision

* fix all clippy "errors"

* Use rust-native chromaprint port instead of ffmpeg

* buffer with 128kb instead of 32kb

* improve helps

* improve help

---------

Co-authored-by: bytedream <bytedream@protonmail.com>
2024-05-02 00:35:13 +02:00
Simon
f237033aff
move format_time_delta to own file (#392) 2024-04-28 15:15:23 +02:00
Simon
bf28dbf1ce
rename merge-auto-tolerance to merge-time-tolerance (#391) 2024-04-26 15:50:44 +02:00
bytedream
cf8bfb02ac Automatically cut too long path segments 2024-04-25 20:32:51 +02:00
bytedream
74aaed4e7a Update dependencies and version 2024-04-25 00:49:31 +02:00
bytedream
177ceb1920 Update dependencies and version 2024-04-23 16:13:42 +02:00
bytedream
541f0e2747 Fix wrong audio and subtitle video reference number (#384) 2024-04-23 16:00:53 +02:00
Simon
777b39aba1
Fix: stop skipping every episode with archive command while using a non premium account (#388) 2024-04-22 23:47:49 +02:00
bytedream
4f3475131c Disable LTO in source aur pkgbuild 2024-04-21 13:25:12 +02:00
Simon
177aa37631
Move help for --language-tagging (#385) 2024-04-21 12:40:53 +02:00
bytedream
8fff807ae6 Add message if stored login is expired 2024-04-20 00:23:10 +02:00
bytedream
db6e45e7f4 Update dependencies and version 2024-04-20 00:02:18 +02:00
bytedream
8ada822396 Remove etp-rt login 2024-04-20 00:02:06 +02:00
bytedream
9bdd3aa85b Switch to openssl-tls on nix flake (#359) 2024-04-18 18:45:35 +02:00
Amelia
4fc20c7c1c
Support override fonts (#378)
* Support override fonts

* Compile fix

* Actual compile fix

* Use snake_case
2024-04-14 21:55:55 +02:00
bytedream
6515d3025f Add warn message when using a non-premium account with download or archive 2024-04-14 21:43:24 +02:00
bytedream
fe17f3951e Update dependencies and version 2024-04-14 21:25:17 +02:00
bytedream
cdad7fc000 Skip premium episode if account has no premium subscription 2024-04-14 21:18:13 +02:00
bytedream
d7dac2acd4 Update dependencies and version 2024-04-11 17:06:43 +02:00
bytedream
dbbb445c55 Fix invalid 0% generate video file progress bar 2024-04-10 01:36:20 +02:00
bytedream
733d9f9787 Update dependencies and version 2024-04-10 01:36:10 +02:00
bytedream
0257fdea0d Remove chapters if sync doesn't work 2024-04-09 23:00:01 +02:00
bytedream
9e5feef4d4 Change archive --sync-start defaults 2024-04-09 22:58:28 +02:00
bytedream
b9f5fadbb3 Fix archive --language-tagging sometimes causing crash 2024-04-09 22:50:25 +02:00
bytedream
ea39dcbc71 Embed chapters only to archive merge auto if --sync-start flag is set 2024-04-09 18:59:41 +02:00
bytedream
a73773ce1d Add id to every flag in README 2024-04-09 18:59:33 +02:00
bytedream
0115730d60 Add archive --sync-start flag documentation to README 2024-04-09 18:55:12 +02:00
bytedream
18534b259b Remove deprecated {resolution} output format option 2024-04-08 14:34:50 +02:00
bytedream
77103ff1f1 Update dependencies and version 2024-04-08 14:18:10 +02:00
bytedream
771594a231 Remove hardcoded pixel format (#352) 2024-04-08 14:03:30 +02:00
bytedream
1a511e12f9 Add archive start sync flag 2024-04-08 13:57:06 +02:00
bytedream
fe49161e93 End ffmpeg progress always with at least 100% 2024-04-08 00:37:19 +02:00
bytedream
25cde6163c Add account scope for search command 2024-04-06 21:25:19 +02:00
bytedream
4b74299733 Only run ci action on branch push 2024-04-05 22:53:53 +02:00
bytedream
c40ea8b132 Update dependencies and version 2024-04-05 22:32:18 +02:00
bytedream
6b6d24a575 Update dependencies and version 2024-04-04 21:16:32 +02:00
bytedream
8c1868f2fd Update dependencies and version 2024-04-03 17:14:07 +02:00
bytedream
af8ab24826 Update search command url help 2024-04-03 17:14:04 +02:00
bytedream
c0f3346846 Update README.md 2024-04-03 16:46:49 +02:00
bytedream
111e461b30 Update dependencies and version 2024-04-03 16:26:33 +02:00
Amelia Frost
f16cd25ea4 Fix for some chapters being sent by CR as floats (#351)
* Fix for some chapters being sent by CR as floats.
See: 3f3a80f7f7

* Compile fix for error[E0277]: cannot multiply `f32` by `u32`

* Format

Co-authored-by: bytedream <bytedream@protonmail.com>
2024-04-03 16:26:33 +02:00
bytedream
e694046b07 Move to new, DRM-free, endpoint 2024-04-03 16:26:33 +02:00
Amelia
ba8028737d
Update missing fonts (#360)
* Update missing fonts

* Compile fix
2024-04-03 15:49:51 +02:00
bytedream
89be8ac429 Update README.md 2024-03-25 13:31:23 +01:00
bytedream
26a858c1a1 Update dependencies and version 2024-03-10 22:04:58 +01:00
bytedream
d3696c783c Include archive chapters only if flag is set 2024-03-10 21:57:20 +01:00
bytedream
88a28e843f Manually specify ffmpeg output color format 2024-03-10 19:40:36 +01:00
bytedream
a0fa2bfd8a Update dependencies and version 2024-03-10 13:40:26 +01:00
bytedream
013273b832 Format code 2024-03-10 13:40:16 +01:00
bytedream
3bf2458774 Pass command args manually to cli entrypoint instead of parsing from environment 2024-03-10 13:28:18 +01:00
bytedream
e3a7fd9246 Add option so specify different proxies for api and download requests (#282) 2024-03-10 13:21:53 +01:00
bytedream
f1d266c940 Add options to specify audio & subtitle locales as IETF language tag and add --language_tagging flag for archive and download to modify the output file language tagging (#330) 2024-03-10 04:04:58 +01:00
bytedream
3f33db6728 Remove deprecated openssl and openssl-static features 2024-03-10 02:07:05 +01:00
bytedream
56f0ed1795 Add --include-chapters flag to archive and download (#301) 2024-03-10 01:59:47 +01:00
Username404-59
9c44fa7dae
README.md: Fix a typo (#344) 2024-03-03 22:40:41 +01:00
bytedream
3099aac0e7 Revert macos action downgrade and disable caching instead 2024-02-26 20:42:45 +01:00
Hannes Braun
9a6959970a
Remove superfluous mut keywords (#341) 2024-02-26 20:09:54 +01:00
bytedream
d2589a3a6f Use macos 12 instead of 13 for ci 2024-02-25 19:01:35 +01:00
bytedream
52da6eacc9 Fix search command always showing non-premium account warning message 2024-02-25 19:01:35 +01:00
bytedream
5634ce3277
Add archive --skip-existing-method flag (#292) (#325)
* Add archive `--skip-existing-method` flag (#292)

* Fix re-download only issued when local file has more audios/subtitles & respect `--no-closed-captions` flag
2024-02-25 18:48:18 +01:00
bytedream
6a7aa25e1a
Add ffmpeg amd hardware acceleration presets (#324) 2024-02-25 18:46:48 +01:00
bytedream
6a50567916
Merge pull request #335 from KevinStaude/patch-1
Update README.md
2024-02-23 18:46:02 +01:00
bytedream
2084328069 Fix ffmpeg progress panic (#337) 2024-02-23 17:36:37 +01:00
Kevin
d3ab2245a8
Update README.md
minor fix
--output-specials -o "something" isn't working
--output-specials "something" is correct
2024-02-15 23:52:47 +01:00
bytedream
c31b1f4db9 Update nix flake.lock (#333) 2024-02-14 20:27:00 +01:00
bytedream
8187269128 Upload manpages and completions only once in ci 2024-02-01 14:45:12 +01:00
bytedream
5d68f0334a Update actions used in ci 2024-01-30 23:55:52 +01:00
bytedream
a2464bad4e Add M1 runner to mac build ci 2024-01-30 23:49:20 +01:00
bytedream
0f06c7ac71 Change delimiter of audio template option to _ and make it configurable via the CRUNCHY_CLI_FORMAT_DELIMITER env variable (#311) 2024-01-29 11:52:32 +01:00
kralverde
f8309f2e80
add archive no-closed-captions flag (#323) 2024-01-29 08:26:40 +01:00
kralverde
982e521e0b
add universal output flag (#319)
* add universal filenames setting

* rename flag and help
2024-01-29 08:24:56 +01:00
kralverde
a4abb14ae3
use a 'close enough' method to audio auto merge (#286) (#320)
* use a 'close enough' method to audio merge

* change default, rename flag, and use more gooder words
2024-01-29 08:18:42 +01:00
bytedream
7cf7a8e71c Take closed_captions api field for subtitles into account (#297) 2024-01-28 02:04:42 +01:00
bytedream
3b9fc52890 Add notice & warning that an anonymous or non-premium account may result to incomplete results with search (#288) 2024-01-28 01:03:59 +01:00
bytedream
444dc65a29 Clarify risks of using the --experimental-fixes flag 2024-01-28 01:02:51 +01:00
bytedream
658bb86800 Run ci on every branch 2024-01-26 00:07:15 +01:00
bytedream
6e01e9e8a7 Fix comment misspelling 2024-01-14 22:39:05 +01:00
bytedream
937e9a2fdc Fix verbosity not applied if flag is used globally 2024-01-14 22:33:32 +01:00
bytedream
fbe182239a Update dependencies and version 2024-01-14 22:15:08 +01:00
bytedream
5490243df8 Fix episode filtering not working if specifying no season 2024-01-14 21:02:33 +01:00
bytedream
20f796f603 Re-add download timeout 2024-01-14 20:36:00 +01:00
bytedream
f3faa5bf94 Update dependencies and version 2024-01-11 13:53:05 +01:00
bytedream
3f401ccbd7 Fix output progressbar always on 100% when using download 2024-01-10 23:17:20 +01:00
bytedream
35447c5cb0 Fix Windows output progress bar (#305) 2024-01-10 23:17:02 +01:00
bytedream
333d574e56 Update dependencies and version 2024-01-10 13:37:16 +01:00
bytedream
7c42f29596 Only use tempfile name as windows named pipe name (#305) 2024-01-10 13:15:30 +01:00
bytedream
ef2898f0e1 Update dependencies and version 2024-01-09 15:30:52 +01:00
bytedream
650338d3e6 Prepend ./ to the output path on linux if the input path is only a filename (#303) 2024-01-09 15:24:08 +01:00
bytedream
c37d55aade Update version 2024-01-03 01:20:34 +01:00
bytedream
d90f45fa31 Update checkout action version 2024-01-03 01:08:40 +01:00
bytedream
99f96e3e35 Fix login command not working 2024-01-03 01:07:12 +01:00
bytedream
d3837f2495 Add new flags and format options to README 2024-01-03 00:34:16 +01:00
bytedream
fc6da9a76d Use latest Rust version in Linux and Mac toolchain 2024-01-03 00:00:00 +01:00
bytedream
283a3802b2 Update dependencies and version 2024-01-02 23:59:44 +01:00
bytedream
172e3612d0 Fix open-ended episode filter (#293) 2024-01-02 22:48:21 +01:00
bytedream
2e6246c439 Do not sanitize user path input 2024-01-02 22:26:52 +01:00
bytedream
d503d459cd Differ between illegal Windows and non Windows file characters 2024-01-02 22:26:16 +01:00
bytedream
19935df545 Add more output format options (#284) 2023-12-23 15:28:10 +01:00
bytedream
0da81a4814 Add --include-fonts flag for archive (#277) 2023-12-19 22:41:02 +01:00
bytedream
0a26083232 Fix ffmpeg progress not working with fast encoder 2023-12-10 14:27:05 +01:00
bytedream
8613ea80cc Add forced flag to all CC subtitles (#274) 2023-12-10 13:52:33 +01:00
bytedream
b97c2a922e Fix windows ci 2023-12-10 03:36:39 +01:00
bytedream
be3248a4f9 Add download/request speed limiter (#250) 2023-12-10 02:52:42 +01:00
bytedream
f9e431e181 Add ability to use root flags after subcommands 2023-12-09 17:23:18 +01:00
bytedream
77609be598 Replace all login username references with email 2023-12-09 17:22:53 +01:00
bytedream
b4057599a1 Add --ffmpeg-threads flag to control the ffmpeg thread number 2023-12-09 01:34:23 +01:00
bytedream
6c7ab04b99 Lint 2023-12-08 23:04:04 +01:00
bytedream
9487dd3dbf Show ffmpeg progress (#270) 2023-12-08 23:03:44 +01:00
bytedream
9ca3b79291 Fix spelling 2023-12-03 00:15:57 +01:00
bytedream
8f77028fcb Show error message instead of panicking when capturing video length of invalid file (#258) 2023-12-01 01:17:49 +01:00
bytedream
d5df3df95f Fix fixed subtitle formatting and sorting (#272) 2023-12-01 01:02:53 +01:00
bytedream
440ccd99b5 Update dependencies and version 2023-11-20 22:05:06 +01:00
bytedream
2c37093959 Manually burn-in subtitles only if no pre-burned video is available (#268) 2023-11-19 19:24:15 +01:00
bytedream
14e71c05b8 Fix aur binary checksums (#266) 2023-11-16 13:51:30 +01:00
bytedream
d52fe7fb92 Update dependencies and version 2023-11-06 22:56:51 +01:00
bytedream
c08931b610 Add new commands and format option to readme 2023-11-06 22:55:23 +01:00
bytedream
fc6511a361 Format code 2023-11-06 22:12:28 +01:00
bytedream
56411c6547 Add missing whitespaces in command help 2023-11-06 22:01:44 +01:00
ByteDream
4d01e2a4ec
Merge pull request #257 from crunchy-labs/feature/relative_sequence_number
Add flags and option to control special episode behavior (#206, #241, #246)
2023-11-06 20:58:55 +00:00
bytedream
cd35dfe276 Rename --special-output to --output-specials 2023-11-06 21:49:47 +01:00
bytedream
f31437fba2 Remove leading and trailing whitespaces from output file 2023-11-06 21:20:43 +01:00
bytedream
e5d9c27af7 Fix ass filter path escape on windows (#262) 2023-11-06 21:15:50 +01:00
bytedream
787d8ab02c Add --special-output and --skip-specials flag 2023-11-04 15:24:14 +01:00
kennedy
7594412f58
updated brew url (#263)
* updated brew url

Its most appropriate to forward users to the brew's information page generated for crunchy-cli. There are stats on amount of downloads, see where the manifest is location, and what architectures are built for it.

* Update README.md

Co-authored-by: ByteDream <63594396+ByteDream@users.noreply.github.com>

---------

Co-authored-by: ByteDream <63594396+ByteDream@users.noreply.github.com>
2023-11-02 13:37:40 +01:00
kennedy
d8b76f8cc7
Add homebrew instructions (#261)
Added details about homebrew and what archs are supported.

made minor style linting: add space surrounding shell code blocks, and headers.
2023-10-29 06:12:25 +01:00
Catd
f56d9ecabf
Changes in Readme regarding subtitles and flag usage (#255)
* Update README.md

updated Flags and subtitles sections

* Update README.md

* Update README.md

Comma in a better place
2023-10-16 17:04:45 +02:00
bytedream
5a3a304443 Use episode sequence number as filter number for url episode filtering 2023-10-15 23:52:44 +02:00
bytedream
d0fe7f54f6 Show fractal in relative_sequence_number if present 2023-10-15 23:34:22 +02:00
bytedream
685c79d673 Add 2-digit padding to relative_episode_number, sequence_number and relative_sequence_number format option 2023-10-15 22:56:45 +02:00
bytedream
5d17bb1ac7 Merge remote-tracking branch 'origin/master' into feature/relative_sequence_number
# Conflicts:
#	crunchy-cli-core/src/utils/format.rs
2023-10-15 22:53:47 +02:00
bytedream
568bce0008 Manually implement filename sanitizing to allow the usage of file separators 2023-10-15 22:43:04 +02:00
Valentine Briese
bbb5a78765
Add --threads (-t) option to downloading commands (#256)
* Add `single-threaded` option to downloading commands

* Replace `--single-threaded` boolean option with `--threads` optional `usize` option

* Simplify `threads` field unwrapping

* Make `--threads` `usize` with a default value
2023-10-15 20:52:53 +02:00
bytedream
81385ef6ce Add relative_sequence_number format option (#206, #241, #246) 2023-10-15 20:49:03 +02:00
bytedream
13335c020b Sanitize the full output filename (#253) 2023-10-13 11:41:56 +02:00
Valentine Briese
e5db8e9504
Fix ffmpeg-preset option in download command (#254) 2023-10-12 21:20:06 +02:00
ByteDream
5bc68ad592
Merge pull request #251 from valentinegb/apple-hardware-acceleration
Add FFmpeg Apple hardware acceleration and make HEVC codec compatible with Apple standards
2023-10-12 13:28:37 +02:00
Valentine Briese
7095e2b8b6 Use -q:v FFmpeg option for Apple hardware acceleration 2023-10-11 18:54:47 -07:00
Valentine Briese
610593a795 Make H265 codec compatible with Apple HEVC standards 2023-10-11 18:26:51 -07:00
Valentine Briese
9596175f7f Add FFmpeg Apple hardware acceleration 2023-10-11 18:24:45 -07:00
bytedream
f48474ba77 Remove numbers from binary PKGBUILD env variables 2023-09-27 00:03:26 +02:00
bytedream
d79197edc6 Use async mutex and channel instead of the std equivalents 2023-09-23 16:56:42 +02:00
bytedream
a93a1fa807 Fix env variable resolving in publish pipeline 2023-09-22 12:11:00 +02:00
40 changed files with 4973 additions and 2292 deletions

.github/workflow-resources/PKGBUILD.binary

@@ -24,8 +24,8 @@ source_aarch64=(
     "LICENSE::https://raw.githubusercontent.com/crunchy-labs/crunchy-cli/v${pkgver}/LICENSE"
 )
 noextract=("manpages.zip" "completions.zip")
-sha256sums_x86_64=('$CI_x86_64_SHA_SUM' '$CI_MANPAGES_SHA_SUM' '$CI_COMPLETIONS_SHA_SUM' '$CI_LICENSE_SHA_SUM')
-sha256sums_aarch64=('$CI_aarch64_SHA_SUM' '$CI_MANPAGES_SHA_SUM' '$CI_COMPLETIONS_SHA_SUM' '$CI_LICENSE_SHA_SUM')
+sha256sums_x86_64=('$CI_AMD_BINARY_SHA_SUM' '$CI_MANPAGES_SHA_SUM' '$CI_COMPLETIONS_SHA_SUM' '$CI_LICENSE_SHA_SUM')
+sha256sums_aarch64=('$CI_ARM_BINARY_SHA_SUM' '$CI_MANPAGES_SHA_SUM' '$CI_COMPLETIONS_SHA_SUM' '$CI_LICENSE_SHA_SUM')

 package() {
     cd "$srcdir"

.github/workflow-resources/PKGBUILD.source

@@ -12,14 +12,26 @@ depends=('ffmpeg' 'openssl')
 makedepends=('cargo')
 source=("${pkgname}-${pkgver}.tar.gz::https://github.com/crunchy-labs/crunchy-cli/archive/refs/tags/v${pkgver}.tar.gz")
 sha256sums=('$CI_SHA_SUM')
+# lto causes linking errors when executed by this buildscript. besides, lto is already done by cargo itself (which doesn't cause linking errors)
+options=(!lto)
+
+prepare() {
+    cd "$srcdir/${pkgname}-$pkgver"
+    export RUSTUP_TOOLCHAIN=stable
+    export CARGO_HOME="$srcdir/cargo-home"
+    cargo fetch --locked --target "$(rustc -vV | sed -n 's/host: //p')"
+}

 build() {
     cd "$srcdir/${pkgname}-$pkgver"
-    export CARGO_HOME="$srcdir/cargo-home"
     export RUSTUP_TOOLCHAIN=stable
+    export CARGO_HOME="$srcdir/cargo-home"
+    export CRUNCHY_CLI_GIT_HASH=$CI_GIT_HASH
-    cargo build --release
+    cargo build --frozen --release
 }

 package() {
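The new `prepare()`/`build()` split above pins the toolchain, fetches dependencies against the lockfile and injects a git hash at build time. A rough manual equivalent of those steps outside of makepkg, shown only for illustration (`$CI_GIT_HASH` is normally substituted by the release workflow further below):

```shell
# rough manual equivalent of the new prepare()/build() steps (illustrative, not part of the repository)
export RUSTUP_TOOLCHAIN=stable
export CARGO_HOME="$PWD/cargo-home"
export CRUNCHY_CLI_GIT_HASH="$(git rev-parse --short HEAD)"  # stands in for the $CI_GIT_HASH placeholder
cargo fetch --locked --target "$(rustc -vV | sed -n 's/host: //p')"
cargo build --frozen --release
```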

.github/workflows/build.yml (renamed from ci.yml)

@@ -1,9 +1,9 @@
-name: ci
+name: build

 on:
   push:
     branches:
-      - master
+      - '*'
   pull_request:
   workflow_dispatch:
@@ -19,10 +19,10 @@ jobs:
           toolchain: aarch64-unknown-linux-musl
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Cargo cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: |
             ~/.cargo/bin/
@@ -32,41 +32,60 @@ jobs:
             target/
           key: ${{ matrix.toolchain }}-cargo-${{ hashFiles('**/Cargo.lock') }}
+      - name: Setup Rust
+        uses: dtolnay/rust-toolchain@stable
+        with:
+          toolchain: stable
       - name: Install cross
         run: cargo install --force cross
       - name: Build
-        run: cross build --release --no-default-features --features openssl-tls-static --target ${{ matrix.toolchain }}
+        run: cross build --locked --release --no-default-features --features openssl-tls-static --target ${{ matrix.toolchain }}
       - name: Upload binary artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: crunchy-cli-linux-${{ matrix.arch }}
           path: ./target/${{ matrix.toolchain }}/release/crunchy-cli
           if-no-files-found: error
       - name: Upload manpages artifact
-        uses: actions/upload-artifact@v3
+        if: ${{ matrix.arch == 'x86_64' }} # only upload the manpages once
+        uses: actions/upload-artifact@v4
         with:
           name: manpages
           path: ./target/${{ matrix.toolchain }}/release/manpages
           if-no-files-found: error
       - name: Upload completions artifact
-        uses: actions/upload-artifact@v3
+        if: ${{ matrix.arch == 'x86_64' }} # only upload the completions once
+        uses: actions/upload-artifact@v4
         with:
           name: completions
           path: ./target/${{ matrix.toolchain }}/release/completions
           if-no-files-found: error

   build-mac:
-    runs-on: macos-latest
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        # macos-13 uses x86_64, macos-14 aarch64
+        # see https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources
+        include:
+          - os: macos-13
+            arch: x86_64
+            toolchain: x86_64-apple-darwin
+          - os: macos-14
+            arch: aarch64
+            toolchain: aarch64-apple-darwin
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Cargo cache
-        uses: actions/cache@v3
+        if: ${{ matrix.os != 'macos-13' }} # when using cache, the 'Setup Rust' step fails for macos 13
+        uses: actions/cache@v4
         with:
           path: |
             ~/.cargo/bin/
@@ -76,24 +95,29 @@ jobs:
             target/
           key: x86_64-apple-darwin-cargo-${{ hashFiles('**/Cargo.lock') }}
+      - name: Setup Rust
+        uses: dtolnay/rust-toolchain@stable
+        with:
+          toolchain: stable
       - name: Build
-        run: cargo build --release --target x86_64-apple-darwin
+        run: cargo build --locked --release --target ${{ matrix.toolchain }}
       - name: Upload binary artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: crunchy-cli-darwin-x86_64
-          path: ./target/x86_64-apple-darwin/release/crunchy-cli
+          name: crunchy-cli-darwin-${{ matrix.arch }}
+          path: ./target/${{ matrix.toolchain }}/release/crunchy-cli
           if-no-files-found: error

   build-windows:
     runs-on: windows-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Cargo cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: |
             ~/.cargo/bin/
@@ -106,14 +130,15 @@ jobs:
       - name: Install system dependencies
         uses: msys2/setup-msys2@v2
         with:
+          update: true
           install: mingw-w64-x86_64-rust base-devel
       - name: Build
         shell: msys2 {0}
-        run: cargo build --release --target x86_64-pc-windows-gnu
+        run: cargo build --locked --release --target x86_64-pc-windows-gnu
       - name: Upload binary artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
           name: crunchy-cli-windows-x86_64
           path: ./target/x86_64-pc-windows-gnu/release/crunchy-cli.exe
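The Linux jobs above can be reproduced locally with the exact flags from the workflow; for example, for the aarch64 musl target listed in the matrix (illustrative, assumes a working Rust toolchain and Docker for cross):

```shell
# reproduce the aarch64 Linux build locally (flags taken from the workflow above)
cargo install --force cross
cross build --locked --release --no-default-features --features openssl-tls-static --target aarch64-unknown-linux-musl
```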

.github/workflows/lint.yml (new file, 58 lines)

@@ -0,0 +1,58 @@
+name: lint
+
+on:
+  push:
+    branches:
+      - '*'
+  pull_request:
+
+jobs:
+  fmt:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Cargo cache
+        uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+            target/
+          key: x86_64-unknown-linux-gnu-cargo-${{ hashFiles('**/Cargo.lock') }}
+      - name: Setup Rust
+        uses: dtolnay/rust-toolchain@stable
+        with:
+          toolchain: stable
+      - name: Check fmt
+        run: cargo fmt --check
+
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Cargo cache
+        uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+            target/
+          key: x86_64-unknown-linux-gnu-cargo-${{ hashFiles('**/Cargo.lock') }}
+      - name: Setup Rust
+        uses: dtolnay/rust-toolchain@stable
+        with:
+          toolchain: stable
+      - name: Lint
+        run: cargo clippy -- -D warnings
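The two jobs boil down to checks that can also be run locally before pushing, using the same commands as the workflow above:

```shell
# the same checks the lint workflow runs
cargo fmt --check
cargo clippy -- -D warnings
```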

AUR publish workflow

@@ -10,7 +10,7 @@
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Get version
         run: echo "RELEASE_VERSION=$(echo ${{ github.ref_name }} | cut -c 2-)" >> $GITHUB_ENV
@@ -20,11 +20,15 @@ jobs:
           curl -LO https://github.com/crunchy-labs/crunchy-cli/archive/refs/tags/${{ github.ref_name }}.tar.gz
           echo "CRUNCHY_CLI_SHA256=$(sha256sum ${{ github.ref_name }}.tar.gz | cut -f 1 -d ' ')" >> $GITHUB_ENV
+      - name: Get release commit hash
+        run: echo "CRUNCHY_CLI_GIT_HASH=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
       - name: Generate crunchy-cli PKGBUILD
         env:
           CI_PKG_VERSION: ${{ env.RELEASE_VERSION }}
           CI_SHA_SUM: ${{ env.CRUNCHY_CLI_SHA256 }}
-        run: envsubst '$CI_PKG_VERSION,$CI_SHA_SUM' < .github/scripts/PKGBUILD.source > PKGBUILD
+          CI_GIT_HASH: ${{ env.CRUNCHY_CLI_GIT_HASH }}
+        run: envsubst '$CI_PKG_VERSION,$CI_SHA_SUM,$CI_GIT_HASH' < .github/workflow-resources/PKGBUILD.source > PKGBUILD
       - name: Publish crunchy-cli to AUR
         uses: KSXGitHub/github-actions-deploy-aur@v2.7.0
@@ -34,7 +38,7 @@ jobs:
           commit_username: release-action
           commit_email: ${{ secrets.AUR_EMAIL }}
           ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
-          commit_message: Update to version {{ env.RELEASE_VERSION }}
+          commit_message: Update to version ${{ env.RELEASE_VERSION }}
       - name: Generate crunchy-cli-bin sha sums
         run: |
@@ -43,8 +47,8 @@ jobs:
           curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-completions.zip
           curl -LO https://github.com/crunchy-labs/crunchy-cli/releases/download/${{ github.ref_name }}/crunchy-cli-${{ github.ref_name }}-manpages.zip
           curl -LO https://raw.githubusercontent.com/crunchy-labs/crunchy-cli/${{ github.ref_name }}/LICENSE
-          echo "CRUNCHY_CLI_BIN_x86_64_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-x86_64-linux | cut -f 1 -d ' ')" >> $GITHUB_ENV
-          echo "CRUNCHY_CLI_BIN_aarch64_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-aarch64-linux | cut -f 1 -d ' ')" >> $GITHUB_ENV
+          echo "CRUNCHY_CLI_BIN_x86_64_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-linux-x86_64 | cut -f 1 -d ' ')" >> $GITHUB_ENV
+          echo "CRUNCHY_CLI_BIN_aarch64_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-linux-aarch64 | cut -f 1 -d ' ')" >> $GITHUB_ENV
           echo "CRUNCHY_CLI_BIN_COMPLETIONS_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-completions.zip | cut -f 1 -d ' ')" >> $GITHUB_ENV
           echo "CRUNCHY_CLI_BIN_MANPAGES_SHA256=$(sha256sum crunchy-cli-${{ github.ref_name }}-manpages.zip | cut -f 1 -d ' ')" >> $GITHUB_ENV
           echo "CRUNCHY_CLI_BIN_LICENSE_SHA256=$(sha256sum LICENSE | cut -f 1 -d ' ')" >> $GITHUB_ENV
@@ -52,12 +56,12 @@ jobs:
       - name: Generate crunchy-cli-bin PKGBUILD
         env:
           CI_PKG_VERSION: ${{ env.RELEASE_VERSION }}
-          CI_x86_64_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_x86_64_SHA256 }}
-          CI_aarch64_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_aarch64_SHA256 }}
+          CI_AMD_BINARY_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_x86_64_SHA256 }}
+          CI_ARM_BINARY_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_aarch64_SHA256 }}
           CI_MANPAGES_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_MANPAGES_SHA256 }}
           CI_COMPLETIONS_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_COMPLETIONS_SHA256 }}
           CI_LICENSE_SHA_SUM: ${{ env.CRUNCHY_CLI_BIN_LICENSE_SHA256 }}
-        run: envsubst '$CI_PKG_VERSION,$CI_x86_64_SHA_SUM,$CI_aarch64_SHA_SUM,$CI_COMPLETIONS_SHA_SUM,$CI_MANPAGES_SHA_SUM,$CI_LICENSE_SHA_SUM' < .github/scripts/PKGBUILD.binary > PKGBUILD
+        run: envsubst '$CI_PKG_VERSION,$CI_AMD_BINARY_SHA_SUM,$CI_ARM_BINARY_SHA_SUM,$CI_COMPLETIONS_SHA_SUM,$CI_MANPAGES_SHA_SUM,$CI_LICENSE_SHA_SUM' < .github/workflow-resources/PKGBUILD.binary > PKGBUILD
       - name: Publish crunchy-cli-bin to AUR
         uses: KSXGitHub/github-actions-deploy-aur@v2.7.0
@@ -67,4 +71,4 @@ jobs:
           commit_username: release-action
           commit_email: ${{ secrets.AUR_EMAIL }}
           ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
-          commit_message: Update to version {{ env.RELEASE_VERSION }}
+          commit_message: Update to version ${{ env.RELEASE_VERSION }}
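The checksum step also flips the artifact names from `<arch>-linux` to `linux-<arch>`. A quick local sanity check against that naming scheme could look like this (illustrative; the tag `v3.6.7` is taken from the Cargo.toml bump below):

```shell
# download a release binary using the corrected linux-<arch> naming and print its sha256 (illustrative)
TAG=v3.6.7
curl -LO "https://github.com/crunchy-labs/crunchy-cli/releases/download/${TAG}/crunchy-cli-${TAG}-linux-x86_64"
sha256sum "crunchy-cli-${TAG}-linux-x86_64"
```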

Cargo.lock (generated, 1356 lines changed)

File diff suppressed because it is too large.

Cargo.toml

@@ -1,7 +1,7 @@
 [package]
 name = "crunchy-cli"
 authors = ["Crunchy Labs Maintainers"]
-version = "3.0.3"
+version = "3.6.7"
 edition = "2021"
 license = "MIT"
@@ -13,21 +13,17 @@ native-tls = ["crunchy-cli-core/native-tls"]
 openssl-tls = ["dep:native-tls-crate", "native-tls-crate/openssl", "crunchy-cli-core/openssl-tls"]
 openssl-tls-static = ["dep:native-tls-crate", "native-tls-crate/openssl", "crunchy-cli-core/openssl-tls-static"]
-# deprecated
-openssl = ["openssl-tls"]
-openssl-static = ["openssl-tls-static"]

 [dependencies]
-tokio = { version = "1.32", features = ["macros", "rt-multi-thread", "time"], default-features = false }
-native-tls-crate = { package = "native-tls", version = "0.2.11", optional = true }
+tokio = { version = "1.38", features = ["macros", "rt-multi-thread", "time"], default-features = false }
+native-tls-crate = { package = "native-tls", version = "0.2.12", optional = true }
 crunchy-cli-core = { path = "./crunchy-cli-core" }

 [build-dependencies]
 chrono = "0.4"
-clap = { version = "4.4", features = ["string"] }
-clap_complete = "4.4"
+clap = { version = "4.5", features = ["string"] }
+clap_complete = "4.5"
 clap_mangen = "0.2"
 crunchy-cli-core = { path = "./crunchy-cli-core" }
@@ -38,7 +34,7 @@ members = ["crunchy-cli-core"]
 [patch.crates-io]
 # fork of the `native-tls` crate which can use openssl as backend on every platform. this is done as `reqwest` only
 # supports `rustls` and `native-tls` as tls backend
-native-tls = { git = "https://github.com/crunchy-labs/rust-not-so-native-tls.git", rev = "fdba246" }
+native-tls = { git = "https://github.com/crunchy-labs/rust-not-so-native-tls.git", rev = "c7ac566" }

 [profile.release]
 strip = true
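Since the deprecated `openssl`/`openssl-static` feature aliases are removed in this range, builds that relied on them would now select the TLS backend explicitly, e.g. (illustrative, feature names taken from the manifest above):

```shell
# build with the OpenSSL-backed TLS feature instead of the removed `openssl` alias
cargo build --release --no-default-features --features openssl-tls
```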

README.md (401 lines changed)

@@ -1,3 +1,5 @@
+# This project has been sunset as Crunchyroll moved to a DRM-only system. See [#362](https://github.com/crunchy-labs/crunchy-cli/issues/362).
+
 # crunchy-cli
 👇 A Command-line downloader for [Crunchyroll](https://www.crunchyroll.com). 👇
@@ -18,8 +20,8 @@
 <a href="https://discord.gg/PXGPGpQxgk">
 <img src="https://img.shields.io/discord/994882878125121596?label=discord&style=flat-square" alt="Discord">
 </a>
-<a href="https://github.com/crunchy-labs/crunchy-cli/actions/workflows/ci.yml">
-<img src="https://img.shields.io/github/actions/workflow/status/crunchy-labs/crunchy-cli/ci.yml?branch=master&style=flat-square" alt="CI">
+<a href="https://github.com/crunchy-labs/crunchy-cli/actions/workflows/build.yml">
+<img src="https://img.shields.io/github/actions/workflow/status/crunchy-labs/crunchy-cli/build.yml?branch=master&style=flat-square" alt="Build">
 </a>
 </p>
@@ -53,6 +55,7 @@ Check out the [releases](https://github.com/crunchy-labs/crunchy-cli/releases) t
 If you're using Arch or an Arch based Linux distribution you are able to install our [AUR](https://aur.archlinux.org/) package.
 You need an [AUR helper](https://wiki.archlinux.org/title/AUR_helpers) like [yay](https://github.com/Jguer/yay) to install it.
+
 ```shell
 # this package builds crunchy-cli manually (recommended)
 $ yay -S crunchy-cli
@@ -60,25 +63,38 @@ Check out the [releases](https://github.com/crunchy-labs/crunchy-cli/releases) t
 $ yay -S crunchy-cli-bin
 ```
-- [Nix](https://nixos.org/)
-This requires [nix](https://nixos.org) and you'll probably need `--extra-experimental-features "nix-command flakes"`, depending on your configurations.
-```shell
-$ nix <run|shell|develop> github:crunchy-labs/crunchy-cli
-```
 - [Scoop](https://scoop.sh/)
 For Windows users, we support the [scoop](https://scoop.sh/#/) command-line installer.
 ```shell
 $ scoop bucket add extras
 $ scoop install extras/crunchy-cli
 ```
+- [Homebrew](https://brew.sh/)
+For macOS/linux users, we support the [brew](https://brew.sh/#/) command-line installer. Packages are compiled by the [homebrew project](https://formulae.brew.sh/formula/crunchy-cli), and will also install the `openssl@3` and `ffmpeg` dependencies.
+```shell
+$ brew install crunchy-cli
+```
+Supported archs: `x86_64_linux`, `arm64_monterey`, `sonoma`, `ventura`
+- [Nix](https://nixos.org/)
+This requires [nix](https://nixos.org) and you'll probably need `--extra-experimental-features "nix-command flakes"`, depending on your configurations.
+```shell
+$ nix <run|shell|develop> github:crunchy-labs/crunchy-cli
+```
 ### 🛠 Build it yourself
 Since we do not support every platform and architecture you may have to build the project yourself.
 This requires [git](https://git-scm.com/) and [Cargo](https://doc.rust-lang.org/cargo).
 ```shell
 $ git clone https://github.com/crunchy-labs/crunchy-cli
 $ cd crunchy-cli
@@ -92,96 +108,122 @@ $ cargo install --force --path .
 > All shown commands are examples 🧑🏼‍🍳
+### Global Flags
 crunchy-cli requires you to log in.
 Though you can use a non-premium account, you will not have access to premium content without a subscription.
-You can authenticate with your credentials (user:password) or by using a refresh token.
+You can authenticate with your credentials (email:password) or by using a refresh token.
-- Credentials
+- <span id="global-credentials">Credentials</span>
 ```shell
-$ crunchy-cli --credentials "user:password"
+$ crunchy-cli --credentials "email:password" <command>
 ```
-- Refresh Token
-To obtain a refresh token, you have to log in at [crunchyroll.com](https://www.crunchyroll.com/) and extract the `etp_rt` cookie.
-The easiest way to get it is via a browser extension which lets you export your cookies, like [Cookie-Editor](https://cookie-editor.cgagnier.ca/) ([Firefox](https://addons.mozilla.org/en-US/firefox/addon/cookie-editor/) / [Chrome](https://chrome.google.com/webstore/detail/cookie-editor/hlkenndednhfkekhgcdicdfddnkalmdm)).
-When installed, look for the `etp_rt` entry and extract its value.
-```shell
-$ crunchy-cli --etp-rt "4ebf1690-53a4-491a-a2ac-488309120f5d"
-```
-- Stay Anonymous
+- <span id="global-anonymous">Stay Anonymous</span>
 Login without an account (you won't be able to access premium content):
 ```shell
-$ crunchy-cli --anonymous
+$ crunchy-cli --anonymous <command>
 ```
 ### Global settings
 You can set specific settings which will be
-- Verbose output
+- <span id="global-verbose">Verbose output</span>
 If you want to include debug information in the output, use the `-v` / `--verbose` flag to show it.
-```shell
-$ crunchy-cli -v
-```
-This flag can't be used with `-q` / `--quiet`.
-- Quiet output
+```shell
+$ crunchy-cli -v <command>
+```
+This flag can't be used in combination with `-q` / `--quiet`.
+- <span id="global-quiet">Quiet output</span>
 If you want to hide all output, use the `-q` / `--quiet` flag to do so.
 This is especially useful if you want to pipe the output video to an external program (like a video player).
 ```shell
-$ crunchy-cli -q
+$ crunchy-cli -q <command>
 ```
-- Language
+This flag can't be used in combination with `-v` / `--verbose`.
+- <span id="global-lang">Language</span>
 By default, the resulting metadata like title or description are shown in your system language (if Crunchyroll supports it, else in English).
 If you want to show the results in another language, use the `--lang` flag to set it.
 ```shell
-$ crunchy-cli --lang de-DE
+$ crunchy-cli --lang de-DE <command>
 ```
-- Experimental fixes
+- <span id="global-experimental-fixes">Experimental fixes</span>
 Crunchyroll constantly changes and breaks its services or just delivers incorrect answers.
 The `--experimental-fixes` flag tries to fix some of those issues.
 As the *experimental* in `--experimental-fixes` states, these fixes may or may not break other functionality.
 ```shell
-$ crunchy-cli --experimental-fixes
+$ crunchy-cli --experimental-fixes <command>
 ```
 For an overview which parts this flag affects, see the [documentation](https://docs.rs/crunchyroll-rs/latest/crunchyroll_rs/crunchyroll/struct.CrunchyrollBuilder.html) of the underlying Crunchyroll library, all functions beginning with `stabilization_` are applied.
-- Proxy
+- <span id="global-proxy">Proxy</span>
 The `--proxy` flag supports https and socks5 proxies to route all your traffic through.
 This may be helpful to bypass the geo-restrictions Crunchyroll has on certain series.
+You are also able to set in which part of the cli a proxy should be used.
+Instead of a normal url you can also use: `<url>:` (only proxies api requests), `:<url>` (only proxies download traffic), `<url>:<url>` (proxies api requests through the first url and download traffic through the second url).
 ```shell
-$ crunchy-cli --proxy socks5://127.0.0.1:8080
+$ crunchy-cli --proxy socks5://127.0.0.1:8080 <command>
 ```
 Make sure that proxy can either forward TLS requests, which is needed to bypass the (cloudflare) bot protection, or that it is configured so that the proxy can bypass the protection itself.
+- <span id="global-user-agent">User Agent</span>
+There might be cases where a custom user agent is necessary, e.g. to bypass the cloudflare bot protection (#104).
+In such cases, the `--user-agent` flag can be used to set a custom user agent.
+```shell
+$ crunchy-cli --user-agent "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)" <command>
+```
+Default is the user agent, defined in the underlying [library](https://github.com/crunchy-labs/crunchyroll-rs).
+- <span id="global-speed-limit">Speed limit</span>
+If you want to limit how fast requests/downloads should be, you can use the `--speed-limit` flag. Allowed units are `B` (bytes), `KB` (kilobytes) and `MB` (megabytes).
+```shell
+$ crunchy-cli --speed-limit 10MB
+```
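As an illustration of the split proxy syntax and the speed limit described in the section above (values are made up):

```shell
# proxy only the download traffic (the ":<url>" form) and cap transfer speed (illustrative values)
$ crunchy-cli --proxy ":socks5://127.0.0.1:8080" --speed-limit 10MB <command>
```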
 ### Login
 The `login` command can store your session, so you don't have to authenticate every time you execute a command.
 ```shell
 # save the refresh token which gets generated when login with credentials.
-# your username/email and password won't be stored at any time on disk
-$ crunchy-cli login --credentials "user:password"
+# your email and password won't be stored at any time on disk
+$ crunchy-cli login --credentials "email:password"
-# save etp-rt cookie
-$ crunchy-cli login --etp-rt "4ebf1690-53a4-491a-a2ac-488309120f5d"
 ```
-With the session stored, you do not need to pass `--credentials` / `--etp-rt` / `--anonymous` anymore when you want to execute a command.
+With the session stored, you do not need to pass `--credentials` / `--anonymous` anymore when you want to execute a command.
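Put together, a typical first-time flow based on the commands above would be (illustrative):

```shell
# authenticate once, then run later commands without any authentication flags
$ crunchy-cli login --credentials "email:password"
$ crunchy-cli download https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```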
 ### Download
 The `download` command lets you download episodes with a specific audio language and optional subtitles.
 **Supported urls**
 - Single episode (with [episode filtering](#episode-filtering))
 ```shell
 $ crunchy-cli download https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
@@ -192,77 +234,155 @@ The `download` command lets you download episodes with a specific audio language
 ```
 **Options**
-- Audio language
+- <span id="download-audio">Audio language</span>
 Set the audio language with the `-a` / `--audio` flag.
 This only works if the url points to a series since episode urls are language specific.
 ```shell
 $ crunchy-cli download -a de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
 ```
 Default is your system locale. If not supported by Crunchyroll, `en-US` (American English) is the default.
-- Subtitle language
+- <span id="download-subtitle">Subtitle language</span>
 Besides the audio, you can specify the subtitle language by using the `-s` / `--subtitle` flag.
-The subtitles will be burned into the video track (cf. [hardsub](https://www.urbandictionary.com/define.php?term=hardsub)) and thus can not be turned off.
+In formats that support it (.mp4, .mov and .mkv ), subtitles are stored as soft-subs. All other formats are hardsubbed: the subtitles will be burned into the video track (cf. [hardsub](https://www.urbandictionary.com/define.php?term=hardsub)) and thus can not be turned off.
 ```shell
 $ crunchy-cli download -s de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
 ```
 Default is none.
-- Output template
+- <span id="download-output">Output template</span>
 Define an output template by using the `-o` / `--output` flag.
 ```shell
 $ crunchy-cli download -o "ditf.mp4" https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
 ```
 Default is `{title}.mp4`. See the [Template Options section](#output-template-options) below for more options.
- Resolution - <span id="download-output-specials">Output template for special episodes</span>
Define an output template which only gets used when the episode is a special (episode number is 0 or has non-zero decimal places) by using the `--output-special` flag.
```shell
$ crunchy-cli download --output-specials "Special EP - {title}" https://www.crunchyroll.com/watch/GY8D975JY/veldoras-journal
```
Default is the template, set by the `-o` / `--output` flag. See the [Template Options section](#output-template-options) below for more options.
- <span id="download-universal-output">Universal output</span>
The output template options can be forced to get sanitized via the `--universal-output` flag to be valid across all supported operating systems (Windows has a lot of characters which aren't allowed in filenames...).
```shell
$ crunchy-cli download --universal-output -o https://www.crunchyroll.com/watch/G7PU4XD48/tales-veldoras-journal-2
```
- <span id="download-resolution">Resolution</span>
The resolution for videos can be set via the `-r` / `--resolution` flag. The resolution for videos can be set via the `-r` / `--resolution` flag.
```shell ```shell
$ crunchy-cli download -r worst https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome $ crunchy-cli download -r worst https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
``` ```
Default is `best`. Default is `best`.
- FFmpeg Preset - <span id="download-language-tagging">Language tagging</span>
You can force the usage of a specific language tagging in the output file with the `--language-tagging` flag.
This might be useful as some video players doesn't recognize the language tagging Crunchyroll uses internally.
```shell
$ crunchy-cli download --language-tagging ietf https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
- <span id="download-ffmpeg-preset">FFmpeg Preset</span>
You can specify specific built-in presets with the `--ffmpeg-preset` flag to convert videos to a specific coding while downloading. You can specify specific built-in presets with the `--ffmpeg-preset` flag to convert videos to a specific coding while downloading.
Multiple predefined presets how videos should be encoded (h264, h265, av1, ...) are available, you can see them with `crunchy-cli download --help`. Multiple predefined presets how videos should be encoded (h264, h265, av1, ...) are available, you can see them with `crunchy-cli download --help`.
If you need more specific ffmpeg customizations you could either convert the output file manually or use ffmpeg output arguments as value for this flag. If you need more specific ffmpeg customizations you could either convert the output file manually or use ffmpeg output arguments as value for this flag.
```shell ```shell
$ crunchy-cli downlaod --ffmpeg-preset av1-lossless https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome $ crunchy-cli download --ffmpeg-preset av1-lossless https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
``` ```
- Skip existing - <span id="download-ffmpeg-threads">FFmpeg threads</span>
If you want to manually set how many threads FFmpeg should use, you can use the `--ffmpeg-threads` flag. This does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`.
```shell
$ crunchy-cli download --ffmpeg-threads 4 https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
- <span id="download-skip-existing">Skip existing</span>
If you re-download a series but want to skip episodes you've already downloaded, the `--skip-existing` flag skips the already existing/downloaded files. If you re-download a series but want to skip episodes you've already downloaded, the `--skip-existing` flag skips the already existing/downloaded files.
```shell ```shell
$ crunchy-cli download --skip-existing https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx $ crunchy-cli download --skip-existing https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
``` ```
- Yes - <span id="download-skip-specials">Skip specials</span>
If you doesn't want to download special episodes, use the `--skip-specials` flag to skip the download of them.
```shell
$ crunchy-cli download --skip-specials https://www.crunchyroll.com/series/GYZJ43JMR/that-time-i-got-reincarnated-as-a-slime[S2]
```
- <span id="download-include-chapters">Include chapters</span>
Crunchyroll sometimes provide information about skippable events like the intro or credits.
These information can be stored as chapters in the resulting video file via the `--include-chapters` flag.
```shell
$ crunchy-cli download --include-chapters https://www.crunchyroll.com/watch/G0DUND0K2/the-journeys-end
```
- <span id="download-yes">Yes</span>
Sometimes different seasons have the same season number (e.g. Sword Art Online Alicization and Alicization War of Underworld are both marked as season 3), in such cases an interactive prompt is shown which needs user further user input to decide which season to download. Sometimes different seasons have the same season number (e.g. Sword Art Online Alicization and Alicization War of Underworld are both marked as season 3), in such cases an interactive prompt is shown which needs user further user input to decide which season to download.
The `--yes` flag suppresses this interactive prompt and just downloads all seasons. The `--yes` flag suppresses this interactive prompt and just downloads all seasons.
```shell ```shell
$ crunchy-cli download --yes https://www.crunchyroll.com/series/GR49G9VP6/sword-art-online $ crunchy-cli download --yes https://www.crunchyroll.com/series/GR49G9VP6/sword-art-online
``` ```
If you've passed the `-q` / `--quiet` [global flag](#global-settings), this flag is automatically set. If you've passed the `-q` / `--quiet` [global flag](#global-settings), this flag is automatically set.
- Force hardsub - <span id="download-force-hardsub">Force hardsub</span>
If you want to burn-in the subtitles, even if the output format/container supports soft-subs (e.g. `.mp4`), use the `--force-hardsub` flag to do so. If you want to burn-in the subtitles, even if the output format/container supports soft-subs (e.g. `.mp4`), use the `--force-hardsub` flag to do so.
```shell ```shell
$ crunchy-cli download --force-hardsub -s en-US https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome $ crunchy-cli download --force-hardsub -s en-US https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
``` ```
- <span id="download-threads">Threads</span>
To increase the download speed, video segments are downloaded simultaneously by creating multiple threads.
If you want to manually specify how many threads to use when downloading, do this with the `-t` / `--threads` flag.
```shell
$ crunchy-cli download -t 1 https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
The default thread count equals the number of CPU threads your machine has.
### Archive
The `archive` command lets you download episodes with multiple audios and subtitles and merges them into a `.mkv` file.
**Supported urls**
- Single episode (with [episode filtering](#episode-filtering))
```shell
$ crunchy-cli archive https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
@@ -273,88 +393,225 @@ The `archive` command lets you download episodes with multiple audios and subtit
```
**Options**
- <span id="archive-audio">Audio languages</span>
Set the audio language with the `-a` / `--audio` flag. Can be used multiple times.
```shell
$ crunchy-cli archive -a ja-JP -a de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is your system locale (if not supported by Crunchyroll, `en-US` (American English) and `ja-JP` (Japanese) are used).
- <span id="archive-subtitle">Subtitle languages</span>
Besides the audio, you can specify the subtitle language by using the `-s` / `--subtitle` flag.
```shell
$ crunchy-cli archive -s de-DE https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is `all` subtitles.
- <span id="archive-output">Output template</span>
Define an output template by using the `-o` / `--output` flag.
_crunchy-cli_ exclusively uses the [`.mkv`](https://en.wikipedia.org/wiki/Matroska) container format because of its ability to store multiple audio, video and subtitle tracks at once.
```shell
$ crunchy-cli archive -o "{title}.mkv" https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is `{title}.mkv`. See the [Template Options section](#output-template-options) below for more options.
- <span id="archive-output-specials">Output template for special episodes</span>
Define an output template which only gets used when the episode is a special (episode number is 0 or has non-zero decimal places) by using the `--output-specials` flag.
_crunchy-cli_ exclusively uses the [`.mkv`](https://en.wikipedia.org/wiki/Matroska) container format, because of its ability to store multiple audio, video and subtitle tracks at once.
```shell
$ crunchy-cli archive --output-specials "Special EP - {title}.mkv" https://www.crunchyroll.com/watch/GY8D975JY/veldoras-journal
```
Default is the template, set by the `-o` / `--output` flag. See the [Template Options section](#output-template-options) below for more options.
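As an illustrative sketch (assuming directory paths are allowed in the template; the folder name is made up), both templates can be combined to route specials into their own folder:
```shell
$ crunchy-cli archive -o "{title}.mkv" --output-specials "Specials/{title}.mkv" https://www.crunchyroll.com/series/GYZJ43JMR/that-time-i-got-reincarnated-as-a-slime
```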
- <span id="archive-universal-output">Universal output</span>
With the `--universal-output` flag, the output template options are sanitized so that the resulting filename is valid across all supported operating systems (Windows in particular has many characters which aren't allowed in filenames).
```shell
$ crunchy-cli archive --universal-output -o "{title}.mkv" https://www.crunchyroll.com/watch/G7PU4XD48/tales-veldoras-journal-2
```
- <span id="archive-resolution">Resolution</span>
The resolution for videos can be set via the `-r` / `--resolution` flag.
```shell
$ crunchy-cli archive -r worst https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is `best`.
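Besides `best` and `worst`, the resolution can also be given as a pixel abbreviation; a hedged example:
```shell
$ crunchy-cli archive -r 1080p https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```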
- <span id="archive-merge">Merge behavior</span>
Due to censorship or additional intros, some episodes have multiple lengths for different languages.
In the best case, when multiple audio & subtitle tracks are used, there is only one *video* track and all other languages can be stored as audio-only.
But, as said, this is not always the case.
With the `-m` / `--merge` flag you can define the behaviour when an episode's video tracks differ in length.
Valid options are `audio` - store one video and all other languages as audio only; `video` - store the video + audio for every language; `auto` - detect if videos differ in length: if so, behave like `video`, otherwise like `audio`; `sync` - detect if videos differ in length: if so, try to find the offset of matching audio parts and remove it from the beginning, otherwise behave like `audio`.
Subtitles will always match the primary audio and video.
```shell
$ crunchy-cli archive -m audio https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is `auto`.
- <span id="archive-merge-time-tolerance">Merge time tolerance</span>
Sometimes two video tracks are downloaded with `--merge` set to `auto` even though they only differ by a few milliseconds in length, which shouldn't be noticeable to the viewer.
To prevent this, you can specify a range in milliseconds with the `--merge-time-tolerance` flag; only one video is downloaded if the length difference is within the given range.
```shell
$ crunchy-cli archive -m auto --merge-time-tolerance 100 https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is `200` milliseconds.
- <span id="archive-merge-sync-tolerance">Merge sync tolerance</span>
Sometimes two video tracks are downloaded with `--merge` set to `sync` because the audio fingerprinting fails to identify matching audio parts (e.g. opening).
To prevent this, you can use the `--merge-sync-tolerance` flag to specify the maximum difference at which two audio fingerprints are still considered equal.
```shell
$ crunchy-cli archive -m sync --merge-sync-tolerance 3 https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
Default is `6`.
- <span id="archive-merge-sync-precision">Merge sync precision</span>
If you use `--merge` set to `sync` and the syncing seems to be not accurate enough or takes too long, you can use the `--merge-sync-precision` flag to specify the number of offset determination runs from which the final offset is calculated.
```shell
$ crunchy-cli archive -m sync --merge-sync-precision 3 https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
Default is `4`.
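If syncing still falls back to two video tracks, a hedged sketch that raises both values (the numbers are illustrative; higher precision trades runtime for accuracy):
```shell
$ crunchy-cli archive -m sync --merge-sync-tolerance 10 --merge-sync-precision 8 https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```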
- <span id="archive-language-tagging">Language tagging</span>
You can force the usage of a specific language tagging in the output file with the `--language-tagging` flag.
This might be useful as some video players don't recognize the language tagging Crunchyroll uses internally. Valid options are `default` (how Crunchyroll uses it internally) and `ietf` (according to the IETF standard).
```shell
$ crunchy-cli archive --language-tagging ietf https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
- <span id="archive-ffmpeg-preset">FFmpeg Preset</span>
You can specify built-in presets with the `--ffmpeg-preset` flag to convert videos to a specific coding format while downloading.
Multiple predefined presets for how videos should be encoded (h264, h265, av1, ...) are available; you can see them with `crunchy-cli archive --help`.
If you need more specific ffmpeg customizations you can either convert the output file manually or use ffmpeg output arguments as the value for this flag.
```shell
$ crunchy-cli archive --ffmpeg-preset av1-lossless https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
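As a hedged sketch of the second route, raw ffmpeg output arguments can be passed as the flag value instead of a preset name (the exact arguments below are illustrative, not a recommendation):
```shell
$ crunchy-cli archive --ffmpeg-preset "-c:v libx265 -crf 26 -c:a copy" https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```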
- <span id="archive-ffmpeg-threads">FFmpeg threads</span>
If you want to manually set how many threads FFmpeg should use, you can use the `--ffmpeg-threads` flag. This does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`.
```shell
$ crunchy-cli archive --ffmpeg-threads 4 https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
```
- <span id="archive-default-subtitle">Default subtitle</span>
`--default-subtitle` Set which subtitle language is to be flagged as **default** and **forced**. `--default-subtitle` Set which subtitle language is to be flagged as **default** and **forced**.
```shell
$ crunchy-cli archive --default-subtitle en-US https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is none.
- <span id="archive-include-fonts">Include fonts</span>
You can include the fonts required by subtitles directly into the output file with the `--include-fonts` flag. This will use the embedded font for subtitles instead of the system font when playing the video in a video player which supports it.
```shell
$ crunchy-cli archive --include-fonts https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
- <span id="archive-include-chapters">Include chapters</span>
Crunchyroll sometimes provides information about skippable events like the intro or credits.
This information can be stored as chapters in the resulting video file via the `--include-chapters` flag.
This flag only works if `--merge` is set to `audio` or `sync` because chapters cannot be mapped to a specific video stream.
```shell
$ crunchy-cli archive --include-chapters https://www.crunchyroll.com/watch/G0DUND0K2/the-journeys-end
```
- <span id="archive-skip-existing">Skip existing</span>
If you re-download a series but want to skip episodes you've already downloaded, the `--skip-existing` flag skips the already existing/downloaded files.
```shell
$ crunchy-cli archive --skip-existing https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
- <span id="archive-skip-existing-method">Skip existing method</span>
By default, already existing files are determined by their name and the download of the corresponding episode is skipped.
But sometimes Crunchyroll adds dubs or subs to an already downloaded episode; these changes aren't recognized and `--skip-existing` just skips it.
This behavior can be changed with the `--skip-existing-method` flag. Valid options are `audio` and `subtitle` (if the file already exists but contains fewer audio/subtitle tracks than what should be downloaded, the episode gets downloaded again and the file overwritten).
```shell
$ crunchy-cli archive --skip-existing --skip-existing-method audio --skip-existing-method subtitle https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
- <span id="archive-skip-specials">Skip specials</span>
If you don't want to download special episodes, use the `--skip-specials` flag to skip them.
```shell
$ crunchy-cli archive --skip-specials https://www.crunchyroll.com/series/GYZJ43JMR/that-time-i-got-reincarnated-as-a-slime[S2]
```
- <span id="archive-yes">Yes</span>
Sometimes different seasons have the same season number (e.g. Sword Art Online Alicization and Alicization War of Underworld are both marked as season 3); in such cases an interactive prompt is shown which needs further user input to decide which season to download.
The `--yes` flag suppresses this interactive prompt and just downloads all seasons.
```shell
$ crunchy-cli archive --yes https://www.crunchyroll.com/series/GR49G9VP6/sword-art-online
```
If you've passed the `-q` / `--quiet` [global flag](#global-settings), this flag is automatically set.
- <span id="archive-threads">Threads</span>
To increase the download speed, video segments are downloaded simultaneously by creating multiple threads.
If you want to manually specify how many threads to use when downloading, do this with the `-t` / `--threads` flag.
```shell
$ crunchy-cli archive -t 1 https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
The default thread count equals the number of CPU threads your machine has.
### Search
The `search` command is a powerful tool to query the Crunchyroll library.
It behaves like the regular search on the website but is able to further process the results and return everything it can find, from the series title down to the raw stream url.
_Using this command with the `--anonymous` flag or a non-premium account may return incomplete results._
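For example, a plain search term can be used as input instead of a URL (the query here is arbitrary):
```shell
$ crunchy-cli search "darling in the franxx"
```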
**Supported urls/input**
- Single episode (with [episode filtering](#episode-filtering))
```shell
$ crunchy-cli search https://www.crunchyroll.com/watch/GRDQPM1ZY/alone-and-lonesome
@@ -369,19 +626,23 @@ The `archive` command lets you download episodes with multiple audios and subtit
```
**Options**
- <span id="search-audio">Audio</span>
Set the audio language to search via the `--audio` flag. Can be used multiple times.
```shell
$ crunchy-cli search --audio en-US https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is your system locale.
- <span id="search-result-limit">Result limit</span>
If your input is a search term instead of a URL, you have multiple options to control which results to process.
The `--search-top-results-limit` flag sets the limit of top search results to process.
`--search-series-limit` sets the limit of only series, `--search-movie-listing-limit` of only movie listings, `--search-episode-limit` of only episodes and `--search-music-limit` of only concerts and music videos.
```shell
$ crunchy-cli search --search-top-results-limit 10 "darling in the franxx"
# only return series which have 'darling' in it. do not return top results which might also be non-series items
@@ -389,6 +650,7 @@ The `archive` command lets you download episodes with multiple audios and subtit
# this returns 2 top results, 3 movie listings, 5 episodes and 1 music item as result
$ crunchy-cli search --search-top-results-limit 2 --search-movie-listing-limit 3 --search-episode-limit 5 --search-music-limit 1 "test"
```
Default is `5` for `--search-top-results-limit`, `0` for all others.
- Output template
@@ -399,9 +661,11 @@ The `archive` command lets you download episodes with multiple audios and subtit
The required pattern for this begins with `{{`, then the keyword, and closes with `}}` (e.g. `{{episode.title}}`).
For example, if you want to get the title of an episode, you can use `Title: {{episode.title}}` and `{{episode.title}}` will be replaced with the episode title.
You can see all supported keywords with `crunchy-cli search --help`.
```shell
$ crunchy-cli search -o "{{series.title}}" https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx
```
Default is `S{{season.number}}E{{episode.number}} - {{episode.title}}`.
---
@@ -414,15 +678,22 @@ You can use various template options to change how the filename is processed. Th
- `{series_name}` → Name of the series
- `{season_name}` → Name of the season
- `{audio}` → Audio language of the video
- `{width}` → Width of the video
- `{height}` → Height of the video
- `{season_number}` → Number of the season
- `{episode_number}` → Number of the episode
- `{relative_episode_number}` → Number of the episode relative to its season
- `{sequence_number}` → Like `{episode_number}` but without possible non-number characters
- `{relative_sequence_number}` → Like `{relative_episode_number}` but with support for episode 0's and .5's
- `{release_year}` → Release year of the video
- `{release_month}` → Release month of the video
- `{release_day}` → Release day of the video
- `{series_id}` → ID of the series
- `{season_id}` → ID of the season
- `{episode_id}` → ID of the episode
Example:
```shell
$ crunchy-cli archive -o "[S{season_number}E{episode_number}] {title}.mkv" https://www.crunchyroll.com/series/G8DHV7W21/dragon-ball
# Output file: '[S01E01] Secret of the Dragon Ball.mkv'
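# a hedged second example using the release-date and sequence options (the template itself is illustrative)
$ crunchy-cli archive -o "{series_name} ({release_year}) - S{season_number}E{sequence_number}.mkv" https://www.crunchyroll.com/series/G8DHV7W21/dragon-ball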
@@ -436,6 +707,7 @@ A filter pattern may consist of either a season, an episode, or a combination of
When used in combination, seasons `S` must be defined before episodes `E`.
There are many possible patterns, for example:
- `...[E5]` - Download the fifth episode.
- `...[S1]` - Download the whole first season.
- `...[-S2]` - Download the first two seasons.
@@ -445,6 +717,7 @@ There are many possible patterns, for example:
- `...[S1-S3,S4E2-S4E6]` - Download season one to three, then episodes two to six from season four.
In practice, it would look like this:
```
https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx[E1-E5]
```
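Depending on your shell, the square brackets may be interpreted as glob patterns, so quoting (or escaping) the URL is a safe habit; a hedged example:
```shell
$ crunchy-cli download "https://www.crunchyroll.com/series/GY8VEQ95Y/darling-in-the-franxx[E1-E5]"
```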
@ -19,13 +19,6 @@ fn main() -> std::io::Result<()> {
println!("cargo:warning=Multiple tls backends are activated (through the '*-tls' features). Consider to activate only one as it is not possible to change the backend during runtime. The active backend for this build will be '{}'.", active_tls_backend) println!("cargo:warning=Multiple tls backends are activated (through the '*-tls' features). Consider to activate only one as it is not possible to change the backend during runtime. The active backend for this build will be '{}'.", active_tls_backend)
} }
if cfg!(feature = "openssl") {
println!("cargo:warning=The 'openssl' feature is deprecated and will be removed in a future version. Use the 'openssl-tls' feature instead.")
}
if cfg!(feature = "openssl-static") {
println!("cargo:warning=The 'openssl-static' feature is deprecated and will be removed in a future version. Use the 'openssl-tls-static' feature instead.")
}
// note that we're using an anti-pattern here / violate the rust conventions. build script are // note that we're using an anti-pattern here / violate the rust conventions. build script are
// not supposed to write outside of 'OUT_DIR'. to have the generated files in the build "root" // not supposed to write outside of 'OUT_DIR'. to have the generated files in the build "root"
// (the same directory where the output binary lives) is much simpler than in 'OUT_DIR' since // (the same directory where the output binary lives) is much simpler than in 'OUT_DIR' since
@ -1,7 +1,7 @@
[package] [package]
name = "crunchy-cli-core" name = "crunchy-cli-core"
authors = ["Crunchy Labs Maintainers"] authors = ["Crunchy Labs Maintainers"]
version = "3.0.3" version = "3.6.7"
edition = "2021" edition = "2021"
license = "MIT" license = "MIT"
@ -13,30 +13,39 @@ openssl-tls-static = ["reqwest/native-tls", "reqwest/native-tls-alpn", "reqwest/
[dependencies] [dependencies]
anyhow = "1.0" anyhow = "1.0"
async-trait = "0.1" async-speed-limit = "0.4"
clap = { version = "4.4", features = ["derive", "string"] } clap = { version = "4.5", features = ["derive", "string"] }
chrono = "0.4" chrono = "0.4"
crunchyroll-rs = { version = "0.6.2", features = ["dash-stream"] } crunchyroll-rs = { version = "0.11.4", features = ["experimental-stabilizations", "tower"] }
ctrlc = "3.4" ctrlc = "3.4"
dialoguer = { version = "0.11", default-features = false } dialoguer = { version = "0.11", default-features = false }
dirs = "5.0" dirs = "5.0"
derive_setters = "0.1" derive_setters = "0.1"
futures-util = { version = "0.3", features = ["io"] }
fs2 = "0.4" fs2 = "0.4"
http = "1.1"
indicatif = "0.17" indicatif = "0.17"
lazy_static = "1.4" lazy_static = "1.4"
log = { version = "0.4", features = ["std"] } log = { version = "0.4", features = ["std"] }
num_cpus = "1.16" num_cpus = "1.16"
regex = "1.9" regex = "1.10"
reqwest = { version = "0.11", default-features = false, features = ["socks"] } reqwest = { version = "0.12", features = ["socks", "stream"] }
sanitize-filename = "0.5" rsubs-lib = "~0.3.2"
rusty-chromaprint = "0.2"
serde = "1.0" serde = "1.0"
serde_json = "1.0" serde_json = "1.0"
serde_plain = "1.0" serde_plain = "1.0"
shlex = "1.2" shlex = "1.3"
sys-locale = "0.3" sys-locale = "0.3"
tempfile = "3.8" tempfile = "3.10"
tokio = { version = "1.32", features = ["macros", "rt-multi-thread", "time"] } time = "0.3"
rustls-native-certs = { version = "0.6", optional = true } tokio = { version = "1.38", features = ["io-util", "macros", "net", "rt-multi-thread", "time"] }
tokio-util = "0.7"
tower-service = "0.3"
rustls-native-certs = { version = "0.7", optional = true }
[target.'cfg(not(target_os = "windows"))'.dependencies]
nix = { version = "0.28", features = ["fs"] }
[build-dependencies] [build-dependencies]
chrono = "0.4" chrono = "0.4"
@ -1,7 +1,8 @@
fn main() -> std::io::Result<()> { fn main() -> std::io::Result<()> {
println!( println!(
"cargo:rustc-env=GIT_HASH={}", "cargo:rustc-env=GIT_HASH={}",
get_short_commit_hash()?.unwrap_or_default() std::env::var("CRUNCHY_CLI_GIT_HASH")
.or::<std::io::Error>(Ok(get_short_commit_hash()?.unwrap_or_default()))?
); );
println!( println!(
"cargo:rustc-env=BUILD_DATE={}", "cargo:rustc-env=BUILD_DATE={}",
@ -1,23 +1,28 @@
use crate::archive::filter::ArchiveFilter;
use crate::utils::context::Context; use crate::utils::context::Context;
use crate::utils::download::{DownloadBuilder, DownloadFormat, MergeBehavior}; use crate::utils::download::{
DownloadBuilder, DownloadFormat, DownloadFormatMetadata, MergeBehavior,
};
use crate::utils::ffmpeg::FFmpegPreset; use crate::utils::ffmpeg::FFmpegPreset;
use crate::utils::filter::Filter; use crate::utils::filter::{Filter, FilterMediaScope};
use crate::utils::format::{Format, SingleFormat}; use crate::utils::format::{Format, SingleFormat};
use crate::utils::locale::all_locale_in_locales; use crate::utils::locale::{all_locale_in_locales, resolve_locales, LanguageTagging};
use crate::utils::log::progress; use crate::utils::log::progress;
use crate::utils::os::{free_file, has_ffmpeg, is_special_file}; use crate::utils::os::{free_file, has_ffmpeg, is_special_file};
use crate::utils::parse::parse_url; use crate::utils::parse::parse_url;
use crate::utils::video::variant_data_from_stream; use crate::utils::video::stream_data_from_stream;
use crate::Execute; use crate::Execute;
use anyhow::bail; use anyhow::bail;
use anyhow::Result; use anyhow::Result;
use chrono::Duration; use chrono::Duration;
use crunchyroll_rs::media::{Resolution, Subtitle}; use crunchyroll_rs::media::{Resolution, Subtitle};
use crunchyroll_rs::Locale; use crunchyroll_rs::Locale;
use log::debug; use log::{debug, warn};
use std::collections::HashMap; use regex::Regex;
use std::path::PathBuf; use std::fmt::{Display, Formatter};
use std::iter::zip;
use std::ops::Sub;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
#[derive(Clone, Debug, clap::Parser)] #[derive(Clone, Debug, clap::Parser)]
#[clap(about = "Archive a video")] #[clap(about = "Archive a video")]
@ -26,35 +31,55 @@ pub struct Archive {
#[arg(help = format!("Audio languages. Can be used multiple times. \ #[arg(help = format!("Audio languages. Can be used multiple times. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))] Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(long_help = format!("Audio languages. Can be used multiple times. \ #[arg(long_help = format!("Audio languages. Can be used multiple times. \
Available languages are:\n {}", Locale::all().into_iter().map(|l| format!("{:<6} {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))] Available languages are:\n {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| format!("{:<6} {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
#[arg(short, long, default_values_t = vec![Locale::ja_JP, crate::utils::locale::system_locale()])] #[arg(short, long, default_values_t = vec![Locale::ja_JP, crate::utils::locale::system_locale()])]
pub(crate) audio: Vec<Locale>, pub(crate) audio: Vec<Locale>,
#[arg(skip)]
output_audio_locales: Vec<String>,
#[arg(help = format!("Subtitle languages. Can be used multiple times. \ #[arg(help = format!("Subtitle languages. Can be used multiple times. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))] Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(long_help = format!("Subtitle languages. Can be used multiple times. \ #[arg(long_help = format!("Subtitle languages. Can be used multiple times. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))] Available languages are: {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(short, long, default_values_t = Locale::all())] #[arg(short, long, default_values_t = Locale::all())]
pub(crate) subtitle: Vec<Locale>, pub(crate) subtitle: Vec<Locale>,
#[arg(skip)]
output_subtitle_locales: Vec<String>,
#[arg(help = "Name of the output file")] #[arg(help = "Name of the output file")]
#[arg(long_help = "Name of the output file.\ #[arg(long_help = "Name of the output file. \
If you use one of the following pattern they will get replaced:\n \ If you use one of the following pattern they will get replaced:\n \
{title} Title of the video\n \ {title} Title of the video\n \
{series_name} Name of the series\n \ {series_name} Name of the series\n \
{season_name} Name of the season\n \ {season_name} Name of the season\n \
{audio} Audio language of the video\n \ {audio} Audio language of the video\n \
{resolution} Resolution of the video\n \ {width} Width of the video\n \
{height} Height of the video\n \
{season_number} Number of the season\n \ {season_number} Number of the season\n \
{episode_number} Number of the episode\n \ {episode_number} Number of the episode\n \
{relative_episode_number} Number of the episode relative to its season\n \ {relative_episode_number} Number of the episode relative to its season\n \
{sequence_number} Like '{episode_number}' but without possible non-number characters\n \
{relative_sequence_number} Like '{relative_episode_number}' but with support for episode 0's and .5's\n \
{release_year} Release year of the video\n \
{release_month} Release month of the video\n \
{release_day} Release day of the video\n \
{series_id} ID of the series\n \ {series_id} ID of the series\n \
{season_id} ID of the season\n \ {season_id} ID of the season\n \
{episode_id} ID of the episode")] {episode_id} ID of the episode")]
#[arg(short, long, default_value = "{title}.mkv")] #[arg(short, long, default_value = "{title}.mkv")]
pub(crate) output: String, pub(crate) output: String,
#[arg(help = "Name of the output file if the episode is a special")]
#[arg(long_help = "Name of the output file if the episode is a special. \
If not set, the '-o'/'--output' flag will be used as name template")]
#[arg(long)]
pub(crate) output_specials: Option<String>,
#[arg(help = "Sanitize the output file for use with all operating systems. \
This option only affects template options and not static characters.")]
#[arg(long, default_value_t = false)]
pub(crate) universal_output: bool,
#[arg(help = "Video resolution")] #[arg(help = "Video resolution")]
#[arg(long_help = "The video resolution.\ #[arg(long_help = "The video resolution. \
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \ Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
Specifying the exact pixels is not recommended, use one of the other options instead. \ Specifying the exact pixels is not recommended, use one of the other options instead. \
Crunchyroll let you choose the quality with pixel abbreviation on their clients, so you might be already familiar with the available options. \ Crunchyroll let you choose the quality with pixel abbreviation on their clients, so you might be already familiar with the available options. \
@ -64,16 +89,43 @@ pub struct Archive {
pub(crate) resolution: Resolution, pub(crate) resolution: Resolution,
#[arg( #[arg(
help = "Sets the behavior of the stream merging. Valid behaviors are 'auto', 'audio' and 'video'" help = "Sets the behavior of the stream merging. Valid behaviors are 'auto', 'sync', 'audio' and 'video'"
)] )]
#[arg( #[arg(
long_help = "Because of local restrictions (or other reasons) some episodes with different languages does not have the same length (e.g. when some scenes were cut out). \ long_help = "Because of local restrictions (or other reasons) some episodes with different languages does not have the same length (e.g. when some scenes were cut out). \
With this flag you can set the behavior when handling multiple language. With this flag you can set the behavior when handling multiple language.
Valid options are 'audio' (stores one video and all other languages as audio only), 'video' (stores the video + audio for every language) and 'auto' (detects if videos differ in length: if so, behave like 'video' else like 'audio')" Valid options are 'audio' (stores one video and all other languages as audio only), 'video' (stores the video + audio for every language), 'auto' (detects if videos differ in length: if so, behave like 'video' else like 'audio') and 'sync' (detects if videos differ in length: if so, tries to find the offset of matching audio parts and removes it from the beginning, otherwise it behaves like 'audio')"
)] )]
#[arg(short, long, default_value = "auto")] #[arg(short, long, default_value = "auto")]
#[arg(value_parser = MergeBehavior::parse)] #[arg(value_parser = MergeBehavior::parse)]
pub(crate) merge: MergeBehavior, pub(crate) merge: MergeBehavior,
#[arg(
help = "If the merge behavior is 'auto' or 'sync', consider videos to be of equal lengths if the difference in length is smaller than the specified milliseconds"
)]
#[arg(long, default_value_t = 200)]
pub(crate) merge_time_tolerance: u32,
#[arg(
help = "If the merge behavior is 'sync', specify the difference by which two fingerprints are considered equal, higher values can help when the algorithm fails"
)]
#[arg(long, default_value_t = 6)]
pub(crate) merge_sync_tolerance: u32,
#[arg(
help = "If the merge behavior is 'sync', specify the amount of offset determination runs from which the final offset is calculated, higher values will increase the time required but lead to more precise offsets"
)]
#[arg(long, default_value_t = 4)]
pub(crate) merge_sync_precision: u32,
#[arg(
help = "Specified which language tagging the audio and subtitle tracks and language specific format options should have. \
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard)"
)]
#[arg(
long_help = "Specified which language tagging the audio and subtitle tracks and language specific format options should have. \
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard; you might run in issues as there are multiple locales which resolve to the same IETF language code, e.g. 'es-LA' and 'es-ES' are both resolving to 'es')"
)]
#[arg(long)]
#[arg(value_parser = LanguageTagging::parse)]
pub(crate) language_tagging: Option<LanguageTagging>,
#[arg(help = format!("Presets for converting the video to a specific coding format. \ #[arg(help = format!("Presets for converting the video to a specific coding format. \
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))] Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
@ -83,27 +135,71 @@ pub struct Archive {
#[arg(long)] #[arg(long)]
#[arg(value_parser = FFmpegPreset::parse)] #[arg(value_parser = FFmpegPreset::parse)]
pub(crate) ffmpeg_preset: Option<FFmpegPreset>, pub(crate) ffmpeg_preset: Option<FFmpegPreset>,
#[arg(
help = "The number of threads used by ffmpeg to generate the output file. Does not work with every codec/preset"
)]
#[arg(
long_help = "The number of threads used by ffmpeg to generate the output file. \
Does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`. \
By default, ffmpeg chooses the thread count which works best for the output codec"
)]
#[arg(long)]
pub(crate) ffmpeg_threads: Option<usize>,
#[arg( #[arg(
help = "Set which subtitle language should be set as default / auto shown when starting a video" help = "Set which subtitle language should be set as default / auto shown when starting a video"
)] )]
#[arg(long)] #[arg(long)]
pub(crate) default_subtitle: Option<Locale>, pub(crate) default_subtitle: Option<Locale>,
#[arg(help = "Include fonts in the downloaded file")]
#[arg(long)]
pub(crate) include_fonts: bool,
#[arg(
help = "Includes chapters (e.g. intro, credits, ...). Only works if `--merge` is set to 'audio'"
)]
#[arg(
long_help = "Includes chapters (e.g. intro, credits, ...). . Only works if `--merge` is set to 'audio'. \
Because chapters are essentially only special timeframes in episodes like the intro, most of the video timeline isn't covered by a chapter.
These \"gaps\" are filled with an 'Episode' chapter because many video players are ignore those gaps and just assume that a chapter ends when the next chapter start is reached, even if a specific end-time is set.
Also chapters aren't always available, so in this case, just a big 'Episode' chapter from start to end will be created"
)]
#[arg(long, default_value_t = false)]
pub(crate) include_chapters: bool,
#[arg(help = "Skip files which are already existing")] #[arg(help = "Omit closed caption subtitles in the downloaded file")]
#[arg(long, default_value_t = false)]
pub(crate) no_closed_caption: bool,
#[arg(help = "Skip files which are already existing by their name")]
#[arg(long, default_value_t = false)] #[arg(long, default_value_t = false)]
pub(crate) skip_existing: bool, pub(crate) skip_existing: bool,
#[arg(
help = "Only works in combination with `--skip-existing`. Sets the method how already existing files should be skipped. Valid methods are 'audio' and 'subtitle'"
)]
#[arg(long_help = "Only works in combination with `--skip-existing`. \
By default, already existing files are determined by their name and the download of the corresponding episode is skipped. \
With this flag you can modify this behavior. \
Valid options are 'audio' and 'subtitle' (if the file already exists but the audio/subtitle are less from what should be downloaded, the episode gets downloaded and the file overwritten).")]
#[arg(long, default_values_t = SkipExistingMethod::default())]
#[arg(value_parser = SkipExistingMethod::parse)]
pub(crate) skip_existing_method: Vec<SkipExistingMethod>,
#[arg(help = "Skip special episodes")]
#[arg(long, default_value_t = false)]
pub(crate) skip_specials: bool,
#[arg(help = "Skip any interactive input")] #[arg(help = "Skip any interactive input")]
#[arg(short, long, default_value_t = false)] #[arg(short, long, default_value_t = false)]
pub(crate) yes: bool, pub(crate) yes: bool,
#[arg(help = "The number of threads used to download")]
#[arg(short, long, default_value_t = num_cpus::get())]
pub(crate) threads: usize,
#[arg(help = "Crunchyroll series url(s)")] #[arg(help = "Crunchyroll series url(s)")]
#[arg(required = true)] #[arg(required = true)]
pub(crate) urls: Vec<String>, pub(crate) urls: Vec<String>,
} }
#[async_trait::async_trait(?Send)]
impl Execute for Archive { impl Execute for Archive {
fn pre_check(&mut self) -> Result<()> { fn pre_check(&mut self) -> Result<()> {
if !has_ffmpeg() { if !has_ffmpeg() {
@ -117,15 +213,61 @@ impl Execute for Archive {
&& self.output != "-" && self.output != "-"
{ {
bail!("File extension is not '.mkv'. Currently only matroska / '.mkv' files are supported") bail!("File extension is not '.mkv'. Currently only matroska / '.mkv' files are supported")
} else if let Some(special_output) = &self.output_specials {
if PathBuf::from(special_output)
.extension()
.unwrap_or_default()
.to_string_lossy()
!= "mkv"
&& !is_special_file(special_output)
&& special_output != "-"
{
bail!("File extension for special episodes is not '.mkv'. Currently only matroska / '.mkv' files are supported")
}
}
if self.include_chapters
&& !matches!(self.merge, MergeBehavior::Sync)
&& !matches!(self.merge, MergeBehavior::Audio)
{
bail!("`--include-chapters` can only be used if `--merge` is set to 'audio' or 'sync'")
}
if !self.skip_existing_method.is_empty() && !self.skip_existing {
warn!("`--skip-existing-method` has no effect if `--skip-existing` is not set")
} }
self.audio = all_locale_in_locales(self.audio.clone()); self.audio = all_locale_in_locales(self.audio.clone());
self.subtitle = all_locale_in_locales(self.subtitle.clone()); self.subtitle = all_locale_in_locales(self.subtitle.clone());
if let Some(language_tagging) = &self.language_tagging {
self.audio = resolve_locales(&self.audio);
self.subtitle = resolve_locales(&self.subtitle);
self.output_audio_locales = language_tagging.convert_locales(&self.audio);
self.output_subtitle_locales = language_tagging.convert_locales(&self.subtitle);
} else {
self.output_audio_locales = self
.audio
.clone()
.into_iter()
.map(|l| l.to_string())
.collect();
self.output_subtitle_locales = self
.subtitle
.clone()
.into_iter()
.map(|l| l.to_string())
.collect();
}
Ok(()) Ok(())
} }
async fn execute(self, ctx: Context) -> Result<()> { async fn execute(self, ctx: Context) -> Result<()> {
if !ctx.crunchy.premium().await {
warn!("You may not be able to download all requested videos when logging in anonymously or using a non-premium account")
}
let mut parsed_urls = vec![]; let mut parsed_urls = vec![];
for (i, url) in self.urls.clone().into_iter().enumerate() { for (i, url) in self.urls.clone().into_iter().enumerate() {
@ -141,7 +283,53 @@ impl Execute for Archive {
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() { for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
let progress_handler = progress!("Fetching series details"); let progress_handler = progress!("Fetching series details");
let single_format_collection = ArchiveFilter::new(url_filter, self.clone(), !self.yes) let single_format_collection = Filter::new(
url_filter,
self.audio.clone(),
self.subtitle.clone(),
|scope, locales| {
let audios = locales.into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ");
match scope {
FilterMediaScope::Series(series) => warn!("Series {} is not available with {} audio", series.title, audios),
FilterMediaScope::Season(season) => warn!("Season {} is not available with {} audio", season.season_number, audios),
FilterMediaScope::Episode(episodes) => {
if episodes.len() == 1 {
warn!("Episode {} is not available with {} audio", episodes[0].sequence_number, audios)
} else if episodes.len() == 2 {
warn!("Season {} is only available with {} audio from episode {} to {}", episodes[0].season_number, audios, episodes[0].sequence_number, episodes[1].sequence_number)
} else {
unimplemented!()
}
}
}
Ok(true)
},
|scope, locales| {
let subtitles = locales.into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ");
match scope {
FilterMediaScope::Series(series) => warn!("Series {} is not available with {} subtitles", series.title, subtitles),
FilterMediaScope::Season(season) => warn!("Season {} is not available with {} subtitles", season.season_number, subtitles),
FilterMediaScope::Episode(episodes) => {
if episodes.len() == 1 {
warn!("Episode {} of season {} is not available with {} subtitles", episodes[0].sequence_number, episodes[0].season_title, subtitles)
} else if episodes.len() == 2 {
warn!("Season {} of season {} is only available with {} subtitles from episode {} to {}", episodes[0].season_number, episodes[0].season_title, subtitles, episodes[0].sequence_number, episodes[1].sequence_number)
} else {
unimplemented!()
}
}
}
Ok(true)
},
|season| {
warn!("Skipping premium episodes in season {season}");
Ok(())
},
Format::has_relative_fmt(&self.output),
!self.yes,
self.skip_specials,
ctx.crunchy.premium().await,
)
.visit(media_collection) .visit(media_collection)
.await?; .await?;
@ -153,12 +341,31 @@ impl Execute for Archive {
single_format_collection.full_visual_output(); single_format_collection.full_visual_output();
let download_builder = DownloadBuilder::new() let download_builder =
DownloadBuilder::new(ctx.client.clone(), ctx.rate_limiter.clone())
.default_subtitle(self.default_subtitle.clone()) .default_subtitle(self.default_subtitle.clone())
.download_fonts(self.include_fonts)
.ffmpeg_preset(self.ffmpeg_preset.clone().unwrap_or_default()) .ffmpeg_preset(self.ffmpeg_preset.clone().unwrap_or_default())
.ffmpeg_threads(self.ffmpeg_threads)
.output_format(Some("matroska".to_string())) .output_format(Some("matroska".to_string()))
.audio_sort(Some(self.audio.clone())) .audio_sort(Some(self.audio.clone()))
.subtitle_sort(Some(self.subtitle.clone())); .subtitle_sort(Some(self.subtitle.clone()))
.no_closed_caption(self.no_closed_caption)
.merge_sync_tolerance(match self.merge {
MergeBehavior::Sync => Some(self.merge_sync_tolerance),
_ => None,
})
.merge_sync_precision(match self.merge {
MergeBehavior::Sync => Some(self.merge_sync_precision),
_ => None,
})
.threads(self.threads)
.audio_locale_output_map(
zip(self.audio.clone(), self.output_audio_locales.clone()).collect(),
)
.subtitle_locale_output_map(
zip(self.subtitle.clone(), self.output_subtitle_locales.clone()).collect(),
);
for single_formats in single_format_collection.into_iter() { for single_formats in single_format_collection.into_iter() {
let (download_formats, mut format) = get_format(&self, &single_formats).await?; let (download_formats, mut format) = get_format(&self, &single_formats).await?;
@ -168,16 +375,80 @@ impl Execute for Archive {
downloader.add_format(download_format) downloader.add_format(download_format)
} }
let formatted_path = format.format_path((&self.output).into(), true); let formatted_path = if format.is_special() {
let (path, changed) = free_file(formatted_path.clone()); format.format_path(
self.output_specials
.as_ref()
.map_or((&self.output).into(), |so| so.into()),
self.universal_output,
self.language_tagging.as_ref(),
)
} else {
format.format_path(
(&self.output).into(),
self.universal_output,
self.language_tagging.as_ref(),
)
};
let (mut path, changed) = free_file(formatted_path.clone());
if changed && self.skip_existing { if changed && self.skip_existing {
let mut skip = true;
if !self.skip_existing_method.is_empty() {
if let Some((audio_locales, subtitle_locales)) =
get_video_streams(&formatted_path)?
{
let method_audio = self
.skip_existing_method
.contains(&SkipExistingMethod::Audio);
let method_subtitle = self
.skip_existing_method
.contains(&SkipExistingMethod::Subtitle);
let audio_differ = if method_audio {
format
.locales
.iter()
.any(|(a, _)| !audio_locales.contains(a))
} else {
false
};
let subtitle_differ = if method_subtitle {
format
.locales
.clone()
.into_iter()
.flat_map(|(a, mut s)| {
// remove the closed caption if the flag is given to omit
// closed captions
if self.no_closed_caption && a != Locale::ja_JP {
s.retain(|l| l != &a)
}
s
})
.any(|l| !subtitle_locales.contains(&l))
} else {
false
};
if (method_audio && audio_differ)
|| (method_subtitle && subtitle_differ)
{
skip = false;
path.clone_from(&formatted_path)
}
}
}
if skip {
debug!( debug!(
"Skipping already existing file '{}'", "Skipping already existing file '{}'",
formatted_path.to_string_lossy() formatted_path.to_string_lossy()
); );
continue; continue;
} }
}
format.locales.sort_by(|(a, _), (b, _)| { format.locales.sort_by(|(a, _), (b, _)| {
self.audio self.audio
@ -196,7 +467,7 @@ impl Execute for Archive {
format.visual_output(&path); format.visual_output(&path);
downloader.download(&ctx, &path).await? downloader.download(&path).await?
} }
} }
@ -204,6 +475,36 @@ impl Execute for Archive {
} }
} }
#[derive(Clone, Debug, Eq, PartialEq)]
pub(crate) enum SkipExistingMethod {
Audio,
Subtitle,
}
impl Display for SkipExistingMethod {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let value = match self {
SkipExistingMethod::Audio => "audio",
SkipExistingMethod::Subtitle => "subtitle",
};
write!(f, "{}", value)
}
}
impl SkipExistingMethod {
fn parse(s: &str) -> Result<Self, String> {
match s.to_lowercase().as_str() {
"audio" => Ok(Self::Audio),
"subtitle" => Ok(Self::Subtitle),
_ => Err(format!("invalid skip existing method '{}'", s)),
}
}
fn default<'a>() -> &'a [Self] {
&[]
}
}
async fn get_format( async fn get_format(
archive: &Archive, archive: &Archive,
single_formats: &Vec<SingleFormat>, single_formats: &Vec<SingleFormat>,
@ -213,7 +514,8 @@ async fn get_format(
for single_format in single_formats { for single_format in single_formats {
let stream = single_format.stream().await?; let stream = single_format.stream().await?;
let Some((video, audio)) = variant_data_from_stream(&stream, &archive.resolution).await? let Some((video, audio, _)) =
stream_data_from_stream(&stream, &archive.resolution, None).await?
else { else {
if single_format.is_episode() { if single_format.is_episode() {
bail!( bail!(
@ -237,24 +539,29 @@ async fn get_format(
let subtitles: Vec<(Subtitle, bool)> = archive let subtitles: Vec<(Subtitle, bool)> = archive
.subtitle .subtitle
.iter() .iter()
.filter_map(|s| { .flat_map(|s| {
stream let mut subtitles = vec![];
.subtitles if let Some(caption) = stream.captions.get(s) {
.get(s) subtitles.push((caption.clone(), true))
.cloned() }
// the subtitle is probably not cc if the audio is japanese or more than one if let Some(subtitle) = stream.subtitles.get(s) {
// subtitle exists for this stream // the subtitle is probably cc if the audio is not japanese or only one subtitle
.map(|l| { // exists for this stream
( let cc = single_format.audio != Locale::ja_JP && stream.subtitles.len() == 1;
l, // only include the subtitles if no cc subtitle is already present or if it's
single_format.audio == Locale::ja_JP || stream.subtitles.len() > 1, // not cc
) if subtitles.is_empty() || !cc {
}) subtitles.push((subtitle.clone(), cc))
}
}
subtitles
}) })
.collect(); .collect();
format_pairs.push((single_format, video.clone(), audio, subtitles.clone())); format_pairs.push((single_format, video.clone(), audio, subtitles.clone()));
single_format_to_format_pairs.push((single_format.clone(), video, subtitles)) single_format_to_format_pairs.push((single_format.clone(), video, subtitles));
stream.invalidate().await?
} }
let mut download_formats = vec![]; let mut download_formats = vec![];
@ -266,13 +573,14 @@ async fn get_format(
video: (video, single_format.audio.clone()), video: (video, single_format.audio.clone()),
audios: vec![(audio, single_format.audio.clone())], audios: vec![(audio, single_format.audio.clone())],
subtitles, subtitles,
metadata: DownloadFormatMetadata { skip_events: None },
}) })
} }
} }
MergeBehavior::Audio => download_formats.push(DownloadFormat { MergeBehavior::Audio => download_formats.push(DownloadFormat {
video: ( video: (
(*format_pairs.first().unwrap()).1.clone(), format_pairs.first().unwrap().1.clone(),
(*format_pairs.first().unwrap()).0.audio.clone(), format_pairs.first().unwrap().0.audio.clone(),
), ),
audios: format_pairs audios: format_pairs
.iter() .iter()
@ -284,28 +592,62 @@ async fn get_format(
.iter() .iter()
.flat_map(|(_, _, _, subtitles)| subtitles.clone()) .flat_map(|(_, _, _, subtitles)| subtitles.clone())
.collect(), .collect(),
metadata: DownloadFormatMetadata {
skip_events: if archive.include_chapters {
format_pairs.first().unwrap().0.skip_events().await?
} else {
None
},
},
}), }),
MergeBehavior::Auto => { MergeBehavior::Auto | MergeBehavior::Sync => {
let mut d_formats: HashMap<Duration, DownloadFormat> = HashMap::new(); let mut d_formats: Vec<(Duration, DownloadFormat)> = vec![];
for (single_format, video, audio, subtitles) in format_pairs { for (single_format, video, audio, subtitles) in format_pairs {
if let Some(d_format) = d_formats.get_mut(&single_format.duration) { let closest_format = d_formats.iter_mut().min_by(|(x, _), (y, _)| {
d_format.audios.push((audio, single_format.audio.clone())); x.sub(single_format.duration)
d_format.subtitles.extend(subtitles) .abs()
} else { .cmp(&y.sub(single_format.duration).abs())
d_formats.insert( });
match closest_format {
Some(closest_format)
if closest_format
.0
.sub(single_format.duration)
.abs()
.num_milliseconds()
< archive.merge_time_tolerance.into() =>
{
// If less than `audio_error` apart, use same audio.
closest_format
.1
.audios
.push((audio, single_format.audio.clone()));
closest_format.1.subtitles.extend(subtitles);
}
_ => {
d_formats.push((
single_format.duration, single_format.duration,
DownloadFormat { DownloadFormat {
video: (video, single_format.audio.clone()), video: (video, single_format.audio.clone()),
audios: vec![(audio, single_format.audio.clone())], audios: vec![(audio, single_format.audio.clone())],
subtitles, subtitles,
metadata: DownloadFormatMetadata {
skip_events: if archive.include_chapters {
single_format.skip_events().await?
} else {
None
}, },
); },
},
));
} }
};
} }
for d_format in d_formats.into_values() { for (_, d_format) in d_formats.into_iter() {
download_formats.push(d_format) download_formats.push(d_format);
} }
} }
} }
@ -315,3 +657,36 @@ async fn get_format(
Format::from_single_formats(single_format_to_format_pairs), Format::from_single_formats(single_format_to_format_pairs),
)) ))
} }
fn get_video_streams(path: &Path) -> Result<Option<(Vec<Locale>, Vec<Locale>)>> {
let video_streams =
Regex::new(r"(?m)Stream\s#\d+:\d+\((?P<language>.+)\):\s(?P<type>(Audio|Subtitle))")
.unwrap();
let ffmpeg = Command::new("ffmpeg")
.stdout(Stdio::null())
.stderr(Stdio::piped())
.arg("-hide_banner")
.args(["-i", &path.to_string_lossy()])
.output()?;
let ffmpeg_output = String::from_utf8(ffmpeg.stderr)?;
let mut audio = vec![];
let mut subtitle = vec![];
for cap in video_streams.captures_iter(&ffmpeg_output) {
let locale = cap.name("language").unwrap().as_str();
let type_ = cap.name("type").unwrap().as_str();
match type_ {
"Audio" => audio.push(Locale::from(locale.to_string())),
"Subtitle" => subtitle.push(Locale::from(locale.to_string())),
_ => unreachable!(),
}
}
if audio.is_empty() && subtitle.is_empty() {
Ok(None)
} else {
Ok(Some((audio, subtitle)))
}
}
@ -1,410 +0,0 @@
use crate::archive::command::Archive;
use crate::utils::filter::{real_dedup_vec, Filter};
use crate::utils::format::{Format, SingleFormat, SingleFormatCollection};
use crate::utils::interactive_select::{check_for_duplicated_seasons, get_duplicated_seasons};
use crate::utils::parse::UrlFilter;
use anyhow::Result;
use crunchyroll_rs::{Concert, Episode, Locale, Movie, MovieListing, MusicVideo, Season, Series};
use log::{info, warn};
use std::collections::{BTreeMap, HashMap};
enum Visited {
Series,
Season,
None,
}
pub(crate) struct ArchiveFilter {
url_filter: UrlFilter,
archive: Archive,
interactive_input: bool,
season_episode_count: HashMap<String, Vec<String>>,
season_subtitles_missing: Vec<u32>,
season_sorting: Vec<String>,
visited: Visited,
}
impl ArchiveFilter {
pub(crate) fn new(url_filter: UrlFilter, archive: Archive, interactive_input: bool) -> Self {
Self {
url_filter,
archive,
interactive_input,
season_episode_count: HashMap::new(),
season_subtitles_missing: vec![],
season_sorting: vec![],
visited: Visited::None,
}
}
}
#[async_trait::async_trait]
impl Filter for ArchiveFilter {
type T = Vec<SingleFormat>;
type Output = SingleFormatCollection;
async fn visit_series(&mut self, series: Series) -> Result<Vec<Season>> {
// `series.audio_locales` isn't always populated b/c of crunchyrolls api. so check if the
// audio is matching only if the field is populated
if !series.audio_locales.is_empty() {
let missing_audio = missing_locales(&series.audio_locales, &self.archive.audio);
if !missing_audio.is_empty() {
warn!(
"Series {} is not available with {} audio",
series.title,
missing_audio
.into_iter()
.map(|l| l.to_string())
.collect::<Vec<String>>()
.join(", ")
)
}
let missing_subtitle =
missing_locales(&series.subtitle_locales, &self.archive.subtitle);
if !missing_subtitle.is_empty() {
warn!(
"Series {} is not available with {} subtitles",
series.title,
missing_subtitle
.into_iter()
.map(|l| l.to_string())
.collect::<Vec<String>>()
.join(", ")
)
}
self.visited = Visited::Series
}
let mut seasons = series.seasons().await?;
let mut remove_ids = vec![];
for season in seasons.iter_mut() {
if !self.url_filter.is_season_valid(season.season_number)
|| (!season
.audio_locales
.iter()
.any(|l| self.archive.audio.contains(l))
&& !season
.available_versions()
.await?
.iter()
.any(|l| self.archive.audio.contains(l)))
{
remove_ids.push(season.id.clone());
}
}
seasons.retain(|s| !remove_ids.contains(&s.id));
let duplicated_seasons = get_duplicated_seasons(&seasons);
if duplicated_seasons.len() > 0 {
if self.interactive_input {
check_for_duplicated_seasons(&mut seasons);
} else {
info!(
"Found duplicated seasons: {}",
duplicated_seasons
.iter()
.map(|d| d.to_string())
.collect::<Vec<String>>()
.join(", ")
)
}
}
Ok(seasons)
}
async fn visit_season(&mut self, mut season: Season) -> Result<Vec<Episode>> {
if !self.url_filter.is_season_valid(season.season_number) {
return Ok(vec![]);
}
let mut seasons = season.version(self.archive.audio.clone()).await?;
if self
.archive
.audio
.iter()
.any(|l| season.audio_locales.contains(l))
{
seasons.insert(0, season.clone());
}
if !matches!(self.visited, Visited::Series) {
let mut audio_locales: Vec<Locale> = seasons
.iter()
.map(|s| s.audio_locales.clone())
.flatten()
.collect();
real_dedup_vec(&mut audio_locales);
let missing_audio = missing_locales(&audio_locales, &self.archive.audio);
if !missing_audio.is_empty() {
warn!(
"Season {} is not available with {} audio",
season.season_number,
missing_audio
.into_iter()
.map(|l| l.to_string())
.collect::<Vec<String>>()
.join(", ")
)
}
let subtitle_locales: Vec<Locale> = seasons
.iter()
.map(|s| s.subtitle_locales.clone())
.flatten()
.collect();
let missing_subtitle = missing_locales(&subtitle_locales, &self.archive.subtitle);
if !missing_subtitle.is_empty() {
warn!(
"Season {} is not available with {} subtitles",
season.season_number,
missing_subtitle
.into_iter()
.map(|l| l.to_string())
.collect::<Vec<String>>()
.join(", ")
)
}
self.visited = Visited::Season
}
let mut episodes = vec![];
for season in seasons {
self.season_sorting.push(season.id.clone());
let season_locale = if season.audio_locales.len() < 2 {
Some(
season
.audio_locales
.get(0)
.cloned()
.unwrap_or(Locale::ja_JP),
)
} else {
None
};
let mut eps = season.episodes().await?;
let before_len = eps.len();
for mut ep in eps.clone() {
if let Some(l) = &season_locale {
if &ep.audio_locale == l {
continue;
}
eps.remove(eps.iter().position(|p| p.id == ep.id).unwrap());
} else {
let mut requested_locales = self.archive.audio.clone();
if let Some(idx) = requested_locales.iter().position(|p| p == &ep.audio_locale)
{
requested_locales.remove(idx);
} else {
eps.remove(eps.iter().position(|p| p.id == ep.id).unwrap());
}
eps.extend(ep.version(self.archive.audio.clone()).await?);
}
}
if eps.len() < before_len {
if eps.len() == 0 {
if matches!(self.visited, Visited::Series) {
warn!(
"Season {} is not available with {} audio",
season.season_number,
season_locale.unwrap_or(Locale::ja_JP)
)
}
} else {
let last_episode = eps.last().unwrap();
warn!(
"Season {} is only available with {} audio until episode {} ({})",
season.season_number,
season_locale.unwrap_or(Locale::ja_JP),
last_episode.episode_number,
last_episode.title
)
}
}
episodes.extend(eps)
}
if Format::has_relative_episodes_fmt(&self.archive.output) {
for episode in episodes.iter() {
self.season_episode_count
.entry(episode.season_id.clone())
.or_insert(vec![])
.push(episode.id.clone())
}
}
Ok(episodes)
}
async fn visit_episode(&mut self, mut episode: Episode) -> Result<Option<Self::T>> {
if !self
.url_filter
.is_episode_valid(episode.episode_number, episode.season_number)
{
return Ok(None);
}
let mut episodes = vec![];
if !matches!(self.visited, Visited::Series) && !matches!(self.visited, Visited::Season) {
if self.archive.audio.contains(&episode.audio_locale) {
episodes.push((episode.clone(), episode.subtitle_locales.clone()))
}
episodes.extend(
episode
.version(self.archive.audio.clone())
.await?
.into_iter()
.map(|e| (e.clone(), e.subtitle_locales.clone())),
);
let audio_locales: Vec<Locale> = episodes
.iter()
.map(|(e, _)| e.audio_locale.clone())
.collect();
let missing_audio = missing_locales(&audio_locales, &self.archive.audio);
if !missing_audio.is_empty() {
warn!(
"Episode {} is not available with {} audio",
episode.episode_number,
missing_audio
.into_iter()
.map(|l| l.to_string())
.collect::<Vec<String>>()
.join(", ")
)
}
let mut subtitle_locales: Vec<Locale> =
episodes.iter().map(|(_, s)| s.clone()).flatten().collect();
real_dedup_vec(&mut subtitle_locales);
let missing_subtitles = missing_locales(&subtitle_locales, &self.archive.subtitle);
if !missing_subtitles.is_empty()
&& !self
.season_subtitles_missing
.contains(&episode.season_number)
{
warn!(
"Episode {} is not available with {} subtitles",
episode.episode_number,
missing_subtitles
.into_iter()
.map(|l| l.to_string())
.collect::<Vec<String>>()
.join(", ")
);
self.season_subtitles_missing.push(episode.season_number)
}
} else {
episodes.push((episode.clone(), episode.subtitle_locales.clone()))
}
let relative_episode_number = if Format::has_relative_episodes_fmt(&self.archive.output) {
if self.season_episode_count.get(&episode.season_id).is_none() {
let season_episodes = episode.season().await?.episodes().await?;
self.season_episode_count.insert(
episode.season_id.clone(),
season_episodes.into_iter().map(|e| e.id).collect(),
);
}
let relative_episode_number = self
.season_episode_count
.get(&episode.season_id)
.unwrap()
.iter()
.position(|id| id == &episode.id)
.map(|index| index + 1);
if relative_episode_number.is_none() {
warn!(
"Failed to get relative episode number for episode {} ({}) of {} season {}",
episode.episode_number,
episode.title,
episode.series_title,
episode.season_number,
)
}
relative_episode_number
} else {
None
};
Ok(Some(
episodes
.into_iter()
.map(|(e, s)| {
SingleFormat::new_from_episode(e, s, relative_episode_number.map(|n| n as u32))
})
.collect(),
))
}
async fn visit_movie_listing(&mut self, movie_listing: MovieListing) -> Result<Vec<Movie>> {
Ok(movie_listing.movies().await?)
}
async fn visit_movie(&mut self, movie: Movie) -> Result<Option<Self::T>> {
Ok(Some(vec![SingleFormat::new_from_movie(movie, vec![])]))
}
async fn visit_music_video(&mut self, music_video: MusicVideo) -> Result<Option<Self::T>> {
Ok(Some(vec![SingleFormat::new_from_music_video(music_video)]))
}
async fn visit_concert(&mut self, concert: Concert) -> Result<Option<Self::T>> {
Ok(Some(vec![SingleFormat::new_from_concert(concert)]))
}
async fn finish(self, input: Vec<Self::T>) -> Result<Self::Output> {
let flatten_input: Self::T = input.into_iter().flatten().collect();
let mut single_format_collection = SingleFormatCollection::new();
let mut pre_sorted: BTreeMap<String, Self::T> = BTreeMap::new();
for data in flatten_input {
pre_sorted
.entry(data.identifier.clone())
.or_insert(vec![])
.push(data)
}
let mut sorted: Vec<(String, Self::T)> = pre_sorted.into_iter().collect();
sorted.sort_by(|(_, a), (_, b)| {
self.season_sorting
.iter()
.position(|p| p == &a.first().unwrap().season_id)
.unwrap()
.cmp(
&self
.season_sorting
.iter()
.position(|p| p == &b.first().unwrap().season_id)
.unwrap(),
)
});
for (_, mut data) in sorted {
data.sort_by(|a, b| {
self.archive
.audio
.iter()
.position(|p| p == &a.audio)
.unwrap_or(usize::MAX)
.cmp(
&self
.archive
.audio
.iter()
.position(|p| p == &b.audio)
.unwrap_or(usize::MAX),
)
});
single_format_collection.add_single_formats(data)
}
Ok(single_format_collection)
}
}
fn missing_locales<'a>(available: &Vec<Locale>, searched: &'a Vec<Locale>) -> Vec<&'a Locale> {
searched.iter().filter(|p| !available.contains(p)).collect()
}

View file

@ -1,4 +1,3 @@
mod command; mod command;
mod filter;
pub use command::Archive; pub use command::Archive;

View file

@ -1,19 +1,20 @@
use crate::download::filter::DownloadFilter;
use crate::utils::context::Context; use crate::utils::context::Context;
use crate::utils::download::{DownloadBuilder, DownloadFormat}; use crate::utils::download::{DownloadBuilder, DownloadFormat, DownloadFormatMetadata};
use crate::utils::ffmpeg::FFmpegPreset; use crate::utils::ffmpeg::{FFmpegPreset, SOFTSUB_CONTAINERS};
use crate::utils::filter::Filter; use crate::utils::filter::{Filter, FilterMediaScope};
use crate::utils::format::{Format, SingleFormat}; use crate::utils::format::{Format, SingleFormat};
use crate::utils::locale::{resolve_locales, LanguageTagging};
use crate::utils::log::progress; use crate::utils::log::progress;
use crate::utils::os::{free_file, has_ffmpeg, is_special_file}; use crate::utils::os::{free_file, has_ffmpeg, is_special_file};
use crate::utils::parse::parse_url; use crate::utils::parse::parse_url;
use crate::utils::video::variant_data_from_stream; use crate::utils::video::stream_data_from_stream;
use crate::Execute; use crate::Execute;
use anyhow::bail; use anyhow::bail;
use anyhow::Result; use anyhow::Result;
use crunchyroll_rs::media::Resolution; use crunchyroll_rs::media::Resolution;
use crunchyroll_rs::Locale; use crunchyroll_rs::Locale;
use log::{debug, warn}; use log::{debug, error, warn};
use std::collections::HashMap;
use std::path::Path; use std::path::Path;
#[derive(Clone, Debug, clap::Parser)] #[derive(Clone, Debug, clap::Parser)]
@ -23,34 +24,54 @@ pub struct Download {
#[arg(help = format!("Audio language. Can only be used if the provided url(s) point to a series. \ #[arg(help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))] Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(long_help = format!("Audio language. Can only be used if the provided url(s) point to a series. \ #[arg(long_help = format!("Audio language. Can only be used if the provided url(s) point to a series. \
Available languages are:\n {}", Locale::all().into_iter().map(|l| format!("{:<6} {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))] Available languages are:\n {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| format!("{:<6} {}", l.to_string(), l.to_human_readable())).collect::<Vec<String>>().join("\n ")))]
#[arg(short, long, default_value_t = crate::utils::locale::system_locale())] #[arg(short, long, default_value_t = crate::utils::locale::system_locale())]
pub(crate) audio: Locale, pub(crate) audio: Locale,
#[arg(skip)]
output_audio_locale: String,
#[arg(help = format!("Subtitle language. Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))] #[arg(help = format!("Subtitle language. Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(long_help = format!("Subtitle language. If set, the subtitle will be burned into the video and cannot be disabled. \ #[arg(long_help = format!("Subtitle language. If set, the subtitle will be burned into the video and cannot be disabled. \
Available languages are: {}", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))] Available languages are: {}\nIETF tagged language codes for the shown available locales can be used too", Locale::all().into_iter().map(|l| l.to_string()).collect::<Vec<String>>().join(", ")))]
#[arg(short, long)] #[arg(short, long)]
pub(crate) subtitle: Option<Locale>, pub(crate) subtitle: Option<Locale>,
#[arg(skip)]
output_subtitle_locale: String,
#[arg(help = "Name of the output file")] #[arg(help = "Name of the output file")]
#[arg(long_help = "Name of the output file.\ #[arg(long_help = "Name of the output file. \
If you use one of the following patterns they will get replaced:\n \ If you use one of the following patterns they will get replaced:\n \
{title} Title of the video\n \ {title} Title of the video\n \
{series_name} Name of the series\n \ {series_name} Name of the series\n \
{season_name} Name of the season\n \ {season_name} Name of the season\n \
{audio} Audio language of the video\n \ {audio} Audio language of the video\n \
{resolution} Resolution of the video\n \ {width} Width of the video\n \
{height} Height of the video\n \
{season_number} Number of the season\n \ {season_number} Number of the season\n \
{episode_number} Number of the episode\n \ {episode_number} Number of the episode\n \
{relative_episode_number} Number of the episode relative to its season\n \ {relative_episode_number} Number of the episode relative to its season\n \
{sequence_number} Like '{episode_number}' but without possible non-number characters\n \
{relative_sequence_number} Like '{relative_episode_number}' but with support for episode 0's and .5's\n \
{release_year} Release year of the video\n \
{release_month} Release month of the video\n \
{release_day} Release day of the video\n \
{series_id} ID of the series\n \ {series_id} ID of the series\n \
{season_id} ID of the season\n \ {season_id} ID of the season\n \
{episode_id} ID of the episode")] {episode_id} ID of the episode")]
#[arg(short, long, default_value = "{title}.mp4")] #[arg(short, long, default_value = "{title}.mp4")]
pub(crate) output: String, pub(crate) output: String,
#[arg(help = "Name of the output file if the episode is a special")]
#[arg(long_help = "Name of the output file if the episode is a special. \
If not set, the '-o'/'--output' flag will be used as name template")]
#[arg(long)]
pub(crate) output_specials: Option<String>,
#[arg(help = "Sanitize the output file for use with all operating systems. \
This option only affects template options and not static characters.")]
#[arg(long, default_value_t = false)]
pub(crate) universal_output: bool,
#[arg(help = "Video resolution")] #[arg(help = "Video resolution")]
#[arg(long_help = "The video resolution.\ #[arg(long_help = "The video resolution. \
Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \ Can either be specified via the pixels (e.g. 1920x1080), the abbreviation for pixels (e.g. 1080p) or 'common-use' words (e.g. best). \
Specifying the exact pixels is not recommended, use one of the other options instead. \ Specifying the exact pixels is not recommended, use one of the other options instead. \
Crunchyroll lets you choose the quality with pixel abbreviations on their clients, so you might already be familiar with the available options. \ Crunchyroll lets you choose the quality with pixel abbreviations on their clients, so you might already be familiar with the available options. \
@ -59,6 +80,18 @@ pub struct Download {
#[arg(value_parser = crate::utils::clap::clap_parse_resolution)] #[arg(value_parser = crate::utils::clap::clap_parse_resolution)]
pub(crate) resolution: Resolution, pub(crate) resolution: Resolution,
#[arg(
long,
help = "Specified which language tagging the audio and subtitle tracks and language specific format options should have. \
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard)"
)]
#[arg(
long_help = "Specified which language tagging the audio and subtitle tracks and language specific format options should have. \
Valid options are: 'default' (how Crunchyroll uses it internally), 'ietf' (according to the IETF standard; you might run in issues as there are multiple locales which resolve to the same IETF language code, e.g. 'es-LA' and 'es-ES' are both resolving to 'es')"
)]
#[arg(value_parser = LanguageTagging::parse)]
pub(crate) language_tagging: Option<LanguageTagging>,
#[arg(help = format!("Presets for converting the video to a specific coding format. \ #[arg(help = format!("Presets for converting the video to a specific coding format. \
Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))] Available presets: \n {}", FFmpegPreset::available_matches_human_readable().join("\n ")))]
#[arg(long_help = format!("Presets for converting the video to a specific coding format. \ #[arg(long_help = format!("Presets for converting the video to a specific coding format. \
@ -67,10 +100,31 @@ pub struct Download {
#[arg(long)] #[arg(long)]
#[arg(value_parser = FFmpegPreset::parse)] #[arg(value_parser = FFmpegPreset::parse)]
pub(crate) ffmpeg_preset: Option<FFmpegPreset>, pub(crate) ffmpeg_preset: Option<FFmpegPreset>,
#[arg(
help = "The number of threads used by ffmpeg to generate the output file. Does not work with every codec/preset"
)]
#[arg(
long_help = "The number of threads used by ffmpeg to generate the output file. \
Does not work with every codec/preset and is skipped entirely when specifying custom ffmpeg output arguments instead of a preset for `--ffmpeg-preset`. \
By default, ffmpeg chooses the thread count which works best for the output codec"
)]
#[arg(long)]
pub(crate) ffmpeg_threads: Option<usize>,
#[arg(help = "Skip files which are already existing")] #[arg(help = "Skip files which are already existing by their name")]
#[arg(long, default_value_t = false)] #[arg(long, default_value_t = false)]
pub(crate) skip_existing: bool, pub(crate) skip_existing: bool,
#[arg(help = "Skip special episodes")]
#[arg(long, default_value_t = false)]
pub(crate) skip_specials: bool,
#[arg(help = "Includes chapters (e.g. intro, credits, ...)")]
#[arg(long_help = "Includes chapters (e.g. intro, credits, ...). \
Because chapters are essentially just special timeframes in an episode, like the intro, most of the video timeline isn't covered by a chapter.
These \"gaps\" are filled with an 'Episode' chapter because many video players ignore those gaps and just assume that a chapter ends when the next chapter's start is reached, even if a specific end time is set.
Chapters also aren't always available; in that case, a single 'Episode' chapter from start to end will be created")]
#[arg(long, default_value_t = false)]
pub(crate) include_chapters: bool,
#[arg(help = "Skip any interactive input")] #[arg(help = "Skip any interactive input")]
#[arg(short, long, default_value_t = false)] #[arg(short, long, default_value_t = false)]
@ -80,12 +134,15 @@ pub struct Download {
#[arg(long, default_value_t = false)] #[arg(long, default_value_t = false)]
pub(crate) force_hardsub: bool, pub(crate) force_hardsub: bool,
#[arg(help = "The number of threads used to download")]
#[arg(short, long, default_value_t = num_cpus::get())]
pub(crate) threads: usize,
#[arg(help = "Url(s) to Crunchyroll episodes or series")] #[arg(help = "Url(s) to Crunchyroll episodes or series")]
#[arg(required = true)] #[arg(required = true)]
pub(crate) urls: Vec<String>, pub(crate) urls: Vec<String>,
} }
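To make the `--include-chapters` gap-filling described above concrete, here is a minimal sketch under illustrative types (not the crate's own): any part of the timeline that no known chapter covers becomes an 'Episode' chapter, and when no chapters exist at all the result is one 'Episode' chapter spanning the whole video.

#[derive(Debug)]
struct Chapter {
    start: f64, // seconds
    end: f64,
    title: String,
}

fn fill_gaps(mut known: Vec<Chapter>, video_end: f64) -> Vec<Chapter> {
    known.sort_by(|a, b| a.start.total_cmp(&b.start));
    let mut chapters = vec![];
    let mut cursor = 0.0_f64;
    for c in known {
        if c.start > cursor {
            // uncovered span before this chapter -> filler "Episode" chapter
            chapters.push(Chapter { start: cursor, end: c.start, title: "Episode".into() });
        }
        cursor = cursor.max(c.end);
        chapters.push(c);
    }
    if cursor < video_end {
        // no chapter covers the tail (or there were no chapters at all)
        chapters.push(Chapter { start: cursor, end: video_end, title: "Episode".into() });
    }
    chapters
}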
#[async_trait::async_trait(?Send)]
impl Execute for Download { impl Execute for Download {
fn pre_check(&mut self) -> Result<()> { fn pre_check(&mut self) -> Result<()> {
if !has_ffmpeg() { if !has_ffmpeg() {
@ -110,12 +167,75 @@ impl Execute for Download {
} }
} }
if let Some(special_output) = &self.output_specials {
if Path::new(special_output)
.extension()
.unwrap_or_default()
.is_empty()
&& !is_special_file(special_output)
&& special_output != "-"
{
bail!("No file extension found. Please specify a file extension (via `--output-specials`) for the output file")
}
if let Some(ext) = Path::new(special_output).extension() {
if self.force_hardsub {
warn!("Hardsubs are forced for special episodes. Adding subtitles may take a while")
} else if !["mkv", "mov", "mp4"].contains(&ext.to_string_lossy().as_ref()) {
warn!("Detected a container which does not support softsubs. Adding subtitles for special episodes may take a while")
}
}
}
if let Some(language_tagging) = &self.language_tagging {
self.audio = resolve_locales(&[self.audio.clone()]).remove(0);
self.subtitle = self
.subtitle
.as_ref()
.map(|s| resolve_locales(&[s.clone()]).remove(0));
self.output_audio_locale = language_tagging.for_locale(&self.audio);
self.output_subtitle_locale = self
.subtitle
.as_ref()
.map(|s| language_tagging.for_locale(s))
.unwrap_or_default()
} else {
self.output_audio_locale = self.audio.to_string();
self.output_subtitle_locale = self
.subtitle
.as_ref()
.map(|s| s.to_string())
.unwrap_or_default();
}
Ok(()) Ok(())
} }
async fn execute(self, ctx: Context) -> Result<()> { async fn execute(self, ctx: Context) -> Result<()> {
if !ctx.crunchy.premium().await {
warn!("You may not be able to download all requested videos when logging in anonymously or using a non-premium account")
}
let mut parsed_urls = vec![]; let mut parsed_urls = vec![];
let output_supports_softsubs = SOFTSUB_CONTAINERS.contains(
&Path::new(&self.output)
.extension()
.unwrap_or_default()
.to_string_lossy()
.as_ref(),
);
let special_output_supports_softsubs = if let Some(so) = &self.output_specials {
SOFTSUB_CONTAINERS.contains(
&Path::new(so)
.extension()
.unwrap_or_default()
.to_string_lossy()
.as_ref(),
)
} else {
output_supports_softsubs
};
for (i, url) in self.urls.clone().into_iter().enumerate() { for (i, url) in self.urls.clone().into_iter().enumerate() {
let progress_handler = progress!("Parsing url {}", i + 1); let progress_handler = progress!("Parsing url {}", i + 1);
match parse_url(&ctx.crunchy, url.clone(), true).await { match parse_url(&ctx.crunchy, url.clone(), true).await {
@ -129,7 +249,57 @@ impl Execute for Download {
for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() { for (i, (media_collection, url_filter)) in parsed_urls.into_iter().enumerate() {
let progress_handler = progress!("Fetching series details"); let progress_handler = progress!("Fetching series details");
let single_format_collection = DownloadFilter::new(url_filter, self.clone(), !self.yes) let single_format_collection = Filter::new(
url_filter,
vec![self.audio.clone()],
self.subtitle.as_ref().map_or(vec![], |s| vec![s.clone()]),
|scope, locales| {
match scope {
FilterMediaScope::Series(series) => bail!("Series {} is not available with {} audio", series.title, locales[0]),
FilterMediaScope::Season(season) => {
error!("Season {} is not available with {} audio", season.season_number, locales[0]);
Ok(false)
}
FilterMediaScope::Episode(episodes) => {
if episodes.len() == 1 {
warn!("Episode {} of season {} is not available with {} audio", episodes[0].sequence_number, episodes[0].season_title, locales[0])
} else if episodes.len() == 2 {
warn!("Season {} is only available with {} audio from episode {} to {}", episodes[0].season_number, locales[0], episodes[0].sequence_number, episodes[1].sequence_number)
} else {
unimplemented!()
}
Ok(false)
}
}
},
|scope, locales| {
match scope {
FilterMediaScope::Series(series) => bail!("Series {} is not available with {} subtitles", series.title, locales[0]),
FilterMediaScope::Season(season) => {
warn!("Season {} is not available with {} subtitles", season.season_number, locales[0]);
Ok(false)
},
FilterMediaScope::Episode(episodes) => {
if episodes.len() == 1 {
warn!("Episode {} of season {} is not available with {} subtitles", episodes[0].sequence_number, episodes[0].season_title, locales[0])
} else if episodes.len() == 2 {
warn!("Season {} is only available with {} subtitles from episode {} to {}", episodes[0].season_number, locales[0], episodes[0].sequence_number, episodes[1].sequence_number)
} else {
unimplemented!()
}
Ok(false)
}
}
},
|season| {
warn!("Skipping premium episodes in season {season}");
Ok(())
},
Format::has_relative_fmt(&self.output),
!self.yes,
self.skip_specials,
ctx.crunchy.premium().await,
)
.visit(media_collection) .visit(media_collection)
.await?; .await?;
@ -141,25 +311,63 @@ impl Execute for Download {
single_format_collection.full_visual_output(); single_format_collection.full_visual_output();
let download_builder = DownloadBuilder::new() let download_builder =
DownloadBuilder::new(ctx.client.clone(), ctx.rate_limiter.clone())
.default_subtitle(self.subtitle.clone()) .default_subtitle(self.subtitle.clone())
.force_hardsub(self.force_hardsub) .force_hardsub(self.force_hardsub)
.output_format(if is_special_file(&self.output) || self.output == "-" { .output_format(if is_special_file(&self.output) || self.output == "-" {
Some("mpegts".to_string()) Some("mpegts".to_string())
} else { } else {
None None
}); })
.ffmpeg_preset(self.ffmpeg_preset.clone().unwrap_or_default())
.ffmpeg_threads(self.ffmpeg_threads)
.threads(self.threads)
.audio_locale_output_map(HashMap::from([(
self.audio.clone(),
self.output_audio_locale.clone(),
)]))
.subtitle_locale_output_map(
self.subtitle.as_ref().map_or(HashMap::new(), |s| {
HashMap::from([(s.clone(), self.output_subtitle_locale.clone())])
}),
);
for mut single_formats in single_format_collection.into_iter() { for mut single_formats in single_format_collection.into_iter() {
// the vec always contains only one item // the vec always contains only one item
let single_format = single_formats.remove(0); let single_format = single_formats.remove(0);
let (download_format, format) = get_format(&self, &single_format).await?; let (download_format, format) = get_format(
&self,
&single_format,
if self.force_hardsub {
true
} else if single_format.is_special() {
!special_output_supports_softsubs
} else {
!output_supports_softsubs
},
)
.await?;
let mut downloader = download_builder.clone().build(); let mut downloader = download_builder.clone().build();
downloader.add_format(download_format); downloader.add_format(download_format);
let formatted_path = format.format_path((&self.output).into(), true); let formatted_path = if format.is_special() {
format.format_path(
self.output_specials
.as_ref()
.map_or((&self.output).into(), |so| so.into()),
self.universal_output,
self.language_tagging.as_ref(),
)
} else {
format.format_path(
(&self.output).into(),
self.universal_output,
self.language_tagging.as_ref(),
)
};
let (path, changed) = free_file(formatted_path.clone()); let (path, changed) = free_file(formatted_path.clone());
if changed && self.skip_existing { if changed && self.skip_existing {
@ -172,7 +380,7 @@ impl Execute for Download {
format.visual_output(&path); format.visual_output(&path);
downloader.download(&ctx, &path).await? downloader.download(&path).await?
} }
} }
@ -183,9 +391,19 @@ impl Execute for Download {
async fn get_format( async fn get_format(
download: &Download, download: &Download,
single_format: &SingleFormat, single_format: &SingleFormat,
try_peer_hardsubs: bool,
) -> Result<(DownloadFormat, Format)> { ) -> Result<(DownloadFormat, Format)> {
let stream = single_format.stream().await?; let stream = single_format.stream().await?;
let Some((video, audio)) = variant_data_from_stream(&stream, &download.resolution).await? let Some((video, audio, contains_hardsub)) = stream_data_from_stream(
&stream,
&download.resolution,
if try_peer_hardsubs {
download.subtitle.clone()
} else {
None
},
)
.await?
else { else {
if single_format.is_episode() { if single_format.is_episode() {
bail!( bail!(
@ -206,8 +424,23 @@ async fn get_format(
} }
}; };
let subtitle = if let Some(subtitle_locale) = &download.subtitle { let subtitle = if contains_hardsub {
stream.subtitles.get(subtitle_locale).map(|s| s.clone()) None
} else if let Some(subtitle_locale) = &download.subtitle {
if download.audio == Locale::ja_JP {
stream
.subtitles
.get(subtitle_locale)
// use closed captions as fallback if no actual subtitles are found
.or_else(|| stream.captions.get(subtitle_locale))
.cloned()
} else {
stream
.captions
.get(subtitle_locale)
.or_else(|| stream.subtitles.get(subtitle_locale))
.cloned()
}
} else { } else {
None None
}; };
@ -218,20 +451,33 @@ async fn get_format(
subtitles: subtitle.clone().map_or(vec![], |s| { subtitles: subtitle.clone().map_or(vec![], |s| {
vec![( vec![(
s, s,
single_format.audio == Locale::ja_JP || stream.subtitles.len() > 1, single_format.audio != Locale::ja_JP && stream.subtitles.len() == 1,
)] )]
}), }),
metadata: DownloadFormatMetadata {
skip_events: if download.include_chapters {
single_format.skip_events().await?
} else {
None
},
},
}; };
let format = Format::from_single_formats(vec![( let mut format = Format::from_single_formats(vec![(
single_format.clone(), single_format.clone(),
video, video,
subtitle.map_or(vec![], |s| { subtitle.map_or(vec![], |s| {
vec![( vec![(
s, s,
single_format.audio == Locale::ja_JP || stream.subtitles.len() > 1, single_format.audio != Locale::ja_JP && stream.subtitles.len() == 1,
)] )]
}), }),
)]); )]);
if contains_hardsub {
let (_, subs) = format.locales.get_mut(0).unwrap();
subs.push(download.subtitle.clone().unwrap())
}
stream.invalidate().await?;
Ok((download_format, format)) Ok((download_format, format))
} }
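The subtitle selection inside `get_format` above reduces to a preference order: hardsubbed streams carry no separate subtitle track, Japanese audio prefers real subtitles with closed captions as a fallback, and any other (dubbed) audio prefers closed captions with subtitles as a fallback. A reduced sketch of that rule, with plain `HashMap`s standing in for the stream's subtitle and caption maps (illustrative names, not the crate's API):

use std::collections::HashMap;

fn pick_subtitle<'a>(
    contains_hardsub: bool,
    audio_is_japanese: bool,
    subtitles: &'a HashMap<String, String>,
    captions: &'a HashMap<String, String>,
    locale: &str,
) -> Option<&'a String> {
    if contains_hardsub {
        // the subtitle is already burned into the video stream
        None
    } else if audio_is_japanese {
        subtitles.get(locale).or_else(|| captions.get(locale))
    } else {
        captions.get(locale).or_else(|| subtitles.get(locale))
    }
}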

View file

@ -1,265 +0,0 @@
use crate::download::Download;
use crate::utils::filter::Filter;
use crate::utils::format::{Format, SingleFormat, SingleFormatCollection};
use crate::utils::interactive_select::{check_for_duplicated_seasons, get_duplicated_seasons};
use crate::utils::parse::UrlFilter;
use anyhow::{bail, Result};
use crunchyroll_rs::{Concert, Episode, Movie, MovieListing, MusicVideo, Season, Series};
use log::{error, info, warn};
use std::collections::HashMap;
pub(crate) struct DownloadFilter {
url_filter: UrlFilter,
download: Download,
interactive_input: bool,
season_episode_count: HashMap<u32, Vec<String>>,
season_subtitles_missing: Vec<u32>,
season_visited: bool,
}
impl DownloadFilter {
pub(crate) fn new(url_filter: UrlFilter, download: Download, interactive_input: bool) -> Self {
Self {
url_filter,
download,
interactive_input,
season_episode_count: HashMap::new(),
season_subtitles_missing: vec![],
season_visited: false,
}
}
}
#[async_trait::async_trait]
impl Filter for DownloadFilter {
type T = SingleFormat;
type Output = SingleFormatCollection;
async fn visit_series(&mut self, series: Series) -> Result<Vec<Season>> {
// `series.audio_locales` isn't always populated b/c of crunchyrolls api. so check if the
// audio is matching only if the field is populated
if !series.audio_locales.is_empty() {
if !series.audio_locales.contains(&self.download.audio) {
error!(
"Series {} is not available with {} audio",
series.title, self.download.audio
);
return Ok(vec![]);
}
}
let mut seasons = vec![];
for mut season in series.seasons().await? {
if !self.url_filter.is_season_valid(season.season_number) {
continue;
}
if !season
.audio_locales
.iter()
.any(|l| l == &self.download.audio)
{
if season
.available_versions()
.await?
.iter()
.any(|l| l == &self.download.audio)
{
season = season
.version(vec![self.download.audio.clone()])
.await?
.remove(0)
} else {
error!(
"Season {} - '{}' is not available with {} audio",
season.season_number,
season.title,
self.download.audio.clone(),
);
continue;
}
}
seasons.push(season)
}
let duplicated_seasons = get_duplicated_seasons(&seasons);
if duplicated_seasons.len() > 0 {
if self.interactive_input {
check_for_duplicated_seasons(&mut seasons);
} else {
info!(
"Found duplicated seasons: {}",
duplicated_seasons
.iter()
.map(|d| d.to_string())
.collect::<Vec<String>>()
.join(", ")
)
}
}
Ok(seasons)
}
async fn visit_season(&mut self, season: Season) -> Result<Vec<Episode>> {
self.season_visited = true;
let mut episodes = season.episodes().await?;
if Format::has_relative_episodes_fmt(&self.download.output) {
for episode in episodes.iter() {
self.season_episode_count
.entry(episode.season_number)
.or_insert(vec![])
.push(episode.id.clone())
}
}
episodes.retain(|e| {
self.url_filter
.is_episode_valid(e.episode_number, season.season_number)
});
Ok(episodes)
}
async fn visit_episode(&mut self, mut episode: Episode) -> Result<Option<Self::T>> {
if !self
.url_filter
.is_episode_valid(episode.episode_number, episode.season_number)
{
return Ok(None);
}
// check if the audio locale is correct.
// should only be incorrect if the console input was a episode url. otherwise
// `DownloadFilter::visit_season` returns the correct episodes with matching audio
if episode.audio_locale != self.download.audio {
// check if any other version (same episode, other language) of this episode is available
// with the requested audio. if not, return an error
if !episode
.available_versions()
.await?
.contains(&self.download.audio)
{
let error_message = format!(
"Episode {} ({}) of {} season {} is not available with {} audio",
episode.episode_number,
episode.title,
episode.series_title,
episode.season_number,
self.download.audio
);
// sometimes a series randomly has episode in an other language. if this is the case,
// only error if the input url was a episode url
if self.season_visited {
warn!("{}", error_message);
return Ok(None);
} else {
bail!("{}", error_message)
}
}
// overwrite the current episode with the other version episode
episode = episode
.version(vec![self.download.audio.clone()])
.await?
.remove(0)
}
// check if the subtitles are supported
if let Some(subtitle_locale) = &self.download.subtitle {
if !episode.subtitle_locales.contains(subtitle_locale) {
// if the episode doesn't have the requested subtitles, print a error. to print this
// error only once per season, it's checked if an error got printed before by looking
// up if the season id is present in `self.season_subtitles_missing`. if not, print
// the error and add the season id to `self.season_subtitles_missing`. if it is
// present, skip the error printing
if !self
.season_subtitles_missing
.contains(&episode.season_number)
{
self.season_subtitles_missing.push(episode.season_number);
error!(
"{} season {} is not available with {} subtitles",
episode.series_title, episode.season_number, subtitle_locale
);
}
return Ok(None);
}
}
// get the relative episode number. only done if the output string has the pattern to include
// the relative episode number as this requires some extra fetching
let relative_episode_number = if Format::has_relative_episodes_fmt(&self.download.output) {
if self
.season_episode_count
.get(&episode.season_number)
.is_none()
{
let season_episodes = episode.season().await?.episodes().await?;
self.season_episode_count.insert(
episode.season_number,
season_episodes.into_iter().map(|e| e.id).collect(),
);
}
let relative_episode_number = self
.season_episode_count
.get(&episode.season_number)
.unwrap()
.iter()
.position(|id| id == &episode.id)
.map(|index| index + 1);
if relative_episode_number.is_none() {
warn!(
"Failed to get relative episode number for episode {} ({}) of {} season {}",
episode.episode_number,
episode.title,
episode.series_title,
episode.season_number,
)
}
relative_episode_number
} else {
None
};
Ok(Some(SingleFormat::new_from_episode(
episode.clone(),
self.download.subtitle.clone().map_or(vec![], |s| {
if episode.subtitle_locales.contains(&s) {
vec![s]
} else {
vec![]
}
}),
relative_episode_number.map(|n| n as u32),
)))
}
async fn visit_movie_listing(&mut self, movie_listing: MovieListing) -> Result<Vec<Movie>> {
Ok(movie_listing.movies().await?)
}
async fn visit_movie(&mut self, movie: Movie) -> Result<Option<Self::T>> {
Ok(Some(SingleFormat::new_from_movie(movie, vec![])))
}
async fn visit_music_video(&mut self, music_video: MusicVideo) -> Result<Option<Self::T>> {
Ok(Some(SingleFormat::new_from_music_video(music_video)))
}
async fn visit_concert(&mut self, concert: Concert) -> Result<Option<Self::T>> {
Ok(Some(SingleFormat::new_from_concert(concert)))
}
async fn finish(self, input: Vec<Self::T>) -> Result<Self::Output> {
let mut single_format_collection = SingleFormatCollection::new();
for data in input {
single_format_collection.add_single_formats(vec![data])
}
Ok(single_format_collection)
}
}

View file

@ -1,4 +1,3 @@
mod command; mod command;
mod filter;
pub use command::Download; pub use command::Download;

View file

@ -8,7 +8,7 @@ use crunchyroll_rs::crunchyroll::CrunchyrollBuilder;
use crunchyroll_rs::error::Error; use crunchyroll_rs::error::Error;
use crunchyroll_rs::{Crunchyroll, Locale}; use crunchyroll_rs::{Crunchyroll, Locale};
use log::{debug, error, warn, LevelFilter}; use log::{debug, error, warn, LevelFilter};
use reqwest::Proxy; use reqwest::{Client, Proxy};
use std::{env, fs}; use std::{env, fs};
mod archive; mod archive;
@ -17,18 +17,18 @@ mod login;
mod search; mod search;
mod utils; mod utils;
use crate::utils::rate_limit::RateLimiterService;
pub use archive::Archive; pub use archive::Archive;
use dialoguer::console::Term; use dialoguer::console::Term;
pub use download::Download; pub use download::Download;
pub use login::Login; pub use login::Login;
pub use search::Search; pub use search::Search;
#[async_trait::async_trait(?Send)]
trait Execute { trait Execute {
fn pre_check(&mut self) -> Result<()> { fn pre_check(&mut self) -> Result<()> {
Ok(()) Ok(())
} }
async fn execute(mut self, ctx: Context) -> Result<()>; async fn execute(self, ctx: Context) -> Result<()>;
} }
#[derive(Debug, Parser)] #[derive(Debug, Parser)]
@ -36,21 +36,24 @@ trait Execute {
#[clap(name = "crunchy-cli")] #[clap(name = "crunchy-cli")]
pub struct Cli { pub struct Cli {
#[clap(flatten)] #[clap(flatten)]
verbosity: Option<Verbosity>, verbosity: Verbosity,
#[arg( #[arg(
help = "Overwrite the language in which results are returned. Default is your system language" help = "Overwrite the language in which results are returned. Default is your system language"
)] )]
#[arg(long)] #[arg(global = true, long)]
lang: Option<Locale>, lang: Option<Locale>,
#[arg(help = "Enable experimental fixes which may resolve some unexpected errors")] #[arg(
help = "Enable experimental fixes which may resolve some unexpected errors. Generally not recommended as this flag may crash the program completely"
)]
#[arg( #[arg(
long_help = "Enable experimental fixes which may resolve some unexpected errors. \ long_help = "Enable experimental fixes which may resolve some unexpected errors. \
It is not recommended to use this flag regularly, as it might cause unexpected errors which may crash the program completely. \
If everything works as intended this option isn't needed, but sometimes Crunchyroll mislabels \ If everything works as intended this option isn't needed, but sometimes Crunchyroll mislabels \
the audio of a series/season or episode or returns a wrong season number. This is when using this option might help to solve the issue" the audio of a series/season or episode or returns a wrong season number. This is when using this option might help to solve the issue"
)] )]
#[arg(long, default_value_t = false)] #[arg(global = true, long, default_value_t = false)]
experimental_fixes: bool, experimental_fixes: bool,
#[clap(flatten)] #[clap(flatten)]
@ -58,15 +61,24 @@ pub struct Cli {
#[arg(help = "Use a proxy to route all traffic through")] #[arg(help = "Use a proxy to route all traffic through")]
#[arg(long_help = "Use a proxy to route all traffic through. \ #[arg(long_help = "Use a proxy to route all traffic through. \
Make sure that the proxy can either forward TLS requests, which is needed to bypass the (cloudflare) bot protection, or that it is configured so that the proxy can bypass the protection itself")] Make sure that the proxy can either forward TLS requests, which is needed to bypass the (cloudflare) bot protection, or that it is configured so that the proxy can bypass the protection itself. \
#[clap(long)] Besides specifying a simple url, you can also partially control where a proxy should be used: '<url>:' only proxies api requests, ':<url>' only proxies download traffic, '<url>:<url>' proxies api requests through the first url and download traffic through the second url")]
#[arg(value_parser = crate::utils::clap::clap_parse_proxy)] #[arg(global = true, long, value_parser = crate::utils::clap::clap_parse_proxies)]
proxy: Option<Proxy>, proxy: Option<(Option<Proxy>, Option<Proxy>)>,
#[arg(help = "Use custom user agent")] #[arg(help = "Use custom user agent")]
#[clap(long)] #[arg(global = true, long)]
user_agent: Option<String>, user_agent: Option<String>,
#[arg(
help = "Maximal speed to download/request (may be a bit off here and there). Must be in format of <number>[B|KB|MB]"
)]
#[arg(
long_help = "Maximal speed to download/request (may be a bit off here and there). Must be in format of <number>[B|KB|MB] (e.g. 500KB or 10MB)"
)]
#[arg(global = true, long, value_parser = crate::utils::clap::clap_parse_speed_limit)]
speed_limit: Option<u32>,
#[clap(subcommand)] #[clap(subcommand)]
command: Command, command: Command,
} }
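A hedged sketch of the `<number>[B|KB|MB]` format that `--speed-limit` expects; the actual parsing lives in `clap_parse_speed_limit`, which is not shown in this diff, so details such as whether a kilobyte counts as 1000 or 1024 bytes are assumptions here:

fn parse_speed_limit(s: &str) -> Result<u32, String> {
    let s = s.trim();
    // check the longer suffixes first so "MB"/"KB" are not mistaken for a bare "B"
    let (number, factor) = if let Some(n) = s.strip_suffix("MB") {
        (n, 1024 * 1024)
    } else if let Some(n) = s.strip_suffix("KB") {
        (n, 1024)
    } else if let Some(n) = s.strip_suffix('B') {
        (n, 1)
    } else {
        return Err(format!("'{}' is missing a unit (B, KB or MB)", s));
    };
    number
        .trim()
        .parse::<u32>()
        .map(|n| n * factor)
        .map_err(|_| format!("'{}' is not a valid speed limit", s))
}

Under these assumptions, `parse_speed_limit("10MB")` would yield 10485760 bytes per second.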
@ -77,7 +89,7 @@ fn version() -> String {
let build_date = env!("BUILD_DATE"); let build_date = env!("BUILD_DATE");
if git_commit_hash.is_empty() { if git_commit_hash.is_empty() {
format!("{}", package_version) package_version.to_string()
} else { } else {
format!("{} ({} {})", package_version, git_commit_hash, build_date) format!("{} ({} {})", package_version, git_commit_hash, build_date)
} }
@ -94,27 +106,27 @@ enum Command {
#[derive(Debug, Parser)] #[derive(Debug, Parser)]
struct Verbosity { struct Verbosity {
#[arg(help = "Verbose output")] #[arg(help = "Verbose output")]
#[arg(short, long)] #[arg(global = true, short, long)]
verbose: bool, verbose: bool,
#[arg(help = "Quiet output. Does not print anything unless it's a error")] #[arg(help = "Quiet output. Does not print anything unless it's a error")]
#[arg( #[arg(
long_help = "Quiet output. Does not print anything unless it's a error. Can be helpful if you pipe the output to stdout" long_help = "Quiet output. Does not print anything unless it's a error. Can be helpful if you pipe the output to stdout"
)] )]
#[arg(short, long)] #[arg(global = true, short, long)]
quiet: bool, quiet: bool,
} }
pub async fn cli_entrypoint() { pub async fn main(args: &[String]) {
let mut cli: Cli = Cli::parse(); let mut cli: Cli = Cli::parse_from(args);
if let Some(verbosity) = &cli.verbosity { if cli.verbosity.verbose || cli.verbosity.quiet {
if verbosity.verbose as u8 + verbosity.quiet as u8 > 1 { if cli.verbosity.verbose && cli.verbosity.quiet {
eprintln!("Output cannot be verbose ('-v') and quiet ('-q') at the same time"); eprintln!("Output cannot be verbose ('-v') and quiet ('-q') at the same time");
std::process::exit(1) std::process::exit(1)
} else if verbosity.verbose { } else if cli.verbosity.verbose {
CliLogger::init(LevelFilter::Debug).unwrap() CliLogger::init(LevelFilter::Debug).unwrap()
} else if verbosity.quiet { } else if cli.verbosity.quiet {
CliLogger::init(LevelFilter::Error).unwrap() CliLogger::init(LevelFilter::Error).unwrap()
} }
} else { } else {
@ -126,14 +138,14 @@ pub async fn cli_entrypoint() {
match &mut cli.command { match &mut cli.command {
Command::Archive(archive) => { Command::Archive(archive) => {
// prevent interactive select to be shown when output should be quiet // prevent interactive select to be shown when output should be quiet
if cli.verbosity.is_some() && cli.verbosity.as_ref().unwrap().quiet { if cli.verbosity.quiet {
archive.yes = true; archive.yes = true;
} }
pre_check_executor(archive).await pre_check_executor(archive).await
} }
Command::Download(download) => { Command::Download(download) => {
// prevent interactive select to be shown when output should be quiet // prevent interactive select to be shown when output should be quiet
if cli.verbosity.is_some() && cli.verbosity.as_ref().unwrap().quiet { if cli.verbosity.quiet {
download.yes = true; download.yes = true;
} }
pre_check_executor(download).await pre_check_executor(download).await
@ -162,7 +174,7 @@ pub async fn cli_entrypoint() {
ctrlc::set_handler(move || { ctrlc::set_handler(move || {
debug!("Ctrl-c detected"); debug!("Ctrl-c detected");
if let Ok(dir) = fs::read_dir(&env::temp_dir()) { if let Ok(dir) = fs::read_dir(env::temp_dir()) {
for file in dir.flatten() { for file in dir.flatten() {
if file if file
.path() .path()
@ -172,6 +184,7 @@ pub async fn cli_entrypoint() {
.unwrap_or_default() .unwrap_or_default()
.starts_with(".crunchy-cli_") .starts_with(".crunchy-cli_")
{ {
if file.file_type().map_or(true, |ft| ft.is_file()) {
let result = fs::remove_file(file.path()); let result = fs::remove_file(file.path());
debug!( debug!(
"Ctrl-c removed temporary file {} {}", "Ctrl-c removed temporary file {} {}",
@ -182,6 +195,18 @@ pub async fn cli_entrypoint() {
"not successfully" "not successfully"
} }
) )
} else {
let result = fs::remove_dir_all(file.path());
debug!(
"Ctrl-c removed temporary directory {} {}",
file.path().to_string_lossy(),
if result.is_ok() {
"successfully"
} else {
"not successfully"
}
)
}
} }
} }
} }
@ -213,8 +238,6 @@ async fn execute_executor(executor: impl Execute, ctx: Context) {
if let Some(crunchy_error) = err.downcast_mut::<Error>() { if let Some(crunchy_error) = err.downcast_mut::<Error>() {
if let Error::Block { message, .. } = crunchy_error { if let Error::Block { message, .. } = crunchy_error {
*message = "Triggered Cloudflare bot protection. Try again later or use a VPN or proxy to spoof your location".to_string() *message = "Triggered Cloudflare bot protection. Try again later or use a VPN or proxy to spoof your location".to_string()
} else if let Error::Request { message, .. } = crunchy_error {
*message = "You've probably hit a rate limit. Try again later, generally after 10-20 minutes the rate limit is over and you can continue to use the cli".to_string()
} }
error!("An error occurred: {}", crunchy_error) error!("An error occurred: {}", crunchy_error)
@ -227,11 +250,37 @@ async fn execute_executor(executor: impl Execute, ctx: Context) {
} }
async fn create_ctx(cli: &mut Cli) -> Result<Context> { async fn create_ctx(cli: &mut Cli) -> Result<Context> {
let crunchy = crunchyroll_session(cli).await?; let crunchy_client = reqwest_client(
Ok(Context { crunchy }) cli.proxy.as_ref().and_then(|p| p.0.clone()),
cli.user_agent.clone(),
);
let internal_client = reqwest_client(
cli.proxy.as_ref().and_then(|p| p.1.clone()),
cli.user_agent.clone(),
);
let crunchy = crunchyroll_session(
cli,
crunchy_client.clone(),
cli.speed_limit
.map(|l| RateLimiterService::new(l, crunchy_client)),
)
.await?;
Ok(Context {
crunchy,
client: internal_client.clone(),
rate_limiter: cli
.speed_limit
.map(|l| RateLimiterService::new(l, internal_client)),
})
} }
async fn crunchyroll_session(cli: &mut Cli) -> Result<Crunchyroll> { async fn crunchyroll_session(
cli: &mut Cli,
client: Client,
rate_limiter: Option<RateLimiterService>,
) -> Result<Crunchyroll> {
let supported_langs = vec![ let supported_langs = vec![
Locale::ar_ME, Locale::ar_ME,
Locale::de_DE, Locale::de_DE,
@ -250,7 +299,7 @@ async fn crunchyroll_session(cli: &mut Cli) -> Result<Crunchyroll> {
"Via `--lang` specified language is not supported. Supported languages: {}", "Via `--lang` specified language is not supported. Supported languages: {}",
supported_langs supported_langs
.iter() .iter()
.map(|l| format!("`{}` ({})", l.to_string(), l.to_human_readable())) .map(|l| format!("`{}` ({})", l, l.to_human_readable()))
.collect::<Vec<String>>() .collect::<Vec<String>>()
.join(", ") .join(", ")
) )
@ -267,89 +316,58 @@ async fn crunchyroll_session(cli: &mut Cli) -> Result<Crunchyroll> {
let mut builder = Crunchyroll::builder() let mut builder = Crunchyroll::builder()
.locale(locale) .locale(locale)
.client({ .client(client.clone())
let mut builder = CrunchyrollBuilder::predefined_client_builder();
if let Some(p) = &cli.proxy {
builder = builder.proxy(p.clone())
}
if let Some(ua) = &cli.user_agent {
builder = builder.user_agent(ua)
}
#[cfg(any(feature = "openssl-tls", feature = "openssl-tls-static"))]
let client = {
let mut builder = builder.use_native_tls().tls_built_in_root_certs(false);
for certificate in rustls_native_certs::load_native_certs().unwrap() {
builder = builder.add_root_certificate(
reqwest::Certificate::from_der(certificate.0.as_slice()).unwrap(),
)
}
builder.build().unwrap()
};
#[cfg(not(any(feature = "openssl-tls", feature = "openssl-tls-static")))]
let client = builder.build().unwrap();
client
})
.stabilization_locales(cli.experimental_fixes) .stabilization_locales(cli.experimental_fixes)
.stabilization_season_number(cli.experimental_fixes); .stabilization_season_number(cli.experimental_fixes);
if let Command::Download(download) = &cli.command { if let Command::Download(download) = &cli.command {
builder = builder.preferred_audio_locale(download.audio.clone()) builder = builder.preferred_audio_locale(download.audio.clone())
} }
if let Some(rate_limiter) = rate_limiter {
let root_login_methods_count = cli.login_method.credentials.is_some() as u8 builder = builder.middleware(rate_limiter)
+ cli.login_method.etp_rt.is_some() as u8
+ cli.login_method.anonymous as u8;
let mut login_login_methods_count = 0;
if let Command::Login(login) = &cli.command {
login_login_methods_count += login.login_method.credentials.is_some() as u8
+ login.login_method.etp_rt.is_some() as u8
+ login.login_method.anonymous as u8
} }
let root_login_methods_count =
cli.login_method.credentials.is_some() as u8 + cli.login_method.anonymous as u8;
let progress_handler = progress!("Logging in"); let progress_handler = progress!("Logging in");
if root_login_methods_count + login_login_methods_count == 0 { if root_login_methods_count == 0 {
if let Some(login_file_path) = login::session_file_path() { if let Some(login_file_path) = login::session_file_path() {
if login_file_path.exists() { if login_file_path.exists() {
let session = fs::read_to_string(login_file_path)?; let session = fs::read_to_string(login_file_path)?;
if let Some((token_type, token)) = session.split_once(':') { if let Some((token_type, token)) = session.split_once(':') {
match token_type { match token_type {
"refresh_token" => { "refresh_token" => {
return Ok(builder.login_with_refresh_token(token).await?) return match builder.login_with_refresh_token(token).await {
Ok(crunchy) => Ok(crunchy),
Err(e) => {
if let Error::Request { message, .. } = &e {
if message.starts_with("invalid_grant") {
bail!("The stored login is expired, please login again")
} }
"etp_rt" => return Ok(builder.login_with_etp_rt(token).await?), }
Err(e.into())
}
}
}
"etp_rt" => bail!("The stored login method (etp-rt) isn't supported anymore. Please login again using your credentials"),
_ => (), _ => (),
} }
} }
bail!("Could not read stored session ('{}')", session) bail!("Could not read stored session ('{}')", session)
} }
} }
bail!("Please use a login method ('--credentials', '--etp-rt' or '--anonymous')") bail!("Please use a login method ('--credentials' or '--anonymous')")
} else if root_login_methods_count + login_login_methods_count > 1 { } else if root_login_methods_count > 1 {
bail!("Please use only one login method ('--credentials', '--etp-rt' or '--anonymous')") bail!("Please use only one login method ('--credentials' or '--anonymous')")
} }
let login_method = if login_login_methods_count > 0 { let crunchy = if let Some(credentials) = &cli.login_method.credentials {
if let Command::Login(login) = &cli.command { if let Some((email, password)) = credentials.split_once(':') {
login.login_method.clone() builder.login_with_credentials(email, password).await?
} else { } else {
unreachable!() bail!("Invalid credentials format. Please provide your credentials as email:password")
} }
} else { } else if cli.login_method.anonymous {
cli.login_method.clone()
};
let crunchy = if let Some(credentials) = &login_method.credentials {
if let Some((user, password)) = credentials.split_once(':') {
builder.login_with_credentials(user, password).await?
} else {
bail!("Invalid credentials format. Please provide your credentials as user:password")
}
} else if let Some(etp_rt) = &login_method.etp_rt {
builder.login_with_etp_rt(etp_rt).await?
} else if login_method.anonymous {
builder.login_anonymously().await? builder.login_anonymously().await?
} else { } else {
bail!("should never happen") bail!("should never happen")
@ -359,3 +377,29 @@ async fn crunchyroll_session(cli: &mut Cli) -> Result<Crunchyroll> {
Ok(crunchy) Ok(crunchy)
} }
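The stored-session handling above reads a single `<token_type>:<token>` line from disk; with this change only `refresh_token` entries are accepted, while old `etp_rt` entries are rejected with a prompt to log in again. A compact, illustrative version of that branch (not the function as it appears in the codebase):

fn parse_session(contents: &str) -> Result<&str, String> {
    match contents.trim().split_once(':') {
        Some(("refresh_token", token)) => Ok(token),
        Some(("etp_rt", _)) => Err(
            "the stored login method (etp-rt) isn't supported anymore, please log in again".into(),
        ),
        _ => Err(format!("could not read stored session ('{}')", contents)),
    }
}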
fn reqwest_client(proxy: Option<Proxy>, user_agent: Option<String>) -> Client {
let mut builder = CrunchyrollBuilder::predefined_client_builder();
if let Some(p) = proxy {
builder = builder.proxy(p)
}
if let Some(ua) = user_agent {
builder = builder.user_agent(ua)
}
#[cfg(any(feature = "openssl-tls", feature = "openssl-tls-static"))]
let client = {
let mut builder = builder.use_native_tls().tls_built_in_root_certs(false);
for certificate in rustls_native_certs::load_native_certs().unwrap() {
builder =
builder.add_root_certificate(reqwest::Certificate::from_der(&certificate).unwrap())
}
builder.build().unwrap()
};
#[cfg(not(any(feature = "openssl-tls", feature = "openssl-tls-static")))]
let client = builder.build().unwrap();
client
}

View file

@ -11,14 +11,11 @@ use std::path::PathBuf;
#[derive(Debug, clap::Parser)] #[derive(Debug, clap::Parser)]
#[clap(about = "Save your login credentials persistent on disk")] #[clap(about = "Save your login credentials persistent on disk")]
pub struct Login { pub struct Login {
#[clap(flatten)]
pub login_method: LoginMethod,
#[arg(help = "Remove your stored credentials (instead of saving them)")] #[arg(help = "Remove your stored credentials (instead of saving them)")]
#[arg(long)] #[arg(long)]
pub remove: bool, pub remove: bool,
} }
#[async_trait::async_trait(?Send)]
impl Execute for Login { impl Execute for Login {
async fn execute(self, ctx: Context) -> Result<()> { async fn execute(self, ctx: Context) -> Result<()> {
if let Some(login_file_path) = session_file_path() { if let Some(login_file_path) = session_file_path() {
@ -28,9 +25,7 @@ impl Execute for Login {
SessionToken::RefreshToken(refresh_token) => { SessionToken::RefreshToken(refresh_token) => {
fs::write(login_file_path, format!("refresh_token:{}", refresh_token))? fs::write(login_file_path, format!("refresh_token:{}", refresh_token))?
} }
SessionToken::EtpRt(etp_rt) => { SessionToken::EtpRt(_) => bail!("Login with etp_rt isn't supported anymore. Please use your credentials to login"),
fs::write(login_file_path, format!("etp_rt:{}", etp_rt))?
}
SessionToken::Anonymous => bail!("Anonymous login cannot be saved"), SessionToken::Anonymous => bail!("Anonymous login cannot be saved"),
} }
@ -46,18 +41,12 @@ impl Execute for Login {
#[derive(Clone, Debug, Parser)] #[derive(Clone, Debug, Parser)]
pub struct LoginMethod { pub struct LoginMethod {
#[arg( #[arg(
help = "Login with credentials (username or email and password). Must be provided as user:password" help = "Login with credentials (email and password). Must be provided as email:password"
)] )]
#[arg(long)] #[arg(global = true, long)]
pub credentials: Option<String>, pub credentials: Option<String>,
#[arg(help = "Login with the etp-rt cookie")]
#[arg(
long_help = "Login with the etp-rt cookie. This can be obtained when you login on crunchyroll.com and extract it from there"
)]
#[arg(long)]
pub etp_rt: Option<String>,
#[arg(help = "Login anonymously / without an account")] #[arg(help = "Login anonymously / without an account")]
#[arg(long, default_value_t = false)] #[arg(global = true, long, default_value_t = false)]
pub anonymous: bool, pub anonymous: bool,
} }

View file

@ -7,6 +7,8 @@ use anyhow::{bail, Result};
use crunchyroll_rs::common::StreamExt; use crunchyroll_rs::common::StreamExt;
use crunchyroll_rs::search::QueryResults; use crunchyroll_rs::search::QueryResults;
use crunchyroll_rs::{Episode, Locale, MediaCollection, MovieListing, MusicVideo, Series}; use crunchyroll_rs::{Episode, Locale, MediaCollection, MovieListing, MusicVideo, Series};
use log::warn;
use std::sync::Arc;
#[derive(Debug, clap::Parser)] #[derive(Debug, clap::Parser)]
#[clap(about = "Search in videos")] #[clap(about = "Search in videos")]
@ -86,13 +88,16 @@ pub struct Search {
/// concert.premium_only → If the concert is only available with Crunchyroll premium /// concert.premium_only → If the concert is only available with Crunchyroll premium
/// ///
/// stream.locale → Stream locale/language /// stream.locale → Stream locale/language
/// stream.dash_url → Stream url in DASH format /// stream.dash_url → Stream url in DASH format. You need to set the `Authorization` header to `Bearer <account.token>` when requesting this url
/// stream.drm_dash_url → Stream url in DRM protected DASH format /// stream.is_drm → If `stream.dash_url` is DRM encrypted
/// stream.hls_url → Stream url in HLS format
/// stream.drm_hls_url → Stream url in DRM protected HLS format
/// ///
/// subtitle.locale → Subtitle locale/language /// subtitle.locale → Subtitle locale/language
/// subtitle.url → Url to the subtitle /// subtitle.url → Url to the subtitle
///
/// account.token → Access token to make request to restricted endpoints. This token is only valid for a max. of 5 minutes
/// account.id → Internal ID of the user account
/// account.profile_name → Profile name of the account
/// account.email → Email address of the account
#[arg(short, long, verbatim_doc_comment)] #[arg(short, long, verbatim_doc_comment)]
#[arg(default_value = "S{{season.number}}E{{episode.number}} - {{episode.title}}")] #[arg(default_value = "S{{season.number}}E{{episode.number}} - {{episode.title}}")]
output: String, output: String,
@ -100,9 +105,16 @@ pub struct Search {
input: String, input: String,
} }
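The template keys documented above note that `{{stream.dash_url}}` only works when the request carries `Authorization: Bearer <account.token>`. A minimal sketch of such a request (not part of the diff), assuming reqwest and a tokio runtime; `dash_url` and `account_token` are placeholders for whatever the `search` output produced:

use reqwest::Client;

/// Fetch the DASH manifest printed by `search`, attaching the short-lived account token.
async fn fetch_dash_manifest(dash_url: &str, account_token: &str) -> reqwest::Result<String> {
    Client::new()
        .get(dash_url)
        .bearer_auth(account_token) // sends `Authorization: Bearer <account.token>`
        .send()
        .await?
        .text()
        .await
}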
#[async_trait::async_trait(?Send)]
impl Execute for Search { impl Execute for Search {
async fn execute(self, ctx: Context) -> Result<()> { async fn execute(self, ctx: Context) -> Result<()> {
if !ctx.crunchy.premium().await {
warn!("Using `search` anonymously or with a non-premium account may return incomplete results")
}
if self.output.contains("{{stream.is_drm}}") {
warn!("The `{{{{stream.is_drm}}}}` option is deprecated as it isn't reliable anymore and will be removed soon")
}
let input = if crunchyroll_rs::parse::parse_url(&self.input).is_some() { let input = if crunchyroll_rs::parse::parse_url(&self.input).is_some() {
match parse_url(&ctx.crunchy, self.input.clone(), true).await { match parse_url(&ctx.crunchy, self.input.clone(), true).await {
Ok(ok) => vec![ok], Ok(ok) => vec![ok],
@ -141,13 +153,14 @@ impl Execute for Search {
output output
}; };
let crunchy_arc = Arc::new(ctx.crunchy);
for (media_collection, url_filter) in input { for (media_collection, url_filter) in input {
let filter_options = FilterOptions { let filter_options = FilterOptions {
audio: self.audio.clone(), audio: self.audio.clone(),
url_filter, url_filter,
}; };
let format = Format::new(self.output.clone(), filter_options)?; let format = Format::new(self.output.clone(), filter_options, crunchy_arc.clone())?;
println!("{}", format.parse(media_collection).await?); println!("{}", format.parse(media_collection).await?);
} }


@ -21,10 +21,10 @@ impl FilterOptions {
pub fn filter_episodes(&self, mut episodes: Vec<Episode>) -> Vec<Episode> { pub fn filter_episodes(&self, mut episodes: Vec<Episode>) -> Vec<Episode> {
episodes.retain(|e| { episodes.retain(|e| {
self.check_audio_language(&vec![e.audio_locale.clone()]) self.check_audio_language(&[e.audio_locale.clone()])
&& self && self
.url_filter .url_filter
.is_episode_valid(e.episode_number, e.season_number) .is_episode_valid(e.sequence_number, e.season_number)
}); });
episodes episodes
} }
@ -38,7 +38,7 @@ impl FilterOptions {
) )
} }
fn check_audio_language(&self, audio: &Vec<Locale>) -> bool { fn check_audio_language(&self, audio: &[Locale]) -> bool {
if !self.audio.is_empty() { if !self.audio.is_empty() {
return self.audio.iter().any(|a| audio.contains(a)); return self.audio.iter().any(|a| audio.contains(a));
} }


@ -2,13 +2,15 @@ use crate::search::filter::FilterOptions;
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use crunchyroll_rs::media::{Stream, Subtitle}; use crunchyroll_rs::media::{Stream, Subtitle};
use crunchyroll_rs::{ use crunchyroll_rs::{
Concert, Episode, Locale, MediaCollection, Movie, MovieListing, MusicVideo, Season, Series, Concert, Crunchyroll, Episode, Locale, MediaCollection, Movie, MovieListing, MusicVideo,
Season, Series,
}; };
use regex::Regex; use regex::Regex;
use serde::Serialize; use serde::Serialize;
use serde_json::{Map, Value}; use serde_json::{Map, Value};
use std::collections::HashMap; use std::collections::HashMap;
use std::ops::Range; use std::ops::Range;
use std::sync::Arc;
#[derive(Default, Serialize)] #[derive(Default, Serialize)]
struct FormatSeries { struct FormatSeries {
@ -70,7 +72,7 @@ impl From<&Episode> for FormatEpisode {
title: value.title.clone(), title: value.title.clone(),
description: value.description.clone(), description: value.description.clone(),
locale: value.audio_locale.clone(), locale: value.audio_locale.clone(),
number: value.episode_number, number: value.episode_number.unwrap_or_default(),
sequence_number: value.sequence_number, sequence_number: value.sequence_number,
duration: value.duration.num_milliseconds(), duration: value.duration.num_milliseconds(),
air_date: value.episode_air_date.timestamp(), air_date: value.episode_air_date.timestamp(),
@ -163,37 +165,15 @@ impl From<&Concert> for FormatConcert {
struct FormatStream { struct FormatStream {
pub locale: Locale, pub locale: Locale,
pub dash_url: String, pub dash_url: String,
pub drm_dash_url: String, pub is_drm: bool,
pub hls_url: String,
pub drm_hls_url: String,
} }
impl From<&Stream> for FormatStream { impl From<&Stream> for FormatStream {
fn from(value: &Stream) -> Self { fn from(value: &Stream) -> Self {
let (dash_url, drm_dash_url, hls_url, drm_hls_url) =
value.variants.get(&Locale::Custom("".to_string())).map_or(
(
"".to_string(),
"".to_string(),
"".to_string(),
"".to_string(),
),
|v| {
(
v.adaptive_dash.clone().unwrap_or_default().url,
v.drm_adaptive_dash.clone().unwrap_or_default().url,
v.adaptive_hls.clone().unwrap_or_default().url,
v.drm_adaptive_hls.clone().unwrap_or_default().url,
)
},
);
Self { Self {
locale: value.audio_locale.clone(), locale: value.audio_locale.clone(),
dash_url, dash_url: value.url.clone(),
drm_dash_url, is_drm: false,
hls_url,
drm_hls_url,
} }
} }
} }
@ -213,6 +193,27 @@ impl From<&Subtitle> for FormatSubtitle {
} }
} }
#[derive(Default, Serialize)]
struct FormatAccount {
pub token: String,
pub id: String,
pub profile_name: String,
pub email: String,
}
impl FormatAccount {
pub async fn async_from(value: &Crunchyroll) -> Result<Self> {
let account = value.account().await?;
Ok(Self {
token: value.access_token().await,
id: account.account_id,
profile_name: account.profile_name,
email: account.email,
})
}
}
#[derive(Clone, Debug, Eq, PartialEq, Hash)] #[derive(Clone, Debug, Eq, PartialEq, Hash)]
enum Scope { enum Scope {
Series, Series,
@ -224,6 +225,7 @@ enum Scope {
Concert, Concert,
Stream, Stream,
Subtitle, Subtitle,
Account,
} }
macro_rules! must_match_if_true { macro_rules! must_match_if_true {
@ -239,23 +241,20 @@ macro_rules! must_match_if_true {
}; };
} }
macro_rules! self_and_versions {
($var:expr => $audio:expr) => {{
let mut items = vec![$var.clone()];
items.extend($var.clone().version($audio).await?);
items
}};
}
pub struct Format { pub struct Format {
pattern: Vec<(Range<usize>, Scope, String)>, pattern: Vec<(Range<usize>, Scope, String)>,
pattern_count: HashMap<Scope, u32>, pattern_count: HashMap<Scope, u32>,
input: String, input: String,
filter_options: FilterOptions, filter_options: FilterOptions,
crunchyroll: Arc<Crunchyroll>,
} }
impl Format { impl Format {
pub fn new(input: String, filter_options: FilterOptions) -> Result<Self> { pub fn new(
input: String,
filter_options: FilterOptions,
crunchyroll: Arc<Crunchyroll>,
) -> Result<Self> {
let scope_regex = Regex::new(r"(?m)\{\{\s*(?P<scope>\w+)\.(?P<field>\w+)\s*}}").unwrap(); let scope_regex = Regex::new(r"(?m)\{\{\s*(?P<scope>\w+)\.(?P<field>\w+)\s*}}").unwrap();
let mut pattern = vec![]; let mut pattern = vec![];
let mut pattern_count = HashMap::new(); let mut pattern_count = HashMap::new();
@ -282,6 +281,7 @@ impl Format {
Scope::Concert => FormatConcert Scope::Concert => FormatConcert
Scope::Stream => FormatStream Scope::Stream => FormatStream
Scope::Subtitle => FormatSubtitle Scope::Subtitle => FormatSubtitle
Scope::Account => FormatAccount
); );
for capture in scope_regex.captures_iter(&input) { for capture in scope_regex.captures_iter(&input) {
@ -299,6 +299,7 @@ impl Format {
"concert" => Scope::Concert, "concert" => Scope::Concert,
"stream" => Scope::Stream, "stream" => Scope::Stream,
"subtitle" => Scope::Subtitle, "subtitle" => Scope::Subtitle,
"account" => Scope::Account,
_ => bail!("'{}.{}' is not a valid keyword", scope, field), _ => bail!("'{}.{}' is not a valid keyword", scope, field),
}; };
@ -324,6 +325,7 @@ impl Format {
pattern_count, pattern_count,
input, input,
filter_options, filter_options,
crunchyroll,
}) })
} }
@ -338,6 +340,7 @@ impl Format {
Scope::Episode, Scope::Episode,
Scope::Stream, Scope::Stream,
Scope::Subtitle, Scope::Subtitle,
Scope::Account,
])?; ])?;
self.parse_series(media_collection).await self.parse_series(media_collection).await
@ -348,17 +351,28 @@ impl Format {
Scope::Movie, Scope::Movie,
Scope::Stream, Scope::Stream,
Scope::Subtitle, Scope::Subtitle,
Scope::Account,
])?; ])?;
self.parse_movie_listing(media_collection).await self.parse_movie_listing(media_collection).await
} }
MediaCollection::MusicVideo(_) => { MediaCollection::MusicVideo(_) => {
self.check_scopes(vec![Scope::MusicVideo, Scope::Stream, Scope::Subtitle])?; self.check_scopes(vec![
Scope::MusicVideo,
Scope::Stream,
Scope::Subtitle,
Scope::Account,
])?;
self.parse_music_video(media_collection).await self.parse_music_video(media_collection).await
} }
MediaCollection::Concert(_) => { MediaCollection::Concert(_) => {
self.check_scopes(vec![Scope::Concert, Scope::Stream, Scope::Subtitle])?; self.check_scopes(vec![
Scope::Concert,
Scope::Stream,
Scope::Subtitle,
Scope::Account,
])?;
self.parse_concert(media_collection).await self.parse_concert(media_collection).await
} }
@ -371,7 +385,9 @@ impl Format {
let episode_empty = self.check_pattern_count_empty(Scope::Episode); let episode_empty = self.check_pattern_count_empty(Scope::Episode);
let stream_empty = self.check_pattern_count_empty(Scope::Stream) let stream_empty = self.check_pattern_count_empty(Scope::Stream)
&& self.check_pattern_count_empty(Scope::Subtitle); && self.check_pattern_count_empty(Scope::Subtitle);
let account_empty = self.check_pattern_count_empty(Scope::Account);
#[allow(clippy::type_complexity)]
let mut tree: Vec<(Season, Vec<(Episode, Vec<Stream>)>)> = vec![]; let mut tree: Vec<(Season, Vec<(Episode, Vec<Stream>)>)> = vec![];
let series = if !series_empty { let series = if !series_empty {
@ -397,7 +413,15 @@ impl Format {
}; };
let mut seasons = vec![]; let mut seasons = vec![];
for season in tmp_seasons { for season in tmp_seasons {
seasons.extend(self_and_versions!(season => self.filter_options.audio.clone())) seasons.push(season.clone());
for version in season.versions {
if season.id == version.id {
continue;
}
if self.filter_options.audio.contains(&version.audio_locale) {
seasons.push(version.season().await?)
}
}
} }
tree.extend( tree.extend(
self.filter_options self.filter_options
@ -411,7 +435,15 @@ impl Format {
if !episode_empty || !stream_empty { if !episode_empty || !stream_empty {
match &media_collection { match &media_collection {
MediaCollection::Episode(episode) => { MediaCollection::Episode(episode) => {
let episodes = self_and_versions!(episode => self.filter_options.audio.clone()); let mut episodes = vec![episode.clone()];
for version in &episode.versions {
if episode.id == version.id {
continue;
}
if self.filter_options.audio.contains(&version.audio_locale) {
episodes.push(version.episode().await?)
}
}
tree.push(( tree.push((
Season::default(), Season::default(),
episodes episodes
@ -440,7 +472,9 @@ impl Format {
if !stream_empty { if !stream_empty {
for (_, episodes) in tree.iter_mut() { for (_, episodes) in tree.iter_mut() {
for (episode, streams) in episodes { for (episode, streams) in episodes {
streams.push(episode.stream().await?) let stream = episode.stream_maybe_without_drm().await?;
stream.clone().invalidate().await?;
streams.push(stream)
} }
} }
} else { } else {
@ -452,6 +486,11 @@ impl Format {
} }
let mut output = vec![]; let mut output = vec![];
let account_map = if !account_empty {
self.serializable_to_json_map(FormatAccount::async_from(&self.crunchyroll).await?)
} else {
Map::default()
};
let series_map = self.serializable_to_json_map(FormatSeries::from(&series)); let series_map = self.serializable_to_json_map(FormatSeries::from(&series));
for (season, episodes) in tree { for (season, episodes) in tree {
let season_map = self.serializable_to_json_map(FormatSeason::from(&season)); let season_map = self.serializable_to_json_map(FormatSeason::from(&season));
@ -463,6 +502,7 @@ impl Format {
output.push( output.push(
self.replace_all( self.replace_all(
HashMap::from([ HashMap::from([
(Scope::Account, &account_map),
(Scope::Series, &series_map), (Scope::Series, &series_map),
(Scope::Season, &season_map), (Scope::Season, &season_map),
(Scope::Episode, &episode_map), (Scope::Episode, &episode_map),
@ -509,7 +549,7 @@ impl Format {
} }
if !stream_empty { if !stream_empty {
for (movie, streams) in tree.iter_mut() { for (movie, streams) in tree.iter_mut() {
streams.push(movie.stream().await?) streams.push(movie.stream_maybe_without_drm().await?)
} }
} else { } else {
for (_, streams) in tree.iter_mut() { for (_, streams) in tree.iter_mut() {
@ -547,7 +587,7 @@ impl Format {
let stream_empty = self.check_pattern_count_empty(Scope::Stream); let stream_empty = self.check_pattern_count_empty(Scope::Stream);
let music_video = must_match_if_true!(!music_video_empty => media_collection|MediaCollection::MusicVideo(music_video) => music_video.clone()).unwrap_or_default(); let music_video = must_match_if_true!(!music_video_empty => media_collection|MediaCollection::MusicVideo(music_video) => music_video.clone()).unwrap_or_default();
let stream = must_match_if_true!(!stream_empty => media_collection|MediaCollection::MusicVideo(music_video) => music_video.stream().await?).unwrap_or_default(); let stream = must_match_if_true!(!stream_empty => media_collection|MediaCollection::MusicVideo(music_video) => music_video.stream_maybe_without_drm().await?).unwrap_or_default();
let music_video_map = self.serializable_to_json_map(FormatMusicVideo::from(&music_video)); let music_video_map = self.serializable_to_json_map(FormatMusicVideo::from(&music_video));
let stream_map = self.serializable_to_json_map(FormatStream::from(&stream)); let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));
@ -569,7 +609,7 @@ impl Format {
let stream_empty = self.check_pattern_count_empty(Scope::Stream); let stream_empty = self.check_pattern_count_empty(Scope::Stream);
let concert = must_match_if_true!(!concert_empty => media_collection|MediaCollection::Concert(concert) => concert.clone()).unwrap_or_default(); let concert = must_match_if_true!(!concert_empty => media_collection|MediaCollection::Concert(concert) => concert.clone()).unwrap_or_default();
let stream = must_match_if_true!(!stream_empty => media_collection|MediaCollection::Concert(concert) => concert.stream().await?).unwrap_or_default(); let stream = must_match_if_true!(!stream_empty => media_collection|MediaCollection::Concert(concert) => concert.stream_maybe_without_drm().await?).unwrap_or_default();
let concert_map = self.serializable_to_json_map(FormatConcert::from(&concert)); let concert_map = self.serializable_to_json_map(FormatConcert::from(&concert));
let stream_map = self.serializable_to_json_map(FormatStream::from(&stream)); let stream_map = self.serializable_to_json_map(FormatStream::from(&stream));


@ -1,11 +1,61 @@
use crate::utils::parse::parse_resolution; use crate::utils::parse::parse_resolution;
use crunchyroll_rs::media::Resolution; use crunchyroll_rs::media::Resolution;
use regex::Regex;
use reqwest::Proxy; use reqwest::Proxy;
pub fn clap_parse_resolution(s: &str) -> Result<Resolution, String> { pub fn clap_parse_resolution(s: &str) -> Result<Resolution, String> {
parse_resolution(s.to_string()).map_err(|e| e.to_string()) parse_resolution(s.to_string()).map_err(|e| e.to_string())
} }
pub fn clap_parse_proxy(s: &str) -> Result<Proxy, String> { pub fn clap_parse_proxies(s: &str) -> Result<(Option<Proxy>, Option<Proxy>), String> {
Proxy::all(s).map_err(|e| e.to_string()) let double_proxy_regex =
Regex::new(r"^(?P<first>(https?|socks5h?)://.+):(?P<second>(https?|socks5h?)://.+)$")
.unwrap();
if let Some(capture) = double_proxy_regex.captures(s) {
// checks if the input is formatted like 'https://example.com:socks5://examples.com' and
// splits the string into 2 separate proxies at the middle colon
let first = capture.name("first").unwrap().as_str();
let second = capture.name("second").unwrap().as_str();
Ok((
Some(Proxy::all(first).map_err(|e| format!("first proxy: {e}"))?),
Some(Proxy::all(second).map_err(|e| format!("second proxy: {e}"))?),
))
} else if s.starts_with(':') {
// checks if the input is formatted like ':https://example.com' and returns a proxy on the
// second tuple position
Ok((
None,
Some(Proxy::all(s.trim_start_matches(':')).map_err(|e| e.to_string())?),
))
} else if s.ends_with(':') {
// checks if the input is formatted like 'https://example.com:' and returns a proxy on the
// first tuple position
Ok((
Some(Proxy::all(s.trim_end_matches(':')).map_err(|e| e.to_string())?),
None,
))
} else {
// returns the same proxy for both tuple positions
let proxy = Proxy::all(s).map_err(|e| e.to_string())?;
Ok((Some(proxy.clone()), Some(proxy)))
}
}
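A sketch (not part of the diff) of how the three accepted input shapes map to the returned tuple, assuming `clap_parse_proxies` is in scope; the proxy URLs are placeholders:

fn proxy_examples() -> Result<(), String> {
    // 'https://one.example:http://two.example' is split at the middle colon into two proxies
    let (first, second) = clap_parse_proxies("https://one.example:http://two.example")?;
    assert!(first.is_some() && second.is_some());

    // a leading colon only fills the second tuple position
    let (first, second) = clap_parse_proxies(":http://two.example")?;
    assert!(first.is_none() && second.is_some());

    // a trailing colon only fills the first tuple position
    let (first, second) = clap_parse_proxies("https://one.example:")?;
    assert!(first.is_some() && second.is_none());

    // a plain proxy url is used for both positions
    let (first, second) = clap_parse_proxies("https://one.example")?;
    assert!(first.is_some() && second.is_some());
    Ok(())
}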
pub fn clap_parse_speed_limit(s: &str) -> Result<u32, String> {
let quota = s.to_lowercase();
let bytes = if let Ok(b) = quota.parse() {
b
} else if let Ok(b) = quota.trim_end_matches('b').parse::<u32>() {
b
} else if let Ok(kb) = quota.trim_end_matches("kb").parse::<u32>() {
kb * 1024
} else if let Ok(mb) = quota.trim_end_matches("mb").parse::<u32>() {
mb * 1024 * 1024
} else {
return Err("Invalid speed limit".to_string());
};
Ok(bytes)
} }
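And the speed-limit parser accepts a plain byte count or a case-insensitive kb/mb suffix; a sketch of the expected results, again assuming the function is in scope:

assert_eq!(clap_parse_speed_limit("524288"), Ok(524_288));        // plain byte count
assert_eq!(clap_parse_speed_limit("512KB"), Ok(512 * 1024));      // input is lowercased before the suffix is stripped
assert_eq!(clap_parse_speed_limit("10mb"), Ok(10 * 1024 * 1024));
assert!(clap_parse_speed_limit("fast").is_err());                 // anything else -> "Invalid speed limit"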


@ -1,5 +1,9 @@
use crate::utils::rate_limit::RateLimiterService;
use crunchyroll_rs::Crunchyroll; use crunchyroll_rs::Crunchyroll;
use reqwest::Client;
pub struct Context { pub struct Context {
pub crunchy: Crunchyroll, pub crunchy: Crunchyroll,
pub client: Client,
pub rate_limiter: Option<RateLimiterService>,
} }

File diff suppressed because it is too large


@ -1,7 +1,11 @@
use lazy_static::lazy_static; use lazy_static::lazy_static;
use regex::Regex; use regex::Regex;
use std::fmt;
use std::fmt::Formatter;
use std::str::FromStr; use std::str::FromStr;
pub const SOFTSUB_CONTAINERS: [&str; 3] = ["mkv", "mov", "mp4"];
#[derive(Clone, Debug, Eq, PartialEq)] #[derive(Clone, Debug, Eq, PartialEq)]
pub enum FFmpegPreset { pub enum FFmpegPreset {
Predefined(FFmpegCodec, Option<FFmpegHwAccel>, FFmpegQuality), Predefined(FFmpegCodec, Option<FFmpegHwAccel>, FFmpegQuality),
@ -31,11 +35,11 @@ macro_rules! ffmpeg_enum {
} }
} }
impl ToString for $name { impl fmt::Display for $name {
fn to_string(&self) -> String { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self { match self {
$( $(
&$name::$field => stringify!($field).to_string().to_lowercase() &$name::$field => write!(f, "{}", stringify!($field).to_string().to_lowercase())
),* ),*
} }
} }
@ -66,7 +70,9 @@ ffmpeg_enum! {
ffmpeg_enum! { ffmpeg_enum! {
enum FFmpegHwAccel { enum FFmpegHwAccel {
Nvidia Nvidia,
Amd,
Apple
} }
} }
@ -98,7 +104,11 @@ impl FFmpegPreset {
FFmpegHwAccel::all(), FFmpegHwAccel::all(),
FFmpegQuality::all(), FFmpegQuality::all(),
), ),
(FFmpegCodec::Av1, vec![], FFmpegQuality::all()), (
FFmpegCodec::Av1,
vec![FFmpegHwAccel::Amd],
FFmpegQuality::all(),
),
]; ];
let mut return_values = vec![]; let mut return_values = vec![];
@ -127,23 +137,16 @@ impl FFmpegPreset {
for (codec, hwaccel, quality) in FFmpegPreset::available_matches() { for (codec, hwaccel, quality) in FFmpegPreset::available_matches() {
let mut description_details = vec![]; let mut description_details = vec![];
if let Some(h) = &hwaccel { if let Some(h) = &hwaccel {
description_details.push(format!("{} hardware acceleration", h.to_string())) description_details.push(format!("{h} hardware acceleration"))
} }
if let Some(q) = &quality { if let Some(q) = &quality {
description_details.push(format!("{} video quality/compression", q.to_string())) description_details.push(format!("{q} video quality/compression"))
} }
let description = if description_details.len() == 0 { let description = if description_details.is_empty() {
format!( format!("{codec} encoded with default video quality/compression",)
"{} encoded with default video quality/compression",
codec.to_string()
)
} else if description_details.len() == 1 { } else if description_details.len() == 1 {
format!( format!("{} encoded with {}", codec, description_details[0])
"{} encoded with {}",
codec.to_string(),
description_details[0]
)
} else { } else {
let first = description_details.remove(0); let first = description_details.remove(0);
let last = description_details.remove(description_details.len() - 1); let last = description_details.remove(description_details.len() - 1);
@ -153,13 +156,7 @@ impl FFmpegPreset {
"".to_string() "".to_string()
}; };
format!( format!("{codec} encoded with {first}{mid} and {last}",)
"{} encoded with {}{} and {}",
codec.to_string(),
first,
mid,
last
)
}; };
return_values.push(format!( return_values.push(format!(
@ -193,11 +190,7 @@ impl FFmpegPreset {
.find(|p| p.to_string() == token.to_lowercase()) .find(|p| p.to_string() == token.to_lowercase())
{ {
if let Some(cc) = codec { if let Some(cc) = codec {
return Err(format!( return Err(format!("cannot use multiple codecs (found {cc} and {c})",));
"cannot use multiple codecs (found {} and {})",
cc.to_string(),
c.to_string()
));
} }
codec = Some(c) codec = Some(c)
} else if let Some(h) = FFmpegHwAccel::all() } else if let Some(h) = FFmpegHwAccel::all()
@ -206,9 +199,7 @@ impl FFmpegPreset {
{ {
if let Some(hh) = hwaccel { if let Some(hh) = hwaccel {
return Err(format!( return Err(format!(
"cannot use multiple hardware accelerations (found {} and {})", "cannot use multiple hardware accelerations (found {hh} and {h})",
hh.to_string(),
h.to_string()
)); ));
} }
hwaccel = Some(h) hwaccel = Some(h)
@ -218,15 +209,13 @@ impl FFmpegPreset {
{ {
if let Some(qq) = quality { if let Some(qq) = quality {
return Err(format!( return Err(format!(
"cannot use multiple ffmpeg preset qualities (found {} and {})", "cannot use multiple ffmpeg preset qualities (found {qq} and {q})",
qq.to_string(),
q.to_string()
)); ));
} }
quality = Some(q) quality = Some(q)
} else { } else {
return Err(format!( return Err(format!(
"'{}' is not a valid ffmpeg preset (unknown token '{}'", "'{}' is not a valid ffmpeg preset (unknown token '{}')",
s, token s, token
)); ));
} }
@ -238,7 +227,7 @@ impl FFmpegPreset {
hwaccel.clone(), hwaccel.clone(),
quality.clone(), quality.clone(),
)) { )) {
return Err(format!("ffmpeg preset is not supported")); return Err("ffmpeg preset is not supported".to_string());
} }
Ok(FFmpegPreset::Predefined( Ok(FFmpegPreset::Predefined(
c, c,
@ -246,7 +235,7 @@ impl FFmpegPreset {
quality.unwrap_or(FFmpegQuality::Normal), quality.unwrap_or(FFmpegQuality::Normal),
)) ))
} else { } else {
Err(format!("cannot use ffmpeg preset with without a codec")) Err("cannot use ffmpeg preset with without a codec".to_string())
} }
} }
@ -262,31 +251,12 @@ impl FFmpegPreset {
match codec { match codec {
FFmpegCodec::H264 => { FFmpegCodec::H264 => {
if let Some(hwaccel) = hwaccel_opt { let mut crf_quality = || match quality {
match hwaccel {
FFmpegHwAccel::Nvidia => {
input.extend([
"-hwaccel",
"cuda",
"-hwaccel_output_format",
"cuda",
"-c:v",
"h264_cuvid",
]);
output.extend(["-c:v", "h264_nvenc", "-c:a", "copy"])
}
}
} else {
output.extend(["-c:v", "libx264", "-c:a", "copy"])
}
match quality {
FFmpegQuality::Lossless => output.extend(["-crf", "18"]), FFmpegQuality::Lossless => output.extend(["-crf", "18"]),
FFmpegQuality::Normal => (), FFmpegQuality::Normal => (),
FFmpegQuality::Low => output.extend(["-crf", "35"]), FFmpegQuality::Low => output.extend(["-crf", "35"]),
} };
}
FFmpegCodec::H265 => {
if let Some(hwaccel) = hwaccel_opt { if let Some(hwaccel) = hwaccel_opt {
match hwaccel { match hwaccel {
FFmpegHwAccel::Nvidia => { FFmpegHwAccel::Nvidia => {
@ -298,26 +268,99 @@ impl FFmpegPreset {
"-c:v", "-c:v",
"h264_cuvid", "h264_cuvid",
]); ]);
output.extend(["-c:v", "hevc_nvenc", "-c:a", "copy"]) crf_quality();
output.extend(["-c:v", "h264_nvenc", "-c:a", "copy"])
}
FFmpegHwAccel::Amd => {
crf_quality();
output.extend(["-c:v", "h264_amf", "-c:a", "copy"])
}
FFmpegHwAccel::Apple => {
// Apple's Video Toolbox encoders ignore `-crf`, use `-q:v`
// instead. It's on a scale of 1-100, 100 being lossless. Just
// did some math ((-a/51+1)*99+1 where `a` is the old crf value)
// so these settings very likely need some more tweaking
match quality {
FFmpegQuality::Lossless => output.extend(["-q:v", "65"]),
FFmpegQuality::Normal => (),
FFmpegQuality::Low => output.extend(["-q:v", "32"]),
}
output.extend(["-c:v", "h264_videotoolbox", "-c:a", "copy"])
} }
} }
} else { } else {
output.extend(["-c:v", "libx265", "-c:a", "copy"]) crf_quality();
output.extend(["-c:v", "libx264", "-c:a", "copy"])
} }
}
match quality { FFmpegCodec::H265 => {
let mut crf_quality = || match quality {
FFmpegQuality::Lossless => output.extend(["-crf", "20"]), FFmpegQuality::Lossless => output.extend(["-crf", "20"]),
FFmpegQuality::Normal => (), FFmpegQuality::Normal => (),
FFmpegQuality::Low => output.extend(["-crf", "35"]), FFmpegQuality::Low => output.extend(["-crf", "35"]),
};
if let Some(hwaccel) = hwaccel_opt {
match hwaccel {
FFmpegHwAccel::Nvidia => {
input.extend([
"-hwaccel",
"cuda",
"-hwaccel_output_format",
"cuda",
"-c:v",
"h264_cuvid",
]);
crf_quality();
output.extend([
"-c:v",
"hevc_nvenc",
"-c:a",
"copy",
"-tag:v",
"hvc1",
])
}
FFmpegHwAccel::Amd => {
crf_quality();
output.extend(["-c:v", "hevc_amf", "-c:a", "copy"])
}
FFmpegHwAccel::Apple => {
// See the comment for apple h264 hwaccel
match quality {
FFmpegQuality::Lossless => output.extend(["-q:v", "61"]),
FFmpegQuality::Normal => (),
FFmpegQuality::Low => output.extend(["-q:v", "32"]),
}
output.extend([
"-c:v",
"hevc_videotoolbox",
"-c:a",
"copy",
"-tag:v",
"hvc1",
])
}
}
} else {
crf_quality();
output.extend(["-c:v", "libx265", "-c:a", "copy", "-tag:v", "hvc1"])
} }
} }
FFmpegCodec::Av1 => { FFmpegCodec::Av1 => {
output.extend(["-c:v", "libsvtav1", "-c:a", "copy"]); let mut crf_quality = || match quality {
match quality {
FFmpegQuality::Lossless => output.extend(["-crf", "22"]), FFmpegQuality::Lossless => output.extend(["-crf", "22"]),
FFmpegQuality::Normal => (), FFmpegQuality::Normal => (),
FFmpegQuality::Low => output.extend(["-crf", "35"]), FFmpegQuality::Low => output.extend(["-crf", "35"]),
};
crf_quality();
if let Some(FFmpegHwAccel::Amd) = hwaccel_opt {
output.extend(["-c:v", "av1_amf", "-c:a", "copy"]);
} else {
output.extend(["-c:v", "libsvtav1", "-c:a", "copy"]);
} }
} }
} }
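The VideoToolbox comments above map the old CRF values onto the encoder's 1-100 quality scale with (-a/51+1)*99+1. A purely illustrative check (not part of the diff) that reproduces the constants used in the presets:

// Illustrative only: the CRF -> `-q:v` conversion mentioned in the VideoToolbox comments.
fn crf_to_videotoolbox_q(crf: f64) -> f64 {
    (-crf / 51.0 + 1.0) * 99.0 + 1.0
}
// crf_to_videotoolbox_q(18.0) ≈ 65.06 -> "-q:v 65" (H.264 lossless)
// crf_to_videotoolbox_q(20.0) ≈ 61.18 -> "-q:v 61" (H.265 lossless)
// crf_to_videotoolbox_q(35.0) ≈ 32.06 -> "-q:v 32" (low quality)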


@ -1,27 +1,407 @@
use crate::utils::format::{SingleFormat, SingleFormatCollection};
use crate::utils::interactive_select::{check_for_duplicated_seasons, get_duplicated_seasons};
use crate::utils::parse::{fract, UrlFilter};
use anyhow::Result; use anyhow::Result;
use crunchyroll_rs::{ use crunchyroll_rs::{
Concert, Episode, MediaCollection, Movie, MovieListing, MusicVideo, Season, Series, Concert, Episode, Locale, MediaCollection, Movie, MovieListing, MusicVideo, Season, Series,
}; };
use log::{info, warn};
use std::collections::{BTreeMap, HashMap};
use std::ops::Not;
// Check when https://github.com/dtolnay/async-trait/issues/224 is resolved and update async-trait pub(crate) enum FilterMediaScope<'a> {
// to the new fixed version (as this causes some issues) Series(&'a Series),
#[async_trait::async_trait] Season(&'a Season),
pub trait Filter { /// Always contains 1 or 2 episodes.
type T: Send + Sized; /// - 1: The episode's audio is completely missing
type Output: Send + Sized; /// - 2: The requested audio is only available from first entry to last entry
Episode(Vec<&'a Episode>),
}
async fn visit_series(&mut self, series: Series) -> Result<Vec<Season>>; pub(crate) struct Filter {
async fn visit_season(&mut self, season: Season) -> Result<Vec<Episode>>; url_filter: UrlFilter,
async fn visit_episode(&mut self, episode: Episode) -> Result<Option<Self::T>>;
async fn visit_movie_listing(&mut self, movie_listing: MovieListing) -> Result<Vec<Movie>>;
async fn visit_movie(&mut self, movie: Movie) -> Result<Option<Self::T>>;
async fn visit_music_video(&mut self, music_video: MusicVideo) -> Result<Option<Self::T>>;
async fn visit_concert(&mut self, concert: Concert) -> Result<Option<Self::T>>;
async fn visit(mut self, media_collection: MediaCollection) -> Result<Self::Output> skip_specials: bool,
where interactive_input: bool,
Self: Send + Sized,
relative_episode_number: bool,
audio_locales: Vec<Locale>,
subtitle_locales: Vec<Locale>,
audios_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
subtitles_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
no_premium: fn(u32) -> Result<()>,
is_premium: bool,
series_visited: bool,
season_episodes: HashMap<String, Vec<Episode>>,
season_with_premium: Option<Vec<u32>>,
season_sorting: Vec<String>,
}
impl Filter {
#[allow(clippy::too_many_arguments)]
pub(crate) fn new(
url_filter: UrlFilter,
audio_locales: Vec<Locale>,
subtitle_locales: Vec<Locale>,
audios_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
subtitles_missing: fn(FilterMediaScope, Vec<&Locale>) -> Result<bool>,
no_premium: fn(u32) -> Result<()>,
relative_episode_number: bool,
interactive_input: bool,
skip_specials: bool,
is_premium: bool,
) -> Self {
Self {
url_filter,
audio_locales,
subtitle_locales,
relative_episode_number,
interactive_input,
audios_missing,
subtitles_missing,
no_premium,
is_premium,
series_visited: false,
season_episodes: HashMap::new(),
skip_specials,
season_with_premium: is_premium.not().then_some(vec![]),
season_sorting: vec![],
}
}
async fn visit_series(&mut self, series: Series) -> Result<Vec<Season>> {
// the audio locales field isn't always populated
if !series.audio_locales.is_empty() {
let missing_audios = missing_locales(&series.audio_locales, &self.audio_locales);
if !missing_audios.is_empty()
&& !(self.audios_missing)(FilterMediaScope::Series(&series), missing_audios)?
{ {
return Ok(vec![]);
}
let missing_subtitles =
missing_locales(&series.subtitle_locales, &self.subtitle_locales);
if !missing_subtitles.is_empty()
&& !(self.subtitles_missing)(FilterMediaScope::Series(&series), missing_subtitles)?
{
return Ok(vec![]);
}
}
let mut seasons = vec![];
for season in series.seasons().await? {
if !self.url_filter.is_season_valid(season.season_number) {
continue;
}
let missing_audios = missing_locales(
&season
.versions
.iter()
.map(|l| l.audio_locale.clone())
.collect::<Vec<Locale>>(),
&self.audio_locales,
);
if !missing_audios.is_empty()
&& !(self.audios_missing)(FilterMediaScope::Season(&season), missing_audios)?
{
return Ok(vec![]);
}
seasons.push(season)
}
let duplicated_seasons = get_duplicated_seasons(&seasons);
if !duplicated_seasons.is_empty() {
if self.interactive_input {
check_for_duplicated_seasons(&mut seasons)
} else {
info!(
"Found duplicated seasons: {}",
duplicated_seasons
.iter()
.map(|d| d.to_string())
.collect::<Vec<String>>()
.join(", ")
)
}
}
self.series_visited = true;
Ok(seasons)
}
async fn visit_season(&mut self, season: Season) -> Result<Vec<Episode>> {
if !self.url_filter.is_season_valid(season.season_number) {
return Ok(vec![]);
}
let mut seasons = vec![];
if self
.audio_locales
.iter()
.any(|l| season.audio_locales.contains(l))
{
seasons.push(season.clone())
}
for version in season.versions {
if season.id == version.id {
continue;
}
if self.audio_locales.contains(&version.audio_locale) {
seasons.push(version.season().await?)
}
}
let mut episodes = vec![];
for season in seasons {
self.season_sorting.push(season.id.clone());
let mut eps = season.episodes().await?;
// removes any episode that does not have the audio locale of the season. yes, this is
// the case sometimes
if season.audio_locales.len() < 2 {
let season_locale = season
.audio_locales
.first()
.cloned()
.unwrap_or(Locale::ja_JP);
eps.retain(|e| e.audio_locale == season_locale)
}
#[allow(clippy::if_same_then_else)]
if eps.len() < season.number_of_episodes as usize {
if eps.is_empty()
&& !(self.audios_missing)(
FilterMediaScope::Season(&season),
season.audio_locales.iter().collect(),
)?
{
return Ok(vec![]);
} else if !eps.is_empty()
&& !(self.audios_missing)(
FilterMediaScope::Episode(vec![eps.first().unwrap(), eps.last().unwrap()]),
vec![&eps.first().unwrap().audio_locale],
)?
{
return Ok(vec![]);
}
}
episodes.extend(eps)
}
if self.relative_episode_number {
for episode in &episodes {
self.season_episodes
.entry(episode.season_id.clone())
.or_default()
.push(episode.clone())
}
}
Ok(episodes)
}
async fn visit_episode(&mut self, episode: Episode) -> Result<Vec<SingleFormat>> {
if !self
.url_filter
.is_episode_valid(episode.sequence_number, episode.season_number)
{
return Ok(vec![]);
}
// skip the episode if it's a special
if self.skip_specials
&& (episode.sequence_number == 0.0 || episode.sequence_number.fract() != 0.0)
{
return Ok(vec![]);
}
let mut episodes = vec![];
if !self.series_visited {
if self.audio_locales.contains(&episode.audio_locale) {
episodes.push(episode.clone())
}
for version in &episode.versions {
// `episode` is also a version of itself. the if block above already adds the
// episode if it matches the requested audio, so it doesn't need to be requested
// here again
if version.id == episode.id {
continue;
}
if self.audio_locales.contains(&version.audio_locale) {
episodes.push(version.episode().await?)
}
}
let audio_locales: Vec<Locale> =
episodes.iter().map(|e| e.audio_locale.clone()).collect();
let missing_audios = missing_locales(&audio_locales, &self.audio_locales);
if !missing_audios.is_empty()
&& !(self.audios_missing)(
FilterMediaScope::Episode(vec![&episode]),
missing_audios,
)?
{
return Ok(vec![]);
}
let mut subtitle_locales: Vec<Locale> = episodes
.iter()
.flat_map(|e| e.subtitle_locales.clone())
.collect();
subtitle_locales.sort();
subtitle_locales.dedup();
let missing_subtitles = missing_locales(&subtitle_locales, &self.subtitle_locales);
if !missing_subtitles.is_empty()
&& !(self.subtitles_missing)(
FilterMediaScope::Episode(vec![&episode]),
missing_subtitles,
)?
{
return Ok(vec![]);
}
} else {
episodes.push(episode.clone())
}
if let Some(seasons_with_premium) = &mut self.season_with_premium {
let episodes_len_before = episodes.len();
episodes.retain(|e| !e.is_premium_only && !self.is_premium);
if episodes_len_before < episodes.len()
&& !seasons_with_premium.contains(&episode.season_number)
{
(self.no_premium)(episode.season_number)?;
seasons_with_premium.push(episode.season_number)
}
if episodes.is_empty() {
return Ok(vec![]);
}
}
let mut relative_episode_number = None;
let mut relative_sequence_number = None;
if self.relative_episode_number {
let season_eps = match self.season_episodes.get(&episode.season_id) {
Some(eps) => eps,
None => {
self.season_episodes.insert(
episode.season_id.clone(),
episode.season().await?.episodes().await?,
);
self.season_episodes.get(&episode.season_id).unwrap()
}
};
let mut non_integer_sequence_number_count = 0;
for (i, ep) in season_eps.iter().enumerate() {
if ep.sequence_number != 0.0 || ep.sequence_number.fract() == 0.0 {
non_integer_sequence_number_count += 1
}
if ep.id == episode.id {
relative_episode_number = Some(i + 1);
relative_sequence_number = Some(
(i + 1 - non_integer_sequence_number_count) as f32
+ fract(ep.sequence_number),
);
break;
}
}
if relative_episode_number.is_none() || relative_sequence_number.is_none() {
warn!(
"Failed to get relative episode number for episode {} ({}) of {} season {}",
episode.sequence_number,
episode.title,
episode.series_title,
episode.season_number,
)
}
}
Ok(episodes
.into_iter()
.map(|e| {
SingleFormat::new_from_episode(
e.clone(),
e.subtitle_locales,
relative_episode_number.map(|n| n as u32),
relative_sequence_number,
)
})
.collect())
}
async fn visit_movie_listing(&mut self, movie_listing: MovieListing) -> Result<Vec<Movie>> {
Ok(movie_listing.movies().await?)
}
async fn visit_movie(&mut self, movie: Movie) -> Result<Vec<SingleFormat>> {
Ok(vec![SingleFormat::new_from_movie(movie, vec![])])
}
async fn visit_music_video(&mut self, music_video: MusicVideo) -> Result<Vec<SingleFormat>> {
Ok(vec![SingleFormat::new_from_music_video(music_video)])
}
async fn visit_concert(&mut self, concert: Concert) -> Result<Vec<SingleFormat>> {
Ok(vec![SingleFormat::new_from_concert(concert)])
}
async fn finish(self, input: Vec<Vec<SingleFormat>>) -> Result<SingleFormatCollection> {
let flatten_input: Vec<SingleFormat> = input.into_iter().flatten().collect();
let mut single_format_collection = SingleFormatCollection::new();
let mut pre_sorted: BTreeMap<String, Vec<SingleFormat>> = BTreeMap::new();
for data in flatten_input {
pre_sorted
.entry(data.identifier.clone())
.or_default()
.push(data)
}
let mut sorted: Vec<(String, Vec<SingleFormat>)> = pre_sorted.into_iter().collect();
sorted.sort_by(|(_, a), (_, b)| {
self.season_sorting
.iter()
.position(|p| p == &a.first().unwrap().season_id)
.unwrap()
.cmp(
&self
.season_sorting
.iter()
.position(|p| p == &b.first().unwrap().season_id)
.unwrap(),
)
});
for (_, mut data) in sorted {
data.sort_by(|a, b| {
self.audio_locales
.iter()
.position(|p| p == &a.audio)
.unwrap_or(usize::MAX)
.cmp(
&self
.audio_locales
.iter()
.position(|p| p == &b.audio)
.unwrap_or(usize::MAX),
)
});
single_format_collection.add_single_formats(data)
}
Ok(single_format_collection)
}
pub(crate) async fn visit(
mut self,
media_collection: MediaCollection,
) -> Result<SingleFormatCollection> {
let mut items = vec![media_collection]; let mut items = vec![media_collection];
let mut result = vec![]; let mut result = vec![];
@ -45,9 +425,7 @@ pub trait Filter {
.collect::<Vec<MediaCollection>>(), .collect::<Vec<MediaCollection>>(),
), ),
MediaCollection::Episode(episode) => { MediaCollection::Episode(episode) => {
if let Some(t) = self.visit_episode(episode).await? { result.push(self.visit_episode(episode).await?)
result.push(t)
}
} }
MediaCollection::MovieListing(movie_listing) => new_items.extend( MediaCollection::MovieListing(movie_listing) => new_items.extend(
self.visit_movie_listing(movie_listing) self.visit_movie_listing(movie_listing)
@ -56,20 +434,12 @@ pub trait Filter {
.map(|m| m.into()) .map(|m| m.into())
.collect::<Vec<MediaCollection>>(), .collect::<Vec<MediaCollection>>(),
), ),
MediaCollection::Movie(movie) => { MediaCollection::Movie(movie) => result.push(self.visit_movie(movie).await?),
if let Some(t) = self.visit_movie(movie).await? {
result.push(t)
}
}
MediaCollection::MusicVideo(music_video) => { MediaCollection::MusicVideo(music_video) => {
if let Some(t) = self.visit_music_video(music_video).await? { result.push(self.visit_music_video(music_video).await?)
result.push(t)
}
} }
MediaCollection::Concert(concert) => { MediaCollection::Concert(concert) => {
if let Some(t) = self.visit_concert(concert).await? { result.push(self.visit_concert(concert).await?)
result.push(t)
}
} }
} }
} }
@ -79,8 +449,10 @@ pub trait Filter {
self.finish(result).await self.finish(result).await
} }
}
async fn finish(self, input: Vec<Self::T>) -> Result<Self::Output>; fn missing_locales<'a>(available: &[Locale], searched: &'a [Locale]) -> Vec<&'a Locale> {
searched.iter().filter(|p| !available.contains(p)).collect()
} }
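A short sketch (not part of the diff) of what `missing_locales` reports, assuming it and `crunchyroll_rs::Locale` are in scope; the locale choice is only for illustration:

let available = vec![Locale::ja_JP, Locale::en_US];
let searched = vec![Locale::en_US, Locale::de_DE];
// de-DE is requested but not offered, so it is the only entry returned
assert_eq!(missing_locales(&available, &searched), vec![&Locale::de_DE]);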
/// Remove all duplicates from a [`Vec`]. /// Remove all duplicates from a [`Vec`].


@ -0,0 +1,19 @@
use chrono::TimeDelta;
pub fn format_time_delta(time_delta: &TimeDelta) -> String {
let negative = *time_delta < TimeDelta::zero();
let time_delta = time_delta.abs();
let hours = time_delta.num_hours();
let minutes = time_delta.num_minutes() - time_delta.num_hours() * 60;
let seconds = time_delta.num_seconds() - time_delta.num_minutes() * 60;
let milliseconds = time_delta.num_milliseconds() - time_delta.num_seconds() * 1000;
format!(
"{}{}:{:0>2}:{:0>2}.{:0>3}",
if negative { "-" } else { "" },
hours,
minutes,
seconds,
milliseconds
)
}
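For reference, a sketch (not part of the diff) of the output shape: hours unpadded, minutes/seconds/milliseconds zero-padded, sign prefixed.

let delta = TimeDelta::milliseconds(-3_723_456); // -(1 h 2 min 3.456 s)
assert_eq!(format_time_delta(&delta), "-1:02:03.456");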


@ -1,15 +1,18 @@
use crate::utils::filter::real_dedup_vec; use crate::utils::filter::real_dedup_vec;
use crate::utils::locale::LanguageTagging;
use crate::utils::log::tab_info; use crate::utils::log::tab_info;
use crate::utils::os::is_special_file; use crate::utils::os::{is_special_file, sanitize};
use anyhow::Result; use anyhow::{bail, Result};
use chrono::Duration; use chrono::{Datelike, Duration};
use crunchyroll_rs::media::{Resolution, Stream, Subtitle, VariantData}; use crunchyroll_rs::media::{SkipEvents, Stream, StreamData, Subtitle};
use crunchyroll_rs::{Concert, Episode, Locale, MediaCollection, Movie, MusicVideo}; use crunchyroll_rs::{Concert, Episode, Locale, MediaCollection, Movie, MusicVideo};
use log::{debug, info}; use log::{debug, info};
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::env;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
#[allow(dead_code)]
#[derive(Clone)] #[derive(Clone)]
pub struct SingleFormat { pub struct SingleFormat {
pub identifier: String, pub identifier: String,
@ -17,6 +20,10 @@ pub struct SingleFormat {
pub title: String, pub title: String,
pub description: String, pub description: String,
pub release_year: u64,
pub release_month: u64,
pub release_day: u64,
pub audio: Locale, pub audio: Locale,
pub subtitles: Vec<Locale>, pub subtitles: Vec<Locale>,
@ -29,8 +36,9 @@ pub struct SingleFormat {
pub episode_id: String, pub episode_id: String,
pub episode_number: String, pub episode_number: String,
pub sequence_number: f32,
pub relative_episode_number: Option<u32>, pub relative_episode_number: Option<u32>,
pub sequence_number: f32,
pub relative_sequence_number: Option<f32>,
pub duration: Duration, pub duration: Duration,
@ -42,6 +50,7 @@ impl SingleFormat {
episode: Episode, episode: Episode,
subtitles: Vec<Locale>, subtitles: Vec<Locale>,
relative_episode_number: Option<u32>, relative_episode_number: Option<u32>,
relative_sequence_number: Option<f32>,
) -> Self { ) -> Self {
Self { Self {
identifier: if episode.identifier.is_empty() { identifier: if episode.identifier.is_empty() {
@ -58,6 +67,9 @@ impl SingleFormat {
}, },
title: episode.title.clone(), title: episode.title.clone(),
description: episode.description.clone(), description: episode.description.clone(),
release_year: episode.episode_air_date.year() as u64,
release_month: episode.episode_air_date.month() as u64,
release_day: episode.episode_air_date.day() as u64,
audio: episode.audio_locale.clone(), audio: episode.audio_locale.clone(),
subtitles, subtitles,
series_id: episode.series_id.clone(), series_id: episode.series_id.clone(),
@ -73,6 +85,7 @@ impl SingleFormat {
}, },
sequence_number: episode.sequence_number, sequence_number: episode.sequence_number,
relative_episode_number, relative_episode_number,
relative_sequence_number,
duration: episode.duration, duration: episode.duration,
source: episode.into(), source: episode.into(),
} }
@ -83,6 +96,9 @@ impl SingleFormat {
identifier: movie.id.clone(), identifier: movie.id.clone(),
title: movie.title.clone(), title: movie.title.clone(),
description: movie.description.clone(), description: movie.description.clone(),
release_year: movie.free_available_date.year() as u64,
release_month: movie.free_available_date.month() as u64,
release_day: movie.free_available_date.day() as u64,
audio: Locale::ja_JP, audio: Locale::ja_JP,
subtitles, subtitles,
series_id: movie.movie_listing_id.clone(), series_id: movie.movie_listing_id.clone(),
@ -92,8 +108,9 @@ impl SingleFormat {
season_number: 1, season_number: 1,
episode_id: movie.id.clone(), episode_id: movie.id.clone(),
episode_number: "1".to_string(), episode_number: "1".to_string(),
sequence_number: 1.0,
relative_episode_number: Some(1), relative_episode_number: Some(1),
sequence_number: 1.0,
relative_sequence_number: Some(1.0),
duration: movie.duration, duration: movie.duration,
source: movie.into(), source: movie.into(),
} }
@ -104,6 +121,9 @@ impl SingleFormat {
identifier: music_video.id.clone(), identifier: music_video.id.clone(),
title: music_video.title.clone(), title: music_video.title.clone(),
description: music_video.description.clone(), description: music_video.description.clone(),
release_year: music_video.original_release.year() as u64,
release_month: music_video.original_release.month() as u64,
release_day: music_video.original_release.day() as u64,
audio: Locale::ja_JP, audio: Locale::ja_JP,
subtitles: vec![], subtitles: vec![],
series_id: music_video.id.clone(), series_id: music_video.id.clone(),
@ -113,8 +133,9 @@ impl SingleFormat {
season_number: 1, season_number: 1,
episode_id: music_video.id.clone(), episode_id: music_video.id.clone(),
episode_number: "1".to_string(), episode_number: "1".to_string(),
sequence_number: 1.0,
relative_episode_number: Some(1), relative_episode_number: Some(1),
sequence_number: 1.0,
relative_sequence_number: Some(1.0),
duration: music_video.duration, duration: music_video.duration,
source: music_video.into(), source: music_video.into(),
} }
@ -125,6 +146,9 @@ impl SingleFormat {
identifier: concert.id.clone(), identifier: concert.id.clone(),
title: concert.title.clone(), title: concert.title.clone(),
description: concert.description.clone(), description: concert.description.clone(),
release_year: concert.original_release.year() as u64,
release_month: concert.original_release.month() as u64,
release_day: concert.original_release.day() as u64,
audio: Locale::ja_JP, audio: Locale::ja_JP,
subtitles: vec![], subtitles: vec![],
series_id: concert.id.clone(), series_id: concert.id.clone(),
@ -134,8 +158,9 @@ impl SingleFormat {
season_number: 1, season_number: 1,
episode_id: concert.id.clone(), episode_id: concert.id.clone(),
episode_number: "1".to_string(), episode_number: "1".to_string(),
sequence_number: 1.0,
relative_episode_number: Some(1), relative_episode_number: Some(1),
sequence_number: 1.0,
relative_sequence_number: Some(1.0),
duration: concert.duration, duration: concert.duration,
source: concert.into(), source: concert.into(),
} }
@ -143,13 +168,27 @@ impl SingleFormat {
pub async fn stream(&self) -> Result<Stream> { pub async fn stream(&self) -> Result<Stream> {
let stream = match &self.source { let stream = match &self.source {
MediaCollection::Episode(e) => e.stream().await?, MediaCollection::Episode(e) => e.stream_maybe_without_drm().await,
MediaCollection::Movie(m) => m.stream().await?, MediaCollection::Movie(m) => m.stream_maybe_without_drm().await,
MediaCollection::MusicVideo(mv) => mv.stream().await?, MediaCollection::MusicVideo(mv) => mv.stream_maybe_without_drm().await,
MediaCollection::Concert(c) => c.stream().await?, MediaCollection::Concert(c) => c.stream_maybe_without_drm().await,
_ => unreachable!(), _ => unreachable!(),
}; };
Ok(stream)
if let Err(crunchyroll_rs::error::Error::Request { message, .. }) = &stream {
if message.starts_with("TOO_MANY_ACTIVE_STREAMS") {
bail!("Too many active/parallel streams. Please close at least one stream you're watching and try again")
}
};
Ok(stream?)
}
pub async fn skip_events(&self) -> Result<Option<SkipEvents>> {
match &self.source {
MediaCollection::Episode(e) => Ok(Some(e.skip_events().await?)),
MediaCollection::Movie(m) => Ok(Some(m.skip_events().await?)),
_ => Ok(None),
}
} }
pub fn source_type(&self) -> String { pub fn source_type(&self) -> String {
@ -164,10 +203,11 @@ impl SingleFormat {
} }
pub fn is_episode(&self) -> bool { pub fn is_episode(&self) -> bool {
match self.source { matches!(self.source, MediaCollection::Episode(_))
MediaCollection::Episode(_) => true,
_ => false,
} }
pub fn is_special(&self) -> bool {
self.sequence_number == 0.0 || self.sequence_number.fract() != 0.0
} }
} }
@ -175,7 +215,7 @@ struct SingleFormatCollectionEpisodeKey(f32);
impl PartialOrd for SingleFormatCollectionEpisodeKey { impl PartialOrd for SingleFormatCollectionEpisodeKey {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.0.partial_cmp(&other.0) Some(self.cmp(other))
} }
} }
impl Ord for SingleFormatCollectionEpisodeKey { impl Ord for SingleFormatCollectionEpisodeKey {
@ -192,6 +232,7 @@ impl Eq for SingleFormatCollectionEpisodeKey {}
struct SingleFormatCollectionSeasonKey((u32, String)); struct SingleFormatCollectionSeasonKey((u32, String));
#[allow(clippy::non_canonical_partial_ord_impl)]
impl PartialOrd for SingleFormatCollectionSeasonKey { impl PartialOrd for SingleFormatCollectionSeasonKey {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
let mut cmp = self.0 .0.partial_cmp(&other.0 .0); let mut cmp = self.0 .0.partial_cmp(&other.0 .0);
@ -244,7 +285,7 @@ impl SingleFormatCollection {
format.season_number, format.season_number,
format.season_id.clone(), format.season_id.clone(),
))) )))
.or_insert(BTreeMap::new()) .or_default()
.insert( .insert(
SingleFormatCollectionEpisodeKey(format.sequence_number), SingleFormatCollectionEpisodeKey(format.sequence_number),
single_formats, single_formats,
@ -297,9 +338,7 @@ impl Iterator for SingleFormatCollectionIterator {
type Item = Vec<SingleFormat>; type Item = Vec<SingleFormat>;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
let Some((_, episodes)) = self.0 .0.iter_mut().next() else { let (_, episodes) = self.0 .0.iter_mut().next()?;
return None;
};
let value = episodes.pop_first().unwrap().1; let value = episodes.pop_first().unwrap().1;
if episodes.is_empty() { if episodes.is_empty() {
@ -309,6 +348,7 @@ impl Iterator for SingleFormatCollectionIterator {
} }
} }
#[allow(dead_code)]
#[derive(Clone)] #[derive(Clone)]
pub struct Format { pub struct Format {
pub title: String, pub title: String,
@ -316,9 +356,14 @@ pub struct Format {
pub locales: Vec<(Locale, Vec<Locale>)>, pub locales: Vec<(Locale, Vec<Locale>)>,
pub resolution: Resolution, pub width: u64,
pub height: u64,
pub fps: f64, pub fps: f64,
pub release_year: u64,
pub release_month: u64,
pub release_day: u64,
pub series_id: String, pub series_id: String,
pub series_name: String, pub series_name: String,
@ -328,13 +373,15 @@ pub struct Format {
pub episode_id: String, pub episode_id: String,
pub episode_number: String, pub episode_number: String,
pub sequence_number: f32,
pub relative_episode_number: Option<u32>, pub relative_episode_number: Option<u32>,
pub sequence_number: f32,
pub relative_sequence_number: Option<f32>,
} }
impl Format { impl Format {
#[allow(clippy::type_complexity)]
pub fn from_single_formats( pub fn from_single_formats(
mut single_formats: Vec<(SingleFormat, VariantData, Vec<(Subtitle, bool)>)>, mut single_formats: Vec<(SingleFormat, StreamData, Vec<(Subtitle, bool)>)>,
) -> Self { ) -> Self {
let locales: Vec<(Locale, Vec<Locale>)> = single_formats let locales: Vec<(Locale, Vec<Locale>)> = single_formats
.iter() .iter()
@ -342,7 +389,7 @@ impl Format {
( (
single_format.audio.clone(), single_format.audio.clone(),
subtitles subtitles
.into_iter() .iter()
.map(|(s, _)| s.locale.clone()) .map(|(s, _)| s.locale.clone())
.collect::<Vec<Locale>>(), .collect::<Vec<Locale>>(),
) )
@ -354,8 +401,12 @@ impl Format {
title: first_format.title, title: first_format.title,
description: first_format.description, description: first_format.description,
locales, locales,
resolution: first_stream.resolution, width: first_stream.resolution().unwrap().width,
fps: first_stream.fps, height: first_stream.resolution().unwrap().height,
fps: first_stream.fps().unwrap(),
release_year: first_format.release_year,
release_month: first_format.release_month,
release_day: first_format.release_day,
series_id: first_format.series_id, series_id: first_format.series_id,
series_name: first_format.series_name, series_name: first_format.series_name,
season_id: first_format.season_id, season_id: first_format.season_id,
@ -363,66 +414,152 @@ impl Format {
season_number: first_format.season_number, season_number: first_format.season_number,
episode_id: first_format.episode_id, episode_id: first_format.episode_id,
episode_number: first_format.episode_number, episode_number: first_format.episode_number,
sequence_number: first_format.sequence_number,
relative_episode_number: first_format.relative_episode_number, relative_episode_number: first_format.relative_episode_number,
sequence_number: first_format.sequence_number,
relative_sequence_number: first_format.relative_sequence_number,
} }
} }
/// Formats the given string if it has specific pattern in it. It's possible to sanitize it which /// Formats the given string if it has specific pattern in it. It also sanitizes the filename.
/// removes characters which can cause failures if the output string is used as a file name. pub fn format_path(
pub fn format_path(&self, path: PathBuf, sanitize: bool) -> PathBuf { &self,
let sanitize_func = if sanitize { path: PathBuf,
|s: &str| sanitize_filename::sanitize(s) universal: bool,
} else { language_tagging: Option<&LanguageTagging>,
// converting this to a string is actually unnecessary ) -> PathBuf {
|s: &str| s.to_string() let path = path
}; .to_string_lossy()
.to_string()
let as_string = path.to_string_lossy().to_string(); .replace("{title}", &sanitize(&self.title, true, universal))
PathBuf::from(
as_string
.replace("{title}", &sanitize_func(&self.title))
.replace( .replace(
"{audio}", "{audio}",
&sanitize_func( &sanitize(
&self self.locales
.locales
.iter() .iter()
.map(|(a, _)| a.to_string()) .map(|(a, _)| language_tagging.map_or(a.to_string(), |t| t.for_locale(a)))
.collect::<Vec<String>>() .collect::<Vec<String>>()
.join("|"), .join(
&env::var("CRUNCHY_CLI_FORMAT_DELIMITER")
.map_or("_".to_string(), |e| e),
),
true,
universal,
), ),
) )
.replace("{resolution}", &sanitize_func(&self.resolution.to_string())) .replace(
.replace("{series_id}", &sanitize_func(&self.series_id)) "{width}",
.replace("{series_name}", &sanitize_func(&self.series_name)) &sanitize(self.width.to_string(), true, universal),
.replace("{season_id}", &sanitize_func(&self.season_id)) )
.replace("{season_name}", &sanitize_func(&self.season_title)) .replace(
"{height}",
&sanitize(self.height.to_string(), true, universal),
)
.replace("{series_id}", &sanitize(&self.series_id, true, universal))
.replace(
"{series_name}",
&sanitize(&self.series_name, true, universal),
)
.replace("{season_id}", &sanitize(&self.season_id, true, universal))
.replace(
"{season_name}",
&sanitize(&self.season_title, true, universal),
)
.replace( .replace(
"{season_number}", "{season_number}",
&sanitize_func(&format!("{:0>2}", self.season_number.to_string())), &format!(
"{:0>2}",
sanitize(self.season_number.to_string(), true, universal)
),
) )
.replace("{episode_id}", &sanitize_func(&self.episode_id)) .replace("{episode_id}", &sanitize(&self.episode_id, true, universal))
.replace( .replace(
"{episode_number}", "{episode_number}",
&sanitize_func(&format!("{:0>2}", self.episode_number.to_string())), &format!("{:0>2}", sanitize(&self.episode_number, true, universal)),
) )
.replace( .replace(
"{relative_episode_number}", "{relative_episode_number}",
&sanitize_func(&format!( &format!(
"{:0>2}", "{:0>2}",
self.relative_episode_number.unwrap_or_default().to_string() sanitize(
)), self.relative_episode_number.unwrap_or_default().to_string(),
true,
universal,
)
), ),
) )
.replace(
"{sequence_number}",
&format!(
"{:0>2}",
sanitize(self.sequence_number.to_string(), true, universal)
),
)
.replace(
"{relative_sequence_number}",
&format!(
"{:0>2}",
sanitize(
self.relative_sequence_number
.unwrap_or_default()
.to_string(),
true,
universal,
)
),
)
.replace(
"{release_year}",
&sanitize(self.release_year.to_string(), true, universal),
)
.replace(
"{release_month}",
&format!(
"{:0>2}",
sanitize(self.release_month.to_string(), true, universal)
),
)
.replace(
"{release_day}",
&format!(
"{:0>2}",
sanitize(self.release_day.to_string(), true, universal)
),
);
let mut path = PathBuf::from(path);
// make sure that every path section has a maximum of 255 characters
if path.file_name().unwrap_or_default().to_string_lossy().len() > 255 {
let name = path
.file_stem()
.unwrap_or_default()
.to_string_lossy()
.to_string();
let ext = path
.extension()
.unwrap_or_default()
.to_string_lossy()
.to_string();
if ext != name {
path.set_file_name(format!("{}.{}", &name[..(255 - ext.len() - 1)], ext))
}
}
path.iter()
.map(|s| {
if s.len() > 255 {
s.to_string_lossy()[..255].to_string()
} else {
s.to_string_lossy().to_string()
}
})
.collect()
}
pub fn visual_output(&self, dst: &Path) {
info!(
"Downloading {} to {}",
self.title,
if is_special_file(dst) || dst.to_str().unwrap() == "-" {
dst.to_string_lossy().to_string()
} else {
format!("'{}'", dst.to_str().unwrap())
@ -451,11 +588,16 @@ impl Format {
.collect::<Vec<String>>()
.join(", ")
);
tab_info!("Resolution: {}x{}", self.height, self.width);
tab_info!("FPS: {:.2}", self.fps)
}
pub fn is_special(&self) -> bool {
self.sequence_number == 0.0 || self.sequence_number.fract() != 0.0
}
pub fn has_relative_fmt<S: AsRef<str>>(s: S) -> bool {
return s.as_ref().contains("{relative_episode_number}")
|| s.as_ref().contains("{relative_sequence_number}");
}
}
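The two less obvious formatting details above are the `{audio}` delimiter, which is read from the `CRUNCHY_CLI_FORMAT_DELIMITER` environment variable with `_` as fallback, and the two-digit zero padding of numeric placeholders. A minimal standalone sketch (the locale strings and the function name are made up for illustration):

fn format_details_sketch() {
    // Join multiple audio locales with the configured delimiter, defaulting to "_".
    let delimiter = std::env::var("CRUNCHY_CLI_FORMAT_DELIMITER").unwrap_or_else(|_| "_".to_string());
    let audio = ["ja-JP", "de-DE"].join(delimiter.as_str());
    assert_eq!(audio, "ja-JP_de-DE"); // assuming the variable is unset

    // Numeric placeholders like {episode_number} are left-padded to two digits.
    assert_eq!(format!("{:0>2}", 7.to_string()), "07");
    assert_eq!(format!("{:0>2}", 11.5.to_string()), "11.5"); // already wider than 2 chars
}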

View file

@ -1,4 +1,124 @@
use crunchyroll_rs::Locale;
use log::warn;
#[derive(Clone, Debug)]
#[allow(clippy::upper_case_acronyms)]
pub enum LanguageTagging {
Default,
IETF,
}
impl LanguageTagging {
pub fn parse(s: &str) -> Result<Self, String> {
Ok(match s.to_lowercase().as_str() {
"default" => Self::Default,
"ietf" => Self::IETF,
_ => return Err(format!("'{}' is not a valid language tagging", s)),
})
}
pub fn convert_locales(&self, locales: &[Locale]) -> Vec<String> {
let ietf_language_codes = ietf_language_codes();
let mut converted = vec![];
match &self {
LanguageTagging::Default => {
for locale in locales {
let Some((_, available)) =
ietf_language_codes.iter().find(|(_, l)| l.contains(locale))
else {
// if no matching IETF language code was found, just pass it as it is
converted.push(locale.to_string());
continue;
};
converted.push(available.first().unwrap().to_string())
}
}
LanguageTagging::IETF => {
for locale in locales {
let Some((tag, _)) =
ietf_language_codes.iter().find(|(_, l)| l.contains(locale))
else {
// if no matching IETF language code was found, just pass it as it is
converted.push(locale.to_string());
continue;
};
converted.push(tag.to_string())
}
}
}
converted
}
pub fn for_locale(&self, locale: &Locale) -> String {
match &self {
LanguageTagging::Default => ietf_language_codes()
.iter()
.find(|(_, l)| l.contains(locale))
.map_or(locale.to_string(), |(_, l)| l[0].to_string()),
LanguageTagging::IETF => ietf_language_codes()
.iter()
.find(|(_, l)| l.contains(locale))
.map_or(locale.to_string(), |(tag, _)| tag.to_string()),
}
}
}
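A minimal usage sketch of the tagging helpers above (assuming `LanguageTagging` and `Locale` are in scope; the function name is made up):

fn language_tagging_sketch() {
    let tagging = LanguageTagging::parse("ietf").unwrap();

    // Single locales map onto their primary IETF tag...
    assert_eq!(tagging.for_locale(&Locale::ja_JP), "ja");

    // ...and whole lists can be converted at once; unknown locales pass through unchanged.
    assert_eq!(
        tagging.convert_locales(&[Locale::pt_BR, Locale::de_DE]),
        vec!["pt".to_string(), "de".to_string()]
    );
}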
pub fn resolve_locales(locales: &[Locale]) -> Vec<Locale> {
let ietf_language_codes = ietf_language_codes();
let all_locales = Locale::all();
let mut resolved = vec![];
for locale in locales {
if all_locales.contains(locale) {
resolved.push(locale.clone())
} else if let Some((_, resolved_locales)) = ietf_language_codes
.iter()
.find(|(tag, _)| tag == &locale.to_string().as_str())
{
let (first, alternatives) = resolved_locales.split_first().unwrap();
resolved.push(first.clone());
// ignoring `Locale::en_IN` because I think the majority of users who want English
// audio / subs want the "actual" English version and not the Hindi accent dub
if !alternatives.is_empty() && resolved_locales.first().unwrap() != &Locale::en_IN {
warn!("Resolving locale '{}' to '{}', but there are some alternatives: {}. If you want an alternative instead, please write it out in full instead of '{}'", locale, first, alternatives.iter().map(|l| format!("'{l}'")).collect::<Vec<String>>().join(", "), locale)
}
} else {
resolved.push(locale.clone());
warn!("Unknown locale '{}'", locale)
}
}
resolved
}
fn ietf_language_codes<'a>() -> Vec<(&'a str, Vec<Locale>)> {
vec![
("ar", vec![Locale::ar_ME, Locale::ar_SA]),
("ca", vec![Locale::ca_ES]),
("de", vec![Locale::de_DE]),
("en", vec![Locale::en_US, Locale::hi_IN]),
("es", vec![Locale::es_ES, Locale::es_419, Locale::es_LA]),
("fr", vec![Locale::fr_FR]),
("hi", vec![Locale::hi_IN]),
("id", vec![Locale::id_ID]),
("it", vec![Locale::it_IT]),
("ja", vec![Locale::ja_JP]),
("ko", vec![Locale::ko_KR]),
("ms", vec![Locale::ms_MY]),
("pl", vec![Locale::pl_PL]),
("pt", vec![Locale::pt_PT, Locale::pt_BR]),
("ru", vec![Locale::ru_RU]),
("ta", vec![Locale::ta_IN]),
("te", vec![Locale::te_IN]),
("th", vec![Locale::th_TH]),
("tr", vec![Locale::tr_TR]),
("vi", vec![Locale::vi_VN]),
("zh", vec![Locale::zh_CN, Locale::zh_HK, Locale::zh_TW]),
]
}
/// Return the locale of the system.
pub fn system_locale() -> Locale {
@ -19,8 +139,7 @@ pub fn system_locale() -> Locale {
pub fn all_locale_in_locales(locales: Vec<Locale>) -> Vec<Locale> {
if locales
.iter()
.any(|l| l.to_string().to_lowercase().trim() == "all")
{
Locale::all()
} else {

View file

@ -57,7 +57,6 @@ macro_rules! tab_info {
}
pub(crate) use tab_info;
pub struct CliLogger {
level: LevelFilter,
progress: Mutex<Option<ProgressBar>>,

View file

@ -3,10 +3,13 @@ pub mod context;
pub mod download;
pub mod ffmpeg;
pub mod filter;
pub mod fmt;
pub mod format;
pub mod interactive_select;
pub mod locale;
pub mod log;
pub mod os;
pub mod parse;
pub mod rate_limit;
pub mod sync;
pub mod video;

View file

@ -1,9 +1,14 @@
use log::debug;
use regex::{Regex, RegexBuilder};
use std::borrow::Cow;
use std::io::ErrorKind;
use std::path::{Path, PathBuf};
use std::pin::Pin;
use std::process::{Command, Stdio};
use std::task::{Context, Poll};
use std::{env, fs, io};
use tempfile::{Builder, NamedTempFile, TempPath};
use tokio::io::{AsyncRead, ReadBuf};
pub fn has_ffmpeg() -> bool {
if let Err(e) = Command::new("ffmpeg").stderr(Stdio::null()).spawn() {
@ -22,11 +27,11 @@ pub fn has_ffmpeg() -> bool {
/// Get the temp directory either by the specified `CRUNCHY_CLI_TEMP_DIR` env variable or the dir
/// provided by the os.
pub fn temp_directory() -> PathBuf {
env::var("CRUNCHY_CLI_TEMP_DIR").map_or(env::temp_dir(), PathBuf::from)
}
/// Any tempfile should be created with this function. The prefix and directory of every file
/// created with this function stays the same, which is helpful to query all existing tempfiles
/// and e.g. remove them in case of ctrl-c. Having one function is also good to prevent mistakes
/// like setting the wrong prefix if done manually.
pub fn tempfile<S: AsRef<str>>(suffix: S) -> io::Result<NamedTempFile> {
@ -41,6 +46,98 @@ pub fn tempfile<S: AsRef<str>>(suffix: S) -> io::Result<NamedTempFile> {
Ok(tempfile)
}
pub fn cache_dir<S: AsRef<str>>(name: S) -> io::Result<PathBuf> {
let cache_dir = temp_directory().join(format!(".crunchy-cli_{}_cache", name.as_ref()));
fs::create_dir_all(&cache_dir)?;
Ok(cache_dir)
}
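A hypothetical use of `cache_dir`; the "audio-sync" name and file are made up here, but any per-feature cache ends up under the crunchy-cli temp directory and is therefore easy to locate and clean up:

fn cache_sketch() -> std::io::Result<()> {
    // Creates (or reuses) ".crunchy-cli_audio-sync_cache" inside the temp directory.
    let dir = cache_dir("audio-sync")?;
    std::fs::write(dir.join("example.bin"), b"cached bytes")?;
    Ok(())
}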
pub struct TempNamedPipe {
path: TempPath,
#[cfg(not(target_os = "windows"))]
reader: tokio::net::unix::pipe::Receiver,
#[cfg(target_os = "windows")]
file: tokio::fs::File,
}
impl TempNamedPipe {
pub fn path(&self) -> &Path {
&self.path
}
}
impl AsyncRead for TempNamedPipe {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<io::Result<()>> {
#[cfg(not(target_os = "windows"))]
return Pin::new(&mut self.reader).poll_read(cx, buf);
// very very dirty implementation of a 'tail' like behavior
#[cfg(target_os = "windows")]
{
let mut tmp_bytes = vec![0; buf.remaining()];
let mut tmp_buf = ReadBuf::new(tmp_bytes.as_mut_slice());
loop {
return match Pin::new(&mut self.file).poll_read(cx, &mut tmp_buf) {
Poll::Ready(r) => {
if r.is_ok() {
if !tmp_buf.filled().is_empty() {
buf.put_slice(tmp_buf.filled())
} else {
// sleep to not loop insanely fast and consume unnecessary system resources
std::thread::sleep(std::time::Duration::from_millis(50));
continue;
}
}
Poll::Ready(r)
}
Poll::Pending => Poll::Pending,
};
}
}
}
}
impl Drop for TempNamedPipe {
fn drop(&mut self) {
#[cfg(not(target_os = "windows"))]
let _ = nix::unistd::unlink(self.path.to_string_lossy().to_string().as_str());
}
}
pub fn temp_named_pipe() -> io::Result<TempNamedPipe> {
let tmp = tempfile("")?;
#[cfg(not(target_os = "windows"))]
{
let path = tmp.into_temp_path();
let _ = fs::remove_file(&path);
nix::unistd::mkfifo(
path.to_string_lossy().to_string().as_str(),
nix::sys::stat::Mode::S_IRWXU,
)?;
Ok(TempNamedPipe {
reader: tokio::net::unix::pipe::OpenOptions::new().open_receiver(&path)?,
path,
})
}
#[cfg(target_os = "windows")]
{
let (file, path) = tmp.into_parts();
Ok(TempNamedPipe {
file: tokio::fs::File::from_std(file),
path,
})
}
}
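A Unix-oriented sketch of how the pipe could be consumed (the writer here is a stand-in; in practice the path would typically be handed to an external process such as ffmpeg). Note that the Windows fallback behaves like `tail` and never reports EOF, so `read_to_end` is only meaningful on the FIFO variant:

async fn pipe_sketch() -> std::io::Result<()> {
    use tokio::io::AsyncReadExt;

    let mut pipe = temp_named_pipe()?;
    // Stand-in writer: the FIFO buffers the bytes until they are read below.
    std::fs::write(pipe.path(), b"raw bytes")?;

    let mut buf = Vec::new();
    pipe.read_to_end(&mut buf).await?; // returns once all writers have closed (Unix)
    assert_eq!(buf, b"raw bytes".to_vec());
    Ok(())
}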
/// Check if the given path exists and rename it until the new (renamed) file does not exist.
pub fn free_file(mut path: PathBuf) -> (PathBuf, bool) {
// do not rename it if it exists but is a special file
@ -78,3 +175,51 @@ pub fn free_file(mut path: PathBuf) -> (PathBuf, bool) {
pub fn is_special_file<P: AsRef<Path>>(path: P) -> bool {
path.as_ref().exists() && !path.as_ref().is_file() && !path.as_ref().is_dir()
}
lazy_static::lazy_static! {
static ref WINDOWS_NON_PRINTABLE_RE: Regex = Regex::new(r"[\x00-\x1f\x80-\x9f]").unwrap();
static ref WINDOWS_ILLEGAL_RE: Regex = Regex::new(r#"[<>:"|?*]"#).unwrap();
static ref WINDOWS_RESERVED_RE: Regex = RegexBuilder::new(r"(?i)^(con|prn|aux|nul|com[0-9]|lpt[0-9])(\..*)?$")
.case_insensitive(true)
.build()
.unwrap();
static ref WINDOWS_TRAILING_RE: Regex = Regex::new(r"[\. ]+$").unwrap();
static ref LINUX_NON_PRINTABLE: Regex = Regex::new(r"[\x00]").unwrap();
static ref RESERVED_RE: Regex = Regex::new(r"^\.+$").unwrap();
}
/// Sanitizes a filename with the option to include/exclude the path separator from sanitizing.
pub fn sanitize<S: AsRef<str>>(path: S, include_path_separator: bool, universal: bool) -> String {
let path = Cow::from(path.as_ref().trim());
let path = RESERVED_RE.replace(&path, "");
let collect = |name: String| {
if name.len() > 255 {
name[..255].to_string()
} else {
name
}
};
if universal || cfg!(windows) {
let path = WINDOWS_NON_PRINTABLE_RE.replace_all(&path, "");
let path = WINDOWS_ILLEGAL_RE.replace_all(&path, "");
let path = WINDOWS_RESERVED_RE.replace_all(&path, "");
let path = WINDOWS_TRAILING_RE.replace(&path, "");
let mut path = path.to_string();
if include_path_separator {
path = path.replace(['\\', '/'], "");
}
collect(path)
} else {
let path = LINUX_NON_PRINTABLE.replace_all(&path, "");
let mut path = path.to_string();
if include_path_separator {
path = path.replace('/', "");
}
collect(path)
}
}
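A quick sketch of what the rules above produce; the expected values are derived from the regexes, so treat them as illustrative rather than authoritative:

fn sanitize_sketch() {
    // With `universal = true` the Windows rules apply on every platform.
    assert_eq!(sanitize("Episode 01: \"Pilot\"?", true, true), "Episode 01 Pilot");
    // Reserved Windows device names are stripped entirely.
    assert_eq!(sanitize("con.mp4", true, true), "");
    // `include_path_separator = true` removes separators instead of keeping them.
    assert_eq!(sanitize("Arc 1/Episode 2", true, true), "Arc 1Episode 2");
}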

View file

@ -10,8 +10,8 @@ use regex::Regex;
/// If `to_*` is [`None`] they're set to [`u32::MAX`].
#[derive(Debug, Default)]
pub struct InnerUrlFilter {
from_episode: Option<f32>,
to_episode: Option<f32>,
from_season: Option<u32>,
to_season: Option<u32>,
}
@ -39,17 +39,20 @@ impl UrlFilter {
})
}
pub fn is_episode_valid(&self, episode: f32, season: u32) -> bool {
self.inner.iter().any(|f| {
let from_episode = f.from_episode.unwrap_or(f32::MIN);
let to_episode = f.to_episode.unwrap_or(f32::MAX);
let from_season = f.from_season.unwrap_or(u32::MIN);
let to_season = f.to_season.unwrap_or(u32::MAX);
if season < from_season || season > to_season {
false
} else if season == from_season || (f.from_season.is_none() && f.to_season.is_none()) {
episode >= from_episode && episode <= to_episode
} else {
true
}
})
}
}
@ -135,7 +138,7 @@ pub async fn parse_url(
let old_url_regex = Regex::new(r"https?://(www\.)?crunchyroll\.com/.+").unwrap();
if old_url_regex.is_match(&url) {
debug!("Detected maybe old url");
// replace the 'http' prefix with 'https' as http is not supported by the reqwest client
if url.starts_with("http://") {
url.replace_range(0..4, "https")
}
@ -144,7 +147,7 @@ pub async fn parse_url(
url = crunchy.client().get(&url).send().await?.url().to_string()
}
let parsed_url = crunchyroll_rs::parse_url(url).ok_or(anyhow!("Invalid url"))?;
debug!("Url type: {:?}", parsed_url);
let media_collection = match parsed_url {
UrlType::Series(id)
@ -192,3 +195,13 @@ pub fn parse_resolution(mut resolution: String) -> Result<Resolution> {
bail!("Could not find resolution")
}
}
/// Dirty implementation of [`f32::fract`] with more accuracy.
pub fn fract(input: f32) -> f32 {
if input.fract() == 0.0 {
return 0.0;
}
format!("0.{}", input.to_string().split('.').last().unwrap())
.parse::<f32>()
.unwrap()
}
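The point of the string round-trip is easiest to see next to the builtin: `f32::fract` keeps the binary rounding error of the whole number, while the variant above reparses only the decimal digits. A small sketch (function name made up):

fn fract_sketch() {
    // 11.1f32 is really ~11.100000381..., so the builtin fract inherits that error.
    assert!((11.1f32.fract() - 0.1).abs() > 1e-7);
    // The string-based variant yields exactly the f32 closest to 0.1.
    assert_eq!(fract(11.1), 0.1);
    assert_eq!(fract(12.0), 0.0);
}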

View file

@ -0,0 +1,73 @@
use async_speed_limit::Limiter;
use crunchyroll_rs::error::Error;
use futures_util::TryStreamExt;
use reqwest::{Client, Request, Response, ResponseBuilderExt};
use std::future::Future;
use std::io;
use std::pin::Pin;
use std::sync::Arc;
use std::task::{Context, Poll};
use tower_service::Service;
#[derive(Clone)]
pub struct RateLimiterService {
client: Arc<Client>,
rate_limiter: Limiter,
}
impl RateLimiterService {
pub fn new(bytes: u32, client: Client) -> Self {
Self {
client: Arc::new(client),
rate_limiter: Limiter::new(bytes as f64),
}
}
}
impl Service<Request> for RateLimiterService {
type Response = Response;
type Error = Error;
type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;
fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, req: Request) -> Self::Future {
let client = self.client.clone();
let rate_limiter = self.rate_limiter.clone();
Box::pin(async move {
let mut body = vec![];
let res = client.execute(req).await?;
let _url = res.url().clone().to_string();
let url = _url.as_str();
let mut http_res = http::Response::builder()
.url(res.url().clone())
.status(res.status())
.version(res.version());
*http_res.headers_mut().unwrap() = res.headers().clone();
http_res
.extensions_ref()
.unwrap()
.clone_from(&res.extensions());
let limiter = rate_limiter.limit(
res.bytes_stream()
.map_err(io::Error::other)
.into_async_read(),
);
futures_util::io::copy(limiter, &mut body)
.await
.map_err(|e| Error::Request {
url: url.to_string(),
status: None,
message: e.to_string(),
})?;
Ok(Response::from(http_res.body(body).unwrap()))
})
}
}
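A hypothetical standalone call through the `tower_service::Service` interface; in crunchy-cli the service is presumably installed as client middleware, but driving it directly looks roughly like this (the URL, limit, and function name are made up):

async fn rate_limit_sketch() -> anyhow::Result<()> {
    use tower_service::Service;

    let client = Client::new();
    // Cap the transfer at roughly 1 MiB/s.
    let mut limited = RateLimiterService::new(1024 * 1024, client.clone());

    let request = client.get("https://example.com/some/large/file").build()?;
    std::future::poll_fn(|cx| limited.poll_ready(cx)).await?;
    let response = limited.call(request).await?;
    println!("downloaded {} bytes", response.bytes().await?.len());
    Ok(())
}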

View file

@ -0,0 +1,432 @@
use std::io::Read;
use std::process::Stdio;
use std::{
cmp,
collections::{HashMap, HashSet},
mem,
ops::Not,
path::Path,
process::Command,
};
use chrono::TimeDelta;
use crunchyroll_rs::Locale;
use log::debug;
use tempfile::TempPath;
use anyhow::{bail, Result};
use rusty_chromaprint::{Configuration, Fingerprinter};
use super::fmt::format_time_delta;
pub struct SyncAudio {
pub format_id: usize,
pub path: TempPath,
pub locale: Locale,
pub sample_rate: u32,
pub video_idx: usize,
}
#[derive(Debug, Clone, Copy)]
struct TimeRange {
start: f64,
end: f64,
}
pub fn sync_audios(
available_audios: &Vec<SyncAudio>,
sync_tolerance: u32,
sync_precision: u32,
) -> Result<Option<HashMap<usize, TimeDelta>>> {
let mut result: HashMap<usize, TimeDelta> = HashMap::new();
let mut sync_audios = vec![];
let mut chromaprints = HashMap::new();
let mut formats = HashSet::new();
for audio in available_audios {
if formats.contains(&audio.format_id) {
continue;
}
formats.insert(audio.format_id);
sync_audios.push((audio.format_id, &audio.path, audio.sample_rate));
chromaprints.insert(
audio.format_id,
generate_chromaprint(
&audio.path,
audio.sample_rate,
&TimeDelta::zero(),
&TimeDelta::zero(),
&TimeDelta::zero(),
)?,
);
}
sync_audios.sort_by_key(|sync_audio| chromaprints.get(&sync_audio.0).unwrap().len());
let base_audio = sync_audios.remove(0);
let mut start = f64::MAX;
let mut end = f64::MIN;
let mut initial_offsets = HashMap::new();
for audio in &sync_audios {
debug!(
"Initial comparison of format {} to {}",
audio.0, &base_audio.0
);
let (lhs_ranges, rhs_ranges) = compare_chromaprints(
chromaprints.get(&base_audio.0).unwrap(),
chromaprints.get(&audio.0).unwrap(),
sync_tolerance,
);
if lhs_ranges.is_empty() || rhs_ranges.is_empty() {
bail!(
"Failed to sync videos, couldn't find matching audio parts between format {} and {}",
base_audio.0 + 1,
audio.0 + 1
);
}
let lhs_range = lhs_ranges[0];
let rhs_range = rhs_ranges[0];
start = start.min(lhs_range.start);
end = end.max(lhs_range.end);
start = start.min(rhs_range.start);
end = end.max(rhs_range.end);
let offset = TimeDelta::milliseconds(((rhs_range.start - lhs_range.start) * 1000.0) as i64);
initial_offsets.insert(audio.0, TimeDelta::zero().checked_sub(&offset).unwrap());
debug!(
"Found initial offset of {}ms ({} - {} {}s) ({} - {} {}s) for format {} to {}",
offset.num_milliseconds(),
lhs_range.start,
lhs_range.end,
lhs_range.end - lhs_range.start,
rhs_range.start,
rhs_range.end,
rhs_range.end - rhs_range.start,
audio.0,
base_audio.0
);
}
debug!(
"Found matching audio parts at {} - {}, narrowing search",
start, end
);
let start = TimeDelta::milliseconds((start * 1000.0) as i64 - 20000);
let end = TimeDelta::milliseconds((end * 1000.0) as i64 + 20000);
for sync_audio in &sync_audios {
let chromaprint = generate_chromaprint(
sync_audio.1,
sync_audio.2,
&start,
&end,
initial_offsets.get(&sync_audio.0).unwrap(),
)?;
chromaprints.insert(sync_audio.0, chromaprint);
}
let mut runs: HashMap<usize, i64> = HashMap::new();
let iterator_range_limits: i64 = 2 ^ sync_precision as i64;
for i in -iterator_range_limits..=iterator_range_limits {
let base_offset = TimeDelta::milliseconds(
((0.128 / iterator_range_limits as f64 * i as f64) * 1000.0) as i64,
);
chromaprints.insert(
base_audio.0,
generate_chromaprint(base_audio.1, base_audio.2, &start, &end, &base_offset)?,
);
for audio in &sync_audios {
let initial_offset = initial_offsets.get(&audio.0).copied().unwrap();
let offset = find_offset(
(&base_audio.0, chromaprints.get(&base_audio.0).unwrap()),
&base_offset,
(&audio.0, chromaprints.get(&audio.0).unwrap()),
&initial_offset,
&start,
sync_tolerance,
);
if offset.is_none() {
continue;
}
let offset = offset.unwrap();
result.insert(
audio.0,
result
.get(&audio.0)
.copied()
.unwrap_or_default()
.checked_add(&offset)
.unwrap(),
);
runs.insert(audio.0, runs.get(&audio.0).copied().unwrap_or_default() + 1);
}
}
let mut result: HashMap<usize, TimeDelta> = result
.iter()
.map(|(format_id, offset)| {
(
*format_id,
TimeDelta::milliseconds(
offset.num_milliseconds() / runs.get(format_id).copied().unwrap(),
),
)
})
.collect();
result.insert(base_audio.0, TimeDelta::milliseconds(0));
Ok(Some(result))
}
fn find_offset(
lhs: (&usize, &Vec<u32>),
lhs_shift: &TimeDelta,
rhs: (&usize, &Vec<u32>),
rhs_shift: &TimeDelta,
start: &TimeDelta,
sync_tolerance: u32,
) -> Option<TimeDelta> {
let (lhs_ranges, rhs_ranges) = compare_chromaprints(lhs.1, rhs.1, sync_tolerance);
if lhs_ranges.is_empty() || rhs_ranges.is_empty() {
return None;
}
let lhs_range = lhs_ranges[0];
let rhs_range = rhs_ranges[0];
let offset = rhs_range.end - lhs_range.end;
let offset = TimeDelta::milliseconds((offset * 1000.0) as i64)
.checked_add(lhs_shift)?
.checked_sub(rhs_shift)?;
debug!(
"Found offset of {}ms ({} - {} {}s) ({} - {} {}s) for format {} to {}",
offset.num_milliseconds(),
lhs_range.start + start.num_milliseconds() as f64 / 1000.0,
lhs_range.end + start.num_milliseconds() as f64 / 1000.0,
lhs_range.end - lhs_range.start,
rhs_range.start + start.num_milliseconds() as f64 / 1000.0,
rhs_range.end + start.num_milliseconds() as f64 / 1000.0,
rhs_range.end - rhs_range.start,
rhs.0,
lhs.0
);
Some(offset)
}
fn generate_chromaprint(
input_file: &Path,
sample_rate: u32,
start: &TimeDelta,
end: &TimeDelta,
offset: &TimeDelta,
) -> Result<Vec<u32>> {
let mut ss_argument: &TimeDelta = &start.checked_sub(offset).unwrap();
let mut offset_argument = &TimeDelta::zero();
if *offset < TimeDelta::zero() {
ss_argument = start;
offset_argument = offset;
};
let mut printer = Fingerprinter::new(&Configuration::preset_test1());
printer.start(sample_rate, 2)?;
let mut command = Command::new("ffmpeg");
command
.arg("-hide_banner")
.arg("-y")
.args(["-ss", format_time_delta(ss_argument).as_str()]);
if end.is_zero().not() {
command.args(["-to", format_time_delta(end).as_str()]);
}
command
.args(["-itsoffset", format_time_delta(offset_argument).as_str()])
.args(["-i", input_file.to_string_lossy().to_string().as_str()])
.args(["-ac", "2"])
.args([
"-f",
if cfg!(target_endian = "big") {
"s16be"
} else {
"s16le"
},
])
.arg("-");
let mut handle = command
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()?;
// the stdout is read in chunks because keeping all the raw audio data in memory would take up
// a significant amount of space
let mut stdout = handle.stdout.take().unwrap();
let mut buf: [u8; 128_000] = [0; 128_000];
while handle.try_wait()?.is_none() {
loop {
let read_bytes = stdout.read(&mut buf)?;
if read_bytes == 0 {
break;
}
let data: [i16; 64_000] = unsafe { mem::transmute(buf) };
printer.consume(&data[0..(read_bytes / 2)])
}
}
if !handle.wait()?.success() {
bail!("{}", std::io::read_to_string(handle.stderr.unwrap())?)
}
printer.finish();
return Ok(printer.fingerprint().into());
}
fn compare_chromaprints(
lhs_chromaprint: &Vec<u32>,
rhs_chromaprint: &Vec<u32>,
sync_tolerance: u32,
) -> (Vec<TimeRange>, Vec<TimeRange>) {
let lhs_inverse_index = create_inverse_index(lhs_chromaprint);
let rhs_inverse_index = create_inverse_index(rhs_chromaprint);
let mut possible_shifts = HashSet::new();
for lhs_pair in lhs_inverse_index {
let original_point = lhs_pair.0;
for i in -2..=2 {
let modified_point = (original_point as i32 + i) as u32;
if rhs_inverse_index.contains_key(&modified_point) {
let rhs_index = rhs_inverse_index.get(&modified_point).copied().unwrap();
possible_shifts.insert(rhs_index as i32 - lhs_pair.1 as i32);
}
}
}
let mut all_lhs_time_ranges = vec![];
let mut all_rhs_time_ranges = vec![];
for shift_amount in possible_shifts {
let time_range_pair = find_time_ranges(
lhs_chromaprint,
rhs_chromaprint,
shift_amount,
sync_tolerance,
);
if time_range_pair.is_none() {
continue;
}
let (mut lhs_time_ranges, mut rhs_time_ranges) = time_range_pair.unwrap();
let mut lhs_time_ranges: Vec<TimeRange> = lhs_time_ranges
.drain(..)
.filter(|time_range| {
(20.0 < (time_range.end - time_range.start))
&& ((time_range.end - time_range.start) < 180.0)
&& time_range.end > 0.0
})
.collect();
lhs_time_ranges.sort_by(|a, b| (b.end - b.start).total_cmp(&(a.end - a.start)));
let mut rhs_time_ranges: Vec<TimeRange> = rhs_time_ranges
.drain(..)
.filter(|time_range| {
(20.0 < (time_range.end - time_range.start))
&& ((time_range.end - time_range.start) < 180.0)
&& time_range.end > 0.0
})
.collect();
rhs_time_ranges.sort_by(|a, b| (b.end - b.start).total_cmp(&(a.end - a.start)));
if lhs_time_ranges.is_empty() || rhs_time_ranges.is_empty() {
continue;
}
all_lhs_time_ranges.push(lhs_time_ranges[0]);
all_rhs_time_ranges.push(rhs_time_ranges[0]);
}
all_lhs_time_ranges.sort_by(|a, b| (a.end - a.start).total_cmp(&(b.end - b.start)));
all_lhs_time_ranges.reverse();
all_rhs_time_ranges.sort_by(|a, b| (a.end - a.start).total_cmp(&(b.end - b.start)));
all_rhs_time_ranges.reverse();
(all_lhs_time_ranges, all_rhs_time_ranges)
}
fn create_inverse_index(chromaprint: &Vec<u32>) -> HashMap<u32, usize> {
let mut inverse_index = HashMap::with_capacity(chromaprint.capacity());
for (i, fingerprint) in chromaprint.iter().enumerate().take(chromaprint.capacity()) {
inverse_index.insert(*fingerprint, i);
}
inverse_index
}
fn find_time_ranges(
lhs_chromaprint: &[u32],
rhs_chromaprint: &[u32],
shift_amount: i32,
sync_tolerance: u32,
) -> Option<(Vec<TimeRange>, Vec<TimeRange>)> {
let mut lhs_shift: i32 = 0;
let mut rhs_shift: i32 = 0;
if shift_amount < 0 {
lhs_shift -= shift_amount;
} else {
rhs_shift += shift_amount;
}
let mut lhs_matching_timestamps = vec![];
let mut rhs_matching_timestamps = vec![];
let upper_limit =
cmp::min(lhs_chromaprint.len(), rhs_chromaprint.len()) as i32 - shift_amount.abs();
for i in 0..upper_limit {
let lhs_position = i + lhs_shift;
let rhs_position = i + rhs_shift;
let difference = (lhs_chromaprint[lhs_position as usize]
^ rhs_chromaprint[rhs_position as usize])
.count_ones();
if difference > sync_tolerance {
continue;
}
lhs_matching_timestamps.push(lhs_position as f64 * 0.128);
rhs_matching_timestamps.push(rhs_position as f64 * 0.128);
}
lhs_matching_timestamps.push(f64::MAX);
rhs_matching_timestamps.push(f64::MAX);
let lhs_time_ranges = timestamps_to_ranges(lhs_matching_timestamps);
lhs_time_ranges.as_ref()?;
let lhs_time_ranges = lhs_time_ranges.unwrap();
let rhs_time_ranges = timestamps_to_ranges(rhs_matching_timestamps).unwrap();
Some((lhs_time_ranges, rhs_time_ranges))
}
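The per-frame match above is a plain Hamming-distance check: two fingerprint values count as matching when their XOR has at most `sync_tolerance` bits set. A tiny illustration with made-up values:

fn tolerance_sketch() {
    let lhs: u32 = 0b1010_1100;
    let rhs: u32 = 0b1000_1001;
    let difference = (lhs ^ rhs).count_ones();
    assert_eq!(difference, 3); // three differing bits
    assert!(difference <= 4); // a tolerance of 4 would accept this frame pair
    assert!(difference > 2); // a stricter tolerance of 2 would reject it
}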
fn timestamps_to_ranges(mut timestamps: Vec<f64>) -> Option<Vec<TimeRange>> {
if timestamps.is_empty() {
return None;
}
timestamps.sort_by(|a, b| a.total_cmp(b));
let mut time_ranges = vec![];
let mut current_range = TimeRange {
start: timestamps[0],
end: timestamps[0],
};
for i in 0..timestamps.len() - 1 {
let current = timestamps[i];
let next = timestamps[i + 1];
if next - current <= 1.0 {
current_range.end = next;
continue;
}
time_ranges.push(current_range);
current_range.start = next;
current_range.end = next;
}
if !time_ranges.is_empty() {
Some(time_ranges)
} else {
None
}
}

View file

@ -1,46 +1,46 @@
use anyhow::{bail, Result};
use crunchyroll_rs::media::{Resolution, Stream, StreamData};
use crunchyroll_rs::Locale;
pub async fn stream_data_from_stream(
stream: &Stream,
resolution: &Resolution,
hardsub_subtitle: Option<Locale>,
) -> Result<Option<(StreamData, StreamData, bool)>> {
let (hardsub_locale, mut contains_hardsub) = if hardsub_subtitle.is_some() {
(hardsub_subtitle, true)
} else {
(None, false)
};
let (mut videos, mut audios) = match stream.stream_data(hardsub_locale).await {
Ok(data) => data,
Err(e) => {
// the error variant is only `crunchyroll_rs::error::Error::Input` when the requested
// hardsub is not available
if let crunchyroll_rs::error::Error::Input { .. } = e {
contains_hardsub = false;
stream.stream_data(None).await?
} else {
bail!(e)
}
}
}
.unwrap();
if videos.iter().any(|v| v.drm.is_some()) || audios.iter().any(|v| v.drm.is_some()) {
bail!("Stream is DRM protected")
}
videos.sort_by(|a, b| a.bandwidth.cmp(&b.bandwidth).reverse());
audios.sort_by(|a, b| a.bandwidth.cmp(&b.bandwidth).reverse());
let video_variant = match resolution.height {
u64::MAX => Some(videos.into_iter().next().unwrap()),
u64::MIN => Some(videos.into_iter().last().unwrap()),
_ => videos
.into_iter()
.find(|v| resolution.height == v.resolution().unwrap().height),
};
Ok(video_variant.map(|v| (v, audios.first().unwrap().clone(), contains_hardsub)))
}

flake.lock (generated, 12 changes)
View file

@ -2,11 +2,11 @@
"nodes": { "nodes": {
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1692128808, "lastModified": 1710534455,
"narHash": "sha256-Di1Zm/P042NuwThMiZNrtmaAjd4Tm2qBOKHX7xUOfMk=", "narHash": "sha256-huQT4Xs0y4EeFKn2BTBVYgEwJSv8SDlm82uWgMnCMmI=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "4ed9856be002a730234a1a1ed9dcd9dd10cbdb40", "rev": "9af9c1c87ed3e3ed271934cb896e0cdd33dae212",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -41,11 +41,11 @@
"systems": "systems" "systems": "systems"
}, },
"locked": { "locked": {
"lastModified": 1689068808, "lastModified": 1710146030,
"narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=", "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide", "owner": "numtide",
"repo": "flake-utils", "repo": "flake-utils",
"rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4", "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github" "type": "github"
}, },
"original": { "original": {

View file

@ -26,7 +26,7 @@
};
buildNoDefaultFeatures = true;
buildFeatures = [ "openssl-tls" ];
nativeBuildInputs = [
pkgs.pkg-config

View file

@ -8,5 +8,5 @@ compile_error!("At least one tls feature must be activated");
#[tokio::main]
async fn main() {
crunchy_cli_core::main(&std::env::args().collect::<Vec<String>>()).await
}